I am using the Node.js Google Cloud Storage module to get files from a bucket, but the bucket contains a huge number of files and folders, so the call takes a very long time to return. Any suggestions on how to make it faster, or more streamlined?
This is my code:
export const getFiles = async () => {
  const BUCKET_NAME = 'archive';
  const bucket = new Storage({ credentials: creds }).bucket(BUCKET_NAME);

  // Debug: fetch only the first page of results (autoPaginate: false)
  const [bucketFiles] = await bucket.getFiles({ autoPaginate: false });
  console.log(bucketFiles);

  // Note: this second getFiles() call re-lists the entire bucket with
  // auto-pagination, doubling the work already done above.
  return bucket
    .getFiles()
    .then(([files]) => {
      const filesResponse = files.map(({ metadata: file }) => ({
        cacheControl: file.cacheControl || "",
        contentEncoding: file.contentEncoding || "",
        contentType: file.contentType || "",
        version: file.generation,
        id: file.id,
        downloadLink: file.mediaLink,
        path: file.name,
        size: file.size,
        updated: file.updated,
        originalFileName: file.originalFileName,
      }));
      console.log({ bucket: bucket.name, files: filesResponse });
      return { bucket: bucket.name, files: filesResponse };
    })
    .catch((error) => console.log(error));
};
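If you only need part of the bucket, or can render results incrementally, you can page through the listing explicitly instead of letting the client fetch every object before returning. A minimal sketch, assuming the same Storage credentials as above; the 'uploads/' prefix and page size are placeholders:

export const getFilesPage = async (pageQuery) => {
  const bucket = new Storage({ credentials: creds }).bucket('archive');
  // First call: restrict the listing to one prefix and cap the page size.
  // Subsequent calls: pass back the nextQuery returned by the previous page.
  const query = pageQuery || { autoPaginate: false, maxResults: 500, prefix: 'uploads/' };
  const [files, nextQuery] = await bucket.getFiles(query);
  return {
    files: files.map((file) => ({ path: file.name, size: file.metadata.size })),
    nextQuery, // null when there are no more pages
  };
};

Narrowing the listing with prefix (and optionally delimiter) means GCS only returns the objects you actually need, which usually helps far more than any client-side change.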
Within my app a user can select a profile image, and I would like that image to be uploaded to an S3 bucket when the user saves their profile data.
I pass the image data (and JSON consisting of name, email, and telephone, for example) from my app to an Express server and upload there.
At present I can pass the image data (it seems to be the URL at the moment) to an S3 bucket and it saves.
I don't think I'm actually saving the image itself, though: when I download it from S3 (manually) and try to open it on my Mac, it states the file may be damaged and I cannot see the image.
I feel daft for asking, but how do I actually upload the image itself? Thanks.
React Native Side
const handleFormSubmit = formData => {
  const jsonData = JSON.stringify({
    ...formData,
  });
  // Handle profile image
  if (imageProps && imageProps.uri) {
    const data = new FormData();
    data.append('formBody', jsonData);
    data.append('image', {
      uri:
        Platform.OS === 'android'
          ? imageProps.uri
          : imageProps.uri.replace('file://', ''),
      type: imageProps.type,
      name: imageProps.fileName,
    });
    sendRequest(data);
  } else {
    sendRequest(jsonData);
  }
};
const sendRequest = data => {
  let responseData;
  fetch('http://localhost:8080/users/api/update_user_profile', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Accept: 'application/json',
    },
    body: data,
  })
    .then(response => {
      responseData = response;
      return response.json();
    })
    .then(jsonData => {
      console.log(jsonData)
    })
    .catch(error => {
      console.log(error)
    });
};
Server Side
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});

// Setting up S3 upload parameters
const params = {
  Bucket: 'bucket-folder',
  ACL: 'public-read',
  Key: req.files.image.name,
  Body: req.files.image.path
};

const stored = await s3.upload(params).promise();
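The likely reason the downloaded file appears damaged is the Body line above: it sets Body to req.files.image.path, so S3 stores the path string rather than the image's bytes. If you keep this direct-upload approach (the answer below suggests Multer instead), Body needs the actual contents. A minimal sketch, assuming whatever middleware populates req.files exposes a temp-file path as above:

const fs = require('fs');

// Body must be the file's bytes, not its path string; a read stream
// uploads the actual contents without buffering the whole file in memory.
const params = {
  Bucket: 'bucket-folder',
  ACL: 'public-read',
  Key: req.files.image.name,
  Body: fs.createReadStream(req.files.image.path)
};

const stored = await s3.upload(params).promise();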
You can use Multer for uploading files to S3.
const express = require('express');
const multer = require('multer');
const AWS = require('aws-sdk');
const uniqid = require('uniqid');

const router = express.Router();
const storage = multer.memoryStorage();
const upload = multer({ storage });

// Posts a new file to Amazon S3 and saves it to the DB
router.post(
  '/:id',
  upload.single('attachment'),
  async (req, res) => {
    const unique = uniqid.time();
    const { file } = req;
    const { filePath } = req.body;
    const { id } = req.params;
    const s3FileURL = process.env.AWS_UPLOADED_FILE_URL;
    const region = process.env.AWS_REGION;
    const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
    const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
    const Bucket = process.env.AWS_BUCKET_NAME + '/' + filePath;
    const Key = `${id}/${unique}-${file.originalname}`;
    const Body = file.buffer; // the actual file contents from memoryStorage
    const ContentType = file.mimetype;
    const ACL = 'public-read';
    const s3bucket = new AWS.S3({
      accessKeyId,
      secretAccessKey,
      region,
    });
    const params = {
      Bucket,
      Key,
      Body,
      ContentType,
      ACL,
    };
    s3bucket.upload(params, async (err, data) => {
      if (err) {
        res.status(500).json({ error: true, Message: err });
      } else {
        console.log(params);
        const newFileUploaded = {
          description: req.body.description,
          fileLink: `${s3FileURL}${filePath}/${id}/${unique}-${file.originalname}`,
          s3_key: params.Key,
        };
        try {
          // postFile is your own DB helper that persists the upload record
          const response = await postFile({
            name: req.body.name,
            attachment: newFileUploaded,
            alt: req.body.alt,
            user: req.body.user,
            relatedID: req.body.relatedID,
          });
          res.status(200).json({
            message: response.message,
            success: response.success,
            result: response.result,
          });
        } catch (e) {
          res.status(500).json({
            message: 'File uploaded but the DB could not save the request (upload by ID)',
            success: false,
            result: [],
          });
        }
      }
    });
  }
);
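Separately from the server-side change, note that the React Native sendRequest above sends a FormData body while hard-coding a 'Content-Type': 'application/json' header, so the multipart boundary never reaches the server. When posting FormData, leave Content-Type off and fetch sets it (including the boundary) automatically. A minimal sketch of the corrected call, assuming the same endpoint:

const sendFormData = data => {
  fetch('http://localhost:8080/users/api/update_user_profile', {
    method: 'POST',
    headers: {
      // No Content-Type here: fetch sets multipart/form-data with the
      // correct boundary when the body is a FormData instance.
      Accept: 'application/json',
    },
    body: data,
  })
    .then(response => response.json())
    .then(jsonData => console.log(jsonData))
    .catch(error => console.log(error));
};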
Whenever a user has to publish an image to storage, I don't do it directly with the Firebase Storage functions; instead I use an onCall cloud function, passing it a base64 image, which the function modifies (into five variants) and posts to storage.
The function is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawnSync } = require('child_process');
const { v4: uuid } = require('uuid');

exports.uploadImage = functions.https.onCall(async (data, context) => {
  var bucket = admin.storage().bucket();
  // Convert the base64 string back to an image
  var base64EncodedImageString = data.image,
    mimeType = 'image/jpeg',
    fileName = 'testName',
    imageBuffer = Buffer.from(base64EncodedImageString, 'base64');
  if (!fs.existsSync(os.tmpdir() + "/myfolder")) {
    fs.mkdirSync(os.tmpdir() + "/myfolder");
  }
  const tempFilePath = path.join(os.tmpdir(), "myfolder", fileName + ".jpg");
  // writeFileSync is synchronous and takes no callback
  fs.writeFileSync(tempFilePath, base64EncodedImageString, 'base64');
  functions.logger.log("file written to " + tempFilePath);
  await bucket.upload(tempFilePath, {
    destination: 'test/' + fileName,
    metadata: {
      contentType: mimeType,
      metadata: {
        firebaseStorageDownloadTokens: uuid()
      }
    },
  });
  const tempFilePath_25 = path.join(os.tmpdir(), "myfolder", fileName + "_25.jpg");
  spawnSync('convert', [tempFilePath, '-scale', '10%', '-scale', '1000%>', tempFilePath_25]);
  await bucket.upload(tempFilePath_25, {
    destination: 'test/' + fileName + "_25.jpg",
    metadata: {
      contentType: mimeType,
      metadata: {
        firebaseStorageDownloadTokens: uuid()
      }
    },
  });
  fs.unlinkSync(tempFilePath_25);
  const tempFilePath_50 = path.join(os.tmpdir(), "myfolder", fileName + "_50.jpg");
  spawnSync('convert', [tempFilePath, '-scale', '5%', '-scale', '2000%>', tempFilePath_50]);
  await bucket.upload(tempFilePath_50, {
    destination: 'test/' + fileName + "_50.jpg",
    metadata: {
      contentType: mimeType,
      metadata: {
        firebaseStorageDownloadTokens: uuid()
      }
    },
  });
  fs.unlinkSync(tempFilePath_50);
  const tempFilePath_75 = path.join(os.tmpdir(), "myfolder", fileName + "_75.jpg");
  spawnSync('convert', [tempFilePath, '-scale', '3%', '-scale', '3333%>', tempFilePath_75]);
  await bucket.upload(tempFilePath_75, {
    destination: 'test/' + fileName + "_75.jpg",
    metadata: {
      contentType: mimeType,
      metadata: {
        firebaseStorageDownloadTokens: uuid()
      }
    },
  });
  fs.unlinkSync(tempFilePath_75);
  const tempFilePath_100 = path.join(os.tmpdir(), "myfolder", fileName + "_100.jpg");
  spawnSync('convert', [tempFilePath, '-scale', '1%', '-scale', '10000%>', tempFilePath_100]);
  await bucket.upload(tempFilePath_100, {
    destination: 'test/' + fileName + "_100.jpg",
    metadata: {
      contentType: mimeType,
      metadata: {
        firebaseStorageDownloadTokens: uuid()
      }
    },
  });
  fs.unlinkSync(tempFilePath_100);
  fs.unlinkSync(tempFilePath);
});
I did a simulation with a for loop calling it every 2 seconds, and I get the deadline error for 60% of the requests. When I publish the app there will (hopefully) be many users who could call the same function simultaneously to post a photo. How can I solve this problem? Thanks in advance.
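One likely culprit is that the five convert/upload steps run strictly in sequence, so each call can easily exceed the callable function's 60-second default deadline under load. A minimal sketch, assuming the same fileName, tempFilePath, mimeType, and bucket as above: raise the function's timeout and memory with runWith, and run the four scaled variants concurrently so the total latency is roughly that of the slowest variant rather than the sum of all five (execFile is used instead of spawnSync so the convert processes can actually overlap):

const { execFile } = require('child_process');
const { promisify } = require('util');
const execFileAsync = promisify(execFile);

exports.uploadImage = functions
  .runWith({ timeoutSeconds: 300, memory: '1GB' }) // more headroom than the 60s default
  .https.onCall(async (data, context) => {
    // ...decode the base64 payload and upload the original exactly as above...

    const variants = [
      { suffix: '_25.jpg', args: ['-scale', '10%', '-scale', '1000%>'] },
      { suffix: '_50.jpg', args: ['-scale', '5%', '-scale', '2000%>'] },
      { suffix: '_75.jpg', args: ['-scale', '3%', '-scale', '3333%>'] },
      { suffix: '_100.jpg', args: ['-scale', '1%', '-scale', '10000%>'] },
    ];

    // Each variant converts and uploads independently; Promise.all lets the
    // four pipelines run concurrently instead of back to back.
    await Promise.all(variants.map(async ({ suffix, args }) => {
      const variantPath = path.join(os.tmpdir(), 'myfolder', fileName + suffix);
      await execFileAsync('convert', [tempFilePath, ...args, variantPath]);
      await bucket.upload(variantPath, {
        destination: 'test/' + fileName + suffix,
        metadata: {
          contentType: mimeType,
          metadata: { firebaseStorageDownloadTokens: uuid() },
        },
      });
      fs.unlinkSync(variantPath);
    }));

    fs.unlinkSync(tempFilePath);
  });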
I am trying to rename my file's originalname when using multer memoryStorage. I am using multer to upload an array of files, and when I console.log(req.files) I get:
{
  fieldname: 'images',
  originalname: 'snake.jpg',
  encoding: '7bit',
  mimetype: 'image/jpeg',
  buffer: <Buffer ff d8 38134 more bytes>,
  size: 38184
}
The reason I want to rename originalname is that I store the images in AWS S3, and if two images have the same name the existing object gets overwritten instead of a new image being added.
I have tried appending the date to originalname when storing it in the database, but then originalname does not change when the images are added to the bucket.
Here is my code:
posts.js
const express = require("express");
const multer = require("multer");
const config = require("config");
const { v4: uuidv4 } = require("uuid");
const imageResize = require("../middleware/imageResize"); // path assumed
const { Post, Post_Image } = require("../models");        // paths assumed

const router = express.Router();

const storage = multer.memoryStorage();
const upload = multer({
  storage: storage,
  limits: { fieldSize: 25 * 1024 * 1024 },
});

router.post(
  "/",
  [upload.array("images", config.get("maxImageCount")), imageResize],
  async (req, res) => {
    const paths = req.files.map((file) => ({
      originalName: file.originalname + "-" + new Date().toISOString() + "-" + uuidv4(),
    }));
    await Post.create(
      {
        title: req.body.title,
        userId: req.body.userId,
        Post_Images: paths.map((x) => ({ images: x.originalName })),
      },
      { include: [Post_Image] }
    );
    res.status(201).send();
  }
);
imageResize.js
const sharp = require("sharp");
require("dotenv").config();
const AWS = require("aws-sdk");

const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ID,
  secretAccessKey: process.env.AWS_SECRET,
  region: process.env.AWS_REGION,
});

module.exports = async (req, res, next) => {
  const images = [];
  const resizePromises = req.files.map(async (file) => {
    console.log(file);
    // Upload the resized buffer, not the original file.buffer,
    // otherwise the sharp output is thrown away.
    await sharp(file.buffer)
      .resize(2000)
      .jpeg({ quality: 50 })
      .toBuffer()
      .then((resized) =>
        s3.upload({
          Bucket: process.env.AWS_BUCKET,
          Key: file.originalname + "_full.jpg",
          Body: resized,
          ACL: "public-read",
        }).promise()
      );
    await sharp(file.buffer)
      .resize(100)
      .jpeg({ quality: 30 })
      .toBuffer()
      .then((resized) =>
        s3.upload({
          Bucket: process.env.AWS_BUCKET,
          Key: file.originalname + "_thumb.jpg",
          Body: resized,
          ACL: "public-read",
        }).promise()
      );
    images.push(file.originalname);
  });
  await Promise.all(resizePromises);
  req.images = images;
  next();
};
In other words, I am trying to change originalname in my req.files to
originalname + "-" + new Date().toISOString() + "-" + uuidv4()
Or, alternatively: how do I keep the uuid and the date the same in both my posts.js and imageResize.js?
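One way to keep the generated name consistent is to rename each file once, in a small middleware that runs after multer but before imageResize; since imageResize and the route handler receive the same req.files objects, both then see the identical name. A minimal sketch (the renameFiles module is hypothetical):

// renameFiles.js (hypothetical module): rewrite originalname once so every
// later middleware and the route handler use the same generated name.
const { v4: uuidv4 } = require("uuid");

module.exports = (req, res, next) => {
  (req.files || []).forEach((file) => {
    file.originalname = file.originalname + "-" + new Date().toISOString() + "-" + uuidv4();
  });
  next();
};

In posts.js the route then becomes router.post("/", [upload.array("images", config.get("maxImageCount")), renameFiles, imageResize], ...), and the handler reads file.originalname directly instead of regenerating it.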
I am trying to upload multiple images with Node.js, Express.js and multer-s3, but it's not working.
I have a model called Program, and the Program model has an array image attribute, but when I try to upload multiple images req.file comes back undefined.
Here is my model
const programSchema = new mongoose.Schema({
  programtype: {
    type: String,
    required: true,
  },
  title: {
    type: String,
    required: true,
  },
  description: {
    type: String,
    required: true,
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  programImage: {
    type: Array,
    required: true, // was "require", which mongoose ignores
  },
});
and my routes
const express = require("express");
const router = express.Router();
const Program = require("../models/program");
const fs = require("fs");
const multer = require("multer");
const path = require("path");
var AWS = require("aws-sdk");
var multerS3 = require("multer-s3");

AWS.config.update({
  secretAccessKey: process.env.S3_SECRECT,
  accessKeyId: process.env.AWS_ACCESS_KEY,
  region: process.env.S3_REGION,
});

const uploadPath = path.join("public", Program.programImageBasePath);
const imageMimeTypes = ["image/jpeg", "image/png", "image/gif"];
const bucketname = "mybucketname";
const s3 = new AWS.S3();

const upload = multer({
  storage: multerS3({
    s3: s3,
    acl: "public-read",
    bucket: bucketname,
    s3BucketEndpoint: true,
    endpoint: "http://" + bucketname + ".s3.amazonaws.com",
    key: function (req, file, cb) {
      const uploadPathWithOriginalName = uploadPath + "/" + file.originalname;
      cb(null, uploadPathWithOriginalName);
    },
  }),
});
router.post("/create", upload.array("cover", 10), async (req, res, next) => {
console.log(req.file);
const program = new Program({
programtype: req.body.programtype,
title: req.body.title,
description: req.body.description,
programImage: req.file.location,
});
try {
const programs = await program.save();
res.redirect("/programs");
} catch {
if (program.programImage != null) {
removeprogramImage(program.programImage);
}
res.render("programs/new");
}
});
and my views
<h2 style="padding-top: 90px;">New Programs</h2>
<form action="/programs/create" method="POST" enctype="multipart/form-data">
  <div>
    <label>Image</label>
    <input type="file" name="cover" multiple />
  </div>
  Cancel
  <button type="submit">Create</button>
</form>
You can refer to this example.
const AWS = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');

const s3 = new AWS.S3({
  accessKeyId: 'xxxxxxxxx',
  secretAccessKey: 'xxxxxxxxx'
});

const uploadS3 = multer({
  storage: multerS3({
    s3: s3,
    acl: 'public-read',
    bucket: 'xxxxxxxx',
    metadata: (req, file, callBack) => {
      callBack(null, { fieldName: file.fieldname })
    },
    key: (req, file, callBack) => {
      // To save into a folder, concatenate the folder name onto the path
      var fullPath = 'products/' + file.originalname;
      callBack(null, fullPath)
    }
  }),
  limits: { fileSize: 2000000 }, // In bytes: 2000000 bytes = 2 MB
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb); // your own MIME/extension validator, defined elsewhere
  }
}).array('photos', 10);
exports.uploadProductsImages = async (req, res) => {
  uploadS3(req, res, (error) => {
    console.log('files', req.files);
    if (error) {
      console.log('errors', error);
      res.status(500).json({
        status: 'fail',
        error: error
      });
    } else {
      // If no file was found
      if (req.files === undefined) {
        console.log('uploadProductsImages Error: No File Selected!');
        res.status(500).json({
          status: 'fail',
          message: 'Error: No File Selected'
        });
      } else {
        // If successful
        let fileArray = req.files,
          fileLocation;
        const images = [];
        for (let i = 0; i < fileArray.length; i++) {
          fileLocation = fileArray[i].location;
          console.log('filenm', fileLocation);
          images.push(fileLocation)
        }
        // Save the file name into the database
        return res.status(200).json({
          status: 'ok',
          filesArray: fileArray,
          locationArray: images
        });
      }
    }
  })
};
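Tying this back to the question: with upload.array the uploaded files land on req.files (an array), not req.file, which is why req.file logs undefined in the /create handler above. Each element's location property holds the S3 URL, so the model field can be populated with something like programImage: req.files.map(f => f.location).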
I am trying to implement an upload-profile-image feature for a users collection. I implemented an async function like this:
exports.async_upload_image = async function (req, res) {
  const path = require("path");
  const os = require("os");
  const fs = require("fs");
  const { Storage } = require('@google-cloud/storage'); // end up no need
  let gcs = new Storage({
    projectId: config.projectId
  });
  try {
    const newFilePath = req.filepath;
    const baseName = newFilePath.split("/").pop();
    if (req.mimetype !== "image/jpeg" && req.mimetype !== "image/png") {
      console.log("Wrong file type submitted");
      return null;
    }
    // my.image.png => ['my', 'image', 'png']
    const image_extension = newFilePath.split(".")[newFilePath.split(".").length - 1];
    // 32756238461724837.png
    let generated_token = uuid();
    let image_filename = `${generated_token}.${image_extension}`;
    const processed_path = path.join(os.tmpdir(), image_filename);
    // creates a copy of the image file inside the temporary path
    const input_file = fs.createReadStream(newFilePath);
    const output_file = fs.createWriteStream(processed_path);
    input_file.pipe(output_file);
    // upload to Firebase storage from the temporary path
    await gcs.bucket(config.storageBucket).upload(processed_path, {
      gzip: true,
      metadata: {
        cacheControl: "no-cache",
        contentType: req.mimetype,
        firebaseStorageDownloadTokens: generated_token
      }
    })
    const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${image_filename}?alt=media&token=${generated_token}`;
    // await gcs.bucket(config.storageBucket).upload(newFilePath, {
    //   gzip: true,
    //   metadata: {
    //     cacheControl: "no-cache",
    //     contentType: req.mimetype,
    //     firebaseStorageDownloadTokens: generated_token
    //   }
    // })
    // const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${baseName}?alt=media&token=${generated_token}`;
    await db.collection(USERS_PUBLIC_COLLECTION).doc(req.user_id).update({
      profile_image:
      {
        uid: generated_token,
        url: imageUrl
      }
    })
    console.log(`Update profile to uploaded image ${generated_token} successfully`);
    return success_response();
  } catch (error) {
    console.log(error);
  }
}
Then I added this at the bottom of the same file and ran it with node file.js:
const req = {
  filepath: some_file_path,
  mimetype: "image/png",
  user_id: "valid_user_id"
}
exports.async_upload_image(req);
The picture does get uploaded to Storage, and my document in Firestore is updated, but the intention was that accessing the URL under url in the profile_image map would let me see the picture. This works for the unprocessed picture, whose code segment is commented out above, but not for the processed image. I also noticed that the uploaded file is incredibly small, around 20 B. Can someone tell me why, and what might be a better way to upload images with Firebase? Feel free to ask for clarification if more info is required to solve the problem.
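A likely cause, judging from the code above, is that input_file.pipe(output_file) is asynchronous: the upload starts before the write stream has flushed, so gcs reads a nearly empty temp file (hence the ~20 B object). One fix is to wait for the copy to complete before uploading, for example with fs.promises.copyFile. A minimal sketch, assuming the same newFilePath and processed_path as above:

const fsp = require("fs").promises;

// copyFile resolves only after the whole file has been written, so the
// temp copy is complete before the upload begins.
await fsp.copyFile(newFilePath, processed_path);

await gcs.bucket(config.storageBucket).upload(processed_path, {
  gzip: true,
  metadata: {
    cacheControl: "no-cache",
    contentType: req.mimetype,
    firebaseStorageDownloadTokens: generated_token
  }
});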