Cloudinary uploader.upload executed after next async command in file - node.js

I'm trying to upload a file with Multer and the Cloudinary Node.js API and then remove the temp file from the hard drive; however, the file gets deleted before the upload finishes. When I commented out the unlink call, the files uploaded correctly.
Here is my controller
// POST /games handler: uploads optional poster/video files to Cloudinary,
// removes the local temp files afterwards, and persists the new game for
// the authenticated publisher.
const createGame = async (req, res) => {
  const { promisify } = require("util");
  const options = {
    use_filename: true,
    unique_filename: false,
    overwrite: true,
  };
  // Uploads a local file to Cloudinary and always removes the temp file
  // afterwards. Resolves to the upload result, or "" when the upload fails.
  const uploadAndCleanup = async (localPath, uploadOptions) => {
    try {
      // Bug fix: uploader.upload is callback-based, so a bare `await` did not
      // wait for the upload and the temp file was unlinked too early.
      // Promisify (bound, so `this` is preserved) makes `await` effective.
      const uploadFn = promisify(uploader.upload.bind(uploader));
      const result = await uploadFn(localPath, uploadOptions);
      // NOTE(review): Cloudinary resolves with a result object; the caller
      // probably wants result.secure_url rather than the object itself —
      // confirm before relying on this cast (kept from the original).
      return result as string;
    } catch (err) {
      // Upload failure: signal it with an empty path, as the original did.
      return "";
    } finally {
      // Remove the temp file whether or not the upload succeeded.
      await unlink(localPath);
    }
  };
  try {
    const data = req.body;
    const files = req.files;
    let posterPath = "";
    let videoPath = "";
    let distroPath = "";
    if (files?.poster?.length) {
      posterPath = files?.poster[0]?.path || "";
      posterPath = await uploadAndCleanup(posterPath, options);
    }
    if (files?.video?.length) {
      videoPath = files?.video[0]?.path || "";
      videoPath = await uploadAndCleanup(videoPath, {
        ...options,
        resource_type: "video",
      });
    }
    const currentPublisher = await publishersRepository.findOneBy({
      id: req.user.id,
    });
    if (!currentPublisher) {
      // Bug fix: `res.status = 301` assigned a property instead of calling
      // Express' status(), so the response went out as 200. 401 is the
      // appropriate code for a missing/invalid authentication.
      res.status(401);
      return res.json({
        message:
          "You must be authenticated as publisher in order to add a game",
      });
    }
    const newGameData = gamesRepository.create({
      ...data,
      poster: posterPath,
      video: videoPath,
      distro: distroPath,
      publisher: currentPublisher,
    });
    const newGame = await gamesRepository.save(newGameData);
    res.status(200);
    return res.json({ message: "Game added", data: newGame });
  } catch (err) {
    console.log("Error creating the game", err);
    res.status(400);
    return res.json({ message: "Error creating the game" });
  }
};

I'm assuming you're using just one multer instance.
From what I can see in the docs, the upload function does not return a Promise, so the `await` in front of the uploader.upload call has no effect.
What you need is to promisify your upload function. Here's an example:
const util = require('util');
// your code ...
// Wrap the callback-based uploader.upload in a Promise so that `await`
// actually waits for the upload to complete before the temp file is removed.
try {
const promisifiedUploadFunc = util.promisify(uploader.upload);
const newPath = await promisifiedUploadFunc(posterPath, options);
// With the promisified upload above, this unlink now runs only after the
// upload has finished.
await unlink(posterPath);
posterPath = newPath as string;
} catch (e) {
// continue code..
}

Related

Modify PDF file uploaded in the request body in nodejs

I'm trying to modify a pdf file that I get from the request then saving it in my server, I achieved to save it first as I received it from the request body and then modifying it, and once more saving it again with the modified version, but I was looking for another approach to modify it in the fly directly after receiving it from the request body then saving it only once in the server. I need to add a logo to the uploaded file, thanks in advance
This is my code
'''
const mongoose = require("mongoose");
const path = require("path");
const ErrorResponse = require("../utils/errorResponse");
const fs = require("fs");
// Form submition with photo
exports.uploadPdf = (req, folderPath, next) => {
if (!req.files) {
console.log("No file");
return next(new ErrorResponse("Please Upload a file", 400));
}
const file = req.files.pdf;
const extention = file.name.split(".").at(-1);
if (extention !== "pdf") {
console.log("It's not a valid PDF file");
return next(new ErrorResponse("Please Upload an pdf file", 400));
}
//Check image size
if (file.size > process.env.MAX_FILE_UPLOAD) {
return next(
new ErrorResponse(
`Please Upload an PDF file less than ${process.env.MAX_PDF_FILE_UPLOAD}`,
400
)
);
}
//Create custom filename
let id;
if (!req.params.id) {
id = mongoose.Types.ObjectId();
} else {
id = req.params.id;
}
file.name = `document_${id}${path.parse(file.name).ext}`;
var dir = `${process.env.FILE_UPLOAD_PATH}/${folderPath}`;
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
file.mv(
`${process.env.FILE_UPLOAD_PATH}/${folderPath}/${file.name}`,
async (err) => {
if (err) {
console.log(err);
return next(new ErrorResponse(`Problem with file upload`, 500));
}
}
);
req.body = {
_id: id,
...req.body,
pdf: file.name,
};
};
'''

Fluent-ffmpeg won't create video file sometime it will create?

anyone know the reason of this problem? what is the issue?
In this code I am trying to convert a blob into a video file, but it only works intermittently — sometimes the output file is created and sometimes it is not. Can anyone help me find the cause?
const recordVideo = require('../model/recordVideo_model')
const { join, dirname } = require('path');
const { fileURLToPath } = require('url')
const { mkdir, open, unlink, writeFile } = require('fs/promises');
const {Blob, Buffer} = require('buffer');
const { path } = require('#ffmpeg-installer/ffmpeg');
const ffmpeg = require('fluent-ffmpeg');
ffmpeg.setFfmpegPath(path)
// const __dirname = dirname(fileURLToPath(import.meta.url));
// Writes the received recording chunks to a temp .webm file, transcodes it
// with ffmpeg (VP9, 720p), removes the temp file, and records the final
// video in the database — unless an entry for this room already exists.
const saveData = async (data, roomid, course_id) => {
  const dirName = roomid;
  const dirPath = `./public/video/canvideo/${dirName}`;
  const fileName = `${Date.now()}-${roomid}.webm`;
  const tempFilePath = `${dirPath}/temp-${fileName}`;
  const finalFilePath = `${dirPath}/${fileName}`;
  // Ensure the output directory exists. { recursive: true } makes this a
  // no-op when it already does, replacing the fragile open()/mkdir dance
  // (opening a directory to probe for existence) of the original.
  await mkdir(dirPath, { recursive: true });
  try {
    const videoBlob = new Blob(data, {
      type: 'video/webm'
    });
    const videoBuffer = Buffer.from(await videoBlob.arrayBuffer());
    const res = await recordVideo.findOne({ roomid: roomid, recordType: 'canvas' });
    if (!res) {
      await writeFile(tempFilePath, videoBuffer);
      // Bug fix: fluent-ffmpeg's command object is not a Promise, so the
      // original `await ffmpeg(...)` returned immediately and saveData could
      // finish before the output file existed — the intermittent failures.
      // Wrap the command so we resolve on 'end' and reject on 'error'.
      // (.save() also takes only the output path; the stray second argument
      // is dropped.)
      await new Promise((resolve, reject) => {
        ffmpeg(tempFilePath)
          .outputOptions([
            '-c:v libvpx-vp9',
            '-c:a copy',
            '-crf 35',
            '-b:v 0',
            '-vf scale=1280:720', '-max_muxing_queue_size 1024'
          ])
          .on('end', resolve)
          .on('error', reject)
          .save(finalFilePath);
      });
      await unlink(tempFilePath);
      console.log(`*** File ${fileName} created`);
      // Insert the database entry so this room's video is not re-created.
      await recordVideo.create({ roomid: roomid, filename: fileName, filePath: finalFilePath, recordType: 'canvas', courseid: course_id });
    }
  } catch (e) {
    console.log('*** Erro in code ', e);
  }
};
module.exports = { saveData, };
If anyone can help me with this, it would be greatly appreciated.
in this code, i am trying to convert a blob into video format. but sometime it is working and sometime not. can anyone help me in this?

Resizing Images already on Google Storage locally through SHARPJS, and keeping/updating the downloadUrl

There was similar questions/answers but not recently and none with the exact requirements.
I have many pictures for a dating app on Firebase Storage, uploaded from the users, with a downloadUrl saved on Firestore. Just noticed it is saved as very big pictures, and slow down loading of the users. Result: I need to resize and reformat to jpeg all pictures on firebase storage.
My research and trials for now 2 months brought me to the following conclusions:
It's not possible through Google Functions as the quota of 9 minutes is too slow to do the whole resizing.
Sharp is the best library to do this, but better do it locally.
I can use gsutil as in this Question Here to download all pictures and keep the path, resize it, and upload it later.
I was blocked at finding how I can resize/reformat with Sharp and whilst the name will be different and probably the metadata kept out, how can I uploaded it back and at the same time get a new downloadUrl so that I can in turn upload it to firestore in the users collection?
MY POTENTIAL SOLUTION (STEP 4):
Not sure if it will work, but I'd have a listening function for changed (finalized) object and getting info from the image to upload it back on firestore, using a self-made downloadUrl.
MY NEW QUESTION: Is it going to work? I'm afraid to break the pictures of all my users...
For your better understanding, here is my process so far:
1. Download Images
gsutil cp -r gs://my-bucket/data [path where you want to download]
2. Script (typescript) to resize/reformat them.
import * as fs from "fs";
import sharp from "sharp";
import * as path from "path";
// Recursively collects every file path under `dir` and invokes
// `done(err, results)` with a flat array of absolute file paths.
const walk = (dir: string, done) => {
  let results = [];
  fs.readdir(dir, (err, list) => {
    if (err) return done(err);
    let i = 0;
    (function next() {
      const entry = list[i++];
      // Exhausted the listing: report the accumulated results.
      if (!entry) return done(null, results);
      const fullPath = path.resolve(dir, entry);
      fs.stat(fullPath, (err, stat) => {
        if (stat && stat.isDirectory()) {
          walk(fullPath, (walkErr, res) => {
            // Bug fix: the original ignored errors from the recursive walk,
            // silently dropping entire subtrees on failure.
            if (walkErr) return done(walkErr);
            results = results.concat(res);
            next();
          });
        } else {
          // Regular file (or stat failure — kept from the original, which
          // also treated an un-stat-able entry as a file).
          results.push(fullPath);
          next();
        }
      });
    })();
  });
};
// Converts every file in `filesPaths` to a resized (fits inside 600x800)
// JPEG written next to the original; prefixes the name with "rszd-" when the
// output path would otherwise collide with the source file.
const reformatImage = async (filesPaths: string[]) => {
  const newFilesPaths: string[] = [];
  await Promise.all(
    filesPaths.map(async (filePath) => {
      let newFileName = changeExtensionName(filePath);
      // Bug fix: the original referenced an undefined `NewFileName` (wrong
      // capitalization), which threw a ReferenceError at runtime.
      let newFilePath = path.join(path.dirname(filePath), newFileName);
      if (filePath === newFilePath) {
        // Source is already a .jpg: write to a prefixed name so sharp never
        // overwrites the file it is still reading from.
        newFileName = "rszd-" + newFileName;
        newFilePath = path.join(path.dirname(filePath), newFileName);
      }
      newFilesPaths.push(newFilePath);
      try {
        // await the whole chain instead of mixing await with .then/.catch
        // (the original's outer catch was unreachable because .catch
        // swallowed every rejection first).
        const info = await sharp(filePath)
          .withMetadata()
          .resize(600, 800, {
            fit: sharp.fit.inside,
          })
          .toFormat("jpeg")
          .jpeg({
            mozjpeg: true,
            force: true,
          })
          .toFile(newFilePath);
        console.log("converted file...", info);
      } catch (error) {
        console.log("sharp error: ", error);
      }
    })
  );
  console.log("THIS IS THE RESIZED IMAGES");
  console.log(newFilesPaths);
};
// Returns the base name of `filePath` with its extension swapped for ".jpg"
// (e.g. "/a/photo.png" -> "photo.jpg"; an extension-less name gets ".jpg"
// appended).
const changeExtensionName = (filePath: string) => {
  const currentExt = path.extname(filePath || "");
  return path.basename(filePath, currentExt) + ".jpg";
};
// Entry point: walk the locally downloaded bucket folder, then resize and
// re-format every image found in it.
walk("./xxxxxx.appspot.com", (err, results) => {
if (err) throw err;
console.log("THIS IS THE DOWNLOADED IMAGES");
console.log(results);
reformatImage(results);
});
3. Re-upload the files
gsutil cp -r [path your images] gs://my-bucket/data
4. Listen for new file update through a Firebase Functions, and update the new downloadUrl
// Storage onFinalize trigger: when a resized image ("rszd-" prefix) lands in
// the bucket, rebuild its public download URL and write it onto the owning
// user's Firestore document.
export const onOldImageResizedUpdateDowloadUrl = functions.storage
  .object()
  .onFinalize(async (object: any) => {
    // Guard clause instead of wrapping the whole body in `if (object)`.
    if (!object) {
      return;
    }
    functions.logger.log('OBJECT: ', object);
    const fileBucket = object.bucket;
    const filePath: string = object.name;
    // Objects are stored as .../<userId>/<fileName>.
    const userId = path.basename(path.dirname(filePath));
    const fileName = path.basename(filePath);
    // Only react to already-resized files so originals are not processed.
    const isResized = fileName.startsWith('rszd-');
    if (!isResized) { return; }
    const token = object.metadata.firebaseStorageDownloadTokens;
    const downloadUrl = createDownloadUrl(
      fileBucket,
      token,
      userId,
      fileName
    );
    // pictures are named as eg "rszd-" + 1.jpeg, so charAt(5) is the index
    const pictureId = 'picture' + fileName.charAt(5);
    // Bug fix: the update promise was floating; await it so the Cloud
    // Function does not terminate before the Firestore write completes.
    await admin
      .firestore()
      .collection('users')
      .doc(userId)
      .update({ [pictureId]: downloadUrl });
  });
// Builds the public Firebase Storage download URL for a profile picture
// stored under pictures-profil/<uid>/<fileName> in the given bucket.
function createDownloadUrl(
  bucketPath: string,
  downloadToken: string,
  uid: string,
  fileName: string) {
  const objectPath = `pictures-profil%2F${uid}%2F${fileName}`;
  const query = `alt=media&token=${downloadToken}`;
  return `https://firebasestorage.googleapis.com/v0/b/${bucketPath}/o/${objectPath}?${query}`;
}

Saving uploaded file to Pinata IPFS in NodeJS

I've been trying to save uploaded image files to IPFS in NodeJs , while it seems Pinata saves them, the files are pretty much gibberish (after downloading the images are broken).
My code :
// Nodejs route.
exports.postImage = async (req, res, next) => {
// Using multer to get the file.
fileUploadMiddleware(req, res, async (err) => {
// getting bunch of data from query string.
let meta = {
origin,
originid,
context,
ownerid,
format
} = req.query;
if(!meta.format || !req.files) {
return next(new ErrorResponse("File format not specified", 404));
}
if(!meta.originid) {
meta.originid = uuidv4();
}
// NOTE: is this the right way to get the data of the file ?
const buffer = req.files[0].buffer;
const filename = `${metadata.origin}_${metadata.originid}.${ metadata.format }`;
let stream;
try {
stream = Readable.from(buffer);
// HACK to make PINATA WORK.
stream.path = filename;
}
catch(e) {
logger.logError(e);
return false;
}
const options = {
pinataMetadata: {
name: filename,
keyvalues: {
context: metadata.context,
ownerid: metadata.ownerid
}
},
pinataOptions: {
cidVersion: 0
}
};
try {
var result = await pinata.pinFileToIPFS(stream, options);
console.log("SUCCESS ", result);
return result;
}
catch(e) {
logger.logError(e);
return null;
}
res.status(200).json({
success: true,
data: 'You got access'
})
});
}
So basically creating the stream based on the uploaded file buffer and sending it away to Pinata. Where do I go wrong?
const buffer = req.files[0].buffer;
If you use memoryStorage, the buffer property is available. It is not available with diskStorage, because diskStorage saves the file to the local disk instead:
const storage = multer.memoryStorage()
const upload = multer({ storage: storage })
Also, I think it is not req.files[0] but rather:
const buffer = req.file.buffer;
after I get the buffer, I convert it to FormData using form-data npm package:
import FormData from "form-data";
const formData = new FormData();
// Append the raw file buffer as the multipart "file" part; contentType and a
// unique filename let Pinata identify and store the file correctly.
formData.append("file", buffer, {
contentType,
filename: fileName + "-" + uuidv4(),
});
then you send a post request to pinata
const url = `https://api.pinata.cloud/pinning/pinFileToIPFS`;
// maxBodyLength: Infinity lets axios send bodies larger than its default cap.
const fileRes = await axios.post(url, formData, {
  maxBodyLength: Infinity,
  headers: {
    // formData.getBoundary() is specific to the form-data npm package; the
    // native JavaScript FormData class does not have this method.
    // Bug fix: the boundary parameter of a multipart Content-Type must be
    // separated with ";" — `multipart/form-data: boundary=...` is malformed
    // and servers will fail to parse the body.
    "Content-Type": `multipart/form-data; boundary=${formData.getBoundary()}`,
    pinata_api_key: pinataApiKey,
    pinata_secret_api_key: pinataSecretApiKey,
  },
});

How to download images from the database?

I need to store files and images no larger than 2 megabytes in the database. But there is a problem when downloading images: every downloaded image is corrupted and does not open. There is no such problem with text files.
Result of the downloaded image:
file 2.png
2.png: data
Uploading images this way:
// Persists an uploaded file (multer memory storage: metadata plus raw buffer)
// as a SysFiles record and echoes the stored object back with HTTP 201.
module.exports.upload = async function (req, res) {
  const { originalname, mimetype, buffer } = req.file;
  const record = {
    COMMENTS: req.body.COMMENTS,
    NAME: originalname,
    MIMETYPE: mimetype,
    FILE_CONTENT: buffer
  };
  try {
    await SysFiles.create(record);
    res.status(201).json(record);
  } catch (e) {
    errorHandler(res, e);
  }
};
multer:
const multer = require('multer');
// Multer configured with in-memory storage, so uploaded files are exposed as
// Buffers on req.file.buffer (required for storing content in the database).
const uploadConfig = {
  storage: multer.memoryStorage(),
  limits: {
    // Reject files larger than 2 MB.
    fileSize: 2 * 1024 * 1024
  }
};
const upload = multer(uploadConfig);
module.exports = upload;
And here there is a problem when download:
// Streams a file stored in the database back to the client as a download
// attachment, restoring its original name and MIME type.
module.exports.download = async function (req, res) {
  try {
    const sysFile = await SysFiles.findById(req.params.SYSFILE_ID);
    // Robustness fix: findById yields nothing for an unknown id; the
    // original then crashed on sysFile._props with a generic TypeError.
    if (!sysFile) {
      res.status(404);
      return res.json({ message: 'File not found' });
    }
    const fileContents = Buffer.from(sysFile._props.FILE_CONTENT);
    const readStream = new stream.PassThrough();
    readStream.end(fileContents);
    // Headers must be set before the stream is piped into the response.
    res.set('Content-disposition', 'attachment; filename=' + sysFile._props.NAME);
    res.set('Content-Type', sysFile._props.MIMETYPE);
    readStream.pipe(res);
  } catch (e) {
    errorHandler(res, e);
  }
};
What am I doing wrong? Please tell me. I must say right away that I need to upload the image to the database without any links to any folder where the images will be stored.
Note:
But, by the way, when I downloaded the uploaded image using "SQL
Developer", the image opened without any problems.
You must store image content-Type
And image data
const formidable = require('formidable');
const _ = require('lodash');
const fs = require('fs');
// loading formidable library
let form = new formidable.IncomingForm();
// keep the uploaded file's original extension on the temp file
form.keepExtensions = true;
// parse the incoming multipart request into text fields and uploaded files
form.parse(request_data,(err,fields,files) => {
// bail out if formidable failed to parse the upload
if (err) {
return res.status(400).json({
err: 'Image could not bd uploaded'
})
}
// validating all fields without image
const {name, description, price, category, quantity, shipping} = fields;
if(!name || !description || !price || !category || !quantity || !shipping) {
return res.status(400).json({
err: 'All Fields are required'
})
}
// lets continue when there are no error
// form have available photo
if(files.photo){
//1 kb = 1000
//1 MB = 1000000
// reject images larger than 2 MB
if(files.photo.size > 2000000){
return res.status(400).json({
err: 'Image should be less than 2 MB'
})
}
// NOTE(review): formidable stores the upload at a temp path; read the raw
// bytes and the reported MIME type into your model object here.
TAKING_AS_YOUR_VALRIABLE.data = fs.readFileSync(files.photo.path);
TAKING_AS_YOUR_VALRIABLE.contentType = files.photo.type
}
// finally, save the product (merge the text fields with the image data)
//save it your own way
const FINAL_DATA = {...fields,...TAKING_AS_YOUR_VALRIABLE}
})
now show photo code
if(TAKING_AS_YOUR_VALRIABLE.data){
// Serve the stored bytes with the content type recorded at upload time so
// the browser can render the image correctly.
res.set('Content-Type',TAKING_AS_YOUR_VALRIABLE.contentType);
res.send(TAKING_AS_YOUR_VALRIABLE.data);
}

Resources