How to delete files with NodeJS? [duplicate]

This question already has answers here:
node.js remove file
(21 answers)
Closed last year.
I have two async functions:
One for creating an object in PostgreSQL and uploading the files whose names are stored in that object.
Another one for deleting the entity and deleting its files from the folder.
I don't know how to extract the filenames from the PostgreSQL entity and delete those files from my 'static' folder.
Entity in PostgreSQL looks like:
Car:
{
  'name': 'Nissan 350Z',
  'description': 'new',
  'image': '123wejdsefberfkj.jpg',
  'video': '23rusdjf8ioysdfs.mp4'
}
Create function:
Here I get the files from form-data, create unique names for them, save those names in PostgreSQL, and then save the files in the "static" folder.
let videoName = uuid.v4() + '.mp4';
let imageName = uuid.v4() + '.jpg';
let { name, description } = req.body;
const { video, image } = req.files;
const car = await Car.create({ name, description, video: videoName, image: imageName })
  .then(() => video.mv(path.resolve(__dirname, '..', 'static', videoName)))
  .then(() => image.mv(path.resolve(__dirname, '..', 'static', imageName)))
Delete function:
Here I need to extract the file names from the database and delete those files from the folder (a bit of pseudocode):
async delete(req, res) {
  try {
    const { id } = req.params;
    await Car.findOne({ where: { id } })
      .then(async data => {
        if (data) {
          await let videoName = Car.#extract_video_name# ({where: {id}})
            .then(mv(path.delete(__dirname, '..', 'static', videoName)))
          await Car.destroy({ where: { id } }).then(() => {
            return res.json("Car deleted");
          })
        } else {
          return res.json("This Car doesn't exist in database");
        }
      })
  } catch (e) {
    console.error(e)
  }
}

You can use fs; require it with const fs = require('fs');
There is no npm package to install, it's included in Node.js.
This code saves a file into a directory, creating the folder first if it doesn't exist:
let path = String(`./directoryName/${fileName}`);
fs.mkdirSync('./directoryName', { recursive: true });
fs.writeFileSync(path, data);
And you can delete the file using:
fs.unlink(path, (err) => {
  if (err) throw err; // handle the error the way you want to
  console.log('path/file.txt was deleted'); // otherwise the file was deleted
});
Reference: https://nodejs.org/api/fs.html
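Putting the two together for the Car example in the question, a rough sketch of the delete handler could look like this (assuming a Sequelize-style Car model and the same 'static' folder; adjust error handling to your needs):
const fs = require('fs/promises');
const path = require('path');
async function deleteCar(req, res) {
  try {
    const { id } = req.params;
    const car = await Car.findOne({ where: { id } });
    if (!car) {
      return res.json("This Car doesn't exist in database");
    }
    // remove the files whose names are stored on the entity
    await fs.unlink(path.resolve(__dirname, '..', 'static', car.video));
    await fs.unlink(path.resolve(__dirname, '..', 'static', car.image));
    // then remove the row itself
    await Car.destroy({ where: { id } });
    return res.json("Car deleted");
  } catch (e) {
    console.error(e);
    return res.status(500).json("Could not delete Car");
  }
}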

Related

Delete file from S3 bucket based on metadata?

I've tried specifying metadata in parameters passed to DeleteObjectCommand, but the file gets deleted whether or not it has the provided metadata...
static async deleteFile(req, res) {
  try {
    const key = basename(req.body.key) // file that will be deleted
    const params = {
      Bucket: "my_bucket",
      Key: key,
      Metadata: { userId: req.user._id }
    }
    const data = await s3.send(new DeleteObjectCommand(params))
    return res.send(data)
  }
  catch (err) {
    console.log(err)
  }
}
I have different users' files in the same bucket and want to prevent one app user from deleting another user's files. Is there a better way to do this?
Thanks in advance
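One option (not from the original thread, just a sketch assuming the same AWS SDK v3 client and that the files were uploaded with Metadata: { userId: ... }) is to read the object's metadata with HeadObjectCommand first and only delete when the owner matches:
const { HeadObjectCommand, DeleteObjectCommand } = require("@aws-sdk/client-s3");
async function deleteOwnFile(s3, bucket, key, userId) {
  // fetch the object's metadata without downloading the body
  const head = await s3.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
  // user-defined metadata keys come back lower-cased, without the x-amz-meta- prefix
  if (!head.Metadata || head.Metadata.userid !== String(userId)) {
    throw new Error("Not allowed to delete this file");
  }
  return s3.send(new DeleteObjectCommand({ Bucket: bucket, Key: key }));
}
A more robust alternative is to scope keys per user (for example a userId/ prefix) and enforce that prefix in an IAM or bucket policy, so the check doesn't depend on application code.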

Smooch - create attachments from buffer

I'm trying to create an image attachment via the smooch-core API.
I have the image as a base64 Buffer, and I try something like this:
smoochClient.attachments
  .create({
    appId: appId,
    props: {
      for: 'message',
      access: 'public',
      appUserId: appUserId
    },
    source: myBuffer
  })
  .then(() => {
    console.log('OK');
  }).catch(err => {
    console.log(JSON.stringify(err));
  });
I get this error: "status":413,"statusText":"Payload Too Large"
[When I create this image normally through Postman it works well, so it's not too big - I guess it's because of how the Buffer is sent]
Anyone know how I can send a buffer to this API?
Are you able to submit the base64 data directly in the postman call?
Reading through the spec here it looks like source should be a filepath/name, and not raw binary data.
The easy way may be to save the base64 data to an appropriately encoded file, then provide that file's path as source.
Otherwise I'm not sure I'd go so far as to take apart api_instance.upload_attachment() to feed in the base64 data instead of opening/reading from the specified filename.
I found a solution:
Create a temporary file to get its read stream, and pass that stream as source instead of the myBuffer parameter. Here is the code for creating the temporary file:
async getTempFileSource(bufferData) {
  const fs = require("fs");
  // remove the mime type prefix
  if (bufferData.startsWith('data:'))
    bufferData = bufferData.split('base64,')[1];
  // get the file extension
  const type = await require('file-type').fromBuffer(Buffer.from(bufferData, 'base64'));
  if (!type) {
    console.log("getTempFileSource - The buffer data is corrupted", 'red');
    return null;
  }
  // create a temporary file
  const tempFile = require('tmp').fileSync({ postfix: '.' + type.ext });
  // append the buffer data to the temp file
  fs.appendFileSync(tempFile.name, Buffer.from(bufferData, 'base64'));
  // create a read stream from the temp file
  const source = fs.createReadStream(tempFile.name);
  // remove the temp file
  tempFile.removeCallback();
  return source;
}
Here is the code for creating the attachment:
return new Promise(async (resolve, reject) => {
  const source = await getTempFileSource(bufferData);
  if (!source)
    resolve(null);
  else {
    session.smoochClient.attachments
      .create({
        appId: appId,
        props: {
          for: 'message',
          access: 'public',
          appUserId: appUserId
        },
        source: source
      })
      .then(res => {
        resolve(res);
      }).catch(err => {
        reject(err);
      });
  }
});

how to make formidable not save to var/folders on nodejs and express app

I'm using formidable to parse incoming files and store them on AWS S3.
While debugging I found out that formidable first saves the file to disk at /var/folders/, and over time unnecessary files stack up on disk, which could become a big problem later.
It was careless of me to use code without fully understanding it, and now I have to figure out how to either remove the parsed file after saving it to S3, or save it to S3 without writing it to disk at all.
How do I do that?
I would appreciate it if someone could point me in the right direction.
This is how I handle the files:
import formidable, { Files, Fields } from 'formidable';
const form = new formidable.IncomingForm();
form.parse(req, async (err: any, fields: Fields, files: Files) => {
  let uploadUrl = await util
    .uploadToS3({
      file: files.uploadFile,
      pathName: 'myPathName/inS3',
      fileKeyName: 'file',
    })
    .catch((err) => console.log('S3 error =>', err));
});
This is how I solved the problem:
When I parse the incoming multipart form data, I have access to all the details of the files, because they have already been parsed and saved to local disk on the server. So, using the path formidable gives me, I unlink/remove each file with Node's built-in fs.unlink function, of course only after saving it to AWS S3.
This is the code:
import fs from 'fs';
import formidable, { Files, Fields } from 'formidable';
const form = new formidable.IncomingForm();
form.multiples = true;
form.parse(req, async (err: any, fields: Fields, files: Files) => {
  const pathArray = [];
  try {
    const s3Url = await util.uploadToS3(files);
    // do something with the s3Url
    pathArray.push(files.uploadFileName.path);
  } catch (error) {
    console.log(error);
  } finally {
    pathArray.forEach((element: string) => {
      fs.unlink(element, (err: any) => {
        if (err) console.error('error:', err);
      });
    });
  }
});
I also found a solution which you can take a look at here, but due to the architecture I found it slightly hard to implement without changing my original code (or let's just say I didn't fully understand the given implementation).
I think I found it. According to the docs (see options.fileWriteStreamHandler): "you need to have a function that will return an instance of a Writable stream that will receive the uploaded file data. With this option, you can have any custom behavior regarding where the uploaded file data will be streamed for. If you are looking to write the file uploaded in other types of cloud storages (AWS S3, Azure blob storage, Google cloud storage) or private file storage, this is the option you're looking for. When this option is defined the default behavior of writing the file in the host machine file system is lost."
const form = formidable({
fileWriteStreamHandler: someFunction,
});
EDIT: My whole code
import formidable from "formidable";
import { Writable } from "stream";
import { Buffer } from "buffer";
import { v4 as uuidv4 } from "uuid";
export const config = {
  api: {
    bodyParser: false,
  },
};
const formidableConfig = {
  keepExtensions: true,
  maxFileSize: 10_000_000,
  maxFieldsSize: 10_000_000,
  maxFields: 2,
  allowEmptyFiles: false,
  multiples: false,
};
// promisify formidable
function formidablePromise(req, opts) {
  return new Promise((accept, reject) => {
    const form = formidable(opts);
    form.parse(req, (err, fields, files) => {
      if (err) {
        return reject(err);
      }
      return accept({ fields, files });
    });
  });
}
const fileConsumer = (acc) => {
  const writable = new Writable({
    write: (chunk, _enc, next) => {
      acc.push(chunk);
      next();
    },
  });
  return writable;
};
// inside the handler
export default async function handler(req, res) {
  const token = uuidv4();
  try {
    const chunks = [];
    const { fields, files } = await formidablePromise(req, {
      ...formidableConfig,
      // consume this, otherwise formidable tries to save the file to disk
      fileWriteStreamHandler: () => fileConsumer(chunks),
    });
    // do something with the files
    const contents = Buffer.concat(chunks);
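    // NOTE: `storage` below is assumed to be an already-initialized Cloud Storage client (e.g. from firebase-admin); its setup isn't shown in this snippet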
    const bucketRef = storage.bucket("your bucket");
    const file = bucketRef.file(files.mediaFile.originalFilename);
    await file
      .save(contents, {
        public: true,
        metadata: {
          contentType: files.mediaFile.mimetype,
          metadata: { firebaseStorageDownloadTokens: token },
        },
      })
      .then(() => {
        file.getMetadata().then((data) => {
          const fileName = data[0].name;
          const media_path = `https://firebasestorage.googleapis.com/v0/b/${bucketRef?.id}/o/${fileName}?alt=media&token=${token}`;
          console.log("File link", media_path);
        });
      });
  } catch (e) {
    // handle errors
    console.log("ERR PREJ ...", e);
  }
}

Retrieve the attributes from a file using nodejs?

Hi, I'm looking for Node.js code that returns the attributes of each file in a folder. I wrote code to retrieve all the file names in a folder, and other code to list the data of a file name we provide, but what I actually need is to return all the file names in a folder together with their attributes/column names. I'm new to Node.js, so please help me.
LISTING DATA CODE:
const AWS = require('aws-sdk');
const neatCsv = require('neat-csv');
var s3 = new AWS.S3({});
exports.handler = (event, context, callback) => {
  const params = {
    Bucket: 'ml-framework-api',
    Key: 'wavicle.csv'
  };
  s3.getObject(params, async (err, result) => {
    if (err) {
      return console.error(err);
    }
    neatCsv(result.Body).then((parsedData) => {
      callback(null, parsedData);
    });
  });
};
LISTING FILE IN S3:
const AWS = require('aws-sdk')
const s3 = new AWS.S3({
  accessKeyId: '-------------',
  secretAccessKey: '-------------------',
  region: 'ap-south-1'
})
const params = {
  Bucket: 'wavicle'
}
s3.listObjects(params, (err, data) => {
  if (err) {
    return console.log(err)
  }
  console.log(data)
})
It's best to start with Node's file system API documentation.
Here is a simple example of how to get information about the files in a folder (there are many ways; this is adapted quickly from the example in the documentation above):
const fsp = require("fs/promises");
async function dirFilesInfo(path) {
  const dir = await fsp.opendir(path);
  for await (const dirEntry of dir) {
    const fileInfo = await fsp.stat(path + "/" + dirEntry.name);
    console.log(dirEntry.name, fileInfo);
  }
}
dirFilesInfo("./").catch(console.error);
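If you need the attributes as data rather than console output, a small variation (just a sketch; the chosen fields are examples) can collect them into an array of plain objects:
const fsp = require("fs/promises");
async function dirFilesAttributes(dirPath) {
  const result = [];
  for (const name of await fsp.readdir(dirPath)) {
    const stats = await fsp.stat(dirPath + "/" + name);
    // pick whichever attributes ("columns") you need from the stats object
    result.push({ name, size: stats.size, modified: stats.mtime, isDirectory: stats.isDirectory() });
  }
  return result;
}
dirFilesAttributes("./").then(console.log).catch(console.error);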

How to convert multiple files as zip in nodejs?

Here is my code. Can someone help me with how to download the files as a zip folder?
exports.downloadAllFiles = function(req, res) {
  demodb.findOne({ guid: req.params.id }, function(err, data) {
    if (err) {
      console.log("Error in finding case....");
      res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
    } else {
      if (data) {
        // Here multiple files are contained in the data array
        // So I need to download the files into a zip folder
      }
    }
  })
};
};
You can do this using ADM-ZIP
const zip=require('adm-zip');
var zipper = new zip();
zipper.addLocalFile('1.csv');
zipper.addLocalFile('2.csv');
zipper.addLocalFile('3.csv');
zipper.writeZip("123.zip");
Here's a small adm-zip example showing how to add files directly, from a local directory, and from a buffer:
// creating archives
var zip = new AdmZip();
// add file directly
zip.addFile("test.txt", Buffer.from("inner content of the file"), "entry comment goes here");
// add local file
zip.addLocalFile("/home/me/some_picture.png");
// get everything as a buffer
var willSendthis = zip.toBuffer();
// or write everything to disk
zip.writeZip(/*target file name*/ "/home/me/files.zip");
In your case you can add the files in a for loop, iterating through the array and adding a file in each iteration:
exports.downloadAllFiles = function(req, res) {
  demodb.findOne({ guid: req.params.id }, function(err, data) {
    if (err) {
      console.log("Error in finding case....");
      res.json(HttpStatus.INTERNAL_SERVER_ERROR, {});
    } else {
      if (data) {
        // for loop goes here:
        for (var i = 0; i < data.length; i++) {
          // add the files to zip
        }
      }
    }
  })
};
var AdmZip = require('adm-zip');
var zip = new AdmZip();
var fs = require('fs-extra');
let arr = ['AKTU.pdf', 'test1.pdf'];
for (let i = 0; i < arr.length; i++) {
  zip.addLocalFile(`./createZip/${arr[i]}`); // local path
}
zip.writeZip("./files.zip");
You can use this code to add multiple files to a zip.
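To send the archive back from the express handler in the original question instead of writing it to disk, something along these lines could work (a sketch; the './uploads/' path and the shape of the data array are assumptions):
const AdmZip = require('adm-zip');
exports.downloadAllFiles = function(req, res) {
  demodb.findOne({ guid: req.params.id }, function(err, data) {
    if (err || !data) {
      return res.status(500).json({});
    }
    const zip = new AdmZip();
    // assuming each entry in `data` is a file name stored on local disk
    data.forEach(function(fileName) {
      zip.addLocalFile('./uploads/' + fileName); // './uploads/' is a placeholder path
    });
    res.set('Content-Type', 'application/zip');
    res.set('Content-Disposition', 'attachment; filename=files.zip');
    return res.send(zip.toBuffer());
  });
};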
