Access file upload formData in Express - node.js

How can I access the files in the API?
I read the other solutions, and all of them require npm packages. I want to understand why I can't do it in vanilla Node. Also, the answers are old and recommend body-parser, which now comes bundled with Express.
I would like the solution to be vanilla JS in order to understand the process better.
client
async function uploadFile(file) {
let formData = new FormData();
formData.append("file", file);
let res = await fetchPostFile("/api/files", formData);
}
fetch
export async function fetchPostFile(url, formData) {
try {
let result = await (
await fetch(url, {
method: "POST",
withCredentials: true,
credentials: "include",
headers: {
Authorization: localStorage.getItem("token"),
Accept: "application/json",
"Content-type": "multipart/form-data",
},
body: formData,
})
).json();
return result;
} catch (err) {
return err;
}
}
api
router.post("/api/files", async function (req, res, next) {
try {
console.log(req.file); // undefined
console.log(req.files); // undefined
console.log(req.body); // {}
} catch (err) {
next(err);
} finally {
req.connection.release();
}
});
Why is the req.body empty? Please help me understand what I'm doing wrong.

First, you need a package like multer to handle multipart/form-data in Express: the bundled body parsers cover JSON and urlencoded bodies, but not multipart. Use it as middleware to declare the file field; the name passed as an argument to the single() function must match the name appended on the client side. Also drop the manual "Content-type": "multipart/form-data" header from your fetch call: when the body is a FormData object, the browser sets that header itself, including the required boundary parameter, and overriding it without a boundary prevents any multipart parser from reading the body (which is why req.body stays empty).
const multer = require('multer')
router.post("/api/files", multer().single('file'), async function (req, res, next) {
try {
console.log(req.file)
} catch (err) {
next(err)
} finally {
req.connection.release()
}
});
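For reference, here is a minimal sketch of the corrected client-side fetch call (same /api/files route and token storage as in the question); the key point is that no Content-type header is set:
export async function fetchPostFile(url, formData) {
  let res = await fetch(url, {
    method: "POST",
    credentials: "include",
    headers: {
      Authorization: localStorage.getItem("token"),
      Accept: "application/json",
      // No Content-type here: the browser adds
      // multipart/form-data with the boundary itself
    },
    body: formData,
  });
  return res.json();
}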

Here's the whole file management API.
I am using busboy, which is what multer uses under the hood. I found it easier to use.
const express = require("express");
const router = express.Router();
const config = require("../../config");
const busboy = require("busboy");
const fs = require("fs");
const SHA256 = require("crypto-js/sha256");
let filesFolderPath = config.paths.files;
router.get("/api/records/:recordUid/files/:fieldUid", async (req, res, next) => {
try {
let { recordUid, fieldUid } = req.params;
let query = `
select
rdf.*,
r.uid recordUid,
round(sizeBytes / 1024, 0) sizeKb,
round(sizeBytes / 1024 / 1024, 0) sizeMb
from recordDataFile rdf
left join record r on r.id = rdf.recordId
left join field f on f.id = rdf.fieldId
where
r.uid = ?
and f.uid = ?;
`;
let rows = await req.pool.query(query, [recordUid, fieldUid]);
res.status(200).send(rows);
} catch (err) {
next(err);
}
});
router.get("/api/files/:hash", async (req, res, next) => {
try {
let { hash } = req.params;
let query = `
select *
from recordDataFile
where hash = ?
`;
let rows = await req.pool.query(query, [hash]);
let fileData = rows[0];
res.download(fileData.path);
} catch (err) {
next(err);
}
});
router.post("/api/files", async (req, res, next) => {
try {
let bb = busboy({
headers: req.headers,
defCharset: "utf8",
limits: {
fileSize: 20 * 1024 * 1024, // 20 mb
files: 5,
},
});
let fields = {};
// Get any text values
bb.on("field", (fieldname, val, fieldnameTruncated, valTruncated) => {
console.log(fieldname, val);
fields[fieldname] = val;
});
// Read file stream. In busboy 1.x the third argument is an info object: { filename, encoding, mimeType }
bb.on("file", (fieldname, fileStream, info) => {
// Re-decode the filename to prevent mojibake with non-ASCII (e.g. Cyrillic) names
let originalName = Buffer.from(info.filename, "latin1").toString("utf8");
let nameParts = originalName.split(".");
let extension = nameParts[nameParts.length - 1]; // without the . from .jpeg
// IMPORTANT!!! FILE NAME CAN'T HAVE SPACES, it won't save properly!!!
let hash = SHA256(`${+new Date()}${originalName}`).toString();
// Absolute path to file
let filePath = `${filesFolderPath}${hash}`;
// Open writeable stream to path
let writeStream = fs.createWriteStream(filePath);
// Pipe the file to the opened stream
fileStream.pipe(writeStream);
// Check for errors
writeStream.on("error", (err) => {
console.log("writeStream", err);
});
// Writing done, stream closed ('close' receives no arguments; write errors are caught by the "error" handler above)
writeStream.on("close", async () => {
let query = `
insert into recordDataFile
(
recordId,
fieldId,
name,
extension,
hash,
path,
sizeBytes,
userId,
created
)
values
(
(select id from record where uid = ?),
(select id from field where uid = ?),
?,
?,
?,
?,
?,
?,
now()
);
`;
let sizeBytes = fs.statSync(filePath).size;
await req.pool.query(query, [fields.recordUid, fields.fieldUid, originalName, extension, hash, filePath, sizeBytes, req.userId]);
// record updated. send notification?
await req.pool.query(`update record set updated = now(), updatedByUserId = ? where uid = ?`, [req.userId, fields.recordUid]);
});
});
bb.on("finish", () => {
res.status(200).send({ success: true });
});
req.pipe(bb); // Hooks the streams together. Without it, you're not feeding busboy any data to parse.
} catch (err) {
console.log("file upload catch", err);
next(err);
}
});
router.delete("/api/files/:hash", async (req, res, next) => {
try {
let { hash } = req.params;
// get the file
let query = `
select * from recordDataFile where hash = ?
`;
let rows = await req.pool.query(query, [hash]);
let file = rows[0];
let filePath = file.path;
// remove file
fs.unlinkSync(filePath);
// delete the file metadata
await req.pool.query(` delete from recordDataFile where hash = ?`, [hash]);
res.status(200).send(rows);
} catch (err) {
next(err);
}
});
module.exports = router;
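For completeness, a minimal client-side sketch for posting to this route (my addition; the field names recordUid and fieldUid match what the handler reads). The text fields are appended before the file on purpose: multipart parts arrive in insertion order, so busboy emits the field events before the file finishes writing:
async function uploadRecordFile(file, recordUid, fieldUid) {
  let formData = new FormData();
  // Text fields first, so busboy parses them before the file part
  formData.append("recordUid", recordUid);
  formData.append("fieldUid", fieldUid);
  formData.append("file", file);
  let res = await fetch("/api/files", {
    method: "POST",
    credentials: "include",
    headers: { Authorization: localStorage.getItem("token") },
    body: formData,
  });
  return res.json();
}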

Related

Why the nodejs heap out of memory for creating Excel file with big data?

I am creating an Excel file on the Node.js side and returning base64 data to ReactJS to download the file. On the Node.js side, I use Promise.all to fetch data from a server in chunks and append the data to the sheet with
worksheet.addRows(data);
For around 20-30k rows it works fine, but for around 100k it throws a heap out of memory error on the Node.js side.
I have increased the memory allocated to Node.js as well, but I get the same error:
node --max_old_space_size=5000 app.js
What am I doing wrong? Any suggestions?
Nodejs
const axios = require('axios');
var excel = require("exceljs");
const workbook = new excel.Workbook();
const worksheet = workbook.addWorksheet("My Sheet");
worksheet.columns = [
{ header: "TicketId", key: "ticketId" },
{ header: "Email", key: 'user_email' },
{ header: "User", key : 'user_name' },
{ header: "Subject", key: "subject" },
...//many more headers
];
exports.getTicketData = async (req, res, next) => {
res.connection.setTimeout(0);
const { body } = req;
const token = body.token;
const organization_id = body.organization_id;
const server = body.server;
const sideFilter = body.sideFilter;
let baseurl = 'url for server end to fetch data';
if (baseurl) {
let data = new Array();
let limit = 3000;
const promises = [];
try {
let count = await getCount(token,limit, organization_id, baseurl, sideFilter);
for(var i = 1;i<=count;i++) {
promises.push(getData(i,limit,organization_id,token, baseurl, sideFilter));
}
await Promise.all(promises).then((results) => {
}).catch((e) => {
throw e;
});
var base64File = await writeExcelAndUpload(workbook);
return res.status(200).json({ file:base64File });
} catch (err) {
return res.status(400).json({ type:'error', msg:'File not generated please contact support staff' });
}
} else {
return res.status(400).json({ type:'error', msg:'please define server name' });
}
};
let getData = (page,limit, organization_id,token, baseurl, sideFilter) =>{
return new Promise((resolve, reject) => {
axios.post(baseurl+`/v2/get-export`, {
page:page,
organization_id:organization_id,
per_page:limit,
filter: "",
sorted:"",
...sideFilter
},{ headers: {"Authorization" : `Bearer ${token}`} }).then(function (response) {
let dataTemp = response.data.data.data.map((t,i)=>{
return {
...t,
name:t.name,
...//many more columns like 70
}
});
worksheet.addRows(dataTemp);
resolve(true);
}).catch(function (error) {
reject(error);
});
});
}
let getCount = (token,limit, organization_id, baseurl, sideFilter) => {
// run an api and return count against limit
}
let writeExcelAndUpload = async (workbook) => {
const fileBuffer = await workbook.xlsx.writeBuffer();
let base64File = Buffer.from(fileBuffer).toString('base64');
base64File = 'data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,'+base64File;
return base64File;
}
Client side reactjs
exportLink = () => {
postData ={
...
};
return axios.post(`${baseurl}/api/ticketing/get-ticket`, postData).then(function (response) {
const downloadLink = document.createElement("a");
const fileName = "export.xlsx";
downloadLink.href = response.data.file;
downloadLink.download = fileName;
downloadLink.click();
}).catch(function(error){
throw error;
});
}
Well, it is kind of expected that you may run out of heap when holding that many entries (100k or more) in memory at once.
I suggest you use pagination: instead of fetching e.g. 100k entries at once, fetch 1k, process them, then fetch the next 1k, and repeat until you have processed all entries.
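A minimal sketch of that idea, combined with exceljs's streaming WorkbookWriter so committed rows are flushed to disk instead of accumulating in the heap. The fetchPage helper is hypothetical and stands in for your paginated getData call:
const excel = require("exceljs");

async function exportTickets(filename, totalPages, limit) {
  // Streaming writer: committed rows go to disk, not the heap
  const workbook = new excel.stream.xlsx.WorkbookWriter({ filename });
  const worksheet = workbook.addWorksheet("My Sheet");
  worksheet.columns = [
    { header: "TicketId", key: "ticketId" },
    { header: "Email", key: "user_email" },
    // ...more headers
  ];
  // Fetch pages sequentially: only one chunk of rows in memory at a time
  for (let page = 1; page <= totalPages; page++) {
    const rows = await fetchPage(page, limit); // hypothetical paginated fetch
    for (const row of rows) {
      worksheet.addRow(row).commit();
    }
  }
  await workbook.commit(); // finalize the file on disk
  return filename;
}
Sending the finished file with res.download instead of base64-encoding it into a JSON response also avoids holding a second full copy of the data in memory.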

How to store the readStream files of Node.js into Redis and also how to retrieve the stored readStream files from Redis?

I tried converting the readStream (Image) to string and then storing it in Redis. Then retrieving the string from Redis and converting it back to readStream. But it didn't work out.
function getFile(fileKey) {
console.log(fileKey);
const downloadParams = {
Key: fileKey,
Bucket: bucketName,
};
return s3.getObject(downloadParams).createReadStream();
}
exports.getFile = getFile;
For converting stream to string I'm using stream-to-string. It gets converted and stored in Redis.
const { getFile } = require("../s3");
const redis = require("redis");
const client = redis.createClient();
var toString = require("stream-to-string");
exports.getFileFromS3Controller = async (req, res) => {
console.log(req.params);
const path = req.params.path;
const key = req.params.key;
const readStream = getFile(path + "/" + key);
toString(readStream).then(function (msg) {
// Set data to Redis
client.setex(key, 3600, msg);
});
readStream.pipe(res);
};
On retrieving from Redis, I am not getting it back.
const redis = require("redis");
const client = redis.createClient(null, null, { detect_buffers: true });
const Readable = require("stream").Readable;
// Cache middleware
function cache(req, res, next) {
const { path, key } = req.params;
client.get(key, (err, data) => {
if (err) throw err;
if (data !== null) {
var s = new Readable();
s.push(data);
s.push(null);
s.pipe(res);
} else {
next();
}
});
}
router.get("/:path/:key", cache, getFileFromS3Controller);
You are not calling next. Another mistake is that the stream is not saved anywhere on req, so you cannot access it later from the controller. From what I can see, you are piping it directly into res, which is a problem because after that you cannot use res to send anything else.
Here it is the code (not tested)
exports.getFileFromS3Controller = (req, res) => {
if (req.fileStream) {
req.fileStream.pipe(res);
return
}
console.log(req.params);
const path = req.params.path;
const key = req.params.key;
const readStream = getFile(path + "/" + key);
toString(readStream).then(function (msg) {
// Set data to Redis
client.setex(key, 3600, msg);
// Conver string to readable
const readable = new Readable();
readable.push(msg);
readable.push(null);
readable.pipe(res);
});
};
function cache(req, res, next) {
const { path, key } = req.params;
client.get(key, (err, data) => {
if (err) throw err;
if (data !== null) {
var s = new Readable();
s.push(data);
s.push(null);
req.fileStream = s;
}
next();
});
}
Edit: I fixed a mistake in my answer, because a Readable stream cannot be rewound.
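One caveat worth adding (my note, not part of the original answer): piping image bytes through a utf-8 string corrupts binary data, which is a likely reason the retrieved file "didn't work out". A sketch that base64-encodes the bytes before client.setex and decodes them after client.get, assuming the same getFile helper as above:
exports.getFileFromS3Controller = (req, res) => {
  if (req.fileStream) {
    req.fileStream.pipe(res);
    return;
  }
  const { path, key } = req.params;
  const chunks = [];
  const readStream = getFile(path + "/" + key);
  readStream.on("data", (chunk) => chunks.push(chunk));
  readStream.on("end", () => {
    const buffer = Buffer.concat(chunks);
    // base64 survives the string round trip through Redis
    client.setex(key, 3600, buffer.toString("base64"));
    res.end(buffer);
  });
};

function cache(req, res, next) {
  const { key } = req.params;
  client.get(key, (err, data) => {
    if (!err && data !== null) {
      const s = new Readable();
      s.push(Buffer.from(data, "base64")); // decode back to raw bytes
      s.push(null);
      req.fileStream = s;
    }
    next();
  });
}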

Reading all JSONs in a folder and getting their strings

There is a folder with a lot of JSON files, and all of them contain a key called "name".
I want to get their values and turn them into a string like this:
name0=UsernameExample;name1=Flowers;name2=Test; ...
The number after name is the index/count of the JSON, i.e. name48 comes from the 48th JSON.
So far I have only tried to read the JSONs from the folder, but I failed, of course:
let s = "";
fs.readdir('/tmp/userdb/', (files) => {
files.each(file => {
name = file[file.keys()[0]];
})})
I can already convert this
var other_users = (serialize({
"sid0": 0,
"name0": "user1",
"pays0": "8521",
"avatar0": "357",
"onlinescore0": "50"
}));
to this:
sid0=0;name0=user1;pays0=8521;avatar0=357;onlinescore0=50
with this const
const serialize = obj =>
Object.entries(obj).map(([k, v]) => `${k}=${v}`).join(';')
And I want to send the result to the user this way:
if (req.query.d === 'getRandomPlayers') {
var sessionid = req.body.player_sid
let user = require("./tmp/userdb/" + sessionid + ".json")
var current_user = (serialize({
player_name: user.name
}));
res.send("method_id=1665;" + current_user);
}
It should be like res.send("method_id=1665;" + current_user + thefinalresult);
thefinalresult is where all of this should go; current_user and the other stuff are not related to this question.
Assuming the JSON files inside /tmp/userdb/ have the following structure,
{
"53874745": {
"avatar": "372",
"name": "BILLY",
"onlinescore": "1",
"pays": "8758"
}
}
you could do something like the following:
const { promisify } = require("util");
const fs = require("fs");
const path = require("path");
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
async function process(excludedSessionId) {
try {
const entries = [];
// get a list of all `JSON` files
const jsonFiles = await readdir(
path.join(__dirname, "./tmp/userdb/")
).then(
(files) => files.filter(
(file) => path.extname(file) === ".json" && !file.includes(excludedSessionId)
)
);
// iterate through a list of all `JSON` files & read their content
for (const [index, file] of jsonFiles.entries()) {
const content = await readFile(
path.join(__dirname, "./tmp/userdb/", file)
).then(JSON.parse);
// create an object for a new entry
const key = `sid${index}`;
const keyValue = Object.keys(content)[0];
// use the `spread syntax` to include the rest of the
// properties in a new entry
const entry = {
[key]: keyValue,
...content[keyValue],
};
entries.push(entry);
}
console.log(entries[0]);
// {
// sid0: '53874745',
// avatar: '372',
// name: 'BILLY',
// onlinescore: '1',
// pays: '8758'
// }
const result = entries.map((entry) => serialize(entry)).join(";");
console.log(result);
// sid0=53874745;avatar=372;name=BILLY;onlinescore=1;pays=8758;
// sid1=154261758;avatar=480;name=JESSEY;onlinescore=30;pays=8521;
return result;
} catch (error) {
console.error(error);
throw error;
}
}
process("154261742");
Then, if you'd want to use this function in a callback of your route controller, you could do something like the following:
app.get("/user", (req, res) => {
// ...
const excludedSessionId = req.body.player_sid;
process(excludedSessionId)
.then(result => {
res.send(result);
})
.catch(error => {
res.status(500).send("Something went wrong.");
});
});
References:
Spread syntax (...) - MDN
async function - MDN

How to concat chunks of incoming binary into video (webm) file node js?

I am trying to upload chunks of base64 to a Node.js server and save those chunks into one file.
let chunks = [];
app.post('/api', (req, res) => {
let {blob} = req.body;
//converting chunks of base64 to buffer
chunks.push(Buffer.from(blob, 'base64'));
res.json({gotit:true})
});
app.post('/finish', (req, res) => {
let buf = Buffer.concat(chunks);
fs.writeFile('finalvideo.webm', buf, (err) => {
console.log('Ahh....', err)
});
console.log('SAVED')
res.json({save:true})
});
The problem with the above code is that the video is not playable, and I don't know why. Am I really doing something wrong? I've also tried writable streams, and that isn't working either.
UPDATE - I
Instead of sending blobs, I've implemented sending binary, but even so I am facing a problem: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.
client.js
postBlob = async blob => {
let arrayBuffer = await new Response(blob).arrayBuffer();
let binary = new Uint8Array(arrayBuffer)
console.log(binary) // logging typed Uint8Array
axios.post('/api',{binary})
.then(res => {
console.log(res)
})
};
server.js
let chunks = [];
app.post('/api', (req, res) => {
let {binary} = req.body;
let chunkBuff = Buffer.from(binary) // This line throws the error
chunks.push(chunkBuff);
console.log(chunkBuff)
res.json({gotit:true})
});
//Somehow combine those chunks into one file
app.post('/finish', (req, res) => {
console.log('Combinig the files',chunks.length);
let buf = Buffer.concat(chunks);
console.log(buf) //empty buff
fs.writeFile('save.webm', buf, (err) => {
console.log('Ahh....', err)
});
res.json({save:true})
});
UPDATE - II
I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays; I don't know what happened to the other chunks, and the video ends.
code
const writeMyStream = fs.createWriteStream(__dirname+'/APPENDED.webm', {flags:'a', encoding:null});
app.post('/api', (req, res) => {
let {binary} = req.body;
let chunkBuff = Buffer.from(new Uint8Array(binary));
writeMyStream.write(chunkBuff);
res.json({gotit:true})
});
UPDATE - III
My client code. Note: I've commented out the other ways I tried to upload blobs.
customRecordStream = stream => {
let recorder = new MediaStreamRecorder(stream);
recorder.mimeType = 'video/webm;codecs=vp9';
recorder.ondataavailable = this.postBlob
recorder.start(INT_REC)
};
postBlob = async blob => {
let arrayBuffer = await new Response(blob).arrayBuffer();
let binary = new Uint8Array(arrayBuffer)
axios.post('/api',{binary})
.then(res => {
console.log(res)
})
// let binaryUi8 = new Uint8Array(arrayBuffer);
// let binArr = Array.from(binaryUi8);
// // console.log(new Uint8Array(arrayBuffer))
//
// console.log(blob);
// console.log(binArr)
// let formData = new FormData();
// formData.append('fname', 'test.webm')
// formData.append("file", blob);
//
// console.log(formData,'Checjk Me',blob)
// axios({
// method:'post',
// url:'/api',
// data:formData,
// config: { headers: {'Content-Type': 'multipart/form-data' }}
// }).then(res => {
// console.log(res,'FROM SERBER')
//
// })
//
//
// .then(res => {
// console.log(res)
// })
// this.blobToDataURL(blob, (blobURL) => {
//
// axios.post('/api',{blob:blobURL})
// .then(res => {
// console.log(res)
// })
// })
};
I was able to get this working by converting to base64 on the front end with the FileReader API. On the back end, create a Buffer from the data chunk sent and write it to a file stream. Some key things about my code sample:
I'm using fetch because I didn't want to pull in axios.
When using fetch, you have to make sure you use bodyParser on the backend.
I'm not sure how much data you're collecting in your chunks (i.e. the timeslice value passed to the start method on the MediaRecorder object), but you'll want to make sure your backend can handle the size of the data chunks coming in. I set mine really high, to 50MB, but this may not be necessary.
I never close the write stream explicitly... you could potentially do this in your /finish route. Otherwise, createWriteStream defaults to autoClose: true, so the Node process will close it automatically.
Full working example below:
Front End:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let sourceBuffer;
function customRecordStream(stream) {
// should actually check whether the given mimeType is supported in this browser
let options = { mimeType: 'video/webm;codecs=vp9' };
mediaRecorder = new MediaRecorder(stream, options);
mediaRecorder.ondataavailable = postBlob;
mediaRecorder.start(INT_REC);
};
function postBlob(event){
if (event.data && event.data.size > 0) {
sendBlobAsBase64(event.data);
}
}
function handleSourceOpen(event) {
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
}
function sendBlobAsBase64(blob) {
const reader = new FileReader();
reader.addEventListener('load', () => {
const dataUrl = reader.result;
const base64EncodedData = dataUrl.split(',')[1];
console.log(base64EncodedData)
sendDataToBackend(base64EncodedData);
});
reader.readAsDataURL(blob);
};
function sendDataToBackend(base64EncodedData) {
const body = JSON.stringify({
data: base64EncodedData
});
fetch('/api', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body
}).then(res => {
return res.json()
}).then(json => console.log(json));
};
Back End:
const fs = require('fs');
const path = require('path');
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "50MB", type:'application/json'}));
app.post('/api', (req, res) => {
try {
const { data } = req.body;
const dataBuffer = Buffer.from(data, 'base64'); // Buffer.from: new Buffer() is deprecated
const fileStream = fs.createWriteStream('finalvideo.webm', {flags: 'a'});
fileStream.write(dataBuffer);
console.log(dataBuffer);
return res.json({gotit: true});
} catch (error) {
console.log(error);
return res.json({gotit: false});
}
});
Inspired by @willascend's answer:
Backend-side:
app.use(express.raw());
app.post('/video-chunck', (req, res) => {
fs.createWriteStream('myvideo.webm', { flags: 'a' }).write(req.body);
res.sendStatus(200);
});
Frontend-side:
mediaRecorder.ondataavailable = event => {
if (event.data && event.data.size > 0) {
fetch(this.serverUrl + '/video-chunck', {
method: 'POST',
headers: {'Content-Type': 'application/octet-stream'},
body: event.data
});
}
};
My express version is 4.17.1
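One caveat (my note, not from the original answer): express.raw defaults to a 100kb body limit (and the application/octet-stream type), so larger recorder chunks will be rejected unless you raise the limit, e.g.:
// Raise the raw-body limit so larger webm chunks get through
app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' }));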
I faced the same problem today.
As a solution, on the back end I used fs.appendFile:
fs.appendFile(Path, rawData, function (err) {
if (err) throw err;
console.log('Chunk saved!');
})

ASYNC/AWAIT for Chaining multiple http.requests

How would I go about chaining an http.request within the response of another http.request and then push them into an array before going to the front end?
router.get("/:team", (req, res) => {
let teamParams = teams[req.params.team];
twitter.get("search/tweets", teamParams, (err, data, resp) => {
let tweetArr = [];
let text = data.statuses;
text.map((dat) => {
let im = dat.entities.urls[0].url
dat.links = im;
tweetArr.push(dat);
});
res.json({ message: "Success", tweets: tweetArr });
});
});
Currently I get my data object, loop through it, and add a URL as a property. Now I want to chain another HTTP request, to call a second API and get its response, before I use res.json.
I've tried a workaround with promises but I can never return the full object with the response from the second api call.
This is what I have so far; I have managed to get to the point where my object contains the responses from the second link. How can I collect all the tweets into an array I can finally resolve?
require("dotenv").config();
const Twitter = require("twitter");
const API_IMAGE_PREV = "http://api.linkpreview.net/";
const request = require("request");
const key = process.env.IM_PREV_KEY;
let twitter = new Twitter({
consumer_key: process.env.TWITTER_CONSUMER_KEY,
consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
bearer_token: process.env.TWITTER_BEARER_TOKEN
});
let teamParams = {
q: "from:ManUtdMEN MUFC",
count: 2,
result_type: "recent"
};
var third = function thirdUrl(next) {
var promise = new Promise(function(resolve, reject) {
next.forEach(x => {
let ln = x.links;
const options = {
url: "http://api.linkpreview.net/?key=" + key + "&q=" + ln,
method: "get"
};
request.get(options, (err, req, res) => {
if (err) {
console.log(err);
} else {
x.desc = res;
}
});
});
});
};
var second = function secondUrl(previous) {
var promise = new Promise(function(resolve, reject) {
let p = previous.statuses;
let links = [];
p.forEach(t => {
let l = t.entities.urls[0].url;
t.links = l;
});
resolve(p);
});
return promise;
};
twitter
.get("search/tweets", teamParams)
.then(second)
.then(third)
.catch(function(error) {
throw error;
});
What module are you using for the http requests? Here's an example with axios.
const axios = require('axios');
router.get("/:team", (req, res) => {
let teamParams = teams[req.params.team];
twitter.get("search/tweets", teamParams, async (err, data, resp) => {
let tweetArr = [];
let text = data.statuses;
text.map((dat) => {
let im = dat.entities.urls[0].url
dat.links = im;
tweetArr.push(dat);
});
// Don't shadow the outer res, or res.json below breaks
let results = await Promise.all(tweetArr.map(dat => axios.get(dat.links)));
res.json({ message: "Success", tweets: results });
});
})
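Here's a fuller sketch of the same idea as a single async route, reusing the question's API_IMAGE_PREV, key, and teams names (my rewrite, untested against the linkpreview API):
const axios = require("axios");

router.get("/:team", async (req, res, next) => {
  try {
    // twitter.get returns a promise when no callback is passed
    const data = await twitter.get("search/tweets", teams[req.params.team]);
    const tweets = await Promise.all(
      data.statuses.map(async (status) => {
        const link = status.entities.urls[0].url;
        // Chained second request: fetch a preview for each tweet link
        const preview = await axios.get(API_IMAGE_PREV, {
          params: { key, q: link },
        });
        return { ...status, links: link, desc: preview.data };
      })
    );
    res.json({ message: "Success", tweets });
  } catch (err) {
    next(err);
  }
});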
