I'm trying to build a certificate generation module using Node.js.
The API to create a certificate is as follows:
// Create certificate
const express = require('express');
const router = express.Router();
const { createCanvas } = require('canvas');
const fs = require('fs');
const { decodeBase64Image } = require('../helper/certificatehelper');

router.post('/imageadd', async (req, res, next) => {
  try {
    const { name } = req.body;
    const width = 1920;
    const height = 1080;
    const canvas = createCanvas(width, height);
    const context = canvas.getContext('2d');
    context.fillStyle = 'white';
    context.fillRect(0, 0, width, height);
    context.fillStyle = '#000';
    context.font = '72px Arial';
    context.textAlign = 'center';
    context.fillText(name, 900, 500);
    const dataurl = canvas.toDataURL();
    const decodedImg = decodeBase64Image(dataurl);
    const imageBuffer = decodedImg.data;
    fs.writeFileSync('./src/images/image1.png', imageBuffer);
    fs.readFile('./src/images/image1.png', function (err, data) {
      if (err) throw err; // Fail if the file can't be read.
      res.writeHead(200, { 'Content-Type': 'image/png' }); // canvas.toDataURL() produces a PNG
      res.end(data); // Send the file data to the browser.
    });
  } catch (error) {
    res.json({ success: false, msg: error.message });
  }
});

module.exports = router;
The problem I'm facing: if multiple requests are sent in parallel, how do I generate one certificate at a time (synchronized)?
Other requests need to wait until the certificate for the previous request has been generated.
The certificates should also be generated in a separate process from the main app process.
How do I solve this?
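A minimal sketch of one way to do the serialization (my own suggestion, not a confirmed solution; renderCertificate is a hypothetical helper that wraps the canvas code above and resolves with the PNG buffer). Queuing in-process only serializes the work; for the separate-process requirement, the same enqueue call could hand the job to a worker_threads worker or a job queue such as BullMQ instead.

const express = require('express');
const router = express.Router();
// Hypothetical helper: the canvas drawing above, refactored to return a PNG buffer.
const { renderCertificate } = require('../helper/certificatehelper');

let queue = Promise.resolve();

// Chain each task onto the previous one so only one runs at a time.
function enqueue(task) {
  const run = queue.then(task, task);
  queue = run.catch(() => {}); // keep the chain alive when a task rejects
  return run;
}

router.post('/imageadd', (req, res) => {
  enqueue(() => renderCertificate(req.body.name))
    .then((imageBuffer) => {
      res.writeHead(200, { 'Content-Type': 'image/png' });
      res.end(imageBuffer);
    })
    .catch((error) => res.json({ success: false, msg: error.message }));
});

module.exports = router;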
Related
I am trying to use sharp in my MERN application. I send a request from my frontend, and it is undefined in my sharp middleware, but if I get rid of the sharp middleware, the request is defined later on. If I log the request in createCountry, the body is defined; if I log it in convertToWebP, it is not.
The route is the one for "/new" below:
const express = require("express");
const router = express.Router();
const { storage } = require("../imageupload/cloudinary.js");
const multer = require("multer");
const {
  getCountry,
  createCountry,
  getCountries,
  updateCountry,
  deleteCountry,
  getAllCountries,
} = require("../controllers/country.js");
const {convertToWebP} = require('../middlewares/toWebP')
const { isLoggedIn, authorizeCountry, validateCountry } = require("../middlewares/auth");
const catchAsync = require("../utils/catchAsync");
const ExpressError = require("../utils/ExpressError");
const upload = multer({ storage: storage });
router.get("/", getCountries);
router.get('/getAll', getAllCountries);
router.post("/new", isLoggedIn, converToWebP, upload.array("images"), createCountry);
router.get("/:countryId", getCountry);
router.patch("/:countryId", validateCountry, authorizeCountry, upload.array("images", 8), updateCountry);
router.delete("/:countryId", authorizeCountry, deleteCountry);
module.exports = router;
The code for createCountry is here:
exports.createCountry = async (req, res) => {
  const { name, description, tags, location, cjLink } = req.body;
  const creator = req.user._id;
  const images = req.files.map((file) => {
    return { image: file.path, publicId: file.filename };
  });
  try {
    const geoData = await geocoder
      .forwardGeocode({
        query: req.body.location,
        limit: 1,
      })
      .send();
    const geometry = geoData.body.features[0].geometry;
    const country = new Country({
      name,
      description,
      tags,
      creator,
      location, //: //geometry
      geometry,
      url: '',
      cjLink: cjLink,
    });
    const overall = new Overall({
      name,
      description,
      tags,
      creator,
      location, //: //geometry
      geometry,
      url: '',
      cjLink: cjLink,
    });
    country.images.push(...images);
    country.headerImage.push(...images);
    const data = await country.save();
    overall.url = `/country/${data._id}`;
    data.url = `/country/${data._id}`;
    await overall.save();
    await data.save();
    return res.status(201).json(data);
  } catch (error) {
    console.log("error during create country", error);
    return res.status(500).json({ message: error.message });
  }
};
And lastly, the code for convertToWebP is here:
const sharp = require("sharp");
const { cloudinary } = require("../imageupload/cloudinary");

exports.convertToWebP = async (req, res, next) => {
  try {
    req.files = await Promise.all(req.files.map(async (file) => {
      const buffer = await sharp(file.buffer)
        .toFormat('webp')
        .toBuffer();
      return { ...file, buffer, originalname: `${file.originalname}.webp` };
    }));
    next();
  } catch (error) {
    res.status(500).json({ message: error.message });
  }
};
Any help is appreciated! I tried console.log as described above, I tried changing the order of the middleware, which didn't work either, and I tried logging req.body directly from the route, and it came up as an empty object.
You cannot access req.files before the multer middleware runs, so you have to reorder:
router.post("/new", isLoggedIn, upload.array("images"), convertToWebP, createCountry);
const PDF = require("pdfkit");
const fs = require("fs");
const express = require("express");
const app = express();
app.get("/", (req, res) => {
try {
const doc = new PDF({ size: "A4" });
doc.image("/home/rahul/Desktop/projects/test/test/vlogo.png", 10, 0, {
width: 100,
height: 100,
align: "left",
});
doc.pipe(
fs.createWriteStream("/home/rahul/Desktop/projects/test/test/doc.pdf")
);
doc.end();
res.sendFile("/home/rahul/Desktop/projects/test/test/doc.pdf");
} catch (error) {
console.log(error);
res.status(500).sendFile("Rahul");
}
});
The above code is not sending a valid response to the browser.
When I load the API in the browser, I get an invalid file.
But when I remove the doc.pipe code, it works fine and sends the PDF.
You can try commenting out the pipe call:
// doc.pipe(
//   fs.createWriteStream("/home/rahul/Desktop/projects/test/test/doc.pdf")
// );
When you call doc.end(), the PDF takes some time to be written to disk, so you are effectively sending a half-written PDF file, which results in an invalid file.
So wait for the created WriteStream to finish first:
const PDF = require("pdfkit");
const fs = require("fs");
const express = require("express");
const app = express();
app.get("/", (req, res) => {
try {
const doc = new PDF({ size: "A4" });
doc.image("/home/rahul/Desktop/projects/test/test/vlogo.png", 10, 0, {
width: 100,
height: 100,
align: "left",
});
const stream = fs.createWriteStream("/home/rahul/Desktop/projects/test/test/doc.pdf")
stream.on('finish', () => {
// now, the file is fully written to disk. Let's send it back!.
res.sendFile("/home/rahul/Desktop/projects/test/test/doc.pdf");
})
stream.on('error', err => {
console.error(err)
res.status(500).send("Failed to send pdf.")
})
doc.pipe(stream);
// start writing to file!.
doc.end();
} catch (error) {
console.log(error);
res.status(500).send("Server error occured.");
}
});
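As a side note (my suggestion, not part of the original answer): if you don't actually need the PDF on disk, pdfkit can pipe straight into the HTTP response, which avoids the write-then-sendFile round trip entirely.

const PDF = require("pdfkit");
const express = require("express");
const app = express();

app.get("/", (req, res) => {
  const doc = new PDF({ size: "A4" });
  res.setHeader("Content-Type", "application/pdf");
  doc.pipe(res); // stream the PDF to the client as it is generated
  doc.text("Hello, PDF"); // ...add your content here...
  doc.end(); // finalize; the response ends when the document stream ends
});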
I am working with Express (Node.js), and I am trying to save data from POST requests in a JSON file. But for some reason, when I restart the server, the data that the POST request saved to roomDB.json doesn't persist; instead it resets to the initial empty array...
Could anyone please advise? Thank you very much.
Here is my code:
// saving function
const fs = require("fs");

exports.save = (data, PATH) => {
  return new Promise((resolve, reject) => {
    fs.writeFile(PATH, JSON.stringify(data), function (err) {
      if (err) {
        reject(err);
      } else {
        resolve();
      }
    });
  });
};
// code in router file to make requests
const express = require("express");
const router = express.Router();
const fs = require("fs");
const roomDB_PATH = "./roomDB.json";
const { addRoom } = require("./rooms");

router.get("/", (req, res) => {
  fs.readFile(roomDB_PATH, (err, data) => {
    if (err) return res.sendStatus(400);
    res.send(data);
  });
});

router.get("/:id", (req, res) => {
  res.send("connect to a room");
});

router.post("/", (req, res) => {
  let roomName = req.body;
  if (!roomName.name) {
    res.status(404);
    res.end();
    return;
  }
  let room = addRoom(roomName.name);
  res.status(201).send(room);
});

module.exports = router;
// rooms.js
const fs = require("fs");
const uuid = require("uuid");
const { save } = require("./save");
const roomDB_PATH = "roomDB.json";

let roomdatas;
try {
  roomdatas = JSON.parse(fs.readFileSync(roomDB_PATH));
} catch (e) {
  roomdatas = [];
  save(roomdatas, roomDB_PATH);
}

const addRoom = (roomName) => {
  roomName = roomName.trim().toLowerCase();
  const existingRoom = roomdatas.find((room) => room.name === roomName);
  if (existingRoom) {
    return { error: 'chatroom already exists' };
  }
  let room = {
    name: roomName,
    id: uuid.v4(),
    messages: [],
    users: [],
    created: +new Date()
  };
  roomdatas.push(room);
  save(roomdatas, roomDB_PATH);
  return { room };
};

module.exports = { addRoom };
I'm assuming that you are encountering an error with the JSON.parse(fs.readFileSync(roomDB_PATH)) call. This code runs every time your server starts (when the file is imported into your router file), and if it encounters an error, it resets the file to an empty array. Try logging the error to see what is causing it; you're currently suppressing it completely, with no way to tell why it is failing.
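A minimal sketch of that suggestion (my own wording, assuming the files above): only initialize an empty database when the file genuinely doesn't exist, and let every other error surface instead of silently wiping the data.

const fs = require("fs");
const { save } = require("./save");
const roomDB_PATH = "roomDB.json";

let roomdatas;
try {
  roomdatas = JSON.parse(fs.readFileSync(roomDB_PATH, "utf8"));
} catch (e) {
  if (e.code === "ENOENT") {
    // First run: the file doesn't exist yet, so create an empty DB.
    roomdatas = [];
    save(roomdatas, roomDB_PATH);
  } else {
    // Corrupt JSON or an unreadable file: fail loudly rather than reset.
    throw e;
  }
}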
I want to stream a file upload request in multipart/form-data to another server and change some field names at the same time.
I don't want to temporarily store a file on disk, and I don't want to store the file completely in memory either.
I tried to use multer, busboy and multiparty. I think I got closer by using custom Transform streams but it is not working yet.
const express = require('express');
const request = require('request');
const { Transform } = require('stream');
const router = express.Router();

class TransformStream extends Transform {
  _transform(chunk, encoding, callback) {
    // here I tried to manipulate the chunk
    this.push(chunk);
    callback();
  }
  _flush(callback) {
    callback();
  }
}

router.post('/', function pipeFile(req, res) {
  const transformStream = new TransformStream();
  req.pipe(transformStream).pipe(request.post('http://somewhere.com'));
  res.sendStatus(204);
});
I tried to manipulate chunks in _transform without success (EPIPE). It sounds quite hacky; are there any better solutions?
Here is a solution using replacestream along with content-disposition.
const replaceStream = require('replacestream');
const contentDisposition = require('content-disposition');

router.post('/', function pipeFile(req, res) {
  let changeFields = replaceStream(/Content-Disposition:\s+(.+)/g, (match, p1) => {
    // Parse header
    let { type, parameters } = contentDisposition.parse(p1);
    // Change the desired field
    parameters.name = "foo";
    // Prepare replacement
    let ret = `Content-Disposition: ${type}`;
    for (let key in parameters) {
      ret += `; ${key}="${parameters[key]}"`;
    }
    return ret;
  });
  req.pipe(changeFields)
    .pipe(request.post('http://somewhere.com'))
    .on('end', () => {
      res.sendStatus(204);
    });
});
This worked for a single file multipart upload using express, multiparty, form-data, pump and got.
const stream = require('stream');
const express = require('express');
const multiparty = require("multiparty");
const got = require("got");
const FormData = require('form-data');
const pump = require('pump');
const app = express();

app.post('/upload', (req, res) => {
  const url = "<<multipart image upload endpoint>>";
  var form = new multiparty.Form();
  form.on("part", function (formPart) {
    var contentType = formPart.headers['content-type'];
    var formData = new FormData();
    formData.append("file", formPart, {
      filename: formPart.filename,
      contentType: contentType,
      knownLength: formPart.byteCount
    });
    const resultStream = new stream.PassThrough();
    try {
      // Pipe the form data to the image upload endpoint stream and the result to the result stream
      pump(formData, got.stream.post(url, { headers: formData.getHeaders(), https: { rejectUnauthorized: false } }), resultStream, (err) => {
        if (err) {
          res.send(err);
        } else {
          // Pipe the result of the image upload endpoint to the response when there are no errors.
          resultStream.pipe(res);
        }
        resultStream.destroy();
      });
    } catch (err) {
      resultStream.destroy();
      console.log(err);
    }
  });
  form.on("error", function (error) {
    console.log(error);
  });
  form.parse(req);
});
I am trying to upload chunks of base64 to a Node.js server and save those chunks into one file.
let chunks = [];

app.post('/api', (req, res) => {
  let { blob } = req.body;
  // converting chunks of base64 to buffer
  chunks.push(Buffer.from(blob, 'base64'));
  res.json({ gotit: true });
});

app.post('/finish', (req, res) => {
  let buf = Buffer.concat(chunks);
  fs.writeFile('finalvideo.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  console.log('SAVED');
  res.json({ save: true });
});
The problem with the above code is that the video is not playable, and I don't know why. Am I really doing something wrong? I've also tried writable streams, and that is not working either.
UPDATE - I
Instead of sending blobs, I've implemented sending binary, but now I am facing a problem: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.
client.js
postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  console.log(binary); // logging the typed Uint8Array
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });
};
server.js
let chunks = [];

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(binary); // This code is throwing the error
  chunks.push(chunkBuff);
  console.log(chunkBuff);
  res.json({ gotit: true });
});

// Somehow combine those chunks into one file
app.post('/finish', (req, res) => {
  console.log('Combining the files', chunks.length);
  let buf = Buffer.concat(chunks);
  console.log(buf); // empty buffer
  fs.writeFile('save.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  res.json({ save: true });
});
UPDATE - II
I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays; I don't know what happened to the other chunks, and the video ends.
code
const writeMyStream = fs.createWriteStream(__dirname + '/APPENDED.webm', { flags: 'a', encoding: null });

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(new Uint8Array(binary));
  writeMyStream.write(chunkBuff);
  res.json({ gotit: true });
});
UPDATE - III
My client code. Note: I've commented out the other ways I tried to upload blobs.
customRecordStream = stream => {
  let recorder = new MediaStreamRecorder(stream);
  recorder.mimeType = 'video/webm;codecs=vp9';
  recorder.ondataavailable = this.postBlob;
  recorder.start(INT_REC);
};

postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });

  // let binaryUi8 = new Uint8Array(arrayBuffer);
  // let binArr = Array.from(binaryUi8);
  // console.log(new Uint8Array(arrayBuffer));
  // console.log(blob);
  // console.log(binArr);

  // let formData = new FormData();
  // formData.append('fname', 'test.webm');
  // formData.append("file", blob);
  // console.log(formData, 'Check me', blob);
  // axios({
  //   method: 'post',
  //   url: '/api',
  //   data: formData,
  //   config: { headers: { 'Content-Type': 'multipart/form-data' } }
  // }).then(res => {
  //   console.log(res, 'FROM SERVER');
  // });

  // this.blobToDataURL(blob, (blobURL) => {
  //   axios.post('/api', { blob: blobURL })
  //     .then(res => {
  //       console.log(res);
  //     });
  // });
};
I was able to get this working by converting to base64 encoding on the front end with the FileReader API. On the back end, create a new Buffer from the data chunk sent and write it to a file stream. Some key things with my code sample:
I'm using fetch because I didn't want to pull in axios.
When using fetch, you have to make sure you use bodyParser on the backend
I'm not sure how much data you're collecting in your chunks (i.e. the duration value passed to the start method on the MediaRecorder object), but you'll want to make sure your backend can handle the size of the data chunk coming in. I set mine really high to 50MB, but this may not be necessary.
I never close the write stream explicitly... you could potentially do this in your /finish route. Otherwise, createWriteStream defaults to autoClose, so the Node process will do it automatically.
Full working example below:
Front End:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let sourceBuffer;

function customRecordStream(stream) {
  // should actually check to see if the given mimeType is supported by the browser here
  let options = { mimeType: 'video/webm;codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = postBlob;
  mediaRecorder.start(INT_REC); // INT_REC is the chunk duration from the question
}

function postBlob(event) {
  if (event.data && event.data.size > 0) {
    sendBlobAsBase64(event.data);
  }
}

function handleSourceOpen(event) {
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
}

function sendBlobAsBase64(blob) {
  const reader = new FileReader();
  reader.addEventListener('load', () => {
    const dataUrl = reader.result;
    const base64EncodedData = dataUrl.split(',')[1];
    console.log(base64EncodedData);
    sendDataToBackend(base64EncodedData);
  });
  reader.readAsDataURL(blob);
}

function sendDataToBackend(base64EncodedData) {
  const body = JSON.stringify({
    data: base64EncodedData
  });
  fetch('/api', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body
  }).then(res => {
    return res.json();
  }).then(json => console.log(json));
}
Back End:
const fs = require('fs');
const path = require('path');
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "50MB", type: 'application/json' }));

app.post('/api', (req, res) => {
  try {
    const { data } = req.body;
    const dataBuffer = Buffer.from(data, 'base64');
    const fileStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });
    fileStream.write(dataBuffer);
    console.log(dataBuffer);
    return res.json({ gotit: true });
  } catch (error) {
    console.log(error);
    return res.json({ gotit: false });
  }
});
Inspired by @willascend's answer:
Backend-side:
app.use(express.raw());

app.post('/video-chunck', (req, res) => {
  fs.createWriteStream('myvideo.webm', { flags: 'a' }).write(req.body);
  res.sendStatus(200);
});
Frontend-side:
mediaRecorder.ondataavailable = event => {
  if (event.data && event.data.size > 0) {
    fetch(this.serverUrl + '/video-chunck', {
      method: 'POST',
      headers: { 'Content-Type': 'application/octet-stream' },
      body: event.data
    });
  }
};
My express version is 4.17.1
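One caveat worth adding (my note, not from the original answer): express.raw() defaults to a 100kb body limit and only parses application/octet-stream, so larger video chunks will be rejected unless you raise the limit explicitly:

app.use(express.raw({ type: "application/octet-stream", limit: "50mb" }));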
I faced the same problem today.
As a solution, on the back end I used fs.appendFile:
fs.appendFile(Path, rawData, function (err) {
  if (err) throw err;
  console.log('Chunk saved!');
});
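For completeness, a minimal usage sketch (my assumption: the raw-body route from the previous answer, with Path and rawData taken from the request):

app.post("/video-chunck", (req, res) => {
  // req.body is a Buffer here thanks to express.raw()
  fs.appendFile("myvideo.webm", req.body, (err) => {
    if (err) return res.sendStatus(500);
    console.log("Chunk saved!");
    res.sendStatus(200);
  });
});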