Download file from server (Nodejs Express > React) - node.js

How can I send a file (.docx) to a user?
this is my server code :
// GET /api/topic/file/:id — send the topic's first attached file to the
// browser as a .docx download (res.download sets Content-Disposition).
app.get('/api/topic/file/:id', function (req, res, next) {
  // Fix: findByIdAndUpdate was called with no update document; the intent
  // here is a plain read, so findById is the correct query.
  Topic.findById(req.params.id)
    .exec()
    .then((topic) => {
      let filepath = topic.news_file[0]
      console.log('filepath', filepath)
      res.download(filepath, topic.name + '.docx', function (err) {
        if (err) {
          console.log('api get file err ', err);
          // Surface the failure instead of swallowing it — but only if the
          // response has not already started streaming.
          if (!res.headersSent) {
            next(err);
          }
        } else {
          // decrement a download credit, etc.
        }
      });
    }).catch((err) => console.log('error', err));
})
this does not trigger a download on the browser.
i am using react as front-end.
on the client i have a button triggering this upon click :
// NOTE(review): fetch() only pulls the response into script memory — nothing
// here hands the bytes to the browser's save/download machinery, which is why
// no download is triggered. The body must be read (e.g. as a Blob) and saved.
handleDownload() {
if (this.state.lastClicked) {
fetch("/api/topic/file/" + this.state.lastClicked._id)
.then(results => {
// The Response object is logged and returned, but the returned promise is
// never consumed, so the response body is discarded.
console.log('results', results)
return results;
})
} else {
// No topic selected — nothing to download.
}
}

I found a solution using downloadjs:
var download = require("downloadjs")
async handleDownload() {
const res = await fetch("/api/topic/file/" + this.state.lastClicked._id);
const blob = res.blob();
download(blob, this.state.lastClicked.name + '.docx');
}

Related

Sending webm blob to nodejs and saving it on the server

I'm having troubles saving an incoming webm blob to the server. I'm using react-video-recorder on NextJS like this:
<VideoRecorder
onRecordingComplete={(videoBlob) => {
// Do something with the video...
fetch("/api/video",
method: "post",
body: videoBlob,
})
.then(function (response) {
console.log("done");
return response;
})
.catch(function (err) {
console.log('error', err);
});
console.log(url);
// output: blob:http://localhost:3000/99a5b711-f3d5-481d-9e04-8981d1be3727
console.log(videoBlob);
// output BlobĀ {size: 307028, type: "video/webm;codecs="vp8,opus""}
}}
/>
On the api side I'm trying to save the file like this. It does save something, but is only the first chunk or buffer. How can I capture and write the file to my server?
// Next.js API route: persist the posted video payload to disk.
// NOTE(review): req.body here is whatever the framework's body parser
// produced; for a binary webm POST the raw request stream must be buffered in
// full before writing — writing req.body directly is the likely reason only
// the first chunk lands on disk. TODO confirm the body-parser configuration.
export default async (req, res) => {
fs.writeFile('test.webm', req.body, function (err) {
if (err) return console.log(err);
// NOTE(review): no HTTP response is ever sent to the client, on success or
// on failure — the request will hang until it times out.
console.log('video saved');
} );
}
I did that task by doing this.
I saved the recordingChunks/Video blob to a state and then sent it to the Nodejs server from Reactjs Frontend
FrontEnd code:-
// Package the recorded chunks into a single webm Blob and POST it to the
// backend as multipart form data.
const videoBlob = new Blob(context.data.recordedChunks, { type: "video/webm" });
const formData = new FormData();
formData.append('video', videoBlob);
const uploadUrl = `${process.env.REACT_APP_BASE_URL}/video/blob_video`;
axios
  .post(uploadUrl, formData)
  .then((res) => console.log(res.data))
  .catch((err) => console.log(err));
Backend code:-
// POST /blob_video — receive a webm file uploaded as multipart form data
// (express-fileupload) and move it into the target directory.
router.post('/blob_video', async (req, res) => {
  try {
    // Guard req.files itself: with no multipart body, req.files is undefined
    // and the original `req.files.video` access would throw.
    if (req.files && req.files.video !== undefined) {
      const video = req.files.video // this is your file, do whatever you want with it
      const videoname = Date.now() + req.files.video.name + ".webm"
      video.mv(`${__dirname}/../your_path/${videoname}`, err => {
        if (err) {
          console.log(err)
          return res.json({ err })
        }
        // Fix: respond only after mv() finishes. The original called
        // res.json(req.body) immediately after kicking off mv(), so a later
        // mv() error produced a second response ("headers already sent").
        res.json(req.body)
      })
    } else {
      res.json(req.body)
    }
  } catch (err) {
    res.json({ success: false, err: err, msg: "Server error" })
    console.log(err)
  }
})
This example uses express-fileupload to handle the upload; you can achieve the same with your favourite upload middleware.

express file-upload getting error after deploying on aws.for local it works successfully

This is my Node.js backend code for uploading a file and saving it to the database. It works fine locally, but after deploying to AWS it doesn't work — see the error image.
for more info i have changed environment.prod. no problem for other req though.
exports.create = (req, res) => {
try {
if (!req.files) {
res.status(400).send({ message: "Please select image" });
}else
{
const file = req.files.image;
const filename = req.files.image.name;
file.mv(__dirname + '/uploads/' + filename, function (err,success) {
if (err) {
res.status(600).send({ err });
} else {
const url = req.protocol + '://' + req.get("host");
const tutorial = new Tutorial({
data: req.body.data,
image: url + "/uploads/" + filename,
creator : req.userData.userId,
uploaderName : req.body.name,
createdDateTime : new Date()
});
//Save post in the database
tutorial.save(tutorial).then(data => {
res.status(200).send({
message:"success"
});
}).catch(err => {
res.status(500).send({
message:err
});
});
}
})
}
} catch (e) {
res.status(700).send({ error: e });
}
};

How to download the pdf file after generated in nodejs

I'm building a web application in Node.js 10.x and angular 6.x. I wish to generate a PDF and download it via browser.
angular 6.x
/**
 * POST the report parameters to the generatePDF endpoint and return the
 * response stream to the caller as an observable.
 */
generatePDF(params): any {
  const endpoint = this.url('generatePDF');
  const options = this.getRequestOptions();
  // Identity map: each emitted response value passes through unchanged.
  return this.http.post(endpoint, params, options).pipe(map((res: any) => res));
}
Node.js
async generatePDF(options = { format:'A4' }) {
return new Promise((resolve, reject) => { ejs.renderFile(this.templateName, this.data, (err, res) => {
if (err) {
return reject(err);
}
pdf.create(res, options)
.toFile(this.fileName, (err, res) => {
if (err) {
return reject(err);
}
resolve(res);
});
});
});
}
I have solved this problem as follows.
Node.js
// Build a filesystem-safe name (spaces -> underscores), render the report to
// disk, then send the finished file to the client.
let pdffilename = username + '_' + gameName.replace(/ /g, "_") + '.pdf';
pdfReport = new Report(gameName, ejsfilename, pdffilename, refined_score);
// generatePDF resolves once the PDF exists on disk, so sendFile is safe here.
await pdfReport.generatePDF();
res.sendFile(pdfReport.fileName);
After Creation of PDF path , you can use the following code to download your file
// CORS headers so a browser on another origin may fetch the PDF.
res.header('Access-Control-Allow-Origin', '*');
res.header('Access-Control-Allow-Headers', 'X-Requested-With');
res.header('content-type', 'application/pdf');
// res.download sets Content-Disposition: attachment, so the browser saves the
// file under PDF_NAME.pdf instead of rendering it inline.
res.download(PDF_PATH, PDF_NAME + '.pdf');

Streaming data from oracle db to browser with node.js

I am learning Node.js and databases. I am trying to stream a heavy result set — about 7,700,000 rows and 96 columns — from Oracle to the client, where I use the data for a virtual table. But the client shows only one row, and the Node console displays the error "Cannot set headers after they are sent to the client". How can I stream this data to the client? Please help.
// NOTE(review): this is the question's broken code, kept as evidence of the
// "Cannot set headers after they are sent" error analysed below.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());
oracledb.outFormat = oracledb.ARRAY;
oracledb.getConnection({
user: 'user',
password: 'password',
connectString: 'some string'
},
(err, connection) => {
if (err) {
console.error(err.message);
return;
}
var rowsProcessed = 0;
var startTime = Date.now();
var dataSize = 0;
// NOTE(review): a single query stream is created once at startup and shared
// by every request to /data — it is consumed after the first pass.
var stream = connection.queryStream(
'SELECT * FROM table',
);
// stream.on('data', function (data) {
// rowsProcessed++;
// // console.log(JSON.stringify(data));
// // console.log(data);
// dataSize = dataSize + data.length;
// // oracleData.push(data);
// // console.log("pushing");
// // console.log(oracleData);
// // app.get('/data', (req, res) => {
// // res.send(data);
// // })
// // console.log(data);
// });
app.get('/data', (req, res) => {
stream.on('data', (data) => {
rowsProcessed++;
dataSize = dataSize + data.length;
// BUG: res.send() completes the HTTP response. It runs once per 'data'
// event, so the second row triggers "Cannot set headers after they are
// sent" — the client only ever receives the first row.
res.send(JSON.stringify(data));
})
})
stream.on('end', function () {
var t = ((Date.now() - startTime) / 1000);
console.log('queryStream(): rows: ' + rowsProcessed +
', seconds: ' + t);
// console.log(dataSize + ' bytes');
connection.close(
function (err) {
if (err) {
console.error(err.message);
} else {
console.log("connection closed")
}
}
)
})
}
);
app.listen(5000, () => {
console.log('Listening at 5000')
})
I tried using above approach. But it is failing. How can I achieve the output?
The browser is freezing if I output entire data at single time that's why I am trying to use streaming and in the node command prompt it is displaying out of memory if I load entire data at single time.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important, you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');

// Stream every employee row to the HTTP response as one JSON array, without
// holding the whole result set in memory.
async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');
    // Fix: the original wrote "row," for every row, leaving a trailing comma
    // before "]" — invalid JSON. Write the separator *before* every row
    // except the first instead.
    let firstRow = true;
    stream.on('data', (row) => {
      if (!firstRow) {
        res.write(',');
      }
      firstRow = false;
      res.write(JSON.stringify(row));
    });
    stream.on('end', () => {
      res.end(']');
    });
    // Release the connection whichever way the stream finishes.
    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
    stream.on('error', async (err) => {
      next(err);
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}
module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');
// Object-mode Transform that serializes a stream of rows into one JSON array.
// Each row is held back one step so a "," separator is emitted only *between*
// rows, never after the last one.
class ToJSONArray extends Transform {
  constructor() {
    super({ objectMode: true });
    // Open the array as soon as the stream exists.
    this.push('[');
  }

  _transform(row, encoding, callback) {
    const held = this._prevRow;
    if (held) {
      // A newer row arrived, so the held row is certainly not the last:
      // flush it followed by a separator.
      this.push(JSON.stringify(held));
      this.push(',');
    }
    this._prevRow = row;
    callback(null);
  }

  _flush(done) {
    const held = this._prevRow;
    if (held) {
      // Final row — no trailing comma after it.
      this.push(JSON.stringify(held));
    }
    this.push(']');
    delete this._prevRow;
    done();
  }
}
// Stream the employees table to the HTTP response as a JSON array, delegating
// bracket/separator handling to the reusable ToJSONArray transform.
async function get(req, res, next) {
  try {
    const jsonArray = new ToJSONArray();
    const connection = await oracledb.getConnection();
    const rows = await connection.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
    res.writeHead(200, {'Content-Type': 'application/json'});
    // rows -> JSON text -> response; pipe() handles backpressure for us.
    rows.pipe(jsonArray).pipe(res);
    // Release the connection whichever way the stream finishes.
    const releaseConnection = async () => {
      try {
        await connection.close();
      } catch (closeErr) {
        console.log(closeErr);
      }
    };
    rows.on('close', releaseConnection);
    rows.on('error', async (err) => {
      next(err);
      await releaseConnection();
    });
  } catch (err) {
    next(err);
  }
}
module.exports.get = get;

How to configure browser to wait for a response before sending another request in MEAN application

I have a MEAN application that runs test cases on a bunch of similarly coded application.
I send a request that triggers running the test cases on the application through Angular6 to ExpressJS.
Since there is a delay in receiving the response, the browser is re-sending the request, which re triggers the process.
I used windows.location.reload() and it did stop the browser from re-sending request. But the browser will not receive the response that the server sends once its done with the process.
Below is my Express code:
// POST /trainee — run the evaluation pipeline (decompress -> temp solution ->
// test -> fetch marks) for each employee id in the request, then reply once.
router.post("/trainee", (req, res, next) => {
var empList = req.body.empList;
var scriptCount = 0
// asyncLoop processes one empId at a time; calling next() advances the loop.
asyncLoop(empList, (empId, next) => {
req.body.empId = empId;
console.log("started Script", "empId : " + empId);
// NOTE(review): the helpers receive (req, res) but the single response is
// sent only after the loop — presumably they never write to res; verify.
return testunzip.decompress(req, res).then((msg) => {
console.log("decompressed: ", msg);
return testunzip.makeTempsolution(req, res).then((msg) => {
console.log("temp soln : ", msg);
return storage.testing(req, res).then((msg) => {
console.log("testing : ", msg);
return storage.fetchMarks(req, res).then((data) => {
data = jsonReport.parser(data, req, res)
scriptCount++
console.log("data in call: ", data)
next();
})
})
})
}).catch((err) => {
// A failure for one empId is logged and the loop continues with the rest.
console.log(err.message)
next();
})
}, (err) => {
if (err) {
console.error('Error: ' + err.message);
}
console.log('Evaluation completed for !' + scriptCount + ' scripts');
// Single response for the whole batch. NOTE(review): a long batch can exceed
// the client/proxy timeout before this line runs — likely why the browser
// re-sends the request. Confirm timeout settings.
res.send("Done!")
})
})
This is my angular code:
// Kick off server-side evaluation for the selected employees and subscribe
// for the eventual result or error.
evaluateScript(data) {
this.traineesResult = null;
this.errorMessage = null;
data.empList = this.empList;
data.folderName = this.folderName;
this.service.evaluateScript(data).subscribe(
(result) => { this.traineesResult = result },
(err) => { this.errorMessage = err.error.message }
)
// NOTE(review): subscribe() is asynchronous, so this reload executes
// immediately — it tears down the page and aborts the pending HTTP request,
// which is why the server's response is never received.
window.location.reload()
}
How can I achieve my requirement? Any pointers will be helpful.

Resources