Use Koa to stream an ExcelJS workbook immediately to the browser - node.js

What I would like is to stream my ExcelJS workbook little by little to the browser. The problem is that I'm very uncomfortable with streams.
The facts:
I'm on Node.js v14
I'm using the latest version of the ExcelJS library to generate a big Excel file
I'm using the latest version of Koa as a REST API tool
Here's a simplified part of my code. The Excel workbook itself is OK: if I build the full buffer in memory and hand that buffer to Koa, I can download the file. But when I try to stream instead, passing a stream to Koa, things go wrong. The way I pass the stream to ExcelJS is the documented way, per their docs.
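For reference, the buffer approach that works looks roughly like this (a sketch using ExcelJS's non-streaming Workbook API; the worksheet content here is illustrative, not from the original post):

const Excel = require('exceljs');

app.use(async ctx => {
  // Non-streaming API: the whole workbook is built in memory first
  const workbook = new Excel.Workbook();
  const sheet = workbook.addWorksheet('Export');
  sheet.columns = [{ header: 'ID', key: 'id' }];
  sheet.addRow({ id: 1 });
  ctx.set('Content-Disposition', 'attachment; filename="test.xlsx"');
  ctx.body = await workbook.xlsx.writeBuffer();
});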
EDIT
Here's a full working code base (except for the stream part).
package.json
{
  "name": "stackoverflow-exceljs-koa",
  "private": true,
  "version": "0.0.1",
  "author": "rekam",
  "dependencies": {
    "exceljs": "^4.3.0",
    "koa": "^2.13.4"
  }
}
index.js
const Koa = require('koa');
const app = new Koa();
const { exportExcelData } = require('./builder');

app.use(async ctx => {
  if (ctx.request.url === '/file') {
    await exportExcelData('useFile', './test.xlsx');
    ctx.body = 'Done, file saved at ' + __dirname + '/test.xlsx';
    return;
  }
  if (ctx.request.url === '/stream') {
    // const stream = new Stream();
    // ctx.body = stream;
    // await exportExcelData('useStream', stream);
    ctx.body = 'to be implemented';
    return;
  }
  ctx.body = 'Not Found';
  ctx.status = 404;
});

app.listen(8101);
console.log('visit http://localhost:8101/file or /stream');
builder.js
const Excel = require('exceljs');

module.exports = {
  async exportExcelData(type, streamOrFilename) {
    const limit = 100;
    let offset = 0;
    const workbook = new Excel.stream.xlsx.WorkbookWriter({
      stream: type === 'useStream' ? streamOrFilename : undefined,
      filename: type === 'useFile' ? streamOrFilename : undefined,
      useSharedStrings: true,
      useStyles: true
    });
    const sheet = workbook.addWorksheet('Export');
    sheet.columns = [{
      header: 'ID',
      key: 'id'
    }];
    const build = async (offset) => {
      const rows = Array.from({ length: limit }).map((_, i) => ({ id: i + offset }));
      if (offset > 10000) {
        rows.pop(); // will break the while loop
      }
      rows.forEach(row => sheet.addRow(row).commit());
      return rows;
    };
    let rows;
    do {
      rows = await build(offset);
      offset += limit;
    } while (rows.length === limit);
    sheet.commit();
    await workbook.commit();
    return;
  }
};
usage
$ node index.js
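A minimal sketch of what the /stream branch could look like, assuming a PassThrough stream is acceptable on the Node side: Koa pipes any readable stream assigned to ctx.body into the response, and the WorkbookWriter above writes into whatever stream it is given. The crucial part is not to await the export before setting ctx.body, otherwise the response never starts. The headers and error handling here are my assumptions, not from the original post.

const { PassThrough } = require('stream');

if (ctx.request.url === '/stream') {
  const stream = new PassThrough();
  ctx.set('Content-Disposition', 'attachment; filename="test.xlsx"');
  ctx.type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
  ctx.body = stream;
  // Kick off the export without awaiting it: Koa starts piping `stream`
  // to the response while ExcelJS keeps committing rows into it.
  exportExcelData('useStream', stream).catch(err => stream.destroy(err));
  return;
}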

Related

File chunk upload to azure storage blob, file seems broken

I'm trying to upload an Excel file to an Azure Storage blob in chunks, using the stageBlock and commitBlockList methods of the BlockBlobClient class. The file upload seems to succeed, but when I try to download and open the file, it seems to be broken.
I'm using React and Node.js to do this. The code follows below.
In UI
const chunkSize = (1024 * 1024) * 25; // file chunk size

// here slicing the file and sending it to the api method
const fileReader = new FileReader();
const from = currentChunkIndexRef.current * chunkSize;
const to = from + chunkSize;
const blob = file.slice(from, to);
fileReader.onload = ((e: any) => uploadChunksToBlob(e, file, obj));
fileReader.readAsDataURL(blob);
// api method
const uploadChunksToBlob = async (event: any, file: File, obj: any) => {
  try {
    const totalChunks = Math.ceil(file.size / chunkSize);
    const uploadChunkURL = `/upload?currentChunk=${currentChunkIndexRef.current}&totalChunks=${totalChunks}&file=${file.name}&type=${file.type}`;
    console.log(event.target.result);
    const fileUpload = await fetch(uploadChunkURL, {
      method: "POST",
      headers: { "Content-Type": "application/octet-stream" },
      body: JSON.stringify(event.target.result),
    });
    const fileUploadJson = await fileUpload.json();
    const isLastChunk = (totalChunks - 1) === currentChunkIndexRef.current;
    if (!isLastChunk) {
      console.log({ Chunk: currentChunkIndexRef.current });
      currentChunkIndexRef.current = currentChunkIndexRef.current + 1;
      // eslint-disable-next-line @typescript-eslint/no-use-before-define
      uploadFileToAzureBlob(file, obj);
    } else {
      console.log("File Uploaded");
    }
  } catch (error) {
    console.log("uploadFileToAzureBlob Catch Error" + error);
  }
};
// In Node
const sharedKeyCredential = new StorageSharedKeyCredential(
  config.StorageAccountName,
  config.StorageAccountAccessKey
);
const pipeline = newPipeline(sharedKeyCredential);
const blobServiceClient = new BlobServiceClient(
  `https://${config.StorageAccountName}.blob.core.windows.net`,
  pipeline
);
const containerName = getContainerName(req.headers.key, req.headers.clientcode);
const identifier = uuid.v4();
const blobName = getBlobName(identifier, file);
const containerClient = blobServiceClient.getContainerClient(containerName);
const blockBlobClient = containerClient.getBlockBlobClient(blobName);

try {
  let bufferObj = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8"); // Create buffer object, specifying utf8 as encoding
  let base64String = bufferObj.toString("base64"); // Encode the Buffer as a base64 string
  blockIds = [...blockIds, base64String];
  const bufferedData = Buffer.from(req.body);
  let resultOfUnitArray = new Uint8Array(bufferedData.length);
  for (let j = 0; j < bufferedData.length; j++) {
    resultOfUnitArray[j] = bufferedData.toString().charCodeAt(j);
  } // Converting string to bytes
  const stageBlockResponse = await blockBlobClient.stageBlock(base64String, resultOfUnitArray, resultOfUnitArray.length, {
    onProgress: (e) => {
      console.log("bytes sent: " + e.loadedBytes);
    }
  });
  if ((Number(totalChunks) - 1) === (Number(currentChunk))) {
    const commitblockResponse = await blockBlobClient.commitBlockList(blockIds, { blobHTTPHeaders: req.headers });
    res.json({ uuid: identifier, message: 'File uploaded to Azure Blob storage.' });
  } else {
    res.json({ message: `Current Chunk ${currentChunk} is Successfully Uploaded` });
  }
} catch (err) {
  console.log({ err });
  res.json({ message: err.message });
}
I don't know what I'm doing wrong here.
Any help would be appreciated.
Thank you
The problem is that you convert the chunk into a data URL; that's where things break.
It appears to me that you're under the wrong impression that you need to first encode a blob into a string in order to send it. You don't have to: the browser's fetch API is capable of handling a raw binary payload.
So on the client (browser) side, you don't need to go through FileReader. Just send the chunk blob directly.
const blob = file.slice(from, to);
// ...
fetch(uploadChunkURL, {
  method: "POST",
  headers: { "Content-Type": "application/octet-stream" },
  body: blob,
});
On the server (Node.js) side, you'll receive the chunk in raw binary form, so you can simply forward it untouched to Azure Storage. There's no need to decode it from a string and move bytes into resultOfUnitArray like you currently do.
const base64String = Buffer.from(`${file}_${Number(currentChunk)}`, "utf8").toString("base64");
const bufferedData = Buffer.from(req.body);
const stageBlockResponse = await blockBlobClient.stageBlock(
  base64String,
  bufferedData,
  bufferedData.length
);
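One assumption baked into this server snippet is that req.body already arrives as a raw Buffer. With Express, that typically means mounting the raw body parser on the upload route (the route path and size limit below are illustrative, not from the original post):

const express = require('express');
const app = express();

// Parse application/octet-stream bodies into a Buffer so req.body can be
// handed straight to stageBlock.
app.use('/upload', express.raw({ type: 'application/octet-stream', limit: '30mb' }));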

how to solve audio encoding error in Media-translation GCP API?

Here's my code.
I have gone through the Google Cloud Platform API documentation and followed the GCP doc steps correctly, but I'm still unable to fix the encoding error, which you can see below. I'm trying to translate an audio clip from en-US (English) to hi-IN (Hindi), and it would be helpful if you could suggest some alternative ways to solve this.
function main(filename, encoding, sourceLanguage, targetLanguage) {
  const fs = require('fs');
  const {
    SpeechTranslationServiceClient,
  } = require('@google-cloud/media-translation');

  const client = new SpeechTranslationServiceClient();

  async function quickstart() {
    const filename = './16kmonoceo.wav';
    const encoding = 'LINEAR16';
    const sourceLanguage = 'en-US';
    const targetLanguage = 'hi-IN';

    const config = {
      audioConfig: {
        audioEncoding: encoding,
        sourceLanguageCode: sourceLanguage,
        targetLanguageCode: targetLanguage,
      },
    };
    const initialRequest = {
      streamingConfig: config,
      audioContent: null,
    };

    const readStream = fs.createReadStream(filename, {
      highWaterMark: 4096,
      encoding: 'base64',
    });

    const chunks = [];
    readStream
      .on('data', chunk => {
        const request = {
          streamingConfig: config,
          audioContent: chunk.toString(),
        };
        chunks.push(request);
      })
      .on('close', () => {
        // Config-only request should be first in stream of requests
        stream.write(initialRequest);
        for (let i = 0; i < chunks.length; i++) {
          stream.write(chunks[i]);
        }
        stream.end();
      });

    const stream = client.streamingTranslateSpeech().on('data', response => {
      const { result } = response;
      if (result.textTranslationResult.isFinal) {
        console.log(`\nFinal translation: ${result.textTranslationResult.translation}`);
        console.log(`Final recognition result: ${result.recognitionResult}`);
      } else {
        console.log(`\nPartial translation: ${result.textTranslationResult.translation}`);
        console.log(`Partial recognition result: ${result.recognitionResult}`);
      }
    });
  }
  quickstart();
}

main(...process.argv.slice(2));
Here's my error from the command line (posted as a screenshot): CHECK ERROR MESSAGE
I'm using Windows 10 and VS Code as my IDE.
This is a case where careful reading of the error message helps.
Some module choked on "LINEAR16" as the audioEncoding value, saying there's no encoding with that name.
A quick look at the documentation shows "linear16" (lower case) as the value to use.
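In other words, only the encoding value in the quickstart config changes; everything else stays as in the question:

const config = {
  audioConfig: {
    audioEncoding: 'linear16', // lower case, as the module expects
    sourceLanguageCode: 'en-US',
    targetLanguageCode: 'hi-IN',
  },
};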

Capture WebRTC stream

I got this little proof-of-concept script that I copy/paste into the Google Chrome console to capture live webcam video. I capture the chunks every 5 seconds, turn them into blobs, attach them to a FormData instance, and post them to a Node server; then I clean up. It works, but eventually the browser crashes: RAM and CPU usage spike heavily.
It seems the problematic areas are creating the Blob and FormData variables.
How can I improve the script?
To test, go here:
https://www.earthcam.com/usa/arizona/sedona/redrock/?cam=sedona_hd
Copy/paste the script. Check the tab's RAM and CPU consumption.
let chunks = [];

const getOptions = function() {
  let options = { mimeType: 'video/webm;codecs=vp9,opus' };
  if (!window.MediaRecorder.isTypeSupported(options.mimeType)) {
    console.error(`${options.mimeType} is not supported`);
    options = { mimeType: 'video/webm;codecs=vp8,opus' };
    if (!window.MediaRecorder.isTypeSupported(options.mimeType)) {
      console.error(`${options.mimeType} is not supported`);
      options = { mimeType: 'video/webm' };
      if (!window.MediaRecorder.isTypeSupported(options.mimeType)) {
        console.error(`${options.mimeType} is not supported`);
        options = { mimeType: '' };
      }
    }
  }
  return options;
};

const captureStream = async function(chunks) {
  let blob = new window.Blob(chunks, {
    type: 'video/webm',
  });
  let formData = new window.FormData();
  formData.append('upl', blob, 'myFile.webm');
  await window.fetch('http://localhost:3000', {
    method: 'post',
    body: formData,
  });
  blob = null;
  formData = null;
  console.log(`Saved ${chunks.length}`);
  chunks = [];
};

const recordStream = function() {
  if (window.MediaRecorder === undefined) {
    return console.log('Not supported');
  }
  const video = document.querySelector('video');
  const stream = video.captureStream();
  const options = getOptions();
  const mediaRecorder = new window.MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = function(e) {
    if (e.data && e.data.size > 0) {
      chunks.push(e.data);
    }
  };
  mediaRecorder.start(0);
  // Capture chunks every 5 sec
  setInterval(async function() {
    await captureStream(chunks);
  }, 5000);
};

recordStream();
When I paste the code above into the console it displays this error:
Uncaught SyntaxError: Unexpected token '}'
Adding a preceding { then returns this error:
VM97:3 Uncaught ReferenceError: formData is not defined at <anonymous>:3:11
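One detail worth pointing out (my observation, not from the original post): captureStream reassigns its local chunks parameter, so the outer chunks array is never actually emptied and keeps growing, which alone would explain the RAM climb. A sketch of that fix is to empty the shared array in place instead:

const captureStream = async function() {
  // splice() empties the shared array in place and returns the removed
  // chunks; reassigning a parameter would leave the outer array untouched.
  const batch = chunks.splice(0, chunks.length);
  const blob = new window.Blob(batch, { type: 'video/webm' });
  const formData = new window.FormData();
  formData.append('upl', blob, 'myFile.webm');
  await window.fetch('http://localhost:3000', { method: 'post', body: formData });
  console.log(`Saved ${batch.length}`);
};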

Getting ExcelJS workbook data created in Node.js to saveAs in the client (SOLVED)

I have an Angular + Node application that has the ability to download Excel files rendered using the exceljs package.
All the work (except for getting the data for the Excel file) is done on the client side. The problem is that the browser can't handle that amount of data.
What I'm trying to do now is basically do all the work on the server; the client should get the data as a buffer array [buffer] and save it.
This is my original code, which worked (below you can see the fixed version):
Component:

//The this.srv.getExcel() only returns an observable of data returned from the DB
this.srv.getExcel().subscribe(result =>
{
  let workbook = new Workbook();
  const worksheet = workbook.addWorksheet('sheet1');
  result.forEach(dataItem => worksheet.addRow(Object.keys(dataItem).map(di => dataItem[di]))); //populating the excel
  workbook.xlsx.writeBuffer().then((data) =>
  {
    const blob: Blob = new Blob([data], { type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8' });
    FileSaver.saveAs(blob, 'excelFile.xlsx');
  });
})
Now - trying to convert it (SOLVED):

Component:

this.nodeSrv.getExcel(request, fileName).subscribe(result =>
{
  const data: Blob = new Blob([result], { type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8' });
  FileSaver.saveAs(data, fileName + '.xlsx');
},
error => { debugger; this.loading = false; }
)
Service with HTTP to the endpoint on the server:

getExcel(request, fileName)
{
  var path = reportsUrl.GetVotingBoxListForExcel;
  // responseType 'arraybuffer' so the workbook bytes are not parsed as JSON
  const options = { withCredentials: true, responseType: 'arraybuffer' as const };
  return this.http.post(path, { request: request, reportName: fileName }, options);
}
This is the main change - most of the work is on the server. This is the nodeSrv:

router:

const express = require('express');
const router = express.Router();

router.use((req, res, next) =>
{
  //The GetDataForExcel is the same as this.srv.getExcel(), only it returns a promise of data returned from the DB
  return DB.GetDataForExcel(req.body.request).then((dataResult) => {
    let reportTypeNameForExcel = req.body.reportName ? req.body.reportName : '';
    return excel.createExcel(res, dataResult, reportTypeNameForExcel);
  }).catch((err) => {
    next({
      details: err
    });
  });
});

module.exports = router;
This is the excel.createExcel; the main catch here was capturing the worksheet returned by addWorksheet:

createExcel: function(res, dataResult, reportTypeNameForExcel)
{
  let workbook = new Workbook();
  const worksheet = workbook.addWorksheet('sheet1');
  dataResult.forEach(dataItem => worksheet.addRow(Object.keys(dataItem).map(di => dataItem[di]))); //populating the excel
  res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
  res.setHeader("Content-Disposition", "attachment; filename=" + "Report.xlsx");
  workbook.xlsx.write(res).then(() =>
  {
    res.end();
  });
}
The code above is already fixed - solved
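A variant worth knowing about (my suggestion, not part of the original post): Angular's HttpClient can return the response body as a Blob directly via responseType: 'blob', which removes the manual Blob construction in the component:

// service: ask for the body as a Blob (hypothetical variant)
getExcel(request, fileName)
{
  var path = reportsUrl.GetVotingBoxListForExcel;
  return this.http.post(path, { request: request, reportName: fileName }, {
    withCredentials: true,
    responseType: 'blob' as const
  });
}

// component: result is already a Blob, so it can be saved as-is
this.nodeSrv.getExcel(request, fileName).subscribe(result => FileSaver.saveAs(result, fileName + '.xlsx'));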

Node.js download multiple files

I need to download multiple files from URLs. I've got a list of them in a file. How should I do that? I already made an attempt, but it's not working: I need to wait until the last download is done before starting the next one. How can I do that?
You want to call the download function from the callback of the previous file's download. I threw together something; do not consider it pretty nor production ready, please ;-)
var http = require('http-get');

var files = { 'url': 'local-location', 'repeat-this': 'as often as you want' };

var MultiLoader = function (files, finalcb) {
  var load_next_file = function (files) {
    if (Object.keys(files).length === 0) {
      finalcb(null);
      return;
    }
    var nexturl = Object.keys(files)[0];
    var nextfnname = files[nexturl];
    console.log('will load ' + nexturl);
    http.get(nexturl, nextfnname, function (err, result) {
      console.log('loaded ' + nexturl);
      delete files[nexturl];
      load_next_file(files);
    });
  };
  load_next_file(JSON.parse(JSON.stringify(files)));
};

MultiLoader(files, function () { console.log('finalcb'); });
http-get is not a standard node module, you can install it via npm install http-get.
I think this is what you're looking for.
const fs = require('fs')
const https = require('https')

const downloadFolderPath = 'downloads'

const urls = [
  'url 1',
  'url 2'
]

const downloadFile = url => {
  return new Promise((resolve, reject) => {
    const splitUrl = url.split('/')
    const filename = splitUrl[splitUrl.length - 1]
    const outputPath = `${downloadFolderPath}/${filename}`
    const file = fs.createWriteStream(outputPath)
    https.get(url, res => {
      if (res.statusCode === 200) {
        res.pipe(file).on('close', resolve)
      } else {
        reject(res.statusCode)
      }
    })
  })
}

if (!fs.existsSync(downloadFolderPath)) {
  fs.mkdirSync(downloadFolderPath)
}

// A for...of loop inside an async IIFE downloads the files one at a time;
// urls.forEach(async ...) would fire all requests concurrently.
(async () => {
  let downloadedFiles = 0
  for (const url of urls) {
    await downloadFile(url)
    downloadedFiles++
    console.log(`${downloadedFiles}/${urls.length} downloaded`)
  }
})()
You can read files using fs (var fs = require('fs');) in Node.js:

fs.readFile('<filepath>', "utf8", function (err, data) {
  if (err) throw err;
  console.log(data);
});
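Since the question says the URL list lives in a file, a hypothetical glue step (assuming one URL per line in a file named urls.txt) would be:

var fs = require('fs');

fs.readFile('urls.txt', 'utf8', function (err, data) {
  if (err) throw err;
  // One URL per line; drop empty lines, then hand the list to either
  // downloader shown above.
  var urls = data.split('\n').filter(Boolean);
  console.log(urls);
});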
