Generate Excel via Node.js in Azure Functions

I have a Node.js app with Express and the excel4node library, running on my local machine.
I send REST requests to this server and it returns an Excel file as binary data.
I want to move it to Azure Functions, but I'm facing an issue: even a simple app (taken from an example) doesn't run there. Maybe someone has suggestions on how to solve this?
const createHandler = require('azure-function-express').createHandler;
const express = require('express');
const xl = require('excel4node');

// Create the express app as usual
const app = express();

app.post('/api/hello-world', (req, res) => {
  var wb = new xl.Workbook();
  var ws = wb.addWorksheet('S');
  ws.cell(1, 1).string('A');
  // Stream the workbook into the HTTP response
  wb.write(`FileName.xlsx`, res);
});

// Bind the express app to an Azure Function handler
module.exports = createHandler(app);
This is the error I'm seeing:
Microsoft.AspNetCore.Server.Kestrel.Core: Response Content-Length mismatch: too many bytes written (3790 of 3569).
Does anyone know how to solve it, or have an example of generating an Excel file in Azure Functions with Node.js?

Just in case anyone else stumbles upon this looking for the answer (like I did): this works for me.
var xl = require('excel4node');

const tryCreate = async (obj) => {
  let wb = new xl.Workbook();
  // Build the workbook in memory instead of streaming it into the response
  const buffer = await wb.writeToBuffer();
  return {
    setEncoding: 'binary',
    // status: 200, /* Defaults to 200 */
    body: buffer
  };
};

module.exports = async function (context, req) {
  try {
    context.res = await tryCreate(req.body);
  } catch (error) {
    context.log.error(error, new Date().toISOString());
  }
};
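If the caller should treat the payload as a spreadsheet download, it may also help to set the response headers explicitly. Here is a minimal sketch of that variant; the header values and the isRaw flag are my additions based on the Azure Functions Node.js response shape, not part of the answer above:

const tryCreate = async (obj) => {
  const wb = new xl.Workbook();
  const buffer = await wb.writeToBuffer();
  return {
    headers: {
      // standard MIME type for .xlsx files
      'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
      'Content-Disposition': 'attachment; filename="FileName.xlsx"'
    },
    isRaw: true, // assumption: keeps the runtime from re-encoding the binary body
    body: buffer
  };
};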

Related

Writing file in /tmp in a Firebase Function does not work

I am writing a Firebase function that exposes an API endpoint using express. When the endpoint is called, it needs to download an image from an external API and use that image to make a second API call. The second API call needs the image to be passed as a readableStream. Specifically, I am calling the pinFileToIPFS endpoint of the Pinata API.
My Firebase function is using axios to download the image and fs to write the image to /tmp. Then I am using fs to read the image, convert it to a readableStream and send it to Pinata.
A stripped-down version of my code looks like this:
const functions = require("firebase-functions");
const express = require("express");
const axios = require("axios");
const cors = require("cors"); // needed for the cors() middleware used in the route below
const fs = require('fs-extra');
require("dotenv").config();

const key = process.env.REACT_APP_PINATA_KEY;
const secret = process.env.REACT_APP_PINATA_SECRET;
const pinataSDK = require('@pinata/sdk');
const pinata = pinataSDK(key, secret);

const app = express();
const downloadFile = async (fileUrl, downloadFilePath) => {
  try {
    const response = await axios({
      method: 'GET',
      url: fileUrl,
      responseType: 'stream',
    });
    // pipe the result stream into a file on disc
    response.data.pipe(fs.createWriteStream(downloadFilePath, {flags: 'w'}));
    // return a promise and resolve when download finishes
    return new Promise((resolve, reject) => {
      response.data.on('end', () => {
        resolve();
      });
      response.data.on('error', () => {
        reject();
      });
    });
  } catch (err) {
    console.log('Failed to download image');
    console.log(err);
    throw new Error(err);
  }
};
app.post('/pinata/pinFileToIPFS', cors(), async (req, res) => {
  const id = req.query.id;
  var url = '<URL of API endpoint to download the image>';

  await fs.ensureDir('/tmp');
  if (fs.existsSync('/tmp')) {
    console.log('Folder: /tmp exists!');
  } else {
    console.log('Folder: /tmp does not exist!');
  }

  var filename = '/tmp/image-' + id + '.png';
  downloadFile(url, filename);
  if (fs.existsSync(filename)) {
    console.log('File: ' + filename + ' exists!');
  } else {
    console.log('File: ' + filename + ' does not exist!');
  }

  var image = fs.createReadStream(filename);
  const options = {
    pinataOptions: {cidVersion: 1}
  };
  pinata.pinFileToIPFS(image, options).then((result) => {
    console.log(JSON.stringify(result));
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Authorization, Origin, X-Requested-With, Accept");
    res.status(200).json(JSON.stringify(result));
    res.send();
  }).catch((err) => {
    console.log('Failed to pin file');
    console.log(err);
    res.status(500).json(JSON.stringify(err));
    res.send();
  });
});

exports.api = functions.https.onRequest(app);
Interestingly, my debug messages tell me that the /tmp folder exists, but the downloaded file does not exist in the file system:
[Error: ENOENT: no such file or directory, open '/tmp/image-314502.png']. Note that the image can be accessed correctly when I manually open its URL.
I've tried to download and save the file in many ways, but none of them work. Also, based on what I've read, Firebase Functions do allow writing and reading temp files in /tmp.
Any advice will be appreciated. Note that I am very new to Node.js and to Firebase, so please excuse my basic code.
Many thanks!
I was not able to see where you are initializing the working directory, as suggested in this post:
const bucket = gcs.bucket(object.bucket);
const filePath = object.name;
const fileName = filePath.split('/').pop();
const thumbFileName = 'thumb_' + fileName;
const workingDir = join(tmpdir(), `${object.name.split('/')[0]}/`); // new
const tmpFilePath = join(workingDir, fileName);
const tmpThumbPath = join(workingDir, thumbFileName);

await fs.ensureDir(workingDir);
Also, please consider that if you are using two functions, the /tmp directory would not be shared, as each one has its own. Here is an explanation from Doug Stevenson; in the same answer, there is a very well-explained video about local and global scopes and how to use the /tmp directory:
Cloud Functions only allows one function to run at a time in a particular server instance. Functions running in parallel run on different server instances, which have different /tmp spaces. Each function invocation runs in complete isolation from each other. You should always clean up files you write in /tmp so that they don't accumulate and cause a server instance to run out of memory over time.
I would suggest using Google Cloud Storage extended with Cloud Functions to achieve your goal.
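One more thing stands out in the handler itself: downloadFile(url, filename) is never awaited, so the existence check and createReadStream run before the download has finished, which would explain the ENOENT. A minimal sketch of the corrected flow, using the axios and fs-extra instances required earlier, and resolving on the write stream's 'finish' event (the signal that all bytes are actually on disk) rather than the download stream's 'end':

const downloadFile = async (fileUrl, downloadFilePath) => {
  const response = await axios({
    method: 'GET',
    url: fileUrl,
    responseType: 'stream',
  });
  return new Promise((resolve, reject) => {
    const writer = fs.createWriteStream(downloadFilePath, {flags: 'w'});
    response.data.pipe(writer);
    // resolve when the write stream finishes, not when the download stream ends
    writer.on('finish', resolve);
    writer.on('error', reject);
  });
};

// inside the route handler:
await downloadFile(url, filename); // wait for the file before reading it back
const image = fs.createReadStream(filename);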

Filter large debug logs on client side

I am working on a web app. The web app can execute code, retrieve a debug log, and display it based on the selected filter. This debug log can be large. The web app has an Express app on the server side and Handlebars on the client side. Currently I am retrieving the debug log in the server-side code and using the code below.
const fs = require("fs");
const readline = require("readline");
const {Stream} = require("stream");

// creating the file using the code below
fs.writeFile("DebugLog.txt", debugLog, async function(err) {
  if (err) {
    throw err;
  }
  const result = await searchStream("DebugLog", "USER_DEBUG|METHOD_ENTRY|METHOD_EXIT");
  res.send(JSON.stringify({"DebugLog": result}));
});
// executing the search using the code below
const searchStream = (filename, text) => {
  return new Promise((resolve) => {
    const inStream = fs.createReadStream("./" + filename + ".txt");
    const outStream = new Stream();
    const rl = readline.createInterface(inStream, outStream);
    const result = [];
    const regEx = new RegExp(text, "i");
    rl.on("line", function(line) {
      if (line && line.search(regEx) >= 0) {
        result.push(line);
      }
    });
    rl.on("close", function() {
      console.log("finished search", filename);
      resolve(result);
    });
  });
};
Doing it this way ends up creating a static file that is shared across the server, but I need the file to be available only for the user's session. I am looking for any solution that can either create a file available only to an individual session, or filter on the client side so I don't have to worry about creating a file on the server side.
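Since the log is already in memory as a string (debugLog), one option worth sketching is to skip the file entirely and filter per request. This is my suggestion rather than something from the post, but it sidesteps the shared-file problem because each request works only on its own data:

// filter the in-memory log without touching the file system
const searchLog = (debugLog, text) => {
  const regEx = new RegExp(text, "i");
  return debugLog.split(/\r?\n/).filter((line) => regEx.test(line));
};

const result = searchLog(debugLog, "USER_DEBUG|METHOD_ENTRY|METHOD_EXIT");
res.send(JSON.stringify({"DebugLog": result}));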

Enabling Http2Stream in koa2 app

I'm trying to create a simple HTTP/2 server and want to make use of Http2Stream in the http2 module to push large assets. How can I incorporate it in my Koa2 app? Currently my server is just a middleware that receives the ctx and next objects, checks if the file exists, and tries to send it out.
async server(ctx, next, ...arg) {
  // check if the file exists
  // if it exists, set the headers and mimetype and send the file
}
Does the ctx object contain the functions needed to work with Http2Stream, or how can I extend it?
You can utilize the stream in ctx.res (which is the original node response) like so: ctx.res.stream
Working example: Koa2 with HTTP/2. This one reads a file from the public folder (the filename is hardcoded here) and sends it through the stream (which should then be the Http2Stream). Just type https://localhost:8080/file in your browser. You need to place a file thefile.html into ./public:
'use strict';

const fs = require('fs');
const http2 = require('http2');
const koa = require('koa');

const app = new koa();

const options = {
  key: fs.readFileSync('./key.pem'),
  cert: fs.readFileSync('./cert.pem'),
  passphrase: 'test'
};

function getFile(path) {
  const filePath = `${__dirname}/public/${path}`;
  try {
    // open a file descriptor so it can be handed to respondWithFD
    const content = fs.openSync(filePath, 'r');
    const contentType = 'text/html';
    return {
      content,
      headers: {
        'content-type': contentType
      }
    };
  } catch (e) {
    return null;
  }
}

// response
app.use(ctx => {
  if (ctx.request.url === '/file') {
    const file = getFile('thefile.html');
    ctx.res.stream.respondWithFD(file.content, file.headers);
  } else {
    ctx.body = 'OK';
  }
});

const server = http2.createSecureServer(options, app.callback());
console.log('Listening on port 8080');
server.listen(8080);
Hope that helps
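One caveat: getFile returns null when the file is missing, and ctx.res.stream only exists when the request actually arrived over HTTP/2, so a guard before respondWithFD is prudent. The check below is my addition to the example above, not part of the original answer:

app.use(ctx => {
  if (ctx.request.url === '/file') {
    const file = getFile('thefile.html');
    // fall back to Koa's normal response if the file is missing
    // or the request did not arrive over HTTP/2
    if (file && ctx.res.stream) {
      ctx.res.stream.respondWithFD(file.content, file.headers);
    } else {
      ctx.status = 404;
    }
  } else {
    ctx.body = 'OK';
  }
});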

nodejs async/await nested API progress

I have an API that searches for a user-provided term, returns an array of results, then fires off async requests for each of the results and gets results for each request in this second batch. I'd like the API to report progress as it happens rather than just the final result. So, if I make the following request, I should get updates like so:
$ curl 'http://server/?q=foobar'
searching for ${q}…
found 76… now getting images…
found 30 images… done
{
  result
}
Most of the relevant code is shown below. FWIW, I am using hapi.js for my application.
let imagesOfRecords = {};

const getImages = async function (q) {
  console.log(`searching for ${q}…`);
  const uri = `http://remoteserver/?q=${q}`;
  const {res, payload} = await Wreck.get(uri);
  const result = JSON.parse(payload.toString()).hits;
  const numOfFoundRecords = result.total;
  if (result.total) {
    console.log(`found ${result.total}… now getting images…`);
    const foundRecords = result.hits.map(getBuckets);
    Promise.all(foundRecords).then(function() {
      console.log(`found ${Object.keys(imagesOfRecords).length} images… done`);
      reply(imagesOfRecords).headers = res.headers;
    }).catch(error => {
      console.log(error);
    });
  } else {
    console.log('nothing found');
    reply(0).headers = res.headers;
  }
};

const getBuckets = async function(record) {
  const {res, payload} = await Wreck.get(record.links.self);
  const bucket = JSON.parse(payload.toString()).links.bucket;
  await getImageFiles(bucket, record.links.self);
};

const getImageFiles = async function(uri, record) {
  const {res, payload} = await Wreck.get(uri);
  const contents = JSON.parse(payload.toString()).contents;
  imagesOfRecords[record] = contents.map(function(el) {
    return el.links.self;
  });
};
Once I can implement this, my next task would be to implement this progressive update in a web app that uses the above API.
To report the result of each step of your backend requests, you can use an EventEmitter, which will emit an event at each progress step. You can read about events here.
Simple implementation:
const events = require('events');
const eventEmitter = new events.EventEmitter();

// your request code
Promise.all(foundRecords).then(function() {
  console.log(`found ${Object.keys(imagesOfRecords).length} images… done`);
  eventEmitter.emit('progress');
  reply(imagesOfRecords).headers = res.headers;
});

const eventReaction = (e) => {
  // do something with the event, console.log for example
};

eventEmitter.on('progress', eventReaction);
You can find more examples here and here.
To push events to the client you can use the socket.io library. I think you can find a pretty straightforward explanation of how socket.io works in its documentation.
If you want to send events between servers or processes and go a little further, you can read more about ZeroMQ (0MQ) and its Node implementation.
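As a rough sketch of how the emitter could be wired to socket.io (the event names and payload shape here are illustrative, not from the answer above):

const io = require('socket.io')(httpServer); // httpServer is your existing Node HTTP server (assumption)

io.on('connection', (socket) => {
  // relay each progress step to the connected client
  const onProgress = (step) => socket.emit('progress', step);
  eventEmitter.on('progress', onProgress);
  socket.on('disconnect', () => eventEmitter.off('progress', onProgress));
});

// in the request code, pass a label with each step:
eventEmitter.emit('progress', `found ${numOfFoundRecords}… now getting images…`);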

Firebase database querying taking too much time in Cloud Function

I use Node.js as a backend hosted on Firebase Functions to query my Firebase database. I have this code:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const cors = require('cors')({origin: true});
admin.initializeApp(functions.config().firebase);
const express = require('express');

exports.getAjax = functions.https.onRequest((request, response) => {
  cors(request, response, () => {
    console.log("request.body :", JSON.stringify(request.body));
    console.log("request.query :", JSON.stringify(request.query));
    var date = {
      startDate: request.body.startDate,
      endDate: request.body.endDate
    };
    var db = admin.database();
    var logsDbPath = 'logs';
    var usersDbPath = 'users';
    var ref = db.ref(logsDbPath);
    var tags;
    db.ref(usersDbPath).once('value').then(function(tagsSnapshot) {
      tagsSnapshot.forEach(function(tagSnapshot) {
        var tagId = tagSnapshot.key;
        tagSnapshot.forEach(function(sessSnapshot) {
          var userSessId = sessSnapshot.key;
          var userInfo = sessSnapshot.val();
        });
      });
      tags = JSON.parse(JSON.stringify(tagsSnapshot.val()));
      console.log(tags);
    });
  });
});
My main problem is that console.log(tags), i.e. actually finishing the query and getting a value for tags, took ~38 seconds in this case (sometimes it takes ~1 minute). I will put a picture below:
And that object is not big at all, maybe 100 rows with 4 properties each; I don't think it should take that long. I probably did something wrong, but where? I also have to write a promise or a callback in order to send the tags response to the front end, because the function finishes before I get any value in tags (see picture). I come from a PHP background, and this asynchronous JavaScript concept is quite new to me.
You're not sending any response back to the client, which means that the function will keep running until it times out.
To make sure the function runs only for as long as needed, send a response to the client when you've loaded the data:
exports.getAjax = functions.https.onRequest((request, response) => {
  cors(request, response, () => {
    console.log("request.body :", JSON.stringify(request.body));
    console.log("request.query :", JSON.stringify(request.query));
    var date = {
      startDate: request.body.startDate,
      endDate: request.body.endDate
    };
    var db = admin.database();
    var logsDbPath = 'logs';
    var usersDbPath = 'users';
    var ref = db.ref(logsDbPath);
    var tags;
    db.ref(usersDbPath).once('value').then(function(tagsSnapshot) {
      tagsSnapshot.forEach(function(tagSnapshot) {
        var tagId = tagSnapshot.key;
        tagSnapshot.forEach(function(sessSnapshot) {
          var userSessId = sessSnapshot.key;
          var userInfo = sessSnapshot.val();
        });
      });
      tags = JSON.parse(JSON.stringify(tagsSnapshot.val()));
      console.log(tags);
      // Send the response to the client, which also ends the function
      response.status(200).send(tags);
    });
  });
});
Aside from that, it's hard to say much. Keep in mind that Cloud Functions is in beta and constantly changing; we have no way of knowing whether this is a cold start or a warm start, and we can't see your project to inspect what's going on.
If you'd like more help here, I recommend trying to reproduce the problem in a more troubleshooting-friendly environment. For example, can you reproduce the performance problem in a local Node.js process?
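As a hedged starting point for that kind of local check, you could time the same query with the Admin SDK from a plain Node.js script and compare; the service-account path and database URL below are placeholders:

const admin = require('firebase-admin');

// assumption: a service-account key downloaded from the Firebase console
admin.initializeApp({
  credential: admin.credential.cert(require('./serviceAccountKey.json')),
  databaseURL: 'https://<your-project>.firebaseio.com'
});

console.time('users query');
admin.database().ref('users').once('value').then((snapshot) => {
  console.timeEnd('users query'); // compare against the ~38s seen in the deployed function
  console.log(`loaded ${snapshot.numChildren()} users`);
  process.exit(0);
});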
