I use nodejs as a backend hosted on the firebase functions to query my firebase database, I have this code:
// --- Question code: HTTPS Cloud Function that reads /users from the
// --- Realtime Database. Note it never sends an HTTP response.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const cors = require('cors')({origin: true});
admin.initializeApp(functions.config().firebase);
// NOTE(review): express is required but never used in this snippet.
const express = require('express')
exports.getAjax = functions.https.onRequest((request, response) => {
// CORS middleware wraps the actual handler.
cors(request, response, () => {
console.log("request.body :", JSON.stringify(request.body));
console.log("request.query :", JSON.stringify(request.query));
// Date range from the POST body (not used by the query below).
var date = {
startDate: request.body.startDate,
endDate: request.body.endDate
}
var db = admin.database();
var logsDbPath = 'logs';
var usersDbPath = 'users';
// NOTE(review): `ref` (logs) is created but never queried here.
var ref = db.ref(logsDbPath);
var tags;
// Asynchronous read: `tags` is assigned only after this promise resolves.
db.ref(usersDbPath).once('value').then(function(tagsSnapshot) {
tagsSnapshot.forEach(function(tagSnapshot) {
var tagId = tagSnapshot.key;
tagSnapshot.forEach(function(sessSnapshot) {
var userSessId = sessSnapshot.key;
var userInfo = sessSnapshot.val();
});
});
// Deep-copies the snapshot value into a plain object.
tags = JSON.parse(JSON.stringify(tagsSnapshot.val()));
console.log(tags);
// NOTE(review): no response is ever sent, so the function keeps running
// until the platform times it out -- see the answer below.
});
});
});
My main problem is that console.log(tags); — actually finishing the query and getting the value for tags — took ~38 sec in this case (sometimes it takes ~1 min); I will put a picture below:
And that object is not that big at all — about 100 rows with 4 properties each — so I don't think it should take that long; I probably did something wrong, but where? I also have to write a promise or a callback in order to send that tags variable as the response to the front end, because the function finishes before I get any value in tags (see picture). I come from a PHP background and this asynchronous JavaScript concept is quite new to me.
You're not sending any response back to the client, which means that the function will keep running until it times out.
To make sure the function runs only for as long as needed, send a response to the client when you've loaded the data:
exports.getAjax = functions.https.onRequest((request, response) => {
cors(request, response, () => {
console.log("request.body :", JSON.stringify(request.body));
console.log("request.query :", JSON.stringify(request.query));
var date = {
startDate: request.body.startDate,
endDate: request.body.endDate
}
var db = admin.database();
var logsDbPath = 'logs';
var usersDbPath = 'users';
var ref = db.ref(logsDbPath);
var tags;
db.ref(usersDbPath).once('value').then(function(tagsSnapshot) {
tagsSnapshot.forEach(function(tagSnapshot) {
var tagId = tagSnapshot.key;
tagSnapshot.forEach(function(sessSnapshot) {
var userSessId = sessSnapshot.key;
var userInfo = sessSnapshot.val();
});
});
tags = JSON.parse(JSON.stringify(tagsSnapshot.val()));
console.log(tags);
// Send the response to the client, which also ends the function
response.status(200).send(tags);
});
});
});
Aside from that it's hard to say much. Keep in mind that Cloud Functions is in beta and is constantly changing, we have no way of knowing whether this is a cold start or warm start, and we can't see your project to inspect what's going on.
If you'd like to have more help here, I recommend that you try to reproduce the problem in a more troubleshoot-friendly environment. For example, can you reproduce the performance problem with a local node.js process?
Related
I'm new to Cloud Functions.
I just want to disable my targets after the countdown ends. This function works correctly, but the update happens about 3 minutes after the function has finished.
What am I missing?
// --- Question code: onCreate trigger that polls every second until the
// --- document's endDate passes, then flips isActive to false.
const functions = require("firebase-functions");
const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();
exports.timecontroller = functions.firestore.document("DigitalTargets/{digitalTargetID}").onCreate((snap, context) => {
const id = snap.id
// endDate is a Firestore Timestamp; toDate() yields a JS Date.
const date = new Date(snap.data().endDate.toDate())
var countDownDate = date.getTime();
// NOTE(review): nothing is returned from the handler, so Cloud Functions
// considers the work done immediately; the interval below keeps running
// only on a best-effort basis -- see the answer below.
var myfunc = setInterval(function () {
var now = new Date().getTime();
var timeleft = countDownDate - now;
if (timeleft < 0) {
db.collection("DigitalTargets").doc(id).update({ isActive: false })
clearInterval(myfunc);
}
}, 1000);
})
Since you pay for the time that your Cloud Functions code executes, the container tries to execute your code and terminate as quickly as possible. Unless you tell it otherwise, that means that it terminates the code as soon as the final statement before the closing } executes.
But since you are executing an asynchronous operation with your setInterval call, the code actually needs to continue to run after the closing }.
To allow that you'll need to return a promise that resolves when the code is complete. Something like:
exports.timecontroller = functions.firestore.document("DigitalTargets/{digitalTargetID}").onCreate((snap, context) => {
const id = snap.id
const date = new Date(snap.data().endDate.toDate())
var countDownDate = date.getTime();
return new Promise((resolve, reject) => { // 👈 This tells Cloud Functions to wait
var myfunc = setInterval(function () {
var now = new Date().getTime();
var timeleft = countDownDate - now;
if (timeleft < 0) {
db.collection("DigitalTargets").doc(id).update({ isActive: false })
clearInterval(myfunc);
resolve(); // 👈 This tells Cloud Functions that you're done
}
}, 1000);
})
})
I recommend reading (and watching the videos in) the Firebase documentation on asynchronous behavior in Cloud Functions.
I have nodejs app with expressJS and excel4node library, which is running on local machine.
I'm sending REST messages to this server and it returns me excel binary file.
I want to move it Azure Functions, but facing with issue. Even simple app (took from example) is not running there. Maybe someone have suggestions how to solve this?
// --- Question code: Express app wrapped for Azure Functions; streams an
// --- Excel workbook straight into the HTTP response.
const createHandler = require('azure-function-express').createHandler;
const express = require('express');
const xl = require('excel4node')
// Create express app as usual
const app = express();
app.post('/api/hello-world', (req, res) => {
var wb = new xl.Workbook();
var ws = wb.addWorksheet('S');
ws.cell(1, 1).string('A');
// NOTE(review): wb.write streams the file into `res`; the reported
// Content-Length mismatch suggests the bytes written disagree with the
// header computed upstream -- see the buffer-based workaround below.
wb.write(`FileName.xlsx`, res);
});
// Binds the express app to an Azure Function handler
module.exports = createHandler(app);
and this is the error what I'm seeing :
Microsoft.AspNetCore.Server.Kestrel.Core: Response Content-Length mismatch: too many bytes written (3790 of 3569).
Does someone know how to solve it, or maybe have an example of generating excel in Azure Functions via NodeJS
Just in case anyone else stumbles upon this looking for the answer (like I did). This works for me:
var xl = require('excel4node');
const tryCreate = async (obj) => {
let wb = new xl.Workbook();
const buffer = await wb.writeToBuffer();
return {
setEncoding: 'binary',
// status: 200, /* Defaults to 200 */
body: buffer
};
}
module.exports = async function (context, req) {
try {
context.res = await tryCreate(req.body);
} catch (error) {
context.log.error(error, new Date().toISOString());
}
}
I am making code to receive news from news API and send me daily emails with top headlines based on cron jobs.
I need to receive data first then map it into a variable, I added async await but for some reason it is not working.
If you run this code it will only print "Before summary" and that is it.
It won't get inside if condition however I made async await for that, so the server responses first and then goes on executing the code and have "news" to make summary.
// --- Question code: fetch news articles, then try to summarize them.
const cron = require("node-cron");
const express = require("express");
const moment = require("moment")
const axios = require("axios")
// NOTE(review): `app` is assigned without const/let -- an implicit global.
app = express();
var news;
app.listen("3000")
//I removed my API key from the code and wrote 'MYAPIKEY'.
// Fetches articles and stores them in the module-level `news` variable.
async function getNews(){
let response = await axios.get('https://newsapi.org/v2/everything?q=bitcoin&from=2018-11-12&sortBy=publishedAt&apiKey=MYAPIKEY')
.then( response =>
news = response.data.articles
)
}
// NOTE(review): getNews() is not awaited, so the code below runs before the
// HTTP request completes -- this is why `news` is still undefined here.
getNews()
console.log('Before summary')
if (news) {
var summary = news.map( newsItem => newsItem.title )
console.log(summary)
}
You don't need then if you are working with async
// Fetches the articles and logs their titles. With async/await the result
// is available right after the await -- no .then() chain needed.
async function getNews() {
    try {
        let response = await axios.get('https://newsapi.org/v2/everything?q=bitcoin&from=2018-11-12&sortBy=publishedAt&apiKey=MYAPIKEY');
        var news = response.data.articles;
        if (news) {
            // The original crammed both statements onto one line with no
            // separator, which is a SyntaxError.
            var summary = news.map(newsItem => newsItem.title);
            console.log(summary);
        }
    } catch (err) {
        // The original logged `error`, but the catch binding is `err` --
        // that would throw a ReferenceError inside the handler.
        console.error(err);
    }
}
getNews();
I'm having a tremendously tough time organizing the flow here as I'm self-taught so wondering if someone might be able to assist.
var channelIds = ['XYZ','ABC','QRS']
var playlistIds = [];
var videoIds = [];
ORDER OF PROCESS
1. Get All Playlist IDs: If returning Get Request JSON contains nextPageToken run Get Request again with that page before going to (2)
2. Get All Video IDs: If returning Get Request JSON contains nextPageToken run Get Request again with that page before going to (3)
3. Aggregate into Final Array: I need put all in an array such as:
var ArrFinal = [{channelId,playlistID,videoId},{channelId,playlistID,videoId},{channelId,playlistID,videoId}];
I don't necessarily need someone to write the whole thing. I'm trying to better understand the most efficient way to know when the previous step is done, but also handle the nextPageToken iteration.
I'm not familiar with the YouTube API.
But what you basically need is a get function for each endpoint. This function should also care about the "nextPageToken".
Something like that: (not tested)
'use strict';
const Promise = require('bluebird');
const request = Promise.promisifyAll(require('request'));
const playlistEndpoint = '/youtube/v3/playlists';
const baseUrl = 'https://www.googleapis.com'
const channelIds = ['xy', 'ab', 'cd'];
// Fetches all playlists for a channel, following nextPageToken recursively
// and accumulating results in `playlists` across the recursive calls.
const getPlaylist = async (channelId, pageToken, playlists) => {
    const url = `${baseUrl}${playlistEndpoint}`;
    // pageToken is undefined on the first call, which the API treats as
    // "first page". (The original had a stray mojibake character here.)
    const qs = {
        channelId,
        maxResults: 25,
        pageToken
    };
    try {
        const playlistRequest = await request.getAsync({ url, qs });
        const nextPageToken = playlistRequest.body.nextPageToken;
        // if we already had items, combine with the new ones
        const items = playlists ? playlists.concat(playlistRequest.body.items) : playlistRequest.body.items;
        if (nextPageToken) {
            // if token, do the same again and pass results to function
            return getPlaylist(channelId, nextPageToken, items);
        }
        // if no token we are finished
        return items;
    }
    catch (e) {
        console.log(e.message);
        // Re-throw: the original swallowed the error and returned undefined,
        // which would crash callers that iterate the playlists result.
        throw e;
    }
};
// Same recursive pagination pattern as getPlaylist above, but against the
// playlist-items/videos endpoint (left unimplemented in the answer).
const getVideos = async (playlistId, pageToken, videos) => {
// pretty much the same as above
}
// Collects {channelId, playlistId, videoId} triples for every channel.
// NOTE: this must be declared `async` -- the original was a plain function
// that used `await` inside, which is a SyntaxError.
async function awesome(channelIds) {
    const fancyArray = [];
    await Promise.map(channelIds, async (channelId) => {
        const playlists = await getPlaylist(channelId);
        // The original assigned this Promise.map to a `videos` const that
        // shadowed the inner one and was never used; just await it.
        await Promise.map(playlists, async (playlistId) => {
            const videos = await getVideos(playlistId);
            videos.forEach(videoId => {
                fancyArray.push({ channelId, playlistId, videoId });
            });
        });
    });
    return fancyArray;
}
awesome(channelIds);
// UPDATE
This may be a lot concurrent requests, you can limit them by using
Promise.map(items, item => { somefunction() }, { concurrency: 5 });
I have an API that searches for the user-provided term, returns an array of results, then fires off async requests for each of the results and gets results for each of these second batch of requests. I'd like the API to report progress as it happens rather than just the final result. So, if I do the following request, I should get updates like so
$ curl 'http://server/?q=foobar'
searching for ${q}…
found 76… now getting images…
found 30 images… done
{
result
}
Most of relevant code is shown below. Fwiw, I am using hapijs for my application.
// --- Question code (hapi): search, then fan out one request per hit and
// --- collect image links, keyed by record URL, in shared module state.
let imagesOfRecords = {};
const getImages = async function (q) {
console.log(`searching for ${q}…`);
const uri = `http://remoteserver/?q=${q}`;
const {res, payload} = await Wreck.get(uri);
const result = JSON.parse(payload.toString()).hits;
const numOfFoundRecords = result.total;
if (result.total) {
console.log(`found ${result.total}… now getting images…`);
// Kick off one getBuckets call per hit; they all run concurrently.
const foundRecords = result.hits.map(getBuckets);
Promise.all(foundRecords).then(function() {
console.log(`found ${Object.keys(imagesOfRecords).length} images… done`);
// NOTE(review): `reply` is presumably hapi's legacy reply interface in
// scope from the route handler -- confirm against the caller.
reply(imagesOfRecords).headers = res.headers;
}).catch(error => {
console.log(error)
});
}
else {
console.log('nothing found');
reply(0).headers = res.headers;
}
};
// Resolves a record's bucket URL, then loads its image file links.
const getBuckets = async function(record) {
const { res, payload } = await Wreck.get(record.links.self);
const bucket = JSON.parse(payload.toString()).links.bucket;
await getImageFiles(bucket, record.links.self);
};
// Stores the bucket's content links under the record URL (shared state).
const getImageFiles = async function(uri, record) {
const { res, payload } = await Wreck.get(uri);
const contents = JSON.parse(payload.toString()).contents;
imagesOfRecords[record] = contents.map(function(el) {
return el.links.self;
});
};
Once I can implement this, my next task would be to implement this progressive update in a web app that uses the above API.
To show result with each step of your requests for backend you can use EventEmitter, which will emit event on each progress step. You can read about events here.
Simple implementation:
// --- Answer snippet: emit a progress event alongside the existing reply.
const events = require('events');
const eventEmitter = new events.EventEmitter();
//your request code
Promise.all(foundRecords).then(function() {
console.log(`found ${Object.keys(imagesOfRecords).length} images… done`);
// Notify any registered listeners that this stage has completed.
eventEmitter.emit('progress');
reply(imagesOfRecords).headers = res.headers;
})
// Listener invoked each time 'progress' is emitted.
const eventReaction = (e) => {
// do something with event, console log for example.
}
eventEmitter.on('progress', eventReaction);
More examples you can find here and here.
To show events to client you can use library socket.io. I think you can find pretty straightforward explanations how socket.io works in documentation.
If you want to send events between servers or processes and want to go little further, you can read more about 0MQ (zero mq) and it's node implementation