Batch requests placed via Node.js request.get using rxjs

I am currently using the following function to create a Promise from the result of calling request.get:
function dlPromiseForMeta(meta) {
    return new Promise(function (resolve, reject) {
        meta.error = false;
        var fileStream = fs.createWriteStream(meta.filePath);
        fileStream.on('error', function (error) {
            meta.error = true;
            console.log('filesystem ' + meta.localFileName + ' ERROR: ' + error);
            console.log('record: ' + JSON.stringify(meta));
            reject(meta);
        });
        fileStream.on('close', function () {
            resolve(meta);
        });
        request.get({
            uri: meta.url,
            rejectUnauthorized: false,
            followAllRedirects: true,
            pool: {
                maxSockets: 1000
            },
            timeout: 10000,
            agent: false
        })
        .on('socket', function () {
            console.log('request ' + meta.localFileName + ' made');
        })
        .on('error', function (error) {
            meta.error = true;
            console.log('request ' + meta.localFileName + ' ERROR: ' + error);
            console.log('record: ' + JSON.stringify(meta));
            reject(meta);
        })
        .on('end', function () {
            console.log('request ' + meta.localFileName + ' finished');
            fileStream.close();
        })
        .pipe(fileStream);
    });
}
This works fine except when I am trying to call it too many times, as in the example below, where imagesForKeywords returns an rxjs Observable:
imagesForKeywords(keywords, numberOfResults)
    .mergeMap(function (meta) {
        meta.fileName = path.basename(url.parse(meta.url).pathname);
        meta.localFileName = timestamp + '_' + count++ + '_' + meta.keyword + '_' + meta.source + path.extname(meta.fileName);
        meta.filePath = path.join(imagesFolder, meta.localFileName);
        return rxjs.Observable.fromPromise(dlPromiseForMeta(meta));
    });
I start getting ESOCKETTIMEDOUT errors when the source observable becomes sufficiently large.
So what I would like to do is somehow batch what happens in mergeMap for every, say, 100 entries... so I do those 100 in parallel, and each batch serially, and then merge them at the end.
How can I accomplish this using rxjs?

I think the simplest thing to use is bufferTime(), which triggers after a certain number of milliseconds but also takes a count as its last parameter.
Using a timeout seems useful in case a stream pattern does not reach the batch limit in a reasonable time.
If that does not fit your use-case, comment with some more details and I will adjust accordingly.
Your code will look like this:
- bufferTime - as described above
- forkJoin - run the buffer contents in parallel and emit when all return
- mergeMap - coalesce the results
imagesForKeywords(keywords, numberOfResults)
    .map(function (meta) {
        meta.fileName = path.basename(url.parse(meta.url).pathname);
        meta.localFileName = timestamp + '_' + count++ + '_' + meta.keyword + '_' + meta.source + path.extname(meta.fileName);
        meta.filePath = path.join(imagesFolder, meta.localFileName);
        return meta;
    })
    .bufferTime(maxTimeout, null, maxBatch)
    .mergeMap(items => rxjs.Observable.forkJoin(items.map(dlPromiseForMeta)))
    .mergeMap(arr => rxjs.Observable.from(arr))
Here's a runnable mockup to show it working. I have commented out the last mergeMap to show the buffering.
I have assumed a couple of things:
- imagesForKeywords breaks keywords into an observable stream of keyword
- there is one keyword per dlPromiseForMeta call
// Some mocking
const imagesForKeywords = (keywords, numberOfResults) => {
    return Rx.Observable.from(keywords.map(keyword => { return {keyword} }))
}
const dlPromiseForMeta = (meta) => {
    return Promise.resolve(meta.keyword + '_image')
}

// Compose meta - looks like it can run at scale, since it is just string manipulation.
const composeMeta = meta => {
    // meta.fileName = path.basename(url.parse(meta.url).pathname);
    // meta.localFileName = timestamp + '_' + count++ + '_' + meta.keyword + '_' + meta.source + path.extname(meta.fileName);
    // meta.filePath = path.join(imagesFolder, meta.localFileName);
    return meta;
}

const maxBatch = 3
const maxTimeout = 50 //ms

const bufferedPromises = (keywords, numberOfResults) =>
    imagesForKeywords(keywords, numberOfResults)
        .map(composeMeta)
        .bufferTime(maxTimeout, null, maxBatch)
        .mergeMap(items => Rx.Observable.forkJoin(items.map(dlPromiseForMeta)))
        //.mergeMap(arr => Rx.Observable.from(arr))

const keywords = ['keyw1', 'keyw2', 'keyw3', 'keyw4', 'keyw5', 'keyw6', 'keyw7'];
const numberOfResults = 1;

bufferedPromises(keywords, numberOfResults)
    .subscribe(console.log)
(The original runnable snippet loaded RxJS 5.5.6 from https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.5.6/Rx.js)
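A side note, my addition rather than part of the original answer: in RxJS 5, mergeMap also accepts a concurrency limit as its trailing argument, which caps how many inner subscriptions run at once without explicit batching. A minimal sketch against the same mocks:

// Sketch (my addition): cap concurrent downloads at 100 instead of buffering into batches.
// The trailing argument to mergeMap is the concurrency limit (RxJS 5 signature).
imagesForKeywords(keywords, numberOfResults)
    .map(composeMeta)
    .mergeMap(meta => Rx.Observable.fromPromise(dlPromiseForMeta(meta)), 100)
    .subscribe(console.log);

Unlike bufferTime plus forkJoin, this keeps the pipeline topped up at 100 in-flight requests continuously rather than waiting for each batch to drain.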

Related

wait for promise before exit process

I'm trying to run some Node.js code that reads data fields from an array and uses them in a database query to check whether the data is a duplicate before inserting it into the corresponding table.
My Node.js code will be called from a PHP script, so I need to know when it ends; this is why I need to add process.exit(0) somewhere. The problem I have is that if I add it, the script is terminated and my promise never gets the time to send back the result.
Here is my code:
var bar = new Promise((resolve, reject) => {
    result.forEach((row, index, array) => {
        var escaped = _.map(row, mysql.escape);
        var checkQuery = "SELECT COUNT(*) as found FROM data WHERE field1 = " + escaped[0] + " AND field2 = " + escaped[1] + " AND field3 = " + escaped[2] + " AND field4 = " + escaped[3] + " AND field5 = " + escaped[4] + " AND field6 = " + escaped[5] + " AND field7 = " + escaped[6] + ";";
        conn.query(checkQuery, function (err, res) {
            if (err) {
                console.log("Error checking row for duplicate");
                console.log(checkQuery);
                process.exit(1);
            } else {
                if (res[0].found == 0) {
                    var query = " (";
                    var escaped = _.map(row, mysql.escape);
                    var csv = escaped.join(',');
                    query += csv;
                    query += ")";
                    query += row !== _.last(result) ? ',' : ';';
                    console.log(query); //This will change to inserting the data to the table
                } else {
                    console.log("Duplicate found!");
                }
            }
        });
        if (index === array.length - 1) resolve();
    });
});
bar.then(() => {
    console.log('All done!');
    process.exit(0);
});
If I remove process.exit(0); I see "All done" first, then the console.log(query) output.
If I add it, the script is terminated and I see "All done" only.
Is there a better approach to this task?
Thanks.
Here is a way to wait for a promise before the application exits: a pending timer keeps the Node event loop alive, and clearing it once the promise settles lets the process exit naturally.
class Waiter {
    private timeout: any
    constructor() {
        this.waitLoop()
    }
    private waitLoop(): void {
        this.timeout = setTimeout(() => { this.waitLoop() }, 100 * 1000)
    }
    okToQuit(): void {
        clearTimeout(this.timeout)
    }
}

// Your app.
const appPromise: Promise<any> = ...
const w = new Waiter()
appPromise.finally(() => {
    console.log("Quitting")
    w.okToQuit()
})
Running multiple asynchronous operations in a loop and tracking when everything is done is far easier if you use promises for each individual asynchronous operation rather than trying to track asynchronous operations that use plain callbacks.
You don't say exactly what your database is, but if it's MySQL, there is a mysql2/promise driver that natively supports promises, and my recommendation would be to switch to it; then you can directly use the promise returned from .query() (see the sketch after the code below). But, without info about your specific database driver, I've shown how to manually promisify .query().
Then, the looping code can use a for loop and await to sequence the database calls so it's easy to know when they are all complete.
const { promisify } = require('util');

async function someFunc() {
    // other code here

    // promisify conn.query (or use the promise interface directly from the database driver)
    conn.queryP = promisify(conn.query);

    let checkQuery; // declared here so the catch block can log it
    try {
        for (const row of result) {
            const escaped = _.map(row, mysql.escape);
            checkQuery = "SELECT COUNT(*) as found FROM data WHERE field1 = " + escaped[0] + " AND field2 = " +
                escaped[1] + " AND field3 = " + escaped[2] + " AND field4 = " + escaped[3] + " AND field5 = " +
                escaped[4] + " AND field6 = " + escaped[5] + " AND field7 = " + escaped[6] + ";";
            const res = await conn.queryP(checkQuery);
            if (res[0].found == 0) {
                const csv = _.map(row, mysql.escape).join(',');
                const terminator = row !== _.last(result) ? ',' : ';';
                const query = " (" + csv + ")" + terminator;
                console.log(query); //This will change to inserting the data to the table
            } else {
                console.log("Duplicate found!");
            }
        }
    } catch (e) {
        console.log("Error checking row for duplicate: ", checkQuery);
        console.log(e);
        process.exit(1);
    }
    console.log('All done!');
    process.exit(0);
}
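For completeness, here is a minimal sketch of the mysql2/promise driver mentioned above. This is my illustration, not code from the question: the connection details are placeholders and the WHERE clause is abbreviated to two fields.

// Sketch only: mysql2/promise exposes a promise-based query() directly,
// and parameter placeholders replace the hand-escaping above.
const mysql2 = require('mysql2/promise');

async function checkRows(result) {
    // Placeholder connection settings - adjust for your environment.
    const conn = await mysql2.createConnection({ host: 'localhost', user: 'app', database: 'mydb' });
    for (const row of result) {
        const [rows] = await conn.query(
            'SELECT COUNT(*) AS found FROM data WHERE field1 = ? AND field2 = ?',
            [row[0], row[1]]
        );
        console.log(rows[0].found === 0 ? 'New row' : 'Duplicate found!');
    }
    await conn.end();
}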
Separately, the question's code appears to be trying to build up a query inside the loop, where each iteration adds on to the previous one (that's what row !== _.last(result) ? ',' : ';' looks like, anyway). If that's the case, then the query variable has to be moved outside the loop so it can accumulate from one iteration to the next. But you don't show what you're really trying to do with that query, so you're on your own for that part.
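A minimal sketch of that accumulation, assuming the intent is one bulk INSERT of the non-duplicate rows (the INSERT prefix and table layout are my guesses, not from the question):

// Sketch: accumulate a VALUES list across iterations, then run a single bulk INSERT.
const values = [];
for (const row of result) {
    // build checkQuery for this row as shown above, then:
    const res = await conn.queryP(checkQuery);
    if (res[0].found == 0) {
        values.push("(" + _.map(row, mysql.escape).join(',') + ")");
    }
}
if (values.length > 0) {
    await conn.queryP("INSERT INTO data VALUES " + values.join(',') + ";");
}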
You decide how many promises will go out beforehand and then count them as they resolve, then exit.
This example applies the same principle, but with callback functions instead of promises. For promises you would call a count function from .then() or .finally(), and the count function decides whether it is time to exit (see the sketch after the example).
Mongoose example from a JavaScript server:
let g_DB = null;

//init mongoose
const mongoose = require("mongoose");
const connectionParams = {
    useNewUrlParser: true,
    useUnifiedTopology: true,
};
const connStr1 = "mongodb+srv://XX:XX#clusterXX.XX.mongodb.net/XX?retryWrites=true&w=majority";
mongoose.set("strictQuery", false);
mongoose.connect(connStr1, connectionParams)
    .then(handleConnection)
    .catch((err) => console.log("Error:", err));
//end script

//handleConnection - start on successful response from mongoose connection
function handleConnection(msg) {
    console.log("mongoose has connected to Mongo Atlas successfully");
    g_DB = mongoose.connection;
    g_DB.once("open", function () {
        console.log(
            "mongoose has connected to Mongo Atlas Cluster using database XX"
        );
        doTest();
    });
}

//---------------------------------------------------
function doTest() {
    console.log("test-05: create 500 books");
    //---- MODEL ----
    const _schema = new mongoose.Schema({
        name: String,
        price: Number,
        quantity: Number,
    });
    //g_DB is a mongoose connection set earlier in the script
    const _model = g_DB.model("book_schema", _schema, "bookstore");

    let loopcount = 500;
    let waitcount = loopcount;
    for (let i = 0; i < loopcount; i++) {
        const _m = new _model({
            name: `WHY MAKE 500 BOOKS ${new Date().toISOString()}`,
            price: 200,
            quantity: 2000,
        });
        _m.save((e, x) => {
            if (e) return console.error(e);
            console.log(x, `waitcount: ${--waitcount}`);
            if (!waitcount) doExit();
        });
    }
}

//--
function doExit() {
    console.log("exit from server");
    process.exit();
}
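For the promise variant mentioned at the top of this answer, the counting looks something like this (my sketch; saveBook stands in for any promise-returning function):

// Sketch: fire a known number of promises, count them as they settle, then exit.
let waitcount = 500;
function countDown() {
    if (--waitcount === 0) doExit();
}
for (let i = 0; i < 500; i++) {
    saveBook(i) // hypothetical promise-returning save
        .then(countDown)
        .catch((e) => { console.error(e); countDown(); });
}

Collecting the promises into an array and using Promise.all() achieves the same end.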
Use resolve/reject to manage the promise in Node.js.
When your task fulfils your request, send the result with resolve(); if it fails, use reject().
In your case you are not managing the promise properly, which is why it's running asynchronously; better to use the following approach with proper returns.
var bar = new Promise((resolve, reject) => {
    return result.forEach((row, index, array) => {
        var escaped = _.map(row, mysql.escape);
        var checkQuery = "SELECT COUNT(*) as found FROM data WHERE field1 = " + escaped[0] + " AND field2 = " + escaped[1] + " AND field3 = " + escaped[2] + " AND field4 = " + escaped[3] + " AND field5 = " + escaped[4] + " AND field6 = " + escaped[5] + " AND field7 = " + escaped[6] + ";";
        return conn.query(checkQuery, function (err, res) {
            if (err) {
                console.log("Error checking row for duplicate");
                console.log(checkQuery);
                return reject(err);
            } else {
                if (res[0].found == 0) {
                    var query = " (";
                    var escaped = _.map(row, mysql.escape);
                    var csv = escaped.join(',');
                    query += csv;
                    query += ")";
                    query += row !== _.last(result) ? ',' : ';';
                    console.log(query); //This will change to inserting the data to the table
                    return resolve(query)
                } else {
                    console.log("Duplicate found!");
                    return reject('Duplicate Found');
                }
            }
        });
    });
});

bar.then((data) => {
    console.log('All done!');
});
In the above code I am returning the query plus resolve/reject, which makes it run in a more synchronised way.
return conn.query(checkQuery, function (err, res) {
Plus, while processing this promise I am handling it with .then((data) => ...), so I can work with the resolved value there.
bar.then((data) => {
    console.log('All done!');
});
Note: if you reject a promise, it won't be available in the above .then block; you'll find the rejection in the catch block, so the code changes as follows.
bar.then((data) => {
    console.log('All done!');
}).catch(err => {
    console.log(err);
});
You can try the following:
(async () => {
    await new Promise((resolve, reject) => {
        result.forEach((row, index, array) => {
            var escaped = _.map(row, mysql.escape);
            var checkQuery = "SELECT COUNT(*) as found FROM data WHERE field1 = " + escaped[0] + " AND field2 = " + escaped[1] + " AND field3 = " + escaped[2] + " AND field4 = " + escaped[3] + " AND field5 = " + escaped[4] + " AND field6 = " + escaped[5] + " AND field7 = " + escaped[6] + ";";
            conn.query(checkQuery, function (err, res) {
                if (err) {
                    console.log("Error checking row for duplicate");
                    console.log(checkQuery);
                    process.exit(1);
                } else {
                    if (res[0].found == 0) {
                        var query = " (";
                        var escaped = _.map(row, mysql.escape);
                        var csv = escaped.join(',');
                        query += csv;
                        query += ")";
                        query += row !== _.last(result) ? ',' : ';';
                        console.log(query); //This will change to inserting the data to the table
                    } else {
                        console.log("Duplicate found!");
                    }
                }
            });
            if (index === array.length - 1) resolve();
        });
    });
    console.log('All done!');
})();
You don't even need to call process.exit(0), because the code will always terminate when the job is done :)

Proper async/await function

I am attempting to run a bot that scrapes Amazon (using amazon-buddy) for certain products (using an array of ASINs) and checks the price. If the price is not 0, it should send a message on Discord. I currently have this set to run every 30 seconds and it's working, but there are times where it seems like each element is not waiting for the previous one to get a response in the forEach loop, and my function doesn't seem to be correct (I'm still trying to understand async/await functions properly).
Is there a better way to run this so that each element waits for the previous one to be scraped before moving on to the next one, and THEN runs the loop again after 30 seconds?
(function() {
    var c = 0;
    var timeout = setInterval(function() {
        const checkStock = (async () => {
            config.items.itemGroup.forEach(element => {
                console.log('Checking stock on ' + element)
                try {
                    const product_by_asin = await amazonScraper.asin({ asin: element });
                    console.log(product_by_asin)
                    const price = product_by_asin.result[0].price.current_price
                    const symbol = product_by_asin.result[0].price.symbol
                    const asin = product_by_asin.result[0].asin
                    const title = product_by_asin.result[0].title
                    const url = product_by_asin.result[0].url
                    const image = product_by_asin.result[0].main_image
                    if (price != 0) {
                        const inStockResponse = {
                            color: 0x008000,
                            title: title + ' is in stock!',
                            url: url,
                            author: {
                                name: config.botName,
                                icon_url: config.botImg,
                                url: config.botUrl
                            },
                            description: '<#767456705306165298>, click the title to go purchase!\n\n' +
                                'Price: ' + symbol + price,
                            thumbnail: {
                                url: image
                            },
                            timestamp: new Date()
                        }
                        message.channel.send({embed: inStockResponse });
                        console.log(title + ' (' + asin + ') IS available!')
                    } else {
                        console.log(title + ' (' + asin + ') IS NOT available!')
                    }
                } catch (error) {
                    console.log(error);
                }
            });
            checkStock()
        });
        console.log('Counter: ' + c)
        c++;
    }, 30000);
})();
You could use a for...of loop which can wait for each iteration to finish:
async function checkItems(items) {
    // Check all items, wait for each to complete.
    for (const item of items) {
        try {
            const product_by_asin = await amazonScraper.asin({ asin: item });
            console.log(product_by_asin);
            const price = product_by_asin.result[0].price.current_price;
            const symbol = product_by_asin.result[0].price.symbol;
            const asin = product_by_asin.result[0].asin;
            const title = product_by_asin.result[0].title;
            const url = product_by_asin.result[0].url;
            const image = product_by_asin.result[0].main_image;
            if (price != 0) {
                const inStockResponse = {
                    color: 0x008000,
                    title: title + " is in stock!",
                    url: url,
                    author: {
                        name: config.botName,
                        icon_url: config.botImg,
                        url: config.botUrl,
                    },
                    description:
                        "<#767456705306165298>, click the title to go purchase!\n\n" +
                        "Price: " +
                        symbol +
                        price,
                    thumbnail: {
                        url: image,
                    },
                    timestamp: new Date(),
                };
                // NOTE: you might want to wait for this too; the error
                // currently isn't being handled like this either.
                message.channel.send({ embed: inStockResponse });
                console.log(title + " (" + asin + ") IS available!");
            } else {
                console.log(title + " (" + asin + ") IS NOT available!");
            }
        } catch (err) {
            console.log(err);
        }
    }
    // Wait 30s and check again.
    setTimeout(() => checkItems(items), 30000);
}
checkItems(config.items.itemGroup);
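Following up on the NOTE comment in the sketch above: message.channel.send() in discord.js also returns a promise, so awaiting it puts send failures into the same try/catch as the scrape errors.

// Sketch: await the Discord send so a failed send is caught like the scrape errors.
await message.channel.send({ embed: inStockResponse });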

Using node.js for-loop index in a coinbase-api callback function

I am new to node.js and I am trying to make a simple script that connects to the coinbase-api and gets the current price of whatever markets are defined in the MARKET array.
The problem I am having is that the for-loop that iterates through the array is asynchronous, and the callback function is not getting the correct index value for the array.
The two main solutions I have found are to use promises or to force the loop to wait. I think I need to be using promises rather than forcing the for loop to wait, but honestly I have failed to implement a solution either way. I have found many examples of promises, but I just can't seem to figure out how to implement them in my script. I would appreciate any help.
const coinbaseModule = require('coinbase-pro');
const COINBASE_URI = 'https://api-public.sandbox.pro.coinbase.com';
// const MARKET = ['BTC-USD'];
const MARKET = ['BTC-USD', 'ETH-BTC'];
let askPrice = [null, null];
let averagePrice = [null, null];
let tickerCount = null;

const getCallback = (error, response, data) =>
{
    if (error)
        return console.log(error);
    if ((data != null) && (data.ask != null) && (data.time != null))
    {
        askPrice[tickerCount] = parseFloat(data.ask);
        if (averagePrice[tickerCount] === null)
        {
            averagePrice[tickerCount] = askPrice[tickerCount];
            console.log(MARKET[tickerCount] + " ask price: " + askPrice[tickerCount].toFixed(6));
        }
        else
        {
            averagePrice[tickerCount] = (averagePrice[tickerCount] * 1000 + askPrice[tickerCount]) / 1001;
            console.log(MARKET[tickerCount] + " ask price: " + askPrice[tickerCount].toFixed(6) + " average price: " + averagePrice[tickerCount].toFixed(6));
        }
    }
}

setInterval(() =>
{
    console.log('\n');
    publicClient = new coinbaseModule.PublicClient(COINBASE_URI);
    for (tickerCount = 0; tickerCount < MARKET.length; tickerCount++)
    {
        publicClient.getProductTicker(MARKET[tickerCount], getCallback);
    }
}, 10000);
I was able to figure out how to use promises through trial and error, with the helpful examples on the Mozilla Developer Network. I am sure I am making some mistakes, but at least it is working now. A little bonus is that I was able to remove a global.
const coinbaseModule = require('coinbase-pro');
const COINBASE_URI = 'https://api-public.sandbox.pro.coinbase.com';
// const MARKET = ['BTC-USD'];
const MARKET = ['BTC-USD', 'ETH-BTC'];
let askPrice = [null, null];
let averagePrice = [null, null];

function getProductTicker(tickerCount) {
    return new Promise(resolve => {
        publicClient.getProductTicker(MARKET[tickerCount], function callback(error, response, data) {
            if (error)
                return console.log(error);
            if ((data != null) && (data.ask != null) && (data.time != null))
            {
                askPrice[tickerCount] = parseFloat(data.ask);
                if (averagePrice[tickerCount] === null)
                {
                    averagePrice[tickerCount] = askPrice[tickerCount];
                    console.log(MARKET[tickerCount] + " ask price: " + askPrice[tickerCount].toFixed(6));
                }
                else
                {
                    averagePrice[tickerCount] = (averagePrice[tickerCount] * 1000 + askPrice[tickerCount]) / 1001;
                    console.log(MARKET[tickerCount] + " ask price: " + askPrice[tickerCount].toFixed(6) + " average price: " + averagePrice[tickerCount].toFixed(6));
                }
                resolve();
            }
        });
    });
}

setInterval(async () =>
{
    console.log('\n');
    publicClient = new coinbaseModule.PublicClient(COINBASE_URI);
    for (var tickerCount = 0; tickerCount < MARKET.length; tickerCount++)
    {
        await getProductTicker(tickerCount);
    }
}, 10000);
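One caveat worth flagging (my observation, not from the original post): resolve() is only reached on the success path, so an API error or a failed data check would leave the awaiting loop hanging on that iteration forever. A sketch that settles the promise on every path:

// Sketch: settle the promise on every path so `await getProductTicker(...)` cannot hang.
function getProductTicker(tickerCount) {
    return new Promise((resolve, reject) => {
        publicClient.getProductTicker(MARKET[tickerCount], function callback(error, response, data) {
            if (error) {
                console.log(error);
                return reject(error); // or resolve() to just skip this ticker
            }
            if ((data != null) && (data.ask != null) && (data.time != null)) {
                // ...price bookkeeping as above...
            }
            resolve(); // settle even when the data check fails
        });
    });
}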

Lambda function taking >3 seconds to run + 5-10 secs warmup each time

I have a simple node.js function with 2 REST API calls and a socket connection output, hosted in an AWS Lambda. It takes 5-10 secs of warmup time and >3 secs of execution time.
When the code is run locally it executes both requests and the socket connection, completing in about ~1300 ms. Why is AWS more than double the execution time? I have set the timeout to 120 seconds and memory at 128 MB (the default).
I appreciate the code is not very tidy; I am working on cleaning it up but needed something going for the time being.
The project simply gets info from ServiceM8 via API when called by a webhook subscription, then formats the info into ZPL strings and forwards them to a TCP server for printing via a thermal printer.
My questions are:
Is it my code bottlenecking?
Can it be optimized to run faster?
Do I simply need to employ a warming plugin for my function to allow hot starting?
My function:
'use strict';

//Require libraries
var request = require("request");
var net = require('net');

exports.handler = (event, context, callback) => {
    if (event.eventName != 'webhook_subscription') {
        callback(null, {});
    }

    //Global Variables
    var strAssetUUID;
    var strAssetURL;
    var strFormUUID;
    var strTestDate;
    var strRetestDate;
    var appliancePass = true;
    var strAccessToken;
    var strResponseUUID;

    //Printer Access
    const tcpUrl = 'example.com';
    const tcpPort = 12345;
    var client = new net.Socket();

    //UUID of Appliance Test Form.
    const strTestFormUUID = 'UUID_of_form';

    //Begin function
    /**
     * Inspect the `eventArgs.entry` argument to get details of the change that caused the webhook
     * to fire.
     */
    strResponseUUID = event.eventArgs.entry[0].uuid;
    strAccessToken = event.auth.accessToken;
    console.log('Response UUID: ' + strResponseUUID);
    console.log('Access Token: ' + strAccessToken);

    //URL Options for FormResponse UUID query
    const urlFormResponse = {
        url: 'https://api.servicem8.com/api_1.0/formresponse.json?%24filter=uuid%20eq%20' + strResponseUUID,
        headers: {
            // Use the temporary Access Token that was issued for this event
            'Authorization': 'Bearer ' + strAccessToken
        }
    };

    //Query form Response UUID to get information required.
    request.get(urlFormResponse, function(err, res, body) {
        //Check response code from API query
        if (res.statusCode != 200) {
            // Unable to query form response records
            callback(null, {err: "Unable to query form response records, received HTTP " + res.statusCode + "\n\n" + body});
            return;
        }
        //If we do receive a 200 status code, begin
        var arrRecords = JSON.parse(body);
        //Store the UUID of the form used for the form response.
        strFormUUID = arrRecords[0].form_uuid;
        console.log('Form UUID: ' + strFormUUID);
        //Store the UUID of the asset the form response relates to.
        strAssetUUID = arrRecords[0].asset_uuid;
        console.log('Asset UUID: ' + strAssetUUID);

        if (strFormUUID == strTestFormUUID) {
            //Get the edited date and parse it into a JSON date object.
            var strEditDate = new Date(arrRecords[0].edit_date);
            //Reassemble JSON date to dd-mm-yyyy.
            strTestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
            //Extract the response for retest period.
            var strRetestAnswer = JSON.parse(arrRecords[0].field_data);
            strRetestAnswer = strRetestAnswer[0].Response;

            //Appropriate function based on retest response.
            switch (strRetestAnswer) {
                case '3 Months':
                    //Add x months to current test date object
                    strEditDate.setMonth(strEditDate.getMonth() + 3);
                    strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
                    break;
                case '6 Months':
                    strEditDate.setMonth(strEditDate.getMonth() + 6);
                    strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
                    break;
                case '12 Months':
                    strEditDate.setMonth(strEditDate.getMonth() + 12);
                    strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
                    break;
                case '2 Years':
                    strEditDate.setMonth(strEditDate.getMonth() + 24);
                    strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
                    break;
                case '5 Years':
                    strEditDate.setMonth(strEditDate.getMonth() + 60);
                    strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
                    break;
                default:
                    strRetestDate = "FAIL";
                    appliancePass = false;
            }
            console.log('Appliance Pass: ' + appliancePass);
            console.log('Test Date: ' + strTestDate);
            console.log('Retest Period: ' + strRetestAnswer);
            console.log('Retest Date: ' + strRetestDate);

            //URL Options for Asset UUID query
            const urlAssetResponse = {
                url: 'https://api.servicem8.com/api_1.0/asset/' + strAssetUUID + '.json',
                headers: {
                    // Use the temporary Access Token that was issued for this event
                    'Authorization': 'Bearer ' + strAccessToken
                }
            };

            //Query the api for the asset URL of the provided asset UUID.
            request.get(urlAssetResponse, function(err, res, body) {
                //Check response code from API query
                if (res.statusCode != 200) {
                    // Unable to query asset records
                    callback(null, {err: "Unable to query asset records, received HTTP " + res.statusCode + "\n\n" + body});
                    return;
                }
                //If we do receive a 200 status code, begin
                var strAssetResponse = JSON.parse(body);
                //Store the asset URL
                strAssetURL = 'https://sm8.io/' + strAssetResponse.asset_code;
                console.log('Asset URL: ' + strAssetURL);

                //generate tag and send to printer
                var strZPLPass = ('^XA....^XZ\n');
                var strZPLFail = ('^XA....^XZ\n');

                //Now that we have our ZPL generated from our dates and URLs,
                //send the correct ZPL to the printer.
                client.connect(tcpPort, tcpUrl, function() {
                    console.log('Connected');
                    //Send Appropriate ZPL
                    if (appliancePass) {
                        client.write(strZPLPass);
                    } else {
                        client.write(strZPLFail);
                    }
                    console.log('Tag Successfully Printed!');
                    //As the tcp server receiving the string does not return any communication,
                    //there is no way to know when the data has been successfully received in full.
                    //So we simply timeout the connection after 750ms, which is generally long enough
                    //to ensure complete transmission.
                    setTimeout(function () {
                        console.log('Timeout, connection closing...');
                        client.destroy();
                    }, 750);
                });
            });
        }
    });
};
First of all, I would suggest you stop using the request module and switch to the native http/https modules. Everything can be done without tons of lines these days. request is a module with 48 total dependencies; if you do the math, that's thousands of lines for a simple GET request.
You should always minimize the complexity of your dependencies. I use a Lambda to check the health of my sites, grabbing the whole response and checking the HTML, on completely different servers. The VPS is located in Frankfurt, AWS in Ireland. My time per request ranges between 100~150 ms.
Here's a simple promise-based request function I'm using (note: for port-443 requests like the examples below, the https module is the one to use):
const https = require('https');

function request(obj, timeout) {
    return new Promise(function(res, rej) {
        if (typeof obj !== "object") {
            rej("Argument must be a valid http request options object")
        }
        obj.timeout = timeout;
        obj.rejectUnauthorized = false;
        let request = https.get(obj, (response) => {
            if (response.statusCode !== 200) {
                rej("Connection error");
            }
            var body = '';
            response.on('data', (chunk) => {
                body += chunk;
            });
            response.on('end', () => {
                res(body);
            });
            response.on('error', (error) => {
                rej(error);
            });
        });
        request.setTimeout(timeout);
        request.on('error', (error) => {
            rej(error);
        })
        request.on('timeout', () => {
            request.abort();
            rej("Timeout!")
        })
    });
}
Example
const reqOpts = {
    hostname: 'www.example.com',
    port: 443,
    path: '/hello',
    method: 'GET',
    headers: {
        handshake: "eXTNxFMxQL4pRrj6JfzQycn3obHL",
        remoteIpAddress: event.sourceIp || "lambda"
    }
}

let httpTestCall;
try {
    httpTestCall = await request(reqOpts, 250);
}
catch (e) {
    console.error(e);
}
Now, based on that change, switch your handler to async using exports.handler = async (event, context, callback) => {} and use the console to measure the execution time of every request with console.time() and console.timeEnd(). From there you can see what's slowing down your code using CloudWatch Logs. Here's another example based on your code:
let reqOpts = {
    hostname: 'api.servicem8.com',
    port: 443,
    path: '/api_1.0/formresponse.json?%24filter=uuid%20eq%20' + strResponseUUID,
    method: 'GET',
    headers: {
        // Use the temporary Access Token that was issued for this event
        'Authorization': 'Bearer ' + strAccessToken
    }
}

console.time("=========MEASURE_servicem8=========")
let error = null;
await request(reqOpts, 5555).catch((e) => {
    error = e;
})
console.timeEnd("=========MEASURE_servicem8=========")
if (error) {
    callback(null, {err: "Unable to query form response records, received HTTP " + error}); /* or anything similar */
}
References
https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html
https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html
AWS Lambdas are not fast by nature (as of writing this answer). The startup time is not guaranteed, and it is known to be high.
If you need performance, you will not get it this way.

How to upload a list of files to firebase storage from firebase functions

I have a firebase function that takes a request from the frontend with a file's name, which is a video stored in firebase storage; I then apply ffmpeg and extract the video into many frames. In the end, I upload all the frames to firebase storage.
Everything works well; I am able to get all the frames. However, there is a problem with uploading the frames. Sometimes I can upload all frames successfully, but the function keeps running until timeout, and sometimes I can only upload the first frame. I am new to node.js. I guess there is a problem with a return or a promise (I don't quite understand what to return and how to handle promises).
Also, I would like to write the data of each frame to the database. Where should I put this part of the code?
exports.extractFrame = functions.https.onRequest(function (req, res) {
    const name = req.query.fileName;
    const username = name.substr(0, name.length - 4);
    const sessionId = 'video-org';
    const framePath = 'frame-org';
    const sourceBucketName = 'this is my bucket name';
    const sourceBucket = gcs.bucket(sourceBucketName);
    const temDir = os.tmpdir();

    return sourceBucket.file(sessionId + '/' + name).download({
        destination: temDir + '/' + name
    }).then(() => {
        console.log('extract frames');
        return spawn(ffmpegPath, ['-i', temDir + '/' + name, temDir + '/' + username + '%d.png']);
    }).then(() => {
        const frames = fs.readdirSync(temDir);
        console.log(frames);
        for (let index in frames) {
            if (index != 0) {
                console.log('uploading');
                sourceBucket.upload(temDir + '/' + frames[index], {
                    destination: framePath + '/' + frames[index]
                });
            }
        }
    }).then(() => {
        res.send('I am done');
    });
});
Thanks so much for the help!!
Collect all the promises from all of the calls to sourceBucket.upload() into an array, then use Promise.all() to wait for the entire set to resolve before sending the response:
const promises = [];
for (let index in frames) {
    if (index != 0) {
        console.log('uploading');
        const p = sourceBucket.upload(temDir + '/' + frames[index], {
            destination: framePath + '/' + frames[index]
        });
        promises.push(p);
    }
}
return Promise.all(promises);
Also, you don't return a promise from an HTTP type function. Just sending the response with res.send() will end the function. This is mentioned in the documentation.
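Putting the two together, the tail of the question's function would look something like this (a sketch that keeps the original res.send() placement):

// Sketch: wait for every upload, then send the response to end the function.
}).then(() => {
    const frames = fs.readdirSync(temDir);
    const promises = [];
    for (let index in frames) {
        if (index != 0) {
            promises.push(sourceBucket.upload(temDir + '/' + frames[index], {
                destination: framePath + '/' + frames[index]
            }));
        }
    }
    return Promise.all(promises);
}).then(() => {
    res.send('I am done');
});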
I wrote a gist on this a while back:
// set it up
firebase.storage().ref().constructor.prototype.putFiles = function(files) {
    var ref = this;
    return Promise.all(files.map(function(file) {
        return ref.child(file.name).put(file);
    }));
}

// use it!
firebase.storage().ref().putFiles(files).then(function(metadatas) {
    // Get an array of file metadata
}).catch(function(error) {
    // If any task fails, handle this
});
