I develop for Firebase Cloud Functions. I have a Firebase Realtime Database like this:
myData
-- eqewrwrepere (device token)
---- Lta+sde-fer (Firebase ID)
------ firstvalue: "a"
------ secondvalue: "b"
---- Qrgd+ad-qdda (second Firebase ID)
------ firstvalue: "c"
------ secondvalue: "d"
-- eqwerSAsdqe (another device token)
---- Lta+sde-fer (Firebase ID)
------ firstvalue: "x"
------ secondvalue: "y"
---- Qrgd+ad-qdda (second Firebase ID)
------ firstvalue: "z"
------ secondvalue: "t"
I fetch these data with the code below. It fetches all the data and puts it into an array, and when fetching is done, I loop over this array to find items. I am an iOS developer, so I am a newbie at Node.js. Here is what I want to do:
Get the firstvalue of each database entry.
Make an API request with the firstvalue of each entry.
The API returns an image.
Write the image to a temp directory.
Process this image with the Vision API.
Extract the text.
Update the database.
Send a notification to the device token.
Now I am able to retrieve the database items into my array. But when I make a request inside the for loop, the request runs asynchronously: the loop keeps going, while the request response, the file write, and the Vision processing execute only once. What I want is: in the for loop, take databaseArray[0], make the request, write the file, process it with the Vision API, update the database, and only then move on to databaseArray[1]. I read about Promises on different pages, but I did not understand them.
Thank you.
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
var request = require('request');
var fs = require('fs');
// Get a reference to the Cloud Vision API component
const Vision = require('@google-cloud/vision');
const vision = new Vision.ImageAnnotatorClient();
// Imports the Google Cloud client library
//const {Storage} = require('@google-cloud/storage');
var os = require("os");
var databaseArray = [];
exports.hourly_job = functions.pubsub
.topic('hourly-job')
.onPublish((event) => {
console.log("Hourly Job");
var db = admin.database();
var ref = db.ref("myData")
ref.once("value").then(function(allData) {
allData.forEach(function(deviceToken) {
deviceToken.forEach(function(firebaseIDs) {
var deviceTokenVar = deviceToken.key;
var firebaseIDVar = firebaseIDs.key;
var firstvalue = firebaseIDs.child("firstvalue").val();
var secondvalue = firebaseIDs.child("secondvalue").val();
var items = [deviceTokenVar, firebaseIDVar, firstvalue, secondvalue];
databaseArray.push([...items]);
});
});
return databaseArray;
}).then(function(databasem) {
var i;
for (i = 0; i < databaseArray.length; i++) {
var databaseArrayDeviceToken = databaseArray[i][0];
console.log("DeviceToken: " + databaseArrayDeviceToken);
var databaseArrayFirebaseID = databaseArray[i][1];
console.log("FirebaseID: " + databaseArrayFirebaseID);
var databaseArrayfirstvalue = databaseArray[i][2];
console.log("firstval: " + databaseArrayfirstvalue);
var databaseArraysecondval = databaseArray[i][3];
console.log("Second: " + databaseArraysecondval);
var url = "http://api.blabla" + databaseArrayfirstvalue;
/////////////here make a request, pause loop, process returned image, but how //////////////////////
request.get({
url: url,
encoding: 'binary'
}, function(error, httpResponse, body) {
if (!error && httpResponse.statusCode == 200) {
fs.writeFileSync('/tmp/processed.jpg', body, 'binary');
console.log("file written");
}
});
}
});
return true;
});
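For what it's worth, the core issue in the block above is that request.get returns immediately and only its callback runs later, so the for loop never waits. A minimal sketch of the sequential version, promisifying the same request call by hand (the solution below reaches the same place with request-promise; http://api.blabla is the question's placeholder, not a real API):

const util = require('util');
const requestGet = util.promisify(request.get);

async function processSequentially(rows) {
  for (const [deviceTokenVar, firebaseIDVar, firstvalue] of rows) {
    // Await the download; the loop does not advance until this settles.
    const response = await requestGet({ url: 'http://api.blabla' + firstvalue, encoding: 'binary' });
    if (response.statusCode !== 200) continue;
    // /tmp is the only writable path inside Cloud Functions.
    fs.writeFileSync('/tmp/processed.jpg', response.body, 'binary');
    console.log('file written for', deviceTokenVar, firebaseIDVar);
  }
}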
I found a solution with Mocas's help. Here is the solution: I use async/await in the code, so the for loop now waits for each response. But now I have a different problem. I think the main async function hangs because of the awaits, and then the next hourly trigger runs it again. So the console log shows 15, 16, 17 or more values of 'i' in the for loop. I have 4 elements in the database array, but the console log shows more than that every hour, and the count increases every time. So I guess I should cancel these awaited calls after a timeout, but I don't know how. Here is the code:
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
var request = require('request-promise').defaults({ encoding: null });
var fs = require('fs');
// Get a reference to the Cloud Vision API component
const Vision = require('@google-cloud/vision');
const vision = new Vision.ImageAnnotatorClient();
var os = require("os");
var databaseArray = [];
var uniqueFilename = require('unique-filename');
exports.hourly_job = functions.pubsub
.topic('hourly-job')
.onPublish((event) => {
console.log("Hourly Job");
var db = admin.database();
var ref = db.ref("myData")
ref.once("value").then(function(allData) {
allData.forEach(function(deviceToken) {
deviceToken.forEach(function(firebaseIDs) {
var deviceTokenVar = deviceToken.key;
var firebaseIDVar = firebaseIDs.key;
var firstvalue = firebaseIDs.child("firstvalue").val();
var secondvalue = firebaseIDs.child("secondvalue").val();
var items = [deviceTokenVar, firebaseIDVar, firstvalue, secondvalue];
databaseArray.push([...items]);
//console.log(databaseArray);
//return true;
});
//return true;
});
return databaseArray;
}).then(function (databasem) {
main().catch(console.error);
});
return true;
});
const main = async () => {
var i;
for (i = 0; i < databaseArray.length; i++) {
console.log("Database Arrays " + i + ". elements: ");
var databaseArrayDeviceToken = databaseArray[i][0];
console.log("DeviceToken: " + databaseArrayDeviceToken);
var databaseArrayFirebaseID = databaseArray[i][1];
console.log("FirebaseID: " + databaseArrayFirebaseID);
var databaseArrayfirst = databaseArray[i][2];
console.log("first: " + databaseArrayfirst);
var databaseArraysecond = databaseArray[i][3];
console.log("second: " + databaseArraysecond);
if (databaseArrayfirst != "") {
var apiUrl = "http://api.blabla;
try {
const apiBody = await request.get(apiUrl);
///////////////////////////vison start//////////////////////
const visionResponseBody = await vision.documentTextDetection(apiBody)
var visionResponse = visionResponseBody[0].textAnnotations[0].description;
console.log("Vision response text " + visionResponse );
// ...some logic here about the response...
/////////////////////////////////////////////////
var getdatabasevar = await admin.database().ref("myData/" + databaseArrayDeviceToken + "/" + databaseArrayFirebaseID);
await getdatabasevar.update({
"firstvalue": visionResponse
});
/////////////////////////////////////////////////
var getanotgerdatabasevar = await admin.database().ref("myData/" + databaseArrayDeviceToken + "/" + databaseArrayFirebaseID + "/" + "secondvalue");
await getanotgerdatabasevar.once("value")
.then(function(snapshot) {
// ...some logic...
//send notification
});
} catch (error) {
console.error(error);
}
///////////////////////////vison end//////////////////////
}
};
return true;
};
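On the timeout idea: one option (a hand-rolled sketch, not a library feature; the withTimeout name and the 30-second value are my own) is to race each awaited call against a timer, so a hung request rejects into the existing catch block instead of stalling the loop forever:

// Rejects if the given promise does not settle within ms milliseconds.
// Note: Promise.prototype.finally needs Node 10+.
function withTimeout(promise, ms) {
  let timer;
  const timeout = new Promise((resolve, reject) => {
    timer = setTimeout(() => reject(new Error('timed out after ' + ms + ' ms')), ms);
  });
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}

// Inside the loop:
const apiBody = await withTimeout(request.get(apiUrl), 30000);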
I'm working on a small project to convert a large XML into several formatted PDF documents. The large XML contains multiple smaller XMLs of similar format, so I'm using a single HTML template for printing all the documents. After producing all the PDF documents I also need to produce a metadata file with some basic info on each document that was printed.
I thought the fan-out/fan-in pattern of Durable Functions was a perfect fit for my use case. I'm working with Node.js. I set up all my code and it seems to be working fine locally. The orchestrator function looks like this:
const df = require("durable-functions");
module.exports = df.orchestrator(function* (context) {
var xmldata = yield context.df.callActivity("DurablegetblobJS1","");
var tasks = [];
for (const file of xmldata) {
tasks.push(context.df.callActivity("Durableactivityjs2", file));
}
const outputs = yield context.df.Task.all(tasks);
var finalout = "";
for (const out of outputs) {
console.log('I am done1 :' + out );
finalout = finalout + out;
}
return finalout;
});
DurablegetblobJS1: fetches the entire XML and splits it into multiple smaller XMLs (one per document).
Durableactivityjs2: fetches the HTML template, extracts the different values from the individual XMLs, applies them to the HTML, and finally prints the PDF into Azure Storage. It returns the name of the PDF document that was printed, for creation of the metadata file. The code for this is below.
var fs = require('fs');
var xml2js = require('xml2js');
var html_to_pdf = require('html-pdf-node');
var parser = new xml2js.Parser();
module.exports = async function (context) {
//console.log("Hello from activity :")
var xmldict = {}
var xmltext = context.bindings.name;
//Extract the nodes and attributes
metadata(xmltext,xmldict);
report(xmltext,xmldict);
context.log(xmldict)
const { BlobServiceClient } = require("@azure/storage-blob");
// Load the .env file if it exists
require("dotenv").config();
const AZURE_STORAGE_CONNECTION_STRING = process.env.STORAGE_CONNECTION_STRING || "";
const blobServiceClient = BlobServiceClient.fromConnectionString(
AZURE_STORAGE_CONNECTION_STRING
);
var containerClient = blobServiceClient.getContainerClient('test');
var blobname = 'comb_template.html';
var blockBlobClient = containerClient.getBlockBlobClient(blobname);
var downloadBlockBlobResponse = await blockBlobClient.download(0);
var html_template = await streamToText(downloadBlockBlobResponse.readableStreamBody);
let options = { format: 'A4'};
let file = { content: html_template};
const x = await writepdf1(file, options,blobServiceClient,xmldict);
console.log("Written Blob PDF");
return x;
};
async function writepdf1(file, options,blobServiceClient,xmldict){
const pdfBuffer = await html_to_pdf.generatePdf(file, options);
const containerClient = blobServiceClient.getContainerClient('test2');
const targetblob = xmldict['OU'] + '/' + xmldict['ReportName'] + '/' + xmldict['OU'] + '_' + xmldict['ReportName'] + '_' + xmldict['DocumentID'] + '_' + '.pdf';
console.log('Blob name :' + targetblob);
const blockBlobClient_t = containerClient.getBlockBlobClient(targetblob);
const uploadBlobResponse = await blockBlobClient_t.upload(pdfBuffer, pdfBuffer.length);
return targetblob;
}
async function streamToText(readable) {
readable.setEncoding('utf8');
let data = '';
for await (const chunk of readable) {
data += chunk;
}
return data;
}
function metadata(xmltext,xmldict){
parser.parseString(xmltext, function (err, result) {
var test1 = result['HPDPSMsg']['DocumentRequest'][0]['MetaData'][0];
Object.entries(test1).forEach(([key, value]) => {
xmldict[key] = value[0];
});
});
}
function report(xmltext,xmldict){
parser.parseString(xmltext, function (err, result) {
var test2 = result['HPDPSMsg']['DocumentRequest'][0]['Report'][0]['$'];
Object.entries(test2).forEach(([key, value]) => {
xmldict[key] = value;
});
});
}
However, when I deploy the entire project to an Azure Premium Functions plan (EP1, Windows), I see errors in App Insights when I try to execute my function, and the PDFs are never generated:
Activity function 'Durableactivityjs2' failed: Could not find browser
revision 818858. Run "PUPPETEER_PRODUCT=firefox npm install" or
"PUPPETEER_PRODUCT=firefox yarn install" to download a supported
Firefox browser binary
I'm a bit clueless about how I'm supposed to resolve this. Any help or suggestions would be appreciated.
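The error means puppeteer's bundled Chromium (normally downloaded into node_modules at npm install time) was not present in what got deployed. As a hedged diagnostic, you could drop something like this into the activity to confirm, using puppeteer's own API (puppeteer is the dependency html-pdf-node drives under the hood, so it should be resolvable):

const puppeteer = require('puppeteer');
const fs = require('fs');
// executablePath() is where puppeteer expects its downloaded Chromium to live.
context.log('Chromium expected at:', puppeteer.executablePath());
context.log('Exists:', fs.existsSync(puppeteer.executablePath()));

If that logs false, the usual remedy is to make sure npm install runs (and is allowed to download Chromium) as part of the deployment to the Function App, rather than deploying a node_modules folder built on another platform.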
I am trying to get some CSV data from a Google Sheet and store it in an Apify dataset.
const Apify = require('apify');
const request = require('request-promise');
Apify.main(async () => {
var URL = "https://docs.google.com/spreadsheets/d/1-auXklWqHQ-jj6AXymMPa7FLtP1eYGJGF3rprxuWitk/gviz/tq?tqx=out:csv";
const html = await request(URL);
console.log('My output:');
console.log(html);
await Apify.setValue('OUTPUT', html);
const namedDataset = await Apify.openDataset();
await namedDataset.pushData(html);
});
Here is the error message:
2020-01-01T16:43:21.501Z My output:
2020-01-01T16:43:21.510Z "city","country"
2020-01-01T16:43:21.512Z "Berlin ","Germany"
2020-01-01T16:43:21.513Z "Los Angeles","United States"
2020-01-01T16:43:21.514Z "Melbourne","Australia"
2020-01-01T16:43:21.516Z "Sydney","Australia"
2020-01-01T16:43:21.517Z "London","United Kingdom"
2020-01-01T16:43:21.519Z "New York City","United States"
2020-01-01T16:43:21.614Z ERROR: The function passed to Apify.main() threw an exception: (error details: type=invalid-parameter)
2020-01-01T16:43:21.616Z ApifyClientError: Parameter "data" of type Array | Object must be provided
2020-01-01T16:43:21.617Z at exports.checkParamOrThrow (/usr/src/app/node_modules/apify-client/build/utils.js:222:15)
2020-01-01T16:43:21.619Z at Dataset.pushData (/usr/src/app/node_modules/apify/build/dataset.js:222:34)
2020-01-01T16:43:21.620Z at Apify.main (/usr/src/app/main.js:16:22)
2020-01-01T16:43:21.621Z at process._tickCallback (internal/process/next_tick.js:68:7)
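The trace names the cause: dataset.pushData() requires an object or an array of objects, while request() resolved to the raw CSV body as a string. A minimal sketch of the difference (wrapping the string is the smallest fix; parsing it into row objects, as the answers below do, is the useful one):

await namedDataset.pushData(html);          // throws: "data" of type Array | Object must be provided
await namedDataset.pushData({ csv: html }); // accepted: the raw CSV is wrapped in an object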
A more elegant solution would be using our Google Sheets actor.
const Apify = require('apify');
Apify.main(async () => {
const spreadsheetId = '1-auXklWqHQ-jj6AXymMPa7FLtP1eYGJGF3rprxuWitk';
const sheetsActorInput = {
mode: 'read',
spreadsheetId,
};
const data = await Apify.call('lukaskrivka/google-sheets', sheetsActorInput);
const namedDataset = await Apify.openDataset('my-dataset');
await namedDataset.pushData(data);
});
The only disadvantage (also an advantage in some sense) is that you need to authorize on your first run, but that is really simple.
I was able to use this somewhat hacky approach. I am sure there is a more modern, elegant approach:
const Apify = require('apify');
const request = require('request-promise');
function csvJSON(csv) { //https://stackoverflow.com/a/27979069/2330272
var lines = csv.split("\n");
var result = [];
// NOTE: If your columns contain commas in their values, you'll need
// to deal with those before doing the next step
// (you might convert them to &&& or something, then covert them back later)
// jsfiddle showing the issue https://jsfiddle.net/
var headers = lines[0].split(",");
for (var i = 1; i < lines.length; i++) {
var obj = {};
var currentline = lines[i].split(",");
for (var j = 0; j < headers.length; j++) {
obj[headers[j]] = currentline[j];
}
result.push(obj);
}
return JSON.stringify(result); //JSON
}
Apify.main(async () => {
var URL = "https://docs.google.com/spreadsheets/d/1-auXklWqHQ-jj6AXymMPa7FLtP1eYGJGF3rprxuWitk/gviz/tq?tqx=out:csv"; //test
const html = await request(URL);
const urls = csvJSON(html.replace(/\"/g, "")); // remove quotes from csv data
console.log('My output:');
const namedDataset = await Apify.openDataset();
await namedDataset.pushData(JSON.parse(urls));
});
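A note on the comma caveat in the snippet above: splitting on commas by hand breaks as soon as a quoted value contains one. A sketch of the same flow with a real CSV parser, assuming the csv-parse package (v5-style import) is added to the project:

const { parse } = require('csv-parse/sync');

Apify.main(async () => {
  const csv = await request(URL); // same spreadsheet URL as above
  // columns: true keys each row by the header line; quoted commas are handled correctly.
  const records = parse(csv, { columns: true, skip_empty_lines: true });
  const namedDataset = await Apify.openDataset();
  await namedDataset.pushData(records);
});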
I have a cloud function triggered by an App Engine cron job, which publishes to a Google Cloud Pub/Sub topic every hour to run my Firebase Cloud Function. I fetch my Firebase Realtime Database once and loop over every value. The problem is that my main async function is called multiple times. I use an i variable for the loop, and my console logs more i values than my database length: if my database length is 4, the for loop runs 8 or 15 or 23 times, and this value changes randomly. I want to loop over each database value, fetch some data from the internet, and when that is done, go on to the next value. Here is the code:
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
var request = require('request-promise').defaults({ encoding: null });
var fs = require('fs');
// Get a reference to the Cloud Vision API component
const Vision = require('@google-cloud/vision');
const vision = new Vision.ImageAnnotatorClient();
var os = require("os");
var databaseArray = [];
var uniqueFilename = require('unique-filename');
exports.hourly_job = functions.pubsub
.topic('hourly-job')
.onPublish((event) => {
console.log("Hourly Job");
var db = admin.database();
var ref = db.ref("myData")
ref.once("value").then(function(allData) {
allData.forEach(function(deviceToken) {
deviceToken.forEach(function(firebaseIDs) {
var deviceTokenVar = deviceToken.key;
var firebaseIDVar = firebaseIDs.key;
var firstvalue = firebaseIDs.child("firstvalue").val();
var secondvalue = firebaseIDs.child("secondvalue").val();
var items = [deviceTokenVar, firebaseIDVar, firstvalue, secondvalue];
databaseArray.push(items);
});
});
return databaseArray;
}).then(function (databasem) {
main().catch(console.error);
});
return true;
});
const main = async () => {
var i;
for (i = 0; i < databaseArray.length; i++) {
console.log("Database Arrays " + i + ". elements: ");
if (databaseArrayfirst != "") {
var apiUrl = "http://api.blabla;
try {
const apiBody = await request.get(apiUrl);
///////////////////////////vison start//////////////////////
const visionResponseBody = await vision.documentTextDetection(apiBody)
var visionResponse = visionResponseBody[0].textAnnotations[0].description;
// ...some logic here about the response...
/////////////////////////////////////////////////
var getdatabasevar = await admin.database().ref("myData/" + databaseArrayDeviceToken + "/" + databaseArrayFirebaseID);
await getdatabasevar.update({
"firstvalue": visionResponse
});
/////////////////////////////////////////////////
} catch (error) {
console.error(error);
}
///////////////////////////vison end//////////////////////
}
};
return true;
};
Thank you.
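Two details in this code would produce exactly this symptom. First, the onPublish handler returns true immediately instead of returning the promise chain, so Cloud Functions considers the invocation finished and may recycle the instance while main() is still running. Second, databaseArray is a module-level global that is never cleared, so a reused instance appends the same rows again on every hourly run, which is why i grows past 4. A hedged sketch of the same flow with both fixed (same placeholder paths as above):

exports.hourly_job = functions.pubsub.topic('hourly-job').onPublish((event) => {
  console.log("Hourly Job");
  return admin.database().ref("myData").once("value")
    .then(function(allData) {
      var rows = []; // local, so a reused instance starts from an empty array
      allData.forEach(function(deviceToken) {
        deviceToken.forEach(function(firebaseIDs) {
          rows.push([deviceToken.key, firebaseIDs.key,
            firebaseIDs.child("firstvalue").val(),
            firebaseIDs.child("secondvalue").val()]);
        });
      });
      return rows;
    })
    .then(function(rows) {
      // Returning the promise keeps the function alive until main finishes.
      return main(rows);
    });
});

main would then take rows as a parameter and loop over it instead of the global array.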
My script, written in Node.js, connects to Firebase, checks my Firebase database, and even successfully sends notifications when results from my database return true... However, it only works when I run it from my local machine; when I deploy it to Firebase it does not work. Could someone please advise? Thank you.
I hate asking on here because I'm a newbie, but I have spent hours tonight trying to find an answer...
INDEX.JS
// Firebase Functions
const functions = require('firebase-functions');
var admin = require("firebase-admin");
// Default admin firebase configuration
admin.initializeApp(functions.config().firebase);
// var serviceAccount = require("xxxxxx-80xxxxd-firebase-adminsdk- xxxxxxx.json");
var moment = require('moment');
var FCM = require('fcm-push');
var dateTime = require('node-datetime');
var serverKey = 'xxxxxxxxxxxxxpSELZBjQYwpZgmxxxxxxxxxxx';
var fcm = new FCM(serverKey);
//Initial function call:
exports.CheckDates = functions.https.onRequest((req, response) => {
// Get a database reference to our posts
var db = admin.database();
var ref = db.ref("records");
var userToken = '';
var itemExpires = '';
var itemName = '';
var reminded = '';
var itemCount = 0;
var counter = 1;
var itemFoundCount = 0;
var dt = dateTime.create();
var formatted = dt.format('m-d-Y');
ref.once("value", function (recordsSnapshot) {
recordsSnapshot.forEach(function (recordsSnapshot) {
var mainKey = recordsSnapshot.key;
recordsSnapshot.forEach(function (child) {
var key = child.key;
var value = child.val();
if (key == 'Account') {
userToken = value.userToken;
}
if (key == 'Items') {
recordsSnapshot.child("Items").forEach(function (itemsSnapshot) {
counter++;
if (itemFoundCount === 0) {
itemFoundCount = itemsSnapshot.numChildren();
}
var itemsChildkey = itemsSnapshot.key;
var itemsChildvalue = itemsSnapshot.val();
itemExpires = itemsChildvalue.itemExpires;
itemName = itemsChildvalue.itemName;
reminded = itemsChildvalue.reminded;
moment().format('YYYY-MM-DD');
var currentDate = moment();
var otherTime = moment(reminded);
if (typeof reminded !== 'undefined') {
if (currentDate.diff(otherTime, 'days') >= 30) {
if (currentDate.diff(itemExpires, 'days') <= 90) {
itemCount++;
console.log("Expire date is less than " +
currentDate + " by 90 days = " + (currentDate.diff(otherTime, 'days') <=
90));
db.ref("records/" + mainKey + "/Items/" +
itemsChildkey + '/reminded').set(formatted);
}
}
} else {
itemCount++;
db.ref("records/" + mainKey + "/Items/" + itemsChildkey +
`enter code here`'/reminded').set(formatted);
}
if (counter == itemFoundCount && itemCount > 0) {
console.log(itemFoundCount);
var message = {
to: userToken, // required: fill with device token or topics
notification: {
title: 'Item Expire Notification',
body: itemCount + ' is about to expire.'
}
};
//callback style
fcm.send(message, function (err, response) {
if (err) {
console.log("Something has gone wrong!");
} else {
console.log("Successfully sent with response: ",
response);
}
});
itemCount = 0;
itemFoundCount = 0;
counter = 1;
}
});
}
});
});
});
response.send(200, "ok");
}) // END exports.CheckDates
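A likely reason this behaves differently when deployed: response.send() ends an HTTPS Cloud Function invocation, and here it runs before the asynchronous ref.once() callback has finished, so the deployed instance can be terminated mid-work (locally nothing kills the process, so it completes). A sketch of the shape that avoids this, also using the non-deprecated Express 4 send signature (res.send(200, "ok") is the old Express 3 form and itself produces a deprecation warning):

ref.once("value").then(function (recordsSnapshot) {
  // ...all of the notification logic above...
  response.status(200).send("ok");
}).catch(function (err) {
  console.error(err);
  response.status(500).send(err.toString());
});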
Obviously, I removed the exports.CheckDates lines when I ran it locally, but without the exports it wasn't showing up at all in Firebase's console. It returns warnings both locally and in the Firebase console, but it works on one and not the other.
Please disregard this question. The script adds the date when a notification was last sent to the item's "reminded" key in the database... this prevents notifications from going out every day for the same thing.
It ran on my local machine and did its job, but I couldn't figure out why it wouldn't run again... well, ha! At least we know that part works.
I am using Firebase for my Android application and am performing full-text search using Algolia, as suggested by all the blogs. I have successfully developed the script and it's functioning properly. Now I want to host the script so it runs 24/7. As I have an Azure account, how do I go about uploading the script? I have tried uploading the following as a Function and as a Web App, but have been unsuccessful.
PS: I have tried Heroku but wasn't satisfied.
The Script.
var http = require('http');
var port = process.env.port || 1337;
http.createServer(function (req, res) {
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Hello World\n');
}).listen(port);
var dotenv = require('dotenv');
var firebaseAdmin = require("firebase-admin");
var algoliasearch = require('algoliasearch');
var algoliasearchHelper = require('algoliasearch-helper');
// load values from the .env file in this directory into process.env
dotenv.load();
// configure firebase
var serviceAccount = require("./serviceAccountKey.json");
firebaseAdmin.initializeApp({
credential: firebaseAdmin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL
});
var database = firebaseAdmin.database();
// configure algolia
var algolia = algoliasearch(process.env.ALGOLIA_APP_ID, process.env.ALGOLIA_API_KEY);
var index = algolia.initIndex('books');
var contactsRef = database.ref("/BookFair");
contactsRef.on('child_added', addOrUpdateIndexRecord);
contactsRef.on('child_changed', addOrUpdateIndexRecord);
contactsRef.on('child_removed', deleteIndexRecord);
function addOrUpdateIndexRecord(dataSnapshot) {
// Get Firebase object
var firebaseObject = dataSnapshot.val();
// Specify Algolia's objectID using the Firebase object key
firebaseObject.objectID = dataSnapshot.key;
// Add or update object
index.saveObject(firebaseObject, function(err, content) {
if (err) {
throw err;
}
console.log('Firebase object indexed in Algolia', firebaseObject.objectID);
});
}
function deleteIndexRecord(dataSnapshot) {
// Get Algolia's objectID from the Firebase object key
var objectID = dataSnapshot.key;
// Remove the object from Algolia
index.deleteObject(objectID, function(err, content) {
if (err) {
throw err;
}
console.log('Firebase object deleted from Algolia', objectID);
});
}
var queries = database.ref("/queries");
queries.on('child_added', addOrUpdateIndexRecordN);
function addOrUpdateIndexRecordN(dataSnapshot) {
// Get Firebase object
var firebaseObject = dataSnapshot.val();
// Specify Algolia's objectID using the Firebase object key
firebaseObject.objectID = dataSnapshot.key;
// Add or update object
var collegeName = "";
var query_ID_LOLWA= "";
var year="";
var query = "";
var counter = 0;
for(var i in firebaseObject){
var c = firebaseObject.charAt(i);
if(c=='/'){
counter = counter + 1;
continue;
}
else{
if(counter==2)
collegeName = collegeName + c;
else if(counter == 3)
year = year+c;
else if(counter == 1)
query_ID_LOLWA = query_ID_LOLWA + c;
else
query = query +c;
}
}
console.log(collegeName);
console.log(year);
console.log(query_ID_LOLWA);
console.log(query);
const query_final = query_ID_LOLWA;
var helper = algoliasearchHelper(algoliasearch("****", "****"), 'books', {
facets: ['collegeName', 'year','priority']});
helper.on('result', function(data,query_ID_LOLWA){
data.getFacetValues('priority',{sortBy: ['count:desc']});
console.log(data.hits);
var path_query = "/queries_answers/"+query_final;
path_query = path_query.toString();
console.log(path_query);
if(data.hits.length==0){
console.log("No results");
database.ref(path_query).push(-1);
}
else if(data.hits.length>1){
var ID = 1;
var counter = -1;
var length = data.hits.length-1;
for(var h in data.hits){
counter = counter + 1;
if( (counter%5 == 0) && (counter != 0)){
ID = ID + 1;
}
database.ref(path_query+"/"+ID).push(data.hits[h].uuid);
}
database.ref(path_query+"/totalResults").push(data.hits.length);
}
else{
database.ref(path_query+"/totalResults").push(data.hits.length);
for(var h in data.hits)
database.ref(path_query+"/1").push(data.hits[h].uuid);
}
});
helper.addFacetRefinement('collegeName', collegeName);
helper.addFacetRefinement('year',year);
helper.setQuery(query);
helper.search();
/*index.search(firebaseObject, function(err, content) {
if (err) {
console.error(err);
return;
}
console.log(content.hits);
for (var h in content.hits) {
console.log('Hit(' + content.hits[h].objectID + '): ' + content.hits[h].uuid);
}
database.ref("/query_result").push(content.hits);
});*/
}
Without more details than "but have been unsuccessful", the only advice one can give you is to follow the usual steps to get a timer-based Azure Function deployed.
The simplest way is to use the Azure Portal:
Login to your Microsoft Azure account
Create a Function App to host your function
Add a Timer-triggered Function
Select the TimerTrigger-Javascript template to get started
At this point, you'll have a function that runs every minute. You can check the logs to confirm it is working.
You now want to configure its frequency:
Update the function's Timer Schedule (in the Integrate tab) to set how frequently the function should run
Finally, replace the template's code with your own.
You can find a detailed tutorial here with explanations on how to achieve each of these steps.
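For the Timer Schedule step, the schedule is an NCRONTAB expression with six fields, seconds first. As a sketch, a function.json binding like this (the binding name is illustrative) runs the function at the top of every hour:

{
  "bindings": [
    {
      "name": "myTimer",
      "type": "timerTrigger",
      "direction": "in",
      "schedule": "0 0 * * * *"
    }
  ]
}

One caveat: the script above registers long-lived listeners (contactsRef.on(...)), which do not fit a timer model well; on a timer, each run would need to do its indexing work and exit.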