Creating an SMS group forwarder in Twilio with NodeJS

Here's what I'm trying to accomplish:
Set a list of names and numbers (my "group")
When a text message is sent to the Twilio number, forward it on to every member in the group
At a high level, the idea seems straightforward enough. My programming / syntax skills are rusty, though, and I'd love some help.
I'm using Twilio Functions, and I've been able to send and receive messages successfully. Now I'm stuck on how to iterate through a group.
Here's what I've written so far:
var groupmembers = {
  jonathan: {
    name: 'Jonathan',
    number: '+0000000000'
  },
  joshua: {
    name: 'Joshua',
    number: '+1110000000'
  }
};
exports.handler = function(context, event, callback) {
  // Set some values for later use
  this.fromNumber = event.From;
  this.body = event.Body || '';

  let twiml = new Twilio.twiml.MessagingResponse();

  groupmembers.forEach(function(member) {
    // Skip sending if it's the same number
    if (member.number === this.fromNumber) {
      return;
    }
    // Otherwise, let's send a message!
    twiml.message("Hello World").to(member.number);
    callback(null, twiml);
  });
};
The issues I believe I have:
Not being sure how to properly set my array or "dictionary"
Not knowing the proper syntax for passing the "to" variable to the message
Not knowing the proper syntax for doing a loop in NodeJS (the Functions console is telling me that 'groupmembers.forEach is not a function')
Thank you for any and all feedback and for pointing me in the right direction!

The mistake you have is pretty simple: groupmembers is an object, but you want an array.
You may want something akin to this instead:
var groupmembers = [
  {
    name: 'Jonathan',
    number: '+0000000000'
  },
  {
    name: 'Joshua',
    number: '+1110000000'
  }
];
Apart from that, it looks okay to me.
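If you would rather keep the object keyed by name from your question, you could also loop over its values instead of converting it to an array; a small sketch:
Object.keys(groupmembers).forEach(function(key) {
  var member = groupmembers[key];
  // member.name and member.number are available here, same as in the array version
});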

With more searching and a point in the right direction from Slava, I was able to figure it out! Here's the complete code:
/**
 * Represents a search through an array.
 * @function search
 * @param {Array} array - The array you want to search through
 * @param {string} key - The key to search for
 * @param {string} [prop] - The property name to find it in
 * Props: https://stackoverflow.com/a/33097318/315818
 */
function search(array, key, prop) {
  // Optional, but fall back to 'name' if no property is selected
  prop = (typeof prop === 'undefined') ? 'name' : prop;
  for (var i = 0; i < array.length; i++) {
    if (array[i][prop] === key) {
      return array[i];
    }
  }
}
var groupmembers = [
  {
    name: 'Jonathan',
    number: '+000000000'
  },
  {
    name: 'Joshua',
    number: '+111111111'
  }
];
exports.handler = function(context, event, callback) {
  let twiml = new Twilio.twiml.MessagingResponse();
  // Search for the group member that matches the sender number
  let sender = search(groupmembers, event.From, 'number');
  // Now, loop through each of the group members
  groupmembers.forEach(function(member) {
    // Skip sending if it's the same number
    if (member.number === event.From) {
      return;
    }
    // Forward the message to the group member, prefixed with the sender's name
    twiml.message(`${sender.name}: ${event.Body}`, {
      to: member.number
    });
  });
  // Loop ended
  callback(null, twiml);
};
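As a side note, the hand-rolled search() helper could also be replaced by the built-in Array.prototype.find(), which does the same lookup:
let sender = groupmembers.find(function(member) {
  return member.number === event.From;
});
// sender will be undefined if the incoming number isn't in the group,
// so guard before using sender.name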

Related

GraphQL - Apollo Client 3 | Pagination

I'm new to Apollo Client and I'm trying to wrap my head around field policies to implement pagination.
Basically, I have a category page where I perform a query based on the slug I receive from the page's URL. It returns a list of IDs (which I pass down as props to the product component), for example:
query getProductId($slug: String!) {
  slug(where: {slug: $slug}) {
    products {
      Id
    }
  }
}
From this query I get an array of all the objects containing the IDs of the products.
I can pass a first: and an after: {id: } argument to the products field, and that way I can decide after which product ID to query. For example:
query getProductId($slug: String!) {
  slug(where: {slug: $slug}) {
    products(first: 4, after: {id: 19}) {
      Id
    }
  }
}
I know that in my ApolloClient instance I can define a field policy for the cache like this:
const apollo = new ApolloClient({
  //...
  cache: new InMemoryCache({
    typePolicies: {
      Query: {
        fields: {
          products: offsetLimitPagination(["<* keyArgs>"]),
        },
      },
    },
  })
})
This is just one helper function I picked at random, but in my case I think a cursor-based strategy is better, since I could use the last ID in the list as the cursor.
From here I'm completely lost; the more I read the docs, the more confused I get. This is the cursor-based field policy example I'm trying to adapt:
{
  keyArgs: ["first"],
  merge(existing, incoming, { args: { cursor }, readField }) {
    const merged = existing ? existing.slice(0) : [];
    let offset = offsetFromCursor(merged, cursor, readField);
    // If we couldn't find the cursor, default to appending to
    // the end of the list, so we don't lose any data.
    if (offset < 0) offset = merged.length;
    // Now that we have a reliable offset, the rest of this logic
    // is the same as in offsetLimitPagination.
    for (let i = 0; i < incoming.length; ++i) {
      merged[offset + i] = incoming[i];
    }
    return merged;
  },
  // If you always want to return the whole list, you can omit
  // this read function.
  // read(
  //   existing,
  //   { args: { cursor, limit = existing.length }, readField }
  // ) {
  //   if (existing) {
  //     let offset = offsetFromCursor(existing, cursor, readField);
  //     // If we couldn't find the cursor, default to reading the
  //     // entire list.
  //     if (offset < 0) offset = 0;
  //     return existing.slice(offset, offset + limit);
  //   }
  // },
}
function offsetFromCursor(items, cursor, readField) {
  // Search from the back of the list because the cursor we're
  // looking for is typically the ID of the last item.
  for (let i = items.length - 1; i >= 0; --i) {
    const item = items[i];
    // Using readField works for both non-normalized objects
    // (returning item.id) and normalized references (returning
    // the id field from the referenced entity object), so it's
    // a good idea to use readField when you're not sure what
    // kind of elements you're dealing with.
    if (readField("id", item) === cursor) {
      // Add one because the cursor identifies the item just
      // before the first item in the page we care about.
      return i + 1;
    }
  }
  // Report that the cursor could not be found.
  return -1;
}
Let's suppose I use this as a field policy for the list of products: how do I go on from here? I'm completely lost.
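For reference, this is roughly how I imagine wiring it up and asking for the next page; I'm not sure it's right. productsFieldPolicy stands for the policy object above, and I would have to extend my query to accept $first and $after variables:
import { InMemoryCache } from "@apollo/client";

const cache = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        // the cursor-based policy object, like the docs snippet above
        products: productsFieldPolicy,
      },
    },
  },
});

// In the component, once the first page has loaded, request the next page
// using the last ID in the current list as the cursor:
const lastId = data.slug.products[data.slug.products.length - 1].Id;
fetchMore({ variables: { first: 4, after: { id: lastId } } });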

Global search is not working in DataTable when server side is enabled in NodeJs

I am trying to get form data from a MongoDB server and show it in a DataTable using Node.js. I have successfully implemented server-side pagination using the npm paginate-v2 plugin, but now searching is not working. Below is the code for my Node.js and JavaScript files. Please help me with the searching.
Node.js code
app.get('/gettable', (req, res) => {
  console.log(req.query);
  user.paginate({}, {
    page: Math.ceil(req.query.start / req.query.length) + 1,
    limit: parseInt(req.query.length)
  }, function(err, result) {
    var mytable = {
      draw: req.query.draw,
      recordsTotal: 0,
      recordsFiltered: 0,
      data: [],
    };
    if (err) {
      console.log(err);
      res.json(mytable);
    } else {
      if (result.totalDocs > 0) {
        mytable.recordsTotal = result.totalDocs;
        mytable.recordsFiltered = result.totalDocs;
        for (var key in result.docs) {
          mytable.data.push([
            result.docs[key]['name'],
            result.docs[key]['lastname'],
            result.docs[key]['email'],
            result.docs[key]['pass'],
            result.docs[key]['birthdate'],
            result.docs[key]['zipcode'],
            result.docs[key]['phonenumber'],
          ]);
        }
      }
      res.json(mytable);
    }
  });
});
DisplayTable.js code
$(document).ready(function() {
  $('#example').DataTable({
    "processing": true,
    "serverSide": true,
    "ajax": "http://localhost:8080/gettable"
  });
});
As I said, I am successfully getting data from the server and showing it in the DataTable with server-side pagination, but searching is not working. Whatever I type in the search box does reach the server in the search parameter, like this:
search: { value: 'svs', regex: 'false' },
_: '1548653540009' }
But it is not applied in the DataTable to filter the columns.
As I said in the comment, search will not work out of the box when server side is enabled in DataTables, because the whole functionality (sorting, paging, limiting, and searching) now has to be implemented on the server. DataTables only sends the parameters needed for each feature. The following code is just for reference; it is not tested, so you may get errors. Feel free to edit it if you hit errors so that it can help future readers.
app.get('/gettable', (req, res) => {
  console.log(req.query);
  // array of columns that you want to show in the table
  var columns = ['name', 'lastname', 'email', 'pass', 'birthdate', 'zipcode', 'phonenumber'];
  var query = {};
  // check if global search is enabled and its value is defined
  if (typeof req.query.search !== 'undefined' && req.query.search.value != '') {
    // get the global search value
    var text = req.query.search.value;
    var searchConditions = [];
    // iterate over each column definition to check whether search is enabled
    // for that particular column or not. You can enable/disable search per
    // column in the DataTable initialization.
    for (var i = 0; i < req.query.columns.length; i++) {
      var requestColumn = req.query.columns[i];
      var column = columns[requestColumn.data];
      // if search is enabled for that particular field then add a condition for it
      if (requestColumn.searchable == 'true') {
        var condition = {};
        condition[column] = { $regex: text };
        searchConditions.push(condition);
      }
    }
    // the text only needs to match one of the searchable columns
    if (searchConditions.length > 0) {
      query = { $or: searchConditions };
    }
  }
  user.paginate(query, {
    page: Math.ceil(req.query.start / req.query.length) + 1,
    limit: parseInt(req.query.length)
  }, function(err, result) {
    var mytable = {
      draw: req.query.draw,
      recordsTotal: 0,
      recordsFiltered: 0,
      data: [],
    };
    if (err) {
      console.log(err);
      res.json(mytable);
    } else {
      if (result.totalDocs > 0) {
        mytable.recordsTotal = result.totalDocs;
        mytable.recordsFiltered = result.totalDocs;
        for (var key in result.docs) {
          var data = [];
          // push the fields in the same order as the columns array
          columns.forEach(function(field) {
            data.push(result.docs[key][field]);
          });
          mytable.data.push(data);
        }
      }
      res.json(mytable);
    }
  });
});
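One more point to watch: DataTables expects recordsTotal to be the count of all rows and recordsFiltered to be the count after filtering. result.totalDocs reflects the filtered query, so the overall count would need to be fetched separately; an untested sketch, assuming user is a Mongoose model on a version where countDocuments accepts a callback:
// inside the success branch, instead of responding immediately:
user.countDocuments({}, function(countErr, totalCount) {
  if (!countErr) {
    mytable.recordsTotal = totalCount;          // all rows, ignoring the search
    mytable.recordsFiltered = result.totalDocs; // rows matching the search query
  }
  res.json(mytable);
});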

array manipulation in node js and lodash

I have two arrays
typeArr = [1010111, 23342344]
infoArr = {'name': 'jon', 'age': 25}
I am expecting the following:
[{'name': 'jon', 'age': 25, 'type': 1010111, 'default': 'ok'}, {'name': 'jon', 'age': 25, 'type': 23342344, 'default': 'nok'}]
Code:
function updaterecord(infoArr, type) {
  infoArr.type = type;
  response = calculate(age);
  if (response)
    infoArr.default = 'ok';
  else
    infoArr.default = 'nok';
  return infoArr;
}

function createRecord(infoArr, typeArr) {
  var data = _.map(typeArr, type => {
    return updaterecord(infoArr, type);
  });
  return (data);
}
var myData = createRecord(infoArr,typeArr);
I am getting
[{'name': 'jon', 'age': 25, 'type': 23342344, 'default': 'nok'}, {'name': 'jon', 'age': 25, 'type': 23342344, 'default': 'nok'}]
For some reason the last record overwrites the previous one. I have tried generating the array using an index variable, but I'm not sure what's wrong; it keeps overriding the previous item.
How can I resolve this?
You are passing the entire infoArr array to your updaterecord() function, but updaterecord() looks like it's expecting a single object. As a result it is adding those properties to the array itself rather than to individual members of the array.
It's not really clear what is supposed to happen, because typeArr has two elements and infoArr has one. Do you want to add another element to infoArr, or should infoArr have the same number of elements as typeArr?
Assuming it should have the same number, you would need to use the index that _.map gives you to send one element of infoArr at a time:
function createRecord(infoArr, typeArr) {
  var data = _.map(typeArr, (type, i) => {
    // use infoArr[i] to send one element
    return updaterecord(infoArr[i], type);
  });
  return (data);
}
Edit:
I'm not sure how you are calculating default, since it differs in your expected output even though it's based on a single object. To get an array of objects based on infoArr, you need to copy the object and add the additional properties you want. Object.assign() is good for this:
let typeArr = [1010111, 23342344]
let infoArr = {'name': 'jon', 'age': 25}

function updaterecord(infoArr, type) {
  var obj = Object.assign({}, infoArr)
  return Object.assign(obj, {
    type: type,
    default: infoArr.age > 25 ? 'ok' : 'nok' // or however you're figuring this out
  })
}

function createRecord(infoArr, typeArr) {
  return _.map(typeArr, type => updaterecord(infoArr, type));
}
Result:
[ { name: 'jon', age: 25, type: 1010111, default: 'nok' },
{ name: 'jon', age: 25, type: 23342344, default: 'nok' } ]
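If your environment supports object spread syntax, the copy-and-extend step can be written a bit more compactly; a small sketch of the same idea:
function updaterecord(infoArr, type) {
  // the spread copies infoArr's properties into a brand new object,
  // so each record stays independent of the others
  return {
    ...infoArr,
    type: type,
    default: infoArr.age > 25 ? 'ok' : 'nok'
  };
}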

How do I increment a value for an existing object in Firebase?

I'm building a step counter app.
I have an iOS app that pushes each day's sum to /users/{mobile}/steps/{date}/
When a new steps child is updated or added, I want to sum the value of all the steps for that particular user and update his stepsTotal.
To achieve that I need to
Find the original user and sum all the steps.
Save the new value to stepsTotal.
I would be most grateful if someone could give some help here. :-)
database
{
  "users": {
    "92291000": {
      "firstName": "Tore",
      "stepsTotal": "1500",
      "steps": {
        "02-09-2017": "500",
        "03-09-2017": "1000"
      }
    }
  }
}
import.js
var db = admin.database();
var dbRoot = db.ref("/");
var usersRef = dbRoot.child("users");

// This works
function saveUser(attributes) {
  let mobile = attributes.mobile;
  delete attributes['mobile'];
  let user = usersRef.child(mobile);
  user.update(attributes);
}

function increaseSteps( { mobile = null, steps = null } = {}) {
  // Find the user
  console.log("looking for mobile", mobile); // OK
  let userRef = usersRef.child(mobile);
  // Here I'm not able to read the old data from the user.
  userRef.transaction(function(user) {
    console.log("user: ", user); // null
    // ^ user is null.
  });
  /*
    If I manage to find the user above, I expect to do something like this.
    Or is it possible to only update stepsTotal?
  */
  let attributes = {
    firstName: user.firstName,
    lastName: user.lastName,
    stepsTotal: user.stepsTotal + steps,
  };
  user.update(attributes);
}
If I understand correctly, you have a problem in this snippet of the code:
let userRef = usersRef.child(mobile);
// Here I'm not able to read the old data from the user.
userRef.transaction(function(user) {
  console.log("user: ", user); // null
  // ^ user is null.
});
In Firebase Database transactions the initial value is often null. From the Firebase documentation on transactions:
Transaction Function is Called Multiple Times
Your transaction handler is called multiple times and must be able to handle null data. Even if there is existing data in your database it may not be locally cached when the transaction function is run.
This is due to how Firebase transactions work behind the scenes. To learn more about that, see my answers here Transcation updateFunction parameter is null and Firebase runTransaction not working.
The solution is to handle both cases: if the total doesn't exist yet, start the count from zero, otherwise add the new steps to the existing total:
let totalRef = usersRef.child(mobile).child('stepsTotal');
totalRef.transaction(function(currentTotal) {
  return (currentTotal || 0) + new_steps_for_user;
});
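If you would rather recompute the total from all of the steps children, as described in the question, instead of incrementing a counter, a rough sketch could look like this (note that the sample data stores the step counts as strings, so they are parsed before summing):
let userRef = usersRef.child(mobile);
userRef.child('steps').once('value').then(function(snapshot) {
  let total = 0;
  snapshot.forEach(function(daySnapshot) {
    // each child is one day's step count, stored as a string in the sample data
    total += parseInt(daySnapshot.val(), 10) || 0;
  });
  // write the recomputed total back to stepsTotal
  return userRef.child('stepsTotal').set(total);
});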

Dealing with geo queries on the client in the context of multiple subscriptions

I have two different subscriptions in my app:
Meteor.subscribe('collection');
and
Meteor.subscribe('filtered-collection',param1,param2);
I want to supply the data to different templates through different template helpers, say allResults and filteredResults respectively.
Since $geoWithin doesn't work on the client side and I need it for filtering, I cannot just filter the first subscription with
filteredResults = Collection.find(selector);
Therefore, I need a separate subscription for it.
So, the question is: how do I get the result set from each respective subscription and pass it through a helper?
I finally solved the problem. I don't think the solution is ideal though.
At Server:
Collection = new Meteor.Collection('collection');

Meteor.publish('collection', function() {
  return Collection.find();
});

Meteor.publish('filteredCollection', function(loc, radius) {
  var selector = {};
  if (radius === undefined)
    radius = 100;
  if (loc !== undefined && !(isNaN(loc[0]) || isNaN(loc[1]))) {
    selector.loc = {
      $geoWithin: {
        $centerSphere: [loc, radius / 6371]
      }
    };
  }
  var sub = this;
  var handle = Collection.find(selector).observeChanges({
    added: function(id, fields) {
      sub.added("filteredCollection", id, fields);
    },
    changed: function(id, fields) {
      sub.changed("filteredCollection", id, fields);
    },
    removed: function(id) {
      sub.removed("filteredCollection", id);
    }
  });
  sub.ready();
  this.onStop(function() {
    handle.stop();
  });
});
At client:
Collection = new Meteor.Collection('collection');
FilteredCollection = new Meteor.Collection('filteredCollection');

Meteor.subscribe('collection');
Meteor.subscribe('filteredCollection', loc, radius);

Template.collection.helpers({
  collection: function() {
    return Collection.find();
  },
  filteredCollection: function() {
    return FilteredCollection.find();
  }
});
At the client, Collection and FilteredCollection are two different subsets of the same underlying collection on the server. But whether the two subsets depend on each other in terms of caching and persistence is (I think) a different question altogether.
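One practical addition: since the filtered documents arrive through their own subscription, the helpers can check the subscription handle before returning a cursor, so templates don't render with incomplete data. A small sketch (the handle variable is illustrative):
// Keep the subscription handle so readiness can be checked reactively
var filteredHandle = Meteor.subscribe('filteredCollection', loc, radius);

Template.collection.helpers({
  filteredCollection: function() {
    // Return an empty result until the filtered subscription is ready
    if (!filteredHandle.ready()) {
      return [];
    }
    return FilteredCollection.find();
  }
});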
