Express Endpoint to Run an Axios GET Request - Node.js

I am trying to create an Express route that I can hit from a seed script or cron job. When hit, it should run a GET request with Axios against an external API endpoint, pull the appropriate data, and store it in a MongoDB database on a scheduled basis.
I was able to run this successfully in my backend app.js file, but I don't want the query to run every single time the backend app starts or restarts, since that inserts duplicate data into the database and will not scale. That is why I've separated the logic into models and routes and am trying to create an endpoint that I can hit with a cron job or seed script.
I have created a model that defines the appropriate schema and model in MongoDB. The model is created fine, which I can see when I restart my backend server.
I have created a Route called /schedule that when hit should run the appropriate code to:
Use the Mongo Schema and Model
Run axios.get against the API endpoint
Use an onSuccess function to query the JSON response data, pull out the appropriate fields, and then send that data up to my MongoDB collection.
Here is my schedule.Route.js file:
const express = require("express");
const router = express.Router();
const axios = require('axios');
const Schedule = require("../models/scheduleModel");
const { response } = require('express')

var date_ob = new Date();
var day = ("0" + date_ob.getDate()).slice(-2);
var month = ("0" + (date_ob.getMonth() + 1)).slice(-2);
var year = date_ob.getFullYear();
var date = year + "/" + month + "/" + day;

// Route
router.get("/schedule", (response) => {
  axios.get('https://api.sportradar.us/ncaamb/trial/v7/en/games/' + date + '/schedule.json?api_key=' + apiKey)
    .then(function (response) {
      onSuccess(response)
    })
    .catch(function (error) {
      console.log(error);
    });
});

function onSuccess(response) {
  var gameData = response;
  var gameArray = gameData.data.games
  var arrayLength = Object.keys(gameArray).length
  console.log(arrayLength)
  for (var i = 0; i < arrayLength; i++) {
    var homeTeam = gameData.data.games[i].home.name;
    var homeAlias = gameData.data.games[i].home.alias;
    var homeId = gameData.data.games[i].home.id;
    var awayTeam = gameData.data.games[i].away.name;
    var awayAlias = gameData.data.games[i].away.alias;
    var awayId = gameData.data.games[i].away.id;
    console.log(homeTeam + " " + homeAlias + " " + homeId + " " + awayTeam + " " + awayAlias + " " + awayId);
    assignScheduleValue(homeTeam, homeAlias, homeId, awayTeam, awayAlias, awayId)
  }
}

// Store values in MongoDB
function assignScheduleValue(homeTeam, homeAlias, homeId, awayTeam, awayAlias, awayId) {
  var upSchedule = new Schedule()
  upSchedule.homeAlias = homeAlias;
  upSchedule.homeTeam = homeTeam;
  upSchedule.awayTeam = awayTeam;
  upSchedule.homeId = homeId;
  upSchedule.awayId = awayId;
  upSchedule.awayAlias = awayAlias;
  upSchedule.save();
}

module.exports = router;
From my app.js file I'm just running:
app.use("/schedule", require("./routes/scheduleRoute"));
Again, the goal with this route is that I can hit it with curl or some other command and just run the axios.get request to pull that data into MongoDB daily.
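For what it's worth, two things stand out in the route as posted: mounting the router with app.use("/schedule", ...) while also declaring router.get("/schedule", ...) makes the full path /schedule/schedule, and the handler's single parameter named response is actually Express's request object, so the route never sends a reply and a curl call would hang. A minimal sketch of how the handler could look instead (this is an assumption, not the original code; it reuses the question's Schedule model and apiKey variable, and the res.json shape is made up):
router.get("/", async (req, res) => {
  try {
    // Recompute the date per request so a long-running server always uses today's date
    const now = new Date();
    const day = ("0" + now.getDate()).slice(-2);
    const month = ("0" + (now.getMonth() + 1)).slice(-2);
    const date = now.getFullYear() + "/" + month + "/" + day;
    // Fetch the day's schedule (apiKey comes from the question's setup)
    const { data } = await axios.get('https://api.sportradar.us/ncaamb/trial/v7/en/games/' + date + '/schedule.json?api_key=' + apiKey);
    // Save one document per game, reusing the same fields as assignScheduleValue
    await Promise.all(data.games.map((game) => new Schedule({
      homeTeam: game.home.name,
      homeAlias: game.home.alias,
      homeId: game.home.id,
      awayTeam: game.away.name,
      awayAlias: game.away.alias,
      awayId: game.away.id
    }).save()));
    res.json({ inserted: data.games.length }); // reply so curl/cron sees a status
  } catch (error) {
    console.log(error);
    res.status(500).send(error.message);
  }
});
With that in place, a daily crontab entry could be something like the following (assuming the app listens on port 3000):
0 6 * * * curl -s http://localhost:3000/schedule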

Related

Is it possible to fetch the AWS S3 Uploaded Image File Data and pass that file data through Twitter API from Node JS?

I have a scheduler where I can post scheduled tweets and threads, and it posts through my Twitter account; the whole backend is based on Node.js.
Right now, for posting scheduled tweets and tweet threads with images, I'm using MongoDB and storing the base64 string data, but I don't want to store such big data in Mongo. Is it possible to upload the images to AWS S3 first, and then, when the scheduled time arrives, fetch the image back to my Node server and pass it through the Twitter API?
That way I would only have to store the AWS S3 image link in MongoDB and get rid of the big string data.
API
// THREAD SCHEDULER API
app.post('/TweetThread/:id', async (req, res) => {
  const ID = req.params.id;
  const { texts, dateTimeGMT } = req.body;
  const filter = { _id: ObjectId(ID) };
  const findUser = await userCollection.findOne(filter);
  // GET TIMEZONE FROM INSTANT REQ
  const timeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
  const threadSchData = {
    texts,
    date: new Date(dateTimeGMT).toLocaleDateString(),
    time: new Date(dateTimeGMT).toLocaleTimeString(),
    timezone: timeZone,
    token: findUser.token,
    token_secret: findUser.token_secret,
    screen_name: findUser.screen_api
  }
  const result = await threadDataCollection.insertOne(threadSchData);
  res.json(result);
});
Scheduler
// TWITTER THREAD SCHEDULER
const threadSch = cron.schedule('*/1 * * * *', async () => {
  const cursor = threadDataCollection.find({});
  const threadsArray = await cursor.toArray();
  // console.log(threadsArray);
  threadsArray.forEach(thread => {
    // Twitter Thread CONFIG
    const configThread = {
      consumer_key: process.env.CONSUMER_KEY,
      consumer_secret: process.env.CONSUMER_SECRET,
      access_token: thread.token,
      access_token_secret: thread.token_secret
    };
    // INSERTING THREAD DATA TO THREAD FUNCTION
    async function tweetThread() {
      const t = new TwitThread(configThread);
      await t.tweetThread(thread.texts);
    }
    // Calculating The Time According to Time ZONE
    let scheduledTime = new Date(thread.date + ' ' + thread.time);
    let now = new Date();
    let year = now.getFullYear();
    let month = now.getMonth() + 1;
    let day = now.getDate();
    let hour = now.getHours();
    let minute = now.getMinutes();
    let new_date = new Date(year + '-' + month + '-' + day + ' ' + hour + ':' + minute + ':' + '00');
    const countingTime = scheduledTime.toLocaleString("en-US", { timeZone: thread.timezone });
    const serverTime = new_date.toLocaleString("en-US", { timeZone: thread.timezone });
    // POSTING THREAD WHEN TIME MATCHED
    if (countingTime === serverTime) {
      tweetThread();
      console.log('Posted Thread Once');
      console.log('TRUE');
    }
  });
});
threadSch.start();
This is the request body, where I'm passing the string data from the front end in the media_data field.
You need credentials to connect to AWS from the command line. Then, from the image's path, you only have to run aws s3 cp your_file_name s3://your_bucket_name/your_file_name.
Then save this your_file_name (or its S3 URL) in MongoDB and load the image from there. There are more tricks, such as using CloudFormation or making the S3 bucket public so you can point to the file directly from your HTML page. If you need help with any of the steps, please write me.
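If you would rather do the upload from the Node backend instead of the CLI, a minimal sketch with the aws-sdk v2 package might look like the following. The bucket name, key scheme, media_url field, and imageBuffer parameter are placeholders I've made up, not part of the question; threadDataCollection is the collection from the code above.
const AWS = require('aws-sdk'); // picks up credentials from env vars or ~/.aws/credentials
const s3 = new AWS.S3();

async function uploadImageAndSchedule(imageBuffer, threadSchData) {
  // Upload the image once and keep only its URL in MongoDB
  const key = 'tweet-images/' + Date.now() + '.jpg'; // hypothetical key scheme
  const uploaded = await s3.upload({
    Bucket: 'your-bucket-name', // placeholder
    Key: key,
    Body: imageBuffer,
    ContentType: 'image/jpeg'
  }).promise();
  // Store the S3 link instead of the base64 string
  return threadDataCollection.insertOne({ ...threadSchData, media_url: uploaded.Location });
}
When the cron job fires, the image can be fetched back with s3.getObject({ Bucket: 'your-bucket-name', Key: key }).promise() and its Body buffer passed on to the Twitter API.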

How to fetch data from the Spotify Search API and parse it to JSON? [In Node.js]

This is my app.js:
const express = require("express");
//const https = require("https");
const app = express();

app.get("/", function(req, res) {
  const query = "niall";
  // Follow procedure here to get access_token and refresh_token: https://benwiz.com/blog/create-spotify-refresh-token/
  const access_token = {access_token};
  const token = "Bearer " + access_token;
  var searchUrl = "https://api.spotify.com/v1/search?q=" + query + "&type=track&limit=4";
  ////////WRITE YOUR CODE HERE//////////
});

// Starting the server. Should this be placed at the top of all other commands?
app.listen(3000, function() {
  console.log("Server is running on port 3000.")
})
Expected Behaviour:
We are to get results from the API endpoint using the https.get method.
The returned data is to be parsed to JSON.
We need to store the value of track ID, from this JSON output, into a variable and log it in the console.
Helpful Resources:
Follow the steps here to get the access_token: https://benwiz.com/blog/create-spotify-refresh-token/
Understand parameters here: https://developer.spotify.com/documentation/web-api/reference/#category-search
Spotify Web Console to understand the JSON arrangement: https://developer.spotify.com/console/get-search-item/
I'm really in need of help. Any help would be appreciated.
Replace ////////WRITE YOUR CODE HERE////////// with
axios.get(searchUrl, {
    headers: {
      'Authorization': token,
    }
  })
  .then((resAxios) => {
    console.log(resAxios.data)
    spotifyResult = resAxios.data;
    // Extracting required data from the response. The following "tracks.items[0].id" is the address of the data that we need from the JSON 'spotifyResult'.
    let spotifyTrackIdAppJs00 = spotifyResult.tracks.items[0].id;
    let spotifyAlbumIdAppJs00 = spotifyResult.tracks.items[0].album.id;
    let spotifyArtistIdAppJs00 = spotifyResult.tracks.items[0].artists[0].id;
    console.log("Fetched values: " + spotifyTrackIdAppJs00 + ", " + spotifyAlbumIdAppJs00 + ", " + spotifyArtistIdAppJs00);
    // The 'results' named EJS file is rendered and fed in the response. The 'required' data is passed into it using the following variable(s).
    // A folder named 'views' has to be in the same directory as "app.js". That folder contains 'results.ejs'.
    res.render("results", {
      spotifyTrackIdEjs00: spotifyTrackIdAppJs00,
      spotifyAlbumIdEjs00: spotifyAlbumIdAppJs00,
      spotifyArtistIdEjs00: spotifyArtistIdAppJs00
    });
    console.log("Values to be used in rendered file: " + spotifyTrackIdAppJs00 + ", " + spotifyAlbumIdAppJs00 + ", " + spotifyArtistIdAppJs00);
  })
  .catch((error) => {
    console.error(error)
  })
I needed the ID values for the album and artist as well; please modify accordingly.
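Two notes on the snippet above: axios is not required in the posted app.js, so you would need to install it and add const axios = require("axios"); at the top. And if you would rather stick with the built-in https module mentioned in the question, a rough sketch of the same request (using the same searchUrl and token variables, Node 10.9+ signature, minimal error handling) could look like this:
https.get(searchUrl, { headers: { Authorization: token } }, (apiRes) => {
  let body = "";
  apiRes.on("data", (chunk) => (body += chunk));
  apiRes.on("end", () => {
    const spotifyResult = JSON.parse(body); // parse the raw response into JSON
    const trackId = spotifyResult.tracks.items[0].id; // same path as in the axios version
    console.log("Track ID: " + trackId);
  });
}).on("error", (error) => console.error(error));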

Firebase Database Get All Value In Order Cloud Functions

I develop for Firebase Cloud Functions. I have a Firebase Realtime Database like this:
myData
  eqewrwrepere              (this one is a device token)
    Lta+sde-fer             (this one is a firebase id)
      firstvalue : "a"
      secondvalue : "b"
    Qrgd+ad-qdda            (this one is second firebase id)
      firstvalue : "c"
      secondvalue : "d"
  eqwerSAsdqe               (this one is another device token)
    Lta+sde-fer             (this one is a firebase id)
      firstvalue : "x"
      secondvalue : "y"
    Qrgd+ad-qdda            (this one is second firebase id)
      firstvalue : "z"
      secondvalue : "t"
I fetch this data with the code below: I fetch all the data, put it in an array, and when the fetching is done I loop over the array to find items. I am an iOS developer, so I am a newbie with Node.js. Here is what I want to do:
Get firstvalue for each database entry.
Make an API request with the firstvalue of each entry.
The API returns an image.
Write the image to a temp directory.
Process this image with the Vision API.
Extract the text.
Update the database.
Send a notification to the deviceToken.
Right now I am able to retrieve the database items into my array. But when I make a request inside the for loop, the request is called asynchronously, so the loop keeps going while the request response, file writing, and Vision processing are executed only once.
What I want is: in the for loop, take databaseArray[0], make the request, write the file, process it with the Vision API, update the database, and only then go on to the next item, databaseArray[1].
I read about Promises on different pages, but I did not understand them.
Thank you.
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
var request = require('request');
var fs = require('fs');
// Get a reference to the Cloud Vision API component
const Vision = require('@google-cloud/vision');
const vision = new Vision.ImageAnnotatorClient();
// Imports the Google Cloud client library
//const {Storage} = require('@google-cloud/storage');
var os = require("os");
var databaseArray = [];

exports.hourly_job = functions.pubsub
  .topic('hourly-job')
  .onPublish((event) => {
    console.log("Hourly Job");
    var db = admin.database();
    var ref = db.ref("myData")
    ref.once("value").then(function(allData) {
      allData.forEach(function(deviceToken) {
        deviceToken.forEach(function(firebaseIDs) {
          var deviceTokenVar = deviceToken.key;
          var firebaseIDVar = firebaseIDs.key;
          var firstvalue = firebaseIDs.child("firstvalue").val();
          var secondvalue = firebaseIDs.child("secondvalue").val();
          var items = [deviceTokenVar, firebaseIDVar, firstvalue, secondvalue];
          databaseArray.push([...items]);
        });
      });
      return databaseArray;
    }).then(function(databasem) {
      var i;
      for (i = 0; i < databaseArray.length; i++) {
        var databaseArrayDeviceToken = databaseArray[i][0];
        console.log("DeviceToken: " + databaseArrayDeviceToken);
        var databaseArrayFirebaseID = databaseArray[i][1];
        console.log("FirebaseID: " + databaseArrayFirebaseID);
        var databaseArrayfirstvalue = databaseArray[i][2];
        console.log("firstval: " + databaseArrayfirstvalue);
        var databaseArraysecondval = databaseArray[i][3];
        console.log("Second: " + databaseArraysecondval);
        var url = "http://api.blabla" + databaseArrayfirstvalue;
        /////////////here make a request, pause the loop, process the returned image, but how //////////////////////
        request.get({
          url: url,
          encoding: 'binary'
        }, function(error, httpResponse, body) {
          if (!error && httpResponse.statusCode == 200) {
            fs.writeFileSync('/tmp/processed.jpg', body, 'binary')
            console.log("file written");
          }
        });
      }
    });
    return true;
  });
I found a solution with Mocas' help. Here is the solution: I use async/await functions in the code, and now the for loop waits for each function's response. But now I have a different problem. I think the main async function hangs because of the awaits, and then on the next hourly trigger it runs again, so the console log shows 15, 16, 17 or more 'i' values in the for loop. I have 4 elements in the database array, but the console log shows more than that every hour, and it increases every time. So I guess I should cancel these awaited functions after a timeout, but I don't know how. Here is the code:
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
var request = require('request-promise').defaults({ encoding: null });
var fs = require('fs');
// Get a reference to the Cloud Vision API component
const Vision = require('@google-cloud/vision');
const vision = new Vision.ImageAnnotatorClient();
var os = require("os");
var databaseArray = [];
var uniqueFilename = require('unique-filename')

exports.hourly_job = functions.pubsub
  .topic('hourly-job')
  .onPublish((event) => {
    console.log("Hourly Job");
    var db = admin.database();
    var ref = db.ref("myData")
    ref.once("value").then(function(allData) {
      allData.forEach(function(deviceToken) {
        deviceToken.forEach(function(firebaseIDs) {
          var deviceTokenVar = deviceToken.key;
          var firebaseIDVar = firebaseIDs.key;
          var firstvalue = firebaseIDs.child("firstvalue").val();
          var secondvalue = firebaseIDs.child("secondvalue").val();
          var items = [deviceTokenVar, firebaseIDVar, firstvalue, secondvalue];
          databaseArray.push([...items]);
          //console.log(databaseArray);
        });
      });
      return databaseArray;
    }).then(function (databasem) {
      main().catch(console.error);
    });
    return true;
  });

const main = async () => {
  var i;
  for (i = 0; i < databaseArray.length; i++) {
    console.log("Database Array's " + i + ". element: ");
    var databaseArrayDeviceToken = databaseArray[i][0];
    console.log("DeviceToken: " + databaseArrayDeviceToken);
    var databaseArrayFirebaseID = databaseArray[i][1];
    console.log("FirebaseID: " + databaseArrayFirebaseID);
    var databaseArrayfirst = databaseArray[i][2];
    console.log("first: " + databaseArrayfirst);
    var databaseArraysecond = databaseArray[i][3];
    console.log("second: " + databaseArraysecond);
    if (databaseArrayfirst != "") {
      var apiUrl = "http://api.blabla" + databaseArrayfirst;
      try {
        const apiBody = await request.get(apiUrl);
        ///////////////////////////vision start//////////////////////
        const visionResponseBody = await vision.documentTextDetection(apiBody)
        var visionResponse = visionResponseBody[0].textAnnotations[0].description;
        console.log("Vision response text " + visionResponse);
        // ...some logic here about the response...
        /////////////////////////////////////////////////
        var getdatabasevar = await admin.database().ref("myData/" + databaseArrayDeviceToken + "/" + databaseArrayFirebaseID);
        await getdatabasevar.update({
          "firstvalue": visionResponse
        });
        /////////////////////////////////////////////////
        var getanotherdatabasevar = await admin.database().ref("myData/" + databaseArrayDeviceToken + "/" + databaseArrayFirebaseID + "/" + "secondvalue");
        await getanotherdatabasevar.once("value")
          .then(function(snapshot) {
            // ...some logic...
            // send notification
          });
      } catch (error) {
        console.error(error);
      }
      ///////////////////////////vision end//////////////////////
    }
  }
  return true;
};
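Regarding the "cancel after a timeout" part: this is not from the original post, just a sketch of one common approach, which is to race each awaited call against a timer so a hung request cannot stall the whole loop. The helper name and timeout values here are made up:
// Wrap any promise so it rejects after `ms` milliseconds.
function withTimeout(promise, ms) {
  const timer = new Promise((resolve, reject) =>
    setTimeout(() => reject(new Error("timed out after " + ms + " ms")), ms)
  );
  return Promise.race([promise, timer]);
}

// Inside the loop, each external call could then be bounded, for example:
// const apiBody = await withTimeout(request.get(apiUrl), 10000);
// const visionResponseBody = await withTimeout(vision.documentTextDetection(apiBody), 20000);
Separately, note that the onPublish handler returns true immediately while ref.once(...).then(...) and main() are still running; returning that promise chain instead of true is what tells Cloud Functions to wait for the asynchronous work to finish before ending the invocation.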

How to make a node.js script restart every hour?

I have a node.js script that creates a websocket connection to a crypto trading site (BitMEX) and streams price data.
For some reason, after an hour or two the stream goes bad and the prices (if streamed at all) are inaccurate.
For now I restart it manually every hour, but I need that to happen automatically. How can I do that?
Here is the code of the script:
var WebSocket = require('ws');
var ws = new WebSocket("wss://www.bitmex.com/realtime");
var couchbase = require('couchbase')
var cluster = new couchbase.Cluster('couchbase://localhost/');
cluster.authenticate('xxxxxx', 'xxxxxxx');
var bucket = cluster.openBucket('test');
var N1qlQuery = couchbase.N1qlQuery;

let num_msg = 0;
ws.onopen = function() {
  ws.send(JSON.stringify({
    "op": "subscribe",
    "args": ["trade:XBTUSD", "trade:LTCZ18"]
  }))
};
ws.onmessage = function(msg) {
  var response = JSON.parse(msg.data);
  num_msg++
  if (num_msg > 50) {
    var coin = response['data'][0]['symbol'];
    var price = response['data'][0]['price'];
    //console.log(coin + ":" + price + "\n");
    bucket.manager().createPrimaryIndex(
      function() {
        bucket.upsert(coin, {
          'price': price
        },
        function(err, result) {
        });
      });
  }
};
EDIT: I forgot to mention that I currently use a Windows 7 system (even though I do need to move to Ubuntu or similar).
EDIT 2: Final version of my code :)
const cron = require("node-cron");
var WebSocket = require('ws');
var couchbase = require('couchbase');
var dateTime = require('node-datetime');

let now = new Date();
minutes = now.getMinutes() + 1;
if (minutes + 30 > 59) {
  minutes1 = minutes - 30;
} else {
  minutes1 = minutes + 30; // schedule the second run half an hour later
}
if (minutes > minutes1) {
  s_cron = minutes1 + "," + minutes + " * * * *";
} else {
  s_cron = minutes + "," + minutes1 + " * * * *";
}

cron.schedule(s_cron, () => {
  console.log("---------------------");
  console.log("Running Cron Job");
  var dt = dateTime.create();
  var formatted = dt.format('Y-m-d H:M:S');
  console.log(formatted);
  // create bitmex ws
  var ws = new WebSocket("wss://www.bitmex.com/realtime");
  // connect to couchbase
  var cluster = new couchbase.Cluster('couchbase://localhost/');
  cluster.authenticate('xxxxxxxx', 'xxxxxxxxxx');
  var bucket = cluster.openBucket('test');
  var N1qlQuery = couchbase.N1qlQuery;
  let num_msg = 0;
  ws.onopen = function() {
    ws.send(JSON.stringify({
      "op": "subscribe",
      "args": ["trade:XBTUSD", "trade:LTCZ18"]
    }))
  };
  ws.onmessage = function(msg) {
    var response = JSON.parse(msg.data);
    num_msg++
    if (num_msg > 50) {
      var coin = response['data'][0]['symbol'];
      var price = response['data'][0]['price'];
      //console.log(coin + ":" + price + "\n");
      bucket.manager().createPrimaryIndex(
        function() {
          bucket.upsert(coin, {
            'price': price
          },
          function(err, result) {
            //bucket.disconnect()
          });
        });
    }
  };
});
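One thing to watch out for with this final version (an observation on the code above, not something from the original posts): every cron tick opens a new WebSocket and a new Couchbase bucket without closing the previous ones, so connections accumulate over the day. A small sketch of tearing the old ones down first, assuming the Couchbase cluster object is created once outside the callback:
let ws = null;
let bucket = null;

cron.schedule(s_cron, () => {
  // close the previous connections before opening fresh ones
  if (ws) { try { ws.terminate(); } catch (e) { /* already closed */ } }
  if (bucket) { try { bucket.disconnect(); } catch (e) { /* already closed */ } }
  ws = new WebSocket("wss://www.bitmex.com/realtime");
  bucket = cluster.openBucket('test');
  // ...the rest of the onopen/onmessage handlers as above...
});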
Try 'node-cron'; more at https://www.npmjs.com/package/node-cron. Hope that works.
Consider using cron to restart every hour. Your crontab entry would look like:
0 * * * * <command to restart your app>
If you can't or don't want to use your system crontab or equivalent (not sure what it would be on Windows), you can use pm2.
pm2: https://www.npmjs.com/package/pm2
For how to make pm2 restart every hour, see https://github.com/Unitech/pm2/issues/1076 or https://stackoverflow.com/a/38062307/436641.
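For example, recent pm2 versions can restart an app on a cron schedule directly (check the pm2 docs for the exact option name in your version); the app name and script path below are placeholders:
pm2 start stream.js --cron-restart="0 * * * *"
or, in an ecosystem.config.js:
module.exports = {
  apps: [{
    name: "price-stream",      // hypothetical app name
    script: "./stream.js",     // hypothetical script path
    cron_restart: "0 * * * *", // restart at the top of every hour
    autorestart: true
  }]
};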
Another option would be node-cron: https://www.npmjs.com/package/node-cron

Making sequential mongoose queries in a for loop inside a mocha test

I am trying to use mocha as a test driver for my API application.
I have a JSON file mapping user names to song names that I am trying to feed to an API endpoint.
json:
{
  "description": "hold the lists that each user heard before",
  "userIds": {
    "a": ["m2", "m6"],
    "b": ["m4", "m9"],
    "c": ["m8", "m7"],
    "d": ["m2", "m6", "m7"],
    "e": ["m11"]
  }
}
Let's say the User and Song schemas are just _id plus name. What I want to do, after parsing the JSON file, is convert "a" to a user_id by doing a Mongoose lookup of "a" by name (say id=0), convert each "m2" into a song_id by name (say id=1), and then call the request http://localhost:3001/listen/0/1.
Here is what I have so far for my mocha test:
test.js:
it('adds the songs the users listen to', function(done){
  var parsedListenJSON = require('../listen.json')["userIds"];
  //console.log(parsedListenJSON);
  for (var userName in parsedListenJSON) {
    if (parsedListenJSON.hasOwnProperty(userName)) {
      var songs = parsedListenJSON[userName];
      var currentUser;
      //console.log(userName + " -> " + songs);
      var userQuery = User.findOne({'name': userName})
      userQuery.then(function(user){
        //console.log("user_id: " + user["_id"]);
        currentUser = user;
      });
      for (var i = 0; i < songs.length; i++) {
        //console.log(songs[i]);
        var songQuery = Song.findOne({'name': songs[i]})
        songQuery.then(function(song){
          console.log("user_id: " + currentUser["_id"]);
          console.log("song_id: " + song["_id"]);
          // need to ensure we have both the user_id and song_id
          api_request = "http://localhost:3001/listen/" + currentUser["_id"] + "/" + song["_id"];
          console.log(api_request);
          // listen/user_id/song_id
          //.post('http://localhost:3001/listen/0/1')
          request
            .post(api_request)
            .end(function(err, res){
              expect(res.status).to.equal(200);
            });
        });
      }
    }
  }
  done();
})
Questions:
1) I need to make sure I have both the user_id and song_id before I make the API call. Right now, each of the User.findOne and Song.findOne queries has its own then clause. I am passing the user_id through the currentUser variable into the then block of the song query, but since the query is asynchronous, I don't think this is proper. How can I structure this code so that I proceed with the API call only when both then blocks have executed?
2) When I run the code as is, only the first user gets processed, not the rest; i.e., the printout is:
user_id: 0
song_id: 1
http://localhost:3001/listen/0/1
user_id: 0
song_id: 5
http://localhost:3001/listen/0/5
3) The API endpoint works with Postman, and in the simpler Mocha test below, but it doesn't seem to work in my original code.
var request = require('superagent');
var expect = require('expect.js');
var User = require('../models/user');
var Song = require('../models/song');
var mongoose = require('mongoose');
mongoose.connect('localhost/TestSongRecommender');
...
it('adds the songs', function(){
  request
    .post('http://localhost:3001/listen/0/2')
    .end(function(res){
      expect(res.status).to.equal(200);
    });
});
Update:
The async.forEach approach works. Here's my final snippet:
updated test.js
var request = require('superagent');
var expect = require('expect.js');
var async = require('async');
var User = require('../models/user');
var Song = require('../models/song');
var mongoose = require('mongoose');
mongoose.connect('localhost/Test');

describe('song recommendations', function(){
  it('adds the songs the users listen to', function(done){
    var parsedListenJSON = require('../listen.json')["userIds"];
    //console.log(parsedListenJSON);
    async.forEach(Object.keys(parsedListenJSON), function forAllUsers(userName, callback) {
      var songs = parsedListenJSON[userName];
      //var currentUser;
      //console.log(userName + " -> " + songs);
      var userQuery = User.findOne({'name': userName})
      userQuery.then(function(user){
        //console.log("user_id: " + user["_id"]);
        //console.log(songs);
        //currentUser = user;
        async.forEach(songs, function runSongQuery(songName, smallback) {
          //console.log(songName);
          var songQuery = Song.findOne({'name': songName})
          songQuery.then(function(song){
            //console.log("user_id: " + user["_id"]);
            //console.log("song_id: " + song["_id"]);
            // need to ensure we have both the user_id and song_id
            api_request = "http://localhost:3001/listen/" + user["_id"] + "/" + song["_id"];
            console.log(api_request);
            // listen/user_id/song_id
            //.post('http://localhost:3001/listen/0/1')
            request
              .post(api_request)
              .end(function(err, res){
                expect(res.status).to.equal(200);
                smallback()
              });
          });
        }, function allSongs(err) {
          callback();
        })
      });
    }, function allUserNames(err) {
      done()
    })
  })
});
1&2) You're going to want to use the async package from NPM. You can get it with the command npm install async --save in your main folder and var async = require('async') in your code.
You'll have to replace each for loop with async.forEach. async.forEach takes an array and two functions as arguments. It calls the first function on each item in the array and the second function once all callbacks have returned.
The way you're currently doing it, with for loops, isn't going to work. By the time the asynchronous calls return, your loop has iterated past them (non-blocking IO, remember) and your variables are no longer set correctly.
You also need things set up to call done only once all your code has run.
It will end up looking something like this if written properly:
it('adds the songs the users listen to', function(done){
  var parsedListenJSON = require('../listen.json')["userIds"];
  //console.log(parsedListenJSON);
  async.forEach(Object.keys(parsedListenJSON), function forAllUsers(userName, callback) {
    var songs = parsedListenJSON[userName];
    //console.log(userName + " -> " + songs);
    var userQuery = User.findOne({'name': userName})
    userQuery.then(function(user){
      //console.log("user_id: " + user["_id"]);
      var currentUser = user;
      async.forEach(songs, function runSongQuery(songName, smallback) {
        var songQuery = Song.findOne({'name': songName})
        songQuery.then(function(song){
          console.log("user_id: " + currentUser["_id"]);
          console.log("song_id: " + song["_id"]);
          // need to ensure we have both the user_id and song_id
          api_request = "http://localhost:3001/listen/" + currentUser["_id"] + "/" + song["_id"];
          console.log(api_request);
          // listen/user_id/song_id
          //.post('http://localhost:3001/listen/0/1')
          request
            .post(api_request)
            .end(function(err, res){
              expect(res.status).to.equal(200);
              smallback()
            });
        });
      }, function allRan(err) {
        callback();
      })
    });
  }, function allUserNames(err) {
    done()
  })
})
3) You're going to want to use a testing framework for testing your API. I recommend supertest. Once you have supertest, require your app and supertest:
var request = require('supertest');
var app = require('./testApp.js');
Use it in tests like:
request(app)
  .post(api_request)
  .end(function(err, res){
    expect(res.status).to.equal(200);
    smallback()
  });
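One more note (not part of the original answer): async.forEach starts all of its iterations in parallel. If you need the queries to run strictly one after another, as the question title suggests, async.eachSeries has the same signature but waits for each callback before starting the next item, e.g.:
async.eachSeries(Object.keys(parsedListenJSON), function (userName, callback) {
  // ...same body as above, calling callback() when this user is finished...
}, function (err) {
  done(err);
});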
