Receive response with request in node.js - node.js

I need to recover the response in variable and show it
// NOTE(review): fragment from the question — the enclosing object literal
// and the arrow function's closing brace lie outside the quoted snippet.
'confirmation' :()=> {
// BUG: this accumulator is shadowed by the callback parameter below, so it
// is never actually populated.
let responseJson = {};
var request = require('request');
// WARNING: credentials travel as plain-text query parameters over HTTP.
// `username`/`password` are presumably in scope in the enclosing handler —
// TODO confirm against the full source.
var url = 'http://dev.exemple02.com/ws/ws_login_sh.php?username=' + username + '&password=' + password;
// The second callback argument is the HTTP response object; naming it
// `responseJson` shadows the variable declared above.
request(url, function (error, responseJson, body) {
console.log(responseJson);
var test = JSON.parse(body);
if(test['codeRetour']===1){
var customer=test['customer'];
var nom=customer['first_name'];
var prenom=customer['last_name'];
console.log('super !vous etes connecté'+prenom);
// This assigns onto the HTTP response object, not the outer accumulator.
responseJson.displayText = 'super ! vous etes connecté'+prenom ;
// `response` is not defined anywhere in this snippet — presumably the
// framework response of the enclosing handler; verify in the caller.
response.json(responseJson);
}
});
but `response.json(responseJson);` doesn't send the result back to the client

Related

get client username sent to server side in javascript

How can I pass form input from client to server in javascript? Below is the client side. I want to take a username (or anything entered in the textbox) and send it to server.js, where it will be processed for validation. The thing is that I need the data from client.js to be stored in a variable in server.js so that I can retrieve it.
// Module-level handles to the input box and the output <div>; both are
// filled in by init() once the window has loaded.
var textbox;
var dataDiv;
window.onload = init;
// Builds the page UI (header, output div, textbox, button) and wires the
// button's click event to sendData().
function init(){
textbox = document.createElement("input");
textbox.id="textbox";
dataDiv = document.createElement("div");
var header = document.createElement("h1");
header.appendChild(document.createTextNode("Select User"));
var button = document.createElement("BUTTON");
button.id = "myBtn";
var textBtn = document.createTextNode("Click me");
button.appendChild(textBtn);
button.addEventListener("click", () => {
sendData();
});
var docBody = document.getElementsByTagName("body")[0];//Only one body
docBody.appendChild(header);
docBody.appendChild(dataDiv);
docBody.appendChild(textbox);
docBody.appendChild(button);
}
// Reads the username from the textbox and issues a GET to /register,
// rendering the server's reply into dataDiv.
// BUG (the question's subject): usrName is read but never included in the
// request, so the server cannot see it.
function sendData(){
var usrName = document.getElementById("textbox").value; //I want to send it to server.js
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4 && this.status == 200) {
// NOTE(review): responseText is already a string; JSON.stringify wraps it
// in an extra pair of quotes — JSON.parse was probably intended.
var dataObj = JSON.stringify(this.responseText);
dataDiv.innerHTML = dataObj;
}
};
xhttp.open("GET", "/register", true);
xhttp.send();
}
This is the server side
// Minimal Express server: serves ./public statically and answers GET
// /register with a placeholder JSON-ish string.
var express = require('express');
var app = express();
app.get('/register', handleGetRequest); //how do I pass usrName here?
app.use(express.static('public'));
app.listen(5000);
// Answers "{working}" for /register and an error string otherwise.
// NOTE(review): the last path segment is compared literally, so a request
// like "/register?x=1" would NOT match ('register?x=1' !== 'register');
// under Express the idiomatic source for parameters is request.query.
function handleGetRequest(request, response){
var pathArray = request.url.split("/");
var pathEnd = pathArray[pathArray.length - 1];
if(pathEnd === 'register'){
response.send("{working}");
}
else
response.send("{error: 'Path not recognized'}");
}
If you use GET, you have to put the parameters in the URL.
xhttp.open("GET", "/register?usrName=" + encodeURIComponent(usrName), true);
See How to get a URL parameter in Express? for how you read the query parameter in Express.
Sending data:
/**
 * Sends the textbox value to the server as a URL-encoded `usrName`
 * query parameter and renders the response into dataDiv.
 */
function sendData() {
    var enteredName = document.getElementById("textbox").value;
    var requestUrl = "http://localhost:5000/register?usrName=" + encodeURIComponent(enteredName);
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function () {
        var finished = this.readyState == 4;
        var succeeded = this.status == 200;
        if (finished && succeeded) {
            dataDiv.innerHTML = JSON.stringify(this.responseText);
        }
    };
    xhr.open("GET", requestUrl, true);
    xhr.send();
}
Reading data:
/**
 * Answers GET /register with "{<usrName>}" taken from the query string,
 * and an error string for every other path.
 *
 * Fixes over the original answer:
 *  - a bare "/register" (no query string) left urlParts[1] undefined and
 *    crashed on .replace(); it now falls back to an empty string;
 *  - the client sends encodeURIComponent(usrName), so the value is
 *    decoded here before being echoed back.
 *
 * @param {{url: string}} request  incoming HTTP request (only .url is read)
 * @param {{send: Function}} response  Express-style response object
 */
function handleGetRequest(request, response) {
    var urlParts = request.url.split("?");
    if (urlParts[0] === '/register') {
        var query = urlParts[1] || '';
        var usrName = decodeURIComponent(query.replace('usrName=', ''));
        response.send("{" + usrName + "}");
    } else {
        response.send("{error: 'Path not recognized'}");
    }
}

Add the return value of an async callback method to the HTTP response — with an async callback, the page completes loading before I write the response

I am trying out NodeJS server app for getting AWS S3 bucket file (object) names list and return as HTTP response text.
below is the full code I wrote.
var AWS = require('aws-sdk');
var http = require("http");

/**
 * Handles every incoming HTTP request: lists the objects under a fixed
 * S3 prefix and is meant to answer with a comma-separated list of file
 * names (the last path segment of each key).
 *
 * BUG (the subject of the question): s3.listObjects() is asynchronous,
 * so response.write(str)/response.end() below execute before the
 * callback has filled `str` — the client always receives an empty body.
 * The fix is to move write()/end() inside the callback (see the answer).
 */
function onRequest(request, response) {
    response.writeHead(200, {
        'Content-Type': 'application/json'
    });
    // Credentials are loaded from the shared AWS credentials file.
    var credentials = new AWS.SharedIniFileCredentials();
    AWS.config.credentials = credentials;
    AWS.config.logger = console;
    var str = "";
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucketname',
        Delimiter: '/',
        Prefix: 'path/to/the/folder'
    }
    //var data = s3.listObjectsV2(params); returns data as [object Object]
    s3.listObjects(params, function(err, data) {
        console.log("Data : " + data);
        // NOTE(review): `err` is never checked; data.Contents would throw
        // here if the call failed.
        for (var i = 0; i < data.Contents.length; i++) {
            var tokens = data.Contents[i].Key.split("/");
            str = str + tokens[tokens.length - 1];
            // Off-by-one: i never equals data.Contents.length inside the
            // loop, so a trailing comma is appended after the last name.
            if (i != data.Contents.length) {
                str = str + ",";
            }
        }
        console.log("Final text:" + str);
    });
    response.write(str);
    response.end();
}
http.createServer(onRequest).listen(8081);
console.log('Server running at http://127.0.0.1:8081/');
Problem: Due to asynchronous call to listObjects my HTTP response writing ends before I get callback value.
Can anyone help me to make it synchronous so that I can add str as part of the response body?
Thanks in advance.
You can just put res.write and res.end inside the callback that will work:
var AWS = require('aws-sdk');
var http = require("http");

/**
 * Answer version: the response is written inside the listObjects()
 * callback, after `str` has been built, so the client actually receives
 * the comma-separated list of file names.
 */
function onRequest(request, response) {
    response.writeHead(200, { 'Content-Type': 'application/json' });
    var credentials = new AWS.SharedIniFileCredentials();
    AWS.config.credentials = credentials;
    AWS.config.logger = console;
    var str = "";
    var s3 = new AWS.S3();
    var params = { Bucket: 'bucketname', Delimiter: '/', Prefix: 'path/to/the/folder' };
    //var data = s3.listObjectsV2(params); returns data as [object Object]
    s3.listObjects(params, function(err, data) {
        console.log("Data : " + data);
        for (var i = 0; i < data.Contents.length; i++) {
            var tokens = data.Contents[i].Key.split("/");
            str = str + tokens[tokens.length - 1];
            if (i != data.Contents.length) {
                str = str + ",";
            }
        }
        console.log("Final text:" + str);
        response.write(str); // And putting response write() and end() here
        response.end();
    });
    // taking off write() and end() from here
}
http.createServer(onRequest).listen(8081);
console.log('Server running at http://127.0.0.1:8081/');
You only need response.end(str), and it can go inside the callback. Node will keep the HTTP stream open until the S3 call returns.

Nodejs: Comparing results of two async requests

I looked at other questions regarding this topic but can't wrap my head around how to implement it in this case.
What I am trying to achieve:
Visit site and get content (body)
Visit matching test site and get content (body)
Compare content
Crawl links on page1
Crawl links on page2
Continue
The problem I am having at the moment is that I cannot compare the content because the requests are not waiting for each other.
Here's what my code looks like at the moment.
// Crawler setup: shared state for a production-vs-test site comparison
// crawl. `colors`, `request`, `cheerio`, `diff` and `url-parse` are
// third-party packages.
require('colors');
var request = require('request');
var cheerio = require('cheerio');
var jsdiff = require('diff');
var URL = require('url-parse');
var PROD_START_URL = "https://www.somesite.org";
var MAX_PAGES_TO_VISIT = 100;
// Set of URLs already fetched (keys) and how many pages were fetched.
var pagesVisited = {};
var numPagesVisited = 0;
// Work queue of URLs still to crawl (LIFO: crawl() pops from the end).
var pagesToVisit = [];
// Page bodies captured by the latest visitPage/scrapeTestContent calls.
var globalProdContent;
var globalTestContent;
var url = new URL(PROD_START_URL);
var baseUrl = url.protocol + "//" + url.hostname;
pagesToVisit.push(PROD_START_URL);
crawl();
/**
 * Pops the next URL off the work queue and visits it, stopping once
 * MAX_PAGES_TO_VISIT pages have been fetched.
 *
 * Fix: the original popped from an empty queue and then called
 * visitPage(undefined, ...) once all discovered links were exhausted;
 * the crawl now stops cleanly when the queue is empty.
 */
function crawl() {
    if (numPagesVisited >= MAX_PAGES_TO_VISIT) {
        console.log("Reached max limit of number of pages to visit.");
        return;
    }
    // Nothing left to do — every discovered link has been processed.
    if (pagesToVisit.length === 0) {
        return;
    }
    var nextPage = pagesToVisit.pop();
    if (nextPage in pagesVisited) {
        // We've already visited this page, so repeat the crawl
        crawl();
    } else {
        // New page we haven't visited
        visitPage(nextPage, crawl);
    }
}
/**
 * Fetches one production page, captures its #wrapper text, kicks off the
 * matching test-site scrape, queues the page's links, then continues the
 * crawl via `callback`.
 *
 * Fix: the original read response.statusCode without checking `error`
 * first; on a transport-level failure `response` is undefined and the
 * crawler crashed with a TypeError.
 *
 * @param {string} url       production page to fetch
 * @param {Function} callback invoked when this page is fully processed
 */
function visitPage(url, callback) {
    // Record the page before fetching so it is never queued twice.
    pagesVisited[url] = true;
    numPagesVisited++;
    console.log("Visiting page " + url);
    request(url, function(error, response, body) {
        if (error) {
            // Transport failure: no response object exists at all.
            console.log("Request failed: " + error);
            callback();
            return;
        }
        // Check status code (200 is HTTP OK)
        console.log("Status code: " + response.statusCode);
        if (response.statusCode !== 200) {
            callback();
            return;
        }
        // Parse the document body
        var $ = cheerio.load(body);
        globalProdContent = $("#wrapper").text();
        // Build new URL for test site
        var testURL = url.replace("https://www.somesite.org", "http://matching.testsite");
        // Scrape test site (asynchronous — see the answer for sequencing).
        scrapeTestContent(testURL);
        collectInternalLinks($);
        callback();
    });
}
/**
 * Queues every anchor href found on the current page, prefixed with the
 * site base URL, for a later crawl pass.
 * @param {Function} $ - cheerio handle loaded with the current document
 */
function collectInternalLinks($) {
    var anchors = $("a[href]");
    console.log("Found " + anchors.length + " relative links on page");
    anchors.each(function() {
        var href = $(this).attr('href');
        pagesToVisit.push(baseUrl + "/" + href);
    });
}
/**
 * Fetches the matching test-site page and stores its #wrapper text in
 * globalTestContent for comparison with the production page.
 *
 * Fixes: the original called an undefined `callback` on a non-200 status
 * (ReferenceError) and read response.statusCode without checking `error`
 * first. An optional completion callback keeps existing callers working
 * while letting new callers sequence the comparison (as the answer
 * below suggests).
 *
 * @param {string} testURL   test-site URL to fetch
 * @param {Function} [cb]    optional completion callback
 */
function scrapeTestContent(testURL, cb) {
    var done = cb || function() {};
    console.log("Visiting matching testpage " + testURL);
    request(testURL, function(error, response, body) {
        if (error) {
            // Transport failure: `response` is undefined here.
            done();
            return;
        }
        console.log("Status code: " + response.statusCode);
        if (response.statusCode !== 200) {
            done();
            return;
        }
        var $ = cheerio.load(body);
        globalTestContent = $("#wrapper").text();
        console.log(globalTestContent);
        done();
    });
}
Is there an easier way to do this or am I completely off the track?
This can be done in two ways:
1. Add callback to scrapeTestContent
function scrapeTestContent(testURL, cb) {
...
request(testURL, function(error, response, body) {
cb();
});
In visitPage,
function visitPage(url, callback) {
...
scrapeTestContent(testURL, () => collectInternalLinks($));
}
2. Using ES6 promises. In scrapeTestContent(), return `new Promise((resolve, reject) => {...})`. Then in visitPage, use the following construct: `scrapeTestContent(testURL).then(() => collectInternalLinks($))`.

Write array object to JSON in node.js

I am trying to write some items I pushed into an array into a JSON file in node.js but I can't figure out how to wait for the array to contain the items before writing the JSON file. As a result the file is always empty. Do i need to have a callback? If so, how? NB:I'm still new to node.js
This is the code below:
// Question code: scrape several news articles and write them to a JSON
// file. The stated problem: fs.writeFile at the bottom runs before any
// request() callback has pushed into jsonObj, so the file is empty.
var getLinks = require('./news_archive/news_links.js');
var request = require('request');
var cheerio = require('cheerio');
var fs = require('fs');
var saveNews = './news_archive/news.json';
var jsonObj = [];
var i;
var number_of_links = getLinks.links.length;
for(i=0; i<number_of_links; i++){
//GET ARTICLE LINK FROM link.js
var url = "http://www.times.co.sz/"+getLinks.links[i];
request(url, function(err, resp, body){
// NOTE(review): `err` is never checked; body would be undefined on failure.
var $ = cheerio.load(body);
//GET ARTICLE HEADLINE
var storyHeadline = $('#article_holder h1');
var storyHeadlineText = storyHeadline.text();
//GET DATE POSTED
var datePosted = $('.metadata_time');
var datePostedText = datePosted.text();
//GET ARTICLE REPORTER'S NAME
var reporterName = $('.article_metadata a');
var reporterNameText = reporterName.text();
//GET ARTICLE SUMMARY
var fullStory = $('#article_body span');
var fullStoryText = fullStory.text();
//PUSH ITEMS TO jsonObj ARRAY
// NOTE(review): `i` is var-scoped, so by the time these async callbacks
// run the loop has finished and every record gets id === number_of_links.
jsonObj.push({
id: i,
storyHeadline: storyHeadlineText,
datePosted: datePostedText,
reporterName: reporterNameText,
fullStory: fullStoryText
})
});
} //END for LOOP
//WRITE TO news.json file
// BUG (the question): this runs synchronously right after the loop,
// before any request callback has fired, so jsonObj is still empty.
fs.writeFile(saveNews, JSON.stringify(jsonObj, null, 4), function(err) {
if(err) {
console.log(err);
} else {
console.log("JSON saved to " + saveNews);
}
});
The issue is that request is asynchronous and you cannot use a synchronous loop to iterate through the links. You can use the async lib for that:
// Answer: async.times runs the scrape once per link index and gathers
// each callback's record into `res` (in index order); the JSON file is
// written only in the final callback, after every request has completed.
var getLinks = require('./news_archive/news_links.js');
var request = require('request');
var cheerio = require('cheerio');
var fs = require('fs');
var saveNews = './news_archive/news.json';
var number_of_links = getLinks.links.length;
var async = require('async');
async.times(number_of_links, function (i, next) {
var url = "http://www.times.co.sz/"+getLinks.links[i];
request(url, function(err, resp, body){
// NOTE(review): on a failed request `body` is undefined, so
// cheerio.load(body) would throw before next(err) is reached.
var $ = cheerio.load(body);
//GET ARTICLE HEADLINE
var storyHeadline = $('#article_holder h1');
var storyHeadlineText = storyHeadline.text();
//GET DATE POSTED
var datePosted = $('.metadata_time');
var datePostedText = datePosted.text();
//GET ARTICLE REPORTER'S NAME
var reporterName = $('.article_metadata a');
var reporterNameText = reporterName.text();
//GET ARTICLE SUMMARY
var fullStory = $('#article_body span');
var fullStoryText = fullStory.text();
//PUSH ITEMS TO jsonObj ARRAY
// next(err, record): async.times collects the records into `res` below.
next(err, {
id: i,
storyHeadline: storyHeadlineText,
datePosted: datePostedText,
reporterName: reporterNameText,
fullStory: fullStoryText
});
});
}, function (err, res) {
// do not forget to handle error
fs.writeFile(saveNews, JSON.stringify(res, null, 4), function(err) {
if(err) {
console.log(err);
} else {
console.log("JSON saved to " + saveNews);
}
});
})

Dealing with asynchronous functions. Custom callback?

I have the code below and am trying to access the all_records array once the _.each function has completed. However as it is asynchronous I was wondering if was possible to force a callback onto the underscores each?
// Question code: fire one request per start offset (0 and 100) and
// collect scraped records; the console.log at the bottom runs before the
// asynchronous request callbacks have pushed anything.
var request = require('request'),
cheerio = require('cheerio'),
_ = require('underscore');
var all_records = [];
_.each([0,100], function(start) {
var base_url = "http://www.example.com/search?limit=100&q=foobar&start=";
var url = base_url + start;
request(url, function(err, res, body) {
// NOTE(review): `err` is never checked before cheerio.load(body).
var $ = cheerio.load(body),
links = $('#results .row');
$(links).each(function(i, link) {
var $link = $(link);
var record = {
title: $link.children('.title').text().trim()
};
all_records.push(record);
});
});
});
// Need to run this once _.each has completed final iteration.
// BUG (the question): _.each returns immediately; the request callbacks
// have not run yet, so this logs an empty array.
console.log(all_records);
Here is a simple solution using a simple synchronization method:
/**
 * Simple counter-based synchronization: one outstanding request per
 * start offset; all_records is logged once the last request finishes.
 *
 * Fix: the original initialised the counter to 101 and decremented it
 * once per scraped record, but _.each([0,100], ...) iterates exactly two
 * array elements (0 and 100) — the counter only hit zero if the two
 * pages happened to contain 101 rows between them. The counter now
 * tracks pending requests instead.
 */
var starts = [0, 100];
var pending = starts.length; // one outstanding request per start offset
_.each(starts, function(start) {
    var base_url = "http://www.example.com/search?limit=100&q=foobar&start=";
    var url = base_url + start;
    request(url, function(err, res, body) {
        var $ = cheerio.load(body),
            links = $('#results .row');
        $(links).each(function(i, link) {
            var $link = $(link);
            var record = {
                title: $link.children('.title').text().trim()
            };
            all_records.push(record);
        });
        pending--;
        if (pending === 0) { // last request done
            console.log(all_records);
        }
    });
});
A more elegant solution can be accomplished by using async's .parallel method.
// Build one task function per index; async.parallel runs them all and
// fires the final callback once every task has called done().
//
// Fix: the original declared each task as `function(done)` but bound `i`
// as the first argument — at call time `done` therefore held the number
// i and the real async completion callback (the second argument) was
// lost, so async.parallel never finished. The parameter list must be
// (i, done).
var requests = []; //an array for all the requests we will be making
for (var i = 0; i <= 100; i++) {
    requests.push((function(i, done) { //create all the requests
        //here you put the code for a single request.
        //After the push to all_records you make a single done() call
        //to let async know the function completed
    }).bind(null, i)); //the bind is there so each function gets its own value of i
}
async.parallel(requests, function() {
    console.log(all_records);
});
async.each ended up being the easiest to implement.
// Fixed async.each usage: the iteratee must invoke its completion
// callback (`done`) once its request has been processed, otherwise
// async.each's final callback never fires. The original omitted the
// second iteratee parameter entirely, so console.log(all_records) never ran.
async.each([0, 100], function(start, done) {
    var base_url = "http://www.example.com/search?limit=100&q=foobar&start=";
    var url = base_url + start;
    request(url, function(err, res, body) {
        if (err) {
            return done(err); // propagate transport errors to the final callback
        }
        var $ = cheerio.load(body),
            links = $('#results .row');
        $(links).each(function(i, link) {
            var $link = $(link);
            var record = {
                title: $link.children('.title').text().trim()
            };
            all_records.push(record);
        });
        done();
    });
}, function(err) {
    if (err) {
        console.error(err);
    }
    console.log(all_records);
});

Resources