How to access JSONP in Node.js?

I am trying to access the body of a JSON page that has a callback URL. On the client side it is easy using jQuery with the jsonp option and so on. How do I do it on the Node.js server side to get, for example, the title?
The URL prints out: data({"log":{"title":"example","description" ...
var http = require("http");
function getPost(){
var reqest = http.get("http://news.example.com/api/read/json?callback=data", function(response){
response.setEncoding('utf8');
response.on('data', function(posts){
var x = JSON.parse(posts);
var callback = function(x){
console.log(callback);
}
});
});
}
getPost();
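For reference, a minimal sketch of one way to handle this server-side (assuming the fixed data(...) wrapper shown above; the URL is the example one from the question): buffer the whole response, strip the JSONP padding, and parse the remaining JSON.
var http = require("http");

function getPost() {
    http.get("http://news.example.com/api/read/json?callback=data", function(response) {
        var body = "";
        response.setEncoding("utf8");
        // 'data' may fire many times with partial chunks, so buffer everything first
        response.on("data", function(chunk) {
            body += chunk;
        });
        response.on("end", function() {
            // Strip the JSONP padding: data({...}) -> {...}
            var json = body.slice(body.indexOf("(") + 1, body.lastIndexOf(")"));
            var x = JSON.parse(json);
            console.log(x.log.title); // "example", per the sample output above
        });
    });
}

getPost();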

Related

Can someone explain where the undefined value in my console comes from?

If I go to localhost:8080/?referal=referalID, my console prints the referral ID but also one line with an undefined value. How can I remove that undefined line?
var http = require('http');
var url = require('url');

http.createServer(function(req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    var adr = req.url;
    var q = url.parse(adr, true);
    var qdata = q.query;
    res.end(console.log(qdata.referal));
}).listen(8080);
My question is: what do I need to change here?
I'm not sure what you're trying to do there, but this should be the correct syntax:
var http = require('http');
var url = require('url');

http.createServer(function(req, res) {
    var adr;
    var q;
    var qdata;
    req.on('data', function() {
        // Data handler
    }).on('end', function() {
        res.writeHead(200, {'Content-Type': 'text/html'});
        adr = req.url;
        q = url.parse(adr, true);
        qdata = q.query;
        console.log(qdata);
        res.end(qdata.referal);
    });
}).listen(8080);
You cannot send the result of a console.log call in the response. You should separate the two:
console.log(qdata.referal); // This will print in the console, server side
res.end(qdata.referal); // This will be echoed back to the client, in the browser
The reason you see undefined is that your browser also requests favicon.ico, so your function gets hit twice. If you do:
var http = require('http');
var url = require('url');

http.createServer(function(req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    var adr = req.url;
    console.log(adr);
    var q = url.parse(adr, true);
    var qdata = q.query;
    res.end(qdata.referal);
}).listen(8080);
you will get:
/?referal=referalID
/favicon.ico
For /favicon.ico the parsed query object is empty, so url.parse('/favicon.ico', true).query.referal is undefined.
Also, as others mentioned, res.end(console.log(qdata.referal)); won't send anything to the browser/client because console.log() doesn't return anything.
The undefined line you are probably talking about is the one returned to the client; that's because console.log() doesn't return anything, as you can see in its source code.
Override console logging to return the arguments:
['log', 'warn', 'error'].forEach(function(method) {
    var oldMethod = console[method].bind(console);
    console[method] = function() {
        oldMethod.apply(console, arguments);
        return arguments;
    };
});
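With that override in place, a quick usage sketch: console.log now returns its arguments object instead of undefined. Note that res.end() expects a string or Buffer, so you would still need to unwrap the value before sending it:
var ret = console.log(qdata.referal); // still prints server side
res.end(String(ret[0])); // ret is an arguments object, so unwrap and stringify it first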
Although this only costs a couple of opcodes and pushes another object onto the stack, you should use it with caution, or memorize a prayer to help you with the future bugs it invites.
Next time, please attach a log or a screenshot.
#over-programming

Node.js http.get against a squarespace.com site returns status code 403

When I do a simple http.get for a URL that goes to a Squarespace (SS) site, I get a 403 response. I know the site is working and that the server can reach it. Here's a simple example against a SS site (not mine, but it produces the same issue):
Show that server can access site:
curl http://www.letsmoveschools.org
This returns all the HTML from the site...
Node app
var http = require('http');
var url;
url = 'http://www.letsmoveschools.org/';

var req = http.get(url, function(res) {
    res.on('data', function(chunk) {
        //Handle chunk data
    });
    res.on('end', function() {
        // parse xml
        console.log(res.statusCode);
    });
    // or you can pipe the data to a parser
    //res.pipe(dest);
});
req.on('error', function(err) {
    // debug error
    console.log('error');
});
When I run the app (node app.js), it outputs the 403 status code.
I have tried this code with other sites and it works fine, just not against Squarespace sites. Any idea whether this needs configuration on SS, or something else I need to do in Node?
The problem is that the remote server expects/requires a User-Agent header, and Node does not send one automatically. Add it and you should get back a 200 response:
// ...
url = 'http://www.letsmoveschools.org/';
var opts = require('url').parse(url);
opts.headers = {
    'User-Agent': 'javascript'
};
var req = http.get(opts, function(res) {
    // ...
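For completeness, a self-contained version of the fix (a sketch assembled from the snippets above; the User-Agent value is arbitrary, any non-empty string should do):
var http = require('http');
var urlmod = require('url');

// Parse the URL into an options object so a headers property can be attached
var opts = urlmod.parse('http://www.letsmoveschools.org/');
opts.headers = { 'User-Agent': 'javascript' };

http.get(opts, function(res) {
    console.log(res.statusCode); // should now print 200 instead of 403
    res.resume(); // drain the response so the socket is released
});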

Restify: retrieve external URL content (ISO-8859-1) and return it to the requesting client (UTF-8)

I have a very simple JavaScript program using Node. The purpose of the script is to:
Open a listening socket
Add a handler for the URL /test with the HTTP method GET
When /test is requested, fetch another URL located on an external web site; this web site is encoded as ISO-8859-1
Package the data returned from the external web site in a JSON structure and return it to the requesting client encoded as UTF-8
So far I have created the following code:
var Buffer = require('buffer').Buffer;
var iconv = require('iconv-lite');
var urllib = require('url');
var restify = require('restify');

var server = restify.createServer();
server.use(restify.bodyParser());
server.get('/test', test);
server.listen(8080, function() {
    console.log('%s listening at %s', server.name, server.url);
});

function test(req, res, next) {
    console.log('TEST');
    var httpClient = restify.createStringClient({ url: "http://dl.dropboxusercontent.com" });
    httpClient.get("/u/815962/iso-8859-1.html", function(cerr, creq, cres, cdata) {
        cdata = iconv.decode(cdata, 'iso-8859-1');
        res.send(200, {"Data": cdata});
    });
}
I have set up a test document used in the code above. The test document is ISO-8859-1 encoded and contains the national letters "ÅÄÖåäö". When it is returned to the client and read as UTF-8, I receive "ýýýýýý".
It really seems that this is a bug in Restify. The following example shows the different results when using Restify and the request lib:
var request = require('request');
var iconv = require('iconv');
var restify = require('restify');

var ic = new iconv.Iconv('iso-8859-1', 'utf-8');

request.get({ url: 'http://dl.dropboxusercontent.com/u/815962/iso-8859-1.html', encoding: null }, function(err, res, body) {
    var buf = ic.convert(body);
    var utf8String = buf.toString('utf-8');
    console.log(utf8String);
});

var httpClient = restify.createStringClient({ url: "http://dl.dropboxusercontent.com" });
httpClient.get("/u/815962/iso-8859-1.html", function(cerr, creq, cres, cdata) {
    var buf = ic.convert(cdata);
    var utf8String = buf.toString('utf-8');
    console.log(utf8String);
});
I have browsed the code of Restify on GitHub trying to find the issue, but I can't find it.
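A workaround sketch (not a Restify fix): as far as I can tell, createStringClient has already decoded the body as a UTF-8 string by the time the callback runs, so the ISO-8859-1 bytes are mangled before iconv ever sees them. Fetching the raw bytes with the core http module keeps the original buffer intact:
var http = require('http');
var iconv = require('iconv-lite');

http.get('http://dl.dropboxusercontent.com/u/815962/iso-8859-1.html', function(res) {
    var chunks = [];
    // Collect raw Buffers; do not set an encoding, so no conversion happens yet
    res.on('data', function(chunk) {
        chunks.push(chunk);
    });
    res.on('end', function() {
        // Decode the untouched ISO-8859-1 bytes into a proper JS string
        var text = iconv.decode(Buffer.concat(chunks), 'iso-8859-1');
        console.log(text); // "ÅÄÖåäö" survives the round trip
    });
});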

Incorrect header check error

When I try to run the code below:
var express = require('express');
var app = express();
var port = process.env.PORT || 5000;
var request = require('request');
var zlib = require('zlib');

app.listen(port, function() {
    console.log("Listening on " + port);
    makeRequest();
});

function makeRequest() {
    var url = 'https://api.stackexchange.com/2.1/search?pagesize=5&order=desc&sort=activity&intitle=ios development&site=stackoverflow';
    var headers = {'Accept-Encoding': 'gzip'};
    var response = request(url, headers);
    gunzipJSON(response);
}

function gunzipJSON(response) {
    var gunzip = zlib.createGunzip();
    var json = "";
    gunzip.on('data', function(data) {
        json += data.toString();
    });
    gunzip.on('end', function() {
        parseJSON(json);
    });
    response.pipe(gunzip);
}

function parseJSON(json) {
    var json = JSON.parse(json);
    if (json.items.length) {
        for (var i in json.items) {
            console.log(json.items[i].title + '\n' + json.items[i].link);
        }
    }
}
I get an error saying:
Error: incorrect header check
at Zlib._binding.onerror (zlib.js:286:17)
I am passing my own gzipped URL in the code.
Any help would be really useful. Thanks.
Old question (and the request library is now deprecated), but better late than never:
Interestingly, the code in question works for me on Node.js version 15.13.0, but not on 14.16.0 (keeping the version of request the same, the latest 2.88.2).
However, just using the following simple code works on 14.16.0, but not on 15.13.0!:
request(url, function (error, response, body) {
    console.log(JSON.parse(body));
});
This means that, for some reason, on 14.16.0 the response body is automatically unzipped (hence the above snippet works), while on 15.13.0 the response body is kept compressed, so explicit decompression is needed.
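A version-independent sketch (assuming request 2.x, e.g. the 2.88.2 mentioned above): passing gzip: true makes request send Accept-Encoding: gzip and transparently decompress the response itself, so the behavior no longer depends on the Node version:
var request = require('request');

request({ url: url, gzip: true, json: true }, function (error, response, body) {
    if (error) return console.error(error);
    console.log(body.items); // already decompressed and parsed
});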

Simple node.js proxy by piping http server to http request

Trying to learn more about Node.js by making a simple HTTP proxy server. The use scenario is simple: user -> proxy -> server -> proxy -> user
The following code works until the last step. I couldn't find a way to pipe the connector's output back to the user.
#!/usr/bin/env node
var
    url = require('url'),
    http = require('http'),
    acceptor = http.createServer().listen(3128);

acceptor.on('request', function(request, response) {
    console.log('request ' + request.url);
    request.pause();
    var options = url.parse(request.url);
    options.headers = request.headers;
    options.method = request.method;
    options.agent = false;
    var connector = http.request(options);
    request.pipe(connector);
    request.resume();
    // connector.pipe(response); // doesn't work
    // connector.pipe(request); // doesn't work either
});
Using tcpflow I can see the incoming request from the browser, then the outgoing proxy request, then the server response back to the proxy. Somehow I couldn't manage to retransmit the response back to the browser.
What is the proper way to implement this logic with pipes?
You don't have to 'pause'; just 'pipe' is OK:
var connector = http.request(options, function(res) {
    res.pipe(response, {end: true}); // tell 'response' end=true
});
request.pipe(connector, {end: true});
The HTTP request will not finish until you tell it 'end'.
OK. Got it.
UPDATE: NB! As reported in the comments, this example doesn't work anymore, most probably due to the Streams2 API change (Node 0.9+).
Piping back to the client has to happen inside the connector's callback, as follows:
#!/usr/bin/env node
var
    url = require('url'),
    http = require('http'),
    acceptor = http.createServer().listen(3128);

acceptor.on('request', function(request, response) {
    console.log('request ' + request.url);
    request.pause();
    var options = url.parse(request.url);
    options.headers = request.headers;
    options.method = request.method;
    options.agent = false;
    var connector = http.request(options, function(serverResponse) {
        serverResponse.pause();
        response.writeHeader(serverResponse.statusCode, serverResponse.headers);
        serverResponse.pipe(response);
        serverResponse.resume();
    });
    request.pipe(connector);
    request.resume();
});
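For a quick test (assuming the proxy is running locally), point an HTTP client at it, e.g.:
curl --proxy http://localhost:3128 http://www.letsmoveschools.org/
Note that this proxy only handles plain HTTP; HTTPS would additionally require implementing the CONNECT method.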
I used the examples from this post to proxy http/s requests, but ran into the problem that cookies were lost somewhere.
To fix that, you need to forward the headers from the proxied response.
Below is a working example:
const http = require('http');
const url = require('url');
const acceptor = http.createServer().listen(3128);

acceptor.on('request', function(request, response) {
    // Build the outgoing request options from the incoming request
    const options = url.parse(request.url);
    options.headers = request.headers;
    options.method = request.method;

    const req = http.request(options, function(res) {
        // Forward the status code and headers (including Set-Cookie) to the client
        response.writeHead(res.statusCode, res.headers);
        return res.pipe(response, {end: true});
    });
    request.pipe(req, {end: true});
});
