I am trying to fetch JSON from an API using the code below, but I keep getting the error "Unexpected end of JSON input":
var express = require("express");
var app = express();
var body = require("body-parser");
var https = require("https");

app.get("/results", function (req, res) {
  https.get("https://www.omdbapi.com/?apikey=d49698c3&s=harry", function (response) {
    response.on("data", function (data) {
      var got = JSON.parse(data);
      res.send(got.Title);
    });
  });
});
There are a couple of wrong assumptions in your code.
1 - Looking at the data, the property Title lives on each object inside the Search property, which is an array. So even once the JSON is correctly parsed, got.Title will be undefined.
2 - The data event of the response object can fire multiple times before you have all the data; each time it fires, you receive one chunk of the data.
Once all the data has been sent, the end event fires, and that is the place to parse the JSON.
Your code is trying to parse an incomplete JSON string: just the first chunk.
The most common approach is to declare an array outside the functions that handle the data and end events. On every data event you push the chunk into that array, and on the end event you concatenate the chunks and parse the result.
Check this out:
var express = require('express')
var app = express()
var body = require('body-parser')
var https = require('https')

app.get('/results', function (req, res) {
  https.get('https://www.omdbapi.com/?apikey=d49698c3&s=harry', function (response) {
    const chunks = []
    response.on('data', function (chunk) {
      chunks.push(chunk)
    })
    response.on('end', function () {
      const data = Buffer.concat(chunks)
      var got = JSON.parse(data)
      // Try this one out as well
      // res.json(got)
      res.send(got.Search[0].Title)
    })
  })
})

app.listen(3000)
I just tested the code above and it works like a charm.
With a large JSON payload the data still arrives in chunks: receive every chunk on the data event, append it as soon as it arrives, and parse the concatenated JSON on the end event.
const express = require("express");
const app = express();
const body = require("body-parser");
const https = require("https");

app.get("/results", function (req, res) {
  https.get("https://www.omdbapi.com/?apikey=d49698c3&s=harry", function (response) {
    let finalData = '';
    response.on("data", function (data) {
      finalData += data.toString();
    });
    response.on("end", function () {
      const parsedData = JSON.parse(finalData);
      // Title lives on the objects inside the Search array
      res.send(parsedData.Search[0].Title);
    });
  });
});

app.listen(3000)
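As a hedged aside (not part of the answer above): JSON.parse throws on a malformed payload and the request itself can fail, so in practice you would guard both. A minimal sketch, assuming url holds the OMDb URL from the snippet above and reusing Express's res:

https.get(url, function (response) {
  let finalData = '';
  response.on("data", function (data) {
    finalData += data.toString();
  });
  response.on("end", function () {
    try {
      const parsedData = JSON.parse(finalData);
      res.send(parsedData.Search[0].Title);
    } catch (e) {
      // malformed or unexpected payload
      res.status(502).send("Upstream returned invalid JSON");
    }
  });
}).on("error", function (e) {
  // network-level failure (DNS, TLS, connection reset, ...)
  res.status(502).send("Request failed: " + e.message);
});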
For everyone who's having the same issue, I found a solution:
As @Maestre San explained, you're parsing incomplete JSON data, and that's why you're getting the error. I tried both suggested solutions but they still weren't working for me. After some research I found that you first need to accumulate the data in a variable that starts out empty, and only once the data stream is done can you parse it:
response.on("end", function () {
var jsonParse = JSON.parse(newsItems);
});
The full code is:
app.get("/", function (req, res) {
const queryString = "mamamoo";
const url = "https://newsapi.org/v2/everything?apiKey=<API_KEY>&qInTitle=" + queryString;
https.get(url, function (response) {
var newsItems = '';
response.on("data", function (data) {
newsItems += data;
});
response.on("end", function () {
var jsonParse = JSON.parse(newsItems);
console.log(jsonParse);
});
});
});
To explain what I did:
First, I handle any request made to my server. I then used the native Node https module to make a request to the API and capture the response (which carries the statusCode along with the response body).
Since I want the body, I attached "response.on" handlers: on each data event I append the chunk to a variable that starts out empty.
Why accumulate into a variable first? Because parsing each chunk on its own throws "Unexpected end of JSON input", meaning I'm parsing incomplete JSON data.
The final step is to parse the data WHEN the stream is done, inside the "end" handler.
Hope this helped.
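One small addition (my own hedged note, not part of the original answer): the end handler above only logs the result, so the client request never completes; sending the parsed data back closes it out.

response.on("end", function () {
  var jsonParse = JSON.parse(newsItems);
  // respond so the client request doesn't hang
  res.send(jsonParse);
});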
I wanted to get data from two tables in my Firebase DB: booths and hospitals. From each record in booths, I want to fetch the matching record in hospitals based on its hospital_fk. Here is my script for getting the data:
router.get('', function(req, res){
  var booths = database.ref('booths');
  var hospital = database.ref('hospitals');
  booths.once('value', function (snapshot) {
    var dataSet = [];
    snapshot.forEach(function (childSnapshot) {
      var childKey = childSnapshot.key;
      var fk = snapshot.child(childKey).val();
      hospital.child(childSnapshot.val().hospital_fk).on('value', hospital => {
        var childData = _.assign(fk, hospital.val());
        dataSet.push({
          childKey: childKey,
          childData: childData
        });
        res.json(dataSet);
      });
    });
  });
});
My problem is that only the first record is returned, and I'm also getting an error that says FIREBASE WARNING: Exception was thrown by user callback. Error: Can't set headers after they are sent.
Any idea how to get all the records, and what's the best approach to joining two tables?
When you call res.json(dataSet) it sets a header that your response is JSON, and sends the JSON. You can only set headers on the response before sending data, so the second time you make this call, Node.js will rightfully throw an error saying that it can't set the header.
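To make the failure mode concrete, here is a minimal sketch of the anti-pattern (the route name is made up): the second res.json call is what throws.

router.get('/broken', function (req, res) {
  res.json({ first: true });  // headers are set and the response is sent here
  res.json({ second: true }); // throws: Can't set headers after they are sent
});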
What you'll need to do is first gather all joined data into a single JSON response, and then send it in one go after you've loaded all of them. To do this you use a list of promises and Promise.all():
router.get('', function (req, res) {
  var booths = database.ref('booths');
  var hospital = database.ref('hospitals');
  booths.once('value', function (snapshot) {
    var boothKeys = [];
    var boothValues = [];
    var promises = [];
    snapshot.forEach(function (childSnapshot) {
      // remember each booth so it can be merged with its hospital below
      boothKeys.push(childSnapshot.key);
      boothValues.push(childSnapshot.val());
      promises.push(hospital.child(childSnapshot.val().hospital_fk).once('value'));
    });
    Promise.all(promises).then(function (snapshots) {
      var dataSet = [];
      snapshots.forEach(function (hospitalSnapshot, i) {
        dataSet.push({
          childKey: boothKeys[i],
          childData: _.assign(boothValues[i], hospitalSnapshot.val())
        });
      });
      res.json(dataSet);
    });
  });
});
Now the code only calls res.json(dataSet) once, after it's gotten all of the hospital data.
You'll note that I also changed your on('value' to once('value', since I doubt you'll want to keep the listener active for more than just one read.
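For context on that change, a short illustration (a hedged sketch; fkValue is a placeholder for the hospital_fk): on('value') stays attached and fires again on every write, while once('value') reads a single time, and it also returns a promise, which is what Promise.all relies on above.

// fires now AND again on every future change to this hospital
hospital.child(fkValue).on('value', function (snap) { /* ... */ });

// fires exactly once with the current value, then detaches
hospital.child(fkValue).once('value', function (snap) { /* ... */ });

// once() also returns a promise resolving to the DataSnapshot
hospital.child(fkValue).once('value').then(function (snap) { /* ... */ });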
I'm trying to fetch JSON from an API, but only half of the response is received. How do I get the full response?
var request = require('request');
var url_to_check = 'http://example.com/api/test';

request.get(url_to_check).on('data', function(data) {
  // Only half of the data is printed (8192). Remaining bytes are lost.
  console.log(data.toString());
})
Your code is correct; the only mistake is that you are streaming the response data, so you won't get the whole body in a single 'data' event if the response is large. You have to collect the chunks and consolidate them on the 'end' event. Check this code snippet:
var request = require('request');
var url = 'https://reqres.in/api/users';

var req = request.get(url);
var data = [];
req.on('data', function(chunk){
  data.push(chunk);
});
req.on('end', function(){
  // parse here if you need an object: JSON.parse(Buffer.concat(data).toString())
  console.log(Buffer.concat(data).toString());
});
And if you don't want to stream and pipe the data, and the response size is small, then you can try this:
request.get(url, function(err, response, responseBody) {
  if (!err) {
    // responseBody is the complete body as a string
    console.log(responseBody);
  } else {
    // handle error here
  }
});
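A further shortcut, if I recall the request API correctly: passing json: true makes request parse the body for you, so no manual JSON.parse is needed.

request.get({ url: url, json: true }, function (err, response, body) {
  if (!err) {
    // body is already a parsed object here
    console.log(body);
  }
});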
I have been trying some web scraping and I can't figure out the problem I'm facing, which I assume is a simple one.
What I want to do is get each product title from a particular website and print them in JSON format. Every product title comes out fine when I log it to the console, but when I try to send them as JSON, only the first element shows up and I get an error. I believe there is some kind of asynchronous problem, or maybe I'm missing a loop before sending the JSON, so only the first 'product title' gets sent.
Thanks for any response.
Cheers
var express = require('express'),
    app = express(),
    request = require('request'),
    cheerio = require('cheerio'),
    port = process.env.PORT || 3000,
    bodyParser = require('body-parser');

app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));

app.get("/product", function (req, res) {
  request('someWebsite', (error, response, html) => {
    if (!error && response.statusCode == 200) {
      const $ = cheerio.load(html);
      $('.product').each((i, el) => {
        var productTitle = $(el)
          .find('.product-name')
          .text();
        // console.log(productTitle);
        res.send({
          message: productTitle
        });
      });
    }
  });
});

app.listen(port, () => {
  console.log('App is running on port ' + port);
});
You are calling res.send() inside a loop, but you can send data and close the connection with the client (which is what res.send() actually does) only once.
If the JSON object's size is acceptable, you can build the JSON object while iterating over the .product elements, and send it back to the client once you have finished parsing:
request('someWebsite', (error, response, html) => {
  if (!error && response.statusCode == 200) {
    const $ = cheerio.load(html);
    let obj = {}; // create an empty object
    $('.product').each((i, el) => {
      var productTitle = $(el)
        .find('.product-name')
        .text();
      // do not call res.send() inside "each"; collect the titles instead
      obj[i] = productTitle;
    });
    res.send(obj); // call res.send() AFTER the "each" loop
  }
});
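Another variant worth knowing (a sketch, under the same assumptions as above): cheerio's .map(...).get() collapses the loop into one expression that returns a plain array.

const titles = $('.product')
  .map((i, el) => $(el).find('.product-name').text())
  .get(); // .get() converts the cheerio collection to a plain array
res.send({ titles: titles });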
I'm using the Request module to download files, but I'm not quite sure how to pipe the response to an output stream when the filename must come from the 'Content-Disposition' header. So basically, I need to read the response until the header is found, and then pipe the rest to that filename.
The examples show something like:
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'));
Whereas what I want to do is (pseudocode):
var req = request('http://example.com/download_latest_version?token=XXX');
var filename = req.response.headers['Content-Disposition'];
req.pipe(fs.createWriteStream(filename));
I could get the filename using the Request callback:
request(url, function(err, res, body) {
// get res headers here
});
But wouldn't that negate the benefits of using pipe and not loading the downloaded file into memory?
I'm requesting an image from Yahoo and it isn't using the content-disposition header, but I am extracting the date and content-type headers to construct a filename. This seems close enough to what you're trying to do...
var request = require('request'),
    fs = require('fs');

var url2 = 'http://l4.yimg.com/nn/fp/rsz/112113/images/smush/aaroncarter_635x250_1385060042.jpg';

var r = request(url2);
r.on('response', function (res) {
  res.pipe(fs.createWriteStream('./' + res.headers.date + '.' + res.headers['content-type'].split('/')[1]));
});
Ignore my image choice please :)
This question has been around a while, but I faced the same problem today and solved it differently:
var Request = require( 'request' ),
    Fs = require( 'fs' );

// RegExp to extract the filename from Content-Disposition
// (no /g flag: a global regexp keeps state in lastIndex across exec() calls,
// which breaks the match when the same regexp is reused for a second download)
var regexp = /filename="(.*)"/i;

// initiate the download
var req = Request.get( 'url.to/somewhere' )
  .on( 'response', function( res ){
    // extract filename (note: exec() returns null when the header has no match)
    var filename = regexp.exec( res.headers['content-disposition'] )[1];
    // create file write stream
    var fws = Fs.createWriteStream( '/some/path/' + filename );
    // setup piping
    res.pipe( fws );
    res.on( 'end', function(){
      // go on with processing
    });
  });
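One refinement to this approach (my hedged note): the response's end event fires when the last chunk has been read, not when it has been flushed to disk, so it is safer to continue processing on the write stream's finish event.

res.pipe( fws );
fws.on( 'finish', function(){
  // the file is fully written at this point; go on with processing
});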
Here's my solution:
var fs = require('fs');
var request = require('request');
var through2 = require('through2');

var req = request(url);

req.on('error', function (e) {
  // Handle connection errors
  console.log(e);
});

var bufferedResponse = req.pipe(through2(function (chunk, enc, callback) {
  this.push(chunk);
  callback()
}));

req.on('response', function (res) {
  if (res.statusCode === 200) {
    try {
      var contentDisposition = res.headers['content-disposition'];
      var match = contentDisposition && contentDisposition.match(/(filename=|filename\*='')(.*)$/);
      var filename = match && match[2] || 'default-filename.out';
      var dest = fs.createWriteStream(filename);
      dest.on('error', function (e) {
        // Handle write errors
        console.log(e);
      });
      dest.on('finish', function () {
        // The file has been downloaded
        console.log('Downloaded ' + filename);
      });
      bufferedResponse.pipe(dest);
    } catch (e) {
      // Handle request errors
      console.log(e);
    }
  }
  else {
    // Handle HTTP server errors
    console.log(res.statusCode);
  }
});
The other solutions posted here use res.pipe, which can fail if the content is transferred using gzip encoding, because the response stream contains the raw (compressed) HTTP data. To avoid this problem you have to use request.pipe instead. (See the second example at https://github.com/request/request#examples.)
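For illustration, a minimal sketch of the difference (the URL is a placeholder; per the request docs, gzip: true makes request decode the body it emits, but not the raw response stream):

var request = require('request');
var fs = require('fs');

// request.pipe: request decodes the transfer encoding (e.g. gzip) before piping
request({ url: 'http://example.com/file', gzip: true })
  .pipe(fs.createWriteStream('decoded.out'));

// by contrast, res.pipe inside the 'response' handler pipes the raw HTTP body,
// which is still compressed when the server used gzip encoding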
When using request.pipe I was getting an error: "You cannot pipe after data has been emitted from the response.", because I was doing some async stuff before actually piping (creating a directory to hold the downloaded file). I also had some problems where the file was being written with no content, which might have been due to request reading the HTTP response and buffering it.
So I ended up creating an intermediate buffering stream with through2, so that I could pipe the request to it before the response handler fires, then later piping from the buffering stream into the file stream once the filename is known.
Finally, I'm parsing the content disposition header whether the filename is encoded in plain form or in UTF-8 form using the filename*=''file.txt syntax.
I hope this helps someone else who experiences the same issues that I had.
I'm new to node.js, and I'm trying to call a service, parse its data and return it as part of a view. I can't seem to get the request to block until the response is complete. The console always logs 'wrong' before 'right' (returning the 1,2,3 array). What am I missing?
app.js
var reading = require('./reading');

app.get('/reading', function(req, res){
  res.render('reading/index.stache', {
    locals : { ids : reading.list },
    partials : {
      list : '{{#ids}}{{.}}<br />{{/ids}}'
    }
  });
});
reading.js
var request,
    http = require('http'),
    host = 'google.com',
    path = '/';

var list = function(){
  var connection = http.createClient(80, host),
      request = connection.request(path);
  request.addListener('response', function(response){
    var data = '';
    response.addListener('data', function(chunk){
      data += chunk;
    });
    response.addListener('end', function(){
      console.log('right')
      //var results = JSON.parse(data);
      // i need results from json
      return [88234, 883425, 234588];
    });
  });
  request.end();
  console.log('wrong');
  return [1, 2, 3];
}

module.exports.list = list;
Of course you can't get the request to block until the response is back.
That's because there is communication latency between sending the request off and getting the response back. It would be stupid to wait and do nothing whilst that latency is happening.
Use callbacks and asynchronous control flow.
var list = function(callback){
  var connection = http.createClient(80, host),
      request = connection.request(path);
  request.addListener('response', function(response){
    var data = '';
    response.addListener('data', function(chunk){
      data += chunk;
    });
    response.addListener('end', function(){
      console.log('right')
      // USE A CALLBACK >:(
      callback([88234, 883425, 234588]);
    });
  });
  request.end();
}
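And the matching change on the app.js side (a sketch; the render options are carried over from the question):

app.get('/reading', function (req, res) {
  // list() now hands the ids to a callback instead of returning them
  reading.list(function (ids) {
    res.render('reading/index.stache', {
      locals : { ids : ids },
      partials : {
        list : '{{#ids}}{{.}}<br />{{/ids}}'
      }
    });
  });
});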
If you want to run anything synchronously, have a look at the sync module. It's based on fibers.
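As a closing aside (my addition, not the original answer's): you can get the same sequential look today without fibers by wrapping list in a Promise and using async/await; a minimal sketch:

var listAsync = function () {
  return new Promise(function (resolve) {
    list(function (ids) {
      resolve(ids);
    });
  });
};

// usage: reads sequentially, but nothing blocks the event loop
app.get('/reading', async function (req, res) {
  var ids = await listAsync();
  res.render('reading/index.stache', { locals : { ids : ids } });
});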