I'm calling a Node.js process to scrape Twitter and create a graph from the data...
Here's the code:
var http = require('http');
var url = require('url');
var exec = require('child_process').exec;
var fs = require('fs');

var filepath = 'filenamegraph.png';

http.createServer(function (req, res) {
  var urlstr = url.parse(req.url, true);
  var twit = exec("php -f twitter-scraper.php " + urlstr.query.term, function (error, stdout, stderr) {
    var graphstring = stdout;
    var stream = fs.createWriteStream("graph.txt");
    stream.once('open', function (fd) {
      stream.write(graphstring, function () {
        exec('dot -T png -o filenamegraph.png graph.txt', function () {
          fs.stat(filepath, function (err, stat) {
            res.writeHead(200, { 'Content-Type': 'image/png', 'Content-Length': stat.size });
            fs.readFile(filepath, function (err, file_conts) {
              res.write(file_conts);
              res.end();
            });
          });
        });
      });
    });
  });
}).listen(1337);
All that is hunky-dory...
Now I call this with:
$.ajax({
  url: 'http://eavesdropper.dev:1337',
  data: { term: $.url.parse().params.term },
  success: function (data) {
    var res = $.parseJSON(data);
    console.log(res);
    $("body").append('<img src="filenamegraph.png" />');
  },
  error: function (jqXHR, textStatus, errorThrown) {
    console.log(jqXHR, 'errror - ' + textStatus + " - " + errorThrown + ' ||');
  },
  complete: function (jqXHR, textStatus) {
    console.log(jqXHR, textStatus);
  }
});
The work behind the scenes is fine, BUT I get this...
Object { readyState=0, status=0, statusText="error"} error - error - ||
Not sure what the next step is, so any tips are much appreciated.
PS: Running the script directly in the browser correctly produces the graph, so the problem is surely with the data being returned to the AJAX call.
Your server is delivering the contents of the newly-created graphic file, but all your client is doing is trying to parse the contents as if it were JSON (which it obviously isn't, causing your error messages) and then inserting an <img> element that will cause the browser to request "filenamegraph.png" from whatever server delivered the original HTML.
It looks to me like you don't need to do an AJAX request at all; just append <img src='http://eavesdropper.dev:1337/?your_params' /> and the browser will automatically get and display the generated image from your server.
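For example, a minimal sketch of that approach, reusing the same term parameter the AJAX call was sending (no response parsing needed, since the browser fetches and renders the PNG itself):
// Build the image URL directly instead of going through $.ajax
var term = $.url.parse().params.term;
$("body").append('<img src="http://eavesdropper.dev:1337/?term=' + encodeURIComponent(term) + '" />');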
I created a simple REST API to return a media file:
var fs = require('fs');
var express = require('express');
var app = express();

app.get('/file.ogg', function (req, res) {
  res.set({
    'Content-Type': 'audio/ogg',
    'Transfer-Encoding': 'chunked'
  });
  var inputStream = fs.createReadStream(__dirname + '/1.ogg');
  inputStream.pipe(res);
});

var server = app.listen(3002);
If I call http://127.0.0.1:3002/file.ogg, the server reads file 1.ogg and returns it in the response.
Now I use a WebSocket to get file data from an external device:
socket.on('message', function incoming(message) {
  var data = JSON.parse(message);
  console.log('fileName: ' + data.fileName);
  console.log('fileData : ' + data.fileData.length);

  var path = __dirname + '/_' + data.fileName;
  var buf = Buffer.from(data.fileData, 'base64');
  console.log('buf : ' + buf.length);

  // save data to file
  fs.appendFile(path, buf, function (err) {
    if (err) throw err;
  });
});
I want to return the data (buf) in the response like in the first example, but without saving anything. How can I do this?
If your files are large, you may want to pull in an additional websocket streaming library which will allow you to stream the data back to the client. socket.io-stream was created specifically to allow that. From their examples:
// send data
ss(socket).on('file', function (stream) {
  fs.createReadStream('/path/to/file').pipe(stream);
});

// receive data
ss(socket).emit('file', stream);
stream.pipe(fs.createWriteStream('file.txt'));
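Alternatively, if the files are small enough to hold in memory, you could skip the disk entirely: keep the decoded Buffer around and write it straight into the HTTP response. A rough sketch, assuming a simple in-memory map keyed by file name (the fileCache name and the '1.ogg' lookup are illustrative, not part of your code):
var fileCache = {}; // hypothetical in-memory store, keyed by file name

socket.on('message', function incoming(message) {
  var data = JSON.parse(message);
  // keep the decoded Buffer in memory instead of appending it to a file
  fileCache[data.fileName] = Buffer.from(data.fileData, 'base64');
});

app.get('/file.ogg', function (req, res) {
  var buf = fileCache['1.ogg']; // whatever name the device sent
  if (!buf) return res.sendStatus(404);
  res.set({ 'Content-Type': 'audio/ogg', 'Content-Length': buf.length });
  res.end(buf); // nothing is written to disk
});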
I'm trying to make an HTTP request in Node to a data:application URL that gets generated, then write the results to a file.
var fileName = "csv-" + uuid() + ".csv";
var fileLocation = process.env.USERPROFILE + "\\" + fileName;
var file = fs.createWriteStream(fileLocation);
var exportCsvData = 'data:application/csv;charset=utf-8,' + encodeURIComponent(data);

var options = {
  host: location.hostname,
  port: location.port,
  path: '/' + exportCsvData
};

console.log(exportCsvData);

http.get(options, function (response) {
  // write all of our message parts to the file
  response.pipe(file);
});
The file gets created, but the only thing written to it is "Not Found". However, when I take the exportCsvData that gets logged out and paste it into Chrome, it downloads the data just fine.
I've also tried doing:
http.get(options, function (res) {
  res.on('data', function (chunk) {
    file.write(chunk);
  });
  res.on('end', function () {
    file.end();
  });
});
With the same result.
Edit: I've just realized that in this specific case, I can just grab data and write it out to a CSV file directly, and it will work just fine.
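For reference, a minimal sketch of that simpler route, writing the in-memory data straight to the file and skipping the HTTP round-trip (same fileLocation and data variables as above):
var fs = require('fs');

// data already holds the CSV text, so write it out directly
fs.writeFile(fileLocation, data, function (err) {
  if (err) throw err;
  console.log('CSV written to ' + fileLocation);
});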
So I want to use this: (taken from their API site -> node.js documentation)
https://github.com/blockchain/api-v1-client-node
Receiving payments:
https://github.com/blockchain/api-v1-client-node/blob/master/docs/Receive.md
var blockchain = require('blockchain.info');
var identifier = 'myidentifier';
var password = 'mypassword';
var myWallet = new blockchain.MyWallet(identifier, password);
var myBTCadress = '14Q3ufL1BUHtWskBKtsshVDATRY65TaJMB';
OK, so the receiving part:
var receive = new blockchain.Receive( [confirmations: 1], ? ); // What do I need to put here?
Documentation says:
callbackURL: the url to which the callback should be sent (string)
I don't understand what URL it should go to?!
The callback URL should be the one that redirects back to your site. So setup a callback url with blockchain like...
https://www.yoursite.com/callback/blockchain
Assuming you are using something like Express in your app, make a route like so:
app.get('/callback/blockchain', function (req, res) {
  // Stuff here
});
You will probably need to include:
var https = require('https');
That way you can then set up your logic inside, for example...
// Stuff here
var options = {
  host: 'api.blockchain.info',
  path: '/some/path/',
  port: 443,
  method: 'GET'
};

var request = https.request(options, function (response) {
  var body = "";
  response.on('data', function (data) {
    body += data;
  });
  response.on('end', function () {
    res.send(JSON.parse(body));
  });
});

request.on('error', function (e) {
  console.log('Problem with request: ' + e.message);
});

request.end();
That will, for example, output your request as JSON on whatever page you have app.get('whateverurl') set to.
I spawned the following child: var spw = spawn('ping', ['-n','10', '127.0.0.1']) and I would like to receive the ping results on the client side (browser) one by one, not as a whole.
So far I tried this:
app.get('/path', function (req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
}
and that:
...
spw.stdout.pipe(res);
...
In both cases the browser waits for all 10 pings to complete, and then prints the result as a whole. I would like to have them one by one; how can I accomplish that?
(Client is just making a call to .../path and console.logs the result)
EDIT: Although I do believe that WebSockets are necessary to implement this, I just want to know whether there are any other ways. I saw several confusing SO answers and blog posts (in this post, at step one, the OP streams the logs to the browser) which didn't help, so I decided to put a bounty on this for some attention.
Here's a complete example using SSE (Server-Sent Events). This works in Firefox and probably Chrome too:
var cp = require("child_process"),
    express = require("express"),
    app = express();

app.configure(function () {
  app.use(express.static(__dirname));
});

app.get('/msg', function (req, res) {
  res.writeHead(200, { "Content-Type": "text/event-stream",
                       "Cache-control": "no-cache" });

  var spw = cp.spawn('ping', ['-c', '100', '127.0.0.1']),
      str = "";

  spw.stdout.on('data', function (data) {
    str += data.toString();

    // just so we can see the server is doing something
    console.log("data");

    // Flush out line by line.
    var lines = str.split("\n");
    for (var i in lines) {
      if (i == lines.length - 1) {
        str = lines[i];
      } else {
        // Note: The double-newline is *required*
        res.write('data: ' + lines[i] + "\n\n");
      }
    }
  });

  spw.on('close', function (code) {
    res.end(str);
  });

  spw.stderr.on('data', function (data) {
    res.end('stderr: ' + data);
  });
});

app.listen(4000);
And the client HTML:
<!DOCTYPE html>
<html>
<body>
  <ul id="eventlist"> </ul>
  <script>
    var eventList = document.getElementById("eventlist");
    var evtSource = new EventSource("http://localhost:4000/msg");

    var newElement = document.createElement("li");
    newElement.innerHTML = "Messages:";
    eventList.appendChild(newElement);

    evtSource.onmessage = function (e) {
      console.log("received event");
      console.log(e);
      var newElement = document.createElement("li");
      newElement.innerHTML = "message: " + e.data;
      eventList.appendChild(newElement);
    };

    evtSource.onerror = function (e) {
      console.log("EventSource failed.");
    };

    console.log(evtSource);
  </script>
</body>
</html>
Run node index.js and point your browser at http://localhost:4000/client.html.
Note that I had to use the "-c" option rather than "-n" since I'm running OS X.
If you are using Google Chrome, changing the content type to "text/event-stream" does what you're looking for.
res.writeHead(200, { "Content-Type": "text/event-stream" });
See my gist for complete example: https://gist.github.com/sfarthin/9139500
This cannot be achieved with the standard HTTP request/response cycle. Basically what you are trying to do is make a "push" or "realtime" server. This can only be achieved with xhr-polling or websockets.
Code Example 1:
app.get('/path', function (req, res) {
  ...
  spw.stdout.on('data', function (data) {
    var str = data.toString();
    res.write(str + "\n");
  });
  ...
}
This code never sends an end signal and therefore will never respond. If you were to add a call to res.end() within that event handler, you would only get the first ping, which is the expected behavior because you are ending the response stream after the first chunk of data from stdout.
Code Example 2:
spw.stdout.pipe(res);
Here stdout is flushing the packets to the browser, but the browser does not render the data chunks until all packets are received. That is why it waits 10 seconds and then renders the entirety of stdout. The major benefit of this method is that the response is not buffered in memory before being sent, keeping your memory footprint light.
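For completeness, a rough client-side sketch of consuming the piped response chunk by chunk with a plain XMLHttpRequest, reading whatever has arrived so far on each progress event (this assumes nothing between the server and the browser buffers the response):
var xhr = new XMLHttpRequest();
var seen = 0;
xhr.open('GET', '/path', true);
xhr.onprogress = function () {
  // responseText grows as chunks arrive; log only the newly received part
  var chunk = xhr.responseText.slice(seen);
  seen = xhr.responseText.length;
  console.log(chunk);
};
xhr.onload = function () {
  console.log('done');
};
xhr.send();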
I am new to Node.js and JSON and am having trouble returning a JSON object to the client HTML page that's using $.getJSON.
In the example below, when I point the code at the api.twitter.... URL I can query and return the value. But when I point it to my own Node.js back end, which spits out the same JSON, the alert('inside callback: ' + data[0].is_translator); does not pop up. What am I doing wrong here? I would really appreciate some help.
This is the script code in my test.html that calls $.getJSON:
<script>
  //var url = "http://localhost:5000/searchPlaces?callback=?";
  function abc(result) {
    //var url = "http://localhost:5000/random"
    var url = "http://api.twitter.com/1/statuses/21947795900469248/retweeted_by.json?callback=?";
    alert('before $.getjson');
    $.getJSON(url, function (data) {
      alert('hello');
      alert('inside callback: ' + data[0].is_translator);
      abc(result.data);
    });
  }
  abc();
</script>
This is the code from my Node.js back end:
var http = require("http");
var url = require("url");
var port = process.env.PORT || 8888;

function start(route, handle) {
  function onRequest(request, response) {
    var postData = "";
    var pathname = url.parse(request.url).pathname;
    console.log("Request received for: " + pathname);

    response.writeHead(200, {"Content-Type": "application/json"});

    var otherObject = {"is_translator": false, "id": 644233, "followers_count": 77};
    response.write(
      JSON.stringify({
        anObject: otherObject
      })
    );
    response.end();
    // route(handle, pathname, response, request);
  }

  http.createServer(onRequest).listen(port);
  console.log("Server has started. Port: " + port);
}

exports.start = start;
The callback for $.getJSON() is called only if the JSON is valid. The URL for your Twitter API (http://api.twitter.com/1/statuses/21947795900469248/retweeted_by.json?callback=?) gives JSON with the following error on JSONLint.org:
Parse error on line 1:
([ { "is
^
Expecting '{', '['
You should maybe $.get() the URL and then validate the JSON yourself before using it.
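As an illustration, since the URL in the question ends in callback=?, jQuery treats the request as JSONP and expects the response wrapped in the generated callback name. A rough sketch of how the onRequest handler above could honor that (the callback handling is an assumption, not part of the original code):
function onRequest(request, response) {
  var query = url.parse(request.url, true).query;
  var payload = JSON.stringify({
    anObject: {"is_translator": false, "id": 644233, "followers_count": 77}
  });
  if (query.callback) {
    // JSONP: wrap the JSON in the callback name jQuery appended to the URL
    response.writeHead(200, {"Content-Type": "application/javascript"});
    response.end(query.callback + "(" + payload + ")");
  } else {
    response.writeHead(200, {"Content-Type": "application/json"});
    response.end(payload);
  }
}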