node.js streaming finance data from Yahoo or Google

UPDATE: I want to continuously receive data. Currently, this returns one data set. I think the only alternative is a polling/setInterval technique, which only approximates streaming.
Is it possible to retrieve streaming finance data from Yahoo or Google using node.js?
I know that they have no public API available for the data I'm trying to retrieve.
I've written this using express but the data is not streaming.
var express = require('express'),
    http = require('http'),
    app = express();

// Using either Google or Yahoo...
var options = { host: 'download.finance.yahoo.com', path: '/d/quotes.csv?s=GOOG&f=snr' };
//var options = { host: 'www.google.com', path: '/finance/info?client=ig&q=CSCO' };

http.get(options, function (response) {
  response.setEncoding('utf8');
  response.on('data', function (data) {
    console.log(data);
  });
  response.on('end', function () {
    console.log('End');
  });
});

app.listen(8080);
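
For what it's worth, here is a rough sketch of the polling/setInterval approach mentioned in the update, assuming the same Yahoo CSV endpoint; the 5-second interval and error handling are illustrative, and each tick is an independent request rather than a true stream:

var http = require('http');

// Poll the quote endpoint on a fixed interval and log each response.
setInterval(function () {
  http.get({
    host: 'download.finance.yahoo.com',
    path: '/d/quotes.csv?s=GOOG&f=snr'
  }, function (response) {
    var body = '';
    response.setEncoding('utf8');
    response.on('data', function (chunk) { body += chunk; });
    response.on('end', function () { console.log(body); });
  }).on('error', function (err) { console.error(err); });
}, 5000);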

Related

Node js(Express framework): not able to print number API from external server to client browser

I am trying to retrieve weather data from an external API, and when I console.log particular fields of the response they show up in my command prompt.
But when I use a GET route to send that data to the browser, I can only send string data like "description": "moderate rain"; sending number data like "temp": 27 crashes the app.
Node js code:
//jshint esversion:6
const express = require("express");
const app = express();
const https = require("https");

app.get("/", function(req, res) {
  const url = "https://api.openweathermap.org/data/2.5/weather?q=mumbai&appid=d88391210768983e6be06cdd76bdcde3&units=metric";
  https.get(url, function(response) {
    console.log(response.statusCode);
    response.on("data", function(data) {
      const weatherData = JSON.parse(data);
      const temp = weatherData.main.temp;
      const description = weatherData.weather[0].description;
      console.log(temp);
      console.log(description);
      res.send(temp); // temp is a number, so Express interprets it as an HTTP status code, which is what crashes the app
    });
  });
});

app.listen(3000, function() {
  console.log("Server is running on port: 3000");
});
You should ideally return JSON.
It can be:
res.send({temp: temp, description: description});
res.send expects a string, object, array, or Buffer.
You could do something like:
res.status(200).send(temp)
But sending a JSON response is preferable, and it scales better as well.
Another hack-style solution is:
res.send("" + temp)

Axios can GET but not POST to the same URL

I'm building a React app.
In one component I'm writing this GET request, which works:
In another component I'm writing this POST request:
Which then returns this 404 error:
I have no idea how my GET works but my POST returns "404: Not Found" when I'm requesting the same file both times.
UPDATE:
I'm running a node.js server now, but it's a bit of a Frankenstein's monster as this really isn't an area I understand well. Does anyone know what I'm doing wrong?
// Server setup from node.js website
const http = require('http');
const hostname = '127.0.0.1';
const port = 3000;

const server = http.createServer((req, res) => {
  res.statusCode = 200;
  res.setHeader('Content-Type', 'text/plain');
  res.end('Hello World\n');
});

server.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});

// Trying to listen for data from React app to feed into JSON (broken)
var express = require("express");
var myParser = require("body-parser");
var app = express();

app.use(myParser.urlencoded({extended : true}));

app.post("/scene-setup.json", function(request, response) {
  console.log(request.body); // This prints the JSON document received (if it is a JSON document)
  // NOTE: no response is ever sent here, so the client's POST request will hang
});

app.listen(3001);

// Updating JSON file with "obj" (working)
var jsonfile = require('jsonfile');
var file = './scene-setup.json';
var obj = {name: 'JP'};

jsonfile.writeFile(file, obj, function (err) {
  console.error(err);
});
Axios is used for making HTTP requests. So, you should have a backend server running that can handle these requests. I am not sure what exactly is the data that you want to save. If you need access to that data, should be saving it on the backend.
If you want to save some data just on the client side, HTML5 filesystem API might be something you want to look at. It can manage some data in the limited sandboxed part of user's filesystem.
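
If the goal is to accept the POST from the React app and persist it, a rough sketch of that endpoint might look like the following. It assumes body-parser's JSON parser (rather than the urlencoded one) and the jsonfile module already used in the question; the route and port mirror the question's code:

var express = require("express");
var bodyParser = require("body-parser");
var jsonfile = require("jsonfile");

var app = express();
app.use(bodyParser.json()); // parse application/json request bodies

app.post("/scene-setup.json", function (request, response) {
  // Persist whatever the client posted, then tell the client how it went
  jsonfile.writeFile("./scene-setup.json", request.body, function (err) {
    if (err) {
      return response.status(500).send({ error: "could not write file" });
    }
    response.send({ saved: true });
  });
});

app.listen(3001);

The axios POST would then presumably target http://localhost:3001/scene-setup.json with a JSON body.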

NodeJs Google Speech API Streaming

I've been trying to set up a node service for streaming live audio to the Google Speech API, but I've hit a problem that I think might be authentication-related.
The service is written in node using Express and BinaryServer. I'm receiving data without any problems (and have been able to save it to local disk, though that section of code isn't in the example below), but when I submit to the Google API I don't receive anything back. (Although if I remove the keyFilename from the client options, I get "Error: Could not load the default credentials", which is fair enough because I'm running outside GCE.)
var express = require("express");
var app = express();
var port = 54180;
var BinaryServer = require('binaryjs').BinaryServer;
var server = BinaryServer({
port: port
});
app.listen(port, function () {
console.log('server open on port ' + port);
});
binaryServer = BinaryServer({
port: 9001
});
binaryServer.on('connection', function (client) {
console.log('Binary Server connection started');
client.on('stream', function (stream, meta) {
console.log('>>>Incoming audio stream');
var speech = require('#google-cloud/speech')({
projectId: 'MYPROJECT-1234'
//keyFilename: '/config/KeyFile.json'
});
const request = {
config: {
encoding: 'LINEAR16',
sampleRate: 16000
},
singleUtterance: false,
interimResults: true
};
// Create a recognize stream
const recognizeStream = speech.createRecognizeStream(request)
.on('error', function (error) {
console.log('Error');
console.log(error)
})
.on('data', function (data) {
console.log('Data');
console.log(data);
});
// Send the microphone input to the Speech API
stream.pipe(recognizeStream);
stream.on('end', function () {
fileWriter.end();
recognizeStream.end();
console.log('||| Audio stream ended');
});
});
});
I'll also admit this is the first time I've tried to re-pipe to another API, so it could be that I've screwed that part up, but the default credentials message makes me think it's piping OK and it's just rejecting my request without returning a reason.
Can anyone spot what I'm getting wrong?
Cheers.
See Shiv's question for the answer to this problem:
NodeJS Convert Int16Array binary Buffer to LINEAR16 encoded raw stream for Google Speech API
The answer he gave was:
We can write the buffer directly to the recognizer stream created from the Google Speech client, as follows:
const recognizer = getGoogleSpeechStreamRecognizer();
recognizer.write(int16ArrayBuffer)
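
Applied to the streaming code above, that amounts to forwarding each incoming chunk to the recognize stream by hand instead of relying on pipe. A sketch using the stream and recognizeStream variables from the question:

// Instead of stream.pipe(recognizeStream), write each chunk directly:
stream.on('data', function (chunk) {
  recognizeStream.write(chunk); // chunk is the raw LINEAR16 audio buffer from the client
});
stream.on('end', function () {
  recognizeStream.end();
});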

Serving files concurrently to multiple clients in node

I'm building a simple HTTP server that serves a fairly large file to clients using streams. I need to serve multiple clients at the same time, and I'm wondering what the simplest way to achieve that is.
My initial feeling is that using the cluster module and forking {num CPUs} processes might be the simplest way.
var StreamBrake = require('streambrake');
var http = require('http');
var fs = require('fs');

var server = http.createServer(function(req, res) {
  var stream = fs.createReadStream('/data/somefile');
  stream.pipe(new StreamBrake(10240)).pipe(res);
});

server.listen(1234, 10);
Edit: To clarify, the issue is that this code won't begin to serve the second client until it has finished serving the first.
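
For reference, the cluster approach floated above would look roughly like this; it is purely illustrative, and as the answer below argues, a single process can already stream to multiple clients:

var cluster = require('cluster');
var http = require('http');
var fs = require('fs');
var numCPUs = require('os').cpus().length;

if (cluster.isMaster) {
  // Fork one worker per CPU; they share the listening socket
  for (var i = 0; i < numCPUs; i++) {
    cluster.fork();
  }
} else {
  http.createServer(function (req, res) {
    fs.createReadStream('/data/somefile').pipe(res);
  }).listen(1234);
}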
After thinking about your issue, I'm confused as to why you believe there's an issue. createReadStream is asynchronous. Here is an example that complicates the code a little in order to demonstrate that, using createReadStream, we can indeed service multiple connections at a time.
/*jshint node:true*/
var http = require('http');
var fs = require('fs');

var activeRequests = 0;
var launchFiveRequests = 5;

http.createServer(function (req, res) {
  activeRequests++;
  console.log('Request received');
  var readStream = fs.createReadStream('play.js', {'bufferSize': 1024});
  readStream.setEncoding('utf8');
  readStream.on('data', function (data) {
    console.log(activeRequests);
    res.write(data);
  });
  readStream.on('end', function () {
    res.end();
    console.log('end');
    activeRequests--;
  });
}).listen(8080);

var options = {
  hostname: 'localhost',
  port: 8080,
  path: '/'
};

while (launchFiveRequests--) {
  http.get(options, function(res) {
    console.log('response received');
  });
}
Serve up a sufficiently large file, and you should see that all 5 requests are live at once and all finish at roughly the same time.

How can I create a Twitter stream using Node.js and Websockets?

A few months ago (August 2011) I successfully created a node.js websockets server which connected to Twitter's Streaming API using basic HTTP user/password authentication. To do this, I employed Andre Goncalves' twitter-nodejs-websocket library.
Since creating this working implementation, Twitter has eliminated access to the streaming API via basic HTTP auth in favor of OAuth. After this shift, I utilized Ciaran Jessup's node-oauth library, which has successfully given me access to the Streaming API again (when I run the server I am successfully outputting the tweets via console.log(tweet); see below).
The problem now is that my websockets server is no longer working. When I run my server from the command line and hit the client web page from the browser, the websocket "onclose" event is immediately fired.
I've tried everything I can think of to get this working. Any help would be very greatly appreciated!
server.js
var sys = require('sys'),
    http = require('http'),
    ws = require("./vendor/ws"),
    base64 = require('./vendor/base64'),
    arrays = require('./vendor/arrays');

var OAuth = require('./oauth/oauth').OAuth;

var consumer_key = '[...]'; //removed for obvious security reasons...
var consumer_secret = '[...]';
var access_token = '[...]';
var access_token_secret = '[...]';

oa = new OAuth("https://twitter.com/oauth/request_token",
               "https://twitter.com/oauth/access_token",
               consumer_key,
               consumer_secret,
               "1.0A",
               null,
               "HMAC-SHA1");

var request = oa.get("https://stream.twitter.com/1/statuses/filter.json?track=google", access_token, access_token_secret);

// Response Parsing -------------------------------------------- //
var clients = [];
var message = "";

request.addListener('response', function (response) {
  response.setEncoding('utf8');
  response.addListener("data", function (chunk) {
    message += chunk;
    var newlineIndex = message.indexOf('\r');
    // response should not be sent until message includes '\r'.
    // Look at the section titled "Parsing Responses" in Twitter's documentation.
    if (newlineIndex !== -1) {
      var tweet = message.slice(0, newlineIndex);
      clients.forEach(function(client){
        // Send response to all connected clients
        client.write(tweet);
      });
      // this just tests if we are receiving tweets -- we are: terminal successfully outputs stream //
      var pt = JSON.parse(tweet);
      console.log('tweet: ' + pt.text);
    }
    message = message.slice(newlineIndex + 1);
  });
});
request.end();

// Websocket TCP server
ws.createServer(function(websocket){
  clients.push(websocket);
  websocket.addListener("connect", function(resource){
    // emitted after handshake
    sys.debug("connect: " + resource);
  }).addListener("close", function(){
    // emitted when server or client closes connection
    clients.remove(websocket);
    sys.debug("close");
  });
}).listen(8081);

// This basic http server works, so we know this port is open.
//
// var http = require('http');
// http.createServer(function (req, res) {
//   res.writeHead(200, {'Content-Type': 'text/plain'});
//   res.end('Hello World\n');
// }).listen(8081);
client code
<script type="text/javascript" charset="utf-8">
  ws = new WebSocket("ws://ec2-67-202-6-10.compute-1.amazonaws.com:8081");
  ws.onmessage = function(evt) {
    console.log('tweet');
  };
  ws.onclose = function() {
    console.log("socket closed");
  };
  ws.onopen = function() {
    console.log("connected...");
  };
</script>
Maybe you updated the browser? The WebSocket spec is changing rapidly. Anyway, I'd propose using socket.io, because its fallbacks will keep things working even if the browser is outdated, the protocol becomes incompatible again, or a badly behaved proxy prevents WebSockets from working.
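
A rough sketch of what the socket.io variant could look like on the server side, assuming the same oa.get stream as above (the 'tweet' event name is arbitrary):

var io = require('socket.io').listen(8081);

io.sockets.on('connection', function (socket) {
  console.log('socket.io client connected');
});

// Inside the existing 'data' listener, replace the client.write loop with:
//   io.sockets.emit('tweet', tweet);
// On the client, connect with io.connect(...) and listen for the 'tweet' event.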
Have a look at this sample event stream (it uses server-sent events) from a twitter stream:
https://github.com/chovy/nodejs-stream
