I am implementing an IFTTT Action using Node.js. Node.js is version 0.10.29 running on Windows Server 2012 R2.
I am trying to read the body of a POST that is coming from IFTTT. The 'data' event is not firing. Only the 'readable' event is firing. No other events such as 'end' are firing.
However, if I simulate the same POST request with curl from another server, this code works fine.
Also, if I place Fiddler on my server between IFTTT and my Node.js code, it works fine.
The request coming from IFTTT looks correct in Fiddler, and once Fiddler has "processed" it, the Node.js code can read it.
What could be the issue?
var http = require('http');
var url = require('url');

var s = http.createServer(process_request);
s.listen(8080);

function process_request(req, res) {
    req.parsed_url = url.parse(req.url, true);
    var core_url = req.parsed_url.pathname;
    if (core_url == '/ifttt/v1/status' && req.method.toLowerCase() == 'get') {
        // do stuff required for their test
    }
    else if (core_url == '/ifttt/v1/test/setup' && req.method.toLowerCase() == 'post') {
        // do stuff required for their test
    }
    else if (core_url == '/ifttt/v1/actions/my_action' && req.method.toLowerCase() == 'post') {
        if (<the channel key matches>) {
            var json_body = "";
            req.on('readable', function () { console.log("In callback for readable "); });
            req.on('end', function () { console.log("In callback for end: " + json_body); });
            req.on('close', function () { console.log("In callback for close "); });
            req.on('error', function (err) { console.log("In callback for error "); });
            req.on('data', function (chunkdata) {
                console.log("In callback for data ");
                if (chunkdata) {
                    // chunkdata is a Buffer unless req.setEncoding() was called
                    if (typeof chunkdata == 'string') { json_body += chunkdata; }
                    else { console.log("data: No chunkdata "); }
                }
            });
        }
    } // action url
} // function process_request
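For reference, since only 'readable' fires in this setup, the body can also be collected with the streams2 read() API; a minimal sketch (not the original handler):

// Inside the action branch: collect the POST body via 'readable' + read()
var json_body = '';
req.setEncoding('utf8'); // make read() return strings instead of Buffers
req.on('readable', function () {
    var chunk;
    while ((chunk = req.read()) !== null) {
        json_body += chunk;
    }
});
req.on('end', function () {
    console.log('Body received: ' + json_body);
});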
Update: I rewrote this code using Express and the problem did not reappear.
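For comparison, a rough sketch of what the Express version might look like (the route handler body and response shape here are assumptions, not the actual rewrite):

var express = require('express');
var bodyParser = require('body-parser');

var app = express();
app.use(bodyParser.json()); // parses the JSON body of incoming requests

app.post('/ifttt/v1/actions/my_action', function (req, res) {
    // req.body is already the parsed JSON payload here
    console.log('Parsed body:', req.body);
    res.json({ data: [] });
});

app.listen(8080);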
I've been analyzing the js-ipfs source code to understand how exactly delegated routing works. It turns out that js-ipfs uses delegated routing by default, which means that whenever I call IPFS.cat(), I send an HTTP request to a delegated routing node that performs the DHT querying and propagation on my behalf and returns the content to me so I can display it. What has confused me for a few weeks is how exactly those delegated routing nodes alter my node's local directory when they return a response.
I am fairly sure that the cat method creates a new local directory entry with the returned content: while analyzing the source code for ipfs.cat(), I saw that it calls repo.blocks.get(CID, options), and during that process fs-datastore.get(cid) is called, which uses the OS module to join my node's local directory path with the CID of the file being fetched. So it seems that either the node-fetch module on my local node saves the file to my local directory so that it can be read by fs-datastore.get(), or the delegated routing node somehow saves the file to my local directory remotely after figuring out its path.
function fetch(url, opts) {
//native-fetch method
//This method should fetch an ipfs file from other peers and then
//save it to the local directory
// allow custom promise
if (!fetch.Promise) {
throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
}
Body.Promise = fetch.Promise;
// wrap http.request into fetch
return new fetch.Promise(function (resolve, reject) {
// build request object
const request = new Request(url, opts);
const options = getNodeRequestOptions(request);
const send = (options.protocol === 'https:' ? https : http).request;
const signal = request.signal;
let response = null;
const abort = function abort() {
let error = new AbortError('The user aborted a request.');
reject(error);
if (request.body && request.body instanceof Stream.Readable) {
destroyStream(request.body, error);
}
if (!response || !response.body) return;
response.body.emit('error', error);
};
if (signal && signal.aborted) {
abort();
return;
}
const abortAndFinalize = function abortAndFinalize() {
abort();
finalize();
};
// send request
const req = send(options);
console.log("node-fetch req(http.request() return value, send() return value): ", req)
let reqTimeout;
if (signal) {
signal.addEventListener('abort', abortAndFinalize);
}
function finalize() {
req.abort();
if (signal) signal.removeEventListener('abort', abortAndFinalize);
clearTimeout(reqTimeout);
}
if (request.timeout) {
req.once('socket', function (socket) {
reqTimeout = setTimeout(function () {
reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
finalize();
}, request.timeout);
});
}
req.on('error', function (err) {
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
if (response && response.body) {
destroyStream(response.body, err);
}
finalize();
});
fixResponseChunkedTransferBadEnding(req, function (err) {
if (signal && signal.aborted) {
return;
}
destroyStream(response.body, err);
});
/* c8 ignore next 18 */
if (parseInt(process.version.substring(1)) < 14) {
// Before Node.js 14, pipeline() does not fully support async iterators and does not always
// properly handle when the socket close/end events are out of order.
req.on('socket', function (s) {
s.addListener('close', function (hadError) {
// if a data listener is still present we didn't end cleanly
const hasDataListener = s.listenerCount('data') > 0;
// if end happened before close but the socket didn't emit an error, do it now
if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
const err = new Error('Premature close');
err.code = 'ERR_STREAM_PREMATURE_CLOSE';
response.body.emit('error', err);
}
});
});
}
req.on('response', function (res) {
clearTimeout(reqTimeout);
const headers = createHeadersLenient(res.headers);
// HTTP fetch step 5
if (fetch.isRedirect(res.statusCode)) {
// HTTP fetch step 5.2
const location = headers.get('Location');
// HTTP fetch step 5.3
const locationURL = location === null ? null : resolve_url(request.url, location);
// HTTP fetch step 5.5
switch (request.redirect) {
case 'error':
reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
finalize();
return;
case 'manual':
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
if (locationURL !== null) {
// handle corrupted header
try {
headers.set('Location', locationURL);
} catch (err) {
// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
reject(err);
}
}
break;
case 'follow':
// HTTP-redirect fetch step 2
if (locationURL === null) {
break;
}
// HTTP-redirect fetch step 5
if (request.counter >= request.follow) {
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOpts = {
headers: new Headers(request.headers),
follow: request.follow,
counter: request.counter + 1,
agent: request.agent,
compress: request.compress,
method: request.method,
body: request.body,
signal: request.signal,
timeout: request.timeout,
size: request.size
};
// HTTP-redirect fetch step 9
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 11
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
requestOpts.method = 'GET';
requestOpts.body = undefined;
requestOpts.headers.delete('content-length');
}
// HTTP-redirect fetch step 15
resolve(fetch(new Request(locationURL, requestOpts)));
finalize();
return;
}
}
// prepare response
res.once('end', function () {
if (signal) signal.removeEventListener('abort', abortAndFinalize);
});
let body = res.pipe(new PassThrough$1());
const response_options = {
url: request.url,
status: res.statusCode,
statusText: res.statusMessage,
headers: headers,
size: request.size,
timeout: request.timeout,
counter: request.counter
};
// HTTP-network fetch step 12.1.1.3
const codings = headers.get('Content-Encoding');
// HTTP-network fetch step 12.1.1.4: handle content codings
// in following scenarios we ignore compression support
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
response = new Response(body, response_options);
resolve(response);
return;
}
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
// for gzip
if (codings == 'gzip' || codings == 'x-gzip') {
body = body.pipe(zlib.createGunzip(zlibOptions));
response = new Response(body, response_options);
resolve(response);
return;
}
// for deflate
if (codings == 'deflate' || codings == 'x-deflate') {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
const raw = res.pipe(new PassThrough$1());
raw.once('data', function (chunk) {
// see http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
body = body.pipe(zlib.createInflate());
} else {
body = body.pipe(zlib.createInflateRaw());
}
response = new Response(body, response_options);
resolve(response);
});
return;
}
// for br
if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
body = body.pipe(zlib.createBrotliDecompress());
response = new Response(body, response_options);
resolve(response);
return;
}
// otherwise, use response as-is
response = new Response(body, response_options);
resolve(response);
});
writeToStream(req, request);
});
}
In the node-fetch module's fetch source code, there doesn't seem to be anything that saves the response to my local directory, let alone figures out the local directory with the OS module. It seems unlikely that the remote delegated routing node accesses my local directory and saves the file remotely, but if that is a possible mechanism, I'd like to know how exactly this whole file-saving process, which I've been struggling to understand for weeks, actually works.
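As far as I can tell from the source above, fetch() only ever resolves with a Response whose body is a stream; a rough sketch of what I mean (the delegate URL and the persistence step are my own assumptions, not js-ipfs code):

const fetch = require('node-fetch');

// fetch() itself never touches the filesystem; it just resolves with a
// Response object whose body is a readable stream.
fetch('https://delegate.example.com/some/endpoint') // hypothetical delegate URL
    .then(function (res) { return res.buffer(); })  // buffer the streamed body in memory
    .then(function (bytes) {
        // Any write into the local repo would have to happen here, in the
        // caller (e.g. js-ipfs putting the block into its datastore).
        // Shown only as a placeholder, not actual js-ipfs code.
        console.log('received', bytes.length, 'bytes');
    });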
In the code pasted below, the response is sent too quickly because the call to:
global.h2 = jsonifiedver(l.number)
is too slow. How can I make the code wait for the answer from the jsonifiedver() function call so that I get a correct result? I tried using global variables, and they work, but only on every second call; that's how I know the call itself works. The program simply finishes too fast, and only on the second call does it have the data I want. I'm new to Node.js, so any help is appreciated. Thanks!
const server = http.createServer((req, res) => {
    if (req.method == 'POST') {
        var body = ''
        req.on('data', function(data) {
            body += data
            global.l = JSON.parse(body)
            l = JSON.parse(body)
            global.h2 = jsonifiedver(l.number) // This call is slow and doesnt
                                               // finish in time
            global.h3 = JSON.stringify(h2)
            console.log('Partial body: ' + body, global.l, global.l.number)
            console.log("POST")
            res.end("Not The real end");
        })
    } else {
        console.log("GET")
    }
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json'); // 'text/plain');
    console.log(global.l)
    res.end(global.h3); //"End");
});
So res.end(global.h3) finishes before my call to global.h2 = jsonifiedver(l.number) does, and I don't get the answer I need. Does that make sense?
The problem is that, when the request comes in, req.on('data', function () {}) only registers a handler for the data event, while you are also sending the response with res.end() after the else statement. You should not send the response until req.on('end') fires, which means the request data has finished arriving. In the data event handler you should ideally just append the chunk to body, and then in the end event handler you should process the body and return the response, like this:
const server = http.createServer((req, res) => {
    const methodType = req.method;
    if (methodType === "GET") {
        console.log("GET");
        res.statusCode = 200;
        res.setHeader('Content-Type', 'application/json');
        console.log(global.l);
        res.end(global.h3);
    } else if (methodType === 'POST') {
        console.log("POST");
        var body = '';
        req.on('data', function (data) {
            body += data; // just accumulate the chunks here
        });
        req.on('end', function () {
            global.l = JSON.parse(body);
            l = JSON.parse(body);
            global.h2 = jsonifiedver(l.number);
            global.h3 = JSON.stringify(h2);
            res.statusCode = 200;
            res.setHeader('Content-Type', 'application/json');
            console.log(global.l);
            res.end(global.h3);
        });
    }
});
And if you want to wait until the jsonifiedver() call is done, make sure it is defined as a Promise / async function so you can call it with await, and make sure the wrapper function in which you invoke jsonifiedver() is declared async too.
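A minimal sketch of that, assuming jsonifiedver() has been rewritten to return a Promise:

req.on('end', async function () {
    const parsed = JSON.parse(body);
    const result = await jsonifiedver(parsed.number); // waits for the slow call to finish
    res.statusCode = 200;
    res.setHeader('Content-Type', 'application/json');
    res.end(JSON.stringify(result));
});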
So, here is the problem. I found out how to make an HTTP request in Node.js so that I could download and parse a remote JSON file. That all works fine, but nothing happens afterwards. I have a function, and the if condition inside it never gets executed. It simply never gets past the HTTP request. Is there something I am missing?
var remoteStamp;
if (typeof force == "undefined") {
    var timeurl = "http://" + parsedconfig.weburl + "/timestamp.json";
    request(timeurl, { json: true }, (err, res, body) => {
        if (err) { return console.log(err); }
        console.log(body.timestamp);
        remoteStamp = body.timestamp;
    });
}
if (remoteStamp < parsedconfig.timestamp || force == "local") {
    //something should happen here, all the values check out - still nothing happens
}
You are using a callback, so all the code that you want to run after the request completes should be inside the callback:
var remoteStamp;
if (typeof force == "undefined") {
    var timeurl = "http://" + parsedconfig.weburl + "/timestamp.json";
    request(timeurl, { json: true }, (err, res, body) => {
        if (err) { return console.log(err); }
        console.log(body.timestamp);
        remoteStamp = body.timestamp;
        if (remoteStamp < parsedconfig.timestamp || force == "local") {
            //something should happen here, all the values check out - still nothing happens
        }
    });
}
Or you can use the request-promise library to do this with promises: https://github.com/request/request-promise
On how to use promises: https://developers.google.com/web/fundamentals/primers/promises
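A rough sketch of the same check using request-promise (assuming it is installed alongside request):

const rp = require('request-promise');

if (typeof force === "undefined") {
    const timeurl = "http://" + parsedconfig.weburl + "/timestamp.json";
    rp({ uri: timeurl, json: true })
        .then(function (body) {
            console.log(body.timestamp);
            if (body.timestamp < parsedconfig.timestamp || force === "local") {
                // something should happen here
            }
        })
        .catch(function (err) {
            console.log(err);
        });
}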
I am using the Facebook Graph Node.js API to fetch user_posts. The response is paginated, so I need to loop over it to fetch all the posts. I am using the following route to fetch the Facebook posts, and I loop over the pages with the get_user_statuses function:
var posts = "";

function get_user_statuses(response_posts, res) {
    var link_regex = /https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}|www\.[^\s]+\.[^\s]{2,}/g;
    var isNextPageAvailable = true;
    if ("paging" in response_posts) {
        var nextPage = response_posts.paging.next;
        isNextPageAvailable = true;
    } else {
        isNextPageAvailable = false;
    }
    for (var i = 0; i < response_posts.data.length; i++) {
        var post = response_posts.data[i].message + " ";
        if ("message" in response_posts.data[i]) {
            posts += post.replace(link_regex, "");
        }
    }
    if (nextPage !== undefined) {
        request(nextPage, function (error, response, body) {
            if (!error && response.statusCode === 200) {
                get_user_statuses(JSON.parse(body));
            } else {
                console.log(error);
            }
        });
    }
    if (!isNextPageAvailable) {
        //Sending posts to facebook Modal
        console.log(posts);
        res.send(JSON.stringify({posts: posts})); //res is not defined here
    }
}

router.post('/fbData', function (req, response, next) {
    FB.setAccessToken(req.body.access_token);
    FB.api('/me?fields=posts&posts.limit=1000', function (res) {
        if (!res || res.error) {
            if (!res) {
                response.send(JSON.stringify({error: 'An error occurred. Please try again.'}))
            }
            response.send(JSON.stringify({error: response.error}));
            return;
        }
        get_user_statuses(res.posts, response); //Passing response object here
    });
});
The issue is that the response object passed from the Express route is not defined in the get_user_statuses function. Now I have two questions:
Why is the response object not defined?
Is there a better approach to achieve this arrangement?
res is not defined because you forgot to pass it in the internal call. Change get_user_statuses(JSON.parse(body)); to get_user_statuses(JSON.parse(body), res); and it should work.
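In context, the recursive branch then looks like this:

if (nextPage !== undefined) {
    request(nextPage, function (error, response, body) {
        if (!error && response.statusCode === 200) {
            get_user_statuses(JSON.parse(body), res); // forward the Express response object
        } else {
            console.log(error);
        }
    });
}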
I solved my problem. I needed to create a function with a callback. In case anyone else is stuck on this kind of issue, this post helped me resolve it:
How to recurse asynchronously over API callbacks in node.js?
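A sketch of the kind of callback-based recursion that update describes (the completion callback and accumulator parameter are my own naming, not the original code):

function get_user_statuses(response_posts, done, collected) {
    collected = collected || "";
    for (var i = 0; i < response_posts.data.length; i++) {
        if ("message" in response_posts.data[i]) {
            collected += response_posts.data[i].message + " ";
        }
    }
    if (response_posts.paging && response_posts.paging.next) {
        // fetch the next page, carrying the accumulated posts along
        request(response_posts.paging.next, function (error, response, body) {
            if (error) { return done(error); }
            get_user_statuses(JSON.parse(body), done, collected);
        });
    } else {
        done(null, collected); // all pages consumed
    }
}

// usage inside the route:
// get_user_statuses(res.posts, function (err, posts) {
//     if (err) { return response.send(JSON.stringify({ error: err.message })); }
//     response.send(JSON.stringify({ posts: posts }));
// });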
I've got a Node.js-based web server running at home, and I'm trying to implement a login form.
So basically I need to access the POSTed data (login/password). I found this:
How do you extract POST data in Node.js?
(req.on('data'...) & req.on('end'...))
But I need to do this synchronously. Can someone tell me how to do that?
(I need this code to be blocking, not non-blocking.)
EDIT: All my code is available on GitHub:
https://github.com/Fointard/NodeJs/tree/authentication
The problem lies here: https://github.com/Fointard/NodeJs/blob/authentication/js/reqHandlers/auth.js. On lines 98 and 104 I'm relying on the 'data' and 'end' events, but I'd like to do that synchronously so that checkID() (line 95) is able to return true or false.
You can't. HTTP requests are I/O operations and will always be resolved asynchronously. Your checkID function will never be able to return a value. You need to add a second parameter (usually called callback) that will be called with true or false.
var qs = require('querystring');

function checkID(req, callback) {
    var body = '';
    req.on('data', function (data) {
        body += data;
        if (body.length > 1e6)
            req.connection.destroy();
    });
    req.on('end', function () {
        var post = qs.parse(body);
        if ('loginInputLogin' in post && 'loginInputPassword' in post) {
            console.log('login : ' + post['loginInputLogin']);
            console.log('password : ' + post['loginInputPassword']);
        }
        if (post['loginInputLogin'] === 'fointard' && post['loginInputPassword'] === 'f01n') {
            console.log('ID confirmed');
            return callback(true); // return so we don't also call callback(false) below
        }
        callback(false);
    });
}
And use it like so:
checkID(yourRequest, function (success) {
    if (success) {
        //login successful
    } else {
        //login failed
    }
});
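If you prefer promises, the same callback can be wrapped so callers can use async/await; a minimal sketch (checkIDAsync is my own name, not part of the original code):

function checkIDAsync(req) {
    return new Promise(function (resolve) {
        checkID(req, resolve); // resolves with true or false
    });
}

// usage inside an async request handler:
// const success = await checkIDAsync(req);
// if (success) { /* login successful */ } else { /* login failed */ }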