I am using OneSignal for push notifications in Node.js. I am using the create notification API to send notifications to users, but I don't know why it sometimes works and sometimes gives a timeout error.
sendNotificationToUser(data) {
  try {
    var notificationData = {}
    notificationData.app_id = oneSignalAppId
    notificationData.headings = {
      en: "Heading"
    }
    notificationData.contents = {
      en: data.message
    }
    notificationData.include_player_ids = [data.deviceId]
    var headers = {
      "Content-Type": "application/json; charset=utf-8"
    }
    var options = {
      host: "onesignal.com",
      port: 443,
      path: "/api/v1/notifications",
      method: "POST",
      headers: headers
    }
    var https = require("https")
    var req = https.request(options, function (res) {
      res.on("data", function (data1) {
        console.log("Response:")
        console.log(JSON.parse(data1))
      })
    })
    req.on("error", function (e) {
      console.log("ERROR:")
      console.log(e)
    })
    req.write(JSON.stringify(notificationData))
    req.end()
  } catch (err) {
    console.log("err in notification", err)
  }
}
This API works about 50% of the time; the other 50% it responds with a timeout error, even though all the inputs are correct.
ERROR:
{
Error: connect ETIMEDOUT 104.18.225.52:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1107:14)
errno: 'ETIMEDOUT',
code: 'ETIMEDOUT',
syscall: 'connect',
address: '104.18.225.52',
port: 443
}
One simple solution is to hit a working IP directly. You can do this by including
host: 'onesignal.com' in headers and
host: '104.18.226.52' in options.
This resolved my issue.
To learn more about how you can specify an IP together with the hostname in an HTTPS request, see HTTPS request, specifying hostname and specific IP address.
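For completeness, here is a minimal sketch of that workaround: connect to the IP in options but keep the real hostname in the Host header and the TLS servername so the certificate still validates. The notificationData payload is the one built in the question, and the IP is the one from this answer, so treat both as illustrative:

// Sketch of the workaround above: connect straight to one of OneSignal's IPs,
// but keep the real hostname in the Host header and in the TLS servername.
// The IP comes from this answer and may change over time.
var https = require("https")

var options = {
  host: "104.18.226.52",               // connect to this IP directly
  port: 443,
  path: "/api/v1/notifications",
  method: "POST",
  headers: {
    "Content-Type": "application/json; charset=utf-8",
    "Host": "onesignal.com"            // the site we actually want
  },
  servername: "onesignal.com"          // keeps SNI and certificate validation working
}

// notificationData is the same payload built in the question above
var req = https.request(options, function (res) {
  res.on("data", function (chunk) {
    console.log("Response:", chunk.toString())
  })
})
req.on("error", function (e) {
  console.log("ERROR:", e)
})
req.write(JSON.stringify(notificationData))
req.end()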
I need to access GCP resources from outside the GCP environment, from AWS, using an AWS Lambda. I found this document [accessing GCP resources from AWS][1], which describes a way to access GCP resources and asks to create a workload identity pool.
I need to create the workload identity pool in GCP using a REST API call. The REST API call has to run outside of the GCP environment, that is, in this case from the AWS environment. My GCP IAM user doesn't have privileges to create a workload identity pool (due to org policy reasons), but I have a service account which has admin privileges to create a workload identity pool and all the required permissions to access the required resources once the pool is created.
I'm a newbie to GCP and am figuring out ways of making a POST REST API call using my service account credentials. Any help is much appreciated.
Edit
Here is the sample code I've been using to make the REST call.
const {google} = require('googleapis');
const util = require('util');
const https = require('https');
const aws4 = require('aws4');

const auth = new google.auth.GoogleAuth({
  keyFile: 'serviceAccountCreds.json',
  scopes: ['https://www.googleapis.com/auth/cloud-platform'],
});

async function createSignedRequestParams(jsonBodyParams) {
  const getAccessToken = await auth.getAccessToken();
  console.log(`createSignedRequestParams() - this.credentials:${getAccessToken !== null}`);
  // Set up the request params object that we'll sign
  const requestParams = {
    path: '/v1beta/projects/serviceAccountdev/locations/global/workloadIdentityPools?workloadIdentityPoolId=12345',
    method: 'POST',
    host: 'iam.googleapis.com',
    headers: { 'Content-Type': 'application/json' },
    body: jsonBodyParams
  };
  console.log(`createSignedRequestParams() - (signed) requestParams:${util.inspect(requestParams)}`);
  return requestParams;
}

const jsonBodyParams = {
  "description": "createWorkloadIdentityPool",
  "display-name": "devAccount"
};

async function request(requestParams, jsonBodyParams) {
  console.log(`request() requestParams:${util.inspect(requestParams)} jsonBodyParams:${jsonBodyParams}`);
  // return new pending promise
  return new Promise((resolve, reject) => {
    const req = https.request(requestParams);
    if (['POST', 'PATCH', 'PUT'].includes(requestParams.method)) {
      req.write(jsonBodyParams);
    }
    req.end();
    // Stream handlers for the request
    req.on('error', (err) => {
      console.log(`request() req.on('error') err:${util.inspect(err)}`);
      return reject(err);
    });
    req.on('response', (res) => {
      let dataJson = '';
      res.on('data', chunk => {
        dataJson += chunk;
      });
      res.on('end', () => {
        const statusCode = res.statusCode;
        const statusMessage = res.statusMessage;
        const data = JSON.parse(dataJson);
        console.log(`request() res.on('end')`, { statusCode, statusMessage, data });
        resolve({ statusMessage, statusCode, data });
      });
    });
  });
}

async function postTheRequest(reqParams, jsonBodyParams) {
  try {
    const response = await request(reqParams, jsonBodyParams);
    return response;
  } catch (error) {
    console.log(error);
  }
}

reqParams = createSignedRequestParams(jsonBodyParams);
postTheRequest(reqParams, jsonBodyParams);
Output of the above code:
[Running] node "c:\Users\av250044\.aws\GCP_Code_examples\registerTheWorkloadIdentifier.js"
request() requestParams:Promise { <pending> } jsonBodyParams:[object Object]
request() req.on('error') err:{ Error: connect ECONNREFUSED 127.0.0.1:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1106:14)
errno: 'ECONNREFUSED',
code: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 443 }
{ Error: connect ECONNREFUSED 127.0.0.1:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1106:14)
errno: 'ECONNREFUSED',
code: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 443 }
I'm wondering whether the path and host I'm passing are correct. Please let me know your thoughts on my code sample.
[1]: https://cloud.google.com/iam/docs/access-resources-aws#iam-workload-pools-add-aws-rest
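Side note on the output above: the log shows requestParams:Promise { <pending> }, i.e. createSignedRequestParams() is never awaited, so https.request() sees no host or path and falls back to 127.0.0.1:443. Below is a minimal sketch of awaiting it first, reusing the functions from the snippet; attaching the access token as an Authorization header and stringifying the body are assumptions on my part, not something confirmed in the question:

// Sketch only: resolve the request params before using them and send a string body.
async function main() {
  const reqParams = await createSignedRequestParams(jsonBodyParams); // await the promise
  const token = await auth.getAccessToken();
  reqParams.headers.Authorization = `Bearer ${token}`; // assumption: token goes in the header
  const response = await postTheRequest(reqParams, JSON.stringify(jsonBodyParams));
  console.log(response);
}

main().catch(console.error);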
I am getting this weird error:
Making request to bitbucket api at path: /2.0/repositories/interos/eco-system-globe/commit/116c82c81b8d3e1b8c2fd3f352510fd09e66a02e
***Request stream error***: Error: write EPROTO 139717438125888:error:1408F10B:SSL routines:ssl3_get_record:wrong version number:../deps/openssl/openssl/ssl/record/ssl3_record.c:332:
at WriteWrap.onWriteComplete [as oncomplete] (internal/stream_base_commons.js:83:16) {
errno: 'EPROTO',
code: 'EPROTO',
syscall: 'write'
}
The code I have is simply:
const pth = `/2.0/repositories/${fullRepoName}/commit/${sha}`;
console.log('Making request to bitbucket api at path:', pth);

const newReq = https.get({ // formerly get
  protocol: 'https:',
  port: 80,
  hostname: 'api.bitbucket.org',
  path: pth,
  headers: {
    'Authorization': `Basic ${bitbucketBase64}`,
    'Auth': `Basic ${bitbucketBase64}`
  }
}, r => {
  const v = {
    data: ''
  };
  r.on('data', d => {
    v.data += String(d || '');
  });
  r.once('end', () => {
    console.log('response ended:', r.statusCode);
    cb(null, v);
  });
});

newReq.once('error', e => {
  console.error('***Request stream error***:', e);
});

// newReq.write(stringified);
newReq.end();
Anyone have an idea what that's about? I am on Node.js version 12.2.0.
I had port: 80, but it should be port: 443. For SSL that's the default, so you can just omit the port option entirely.
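For reference, a minimal corrected sketch of the same call with the port fixed; pth, bitbucketBase64 and cb come from the question's code:

// Same request as above, but without the port: 80 override; https defaults to 443,
// which avoids speaking TLS to a plain-HTTP port (the cause of "wrong version number").
// https.get() calls req.end() for you, so no explicit end() is needed.
const https = require('https');

const newReq = https.get({
  protocol: 'https:',
  hostname: 'api.bitbucket.org',
  path: pth,
  headers: {
    'Authorization': `Basic ${bitbucketBase64}`
  }
}, r => {
  let data = '';
  r.on('data', d => { data += d; });
  r.once('end', () => {
    console.log('response ended:', r.statusCode);
    cb(null, { data });
  });
});

newReq.once('error', e => {
  console.error('***Request stream error***:', e);
});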
I am trying to connect to a third-party server with basic auth and a PFX TLS certificate using the Node/Express platform. I am not sure where I need to set the basic auth credentials, the cert file, and the header params.
For the GET call to the remote server, I have tried the https client and the tls module.
The request requires:
header = basic auth, schemaVersion, id, applicationName
In addition to sending the basic auth credentials, I need to send the following in the HTTP header: Content-Type: application/json
Accept: application/json
The request also requires one additional parameter, i.e. categories.
const https = require('https');
const fs = require('fs');

const options = {
  hostname: 'https://yadda/cards/getAssets',
  headers: {
    Authorization: 'Basic ' + new Buffer(username + ':' + passw).toString('base64'),
    schemaVersion: '2.0.0',
    id: '0000000',
    applicationName: 'yadda',
    Accept: 'application/json',
    'Content-Type': 'application/json',
  },
  pfx: fs.readFileSync('/somefile.cert.pfx'),
  passphrase: 'passphrase',
  categories: 'food',
};

https
  .get(options, (response: any) => {
    response.on('data', (d: any) => {
      console.log(`BODY: `, d);
    });
  })
  .on('error', (e: any) => {
    console.error('Error: ', e);
  });
// With the tls module, I tried this:
var fs = require('fs');
var tls = require('tls');

var socket = tls.connect(options, () => {
  console.log('client connected');
  process.stdin.pipe(socket);
  process.stdin.resume();
});
I would like to connect to the remote server and receive data in the response; instead I am getting the following error:
[ERROR] Error: { Error: getaddrinfo ENOTFOUND https://yadd/cards/getAssets:443
at GetAddrInfoReqWrap.onlookup [as oncomplete] (dns.js:67:26)
errno: 'ENOTFOUND',
code: 'ENOTFOUND',
syscall: 'getaddrinfo',
hostname: 'https://yadda/cards/getAssets',
A hostname (like www.example.com) is expected here, not a URL. The string you gave will be used as the hostname, and the DNS lookup will fail:
[ERROR] Error: { Error: getaddrinfo ENOTFOUND https://yadd/cards/getAssets:443
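A minimal sketch of how the options could be split up; the host yadda and path /cards/getAssets are the anonymized values from the question, and sending categories as a query parameter is an assumption about the third-party API:

// Sketch only: hostname holds just the host, the rest of the URL goes in path.
const https = require('https');
const fs = require('fs');

const options = {
  hostname: 'yadda',                          // host only, no protocol or path
  path: '/cards/getAssets?categories=food',   // assumption: categories as a query parameter
  method: 'GET',
  headers: {
    // Buffer.from replaces the deprecated new Buffer(); username/passw come from the question
    Authorization: 'Basic ' + Buffer.from(username + ':' + passw).toString('base64'),
    schemaVersion: '2.0.0',
    id: '0000000',
    applicationName: 'yadda',
    Accept: 'application/json',
    'Content-Type': 'application/json',
  },
  pfx: fs.readFileSync('/somefile.cert.pfx'),  // client certificate for mutual TLS
  passphrase: 'passphrase',
};

https.get(options, (response) => {
  let body = '';
  response.on('data', (d) => { body += d; });
  response.on('end', () => console.log('BODY:', body));
}).on('error', (e) => {
  console.error('Error:', e);
});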
At one point I just want to make a POST request to my Rails server running on localhost:3000.
If I use something like Postman or even cURL, I can reach my API.
But now, with my Node app, I want to do this:
var req_body = {
  'my_id': id,
  'dataTable': data
}

var options = {
  method: 'POST',
  url: config.get('api_url') + 'end_tracking',
  json: true,
  body: req_body
}
// config.get('api_url') returns http://localhost:3000/

request(options, function (error, response, body) {
  console.log(error)
  ...
}
This is what I get:
{ Error: connect ECONNREFUSED 127.0.0.1:3000
at Object.exports._errnoException (util.js:896:11)
at exports._exceptionWithHostPort (util.js:919:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1073:14)
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 3000 }
And as I said, if I do this request directly with Postman, everything works!
I finally did it!
For an unknown reason, accessing localhost:3000 launched with rails s didn't work at all from my Node server,
but it worked when I started the Rails server with rails s -b 0.0.0.0!
Here is how I did this; just use the same structure and I hope it works for you:
request({
  url: url, // URL to hit
  method: 'post',
  headers: headers,
  timeout: 10000,
  body: JSON.stringify(body)
}, function (error, result, body) {
  if (error) {
    console.log(error);
  } else if (result.statusCode == 500) {
    console.log('not found');
  } else {
    console.log('success')
  }
});
Hope it works.
I am trying to run the following Node code:
function getQuestions() {
  var options = {
    host: 'mysite.com',
    path: '/services/v2/question.json',
    headers: {
      "Content-Type": "application/json",
      "accept": "application/json; charset=UTF-8",
      'Authorization': auth
    }
  };

  http.request(options, function(response) {
    console.log("Everything worked!");
  }).on('error', function(e) {
    console.log('problem with request: ' + e.stack);
  });
}
But it always errors saying...
problem with request: Error: connect ECONNREFUSED
at errnoException (net.js:905:11)
at Object.afterConnect [as oncomplete] (net.js:896:19)
When I put the same info into Postman it works fine. Is there a way to debug this?
Update
Tried this...
var options = {
  host: "google.com"
};
Same result, so something must be wrong in my code. Any ideas?
It did end up being a proxy issue. This fixed it:
var options = {
  host: "proxy",
  port: 80,
  path: 'http://google.com'
};
Notice the proxy in host. I will mark this as a duplicate of this post.
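For context, a rough sketch of what the proxied request looks like with the plain http module: connect to the proxy's host and port, and put the absolute target URL in path. The proxy name is the placeholder from the answer above, and the target URL and auth value are taken from the question:

// Sketch: route the request through a forward proxy by connecting to the proxy
// and passing the absolute target URL as the path. "proxy" is a placeholder.
var http = require('http');

var options = {
  host: 'proxy',               // your proxy's hostname or IP
  port: 80,                    // your proxy's port
  path: 'http://mysite.com/services/v2/question.json', // full URL of the real target
  headers: {
    Host: 'mysite.com',        // some proxies expect the target host here
    Authorization: auth
  }
};

http.request(options, function (response) {
  var body = '';
  response.on('data', function (chunk) { body += chunk; });
  response.on('end', function () {
    console.log('Everything worked!', response.statusCode);
  });
}).on('error', function (e) {
  console.log('problem with request: ' + e.stack);
}).end();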