I wrote some small code in AWS Lambda (Node.js) to send a file to an API. I am able to run the code, but I am getting an upload error.
Function logs:
START RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7 Version: $LATEST
2018-04-02T09:27:17.787Z 08ad7fab-3658-11e8-8483-a7fbad976cb7 upload failed: { Error: unable to verify the first certificate
at Error (native)
at TLSSocket.<anonymous> (_tls_wrap.js:1092:38)
at emitNone (events.js:86:13)
at TLSSocket.emit (events.js:185:7)
at TLSSocket._finishInit (_tls_wrap.js:610:8)
at TLSWrap.ssl.onhandshakedone (_tls_wrap.js:440:38) code: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' }
END RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7.
The code I have written is:
console.log('Loading Function');
const fs = require('fs');
const request = require('request');
const url = require('url');
const https = require('https');
var rootCas = require('ssl-root-cas').create();
// Default CA bundle for all https requests
// (whether using https directly, request, or another module)
https.globalAgent.options.ca = rootCas;
exports.handler = (event, context, callback) => {
  var formData = {
    // Pass a simple key-value pair
    my_field: 'my_value',
    // Pass data via Buffers
    my_buffer: Buffer.from([1, 2, 3]),
    // Pass data via Streams
    my_file: fs.createReadStream(__dirname + '/myfile.csv'),
    // Pass multiple values with an Array
    // attachments: [
    //   fs.createReadStream(__dirname + '/myfile.txt'),
    //   fs.createReadStream(__dirname + '/myfile.txt')
    // ],
  };
  var req = request.post({ url: 'https://abc.xyz.com:443/file/', formData: formData }, function optionalCallback(err, httpResponse, body) {
    if (err) {
      return console.error('upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
};
When I run the same code locally, I do not get any errors, and the ssl-root-cas module is able to download all the certificates. When I try the same thing in AWS Lambda, the code is not able to download the certificates. I even tried adding "rejectUnauthorized": false to the JSON in the Configure test event, but I still cannot get past the error.
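For what it's worth, UNABLE_TO_VERIFY_LEAF_SIGNATURE usually means the server is not sending its full certificate chain, so another option is to bundle the missing intermediate/root certificate with the deployment package and hand it to request explicitly instead of relying on ssl-root-cas. A minimal sketch, assuming a hypothetical ca-chain.pem file deployed alongside the function, as a drop-in replacement for the request.post call above:

var req = request.post({
  url: 'https://abc.xyz.com:443/file/',
  formData: formData,
  agentOptions: {
    // ca-chain.pem is a placeholder name for the server's CA chain bundled with the Lambda package
    ca: fs.readFileSync(__dirname + '/ca-chain.pem')
  }
}, function optionalCallback(err, httpResponse, body) {
  if (err) {
    return console.error('upload failed:', err);
  }
  console.log('Upload successful! Server responded with:', body);
});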
I need to access GCP resources from outside the GCP environment, from AWS, using an AWS Lambda. I found this document, [accessing GCP resources from AWS][1], which provides a way to access GCP resources and asks you to create a workload identity pool.
I need to create a workload identity pool in GCP using a REST API call. The REST API call has to run outside of the GCP environment, that is, in this case, from the AWS environment. My GCP IAM user doesn't have privileges to create a workload identity pool (for org policy reasons), but I have a service account which has admin privileges to create a workload identity pool and all the required permissions to access the required resources once the pool is created.
I'm a newbie to GCP and am trying to figure out how to make a POST REST API call using my service account credentials. Any help is much appreciated.
Edited
Here is the sample code I have been using to try to make the REST call.
const {google} = require('googleapis');
const util = require('util');
const https = require('https');
const aws4 = require('aws4');
const auth = new google.auth.GoogleAuth({
keyFile: 'serviceAccountCreds.json',
scopes: ['https://www.googleapis.com/auth/cloud-platform'],
});
async function createSignedRequestParams(jsonBodyParams) {
const getAccessToken = await auth.getAccessToken();
console.log(`createSignedRequestParams() - this.credentials:${getAccessToken !== null}`);
// Set up the request params object that we'll sign
const requestParams = {
path: '/v1beta/projects/serviceAccountdev/locations/global/workloadIdentityPools?workloadIdentityPoolId=12345',
method: 'POST',
host: 'iam.googleapis.com',
headers: { 'Content-Type': 'application/json' },
body: jsonBodyParams
};
console.log(`createSignedRequestParams() - (signed) requestParams:${util.inspect(requestParams)}`);
return requestParams;
}
const jsonBodyParams = {
"description": "createWorkloadIdentityPool",
"display-name": "devAccount"
};
async function request(requestParams, jsonBodyParams) {
console.log(`request() requestParams:${util.inspect(requestParams)} jsonBodyParams:${jsonBodyParams}`);
// return new pending promise
return new Promise((resolve, reject) => {
const req = https.request(requestParams);
if (['POST', 'PATCH', 'PUT'].includes(requestParams.method)) {
req.write(jsonBodyParams);
}
req.end();
// Stream handlers for the request
req.on('error', (err) => {
console.log(`request() req.on('error') err:${util.inspect(err)}`);
return reject(err);
});
req.on('response', (res) => {
let dataJson = '';
res.on('data', chunk => {
dataJson += chunk;
});
res.on('end', () => {
const statusCode = res.statusCode;
const statusMessage = res.statusMessage;
const data = JSON.parse(dataJson);
console.log(`request() res.on('end')`, { statusCode, statusMessage, data });
resolve({ statusMessage, statusCode, data });
});
});
});
}
async function postTheRequest(reqParams, jsonBodyParams) {
try {
const response = await request(reqParams, jsonBodyParams);
return response;
} catch (error) {
console.log(error);
}
}
reqParams = createSignedRequestParams(jsonBodyParams);
postTheRequest(reqParams, jsonBodyParams);
Output of the above code:
[Running] node "c:\Users\av250044\.aws\GCP_Code_examples\registerTheWorkloadIdentifier.js"
request() requestParams:Promise { <pending> } jsonBodyParams:[object Object]
request() req.on('error') err:{ Error: connect ECONNREFUSED 127.0.0.1:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1106:14)
errno: 'ECONNREFUSED',
code: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 443 }
{ Error: connect ECONNREFUSED 127.0.0.1:443
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1106:14)
errno: 'ECONNREFUSED',
code: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 443 }
I'm wondering whether the path and host I'm passing are correct. Please let me know your thoughts on my code sample.
[1]: https://cloud.google.com/iam/docs/access-resources-aws#iam-workload-pools-add-aws-rest
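One thing that stands out in the output is requestParams:Promise { <pending> }: createSignedRequestParams() is async but never awaited, so https.request() receives a pending Promise instead of an options object, falls back to the default host (localhost), and that is where connect ECONNREFUSED 127.0.0.1:443 comes from. Below is a minimal sketch of how the call could be wired up instead; it reuses the auth, jsonBodyParams, and request() from the code above, attaches the service account's access token, and stringifies the body. The path and pool id are copied from the question, and the exact request schema should be checked against the workload identity pools REST reference.

async function main() {
  const token = await auth.getAccessToken();
  const body = JSON.stringify(jsonBodyParams);
  const requestParams = {
    host: 'iam.googleapis.com',
    path: '/v1beta/projects/serviceAccountdev/locations/global/workloadIdentityPools?workloadIdentityPoolId=12345',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(body),
      'Authorization': `Bearer ${token}`
    }
  };
  const response = await request(requestParams, body);
  console.log(response);
}

main().catch(console.error);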
I am building a NodeJS app that makes calls to an external API. The external API uses a self-signed certificate. I tried setting the environment variable process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'. This works to ignore the certificate verification when using the app normally. However, a request to the same endpoint does NOT work when calling the NodeJS route with the Jest Supertest agent.
There is a certificate verification error when running the Jest Supertest case. Is there a way to accept self-signed certificates when sending requests using the Supertest agent?
npm test
Error: Error: SSL Error: DEPTH_ZERO_SELF_SIGNED_CERT
at Object.dispatchError (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:54:19)
at EventEmitter.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xmlhttprequest.js:675:20)
at EventEmitter.emit (events.js:323:22)
at Request.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:384:47)
at Request.emit (events.js:311:20)
at Request.onRequestResponse (/home/node/app/node_modules/request/request.js:948:10)
at ClientRequest.emit (events.js:311:20)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:603:27)
at HTTPParser.parserOnHeadersComplete (_http_common.js:119:17)
at TLSSocket.socketOnData (_http_client.js:476:22) undefined
NodeJS internal route
The route works when accessed via the browser, but not when running Jest Supertest. The internal route /internal itself works, but when that code subsequently sends a request to the external API with the self-signed certificate, the certificate causes a 500 error.
router.get('/internal', (req, res, next) => {
// Set request values that are specific to this route
const requestOptionsData = { method: `GET`, endpoint: `/external` };
try {
httpCtrl.makeRequest(requestOptionsData).then(result => {
if (result.error) {
return res.status(result.status).json(result.error.message || result.error);
}
return res.status(result.status).json(result);
}).catch((error) => {
console.error(error);
return res.status(500).send(error);
});
} catch (e) {
console.error(e);
return res.status(500).send(e);
}
});
NodeJS controller
A wrapper function to make axios requests to external API
httpCtrl.makeRequest = async (requestOptionsData) => {
let result = {};
// Set request options
const requestOptions = httpCtrl.setApiRequestOptions(requestOptionsData);
let response;
try {
response = await axios(requestOptions);
} catch(e) {
result.error = e.toJSON() || e;
console.error(result.error);
result.status = 500;
return result;
}
result.status = response && response.status || 500;
result.data = response && response.data || {};
return result;
}
JEST Supertest
Test that causes certificate error
const app = require('../app.js');
const supertest = require('supertest');
const agent = supertest(app);
describe('API routes', () => {
it('GET internal NodeJS route', async done => {
agent
.get('/internal')
.set('Accept', 'application/json')
.send()
.expect(200)
.end((err, res) => {
if (err) {
return done(err);
}
expect(res.status).toBe(200);
return done();
});
});
});
UPDATE:
I tried removing NODE_TLS_REJECT_UNAUTHORIZED and setting rejectUnauthorized to false in the axios agent config, but I am still having the same problem. The connection works when using the app via the browser, but it does not work with Supertest.
const agent = new https.Agent({
rejectUnauthorized: false
});
const options = {
url: url,
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Authorization': `Bearer ${requestOptionsData.jwt}`,
'Host': process.env.ADMIN_API_BASE_URL
},
method: requestOptionsData.method || `GET`,
httpsAgent: agent
}
Here is the error with this agent configuration:
Error: Error: self signed certificate
at Object.dispatchError (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:54:19)
at EventEmitter.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xmlhttprequest.js:675:20)
at EventEmitter.emit (events.js:323:22)
at Request.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:384:47)
at Request.emit (events.js:311:20)
at Request.onRequestError (/home/node/app/node_modules/request/request.js:877:8)
at ClientRequest.emit (events.js:311:20)
at TLSSocket.socketErrorListener (_http_client.js:426:9)
at TLSSocket.emit (events.js:311:20)
at emitErrorNT (internal/streams/destroy.js:92:8) undefined
console.error controllers/http.ctrl.js:50
I was able to solve this with the solution in this GitHub issue: adding testEnvironment: 'node' to the jest.config.js file.
https://github.com/axios/axios/issues/1180
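For reference, a minimal jest.config.js with that setting, which makes the tests run in the Node environment instead of jsdom (jsdom's XMLHttpRequest is what was producing the certificate errors in the stack traces above):

// jest.config.js
module.exports = {
  testEnvironment: 'node'
};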
I am developing a web crawler program for fun, but when I use superagent to get a page, a read ECONNRESET error occurs.
const https = require('https');
https.globalAgent.options.secureProtocol = 'TLSv1_2_method';
const superagent = require('superagent-use')(require('superagent'))
superagent.use(require('superagent-verbose-errors'))
const cheerio = require('cheerio');
const baidu = "https://www.baidu.com/s?wd=";
const sogou = "https://www.sogou.com/web?query=";
function search(key) {
const fullUrl = baidu + key;
return superagent
.get(fullUrl)
.on('error', error => console.log(error))
.then(response => {
return parsehtml(response.body);
});
}
function parsehtml(htmlBody) {
}
module.exports=search;
The following are the error details:
response:undefined
stack:"Error: read ECONNRESET\n at _errnoException (util.js:992:11)\n at
TLSWrap.onread (net.js:618:25)"
status:500
syscall:"read"
Chrome reports the connection to https://www.baidu.com as secure.
How can I fix this issue? Thanks a lot.
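One thing worth trying (an assumption on my part: some sites, Baidu included, may reset connections that do not look like they come from a browser) is to send browser-like headers on the request while keeping the error handler, for example:

function search(key) {
  const fullUrl = baidu + key;
  return superagent
    .get(fullUrl)
    // Illustrative browser-like headers; the exact User-Agent string is just an example
    .set('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)')
    .set('Accept', 'text/html')
    .on('error', error => console.log(error))
    .then(response => parsehtml(response.body));
}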
I use the unzip node module to unzip binary data coming from the request module.
It fails in some cases, when the request module's response does not contain zip binary data (that is, the response holds some other binary data rather than a zip archive).
How do I handle this exception?
const request = require("request");
const unzip = require('unzip');
const stream = require('stream');
var options = {
method: 'GET',
url: /*URL*/,
encoding: null
};
request(options, function (error, response, body) {
zipExtract(error, body);
});
zipExtract:
function zipExtract(error, zipData) {
if (error) {
console.error(error);
}
else {
try {
//create stream object
var artifactStream = new stream.PassThrough();
//parse buffer into stream
artifactStream.end(zipData);
//pipe response to unzip
artifactStream.pipe(unzip.Extract({path: 'app/output'}));
}
catch (exception) {
console.error(exception);
}
}
}
It prints this error on the console:
events.js:160
throw er; // Unhandled 'error' event
^
Error: invalid signature: 0x6d74683c
at C:\app-hub\module-application-size\node_modules\unzip\lib\parse.js:63:13
at runCallback (timers.js:637:20)
at tryOnImmediate (timers.js:610:5)
at processImmediate [as _immediateCallback] (timers.js:582:5)
npm ERR! Test failed. See above for more details.
Use the adm-zip module to handle the exception:
const admzip = require('adm-zip');
try {
var zip = new admzip(zipData);
zip.extractAllTo(/*path*/);
}
catch (exception) {
console.error(exception);
}
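As a side note on why the original pipeline crashes even inside try/catch: unzip.Extract() reports the invalid signature by emitting an asynchronous 'error' event on the stream, which a try/catch around .pipe() cannot intercept, so the process dies with an unhandled 'error' event. If you want to keep using unzip, attaching an error listener is enough to turn the crash into a handled error. A minimal sketch using the same artifactStream as above:

// Handle stream errors on the extractor itself; they arrive asynchronously,
// so the surrounding try/catch never sees them.
var extractor = unzip.Extract({path: 'app/output'});
extractor.on('error', function (err) {
  console.error('extract failed:', err);
});
artifactStream.pipe(extractor);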
var http = require('http');
exports.handler = function(event, context) {
var headers = {
'content-type': 'application/x-www-form-urlencoded'
}
var options = {
host: 'stage.wings.com',
path:'/test-lambda',
form: {
'days':'3'
},
headers:headers
};
console.log(options);
var req = http.request(options, function(response) {
// Continuously update stream with data
var body = '';
response.on('data', function(d) {
body += d;
});
response.on('end', function() {
// Data reception is done, do whatever with it!
var parsed = JSON.parse(body);
console.log("success");
console.log(parsed);
});
});
// Handler for HTTP request errors.
req.on('error', function (e) {
console.error('HTTP error: ' + e.message);
completedCallback('API request completed with error(s).');
});
};
My Node version: v0.10.25.
If I execute this in a file locally, it gives "HTTP error: socket hang up".
If I run this function from AWS Lambda, it throws this error:
Lambda error:2016-10-09T23:11:17.200Z 89f2146f-8e75-11e6-9219-b9b32aa0a768 Error: socket hang up
at createHangUpError (_http_client.js:200:15)
at Socket.socketOnEnd (_http_client.js:285:23)
at emitNone (events.js:72:20)
at Socket.emit (events.js:166:7)
at endReadableNT (_stream_readable.js:905:12)
at nextTickCallbackWith2Args (node.js:437:9)
at process._tickDomainCallback (node.js:392:17)
There is a timeout for AWS Lambda: it will hang up after at most 300 seconds.
Here is a little more about it: http://docs.aws.amazon.com/lambda/latest/dg/limits.html
You can use context.getRemainingTimeInMillis(), which returns the remaining time of your Lambda invocation, so you can flush your data. If this is intended to run for longer than five minutes, you can implement some kind of graceful shutdown and flush your data before the timeout.
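A minimal sketch of that idea (flushData() is a hypothetical stand-in for whatever persists your partial results, and the 5-second margin is just an illustrative value):

exports.handler = function (event, context) {
  var safetyMarginMs = 5000; // illustrative: start flushing when less than 5 seconds remain

  var timer = setInterval(function () {
    if (context.getRemainingTimeInMillis() < safetyMarginMs) {
      clearInterval(timer);
      flushData(); // hypothetical helper that saves whatever has been collected so far
    }
  }, 1000);

  // ... kick off the long-running work here ...
};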