Adding Authorization Header - node.js

How would I add an authorization key to the header of this sample code?
I would like to add an authorization header to the getRemoteData url request. For example, Authorization = Bearer XXXXXXXXXXXX
How can I add this to my URL request?
async handle(handlerInput) {
let outputSpeech = 'This is the default message.';
await getRemoteData('http://api.open-notify.org/astros.json')
.then((response) => {
const data = JSON.parse(response);
outputSpeech = `There are currently ${data.people.length} astronauts in space. `;
for (let i = 0; i < data.people.length; i++) {
if (i === 0) {
//first record
outputSpeech = outputSpeech + 'Their names are: ' + data.people[i].name + ', '
} else if (i === data.people.length - 1) {
//last record
outputSpeech = outputSpeech + 'and ' + data.people[i].name + '.'
} else {
//middle record(s)
outputSpeech = outputSpeech + data.people[i].name + ', '
}
}
})
.catch((err) => {
//set an optional error message here
//outputSpeech = err.message;
});
return handlerInput.responseBuilder
.speak(outputSpeech)
.getResponse();
},
};
const getRemoteData = function (url) {
return new Promise((resolve, reject) => {
const client = url.startsWith('https') ? require('https') :
require('http');
const request = client.get(url, (response) => {
if (response.statusCode < 200 || response.statusCode > 299) {
reject(new Error('Failed with status code: ' + response.statusCode));
}
const body = [];
response.on('data', (chunk) => body.push(chunk));
response.on('end', () => resolve(body.join('')));
});
request.on('error', (err) => reject(err))
})
};

Change this client.get() call from:
const request = client.get(url, (response) => {
to:
const headers = {
Authorization: 'Bearer 1234'
};
const request = client.get(url, {headers}, (response) => {

Related

How can I Convert code from Nodejs to Angular including API features [closed]

Closed. This question needs to be more focused. It is not currently accepting answers.
Want to improve this question? Update the question so it focuses on one problem only by editing this post.
Closed 5 days ago.
This post was edited and submitted for review 4 days ago.
Improve this question
I need to convert Nodejs code into Angular. Please suggest me all the steps. I have Nodejs code which is for Get Data using API. That I need to convert into Angular.
I have a task to get Data from Autodesk using their API and Post into MySQL Database (Workbench). This task has already been done in NodeJs. I want to apply same functionality in Angular.
Controller Code : UpdateController.js
const fs = require('fs');
const path = require('path');
const csvtojson = require('csvtojson');
const xlsx = require('xlsx');
const mongoose = require('mongoose');
const openssl = require('openssl-nodejs');
const extract = require('extract-zip');
const SubscriptionsModel = require('../models/subscription');
const PriceListModel = require('../models/pricelist');
const AuthModel = require('../models/auth');
var SubscriptionController = require('../controllers/subscriptioncontroller');
const { sendPasswordNotificationAfterBatch } = require('../models/notification');
var auth = new AuthModel();
class UploadController {
getCSVData(file){
console.log("In getCSVData");
return new Promise((resolve, reject) => {
csvtojson()
.fromFile(file)
.then(csvData => {
//console.log(csvData);
resolve(csvData);
}).catch(err => {
reject(err);
});
});
}
getXLSXData(filename){
console.log("In getXLSData");
return new Promise((resolve, reject) => {
var workbook = xlsx.readFile(filename);
var sheet_list = workbook.SheetNames;
var sheet = workbook.Sheets[sheet_list[0]];
var jsonObjects = xlsx.utils.sheet_to_json(sheet, { range: 7, raw: true, default: null });
var response = [];
response = jsonObjects.filter(item => {
return item.SRP !== undefined && item.SRP !== null && item.DTP !== undefined && item.DTP !== null
});
resolve(response);
}).catch(error => {
throw error;
});
}
updatePricelist(filePath){
return new Promise((resolve, reject) => {
this.getXLSXData(filePath).then(res => {
if(res){
// Delete backup
//PriceListModel.collection.drop();
// Create collection for updated data
PriceListModel.insertMany(res, err => {
if(err){
reject(err);
}
resolve();
})
}
}).catch(error => {
reject(error);
})
});
}
importSubscriptions()
{
console.log("In Import Subscriptions");
var url = `v1/export/subscriptions`;
console.log(`Importing subscription data...`);
return new Promise((resolve,reject) =>{
var body = {
'startDateSince': '2000-01-01'
}
auth.post(url, body).then(function(res)
{
var d = JSON.parse(res);
console.log(d);
resolve(d);
}).catch(err => {
reject(err);
});
});
}
checkImportSubscriptionJobStatus(jobId, count){
var url = `/v1/export/subscriptions/${jobId}`;
console.log(`Checking import subscription job status ${++count}...`);
return new Promise((resolve,reject) =>{
auth.getUsingHttpPlugin(url).then((res) => {
if (res.statusCode == 303) {
console.log("Current status is " + res.statusCode);
resolve(res.headers["location"]);
}
else {
console.log(`Current status is ${res.statusCode}, will retry in 60 seconds again `);
setTimeout(() => {
this.checkImportSubscriptionJobStatus(jobId, count).then(fileUrl => {
resolve(fileUrl);
});
}, '60000');
}
}).catch(function(err){
reject(err);
});
});
}
downloadFile(jobId, fileUrl) {
console.log("Downloading file...");
return new Promise((resolve, reject) => {
var file = fs.createWriteStream(`uploads/subscriptions/${jobId}.csv.zip.enc`);
//console.log(file);
console.log(fileUrl);
auth.getPlainHttp(fileUrl).then(function (response) {
response.pipe(file);
console.log(file.path);
resolve();
}).catch(err => {
console.log("In Catch");
reject(err);
});
});
}
decriptFieUsingOpenSSL(fileName, password) {
console.log("Decrypting file...");
return new Promise((resolve, reject) => {
var encFile = `../uploads/subscriptions/${fileName}.zip.enc`;
var zipFile = `../uploads/subscriptions/${fileName}.zip`;
var openSSLCmd = `enc -aes-256-cbc -md sha512 -d -in ${encFile} -out ${zipFile} -k ${password}`;
console.log('start running openssl command ' + openSSLCmd);
setTimeout(function () {
openssl(openSSLCmd, () => {
resolve();
})
}, 5000);
});
}
updateSubscriptionData(){
return new Promise((resolve, reject) => {
this.importSubscriptions().then(response => {
if(response.error !== undefined){
console.log(`Error Code: ${response.error.code}`);
console.log(`Message: ${response.error.message}`);
reject();
}else{
var id = response.id;
var password = response.password;
this.checkImportSubscriptionJobStatus(id, 0).then(fileUrl => {
this.downloadFile(id, fileUrl).then(() => {
var fileName = `${id}.csv`;
this.decriptFieUsingOpenSSL(fileName, password).then(() => {
console.log("File decrypted successfully...");
var targetFolder = path.join(__dirname, '..', 'uploads', 'subscriptions');
var zipFile = path.join(targetFolder, `${fileName}.zip`);
extract(zipFile, { dir : targetFolder }).then(() => {
console.log("Extracted file successfully");
var fullFileName = path.join(targetFolder, fileName);
this.getCSVData(fullFileName).then(res => {
if(res){
SubscriptionController.updateSubscriptions(res).then(data => {
resolve('Subscriptions data replicated successfully!!!');
console.log("CHECK NOTIFICATION");
sendPasswordNotificationAfterBatch();
}).catch(err => {
throw err;
});
}
}).catch(error => {
reject(error);
})
})
});
});
});
}
}).catch(err => {
reject(err);
})
});
}
}
module.exports = new UploadController();
Model Code : auth.js
var CryptoJS = require("crypto-js");
var request = require("request");
var httpRequest = require("http");
var httpsRequest = require("https");
var config = require('../config.json');
class AuthModel {
constructor() {
this.timestamp = Math.floor((new Date()).getTime() / 1000);
this.consumer_key = config.app.consmer_key;
this.consumer_secret = config.app.consumer_secret;
this.callback_url = config.app.callback_url;
this.partner_csn = config.app.partner_csn;
this.environment_url_stg = "enterprise-api-stg.autodesk.com"; //STG Environment
this.environment_url_prd = "enterprise-api.autodesk.com"; //prd Environment
this.access_token = '';
this.api_timestamp = '';
}
getbaseUrl()
{
var env = config && config.app.env;
console.log("ENVIRONMENT");
console.log(env);
if(env == 'prd')
{
return this.environment_url_prd;
console.log("ENVIRONMENT URL PID");
console.log(this.environment_url_prd);
}
else{
return this.environment_url_stg;
console.log("ENVRIONMENT URL STG");
console.log(this.environment_url_stg);
}
}
get(url, headerData){
var self = this;
return new Promise((resolve, reject) =>{
self.getAccessToken().then(function(token){
var time = Math.floor(Date.now() / 1000);
var header = {
'CSN': self.partner_csn,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'Authorization': "Bearer " + token
};
var options = {
method: 'GET',
url: `https://${ self.getbaseUrl() }/${url}`,
headers:header
};
request(options, function (error, response, body) {
if (error) {
reject(error);
}
resolve(body);
});
}).catch(function(err){
reject(err);
});
});
}
post(url, body){
console.log(url);
var self = this;
return new Promise((resolve, reject) =>{
self.getAccessToken().then(function(token){
var time = Math.floor(Date.now() / 1000);
var headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': `Bearer ${token}`,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'CSN': self.partner_csn
};
var options = {
method: 'POST',
url: `https://${ self.getbaseUrl() }/${url}`,
headers,
form: body
};
request(options, function (error, response) {
if (error){
reject(error);
}
resolve(response.body);
});
}).catch(function(err){
reject(err);
});
});
}
getAPISignature(token)
{
this.api_timestamp = Math.floor(Date.now() / 1000);
var message = this.callback_url + token + this.api_timestamp;
var hash = CryptoJS.HmacSHA256(message, this.consumer_secret);
var hashInBase64 = CryptoJS.enc.Base64.stringify(hash);
return hashInBase64;
}
createSignature(time)
{
var message = this.callback_url + this.consumer_key + time;
console.log(message);
var hash = CryptoJS.HmacSHA256(message, this.consumer_secret);
console.log(this.consumer_key);
console.log(this.consumer_secret);
var hashInBase64 = CryptoJS.enc.Base64.stringify(hash);
return hashInBase64;
}
createAuthorization(){
var passwordSignature = this.consumer_key + ":" + this.consumer_secret;
console.log(passwordSignature);
var authorization = Buffer.from(passwordSignature).toString('base64')
console.log("Authorization");
console.log(authorization);
return "Basic " + authorization;
}
getAccessToken(){
console.log("IN GetAccessToken");
var self = this;
var time = Math.floor((new Date()).getTime() / 1000);
return new Promise((resolve, reject) =>{
var options = {
method: 'POST',
url:`https://${self.getbaseUrl()}/v2/oauth/generateaccesstoken?grant_type=client_credentials`,
headers: {
timestamp: time,
signature: self.createSignature(time),
Authorization : self.createAuthorization()
}
};
request(options, function (error, response, body) {
if (error) {
reject(error);
}
resolve(JSON.parse(body).access_token);
console.log(JSON.parse(body).access_token);
});
});
}
getUsingHttpPlugin(url, headerData) {
var self = this;
return new Promise((resolve, reject) => {
self.getAccessToken().then(function (token) {
console.log("Get Access Token");
console.log(token);
var time = Math.floor(Date.now() / 1000);
var header = {
'CSN': self.partner_csn,
'signature': self.getAPISignature(token),
'timestamp': self.api_timestamp,
'Authorization': "Bearer " + token
};
var options = {
method: 'GET',
host: `${self.getbaseUrl()}`,
path: `${url}`,
headers: header
};
var s = httpRequest.request(options, (res) => {
resolve(res);
});
s.end();
}).catch(function (err) {
reject(err);
});
});
}
getPlainHttp(url) {
console.log("URL");
console.log(url);
return new Promise((resolve, reject) => {
console.log("In 1");
var s = httpsRequest.get(url, (res) => {
console.log("RESPONSE STATUS CODE");
console.log(res.statusCode);
if (res.statusCode > 200) {
reject(res);
}
resolve(res);
})
s.end();
});
}
}
module.exports = AuthModel;
If any other Node code needed from my side, I am happy to share.

How to generate a content-length

I'm using the node module form-data to send multipart/forms to a server, but i'm not sure how to calculate the content-length correctly for the request. The form-data github documentation seems a bit hand-wavy about it and the server i'm sending to requires the content-length header to be included.
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');
class Platform {
_postKey(key, serviceName) {
const options = this._getOptions();
const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
const extension = (keyName == 'publicKey') ? 'pub' : '';
const filePath = (extension.length > 0) ? __dirname + '/data/'+serviceName+'.'+extension : __dirname + '/data/'+serviceName;
const file = fs.createWriteStream(filePath, {flags: 'wx'});
file.write(key);
file.end();
const form = new FormData();
form.append('file', fs.createReadStream(filePath));
form.append('Name', keyName);
form.append('MimeMajor', 'application');
form.append('MimeMinor', 'x-pem-file');
form.append('Extension', extension);
form.append('FileClass', 'MFS::File');
options.headers = form.getHeaders();
options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
// options.headers['Content-Length'] = key.length;
console.log(options);
return new Promise((resolve, reject) => {
let post = https.request(options, (res) => {
let data = '';
if (res.statusCode < 200 || res.statusCode > 299) {
reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
} else {
res.setEncoding('utf8');
res.on('data', (chunk) => {
data += chunk;
});
res.on('error', (err) => {
reject(err);
});
res.on('end', () => {
console.log(data);
if (data) {
resolve(JSON.parse(data));
} else {
resolve();
}
});
}
});
post.on('error', reject);
form.pipe(post);
});
}
}

how to avoid the "Allocation failed - JavaScript heap out of memory" during API import requests

I've got a nodejs script that read data in excels files and send it to a nodejs API with 'request'. this script works fine with a small amount of data. But when i test with an Excel with 60.000 lines, it breaks with error:
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory
function fillWithFile(file, path, header, fileConfig) {
let workbook = XLSX.readFile(path + '/' + file); // read file
for (var sheetIterator = 0; sheetIterator < workbook.SheetNames.length; sheetIterator++) { // for each tab in excel file
let worksheetJson = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[sheetIterator]]); // transforme one tab of excel to JSON
let config = fileConfig[workbook.SheetNames[sheetIterator]]; // get the config for the current tab
let datasPromises = getPromises(worksheetJson, config, header);
Promise.all(datasPromises).then(function (data) {
var dataString = '{"data":' + JSON.stringify(data) + ',"options":{"purgeBefore":false}}';
let options = {
url: API_URL + '/' + config.service + 's/import',
method: 'POST',
headers: header,
body: dataString
}
request.post(options, function (err, res, body) {
if (err) throw err;
console.log('Server responded with:', body);
});
});
}
}
function getPromises(worksheetJson, config, header) {
let datasPromises = [];
let promises = [];
for (let lineIterator = 0; lineIterator < worksheetJson.length; lineIterator++) { // for each line
datasPromises.push(new Promise(function (resolve, reject) {
for (let key in config.columns) { // for each column in config
if (config.columns[key].service !== undefined) { // if service exist we need to get id of the object in this service.
promises.push(new Promise(function (resolve, reject) {
findChildren(worksheetJson[lineIterator], config.columns[key], header)
.then(function (res) {
resolve({ key: key, data: res.id });
});
}));
}
else {
promises.push(new Promise(function (resolve, reject) {
resolve({ key: key, data: worksheetJson[lineIterator][config.columns[key]] });
}));
}
}
let tmpObj = {};
Promise.all(promises).then(function (values) {
for (var i = 0; i < values.length; i++) {
tmpObj[values[i].key] = values[i].data;
}
resolve(tmpObj);
});
}));
}
return datasPromises;
}
function findChildren(sheetData, config, header) { // get children with get request
let dataObj = {};
let searchParams;
for (let key in config.columns) {
dataObj[key] = sheetData[config.columns[key]];
if (searchParams === undefined) searchParams = key + '=' + sheetData[config.columns[key]];
else searchParams += '&' + key + '=' + sheetData[config.columns[key]];
}
var headers = {
'Authorization': header.Authorization,
'Accept': 'application/json, text/plain, */*',
};
var options = {
url: API_URL + '/' + config.service + 's?' + searchParams,
headers: headers
};
return new Promise(function (resolve, reject) {
request(options, function (error, response, body) {
if (!error && response.statusCode == 200) {
try {
resolve(JSON.parse(body));
} catch (e) {
reject(e);
}
}
else {
return reject(error);
}
});
});
}
The script use a huge amount of memory and he ends up to crash ...
Anyone have an idea how i can solve this ?
I think, i have to find a way to force promises to resolve instantly for avoid the memory overflow.
thanks for any help.
I think the problem is you don't control parallelism and when everything runs 'at once' - it causes memory issues.
I've tried to rewrite the code using async/await - effectively all operations are now serial:
async function fillWithFile(file, path, header, fileConfig) {
let workbook = XLSX.readFile(path + '/' + file); // read file
for (var sheetIterator = 0; sheetIterator < workbook.SheetNames.length; sheetIterator++) { // for each tab in excel file
const worksheetJson = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[sheetIterator]]); // transforme one tab of excel to JSON
const config = fileConfig[workbook.SheetNames[sheetIterator]]; // get the config for the current tab
const data = await getData(worksheetJson, config, header);
const dataString = '{"data":' + JSON.stringify(data) + ',"options":{"purgeBefore":false}}';
const options = {
url: API_URL + '/' + config.service + 's/import',
method: 'POST',
headers: header,
body: dataString
};
await new Promise((resolve, reject) => {
request.post(options, function (err, res, body) {
if (err) {
reject(err);
return;
}
console.log('Server responded with:', body);
resolve(body);
})
});
}
}
async function getData(worksheetJson, config, header) {
const data = [];
for (let lineIterator = 0; lineIterator < worksheetJson.length; lineIterator++) { // for each line
const values = {};
for (let key in config.columns) { // for each column in config
if (config.columns[key].service !== undefined) { // if service exist we need to get id of the object in this service.
const res = await findChildren(worksheetJson[lineIterator], config.columns[key], header);
values[key] = res.id;
}
else {
values[key] = worksheetJson[lineIterator][config.columns[key]];
}
}
data.push(values);
}
return data;
}
function findChildren(sheetData, config, header) { // get children with get request
let dataObj = {};
let searchParams;
for (let key in config.columns) {
dataObj[key] = sheetData[config.columns[key]];
if (searchParams === undefined) searchParams = key + '=' + sheetData[config.columns[key]];
else searchParams += '&' + key + '=' + sheetData[config.columns[key]];
}
var headers = {
'Authorization': header.Authorization,
'Accept': 'application/json, text/plain, */*',
};
var options = {
url: API_URL + '/' + config.service + 's?' + searchParams,
headers: headers
};
return new Promise(function (resolve, reject) {
request(options, function (error, response, body) {
if (!error && response.statusCode === 200) {
try {
resolve(JSON.parse(body));
} catch (e) {
reject(e);
}
}
else {
return reject(error);
}
});
});
}
You now run import like this:
fillWithFile(..args).then(() => console.log('done'))

Trouble with asynchronous functions in node.js

I'm very new to node, and I'm trying to pull a list of IDs from an API, iterate through that list for each ID saving the output, and ultimately rename each file generated. The code below is the closest I've come, and while it works sometimes, it frequently fails as I believe one function isn't waiting for the other to complete (e.g. tries to read before a write), but I'm sure I have other issues going on.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = { 'X-Auth-Key': apiKey }
var queryOpts = { method: 'GET', headers: authHeaders}
function handleFile (error, file)
{
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then
(function findAPI(res, err)
{
if( err )
{ console.log('I cant find the API '+err) }
return res.json()
{console.log('found the API!')}
}
)
.then (function itID(res, err)
{
if( err )
{ console.log('I cant iterate the API '+err) }
for(var i = 0; i < res.length; i++)
{
var intakeID=res[i].Id;
var APIoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID, headers: authHeaders };
var PDFoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID+'/pdf', headers: authHeaders };
console.log('Working on ID:'+intakeID)
var JSONrequest = https.get(APIoptions, writeJSON)
}})
//READ JSON FUNCTION
function readJSON (err, data)
{
if (err) throw err;
if(data.indexOf('New Patient Forms') >= 0)
var contents = fs.readFileSync(JSONfileloc, handleFile);
var jsonContent = JSON.parse(contents)
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
fs.rename("./pdfs/file.json", './JSONLogs/'+pName+' '+pForm+' '+Date.now()+'.json', function(err) {
if ( err ) console.log('Problem renaming! ' + err)
else console.log('Copying & Renaming JSON File!');
})
};
//WRITE JSON FUNCTION
function writeJSON(response, err)
{
var JSONfile = fs.createWriteStream(JSONfileloc, handleFile);
if (err) throw err;
response.pipe(JSONfile);
console.log('JSON Created')
fs.readFile(JSONfileloc, readJSON)
}
The research I've done leads me to believe that async.forEach is probably the right approach here, but I've been having a hard time getting that to work properly. Thanks in advance and any suggestions are much appreciated.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = {
'X-Auth-Key': apiKey
}
var queryOpts = {
method: 'GET',
headers: authHeaders
}
function handleFile(error, file) {
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then(function findAPI(res) {
return res.json();
})
.then(function itID(res) {
const JSONRequests = [];
for (var i = 0; i < res.length; i++) {
var intakeID = res[i].Id;
var APIoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID,
headers: authHeaders
};
var PDFoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID + '/pdf',
headers: authHeaders
};
// https.get has response as a stream and not a promise
// This `httpsGet` function converts it to a promise
JSONRequests.push(httpsGet(APIoptions, i));
}
return Promise.all(JSONRequests);
})
function httpsGet(options, filename) {
return new Promise((resolve, reject) => {
https.get(options, (response) => {
// The WriteJSON function, just for brewity
// Otherwise pass resolve to the seperate writeJSON and call it in there
var JSONfile = fs.createWriteStream(filename + ".json");
response.pipe(JSONfile);
JSONfile.on('close', () => {
readJSON(filename + ".json").then(() => {
resolve();
})
})
})
})
}
//READ JSON FUNCTION
function readJSON(filename) {
// if (err) throw err;
var contents = fs.readFileSync(filename, 'utf-8'); // removed handleFile as readFileSync does not allow callbacks, added format
var jsonContent = JSON.parse(contents)
// Make your conditional checks here with the jsonContents
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
return new Promise((resolve, reject) => {
fs.rename("./pdfs/file.json", './JSONLogs/' + pName + ' ' + pForm + ' ' + Date.now() + '.json', function (err) {
if (err) {
console.log('Problem renaming! ' + err);
reject(err);
} else {
console.log('Copying & Renaming JSON File!');
resolve();
}
})
})
};
Updated to convert https.get response stream to return a Promise which can be handled much better.

Request body sometimes null

The request body sometimes (less than 1% of the time) is null when pulled into lambda. I am processing on the order of 14,000 request bodies one at a time. Any request bodies erring out have to be handled manually. Why is the body randomly coming in null?
Sample code ran from prompt (node index):
const async = require('async');
const _ = require('lodash');
const moment = require('moment');
const Client = require('node-rest-client').Client;
const fs = require('fs');
const input = require('./TestFull.json');
module.exports = () => {
const filename = `./eventfulFails-${new moment().format("YYYY-MM-DD-HHmmss")}.json`;
console.log('Start Time: ', new moment().format("HH:mm:ss"));
let failedObjects = {
events: [],
venues: [],
performers: []
};
async.parallel([
async.apply(processVenues, input.venues, failedObjects),
async.apply(processPerformers, input.performers, failedObjects)
], (lookupErr) => {
if (lookupErr) {
return console.error('Error processing venues and performers.', lookupErr);
}
console.log('Start Events: ', new moment().format("HH:mm:ss"));
async.waterfall([
async.apply(processEvents, input.events, failedObjects)
], (eventErr) => {
if (eventErr) {
console.log('Time of Failure: ', new moment().format("HH:mm:ss"));
return console.error('Error processing events.', eventErr);
}
console.log('End Time: ', new moment().format("HH:mm:ss"));
if (failedObjects.events.length || failedObjects.venues.length || failedObjects.performers.length) {
const stream = fs.createWriteStream(filename);
stream.once('open', function(fd) {
stream.write(JSON.stringify(failedObjects));
stream.end();
});
}
});
});
};
function processVenues(venues, failedObjects, callback) {
const calls = [];
for (let i = 0; i < venues.length; i++) {
const v = venues[i];
calls.push(async.apply((venue, postCallback) => {
const client = new Client();
const args = {
data: venue,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful-venue', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.venues.push({
venue,
response
});
console.log('venue status code: ', response);
console.log('venue data: ', venue);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
function processPerformers(performers, failedObjects, callback) {
const calls = [];
for (let i = 0; i < performers.length; i++) {
const v = performers[i];
calls.push(async.apply((performer, postCallback) => {
const client = new Client();
const args = {
data: performer,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful-performer', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.performers.push({
performer,
response
});
console.log('performer status code: ', response);
console.log('performer data: ', performer);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
function processEvents(events, failedObjects, callback) {
const calls = [];
for (let i = 0; i < events.length; i++) {
const v = events[i];
calls.push(async.apply((event, postCallback) => {
const client = new Client();
const args = {
data: event,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.events.push({
event,
response
});
console.log('event status code: ', response);
console.log('event data: ', event);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
if (!module.parent) {
module.exports();
}
Code of function processVenues (eventful-venue-load) that is called:
const _ = require('lodash');
const AWS = require('aws-sdk');
const async = require('async');
const sdk = require('#consultwithmikellc/withify-sdk');
const host = process.env.aurora_host;
const user = process.env.aurora_user;
const database = process.env.aurora_database;
let decryptedPassword;
const lambda = new AWS.Lambda({
region: 'us-west-2' //your region
});
class WithifyEventCreate extends sdk.Lambda {
constructor(event, context, keysToDecrypt) {
super(event, context, keysToDecrypt);
this.getLocation = this.getLocation.bind(this);
this.insertLocations = this.insertLocations.bind(this);
this.insertLocationImages = this.insertLocationImages.bind(this);
}
decryptedKey(key, value) {
switch (key) {
case 'aurora_password':
decryptedPassword = value;
break;
}
}
initializeComplete() {
this.connect(host, user, decryptedPassword, database, true);
}
connectComplete() {
async.waterfall(
[
this.getLocation,
this.insertLocations,
this.insertLocationImages
]
);
}
getLocation(callback) {
const {id: eventfulLocationID} = this.body;
this.connection.query('SELECT * FROM `Location` WHERE `eventfulLocationID` = ?',
[eventfulLocationID],
(err, results) => {
if (err) {
// error call block
return this.sendResponse(err, this.createResponse(500));
} else if (results.length === 1) {
console.log('Invoking withify-eventful-venue-update...');
lambda.invoke({
FunctionName: 'withify-eventful-venue-update',
Payload: JSON.stringify(this.event)
}, (error, data) => {
return this.sendResponse(null, JSON.parse(data.Payload));
});
} else if (results.length > 1) {
return this.sendResponse(`The location lookup produced multiple results. event:${JSON.stringify(this.body)}`, this.createResponse(500));
} else {
return callback(null);
}
}
);
}
insertLocations(callback) {
const {name: locationName, address: street, city, region_abbr: state, postal_code,
description, id: eventfulLocationID, latitude: lat, longitude: lng, withdrawn: locationWithdrawn} = this.body;
let addresses = street.concat(', ', city, ', ', state, ', ', postal_code);
if (!description.length){
var phones = "";
}else{
var re = /(([\(][0-9]{3}[\)][\s][0-9]{3}[-][0-9]{4})|([0-9]{3}[-][0-9]{3}[-][0-9]{4})|([0-9]{3}[\.][0-9]{3}[\.][0-9]{4}))/i;
this.body.found = description.match(re);
if (!this.body.found){
var phone = "";
}else{
if (!this.body.found.length){
var phone = "";
}else{
var phone = this.body.found[0];
}
}
}
this.connection.query('INSERT IGNORE INTO `Location` (`locationName`, `address`, ' +
'`phone`, `lat`, `lng`, `eventfulLocationID`, `locationWithdrawn`) VALUES (?, ?, ?, ?, ?, ?, ?)',
[locationName, addresses, phone, lat, lng, eventfulLocationID, locationWithdrawn],
(err, results) => {
if (err) {
return this.sendResponse(err, this.createResponse(500));
}
this.body.locationID = results.insertId;
return callback(null);
}
);
}
insertLocationImages(callback) {
var altText = "";
const images = _.flatten(this.body.images.map(im => {
return _.map(im.sizes, (ims, idx) => {
const title = `Image ${idx}`;
return [
this.body.locationID,
this.body.name,
ims.url,
null,
null,
this.body.id,
ims.width,
ims.height
];
});
}));
if(!images[0]){
return this.sendResponse(null, this.createResponse(201, this.body));
}
this.connection.query('INSERT IGNORE INTO `LocationImage` (`locationID`, `imageTitle`, `imageUrl`, ' +
'`imageName`, `altText`, `eventfulLocationID`, `width`, `height`) VALUES ?',
[images],
(err, results) => {
if (err) {
return this.sendResponse(err, this.createResponse(500));
} else if (results.affectedRows !== images.length) {
return this.sendResponse('The image inserts did not affect the right number' +
' of rows.', this.createResponse(500));
}
return this.sendResponse(null, this.createResponse(201, this.body));
}
);
}
}
exports.handler = (event, context) => {
const withifyEventCreate = new WithifyEventCreate(event, context, ['aurora_password']);
withifyEventCreate.initialize([decryptedPassword]);
};

Resources