Node JS unit test - cloud function, how to call to inner function - node.js

I'm using AVA framework (https://github.com/avajs/ava/blob/HEAD/docs/01-writing-tests.md)
to test Cloud Function on GCP written in NodeJS.
I'm trying to unit test an inner function inside my cloud function.
My testing code looks as follow:
// NOTE(review): question code — kept as-is; it shows the failing test.
const test = require(`ava`);
const uuid = require(`uuid`);
const sinon = require(`sinon`);
const triggerResultsService = require(`..`).triggerResultsService;
const consoleLog = sinon.stub(console, 'log');
// NOTE(review): `sendToTransferService` is an inner (non-exported) function of
// the Cloud Function module, so this property access yields undefined — see
// the answer below about exporting it from its own module.
const sendToTransferService = require(`..`).triggerResultsService.sendToTransferService;
test('resolves with unicorn', t => {
const filename = uuid.v4();
sendToTransferService(filename, () =>{
});
// NOTE(review): `Promise()` without `new` (and without an executor) throws a
// TypeError, so this chain can never resolve with a value.
return Promise().then(result => {
t.is(result, 'unicorn');
});
});
My Cloud function code looks as the following:
/**
* Generic background Cloud Function to be triggered by Cloud Storage.
*
* @param {object} event The Cloud Functions event.
* @param {function} callback The callback function.
*/
var request = require("request");
exports.triggerResultsService = (event, callback) => {
var file = event.data;
// Keep only the part after the last '/' and normalise it for the checks below.
var fileName = file.name.substr(file.name.lastIndexOf('/')).toLowerCase().trim();
// Only act on non-temporary gzipped CSV files.
if(!fileName.includes('temp-') && fileName.includes('.csv.gz')) {
console.log("file name is in correct location, sending options");
// NOTE(review): the promises returned by these helpers are not awaited, so
// the function may signal completion before the HTTP calls finish.
sendToTransferService(file);
sendStatusEmail("Transfer File Call successful");
};
callback();
};
/**
 * POST the file's name and bucket to the transfer service.
 *
 * @param {object} file Storage object with `name` and `bucket` properties.
 * @returns {Promise<{responsebody: *}>} resolves with the service response
 *   body, rejects with `{err}` on a request error.
 */
function sendToTransferService(file) {
  const payload = {
    fileName: file.name,
    bucketName: file.bucket
  };
  const requestOptions = {
    method: 'POST',
    uri: process.env.TRANSFER_SERVICE_URL,
    body: payload,
    json: true
  };
  return new Promise((resolve, reject) => {
    request(requestOptions, (err, resp) => {
      if (err) {
        console.log(err);
        reject({ err: err });
        return;
      }
      resolve({ responsebody: resp.body });
    });
  });
}
/**
 * POST a status notification email to the email-notification service.
 *
 * @param {string} statusMessage Text placed in the email body.
 * @returns {Promise<{responsebody: *}>} resolves with the service response
 *   body, rejects with `{err}` on a request error.
 */
function sendStatusEmail(statusMessage) {
  const message = {
    from: process.env.EMAIL_FROM,
    to: [process.env.SLACK_EMAIL],
    cc: [''],
    bcc: [''],
    subject: process.env.EMAIL_SUBJECT,
    body: statusMessage
  };
  const requestOptions = {
    method: 'POST',
    uri: process.env.EMAIL_NOTIFICATION_URL,
    body: message,
    json: true
  };
  return new Promise((resolve, reject) => {
    request(requestOptions, (err, resp) => {
      if (err) {
        console.log(err);
        reject({ err: err });
        return;
      }
      resolve({ responsebody: resp.body });
    });
  });
}
I'm not able to reach the function sendToTransferService .
Any ideas what should I require/declare
Thanks in advance

I think your easiest path is to turn that function into its own module, and export it from there. It will be accessible to any other code that imports the module, including your Cloud Functions code.

Related

API works randomly while putting data into dynamoDB

I'm trying to put data into Dynamodb using serverless deployment. I have added the permission to write to Dynamodb.
The api is always sending {"message": "Internal server error"} but is able to put the data into db once if tried 5,6 times.
Following is the serverless.yaml config
handler: dynamoUpdate.handler
events:
- http:
path: /createdbentry
method: get
cors: true
Following is the code:
const AWS = require('aws-sdk')
AWS.config.update({ region: process.env.REGION || 'us-east-1' })
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});
// NOTE(review): question code — `createDBEntry` below does not return a
// promise, so this await resolves to undefined and API Gateway answers with a
// 500; the accepted answer explains the fix.
exports.handler = async (event) => {
return await createDBEntry("i123","Working");
}
/**
 * Build an API Gateway proxy response with a JSON content type.
 *
 * @param {number} status HTTP status code.
 * @param {*} body Response body (passed through unchanged).
 * @returns {object} `{statusCode, headers, body}` proxy response object.
 */
const sendRes = (status, body) => ({
  statusCode: status,
  headers: {
    "Content-Type": "application/json"
  },
  body: body
});
// NOTE(review): question code. This async function never returns or awaits the
// putItem call, so the handler resolves to undefined; the callback's return
// values go to the AWS SDK, not to API Gateway. See the corrected version in
// the answer below.
const createDBEntry = async function(id,result) {
var params = {
TableName: 'emplist',
Item: {
'ID' : {S:id},
'Summary':{S: result},
}
};
ddb.putItem(params, function(err, data) {
console.log("Here comes me.")
if (err) {
console.log("Opps Error");
return sendRes(403,err);
} else {
console.log("Complete")
return sendRes(200,data);
}
});
}
How can it be resolved ?
The problem is you are not returning any promise or awaiting anything async in your function called createDBEntry. Thus your handler returns undefined which makes apigateway return 500 internal server error.
You are mixing callbacks and async/await.
Your createDBEntry function should look like this.
/**
 * Write an item to the `emplist` table and translate the outcome into an
 * API Gateway proxy response.
 *
 * @param {string} id Item ID.
 * @param {string} result Summary text.
 * @returns {Promise<object>} 200 response with the putItem data on success,
 *   403 response with the error text on failure.
 */
const createDBEntry = async function(id, result) {
  const item = {
    'ID' : {S:id},
    'Summary':{S: result},
  };
  const params = {
    TableName: 'emplist',
    Item: item
  };
  try {
    const data = await ddb.putItem(params).promise();
    return sendRes(200, JSON.stringify(data));
  } catch (err) {
    console.log("Oops Error");
    return sendRes(403, err.toString());
  }
}
When you return from an async handler in lambda the current execution environment is frozen immediately (unlike when you use a non-async handler with a callback). This is why most of the time the writes to the database does not succeed.

Azure: Function stops working when trying to get blob content

I am working with nodeJS and Azure functions. I am trying to get the content of a blob (pptx) and then further work with that pptx (unzip it with admzip).
However, whenever I try to get the content, the function just stops without any error and after some time it times out. I tried getting the properties of the blob first (to check if the blob exists) and that works.
Here is my function:
const storage = require('azure-storage');
const STORAGE_ACCOUNT_NAME = 'storage-account';
const ACCOUNT_ACCESS_KEY = 'storage-key';
let AdmZip = require('adm-zip');
let fs = require('file-system');
const blobService = storage.createBlobService(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
// NOTE(review): question code. The promise chain has no rejection handler and
// `context.done()` is never called, so the Functions host keeps waiting — this
// matches the reported timeout; see the answers below for the fix.
module.exports = function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
getBlobProperties('default-powerpoint', 'download.pptx').then((properties) => {
context.log('Properties: ', properties);
getBlobContent('default-powerpoint', 'download.pptx').then((content) => {
context.log('Blob Content: ', content);
})
});
};
/**
 * Look up a blob's properties (also serves as an existence check).
 *
 * @param {string} containerName Container holding the blob.
 * @param {string} fileName Blob name.
 * @returns {Promise<object>} resolves with the blob's properties, rejects
 *   with the storage error.
 */
function getBlobProperties(containerName, fileName) {
  return new Promise((resolve, reject) => {
    const onDone = (err, properties, status) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(properties);
    };
    blobService.getBlobProperties(containerName, fileName, onDone);
  });
}
/**
 * Stream a blob's content into the writable stream `res`.
 *
 * @param {string} containerName Container holding the blob.
 * @param {string} fileName Blob name.
 * @param {object} res Destination writable stream.
 * @returns {Promise<string>} resolves with the JSON-stringified result object,
 *   rejects with the storage error.
 */
function getBlobContentAsStream(containerName, fileName, res) {
  const run = (resolve, reject) => {
    blobService.getBlobToStream(containerName, fileName, res, (err, results) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(JSON.stringify(results, null, 2));
    });
  };
  return new Promise(run);
}
/**
 * Download a blob as text.
 *
 * @param {string} containerName Container holding the blob.
 * @param {string} blobName Blob name.
 * @returns {Promise<{content: *, blob: object}>} resolves with the blob text
 *   and the blob result object, rejects with the storage error.
 */
function getBlobContent(containerName, blobName) {
  return new Promise((resolve, reject) => {
    const onDone = (err, blobContent, blob) => {
      if (err) {
        reject(err);
        return;
      }
      resolve({
        'content': blobContent,
        'blob': blob
      });
    };
    blobService.getBlobToText(containerName, blobName, onDone);
  });
}
As you can see I tried both getBlobToStream and getBlobToText but with the same result. The getBlobProperties works fine and I get all the information about the blob, just not the content.
Can anyone please help me get the content of the blob.
Edit:
This is the output of the properties if anyone is interested:
BlobResult {
container: 'default-powerpoint',
name: 'download.pptx',
metadata: {},
lastModified: 'Wed, 14 Aug 2019 08:28:16 GMT',
creationTime: 'Wed, 14 Aug 2019 08:28:16 GMT',
etag: '"something"',
blobType: 'BlockBlob',
contentLength: '4658',
serverEncrypted: 'true',
requestId: 'someID',
contentSettings: { contentType: 'image/jpeg' },
lease: { status: 'unlocked', state: 'available' },
copy:
{ id: 'id123',
status: 'success',
source: 'sourceURL',
progress: '4658/4658',
bytesCopied: 4658,
totalBytes: 4658,
completionTime: 'Wed, 14 Aug 2019 08:28:16 GMT' } }
Here is the working code i have used in my app
// NOTE(review): answer snippet, kept verbatim. An async promise executor is an
// anti-pattern (exceptions thrown inside it are silently lost), and errors are
// mapped to null instead of rejecting — presumably acceptable to this caller;
// verify before reusing.
return new Promise(async r => {
bst.getBlobToText(containername, name, (err, text) => r(err ? null : text));
})
Full SourceCode
I was able to solve this using the following code:
const storage = require('azure-storage');
const fs = require('fs');
const STORAGE_ACCOUNT_NAME = '<account_name>';
const ACCOUNT_ACCESS_KEY = '<access_key>';
const blobService = storage.createBlobService(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
/**
 * Download a blob into a local file ('task1-download.txt') via a write stream.
 *
 * @param {string} containerName Container holding the blob.
 * @param {string} fileName Blob name.
 * @param {*} res Unused; kept for signature compatibility with callers.
 * @returns {Promise<object>} resolves with the server blob result, rejects
 *   with the storage error.
 */
function getBlobContentAsStream(containerName, fileName, res) {
  return new Promise((resolve, reject) => {
    blobService.getBlobToStream(containerName, fileName, fs.createWriteStream('task1-download.txt'), function(error, serverBlob) {
      if(!error) {
        resolve(serverBlob);
      } else {
        // Bug fix: the callback parameter is `error`; the original rejected
        // with the undefined identifier `err` (ReferenceError on failure).
        reject(error);
      }
    });
  })
}
module.exports = function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
context.log('Starting...');
getBlobContentAsStream('default-powerpoint', 'download.pptx').then((content) => {
context.log('Blob Content: ', content);
context.done();
}, function(err) {
console.log.error(err);
context.done();
});
};
And trace output
The problem that happens in your code you can actually see in Application Insights trace if you connected Functions to it. You didn't get any errors because you didn't add error handling for your then callback executor.
getBlobContent('default-powerpoint', 'download.pptx').then((content) => {
context.log('Blob Content: ', content);
context.done();
})
Use
getBlobContent('default-powerpoint', 'download.pptx').then((content) => {
  context.log('Blob Content: ', content);
  context.done();
}, function(err) {
  console.log(err);
  context.done();
}); // bug fix: the original ended with `}))` — one `)` too many (syntax error)
You would see
With details
Error: An incorrect number of bytes was read from the connection.
The connection may have been closed.
So the problem you are having with getBlobToText is that it tries to return a Buffer object as a string and fails to validate the MD5. I read somewhere that it's possible to use the write-to-stream function to write to a buffer instead of a file, but I can't find it right now.
I would probably grab some NodeJS memory stream library and try output it there as I would assume you don't want to save directly to file. But maybe you do, decide yourself.
If you would end up using 'fs' library remember to use recommended safe non blocking patterns like this
const fs = require('fs');
const util = require('util');
const readFileAsync = util.promisify(fs.readFile);
module.exports = async function (context) {
try {
const data = await readFileAsync('./hello.txt');
} catch (err) {
context.log.error('ERROR', err);
// This rethrown exception will be handled by the Functions Runtime and will only fail the individual invocation
throw err;
}
context.log(`Data from file: ${data}`);
}
Might be the issue is that the api is changed. I just checked below, callback function takes only two arguments in getBlobToText:
https://github.com/Azure-Samples/storage-blobs-node-quickstart/blob/master/index.js
/**
 * Download a blob as text using the two-argument callback form of
 * `getBlobToText`.
 *
 * @param {string} containerName Container holding the blob.
 * @param {string} blobName Blob name.
 * @returns {Promise<{message: string, text: string}>} resolves with the blob
 *   text, rejects with the storage error.
 */
const downloadBlob = async (containerName, blobName) => {
  // Note: the original computed an unused (and misspelled) local
  // `dowloadFilePath`; it has been removed.
  return new Promise((resolve, reject) => {
    blobService.getBlobToText(containerName, blobName, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve({ message: `Blob downloaded "${data}"`, text: data });
      }
    });
  });
};

How to return a string from a node js api call

I am using a code snippet that should return a value after a POST REST call to an API.
But wherever I call the function, it does not return the value and prints undefined.
Whenever I call getAccessToken(), it says undefined — but if I print the value inside the callback, I do get the output.
How can the caller get the return value? Do I need to change anything in the code below?
Thanks
// NOTE(review): question code. `request` is asynchronous, so `return token`
// below runs before the callback assigns it — the caller always receives
// undefined. The answers below show promise/callback fixes.
var getAccessToken = exports.getAccessToken = function (res) {
// body...
const request = require('request');
// NOTE(review): a credential is hard-coded here; it should come from
// configuration or a secret store, not source control.
const authKey='EAcEa4o4SkBLo9IpZpW4Y7oDn7d6b30GlouNh28pJ6Q='
const ContentType='application/x-www-form-urlencoded' ;
var postData={
'grant_type':'client_credentials'
};
const options = {
url: 'https://xyz/v1/login',
method: 'POST',
headers: {
'Content-Type': ContentType,
'Authorization':authKey
},
body:require('querystring').stringify(postData)
};
var token;
request(options, function(errror, response, body) {
//console.log(JSON.parse(body));
token= JSON.parse(body).access_token;
});
// NOTE(review): executes before the HTTP callback above — always undefined.
return token;
}
Your function doesn't return anything. You may use async/await, promises or callbacks to fix it.
// NOTE(review): pseudo-code. The `request` library is callback-based and does
// not return a promise, so `await request(...)` only works with a promisified
// wrapper (e.g. request-promise) — confirm which library is intended.
exports.getAccessToken = async (res) => {
...
return await request(...)
}
OR
exports.getAccessToken = function(res) {
...
return new Promise(function(resolve, reject) {
...
request(options, function(errror, response, body) {
var token = JSON.parse(body).access_token;
resolve(token);
}
});
}
// Use it like
getAccessToken().then(function(token) { ... });
OR
exports.getAccessToken = function(res, cb) {
...
request(options, function(errror, response, body) {
var token = JSON.parse(body).access_token;
cb(token);
}
}
// Use it like
getAccessToken(res, function(token) { ... });

how to call rest api inside aws lambda function using nodejs

i have created aws lambda function. i want to use rest api calls inside my lambda function.
Is there any reference how to connect it to rest api using nodejs
const https = require('https')
// data for the body you want to send.
const data = JSON.stringify({
todo: 'Cook dinner.'
});
const options = {
hostname: 'yourapihost.com',
port: 443,
path: '/todos',
method: 'POST',
headers: {
'Content-Type': 'application/json',
// NOTE(review): `data.length` counts UTF-16 units; for multibyte bodies
// Buffer.byteLength(data) would be the correct Content-Length — verify.
'Content-Length': data.length
},
};
// NOTE(review): `await` here only works inside the async Lambda handler (or an
// ES module with top-level await) — confirm the surrounding context.
const response = await doRequest(options, data);
console.log("response", JSON.stringify(response));
/**
 * Do a request with options provided.
 *
 * @param {Object} options https.request options (host, path, method, headers).
 * @param {Object} data Serialized request body to send.
 * @return {Promise} resolves with the JSON-parsed response body, rejects on a
 *   transport error.
 */
function doRequest(options, data) {
  return new Promise((resolve, reject) => {
    const req = https.request(options, (res) => {
      res.setEncoding("utf8");
      const chunks = [];
      res.on("data", (chunk) => {
        chunks.push(chunk);
      });
      res.on("end", () => {
        resolve(JSON.parse(chunks.join("")));
      });
    });
    req.on("error", (err) => {
      reject(err);
    });
    req.write(data);
    req.end();
  });
}
If you want to call rest api inside lambda function, you can use request package:
install request package via npm: https://www.npmjs.com/package/request
Then inside lambda function try this to call rest api:
var req = require('request');
const params = {
  url: 'API_REST_URL',
  headers: { 'Content-Type': 'application/json' },
  // Bug fix: the original passed `JSON.parse({ id: 1 })`, which stringifies
  // the object to "[object Object]" and throws a SyntaxError. The `json`
  // option takes a plain object directly.
  json: { id: 1 }
};
// POST to the REST endpoint and log either the error or the response body.
req.post(params, function(err, res, body) {
  if(err){
    console.log('------error------', err);
  } else{
    console.log('------success--------', body);
  }
});
const superagent = require('superagent');
// Lambda handler: delegate to a promise-returning helper so the async handler
// resolves with the API result.
exports.handler = async(event) => {
return await startPoint(); // use promise function for api
}
// Wraps the superagent callback API in a promise.
function startPoint(){
return new Promise(function(resolve,reject){
superagent
.get(apiEndPoint)
.end((err, res) => {
...
});
})
}
If you are asking about creating a HTTP rest endpoint in lambda using
nodejs. Here is the example.
https://github.com/serverless/examples/tree/master/aws-node-simple-http-endpoint
If you are asking about access an external API inside lambda using
nodejs. Here is an example.
https://github.com/robm26/SkillsDataAccess/blob/master/src/CallService/index.js
Hope this helps.

node.js stubbing AWS S3 method in request spec with sinon

I've got an express based app running on node.js 0.12.2 which uses the s3.headBucket method from aws-sdk 2.1.22 to return a JSON response depending upon whether a particular bucket exists or not.
I've been struggling to directly stub out the call to s3.headBucket with sinon. I've managed to work around this by creating an s3wrapper module which just requires the aws-sdk and instantiates and returns the s3 variable, however, I'm sure this can be done without using the wrapper module and can instead be stubbed directly with sinon, can anyone point me in the right direction?
Below is the currently working code (with the wrapper module s3wrapper.js which I'd like to remove and handle the stubbing in my status_router_spec.js file). In other words, I'd like to be able to call s3.headBucket({Bucket: 'whatever' ... instead of s3wrapper.headBucket({Bucket: ' ... and be able to stub out this s3.headBucket call with my own response.
status_router_spec.js
var chai = require('chai'),
sinon = require('sinon'),
request = require('request'),
myHelper = require('../request_helper')
var expect = chai.expect
var s3wrapper = require('../../helpers/s3wrapper')
describe('My router', function () {
describe('checking the service status', function () {
var headBucketStub
beforeEach(function () {
// Replace the wrapper's headBucket so no real S3 call is made.
headBucketStub = sinon.stub(s3wrapper, 'headBucket')
})
afterEach(function () {
s3wrapper.headBucket.restore()
})
describe('when no errors are returned', function () {
it('returns healthy response', function (done) {
// pass null to represent no errors
headBucketStub.yields(null)
request.get(myHelper.appUrl('/status'), function (err, resp, body) {
// NOTE(review): missing `return` after done(err) — on error, done() below
// would be called a second time.
if (err) { done(err) }
expect(JSON.parse(body)).to.deep.eql({
healthy: true,
message: 'success'
})
done()
})
})
})
})
})
s3wrapper.js
// Thin wrapper module: exporting the S3 client from one place lets tests stub
// its methods (sinon.stub(s3wrapper, 'headBucket')) without touching aws-sdk.
var AWS = require('aws-sdk')
var s3 = new AWS.S3()
module.exports = s3
status_router.js
var Router = require('express').Router
var s3wrapper = require('../helpers/s3wrapper.js')
var router = new Router()
// GET /status — reports health based on whether headBucket succeeds for the
// configured bucket.
function statusHandler (req, res) {
s3wrapper.headBucket({Bucket: 'some-bucket-id'}, function (err) {
if (err) {
return res.json({ healthy: false, message: err })
} else {
return res.json({ healthy: true, message: 'success' })
}
})
}
router.get(/^\/status\/?$/, statusHandler)
module.exports = router
Answering this question for the benefit of #ippomakunochi who requested a follow up response.
We ended up using rewire to directly set a stub on the s3 library. For example, we stubbed the getObject call for the s3 library using the following:
s3stub = { getObject: sinon.stub(), listObjects: sinon.stub() }
revert = s3.__set__('s3', s3stub)
Here's the complete code:
../../../build/app/helpers/s3
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
module.exports = {
get: function get(options, callback) {
var requestOptions = { Bucket: module.exports.bucket(), Key: options.productId + '.json' };
s3.getObject(requestOptions, function (err, data) {
if (err) { // handle err }
try {
var productData = JSON.parse(data.Body);
} catch (e) {
// handle error
}
return callback(null, productData);
});
}
}
}
test/unit/app/helpers/s3_spec.js
var AWS = require('aws-sdk')
var chai = require('chai')
var sinon = require('sinon')
var sinonChai = require('sinon-chai')
var chaiSubset = require('chai-subset')
var rewire = require('rewire')
var s3 = rewire('../../../build/app/helpers/s3')
chai.use(chaiSubset)
chai.use(sinonChai)
var expect = chai.expect
describe('S3', function () {
var s3stub, revert
beforeEach(function () {
s3stub = { getObject: sinon.stub(), listObjects: sinon.stub() }
revert = s3.__set__('s3', s3stub)
})
afterEach(function () {
revert()
})
describe('#get', function () {
context('when no errors are returned by s3', function () {
it('returns a product', function (done) {
var productResponse = helper.fixture.body('product.json')
s3stub.getObject.yields(null, productResponse)
s3.get({ productId: '1234' }, function (err, res) {
expect(err).to.not.exist
expect(res).to.containSubset({name: 'long sleeve shirt', 'retailer_code': 'retailer-1'})
done()
})
})
})
context('when s3 returns a NoSuchKey error', function () {
it('returns a NotFoundError', function (done) {
var s3Error = AWS.util.error(new Error(), { name: 'NoSuchKey' })
s3stub.getObject.yields(s3Error)
s3.get({ productId: '1234' }, function (err) {
expect(err.message).to.eql('1234 is not found in s3')
expect(err.output.statusCode).to.eql(404)
done()
})
})
})
})

Resources