Read environment-based config properties in Node.js

I am a C# guy, but I am writing a Node.js Lambda function for the first time.
I am trying to return a JSON object based on the environment. For example, if the query param is ?env=prod,
then the expected response will be
{
"Aws_Client_Id": "ProdAbc",
"Aws_Secret_Key": "ProdAbc",
"FeatureStringTest": "DefaultValue"
}
config.json
{
"Aws_Client_Id": {
"Deafult": "Abc",
"Dev": "DevAbc",
"Uat": "UatAbc",
"Prod": "ProdAbc"
},
"Aws_Secret_Key": {
"Deafult": "Abc",
"Dev": "DevAbc",
"Uat": "UatAbc",
"Prod": "ProdAbc"
},
"FeatureStringTest": "DefaultValue"
}
My current Lambda code:
const fs = require("fs");
exports.handler = async (event) => {
const jsonString = fs.readFileSync("./config.json", 'utf8');
var config = JSON.parse(jsonString);
const response = {
statusCode: 200,
body: JSON.stringify(config),
};
return response;
};
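One way to produce the environment-specific response is to resolve each key against the requested environment before serializing. A minimal sketch, assuming an API Gateway proxy event (so the query string arrives as event.queryStringParameters) and the config.json layout above:

const fs = require("fs");

// Resolve each config entry for the requested environment, falling back to
// the "Default" value when a key has no entry for that environment.
const resolveConfig = (config, env) => {
    const resolved = {};
    for (const [key, value] of Object.entries(config)) {
        if (value && typeof value === "object") {
            resolved[key] = value[env] !== undefined ? value[env] : value.Default;
        } else {
            resolved[key] = value; // plain values like FeatureStringTest pass through
        }
    }
    return resolved;
};

exports.handler = async (event) => {
    const config = JSON.parse(fs.readFileSync("./config.json", "utf8"));
    // Map ?env=prod onto the "Prod" key used in config.json.
    const raw = (event.queryStringParameters && event.queryStringParameters.env) || "";
    const env = raw.charAt(0).toUpperCase() + raw.slice(1).toLowerCase();
    return {
        statusCode: 200,
        body: JSON.stringify(resolveConfig(config, env)),
    };
};

With ?env=prod this returns {"Aws_Client_Id":"ProdAbc","Aws_Secret_Key":"ProdAbc","FeatureStringTest":"DefaultValue"}, matching the expected response.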

Related

Why am I encountering an error when deploying a nodejs function in gcloud with a zip or directly with editor?

I want to create a cloud function, and I am working in VS Code. I think I am using everything necessary to build the cloud function.
To test it I installed @google-cloud/storage and it works perfectly on my machine. However, when I compress the project into a zip to import it into GCP, deployment gives me an error:
(Build failed: function.js does not exist; Error ID: 7485c5b6)
This is despite the fact that I clearly set my exports.backup function as the entry point in GCP.
Here is the code I'm trying to run - something must be missing but I can't figure it out.
package.json:
{
"name": "export-mysql",
"version": "1.0.0",
"description": "create backup database production",
"main": "index.js",
"scripts": {
"backup": "functions-framework --target=backup"
},
"author": "",
"license": "ISC",
"dependencies": {
"chalk": "^4.1.2",
"dayjs": "^1.10.7",
"dotenv": "^10.0.0",
"googleapis": "^92.0.0",
"#google-cloud/functions-framework": "^2.1.0"
}
}
code:
const { google } = require("googleapis");
const sqlAdmin = google.sqladmin("v1beta4");
const dayjs = require("dayjs");
const chalk = require("chalk");
const dotenv = require("dotenv");
const log = console.log;
const error = console.error;
dotenv.config({ path: "./config/.env" });
let respo = "";
authorize(function (authClient) {
const date = dayjs(Date.now()).format("YYYYMMDDHHmm");
var request = {
project: "project",
instance: "database-prod",
resource: {
exportContext: {
databases: ["database"],
fileType: "SQL",
kind: "sql#exportContext",
uri: `gs://backup-database-pop/backup-prod-${date}.gz`,
},
},
auth: authClient,
};
sqlAdmin.instances.export(request, function (err, response) {
if (err) {
error(chalk.yellow.bold(`Status: ${err.code}`));
log(chalk.red.bold(`Message: ${err.message}`));
return;
}
// TODO: Change code below to process the `response` object:
// log(chalk.yellow.bold(`Status: ${response.status}`));
log(chalk.greenBright.bold(`Database exported to bucket -> backup-database-pop, file: backup-prod-${date}.sql`));
respo = `Database exported to bucket -> backup-database-pop, file: backup-prod-${date}.sql`;
return respo;
// log.log(JSON.stringify(response, null, 2));
});
});
function authorize(callback) {
google.auth
.getClient({
scopes: ["https://www.googleapis.com/auth/cloud-platform"],
})
.then((client) => {
callback(client);
})
.catch((err) => {
error(chalk.red.bold("authentication failed: ", err));
});
}
exports.backup = (req, res) => {
res.end();
log(respo);
log("Function complete!");
};
And here is the structure of the folder that is zipped:
functionFolder
folder -> config/.env
index.js
package.json
package-lock.json
authorize.json
Here is the solution: select the files themselves (index.js, package.json, and so on) and compress those, rather than compressing the functionFolder folder. Zipping the folder nests everything one level deep, so the build cannot find the entry file at the root of the archive, which is why deployment fails.

How to properly mock data object for unit tests?

I'm trying to unit test a chunk of code that looks something like the snippet below.
const getSubscriptions = {
options: {
async handler({query}) {
const searchParams = new URLSearchParams(query); // build searchParams from the destructured query
const workType = searchParams.get('workType');
return axios.get(url, {
headers: {
Authorization: `Bearer ${clientCredentials.access_token}`
}
}).then((response) => {
if (workType && workType.length > 0) {
if (workType === 'Unknown') {
response.data._embedded = response.data._embedded.filter((subscriptions) => !subscriptions.systemProperties.workType);
} else {
response.data._embedded = response.data._embedded.filter((subscriptions) => subscriptions.systemProperties.workType && subscriptions.systemProperties.workType === workType);
}
}
return response.data;
})
}
},
method: 'GET',
path: '/subscriptions'
};
I'm seeing errors such as "cannot read property 'filter' of undefined" in the existing tests after adding the .filter usage. How can I properly mock this in my unit tests so that filter is recognized as a function? Currently the tests are passing in a data object that looks like:
data: [chance.object(), chance.object()]
I've tried editing this data object to include _embedded and then I get an error that .filter is not a function.
I'm new to writing unit tests for JavaScript, so I've been stuck on this for a while. Any help is appreciated. I'm using Mocha for testing, but can likely interpret a Jasmine or Jest solution as well.
Here is a trimmed down version of the test suite:
describe('Get subscriptions', () => {
const workType = chance.word();
const getSubscriptionsResponse = {
data: [chance.object(), chance.object()]
};
const expectedRequest = {
auth: {
...expectedAuth
},
path: '/subscriptions',
query: {
workType
}
};
it('should call API with auth token for Subscriptions', async () => {
const axiosStub = sinon.stub(axios, 'get').resolves(getSubscriptionsResponse);
const expectedParams = new URLSearchParams(expectedRequest.query);
await getSubscriptionsRoute.options.handler(expectedRequest);
expect(axiosStub).to.have.callCount(1);
expect(axiosStub).to.be.calledWith(expectedUrl, {
headers: {
Authorization: `${accessToken}`
}
});
});
});
The code is trying to filter the response.data._embedded array, which means response.data must be an object. You should mock the response data as an object (with the _embedded property), not an array.
{
data: [chance.object(), chance.object()]
}
should be
{
data: {
_embedded: [
// ...
]
}
}
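A minimal sketch of the corrected stub for the test above, reusing the chance helpers from the question; the systemProperties.workType shape is an assumption based on what the handler filters on:

const getSubscriptionsResponse = {
    data: {
        _embedded: [
            // matches the workType under test
            { systemProperties: { workType } },
            // no workType, so it only survives the 'Unknown' branch
            { systemProperties: {} }
        ]
    }
};

With that shape, response.data._embedded is a real array, so .filter is a function, and the handler's return value can be asserted against the expected filtered subset.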

Unable to write item(s) to DynamoDB table utilizing DocumentClient - Nodejs

I'm absolutely brand new to DynamoDB and I'm trying to simply write an object from a Node.js Lambda. Based on what I've read and researched, I should probably be using DocumentClient from the aws-sdk. I also found the following question here regarding issues with DocumentClient, but it doesn't seem to address my specific issue, which I can't really pinpoint, unfortunately. I've set up a debugger to help with SAM local development, but it appears to be providing only some of the errors.
The code's implementation is shown here.
var params = {
TableName: "March-Madness-Teams",
Item: {
"Id": {"S": randstring.generate(9)},
"School":{"S": team_name},
"Seed": {"S": seed},
"ESPN_Id": {"S": espn_id}
}
}
console.log(JSON.stringify(params))
dynamodb.put(params, (error,data) => {
if (error) {
console.log("Error ", error)
} else {
console.log("Success! ", data)
}
})
Basically I'm scraping a website using the cheerio library, cherry-picking values from the DOM, and saving them into the JSON object shown below.
{
"TableName": "March-Madness-Teams",
"Item": {
"Id": {
"S": "ED311Oi3N"
},
"School": {
"S": "BAYLOR"
},
"Seed": {
"S": "1"
},
"ESPN_Id": {
"S": "239"
}
}
}
When I attempt to push this JSON object to Dynamo, I get an error that says:
Error MultipleValidationErrors: There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'TableName' in params
* MissingRequiredParameter: Missing required key 'Item' in params
The above error is all well and good... I assume it didn't like the fact that I had wrapped those two keys in strings, so I removed the quotes and sent the following:
{
TableName: "March-Madness-Teams",
Item: {
"Id": {
"S": "ED311Oi3N"
},
"School": {
"S": "BAYLOR"
},
"Seed": {
"S": "1"
},
"ESPN_Id": {
"S": "239"
}
}
}
However, when I do that...I kind of get nothing.
Here is a larger code snippet.
return new Promise((resolve,reject) => {
axios.get('http://www.espn.com/mens-college-basketball/bracketology')
.then(html => {
const dynamodb = new aws.DynamoDB.DocumentClient()
let $ = cheerio.load(html.data)
$('.region').each(async function(index, element){
var preregion = $(element).children('h3,b').text()
var region = preregion.substr(0, preregion.indexOf('(') - 1)
$(element).find('a').each(async function(index2, element2){
var seed = $(element2).siblings('span.rank').text()
if (seed.length > 2){
seed = $(element2).siblings('span.rank').text().substring(0, 2)
}
var espn_id = $(element2).attr('href').split('/').slice(-2)[0]
var team_name = $(element2).text()
var params = {
TableName: "March-Madness-Teams",
Item: {
"Id": randstring.generate(9),
"School":team_name,
"Seed": seed,
"ESPN_Id": espn_id
}
}
console.log(JSON.stringify(params))
// dynamodb.put(params)
// .then(function(data) {
// console.log(`Success`, data)
// })
})
})
})
})
Can you try without the type?
Instead of
"School":{"S": team_name},
for example, use
"School": team_name,
From your code, I can see the missing promise on the dynamodb request. Try to change your lines:
dynamodb.put(params).then(function(data) {
console.log(`Success`, data)
})
to be :
dynamodb.put(params).promise().then(function(data) {
console.log(`Success`, data)
})
You can combine it with await too:
await dynamodb.put(params).promise().then(function(data) {
console.log(`Success`, data)
})
const aws = require('aws-sdk')
const axios = require('axios')
const cheerio = require('cheerio')
const randstring = require('randomstring') // assuming the randomstring package, for randstring.generate below
const dynamodb = new aws.DynamoDB.DocumentClient()
exports.lambdaHandler = async (event, context) => {
const html = await axios.get('http://www.espn.com/mens-college-basketball/bracketology')
let $ = cheerio.load(html.data)
const schools = buildCompleteSchoolObject(html, $)
try {
await writeSchoolsToDynamo(schools)
return { statusCode: 200 }
} catch (error) {
return { statusCode: 400, message: error.message }
}
}
const writeSchoolsToDynamo = async (schools) => {
const promises = schools.map(async school => {
await dynamodb.put(school).promise()
})
await Promise.all(promises)
}
const buildCompleteSchoolObject = (html, $) => {
const schools = []
$('.region').each(loopThroughSubRegions(schools, $))
return schools
}
const loopThroughSubRegions = (schools, $) => {
return (index, element) => {
var preregion = $(element).children('h3,b').text()
var region = preregion.substr(0, preregion.indexOf('(') - 1)
$(element).find('a').each(populateSchoolObjects(schools, $))
}
}
const populateSchoolObjects = (schools, $) => {
return (index, element) => {
var seed = $(element).siblings('span.rank').text()
if (seed.length > 2) {
seed = $(element).siblings('span.rank').text().substring(0, 2)
}
var espn_id = $(element).attr('href').split('/').slice(-2)[0]
var team_name = $(element).text()
schools.push({
TableName: "March-Madness-Teams",
Item: {
"Id": randstring.generate(9),
"School": team_name,
"Seed": seed,
"ESPN_Id": espn_id
}
})
}
}
I know this is drastically different from what I started with, but I did some more digging and kind of worked my way to this... I'm not sure if this is the best way, but I seemed to get it to work... Let me know if something should change!
Oh, I understand what you want.
The code above may work, but there is one concept you should improve here about async/await and promises, especially in a Lambda function.
I have some notes on your code above that you might consider to improve your Lambda:
Awaiting every promise one by one in a Lambda is not the best approach, given the Lambda execution time limit, though sometimes it is fine in other cases.
Maybe you can change the dynamodb.put calls to a batch write (batchWrite on the DocumentClient, which wraps the BatchWriteItem operation):
The BatchWriteItem operation puts or deletes multiple items in one or more tables.
Or, if you have to use dynamodb.put instead, try to improve the code like so:
const writeSchoolsToDynamo = async (schools) => {
// return the promise from the callback so Promise.all actually waits on the writes
const promises = schools.map(school => dynamodb.put(school).promise())
return Promise.all(promises)
}
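For the batch-write suggestion above, a minimal sketch, assuming the same DocumentClient instance (dynamodb) and the school objects built by buildCompleteSchoolObject (each carrying TableName and Item):

const writeSchoolsInBatches = async (schools) => {
    // BatchWriteItem accepts at most 25 put/delete requests per call, so chunk the array
    for (let i = 0; i < schools.length; i += 25) {
        const chunk = schools.slice(i, i + 25)
        await dynamodb.batchWrite({
            RequestItems: {
                'March-Madness-Teams': chunk.map(school => ({
                    PutRequest: { Item: school.Item }
                }))
            }
        }).promise()
    }
}

Each call writes up to 25 schools in one round trip, which cuts the number of network requests compared to one put per item.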

Cloud Functions for Firebase: 'Error: could not handle the request'

I feel like pulling my hair out; this is either super simple and I'm having brain freeze, or it is not that simple.
What I want
I am trying to unshorten a shortened URL using Firebase when a user goes to:
myapp.firebaseappurl.com/url/SHORTENEDLINK
(SO won't let me add an actual shortened URL here.)
I would like the output to be:
{
"url": "https://stackoverflow.com/questions/45420989/sphinx-search-how-to-use-an-empty-before-match-and-after-match"
}
What I have tried
firebase.json file:
{
"hosting": {
"public": "public",
"rewrites": [ {
"source": "/url/:item",
"destination": "/url/:item"
} ]
}
}
index.js file:
const functions = require('firebase-functions');
const request = require('request'); // assuming the request package, since request(...) is called below
exports.url = functions.https.onRequest((requested, response) => {
var uri = requested.url;
request({
uri: uri,
followRedirect: true
},
function(err, httpResponse) {
if (err) {
return console.error(err);
}
response.send(httpResponse.headers.location || uri);
}
);
});
Result
When I go to myapp.firebaseappurl.com/url/SHORTENEDLINK I get the following:
Error: could not handle the request
You are seeing Error: could not handle the request since there probably was an exception and it timed out.
Check your logs using:
firebase functions:log
Refer to the docs for more details.
Here's how I got URL unshortening to work
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
const http = require('http');
const urlP = require('url');
const unshorten = (url, cb) => {
const _r = http.request(
Object.assign(
{},
urlP.parse(url),
{
method: 'HEAD',
}
),
function(response) {
cb(null, response.headers.location || url);
}
);
_r.on('error', cb);
_r.end();
};
const resolveShortUrl = (uri, cb) => {
unshorten(uri, (err, longUrl) => {
if (longUrl === uri) {
cb(null, longUrl);
} else {
resolveShortUrl(longUrl, cb);
}
});
};
exports.url = functions.https.onRequest((requested, response) => {
var uri = requested.query.url;
resolveShortUrl(uri, (err, url) => {
if (err) {
// handle err
} else {
response.send({ url });
}
});
});
You can follow the hello world example straight away and use the above code as your function.
The above code uses HEAD requests to peek into the Location field of the headers and decides if the URL can be further unshortened.
This is lighter as HEAD requests ask for no body (thereby avoiding body parsing). Also, no third party lib required!
Also note that the URL is passed as a query param, so the request would be:
http://<your_firebase_server>/url?url=<short_url>
This saves you the trouble of URL rewrites, and semantically it makes a little more sense.
Did you try using the { source: '/url/**' } syntax?
You can use something like this:
{
"hosting": {
"public": "public",
"rewrites": [ {
"source": "/url/**",
"function": "/url"
}]
}
}
and then you can parse the url from the request.
exports.url = functions.https.onRequest((req, res) => {
// parse the url from the req and redirect to the correct link
});
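A minimal sketch of that parsing step, assuming the rewrite above forwards requests like /url/SHORTENEDLINK and reusing the resolveShortUrl helper from the earlier answer:

exports.url = functions.https.onRequest((req, res) => {
    // req.path looks like "/url/SHORTENEDLINK"; strip the "/url/" prefix
    const shortUrl = req.path.replace(/^\/url\//, '');
    resolveShortUrl(shortUrl, (err, url) => {
        if (err) {
            res.status(500).send({ error: 'could not resolve url' });
        } else {
            res.send({ url });
        }
    });
});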
You should try this in the firebase.json; it worked for me:
"source": "/**",
I also tried "source": "/url/**" but that did not work.
I think your code is fine. What you're doing incorrectly is using Express.js-style notation (the :item part) in your firebase.json's rewrites node. That notation doesn't work in Firebase Hosting rewrites.
So, instead of doing that, change your firebase.json to the following :-
{
"hosting": {
"public": "public",
"rewrites": {
"source": "YOUR SHORTENED URL",
"destination": "YOUR ORIGINAL URL"
}
}
}
This is also the advocated approach in the Cloud Functions for Firebase's URL Shortener sample.
First make sure you are receiving the request properly with the shortened url.
const functions = require('firebase-functions');
const express = require('express');
const body_parser = require('body-parser'); // needed for body_parser.text below
var express_app = express();
express_app.use(body_parser.text({type: ()=>true}));
express_app.all('*', (req, res) => {
console.log(req.path);
res.send(JSON.stringify(req.path));
});
exports.url = functions.https.onRequest(express_app);
Now when you visit myapp.firebaseappurl.com/url/SHORTENEDLINK you should see the SHORTENEDLINK in plain text. When that's working, try the redirect.
const functions = require('firebase-functions');
const express = require('express');
const body_parser = require('body-parser'); // needed for body_parser.text below
const request = require('request');
var express_app = express();
express_app.use(body_parser.text({type: ()=>true}));
express_app.all('*', (req, res) => {
var uri = req.path;
request({
uri: uri,
followRedirect: true
},
function(err, httpResponse) {
if (err) {
return console.error(err);
}
res.send(httpResponse.headers.location || uri);
}
);
});
exports.url = functions.https.onRequest(express_app);
Also, it's good practice to npm install with --save so dependencies end up in package.json. While Firebase copies your node_modules folder, most other SaaS platforms run npm install.

Unable to parse xml to json properly

I'm trying to parse XML to JSON and I'm using xml2js in Node.js.
Below is my code:
var chai = require('chai');
var chaiHttp = require('chai-http');
var request = chai.request;
var should = chai.should();
var expect = chai.expect;
var assert = chai.assert;
var supertest = require('supertest');
var fs = require('fs');
var xmlLocation = "./configdata/xmlDoc.xml";
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
//Plugin for HTTP, etc.
chai.use(chaiHttp);
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0';
//xmlFile = JSON.parse(fs.readFileSync(xmlData, 'utf8'));
describe("Test : ", function () {
it("convert xml to json", function (done) {
const r = fs.readFileSync(xmlLocation, 'UTF-8');
parser.parseString(r, function (err, parsedData) {
if (err) throw err;
else {
fs.writeFile("jsonData.json", JSON.stringify(parsedData), function (err) {
// call done() only after the file has actually been written
done();
});
}
});
});
})
My sample XML file:
<?xml version="1.0" encoding="UTF-8" ?>
<ALEXA>
<SD TITLE="A" FLAGS="" HOST="davidwalsh.name">
<TITLE TEXT="David Walsh Blog :: PHP, MySQL, CSS, Javascript, MooTools, and Everything Else"/>
<LINKSIN NUM="1102"/>
<SPEED TEXT="1421" PCT="51"/>
</SD>
<SD>
<POPULARITY URL="davidwalsh.name/" TEXT="7131"/>
<REACH RANK="5952"/>
<RANK DELTA="-1648"/>
</SD>
</ALEXA>
I'm getting the below output:
{
"ALEXA": {
"SD": [
{
"$": {
"TITLE": "A",
"FLAGS": "",
"HOST": "davidwalsh.name"
},
"TITLE": [
{
"$": {
"TEXT": "David Walsh Blog :: PHP, MySQL, CSS, Javascript, MooTools, and Everything Else"
}
}
],
"LINKSIN": [
{
"$": {
"NUM": "1102"
}
}
],
"SPEED": [
{
"$": {
"TEXT": "1421",
"PCT": "51"
}
}
]
},
{
"POPULARITY": [
{
"$": {
"URL": "davidwalsh.name/",
"TEXT": "7131"
}
}
],
"REACH": [
{
"$": {
"RANK": "5952"
}
}
],
"RANK": [
{
"$": {
"DELTA": "-1648"
}
}
]
}
]
}
}
'$' is getting added to the parsed JSON. How can I avoid it?
Looking for a solution. Thanks in advance.
The $ is the place for your attributes with the default config.
As xml2js parses your XML tags (SD, for example) explicitly as arrays (with explicitArray=true; you have multiple of them anyway, and you can only assign one of each key per object in JSON), you need a place to store the attributes, and this is what $ is for. You can enforce creating arrays with explicitArray=true (which is the default) or turn this off. Using mergeAttrs you might get the result you desire.
You can also change the attrkey if that would be a solution; the same goes for charkey and so on. You can find all the config options in the README on GitHub at https://github.com/Leonidas-from-XIV/node-xml2js - possibly an option such as explicitChildren might be the right one for you.
If you don't need attributes at all, you can set ignoreAttrs=true. By the way, parser options go as an object into the parser constructor, in your case: new xml2js.Parser({...options});
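A minimal sketch of those options, assuming r holds the XML string read in the question's test:

const xml2js = require('xml2js');

// mergeAttrs folds attributes into the parent object instead of a "$" key;
// explicitArray: false only creates arrays when a tag actually repeats
const parser = new xml2js.Parser({ mergeAttrs: true, explicitArray: false });

parser.parseString(r, function (err, parsedData) {
    if (err) throw err;
    console.log(JSON.stringify(parsedData, null, 2));
});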
Libraries like this usually parse everything, which sometimes results in many properties you don't need.
I created camaro for this purpose.
The downside is that you have to write your own template for the structure you want the XML to be transformed into.
const transform = require('camaro')
const fs = require('fs')
const xml = fs.readFileSync('ean.xml', 'utf-8')
const template = {
cache_key: "/HotelListResponse/cacheKey",
hotels: ["//HotelSummary", {
hotel_id: "hotelId",
name: "name",
rooms: ["RoomRateDetailsList/RoomRateDetails", {
rates: ["RateInfos/RateInfo", {
currency: "ChargeableRateInfo/#currencyCode",
non_refundable: "nonRefundable",
price: "ChargeableRateInfo/#total"
}],
room_name: "roomDescription",
room_type_id: "roomTypeCode"
}]
}],
session_id: "/HotelListResponse/customerSessionId"
}
const result = transform(xml, template)
Instead of xml2js, use xml2json, which converts XML to JSON keeping the original keys exactly.
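A minimal sketch, assuming the xml2json package from npm and the same XML string r as above:

const parser = require('xml2json');

// toJson returns a JSON string by default; pass { object: true } to get an object
const json = parser.toJson(r, { object: true });
console.log(JSON.stringify(json, null, 2));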
