aws Nodejs sdk:: autoscaling.describeAutoScalingGroups - node.js

I need to get the status of the autoscaling group processes (whether they're suspended or resumed). I've written the below script which returns the properties for the given ASG but the SuspendedProcesses: value is "[Object]". How do I expand the object. The ASG I'm querying has the Terminate process suspended, so I'd expect to see this in the output of the script:
// Question code: describe an Auto Scaling group and print the response.
var AWS = require('aws-sdk');
var uuid = require('uuid');
AWS.config.update({ region: 'eu-west-1' });
AWS.config.apiVersions = {
autoscaling: '2011-01-01',
};
var autoscaling = new AWS.AutoScaling();
var params = {
AutoScalingGroupNames: ["myAutoScalingGroup"]
};
// Fetch and log the ASG description. console.log only expands nested
// objects two levels deep, which is why deeper arrays such as
// SuspendedProcesses print as "[Object]" in the output below.
function status() {
autoscaling.describeAutoScalingGroups(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
}
status();
This is the response from the above script:
C:\Node.js\NodeJsSamples\package01>node SuspendProcess.js
{ ResponseMetadata: { RequestId: 'myRequestId' },
AutoScalingGroups:
[ { AutoScalingGroupName: 'myAutoScalingGroupName',
AutoScalingGroupARN: 'arn:aws:autoscaling:eu-west-
1:myAccNumber:autoScalingGroup:myAutoScalingGroupName',
LaunchConfigurationName: 'myLaunchConfigurationName',
MinSize: 1,
MaxSize: 1,
DesiredCapacity: 1,
DefaultCooldown: 300,
AvailabilityZones: [Object],
LoadBalancerNames: [Object],
TargetGroupARNs: [],
HealthCheckType: 'ELB',
HealthCheckGracePeriod: 300,
Instances: [Object],
CreatedTime: 2017-11-02T08:08:31.364Z,
SuspendedProcesses: [Object],
VPCZoneIdentifier: 'subnet-########,subnet-########,subnet-########',
EnabledMetrics: [],
Tags: [Object],
TerminationPolicies: [Object],
NewInstancesProtectedFromScaleIn: false } ] }
How can I expand the [Object] values?
thanks.

Use this snippet. It uses Node's built-in `util` module, so no extra dependency is needed.
console.log('string', require('util').inspect(<variable>, 1, 10, 1));
in your code:
// Describe the ASG and print the result with util.inspect so that nested
// arrays (SuspendedProcesses, Instances, ...) are fully expanded instead
// of collapsing to "[Object]".
function status() {
  const inspect = require('util').inspect;
  autoscaling.describeAutoScalingGroups(params, (err, data) => {
    if (err) {
      // an error occurred
      console.log(err, err.stack);
      return;
    }
    // successful response, expanded to depth 10 with colors enabled
    console.log('output', inspect(data, 1, 10, 1));
  });
}

You need to use JSON.stringify(data) instead of printing the raw data object in the else clause.
If you print console.log(JSON.stringify(data)) in the else branch, you will get the fully expanded response.

Use JSON.stringify -
// JSON.stringify silently omits function-valued properties, so "age"
// does not appear in the serialized output.
const obj = {
  name: 'John',
  age: function () { return 30; },
  city: 'New York',
};
console.log(JSON.stringify(obj));

Related

Google Document AI - Inconsistent Long Running Operation's metadata JSON representation

While checking the status of Document AI - Long Running Operation (Form processor), the JSON representation of decodedOperation.metadata seems to vary during the execution.
I suspect that operation response does not resolve straight away despite using then() on checkBatchProcessDocumentsProgress(operation.name).
This behaviour does not happen using similar code for Google Speech's LROs.
Context:
At console.log line #24 of implemented code (below), as expected, decodedOperation.metadata resolves to
{
"state":"RUNNING",
"createTime":{
"seconds":"1669278029",
"nanos":500249000
},
"updateTime":{
"seconds":"1669278029",
"nanos":500249000
}
}
Current behaviour:
At console.log line #27, decodedOperation.metadata.state returns 2 (!?)
Expected behaviour:
decodedOperation.metadata.state should return RUNNING.
More details of output in the code below.
Reproduction details:
Environment:
node.js 12.02
Package.json:
{
"dependencies": {
"@google-cloud/documentai": "latest"
}
}
Code:
// Question code (reproduction): poll a Document AI batch LRO and report its state.
function run() {
// NOTE(review): '#google-cloud/documentai' is likely a paste artifact for
// '@google-cloud/documentai' — confirm before running.
const documentai = require('#google-cloud/documentai').v1;
// Create a promise on object
let options = {
credentials: {
client_email: ** ** ** ,
private_key: ** ** * ,
},
projectId: ** ** *
};
return async (async callback => {
const client = new documentai.DocumentProcessorServiceClient(options);
client.checkBatchProcessDocumentsProgress(properties.operation)
.then(
(decodedOperation) => {
console.log("METADATA " + JSON.stringify(decodedOperation.metadata));
/* logs to console:
{
"state":"RUNNING",
"createTime":{
"seconds":"1669278029",
"nanos":500249000
},
"updateTime":{
"seconds":"1669278029",
"nanos":500249000
}
}
/// then
{
"state":"SUCCEEDED",
"createTime":{
"seconds":"1669278029",
"nanos":500249000
},
"updateTime":{
"seconds":"1669278048",
"nanos":758825000
},
"individualProcessStatuses":[
{
"inputGcsSource":"gs://bucket/intake-form.pdf",
"status":{
},
"outputGcsDestination":"gs://bucket/ocr/7371120297544371692/0",
"humanReviewStatus":{
"state":"SKIPPED",
"stateMessage":"HumanReviewConfig is DISABLED, skipping human review."
}
}
]
}
*/
// The issue under discussion: at this point metadata.state serializes
// to the protobuf enum NUMBER (2 = RUNNING, 3 = SUCCEEDED), not the
// string seen in the first log above.
console.log("STATE " + JSON.stringify(decodedOperation.metadata.state));
/* log to console: 2
when above is "RUNNING" */
/* log to console: 3
when above is "SUCCEEDED" */
if (decodedOperation.metadata.state == "SUCCEEDED") { // Never triggers as decodedOperation.metadata.state evaluates to an integer at this line
};
let response = {
"operationStatus": decodedOperation.metadata.state
};
callback(undefined, response);
})
.catch(
(err) => {
callback(err);
});
})
}
Update on investigation
util.inspect(decodedOperation.metadata, { showHidden: false }) returns:
BatchProcessMetadata {
{
"individualProcessStatuses":[
"IndividualProcessStatus"{
"inputGcsSource":"gs://bucketxxx/intake-form.pdf",
"status":[
"Status"
],
"outputGcsDestination":"gs://bucketxxx/ocr/7999521463088838887/0",
"humanReviewStatus":[
"HumanReviewStatus"
]
}
],
"state":3,
"createTime":"Timestamp"{
"seconds":"Long"{
"low":1670011754,
"high":0,
"unsigned":false
},
"nanos":105214000
},
"updateTime":"Timestamp"{
"seconds":"Long"{
"low":1670011773,
"high":0,
"unsigned":false
},
"nanos":489028000
}
}
util.inspect(decodedOperation.metadata, { showHidden: true }) returns (section of interest only):
[...] [root]: [Getter], [fullName]: [Getter] }, State: { STATE_UNSPECIFIED: 0, WAITING: 1, RUNNING: 2, SUCCEEDED: 3, CANCELLING: 4, CANCELLED: 5, FAILED: 6, '0': 'STATE_UNSPECIFIED', '1': 'WAITING', '2': 'RUNNING', '3': 'SUCCEEDED', '4': 'CANCELLING', '5': 'CANCELLED', '6': 'FAILED' }, encode: <ref *5> [Function: BatchProcessMetadata$encode] [...]
To fix this issue, you can access the string representation of the state enum value by using the documentai.v1.BatchProcessMetadata.State object. For example:
console.log("STATE " + documentai.v1.BatchProcessMetadata.State[decodedOperation.metadata.state]);
instead of
console.log("STATE " + JSON.stringify(decodedOperation.metadata.state));
Read more about it.
https://cloud.google.com/php/docs/reference/cloud-document-ai/latest/V1.BatchProcessMetadata.State

Return a node js response inside session.withTransaction

I am using session.withTransaction() to execute multiple updates in the mongo db. Please note that promiseArray has multiple Stock.update statements to update stock quantities.
// Question code: run the stock updates and the GoodReturn save atomically
// inside a MongoDB transaction. promiseArray (defined elsewhere) holds
// multiple Stock.update promises.
await session.withTransaction(
async () => {
promiseResults = await Promise.all(promiseArray);
for (const result of promiseResults) {
recordCounter++;
if (result.nModified === 1) {
stockItemsNoUpdate.push(goodReturnSummary[recordCounter]);
}
}
// Intent: abort the transaction and respond early. NOTE(review): sending
// the HTTP response from inside the transaction callback is the subject
// of the question — confirm the transaction is actually aborted here.
if (stockItemsNoUpdate.length > 0) {
return res.status(200).send(response);
}
existingGoodReturnSummary = GoodReturn.build({
_id: sheetId,
goodReturnSummary,
agency,
createdBy,
});
// Pass the session so the save participates in this transaction.
await existingGoodReturnSummary.save({ session: session });
existingGoodReturnSummary = await GoodReturn.calculateTotalGoodReturnAmount(
existingGoodReturnSummary,
session
);
},
{
readPreference: 'primary',
readConcern: { level: 'local' },
writeConcern: { w: 'majority' },
}
);
If stockItemsNoUpdate.length > 0 I need to abort this transaction and send the response. done by below code segment.
if (stockItemsNoUpdate.length > 0) {
return res.status(200).send(response);
}
But I cannot do this because of the below error
Any idea on how to resolve this ??
Cheers
See Nodejs mongodb's Transaction API `withTransaction` always return null and https://jira.mongodb.org/browse/NODE-2014.
https://jira.mongodb.org/browse/NODE-2014?focusedCommentId=2420255&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-2420255 provides a workaround.

DynamoDB ignored in AWS Lambda Local

I have DynamoDB running locally:
java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
and I'm trying to run lambda-local example:
lambda-local -f aws -e event.json
However, I don't get any output from dynamodb at all. There is no error and it looks like the call to dynamodb.listTables() is skipped/ignored. What's wrong?
My aws.js is as follows:
// Question code: Lambda handler that lists DynamoDB tables against a
// locally running DynamoDB instance.
var AWS = require("aws-sdk");
AWS.config.update({
region: "us-west-2",
endpoint: "http://localhost:8000",
accessKeyId: "BLAH",
secretAccessKey: "BLAH"
});
var dynamodb = new AWS.DynamoDB();
exports.handler = function(event, context) {
console.log("EVENT: ", event);
event.int++;
console.log("LIST TABLES:");
// listTables is asynchronous: its callback runs after this function body
// has finished — which is why context.succeed() below ends the invocation
// before any table output appears (the bug discussed in this question).
dynamodb.listTables(function(err, data) {
if (err) {
console.log("Error: ", err.code);
} else {
console.log("Table names are: ", data.TableNames);
}
});
console.log("---SUCCEED:---");
context.succeed(event);
};
and event.json:
{
"obj" : { "a" : "b" },
"int" : 1,
"str" : "qwerty",
"arr" : [ 1, 2, 3, 4 ]
}
Output is:
EVENT: { obj: { a: 'b' }, int: 1, str: 'qwerty', arr: [ 1, 2, 3, 4 ] }
LIST TABLES:
---SUCCEED:---
OUTPUT
--------------------------------
{
"obj": {
"a": "b"
},
"int": 2,
"str": "qwerty",
"arr": [
1,
2,
3,
4
]
}
I'm expecting to see at least something between LIST TABLES and --SUCCEED:---
Yet there is no output and no error. I also checked DynamoDB log and there is nothing there. The line event.int++ worked fine and I see all other console.log() calls.
Additionally, I run this code from node just to prove that the DynamoDB is working and it does list tables fine:
node ListTables.js
Content of ListTables.js (which is basically same code as above):
// Standalone sanity check: confirm local DynamoDB is reachable by listing
// all table names outside of the Lambda wrapper.
var AWS = require("aws-sdk");
AWS.config.update({
  region: "us-west-2",
  endpoint: "http://localhost:8000",
  accessKeyId: "BLAH",
  secretAccessKey: "BLAH"
});
var dynamodb = new AWS.DynamoDB();
dynamodb.listTables((err, data) => {
  if (err) {
    console.log("Error: ", err.code);
    return;
  }
  console.log("Table names are: ", data.TableNames);
});
Outputs as expected:
Table names are: [ 'Movies' ]
I created that table earlier just to prove that DynamoDB is actually running and accepting connections from node.
The problem is that the DynamoDB function is called asynchronously and therefore the script finished earlier.
Once I moved context.succeed(event); line from the end to inside of dynamodb.listTables(function(err, data) { then I've got the output fine:
dynamodb.listTables((err, data) => {
  console.log("INSIDE");
  if (err) {
    console.log("Error: ", err.code);
  } else {
    console.log("Table names are: ", data.TableNames);
  }
  // Finish the invocation only after the async result has been logged.
  context.succeed(event);
});
lambda-local -f aws -e event.json
OUTPUTS:
EVENT: { obj: { a: 'b' }, int: 1, str: 'qwerty', arr: [ 1, 2, 3, 4 ] }
LIST TABLES:
---SUCCEED:---
INSIDE
Table names are: [ 'Image', 'Movies' ]
OUTPUT
--------------------------------
...

My task isn't working of ActionHerojs

exports.sayHelloAction = {
name: 'sayHelloAction',
description: '',
outputExample: {},
version: 1,
inputs: {},
run: function (api, data, next) {
// Enqueue the task now, and process it ASAP
// api.tasks.enqueue(nameOfTask, args, queue, callback)
api.tasks.enqueue("sayHello", null, 'default', function (error, toRun) {
next(error)
});
}
};
and my task is like this, but when I run my task from my action I can't see the log (">>>>>>>>>>") in my console :(
const sayHello = {
name: 'sayHello',
description: 'I say hello',
queue: "default",
plugins: [],
pluginOptions: [],
frequency: 1000,
run: function(api, params, next){
console.log(">>>>>>>>>>>>>>>>>>>>>>>>>>")
next(true);
}
};
exports.task = sayHello
versions: Nodejs: 7.7, ActionHerojs 17
You are enqueuing a task, not running it. You need to enable some workers on your server.

Converting audio with sox in nodejs

I have made sure that all that sox needs to function is installed, and I've also installed the sox package for node. Yet, this does not seem to work.
/**
 * Transcode an audio file to mp3 with sox.
 * @param {string} file - path of the source audio file
 * @param {string} destination - path for the transcoded output
 * @param {number} quality - bitrate in KiB/s (multiplied by 1024 for bitRate)
 * @returns {Promise<string>} resolves on the job's 'end' event,
 *   rejects with a message string on 'error'
 */
function encode(file, destination, quality) {
  return new Promise((resolve, reject) => {
    console.log(destination)
    let job = sox.transcode(file, destination, {
      sampleRate: 44100,
      format: 'mp3',
      bitRate: quality * 1024,
    })
    job.on('src', function(info) {
      console.log(info)
    });
    job.on('progress', (amountDone, amountTotal) => {
      console.log("progress", amountDone, amountTotal);
    });
    job.on('error', (err) => {
      reject('Could not transcode mp3.')
    })
    job.on('end', () => {
      resolve('mp3 transcoded.')
    })
    console.log(job)
    // FIX: sox.transcode() only builds the job object; nothing is executed
    // until start() is called, which is why it was "failing" silently.
    job.start()
  })
}
The console.log at the end logs:
Transcode {
domain: null,
_events:
{ src: [Function],
progress: [Function],
error: [Function],
end: [Function] },
_eventsCount: 4,
_maxListeners: undefined,
inputFile: 'C:\\Users\\User\\Documents\\App\\Media\\media\\uploads\\audio/2016/269/1/0//10c746ef62374c6ab1f2ecfc36705618/original.mp3',
outputFile: 'C:\\Users\\User\\Documents\\App\\Media\\media\\uploads\\audio/2016/269/1/0//10c746ef62374c6ab1f2ecfc36705618/128.mp3',
options:
{ sampleRate: 44100,
format: 'mp3',
bitRate: 196608,
channelCount: 2,
compressionQuality: 5 } }
There are no errors, so I wonder if something here is failing silently. Identifying a file works just fine:
// Probe the uploaded file's format before transcoding.
let original = (path + 'original.mp3')
sox.identify(original, function(err, info) {
  if (err) {
    console.log(err)
    // NOTE(review): throwing inside this async callback cannot be caught by
    // a surrounding try/catch — confirm a rejection/error path is intended.
    throw err
  }
  if (info.format !== 'mp3') {
    // FIX: throw an Error object rather than a bare string so the stack
    // trace and `instanceof Error` checks are preserved.
    throw new Error('File must be mp3!')
  }
  console.log(info) // {format: 'mp3', ..}
});
The above code works just fine, but sox.transcode does not.
The example code mentions a job.start() that (presumably) starts the transcoding job. I don't see that being called in your code.

Resources