How do you troubleshoot DynamoDB trigger "PROBLEM: Function call failed" or Lambda "Missing credentials in config"? - node.js

I have the Lambda function below that is supposed to take changes to an existing DynamoDB table and apply them to another DynamoDB table.
I purposely left the code I've tried commented out. I am down to two paths, both of which throw errors in different places.
If I include SharedIniFileCredentials in the code, I get the following error in the CloudWatch logs:
{
    "message": "Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1",
    "errno": -2,
    "syscall": "open",
    "code": "CredentialsError",
    "path": "/home/sbx_user1051/.aws/credentials",
    "time": "2020-11-16T02:57:31.177Z",
    "originalError": {
        "message": "Could not load credentials from SharedIniFileCredentials",
        "errno": -2,
        "syscall": "open",
        "code": "CredentialsError",
        "path": "/home/sbx_user1051/.aws/credentials",
        "time": "2020-11-16T02:57:31.177Z",
        "originalError": {
            "errno": -2,
            "syscall": "open",
            "code": "ENOENT",
            "path": "/home/sbx_user1051/.aws/credentials",
            "message": "ENOENT: no such file or directory, open '/home/sbx_user1051/.aws/credentials'"
        }
    }
}
If I remove that and just let the role I created and attached (which the AWS documentation says is all I need) handle permissions, I get no error in the CloudWatch logs, but my console statements show that putItem (or put, if I use the other class) never gets executed. The code returns before it reaches those calls. And if I go to the AWS console and look at the Triggers tab on my DynamoDB table, I see "PROBLEM: Function call failed".
I have added environment variables. I have hard-coded the key/secret into the code. I have tried running this on the command line with the AWS CLI. I have made sure my credentials and config files are populated correctly. The same two issues occur no matter what I do.
I even tried the AWS CLI. If I invoke the function from the command line with the following command:
aws lambda invoke --function-name myfunction --cli-binary-format raw-in-base64-out --payload file://mynewitem2.json output.txt
I get this after a few seconds:
Read timeout on endpoint URL: "https://lambda.us-east-2.amazonaws.com/2015-03-31/functions/myfunction/invocations"
If I add the config-file statements back and re-run it on the command line, I get this, but the CloudWatch logs still do not show that putItem or put was reached:
{
    "StatusCode": 200,
    "ExecutedVersion": "$LATEST"
}
Does anyone have any suggestions here? I have looked through all the similar requests for help with no luck. I've looked at the AWS documentation over and over.
I'm completely stuck here. I must be missing something. I just don't know what it is. Do any of you??
Here is the Lambda function:
'use strict';
console.log("starting myplugin insertion . . . ");
var AWS = require("aws-sdk");
AWS.config.logger = console;
//AWS.config = new AWS.Config();
//AWS.config.accessKeyId = "...";
//AWS.config.secretAccessKey = "...";
//AWS.config.region = 'us-east-2';
//AWS.config.update({
//    aws_access_key_id : '...',
//    aws_secret_access_key : '...',
//    region: 'us-east-2'
//});
var creds = new AWS.SharedIniFileCredentials({profile: 'myprofile'});
console.log('Creds problem: ', JSON.stringify(creds, null, 2));
AWS.config.credentials = creds;
AWS.config.getCredentials(function(err) {
    if (err)
        console.log('Cred problem: ', JSON.stringify(err, null, 2));
    else {
        console.log("Cred Access:", JSON.stringify(AWS.config.credentials, null, 2));
    }
});
var ddb = new AWS.DynamoDB({
    'apiVersion': '2012-08-10'
});
exports.handler = (event, context, callback) => {
    console.log(JSON.stringify(event, null, 2));
    event.Records.forEach((record) => {
        console.log('my Stream record: ', JSON.stringify(record, null, 2));
        if (record.eventName == 'INSERT') {
            console.log('my INSERTING RECORD');
            var params = {
                TableName: 'myplugin_temp',
                Item: {
                    "client" : record.dynamodb.NewImage.client,
                    "expiration" : record.dynamodb.NewImage.expiration,
                    "notificationurl" : record.dynamodb.NewImage.notificationurl,
                    "clientid" : record.dynamodb.NewImage.clientid,
                    "s3path" : record.dynamodb.NewImage.s3path,
                    "language" : record.dynamodb.NewImage.language,
                    "filename" : record.dynamodb.NewImage.filename,
                    "timecreated" : record.dynamodb.NewImage.timecreated,
                    "appid" : record.dynamodb.NewImage.appid,
                    "subtitle" : record.dynamodb.NewImage.subtitle,
                    "host" : record.dynamodb.NewImage.host,
                    "mediatype" : record.dynamodb.NewImage.mediatype,
                    "sourcemimtype" : record.dynamodb.NewImage.sourcemimetype,
                }
            };
            console.log("my UP HERE");
            ddb.putItem(params, function(err, data) {
                console.log("my HERE");
                if (err) {
                    console.log("my INSERTING Error", JSON.stringify(err, null, 2));
                } else {
                    console.log("my INSERTING Success", JSON.stringify(data, null, 2));
                    console.log("my COMPLETED INSERTION MODULE");
                }
            });
        }
        if (record.eventName == 'REMOVE') {
            console.log('my DELETING RECORD');
            var params = {
                TableName: 'myplugin_temp',
                Item: {
                    "client" : record.dynamodb.NewImage.client,
                    "expiration" : record.dynamodb.NewImage.expiration,
                    "notificationurl" : record.dynamodb.NewImage.notificationurl,
                    "clientid" : record.dynamodb.NewImage.clientid,
                    "s3path" : record.dynamodb.NewImage.s3path,
                    "language" : record.dynamodb.NewImage.language,
                    "filename" : record.dynamodb.NewImage.filename,
                    "timecreated" : record.dynamodb.NewImage.timecreated,
                    "appid" : record.dynamodb.NewImage.appid,
                    "subtitle" : record.dynamodb.NewImage.subtitle,
                    "host" : record.dynamodb.NewImage.host,
                    "mediatype" : record.dynamodb.NewImage.mediatype,
                    "sourcemimtype" : record.dynamodb.NewImage.sourcemimetype,
                }
            };
            ddb.deleteItem(params, function(err, data) {
                if (err) {
                    console.log("my DELETING Error", JSON.stringify(err, null, 2));
                } else {
                    console.log("my DEL Success", JSON.stringify(data, null, 2));
                }
            });
        }
        if (record.eventName == 'MODIFY') {
            console.log('my MODIFYING RECORD');
            var params = {
                TableName: 'myplugin_temp',
                Item: {
                    "client" : record.dynamodb.NewImage.client,
                    "expiration" : record.dynamodb.NewImage.expiration,
                    "notificationurl" : record.dynamodb.NewImage.notificationurl,
                    "clientid" : record.dynamodb.NewImage.clientid,
                    "s3path" : record.dynamodb.NewImage.s3path,
                    "language" : record.dynamodb.NewImage.language,
                    "filename" : record.dynamodb.NewImage.filename,
                    "timecreated" : record.dynamodb.NewImage.timecreated,
                    "appid" : record.dynamodb.NewImage.appid,
                    "subtitle" : record.dynamodb.NewImage.subtitle,
                    "host" : record.dynamodb.NewImage.host,
                    "mediatype" : record.dynamodb.NewImage.mediatype,
                    "sourcemimtype" : record.dynamodb.NewImage.sourcemimetype,
                }
            };
            ddb.updateItem(params, function(err, data) {
                if (err) {
                    console.log("my UPDATING Error", JSON.stringify(err, null, 2));
                } else {
                    console.log("my UPDATING Success", JSON.stringify(data, null, 2));
                }
            });
        }
    });
    //callback(null, `my Successfully processed records.`);
    console.log(JSON.stringify(callback, null, 2));
};

Well, a few notes worth highlighting:
AWS.SharedIniFileCredentials loads credentials from a file in the user's home directory; on Linux that is /home/[user]/.aws/credentials. The Lambda execution environment has no access to such a path, so SharedIniFileCredentials only works on a system where you control the home directory (for example, your own machine), not inside Lambda.
Loading Credentials in Node.js from the Shared Credentials File
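If the shared profile is only needed when running the same code locally, one option (a minimal sketch; the profile name 'myprofile' and the environment-variable check are assumptions, not from the original post) is to load it only when the code is not running inside Lambda:
var AWS = require("aws-sdk");

// AWS_LAMBDA_FUNCTION_NAME is set by the Lambda runtime; when it is absent,
// assume a local run and fall back to the shared credentials file.
if (!process.env.AWS_LAMBDA_FUNCTION_NAME) {
    AWS.config.credentials = new AWS.SharedIniFileCredentials({ profile: 'myprofile' });
}

// Inside Lambda, do nothing here: the execution role supplies the credentials.
var ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });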
There are two ways to set up permissions so that Lambda can access AWS services such as DynamoDB:
Creating an IAM role for the Lambda function and adding DynamoDB access to its policy (a sample policy is sketched just after this list). The console actually prompts you to create a new role and policy when you create a Lambda function.
How to Create an AWS IAM Policy to Grant AWS Lambda Access to an Amazon DynamoDB Table
Using IAM user access keys, as you have done above.
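For reference, a minimal execution-role policy granting write access to the destination table from the question might look roughly like this (an illustration only; the region, account ID, and exact action list are placeholders to adapt). The stream trigger itself additionally needs the DynamoDB Streams read permissions that the AWSLambdaDynamoDBExecutionRole managed policy provides.
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dynamodb:PutItem",
                "dynamodb:UpdateItem",
                "dynamodb:DeleteItem"
            ],
            "Resource": "arn:aws:dynamodb:us-east-2:123456789012:table/myplugin_temp"
        }
    ]
}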
Also keep in mind how asynchronous and synchronous code behave in JavaScript. In your Lambda function you never wait for the asynchronous calls to finish, for example here:
// putItem is asynchronous; the handler can return before this callback ever runs
ddb.putItem(params, function(err, data) {
    ...
})
I would suggest changing the code to the newer async/await style with the AWS SDK's promise support, like so:
exports.handler = async (event) => {
    try {
        let allTasks = [];
        event.Records.forEach((record) => {
            // set params here
            allTasks.push(ddb.putItem(params).promise());
            ...
        })
        if (allTasks.length > 0) {
            return Promise.all(allTasks).then(results => {
                console.log(results)
            })
        }
        return
    } catch(e) {
        console.log(e)
        throw e
    }
}
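For a more concrete picture, here is a rough sketch that applies that idea to the question's table and stream records. A few assumptions to be clear about: record.dynamodb.NewImage in a stream record is already in DynamoDB AttributeValue format, so it can be passed straight to the low-level client's putItem; the destination table myplugin_temp is assumed to share the source table's key schema, so record.dynamodb.Keys can be used for deletes; and MODIFY is handled here as a full overwrite with putItem rather than updateItem.
exports.handler = async (event) => {
    const tasks = event.Records.map((record) => {
        if (record.eventName === 'INSERT' || record.eventName === 'MODIFY') {
            // NewImage already uses AttributeValue format ({ S: ..., N: ... })
            return ddb.putItem({
                TableName: 'myplugin_temp',
                Item: record.dynamodb.NewImage
            }).promise();
        }
        if (record.eventName === 'REMOVE') {
            // deleteItem expects the table's Key, not a full Item
            return ddb.deleteItem({
                TableName: 'myplugin_temp',
                Key: record.dynamodb.Keys
            }).promise();
        }
        return Promise.resolve();
    });
    const results = await Promise.all(tasks);
    console.log(JSON.stringify(results, null, 2));
    return `Processed ${event.Records.length} records.`;
};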

Related

AWS PUT request met with "Provided key element does not match schema."

(Edited to incorporate comments)
So I apologize in advance for the long question. I don't know how else to ask it.
I'm trying to finish up a full-stack web app using React, Node, and DynamoDB. POST and GET requests are working fine, but I'm stuck on PUT. My mock PUT request works fine, but once I try it from the front end in React, I get the error mentioned in the title. I'll show the back end code first, then the mock update, and then the front end.
import handler from "./libs/handler-lib";
import dynamoDb from "./libs/dynamodb-lib";
export const main = handler(async (event, context) => {
    const data = JSON.parse(event.body);
    const params = {
        TableName: process.env.tableName,
        Key: {
            userId: event.requestContext.identity.cognitoIdentityId,
            activityId: event.pathParameters.activityId
        },
        UpdateExpression: "SET title = :title, activityType = :activityType, activityRoutine = :activityRoutine, activityComment = :activityComment",
        ExpressionAttributeValues: {
            ":title": data.title || null,
            ":activityType": data.activityType || null,
            // ":activityRoutine": data.activityRoutine == '' ? "None" : data.activityRoutine,
            // ":activityComment": data.activityComment == '' ? "None" : data.activityComment
            ":activityRoutine": data.activityRoutine || null,
            ":activityComment": data.activityComment || null
        },
        ReturnValues: "ALL_NEW"
    };
    await dynamoDb.update(params);
    return { status: true };
});
This mock update event works without issue:
{
    "body": "{\"title\":\"test\",\"activityType\":\"testing\",\"activityRoutine\":\"\",\"activityComment\":\"\"}",
    "pathParameters": {
        "activityId": "long-alphanumeric-id"
    },
    "requestContext": {
        "identity": {
            "cognitoIdentityId": "us-east-and-so-on"
        }
    }
}
But this code, which produces the exact same JavaScript object as the mock, is not okay with AWS:
function saveActivity(activity) {
try {
return API.put("activities", `/activities/${id}`, {
body: activity
});
} catch(e) {
console.log("saveActivity error:", e);
}
}
async function handleSubmit(event) {
event.preventDefault();
setIsLoading(true)
try {
await saveActivity({
title: title, activityType: activityType, activityRoutine: activityRoutine, activityComment: activityComment
// "key": {userId: userId, activityId: activityId}
// "pathParameters": {"id": activityId},
// "requestContext": {"identity": {"cognitoIdentityId": userId}}
});
} catch(e) {
console.log(e)
setIsLoading(false)
}
}
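One way to check whether the front-end request really produces the same object as the mock is to temporarily log the resolved key and parsed body inside the handler, right before the update call (a debugging sketch only; these lines are not part of the original code):
// temporary logging, placed just above "await dynamoDb.update(params);"
console.log("Key:", {
    userId: event.requestContext.identity.cognitoIdentityId,
    activityId: event.pathParameters.activityId
});
console.log("Parsed body:", data);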
If anyone needs to see more of the code, I'm happy to share, but I figured this question is already getting very long. Any code you see commented out has been tried before without success.
I'd also be happy if someone could point me in the right direction as far as the AWS documentation is concerned. I've been going off of a tutorial and modifying it where need be.
Any help is appreciated!

Unable to write item(s) to DynamoDB table utilizing DocumentClient - Nodejs

I'm absolutely brand new to DynamoDB and I'm trying to simply write an object from a Node.js Lambda. Based on what I've read and researched, I should probably be using DocumentClient from the aws-sdk. I also found the following question here regarding issues with DocumentClient, but it doesn't seem to address my specific issue, which I can't really pinpoint, unfortunately. I've set up a debugger to help with SAM local development, but it appears to only be providing some of the errors.
The code's implementation is shown here.
var params = {
TableName: "March-Madness-Teams",
Item: {
"Id": {"S": randstring.generate(9)},
"School":{"S": team_name},
"Seed": {"S": seed},
"ESPN_Id": {"S": espn_id}
}
}
console.log(JSON.stringify(params))
dynamodb.put(params, (error,data) => {
if (error) {
console.log("Error ", error)
} else {
console.log("Success! ", data)
}
})
Basically I'm scraping a website using the cheerio library, cherry-picking values from the DOM, and saving them into the JSON object shown below.
{
"TableName": "March-Madness-Teams",
"Item": {
"Id": {
"S": "ED311Oi3N"
},
"School": {
"S": "BAYLOR"
},
"Seed": {
"S": "1"
},
"ESPN_Id": {
"S": "239"
}
}
}
When I attempt to push this JSON object to Dynamo, I get an error that says:
Error MultipleValidationErrors: There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'TableName' in params
* MissingRequiredParameter: Missing required key 'Item' in params
The above error is all well and good... I assume it didn't like the fact that I had wrapped those two keys in strings, so I removed the quotes and sent the following:
{
TableName: "March-Madness-Teams",
Item: {
"Id": {
"S": "ED311Oi3N"
},
"School": {
"S": "BAYLOR"
},
"Seed": {
"S": "1"
},
"ESPN_Id": {
"S": "239"
}
}
}
However, when I do that...I kind of get nothing.
Here is a larger code snippet.
return new Promise((resolve,reject) => {
axios.get('http://www.espn.com/mens-college-basketball/bracketology')
.then(html => {
const dynamodb = new aws.DynamoDB.DocumentClient()
let $ = cheerio.load(html.data)
$('.region').each(async function(index, element){
var preregion = $(element).children('h3,b').text()
var region = preregion.substr(0, preregion.indexOf('(') - 1)
$(element).find('a').each(async function(index2, element2){
var seed = $(element2).siblings('span.rank').text()
if (seed.length > 2){
seed = $(element2).siblings('span.rank').text().substring(0, 2)
}
var espn_id = $(element2).attr('href').split('/').slice(-2)[0]
var team_name = $(element2).text()
var params = {
TableName: "March-Madness-Teams",
Item: {
"Id": randstring.generate(9),
"School":team_name,
"Seed": seed,
"ESPN_Id": espn_id
}
}
console.log(JSON.stringify(params))
// dynamodb.put(params)
// .then(function(data) {
// console.log(`Success`, data)
// })
})
})
})
})
Can you try without the type?
Instead of
"School":{"S": team_name},
for example, use
"School": team_name,
From your code, I can see the missing promise on the DynamoDB request. Try changing your lines:
dynamodb.put(params).then(function(data) {
console.log(`Success`, data)
})
to be :
dynamodb.put(params).promise().then(function(data) {
console.log(`Success`, data)
})
You can combine it with await too:
await dynamodb.put(params).promise().then(function(data) {
console.log(`Success`, data)
})
exports.lambdaHandler = async (event, context) => {
const html = await axios.get('http://www.espn.com/mens-college-basketball/bracketology')
let $ = cheerio.load(html.data)
const schools = buildCompleteSchoolObject(html, $)
try {
await writeSchoolsToDynamo(schools)
return { statusCode: 200 }
} catch (error) {
return { statusCode: 400, message: error.message }
}
}
const writeSchoolsToDynamo = async (schools) => {
const promises = schools.map(async school => {
await dynamodb.put(school).promise()
})
await Promise.all(promises)
}
const buildCompleteSchoolObject = (html, $) => {
const schools = []
$('.region').each(loopThroughSubRegions(schools, $))
return schools
}
const loopThroughSubRegions = (schools, $) => {
return (index, element) => {
var preregion = $(element).children('h3,b').text()
var region = preregion.substr(0, preregion.indexOf('(') - 1)
$(element).find('a').each(populateSchoolObjects(schools, $))
}
}
const populateSchoolObjects = (schools, $) => {
return (index, element) => {
var seed = $(element).siblings('span.rank').text()
if (seed.length > 2) {
seed = $(element).siblings('span.rank').text().substring(0, 2)
}
var espn_id = $(element).attr('href').split('/').slice(-2)[0]
var team_name = $(element).text()
schools.push({
TableName: "March-Madness-Teams",
Item: {
"Id": randstring.generate(9),
"School": team_name,
"Seed": seed,
"ESPN_Id": espn_id
}
})
}
}
I know this is drastically different from what I started with, but I did some more digging and kind of worked my way to this... I'm not sure if this is the best way, but I seemed to get it to work... Let me know if something should change!
Oh, I understand what you want.
The code above may work, but there is one concept to improve here around async/await and promises, especially in a Lambda function.
I have some notes on your code above that may help you improve your Lambda:
Awaiting every individual promise in a Lambda is not the best approach because of the Lambda time limit, although sometimes it is fine for other cases.
Maybe you can change the dynamodb.put calls to a batch write (BatchWriteItem at the API level, batchWrite on the DocumentClient); see the sketch below:
The BatchWriteItem operation puts or deletes multiple items in one or more tables.
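For illustration, here is a minimal sketch using the DocumentClient's batchWrite (the DocumentClient counterpart of BatchWriteItem), assuming the schools array built by buildCompleteSchoolObject above; the chunking is needed because BatchWriteItem accepts at most 25 items per request:
const writeSchoolsInBatches = async (schools) => {
    // BatchWriteItem accepts at most 25 put/delete requests per call,
    // so split the schools into chunks first.
    const chunks = [];
    for (let i = 0; i < schools.length; i += 25) {
        chunks.push(schools.slice(i, i + 25));
    }
    for (const chunk of chunks) {
        await dynamodb.batchWrite({
            RequestItems: {
                "March-Madness-Teams": chunk.map(school => ({
                    PutRequest: { Item: school.Item }
                }))
            }
        }).promise();
    }
};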
Or, if you have to use dynamodb.put instead, try improving the code like so:
const writeSchoolsToDynamo = async (schools) => {
    // return the promise from map so Promise.all actually waits on the writes
    const promises = schools.map(school => dynamodb.put(school).promise());
    return Promise.all(promises);
};

Error getting salesOrder in Netsuite while getting a record

Unable to get a single NetSuite salesOrder; it generates the error below.
Getting Sales Order record
Error
[
{
'$attributes': { type: 'ERROR' },
code: 'INVALID_TRANS_TYP',
message: 'Transaction type specified is incorrect.'
}
]
{
"readResponse": {
"status": {
"$attributes": {
"isSuccess": "false"
},
"statusDetail": [
{
"$attributes": {
"type": "ERROR"
},
"code": "INVALID_TRANS_TYP",
"message": "Transaction type specified is incorrect."
}
]
}
}
}
Last Request:
<soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:tns="urn:platform_2014_2.webservices.netsuite.com" xmlns:platformMsgs="urn:messages_2014_2.platform.webservices.netsuite.com" xmlns:platformFaults="urn:faults_2014_2.platform.webservices.netsuite.com" xmlns:platformCore="urn:core_2014_2.platform.webservices.netsuite.com" xmlns:platformCommon="urn:common_2014_2.platform.webservices.netsuite.com" xmlns:listRel="urn:relationships_2014_2sales.transactions.webservices.netsuite.com" xmlns:tranSales="urn:sales_2014_2.transactions.webservices.netsuite.com" xmlns:actSched="urn:scheduling_2014_2.activities.webservices.netsuite.com" xmlns:setupCustom="urn:customization_2014_2.setup.webservices.netsuite.com" xmlns:listAcct="urn:accounting_2014_2.lists.webservices.netsuite.com" xmlns:tranBank="urn:bank_2014_2.transactions.webservices.netsuite.com" xmlns:tranCust="urn:customers_2014_2.transactions.webservices.netsuite.com" xmlns:tranInvt="urn:inventory_2014_2.transactions.webservices.netsuite.com" xmlns:listSupport="urn:support_2014_2.lists.webservices.netsuite.com" xmlns:tranGeneral="urn:general_2014_2.transactions.webservices.netsuite.com" xmlns:listMkt="urn:marketing_2014_2.lists.webservices.netsuite.com" xmlns:listWebsite="urn:website_2014_2.lists.webservices.netsuite.com" xmlns:fileCabinet="urn:filecabinet_2014_2.documents.webservices.netsuite.com" xmlns:listEmp="urn:employees_2014_2.lists.webservices.netsuite.com"><soap:Header><platformMsgs:passport><platformCore:email>darshan.sanandiya#techholding.co</platformCore:email><platformCore:password>techh#123#</platformCore:password><platformCore:account>5022995_SB1</platformCore:account><platformCore:role internalId="3"></platformCore:role></platformMsgs:passport></soap:Header><soap:Body><platformMsgs:get xmlns:platformMsgs="urn:messages_2014_2.platform.webservices.netsuite.com" xmlns="urn:messages_2014_2.platform.webservices.netsuite.com"><platformMsgs:baseRef type="salesOrder" xsi:type="platformCore:RecordRef" internalId="106095" externalId="106095"></platformMsgs:baseRef></platformMsgs:get></soap:Body></soap:Envelope>
All I want is to fetch a single order by internalId, but in return it throws an INVALID_TRANS_TYP (invalid transaction type) error.
I am using the npm soap package and a NetSuite SDK with Node.js.
'use strict';
var denodeify = require('denodeify');
var NetSuite = require('../');
var credentials = require('../example/credentials.json');
var config = new NetSuite.Configuration(credentials);
var service = new NetSuite.Service(config);
console.log('Creating NetSuite connection');
console.log(service,"Service<<<")
service
.init()
.then(function( /*client*/ ) {
console.log('WSDL processed. Service description:');
console.log(service.config.client.describe());
var recordRef = new NetSuite.Records.RecordRef();
recordRef.internalId = "106095";
recordRef.type = 'salesOrder';
console.log('Getting Sales Order record');
return service.get(recordRef);
})
.then(function(result, raw, soapHeader) {
if (result.readResponse.status.$attributes.isSuccess !== 'true') {
console.error('Error');
console.error(result.readResponse.status.statusDetail);
}
console.log(JSON.stringify(result, null, 2));
console.log('Last Request:');
console.log(service.config.client.lastRequest);
})
.catch(function(err) {
console.error(err);
console.error('Last Request:');
console.error(service.config.client.lastRequest);
});
Above is the code I am executing to get the salesOrder.
I believe the record type you want is SalesOrder, not salesOrder.
Reference: https://system.netsuite.com/help/helpcenter/en_US/srbrowser/Browser2019_2/schema/record/salesorder.html
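If that is the issue, the only change to the snippet above would be the record type string (this just restates the suggestion; it is not verified against the 2014_2 WSDL used here):
recordRef.type = 'SalesOrder';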

DynamoDB ignored in AWS Lambda Local

I have DynamoDB running locally:
java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
and I'm trying to run lambda-local example:
lambda-local -f aws -e event.json
However, I don't get any output from dynamodb at all. There is no error and it looks like the call to dynamodb.listTables() is skipped/ignored. What's wrong?
My aws.js is as follows:
var AWS = require("aws-sdk");
AWS.config.update({
region: "us-west-2",
endpoint: "http://localhost:8000",
accessKeyId: "BLAH",
secretAccessKey: "BLAH"
});
var dynamodb = new AWS.DynamoDB();
exports.handler = function(event, context) {
console.log("EVENT: ", event);
event.int++;
console.log("LIST TABLES:");
dynamodb.listTables(function(err, data) {
if (err) {
console.log("Error: ", err.code);
} else {
console.log("Table names are: ", data.TableNames);
}
});
console.log("---SUCCEED:---");
context.succeed(event);
};
and event.json:
{
"obj" : { "a" : "b" },
"int" : 1,
"str" : "qwerty",
"arr" : [ 1, 2, 3, 4 ]
}
Output is:
EVENT: { obj: { a: 'b' }, int: 1, str: 'qwerty', arr: [ 1, 2, 3, 4 ] }
LIST TABLES:
---SUCCEED:---
OUTPUT
--------------------------------
{
"obj": {
"a": "b"
},
"int": 2,
"str": "qwerty",
"arr": [
1,
2,
3,
4
]
}
I'm expecting to see at least something between LIST TABLES: and ---SUCCEED:---.
Yet there is no output and no error. I also checked the DynamoDB log and there is nothing there. The line event.int++ worked fine, and I see all the other console.log() calls.
Additionally, I ran this code from node just to prove that DynamoDB is working, and it does list the tables fine:
node ListTables.js
Content of ListTables.js (which is basically same code as above):
var AWS = require("aws-sdk");
AWS.config.update({
region: "us-west-2",
endpoint: "http://localhost:8000",
accessKeyId: "BLAH",
secretAccessKey: "BLAH"
});
var dynamodb = new AWS.DynamoDB();
dynamodb.listTables(function(err, data) {
if (err) {
console.log("Error: ", err.code);
} else {
console.log("Table names are: ", data.TableNames);
}
});
Outputs as expected:
Table names are: [ 'Movies' ]
I created that table earlier just to prove that DynamoDB is actually running and accepting connections from node.
The problem (and solution) is that the DynamoDB call is asynchronous, so the script finished before the callback ran.
Once I moved the context.succeed(event); line from the end of the handler into the dynamodb.listTables callback, I got the output fine:
dynamodb.listTables(function(err, data) {
    console.log("INSIDE");
    if (err) {
        console.log("Error: ", err.code);
    } else {
        console.log("Table names are: ", data.TableNames);
    }
    context.succeed(event);
});
lambda-local -f aws -e event.json
OUTPUTS:
EVENT: { obj: { a: 'b' }, int: 1, str: 'qwerty', arr: [ 1, 2, 3, 4 ] }
LIST TABLES:
---SUCCEED:---
INSIDE
Table names are: [ 'Image', 'Movies' ]
OUTPUT
--------------------------------
...
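As an aside, an equivalent way to avoid the ordering problem entirely (a sketch only, in line with the promise-based suggestions in the other answers on this page) is to make the handler async and await the SDK's promise form:
exports.handler = async (event) => {
    console.log("EVENT: ", event);
    event.int++;
    const data = await dynamodb.listTables().promise();
    console.log("Table names are: ", data.TableNames);
    return event; // replaces context.succeed(event)
};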

File creation is ignoring my parameters but creates an "Untitled" binary file

I'm using the Google API via the googleapis package, and the files.create call simply does not work. I've experimented with making the same call in Google's API Explorer, and there it does work. I'm at a bit of a loss.
The createSheet call is encapsulated as follows:
Google.prototype.createSheet = function(filename, callback) {
var services = google.drive('v3');
services.files.create({
"name" : filename,
"mimeType" : "application/vnd.google-apps.spreadsheet",
"description" : 'auto-generated by the cli',
"auth" : this.auth
}, function(err,response) {
if( err ) {
console.log('Error : unable to create file, ' + err);
return;
} else {
console.dir(response);
}
});
}
... the net result is,
{ kind: 'drive#file',
id: '0BwWAQdfAgbYzWk5XRFQyODQ0Zmc',
name: 'Untitled',
mimeType: 'application/octet-stream'
}
It's missing both the filename and the filetype.
The general framework here is working correctly as I can get a list of files and read from spreadsheets.
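For what it's worth, with the googleapis Node.js client the file metadata generally needs to be nested under a resource (older releases) or requestBody (newer releases) property rather than passed at the top level of the options object, which would explain why only auth is picked up and the rest is ignored. A sketch of that shape, under that assumption:
Google.prototype.createSheet = function(filename, callback) {
    var drive = google.drive('v3');
    drive.files.create({
        auth: this.auth,
        // file metadata goes inside resource/requestBody, not at the top level
        resource: {
            name: filename,
            mimeType: "application/vnd.google-apps.spreadsheet",
            description: "auto-generated by the cli"
        }
    }, function(err, response) {
        if (err) {
            console.log('Error : unable to create file, ' + err);
            return callback(err);
        }
        callback(null, response);
    });
};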
