I'm trying to make a call to the Amazon Rekognition service with Node.js. The call is going through, but I receive an InvalidImageFormatException error that says:
Invalid Input, input image shouldn't be empty.
I'm basing my code on an S3 example:
var AWS = require('aws-sdk');
var rekognition = new AWS.Rekognition({region: 'us-east-1'});

// Create a bucket and upload something into it
var params = {
  Image: {
    S3Object: {
      Bucket: "MY-BUCKET-NAME",
      Name: "coffee.jpg"
    }
  },
  MaxLabels: 10,
  MinConfidence: 70.0
};

var request = rekognition.detectLabels(params, function(err, data) {
  if (err) {
    console.log(err, err.stack); // an error occurred
  } else {
    console.log(data); // successful response
  }
});
The documentation states that the service only accepts PNG or JPEG images but I can't figure out what is going on.
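For reference, here is a sanity check I plan to run first (a sketch reusing the placeholder bucket and key from above) to rule out a missing or zero-byte object, since Rekognition reads the image directly from S3:

var AWS = require('aws-sdk');
var s3 = new AWS.S3({region: 'us-east-1'});

// Verify the object exists and is non-empty before calling Rekognition.
s3.headObject({Bucket: "MY-BUCKET-NAME", Key: "coffee.jpg"}, function(err, data) {
  if (err) {
    console.log('Object not reachable:', err); // NotFound, or a region mismatch
  } else {
    console.log('ContentLength:', data.ContentLength); // 0 would explain the "empty" input
  }
});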
I'm using Amazon S3 as photo storage and I'm constantly getting this error:
RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Here's the code:
function(source, name, callback) {
  var awsUtils = this;
  fs.stat(source, function(err, file_info) {
    console.log(file_info);
    var bodyStream = fs.createReadStream(source);
    var params = {
      Key: name,
      ContentLength: file_info.size,
      Body: bodyStream
    };
    awsUtils.s3bucket.putObject(params, function(err, data) {
      if (err) {
        console.error('AWSDriverUtils-unable to upload: ' + err);
        callback(false);
      } else {
        console.log('AWSDriverUtils-upload success:', data);
        callback(true);
      }
    });
  });
}
The image file being uploaded is small, only about 44.0 KB. While researching, I found someone saying:
Amazon S3 will send that error response after 20 seconds of inactivity. The error indicates that Amazon S3 was attempting to read the request body, but no new data arrived over a period of 20 seconds.
What should I do to fix this? Maybe a flush() function?
I have also tried to comment out ContentLength in params.
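For reference, here is the buffered variant I am considering (a sketch using the same source, name, callback, and awsUtils from the function above): reading the file into a Buffer first means S3 never waits on a stalled stream, which should be fine for a 44 KB image.

var fs = require('fs');

fs.readFile(source, function(err, buffer) {
  if (err) {
    return callback(false);
  }
  // Body is a fully-read Buffer, so the SDK knows the length
  // and the request body cannot go idle mid-upload.
  awsUtils.s3bucket.putObject({Key: name, Body: buffer}, function(err, data) {
    callback(!err);
  });
});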
I am new to AWS Lambda. I have a working Lambda function that logs the JSON data to CloudWatch and also to an S3 bucket.
This is the function:
var AWS = require('aws-sdk'); // available in the Lambda runtime

exports.handler = function(event, context) {
  var s3 = new AWS.S3();
  var param = {Bucket: 'test', Key: 'test123', Body: event.name};
  console.log("EVENT DATA: " + param.Body);
  s3.upload(param, function(err, data) {
    if (err) console.log(err, err.stack); // an error occurred
    else console.log(data);               // successful response
    console.log('actually done!');
    context.done();
  });
  console.log('done?');
};
This is my json data :
{
"name": "XYZ ABC",
"value": 123
}
How should I push the whole JSON data given above to S3 and the CloudWatch logs rather than just event.name?
Thanks.
Change event.name to JSON.stringify(event). If you get [object Object] somewhere, change it to JSON.stringify(event, null, 2).
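Applied to the handler above, that looks like this (same hypothetical 'test' bucket and 'test123' key from the question):

var AWS = require('aws-sdk');

exports.handler = function(event, context) {
  var s3 = new AWS.S3();
  // Serialize the whole event instead of just event.name.
  var param = {Bucket: 'test', Key: 'test123', Body: JSON.stringify(event, null, 2)};
  console.log("EVENT DATA: " + param.Body); // this line also lands in the CloudWatch logs
  s3.upload(param, function(err, data) {
    if (err) console.log(err, err.stack);
    else console.log(data);
    context.done();
  });
};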
I'm trying out AWS S3 for the first time and I wrote the following function to generate a bucket policy.
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');

// Load configuration
AWS.config = new AWS.Config();
AWS.config.accessKeyId = sails.config.accessKeyId;
AWS.config.secretAccessKey = sails.config.secretAccessKey;
AWS.config.region = sails.config.region;

// Create S3 object
var s3 = new AWS.S3();

// Defining the required parameters
var params = {
  Bucket: "bucket-name-here"
};

s3.getBucketPolicy(params, function(error, data) {
  if (error) {
    // An error occurred
    console.log("Error\n" + error);
    return res.json({
      message: "Error",
      'error': error
    });
  } else {
    // Successful
    console.log("Data\n" + data);
    return res.json({
      message: "Successful",
      'data': data
    });
  }
});
But the response is always NoSuchBucketPolicy: The bucket policy does not exist
I tried uploading a test file into the bucket and listing all buckets, and both worked as expected. What is wrong with the code?
Your code doesn't "generate" a bucket policy... it tries to fetch the existing policy of a bucket. Buckets don't have a policy until you create one, so this error would be normal in that case.
Error Code: NoSuchBucketPolicy
Description: The specified bucket does not have a bucket policy.
HTTP Status Code: 404 Not Found
http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
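If the goal is really to create a policy, a putBucketPolicy call has to come first; getBucketPolicy will then return it. A minimal sketch reusing the s3 client and placeholder bucket from the question (the public-read statement is just an example, not a recommendation):

var policy = {
  Version: "2012-10-17",
  Statement: [{
    Sid: "ExampleReadOnly",
    Effect: "Allow",
    Principal: "*",
    Action: "s3:GetObject",
    Resource: "arn:aws:s3:::bucket-name-here/*"
  }]
};

s3.putBucketPolicy({
  Bucket: "bucket-name-here",
  Policy: JSON.stringify(policy) // the API expects a JSON string, not an object
}, function(error, data) {
  if (error) {
    console.log("Error\n" + error);
  } else {
    console.log("Policy attached; getBucketPolicy will now succeed");
  }
});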
I'm trying to upload a PDF to an S3 bucket using the Knox library, but I keep getting 505 errors and the PDFs won't save. My code:
// all of this works well
var knox = require('knox');
var client = knox.createClient(require('../path/to/config.js').knox);

client.putFile('tmp/file', '/prefix/key', function(err, res) {
  if (err) {
    console.log("Error PUTing file in S3:", err);
  }
  console.log("S3 RESPONSE:", res.statusCode); // returns 505
});
Anyone have any insight into what I'm doing wrong? I've also tried setting my own headers using client.put(..), but I got the same 505 response.
Two possible reasons:

1) If this is your complete code, then you forgot to enter the key, secret, and bucket:

var client = knox.createClient({
    key: '<api-key-here>'
  , secret: '<secret-here>'
  , bucket: 'learnboost'
});

2) There is a space in the name of the file you are trying to upload.
This isn't an answer per se, and I'm still unsure about the 505 response above, but the AWS SDK that Amazon puts out works great if anyone is having similar issues with Knox. The above just becomes:
var aws = require('aws-sdk');
var fs = require('fs'); // needed for readFileSync below

aws.config.loadFromPath('./path/to/config.json');

var s3 = new aws.S3();
var params = {
  Bucket: 'your-bucket',
  Key: 'your-key',
  Body: fs.readFileSync('/path/to/file.pdf')
};

s3.putObject(params, function(err, data) {
  if (err) {
    console.log("Error PUTing file:", err);
  }
  console.log("S3 RESPONSE:", data);
});
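As a side note, readFileSync buffers the whole PDF in memory. For larger files, a hedged alternative is to hand the SDK a read stream via s3.upload, which supports streaming bodies (same placeholder bucket, key, and path as above):

var fs = require('fs');

s3.upload({
  Bucket: 'your-bucket',
  Key: 'your-key',
  Body: fs.createReadStream('/path/to/file.pdf') // streamed instead of buffered
}, function(err, data) {
  if (err) {
    console.log("Error PUTing file:", err);
  } else {
    console.log("S3 RESPONSE:", data);
  }
});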
I've tried using aws-sdk and knox, and I get status code 301 when trying to upload images: "The bucket you are attempting to access must be addressed using the specified endpoint. Please send all future requests to this endpoint." This works in PHP.
var AWS = require('aws-sdk');
var fs = require('fs');

AWS.config.loadFromPath(__dirname + '/config/config.json');

fs.readFile(source, function(err, data) {
  var s3 = new AWS.S3();
  s3.client.createBucket({Bucket: 'mystuff'}, function() {
    var d = {
      Bucket: 'mystuff',
      Key: 'img/test.jpg',
      Body: data,
      ACL: 'public-read'
    };
    s3.client.putObject(d, function(err, res) {
      if (err) {
        console.log("Error uploading data: ", err);
        callback(err);
      } else {
        console.log("Successfully uploaded data to mystuff/img/test.jpg");
        callback(res);
      }
    });
  });
});
I actually solved this problem. Your config has to have a region; since my bucket was "US Standard", I left the region blank and it worked.
config.json:
{
  "accessKeyId": "yourAccessKeyId",
  "secretAccessKey": "yourSecretAccessKey",
  "region": ""
}
Go to the S3 management console, select one of your files, and click on Properties -> look at the file link.

US Standard:
https://s3.amazonaws.com/yourbucket/
Host in your console window: yourbucket.s3.amazonaws.com/

us-west-1:
https://s3-us-west-1.amazonaws.com/yourbucket/
Host in your console window: yourbucket.s3-us-west-1.amazonaws.com/
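Put differently, the client's region (and therefore its endpoint) has to match where the bucket lives. A minimal sketch, assuming a bucket in us-west-1 (the bucket name and key are the placeholders from the question):

var AWS = require('aws-sdk');

// The region must match where the bucket was created;
// a mismatch is what produces the 301 redirect above.
var s3 = new AWS.S3({region: 'us-west-1'});

s3.putObject({Bucket: 'mystuff', Key: 'img/test.jpg', Body: 'test'}, function(err, data) {
  if (err) console.log("Error uploading data:", err); // a 301 here means the region is still wrong
  else console.log("Successfully uploaded data to mystuff/img/test.jpg");
});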
Did you try .send()?
I can upload to S3 with the code below.
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/AWSRequest.html
var s3object = {Bucket: 'mystuff', Key: name, Body: data['data']};

s3.client.putObject(s3object)
  .done(function(resp) {
    console.log("Successfully uploaded data");
  })
  .fail(function(resp) {
    console.log(resp);
  })
  .send();
I had the same problem with the new SDK and solved it by setting the endpoint option explicitly.
Reference: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor_details
Snippet:
var AWS = require('aws-sdk');

var s3 = new AWS.S3({ endpoint: 'https://s3-your-region-varies.amazonaws.com' }),
    myBucket = 'your-bucket-name';

var params = {Bucket: myBucket, Key: 'myUpload', Body: "Test"};

s3.putObject(params, function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully uploaded data to " + myBucket + "/myUpload");
  }
});
Alternatively, you can solve this by setting the region in your config file; you just have to be precise about the region name.
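For the config-file route, that just means filling in the region field that was left blank earlier (a sketch with placeholder credentials and region):

{
  "accessKeyId": "yourAccessKeyId",
  "secretAccessKey": "yourSecretAccessKey",
  "region": "us-west-1"
}

Then load it as before:

var AWS = require('aws-sdk');
AWS.config.loadFromPath('./config.json');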