I'm trying to extract the body from the payload returned by a Lambda function callback. I am invoking the function with the code below:
const pdfBuffer = await new Promise((resolve, reject) => {
  lambda.invoke(params, function (err, data) {
    if (err) {
      console.log('STACK ERROR IS' + err, err.stack);
      reject(err);
    } else {
      console.log('Returned DATA is: ' + JSON.stringify(data));
      resolve(data);
    }
  });
});

Prescription.deletePrescription(programId);

const returnedPDF = JSON.parse(pdfBuffer.Payload);

response.status(200);
response.set({
  'Content-Type': 'application/pdf',
  'Content-Disposition': `attachment; filename="${filename}.pdf"`,
  'X-Publish-Filename': `"${filename}.pdf"`,
});
response.end(new Buffer(returnedPDF.body, 'binary'));
And the Lambda callback function looks like this:
return callback(null, {
  statusCode: 200,
  body: data,
  isBase64Encoded: true,
  headers: {
    'Content-Type': 'application/pdf',
  },
});
The data object I am getting back looks like this:
{"StatusCode":200,"ExecutedVersion":"$LATEST","Payload":"{\"statusCode\":200,\"body\":\"JVBERi0xLjQKJdPr6eEKMSAwIG9iago8PC9DcmVhdG9yIChDa=\",\"headers\":{\"Content-Type\":\"application/pdf\"}}"}
I've tried a number of things to get the body out of this nested object, including JSON.parse(), since I thought it was stringified, but I'm not having any luck.
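Is something like this what I should be doing? A rough sketch of my understanding (untested, and it assumes the body really is base64-encoded, as the isBase64Encoded flag in my Lambda callback suggests):

const returnedPDF = JSON.parse(pdfBuffer.Payload);         // Payload arrives as a JSON string
const pdfBytes = Buffer.from(returnedPDF.body, 'base64');  // decode the base64 body back into raw PDF bytes
response.end(pdfBytes);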
Many thanks in advance.
I am trying to upload an image file from my node.js application to a group's drive in Sharepoint.
As the official documentation states, I'm making my request as follows:
PUT /groups/{group-id}/drive/items/{parent-id}:/{filename}:/content
With the binary image in the body: "The contents of the request body should be the binary stream of the file to be uploaded."
The problem is that the image is uploaded, but as a corrupted file. I have tried different solutions and still can't see why the image is always corrupted.
Here is my code:
// I get my image from a URL first
https.get(url.parse(attachment.contentUrl), function (response) {
  var data = [];
  response.on('data', function (chunk) {
    data.push(chunk);
  });
  response.on('end', function () {
    if (response.statusCode === 200) {
      var buffer = Buffer.concat(data);
      // store my image in a local file to test if the image is correct (which it is)
      fs.writeFile(localFileName, buffer, (fsError) => {
        // error handling
      });
      functions.uploadImageToSharepoint(session, localFileName, buffer,
        function (err, body, res) {
          if (err) {
            console.error(err);
          } else {
            console.log('OK!');
          }
        });
    } else {
      // error handling
    }
  });
}).on('error', function (e) {
  console.log("error2: " + e);
});
// And the request to the Graph API
function uploadImageToSharepoint(session, fileName, data, callback) {
  var options = {
    url: 'https://graph.microsoft.com/v1.0/groups/xxxxxxx/drive/root:/yyyyyy/fileName.jpg:/content',
    method: 'PUT',
    body: data,
    json: true,
    headers: {
      'Content-Type': 'image/jpg',
      Authorization: 'Bearer ' + session.userData.accessToken
    }
  };
  request(options, function (err, res, body) {
    if (err) return callback(err, body, res);
    if (parseInt(res.statusCode / 100, 10) !== 2) {
      if (body.error) {
        return callback(new Error(res.statusCode + ': ' + (body.error.message || body.error)), body, res);
      }
      return callback(err, body, res);
    }
    callback(err, body, res);
  });
}
The file is most likely getting corrupted due to the following option for request:
var options = {
  json: true, // <-- setting this option sets body to the JSON representation of the value
  // other properties are omitted for clarity
};
In that case, request sets the body to the JSON representation of the value and adds an application/json accept header, so the binary file sent to the upload endpoint gets corrupted.
The solution is to omit the json option from the request and use only the proper content type:
var options = {
  url: '/me/drive/root:/filename.jpg:/content',
  method: 'PUT',
  body: data,
  headers: {
    'Content-Type': 'image/jpg',
    Authorization: 'Bearer ' + accessToken
  }
};
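One knock-on effect to keep in mind (a sketch only, not verified against the Graph API): without json: true the response body arrives as a string rather than a parsed object, so the error handling shown earlier would need to parse it manually, roughly like this:

request(options, function (err, res, body) {
  if (err) return callback(err, body, res);
  if (res.statusCode < 200 || res.statusCode > 299) {
    var parsed;
    try {
      parsed = typeof body === 'string' ? JSON.parse(body) : body; // Graph error responses are JSON text
    } catch (e) {
      parsed = { error: { message: body } };
    }
    return callback(new Error(res.statusCode + ': ' + (parsed.error.message || parsed.error)), body, res);
  }
  callback(null, body, res);
});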
I am using Node.js to make a call to the Spotify API and receive the response in the body object, as shown in the code below:
var options = {
  url: 'https://api.spotify.com/v1/me',
  headers: { 'Authorization': 'Bearer ' + access_token },
  json: true
};

request.get(options, function (error, res, body) {
  console.log(body);
});
This logs the body object as expected.
But when I try to access the body object outside the callback, I get undefined. I think the problem is that I am making an asynchronous call, so the statements that use the body variable outside the callback run before the response is received. I am a bit confused about how to solve this.
Any help is appreciated
Edit:
request.get(options, function (error, res, body) {
  console.log(body);
  response.render('user_account.html', {
    data: body
  });
});
And it gives the output:
Use a promise.
You can try the following:
const apiCall = () => {
  return new Promise((resolve, reject) => {
    var options = {
      url: 'https://api.spotify.com/v1/me',
      headers: { 'Authorization': 'Bearer ' + access_token },
      json: true
    };
    request.get(options, function (error, res, body) {
      if (error) return reject(error);
      console.log(body);
      resolve(body);
    });
  });
};

apiCall().then((body) => {
  // do your things here
})
.catch((err) => console.log(err));
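For what it's worth, the same idea reads a bit more directly with async/await. A sketch only, assuming Node 8+ and an Express-style route handler (the route name is purely illustrative):

// Hypothetical Express route; apiCall() is the promise-returning wrapper defined above
app.get('/user_account', async (req, res) => {
  try {
    const body = await apiCall();                      // wait for the Spotify response
    res.render('user_account.html', { data: body });
  } catch (err) {
    console.log(err);
    res.sendStatus(500);
  }
});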
I am trying to save my response to the database, but it only logs to the console and never completes the write to the database.
Here is my code:
exports.saveGroups = functions.firestore.document("Users/{user_id}").onWrite((change, context) => {
  token_id1 = change.after.data().token_id;
  token_email = change.after.data().email;
  image = change.after.data().image;
  name1 = change.after.data().name;
  user_id = context.params.user_id;

  console.log('token_id1:' + token_id1);
  console.log('token_email:' + token_email);
  console.log('Image:' + image);
  console.log('name:' + name1);
  console.log('user_id' + user_id);

  var headers = {
    'Authorization': 'key = AAAATmJbwHE:APA91bGIEsq0aioIzAa_7g5JvLX1NPU3z1Gkt6vxB2J-I_9IvllwDJaxjmEp5DPw6ZoEBmXfwiYwICrMuE0kQrvxuHTGPc5YKr3i-JNru-o6AHnrAjq4r7iZWmzUuSmPwu8CbR2kXEIq',
    'project_id': '336657629297',
    'Content-Type': 'application/json'
  };

  var options = {
    url: 'https://android.googleapis.com/gcm/notification',
    method: 'POST',
    headers: headers,
    json: {
      'operation': 'create',
      'notification_key_name': token_email,
      'registration_ids': [token_id1]
    }
  };

  const promise = request(options, function (error, response, body) {
    tokenName = body.notification_key;
    console.log('Key: ' + tokenName); // here it shows me the correct value
    return db.collection('Users').doc(user_id).set({ name: name1, token_id: token_id1, notification_key: tokenName, image: image, email: token_email }).then(() => { // never reach this line
      return console.log("Document successfully written!"); // never returns this console
    }).catch(function (error) {
      return console.error("Error writing document: ", error);
    });
  });

  return promise; // finishes with status "ok"
});
I've gone through the promises documentation, but I can't find any example of how to handle the "request" function.
Any help is appreciated. Thanks in advance.
request supports a callback interface natively but does not return a promise, and returning a promise is what you must do within a Cloud Function.
I strongly suggest that you watch these videos from the Firebase team: https://www.youtube.com/watch?v=7IkUgCLr5oA&t=28s and https://www.youtube.com/watch?v=652XeeKNHSk which explain this key concept.
You could use request-promise (https://github.com/request/request-promise) and rp(...) method which "returns a regular Promises/A+ compliant promise" and then do something like:
....
return rp(options) // <-- you should return this promise from within the Cloud Function
  .then(function (body) {
    const tokenName = body.notification_key;
    console.log('Key: ' + tokenName);
    return db.collection('Users').doc(user_id).set({ name: name1, token_id: token_id1, notification_key: tokenName, image: image, email: token_email });
  })
  .catch(function (err) {
    console.log(err);
  });
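For completeness, a rough sketch of how the require line and the options from the question might fit together (untested; note that passing a json body, as the original code already does, also makes request-promise parse the JSON response, so body.notification_key is available on a parsed object):

const rp = require('request-promise');

const options = {
  url: 'https://android.googleapis.com/gcm/notification',
  method: 'POST',
  headers: headers, // the same headers object as in the question
  json: {
    'operation': 'create',
    'notification_key_name': token_email,
    'registration_ids': [token_id1]
  }
};

return rp(options) // then chain .then()/.catch() exactly as shown above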
I am using an AWS Node.js Lambda to resize an image and send it back in binary format, and I am not sure if I am handling this correctly. Basically, I want to send the binary data back in my response in a way that it can be loaded without any front-end changes. I have a perfectly good version working that returns base64 data, but how would I modify it to return binary instead?
Here is my resizing function:
function imageSizer(url, args) {
  return new Promise((resolve, reject) => {
    bufferRequest.get(url, args, function (err, res, body) {
      if (err) {
        console.log(err);
        reject(err);
      }
      const originalFormat = url.includes('png') ? 'png' : 'jpeg';
      let newSize = (args.exact === true)
        ? sharp(body).resize(args.width, args.height)
        : sharp(body).resize(args.width, args.height).max();
      newSize.toFormat(originalFormat)
        .toBuffer()
        .then((outputBuffer) => {
          const newImage = "data:" + res.headers["content-type"] + ";base64," + new Buffer(outputBuffer).toString('base64');
          resolve(newImage);
        })
        .catch((error) => {
          console.log(error);
          reject(error);
        });
    });
  });
}
Here is my handler:
function handler(event, context) {
  imageSizer(event.url, event.queryStringParameters)
    .then((result) => {
      context.succeed({
        statusCode: 200,
        headers: { 'Content-Type': 'image/jpeg' },
        body: result
      });
    })
    .catch((error) => {
      context.succeed({
        statusCode: 502,
        headers: { 'Content-Type': 'application/json' },
        body: `Error sizing image ${error}`
      });
    });
}
As mentioned, this works fine for base64, but I am unsure what changes to make to the const newImage = line to send back binary data the browser can use to load the image.
I went with this solution:
Change:
.then((outputBuffer) => {
  const newImage = "data:" + res.headers["content-type"] + ";base64," + new Buffer(outputBuffer).toString('base64');
  resolve(newImage);
})
To:
.then((outputBuffer) => {
  resolve(outputBuffer);
})
This returns the buffer in binary format, without having to do anything ourselves.
Using Node.js, I am trying to upload a large file (700 MB ~ 1 GB), which I get as the response to a POST request (made with request), to my S3 bucket.
Using the aws-sdk for JavaScript, I've tried two approaches, but each had a different issue:
Approach 1 - Trying to invoke the s3.upload() function on the response event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });

      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('response', (response) => { // 1st approach
        if (200 == response.statusCode) {
          s3.upload({
            Body: response,
            ACL: 'public-read',
            CacheControl: "5184000" // 2 months
          }, (err, data) => {
            console.log(err, data);
          });
        }
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
};
Result: s3.upload() is called once. The file is created in the bucket but has no data in it (zero bytes).
Approach 2 - Trying to invoke the s3.upload() function on the data event:
const sendRequest = (data) => {
  try {
    return new Promise((resolve, reject) => {
      AWS.config.loadFromPath('./config/awsConfig.json');
      let s3 = new AWS.S3({ params: { Bucket: 'myBucket', Key: 'path/to/file.csv' } });

      request({
        method: 'POST',
        uri: 'https://www.example.com/apiEndpoint',
        headers: {
          host: 'example.com',
          'content-type': 'application/json'
        },
        body: JSON.stringify(data)
      }).on('data', (data) => { // 2nd approach
        s3.upload({
          Body: data,
          ACL: 'public-read',
          CacheControl: "5184000" // 2 months
        }, (err, data) => {
          console.log(err, data);
        });
      }).on('error', (error) => {
        reject();
      }).on('end', () => {
        resolve();
      });
    });
  } catch (error) {
    throw new Error('Unable to get and upload file');
  }
};
Result: s3.upload() is called every time a data event fires. The file is created in the bucket, but each time the event is emitted the new chunk overwrites the previous one, so in the end only the last chunk that was emitted remains (7 KB ~ 10 KB).
Also, after resolve() is called, s3.upload() is still being called multiple times.
Notes:
1) The function returns a Promise because my entire process is synchronous.
2) Both approaches are taken from the answers to "Stream response from nodejs request to s3" and "Piping from request.js to s3.upload results in a zero byte file".
3) A 3rd approach is to stream to a local file on my server and only then upload to s3. I would very much like to avoid that.
Any ideas on how to get it to work?
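For reference, here is a minimal sketch of a piping variant (untested on my side; it assumes s3.upload() accepts a readable stream as Body, which the AWS SDK documents for managed uploads): the whole response is piped into a single PassThrough stream, and s3.upload() is called exactly once.

const stream = require('stream');

// One PassThrough stream receives the entire response body
const pass = new stream.PassThrough();

request({
  method: 'POST',
  uri: 'https://www.example.com/apiEndpoint',
  headers: { host: 'example.com', 'content-type': 'application/json' },
  body: JSON.stringify(data)
}).pipe(pass);

// A single upload consumes the stream as it arrives
s3.upload({ Body: pass, ACL: 'public-read', CacheControl: '5184000' }, (err, uploadData) => {
  // resolve/reject the surrounding promise here rather than on the request's 'end' event
  console.log(err, uploadData);
});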