Response body: Payload Too Large - node.js

I am writing a POST request to a mock API that will store a file and its binary data. Below are the logged buffer, the error I'm receiving, and my code:
<Buffer 49 44 33 03 00 00 00 00 01 10 54 49 54 32 00 00 00 0b 00 00 00 61 6c 61 72 6d 53 6f 75 6e 64 54 58 58 58 00 00 00 17 00 00 00 53 6f 66 74 77 61 72 65 ... 115969 more bytes>
Error: SyntaxError: Unexpected token P in JSON at position 0
at JSON.parse (<anonymous>)
at safeParseJSON (C:\Users\Student\Downloads\RequestMaker\fetchTest.js:36:19)
at processTicksAndRejections (internal/process/task_queues.js:93:5)
at async C:\Users\Student\Downloads\RequestMaker\fetchTest.js:72:5
Response body: Payload Too Large
const fs = require('fs');
const fetch = require('node-fetch');

async function makePostReq(time, date) {
  let file = 'sound.mp3'
  fs.readFile(file, 'utf8', async (err, data) => {
    if (err) {
      console.error(err)
      return
    }
    let buf = Buffer.from(data, 'utf8');
    console.log(buf)
    let body = {
      "File Name": "sound.mp3",
      "Binary": buf
    }
    let url = 'https://6050ffd85346090017671094.mockapi.io/posts/mockData'
    await fetch(url, {
      method: 'POST',
      body: JSON.stringify(body),
      headers: { 'Content-Type': 'application/json' },
    })
      .then(safeParseJSON)
      .then(json => console.log(json));
  })
}

async function safeParseJSON(response) {
  const body = await response.text();
  try {
    return JSON.parse(body);
  } catch (err) {
    console.error("Error:", err);
    console.error("Response body:", body);
  }
}
A lot of the solutions online talk about adjusting the payload size limit in Express. I'm only using plain Node.js for this because it's going to be part of an Alexa Skill once it works, so Express won't work. Are there any ideas on how I can send a file's binary data in a POST request?
Thank you.
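The 413 itself comes from the mock API's server-side limit, and JSON-encoding a Buffer also inflates the payload considerably, since JSON.stringify turns the bytes into a long numeric array. A minimal sketch of sending the raw bytes as the request body instead, assuming an endpoint that accepts binary uploads; the URL and the X-File-Name header below are illustrative, not part of mockapi.io:

const fs = require('fs');
const fetch = require('node-fetch');

async function postBinary() {
  // Reading without an encoding returns a Buffer, so the bytes stay intact.
  const buf = await fs.promises.readFile('sound.mp3');

  // Hypothetical endpoint that accepts a raw binary body.
  const res = await fetch('https://example.com/upload', {
    method: 'POST',
    body: buf,
    headers: {
      'Content-Type': 'application/octet-stream',
      'X-File-Name': 'sound.mp3' // illustrative way to pass the name alongside the bytes
    },
  });
  console.log(res.status, await res.text());
}

postBinary().catch(console.error);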

Related

How to make a request to Amazon S3 using Node.js to download a file?

I made a request to Amazon S3 using Node.js to download a file. In the response I receive a Buffer, and here I have two problems:
If I send the buffer to the frontend from Node.js, like res.send(awsreq.Body.Buffer), and console log it once in Node.js and once in the frontend, the buffer in the frontend looks different from the one in Node.js.
Node.js Buffer: <Buffer 50 4b 03 04 14 00 06 00 08 00 00 00 21 00 df a4 d2 6c 5a 01 00 00 20 05 00 00 13 00 08 02 5b 43 6f 6e 74 65 6e 74 5f 54 79 70 65 73 5d 2e 78 6d 6c 20 ... 11906 more bytes>
Vue.js frontend buffer: PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0... (36.8 kB in total)
How can I download the file directly from a request made by the frontend? First, how do I receive that buffer correctly, and how do I convert it so that the frontend downloads the file automatically? If possible, how do I do all of that in Node.js so that when the frontend receives the response the download starts automatically?
To download a file from Amazon S3, follow these steps.
Install the AWS SDK: npm install aws-sdk
Then check the code below:
var AWS = require("aws-sdk")

const s3 = new AWS.S3({
  endpoint: "ENDPOINT",
  accessKeyId: "YOUR_ACCESS_KEY_ID",
  secretAccessKey: "YOUR_SECRET_ACCESS_KEY",
  region: "us-east-1",
  signatureVersion: "v4"
})

const downloadParams = {
  Bucket: "BUCKET_NAME",
  Key: "KEY_NAME/FILE_NAME/FILE_PATH"
}

// Download the file
s3.getObject(downloadParams, function (error, fileData) {
  if (!error) {
    console.log(fileData, "file")
  } else {
    console.log(error, " ERROR ")
  }
})
For more information you can check the official AWS documentation:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/getting-started-nodejs.html
Finally, to make the browser save the file, you can use a Content-Disposition header, or you can use a pre-signed URL (the example below combines the two via ResponseContentDisposition).
Example:
const getpreSignedUrlForDownload = async () => {
  try {
    const params = {
      Bucket: "BUCKET_NAME",
      Key: "FILE_KEY",
      Expires: 60 * 1,
      ResponseContentDisposition: 'attachment; filename="FILE_NAME"'
    }
    return new Promise((resolve, reject) => {
      s3.getSignedUrl("getObject", params, (err, url) => {
        err ? reject(err) : resolve(url)
      })
    })
  } catch (error) {
    throw new Error(error)
  }
}

getpreSignedUrlForDownload()
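To cover the last part of the question (starting the download automatically in the frontend), the pre-signed URL can simply be handed to the browser. A rough browser-side sketch, assuming the Node.js backend exposes a route that returns the URL generated above; the /api/download-url route name is made up:

async function downloadFile() {
  // Hypothetical backend route that responds with { url: "<pre-signed S3 URL>" }.
  const res = await fetch('/api/download-url');
  const { url } = await res.json();

  // Clicking a temporary link pointing at the pre-signed URL lets the browser
  // handle the transfer; the ResponseContentDisposition set above makes it
  // save the file as an attachment instead of rendering it.
  const link = document.createElement('a');
  link.href = url;
  document.body.appendChild(link);
  link.click();
  link.remove();
}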

Imgur upload API error always returning 400 with multer and form-data in Node.js

I'm using React antd to upload pictures. Here's my Express route handling the upload request:
router.post('/upload-image', upload.any(), async function (req, res, next) {
  const file = req.files[0];
  const form = new FormData();
  form.append('image', file.buffer);
  form.append('type', 'file');
  const imgurInstance = new imgur({
    refreshToken: user.imgurRefreshToken,
    clientId: IMGUR_CLIENT_ID,
    clientSecret: IMGUR_CLIENT_SECRET
  });
  await imgurInstance.getAccessToken();
  const response = await imgurInstance.uploadImage(form);
  const data = await response.json();
});
Here's req.files[0] logged to the console:
{
  fieldname: 'file',
  originalname: 'headscratch.jpeg',
  encoding: '7bit',
  mimetype: 'image/jpeg',
  buffer: <Buffer ff d8 ff e0 00 10 4a 46 49 46 00 01 01 01 00 01 00 01 00 00 ff e1 00 42 45 78 69 66 00 00 4d 4d 00 2a 00 00 00 08 00 01 87 69 00 04 00 00 00 01 00 00 ... 13803 more bytes>,
  size: 13853
}
And here's my uploadImage function, which uses node-fetch:
uploadImage(form) {
  const path = `/3/upload`;
  const headers = {
    'Authorization': `Bearer ${this.accessToken}`,
    ...form.getHeaders()
  };
  const options = {
    method: 'POST',
    body: form,
    headers
  };
  return fetch(`${this.BASE_API_URL}${path}`, options);
}
I always get a 400 from Imgur, and they don't provide any details about why:
{
  status: 400,
  success: false,
  data: { error: 'Bad Request', request: '/3/upload', method: 'POST' }
}
I have tried using base64, and just uploading a URL via form-data, and I still get a 400 error. Does anybody have any guidance on how to make this call work? Thanks.
I guess I needed to understand what form-data is actually doing. Apparently form-data won't send the field as a file unless you include the filename via a { filename: '...' } option:
form.append('image', file.buffer, { filename: file.originalname });
Imgur finally accepted my request, and voilà.
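Folding that back into the route from the question, the handler might look roughly like this. This is a sketch that reuses the question's own imgurInstance and multer setup; passing contentType as well is an extra precaution, not something Imgur is confirmed to require:

const FormData = require('form-data');

router.post('/upload-image', upload.any(), async function (req, res) {
  const file = req.files[0];

  const form = new FormData();
  // The options object is what makes form-data emit a filename (and content
  // type) in the multipart part headers, so Imgur treats the part as a file.
  form.append('image', file.buffer, {
    filename: file.originalname,
    contentType: file.mimetype,
  });
  form.append('type', 'file');

  // imgurInstance created and authorized exactly as in the question.
  const response = await imgurInstance.uploadImage(form);
  const data = await response.json();
  res.json(data);
});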

Why does my request to the Jira API to create an attachment return an empty array?

I'm having trouble getting my request to Jira Cloud to add an attachment to an issue. It returns 200, but the response is an empty array.
const formData = new FormData();
for (var i = 0; i < files.length; i++) {
  let file = files[i];
  formData.append("file", file.content, file.name);
}
console.log(formData); // see below

let params = {
  method: 'post',
  url: `https://submissive.atlassian.net/rest/api/3/issue/${createKey}/attachments`,
  data: formData.getBuffer(),
  headers: {
    Accept: "application/json",
    'X-Atlassian-Token': 'no-check',
    'Authorization': getAuthHeader(user, apiToken),
    ...formData.getHeaders()
  }
};
console.log('params: ', params);

let response = await axios(params);
console.log(response);
console.log(response.data); // [] empty array
console.log('attachments added');
formdata:
FormData {
  _overheadLength: 143,
  _valueLength: 2946927,
  _valuesToMeasure: [],
  writable: false,
  readable: true,
  dataSize: 0,
  maxDataSize: 2097152,
  pauseStreams: true,
  _released: false,
  _streams: [
    '----------------------------470840621872458708605830\r\n' +
      'Content-Disposition: form-data; name="file"\r\n' +
      'Content-Type: application/octet-stream\r\n' +
      '\r\n',
    <Buffer ff d8 ff e0 00 10 4a 46 49 46 00 01 01 00 01 2c 01 2c 00 00 ff e1 28 3a 45 78 69 66 00 00 4d 4d 00 2a 00 00 00 08 00 06 01 06 00 03 00 00 00 01 00 02 ... 2946877 more bytes>,
    [Function: bound ]
  ],
  _currentStream: null,
  _insideLoop: false,
  _pendingNext: false,
  _boundary: '--------------------------470840621872458708605830'
}
Is it a problem that the length is 0? I've tried adding the form data with and without the extension method, with the same results. I've confirmed I can add an attachment with the account I built my API credentials from. The field name is correct: 'file'. My only thought is that maybe I'm using buffers incorrectly, but the form log above makes it look correct.
In case this helps anyone: I was having the same issue, and the problem was with the call to formData.append(). The third argument needs to be an object.
// get the attachment file

// what I originally had
formData.append("file", attachment.Body) // in my case attachment.Body is the buffer

// what fixed the issue
const fileInfo = {
  filename: fileName,                     // e.g. MyDoc.pdf
  contentType: attachment.ContentType,    // e.g. application/pdf
  knownLength: attachment.ContentLength,  // e.g. 18059464
}
formData.append("file", attachment.Body, fileInfo)
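Applied to the loop in the question above, that would look something like the following; the contentType value is a guess, since the original files array doesn't appear to carry a MIME type:

for (let i = 0; i < files.length; i++) {
  const file = files[i];
  formData.append('file', file.content, {
    filename: file.name,                      // name Jira will show on the issue
    contentType: 'application/octet-stream',  // assumed; use the real MIME type if known
    knownLength: file.content.length          // byte length of the buffer
  });
}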

How do I upload a file to a pre-signed URL in AWS using Node.js and Axios?

I have the following scenario:
Generate a signed URL for file upload with the AWS SDK
Try to upload a local file using the Axios (or request) npm package
But every time I get an error:
Status: 403
statusText: Forbidden
<?xml version="1.0" encoding="UTF-8"?>
<Error><Code>SignatureDoesNotMatch</Code><Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message><AWSAccessKeyId>FakeKeyId</AWSAccessKeyId><StringToSign>PUT
application/json;charset=utf-8
1577742550
/test-bucket-super/xxx/test.mp3</StringToSign><SignatureProvided>DAAOZ0/VkMNEMMlGkRUsSuRO3J4=</SignatureProvided><StringToSignBytes>50 55 54 0a 0a 61 70 70 6c 69 63 61 74 69 6f 6e 2f 6a 73 6f 6e 3b 63 68 61 72 73 65 74 3d 75 74 66 2d 38 0a 31 35 37 37 37 34 32 35 35 30 0a 2f 74 65 73 74 2d 62 75 63 6b 65 74 2d 73 75 70 65 72 2f 78 78 78 2f 74 65 73 74 2e 6d 70 33</StringToSignBytes><RequestId>CBD3F1D0D02EA874</RequestId><HostId>LPu+RQ8otcljI1Wt5FiZm+UmTFNiCX+2HyGtN0kTAugLiT21M55DtbzQdF/s7qOCSaZvzTp4kw4=</HostId></Error>
const axios = require('axios');
const AWS = require('aws-sdk')
const s3 = new AWS.S3({
  accessKeyId: 'FakeKeyId',
  secretAccessKey: 'xxxxxxxxxxxxxxxxxxxxxxxx',
  region: 'eu-west-1'
});
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);

(async () => {
  try {
    const presignedS3Url = s3.getSignedUrl('putObject', {
      Bucket: 'test-bucket-super',
      Key: 'xxx/test.mp3'
    });
    const file = await readFile('./SampleAudio_0.4mb.mp3');
    const axiosResponse = await axios.put(presignedS3Url, {
      data: file,
    });
    console.info(axiosResponse)
  } catch (e) {
    console.error(e)
  }
})();
But I managed to upload the file via cURL
curl -X PUT -T ~/Downloads/SampleAudio_0.4mb.mp3 'https://test-bucket-super.s3.eu-west-1.amazonaws.com/xxx/test.mp3?AWSAccessKeyId=FakeKeyId&Expires=1577741900&Signature=9kPiC%2B85SEFp6g5C3nwEWe4TueU%3D' -v
The issue here is that Axios, for some reason, adds a default Content-Type: application/json header; that's why the signature fails to match. I am not sure how to remove the header.
But the following works for me, where I include the content type during signature generation and then send the same header when using the URL:
(async () => {
  try {
    const presignedS3Url = s3.getSignedUrl('putObject', {
      Bucket: 'bucket-name',
      Key: 'car.jpg',
      ContentType: 'application/octet-stream'
    });
    const file = await readFile('./car.jpg');
    // pass the file buffer directly as the request body
    const axiosResponse = await axios.put(presignedS3Url, file, {
      headers: {
        'Content-Type': 'application/octet-stream'
      }
    });
    console.info(axiosResponse)
  } catch (e) {
    console.error(e)
  }
})();
As mentioned, Axios will default to an application/json Content-Type if one isn't declared. You might find it useful to clear the Content-Type header immediately after creating your Axios instance:
const axios = Axios.create()
delete axios.defaults.headers.put['Content-Type']
Your request needs to match the signature exactly. One apparent problem is that you are not actually including the canned ACL in the request, even though you included it in the signature. Change to this:
var options = { headers: { 'Content-Type': fileType, 'x-amz-acl': 'public-read' } };
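In other words, whatever was signed has to be sent, and nothing signed can be left out. A sketch of a signature and request that agree with each other; the bucket, key, fileType and file variables are placeholders:

// Sign with the same content type and canned ACL that the request will send.
const presignedS3Url = s3.getSignedUrl('putObject', {
  Bucket: 'bucket-name',
  Key: 'car.jpg',
  ContentType: fileType,   // e.g. 'image/jpeg'
  ACL: 'public-read'
});

// The PUT must then repeat exactly those values as headers
// (inside an async function, as in the snippets above).
await axios.put(presignedS3Url, file, {
  headers: {
    'Content-Type': fileType,
    'x-amz-acl': 'public-read'
  }
});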

express-fileupload to Google Drive API

I can successfully save my uploaded image in my public uploads folder and integrate the Google Drive API, but it seems to be uploading an empty file.
What should I put in the body parameter of the Google Drive API call, given my req.files data?
{
  name: 'country.jpg',
  data: <Buffer ff d8 ff e0 00 10 4a 46 49 46 00 01 01 01 00 48 00 48 00 00 ff ed 26 0c 50 68 6f 74 6f 73 68 6f 70 20 33 2e 30 00 38 42 49 4d 04 04 00 00 00 00 00 22 ... 246290 more bytes>,
  size: 246340,
  encoding: '7bit',
  tempFilePath: '',
  truncated: false,
  mimetype: 'image/jpeg',
  md5: '8890c8336c58d854d490b41fa6ec0ad4',
  mv: [Function: mv]
}
Here's my Google Drive API call after auth.
const drive = google.drive({ version: "v3", auth });

drive.files.create({
  media: {
    mimeType: "image/jpeg",
    body: // WHAT TO PUT HERE
  },
  resource: {
    name: "photo.jpg"
    // if you want to store the file in the root, remove this parents
    //parents: ['folder id in which the file needs to be stored.']
  },
  fields: "id"
})
  .then(function (resp) {
    //console.log("RESPONSE");
    console.log(resp, "resp");
  })
  .catch(function (error) {
    console.log("ERROR");
    console.log(error);
  });
Just like @DalmTo mentioned, you need to send your data to the Google Drive API somehow.
If you were sending a file from your filesystem you could use the code she provided:
var media = {
  mimeType: 'image/jpeg',
  body: fs.createReadStream(FILEPATH)
};
However, since you are not saving the file to your filesystem before uploading, you have to adapt this to your situation.
I'm assuming the file reaches your application through a form upload, since you are using Express. If you can use multer as well, you can get your file as a Buffer.
You can then convert that Buffer to a stream.
Your code for handling this would look like this:
var Readable = require('stream').Readable;

function bufferToStream(buffer) {
  var stream = new Readable();
  stream.push(buffer);
  stream.push(null);
  return stream;
}

app.post('/upload', upload.single('picture'), function (req, res, next) {
  // req.file is the `picture` file
  var media = {
    mimeType: 'image/jpeg',
    body: bufferToStream(req.file.buffer)
  };
})
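Since the question actually uses express-fileupload rather than multer, the uploaded file lives on req.files.<fieldName> and its bytes are in the .data Buffer, so the same bufferToStream() helper can be wired straight into the Drive call. A sketch along those lines; the 'picture' field name is just an example:

// express-fileupload variant: the buffer is req.files.<field>.data
app.post('/upload', function (req, res) {
  const file = req.files.picture;   // 'picture' is an assumed form field name

  drive.files.create({
    media: {
      mimeType: file.mimetype,
      body: bufferToStream(file.data)   // stream built from the uploaded buffer
    },
    resource: { name: file.name },
    fields: 'id'
  })
    .then(resp => res.json({ id: resp.data.id }))
    .catch(err => {
      console.log(err);
      res.sendStatus(500);
    });
});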
That's because you are only uploading the file metadata; you need to actually upload the file contents as well.
var media = {
  mimeType: 'image/jpeg',
  // PATH OF THE FILE FROM YOUR COMPUTER
  body: fs.createReadStream('C:/Users/me/Downloads/photo.jpg')
};
