I'm trying to use RefTab's API to pull custom reports, but I'm not sure how to pull the data I want from the response.
I think it might have something to do with decompressing a gzipped (Gunzip) response body?
I've tried accessing res.headers and res.body, but it's not exactly what I'm looking for.
I can't seem to find anything anywhere on RefTab specifically.
I'm not experienced in using REST APIs so I don't know if this is specific to RefTab
Any help would be greatly appreciated!
https://reftab.com
https://reftab.com/api-docs
My index.js file:
const btoa = require('btoa');
const fetch = require('node-fetch');
const CryptoJS = require('crypto-js');
//CryptoJS is needed for the md5 and HmacSHA256 methods
// Signs a RefTab API request (see https://reftab.com/api-docs):
// builds the canonical string method\nMD5(body)\ncontent-type\ndate\nurl,
// HMAC-SHA256s it with the secret key, and attaches the resulting
// Authorization and x-rt-date headers to the request object.
// Mutates and returns `request` so it can be passed straight to fetch().
function signRequest(request) {
  const publicKey = 'PUBLIC_KEY_HERE';
  const secretKey = 'SECRET_KEY_HERE';
  const body = request.body;
  const method = request.method;
  const url = request.url;
  const now = new Date().toUTCString();
  let contentMD5 = '';
  let contentType = '';
  if (body !== undefined) {
    // BUG FIX: crypto-js exports MD5 in upper case; CryptoJS.md5 is
    // undefined and would throw for any request that carries a body.
    contentMD5 = CryptoJS.MD5(body).toString();
    contentType = 'application/json';
  }
  // UTF-8-encode the canonical string before signing (unescape/encodeURIComponent
  // is the legacy idiom for that; kept as-is so the signature stays compatible).
  const signatureToSign = unescape(encodeURIComponent(
    method + '\n' +
    contentMD5 + '\n' +
    contentType + '\n' +
    now + '\n' +
    url));
  const token = btoa(CryptoJS.HmacSHA256(signatureToSign, secretKey));
  const signature = 'RT ' + publicKey + ':' + token;
  request.headers = {};
  request.headers.Authorization = signature;
  request.headers['x-rt-date'] = now;
  return request;
}
//expected input object
// Request descriptor consumed by signRequest(); the signed copy is
// handed to fetch() as its init object.
const options = {
  method: 'GET',
  url: 'https://www.reftab.com/api/assets'
};

fetch(options.url, signRequest(options)).then((res) => {
  console.log(res);
});
The response:
Response {
size: 0,
timeout: 0,
[Symbol(Body internals)]: {
body: Gunzip {
_writeState: [Uint32Array],
_readableState: [ReadableState],
readable: true,
_events: [Object: null prototype],
_eventsCount: 6,
_maxListeners: undefined,
_writableState: [WritableState],
writable: true,
allowHalfOpen: true,
_transformState: [Object],
_hadError: false,
bytesWritten: 0,
_handle: [Zlib],
_outBuffer: <Buffer a0 ac 36 be 49 02 00 00 e0 46 33 be 49 02 00 00 6e 5b b1 6b 01 09 00 00 8c 21 00 00 36 09 00 00 30 87 34 be 49 02 00 00 f0 7d 34 be 49 02 00 00 62 69 ... 16334 more bytes>,
_outOffset: 0,
_chunkSize: 16384,
_defaultFlushFlag: 2,
_finishFlushFlag: 2,
_defaultFullFlushFlag: 3,
_info: undefined,
_level: -1,
_strategy: 0,
[Symbol(kCapture)]: false
},
disturbed: false,
error: null
},
[Symbol(Response internals)]: {
url: 'https://www.reftab.com/api/assets',
status: 200,
statusText: 'OK',
headers: Headers { [Symbol(map)]: [Object: null prototype] },
counter: 0
}
}
I changed:
fetch('https://www.reftab.com/api/assets', signRequest(options))
.then((res) => {
console.log(res);
});
To:
fetch('https://www.reftab.com/api/assets', signRequest(options))
.then((res) => res.json())
.then(json => console.log(json))
And it worked!
Related
I'm trying to upload from my lambda (nodejs express) to s3 bucket. But whenever I upload, and look for my uploaded file in S3, it only shows a small white box. 1
I already tried converting my file buffer to base64, but it is still not working.
My uploaded file only shows if I upload it using my local API (localhost).
Here's my code:
// multer middleware
const multer = require("multer");
const helpers = require("../helpers/image-upload-helper");

// Keep uploads in memory so each file exposes a Buffer (file.buffer)
// that can be passed directly to the S3 PutObjectCommand.
const storage = multer.memoryStorage();

// Accept files from any field name; reject non-images via the shared filter.
// `const` instead of `let`: the binding is never reassigned.
const upload = multer({
  storage: storage,
  fileFilter: helpers.imageFilter,
}).any();
//controller
try {
if(req.files){
for (const file of req.files) {
const ImageName = randomImageNameGenerator.randomImageName()
const params = {
Bucket: process.env.BUCKET_NAME,
Key: ImageName,
Body: file.buffer,
ContentType : file.mimetype,
}
const command = new PutObjectCommand(params)
const myData = await s3.send(command)
}
}
//log of my command
PutObjectCommand {
middlewareStack: {
add: [Function: add],
addRelativeTo: [Function: addRelativeTo],
clone: [Function: clone],
use: [Function: use],
remove: [Function: remove],
removeByTag: [Function: removeByTag],
concat: [Function: concat],
applyToStack: [Function: cloneTo],
identify: [Function: identify],
resolve: [Function: resolve]
},
input: {
Bucket: 'orex-product-images',
Key: 'b465138efab90aba02e5376ef247f536cfb1e7e32e34877bf21ab1bd655b3749',
Body: <Buffer 89 50 4e 47 0d 0a 1a 0a 00 00 00 0d 49 48 44 52 00 00 01 f8 00 00 00 f5 08 06 00 00 00 bc c1 e7 15 00 00 00 01 73 52 47 42 00 ae ce 1c e9 00 00 20 00 ... 10640 more bytes>,
ContentType: 'image/png'
}
}
// log of myData
{
'$metadata': {
httpStatusCode: 200,
requestId: '6C1EM009PP420NRK',
extendedRequestId: 'ZfGR4AR4mElYOSGes68YqEegptyO5PY5iPCvplP89wr1nqT4DZHwo0D0bl5qyZ7aAB0HaDaTAKU=',
cfId: undefined,
attempts: 1,
totalRetryDelay: 0
},
ETag: '"96425366df243451e35a941524b2a019a6ad2b"',
ServerSideEncryption: 'ABDS256',
VersionId: 'rpgj.L5AwGNCcKVzatIY5zHf_SYVNWt0'
}
Note: I didn't see any error in my cloud watch
1 Example of what the white box looks like
For those that arrived here as I did with the same issue, this answer solved it for me:
Using Lambda to get image from S3 returns a white box in Python
And for those using serverless, this is also relevant:
Serverless I image upload to S3 broken after deploy, local worked only
My web allows user to drag and drop a zip file and upload it to AWS S3 bucket. The steps are:
User drag-and-drops a zip file to the drop-zone of the UI;
User clicks send;
A request is made to AWS Lambda function and the function will generate a pre-signed URL that allows the user to upload any file.
An axios PUT request is made to the pre-signed S3 URL to upload the file.
I used local node.js code to test the pre-signed S3 URL:
// Read the whole file into a Buffer (Node.js only; fs is unavailable in the browser).
const fileToUpload = fs.readFileSync(test_file_path);
console.log("fileToUpload: type: ", typeof fileToUpload, ", content: ", fileToUpload);
try {
// PUT the raw bytes to the pre-signed S3 URL.
// NOTE(review): the Content-Type header must match whatever was used when the
// pre-signed URL was generated (empty string here) -- a mismatch yields 403.
const uploadResponse = await axios({
method: 'PUT',
url: presignedUrl,
data: fileToUpload,
headers: {
'Content-Type': '',
},
maxContentLength: Infinity,
maxBodyLength: Infinity
});
return uploadResponse.data;
} catch (error) {
console.error('Error while uploading object to S3:', error.message);
}
And it works well, which proves that the generated pre-signed URL is valid.
However, on client side Reactjs:
// Log the dropped file (a browser File object; the template literal
// stringifies it as "[object File]").
console.log(`formState.file: type: ${formState.file}, content: ${formState.file}`);
// PUT the File object to the pre-signed URL. axios can send a File/Blob body,
// so the File type itself is not necessarily the problem.
// NOTE(review): a 403 from a pre-signed PUT usually means a signature
// mismatch (e.g. a header such as Content-Type differing from what the URL
// was signed for) -- verify against the URL-generation code.
const uploadResponse = await axios({
method: 'PUT',
url: presignedS3Url,
data: formState.file,
headers: {
'Content-Type': ''
},
maxContentLength: Infinity,
maxBodyLength: Infinity
});
It fails and the request ends in a 403 forbidden error.
The difference is that in nodejs code, the fileToUpload is:
type: object, content: <Buffer 50 4b 03 04 14 00 08 00 08 00 78 84 cb 50 00 00 00 00 00 00 00 00 24 ae 12 01 3e 00 20 00 31 2e 32 2e 38 34 30 2e 31 31 33 35 36 34 2e 31 30 2e 31 2e ... 10573784 more bytes>
Whereas in client side, the formState.file is initialized by react-dropzone lib and has the type: formState.file: type: [object File] and its content is:
path: "1.2.840.113564.10.1.312260962047571814316520322884140128208155.zip"
lastModified: 1625164188712
lastModifiedDate: Fri Jul 02 2021 03:29:48 GMT+0900 (Japan Standard Time) {}
name: "1.2.840.113564.10.1.312260962047571814316520322884140128208155.zip"
size: 10573834
type: "application/zip"
webkitRelativePath: ""
[[Prototype]]: File
length: 1
I am not entirely sure that this is the cause. A few thoughts:
fs.readFileSync() is nodejs only, and it is not available in client side Reactjs.
On client side, should I get the zip file in the form of <Buffer ....> and how should I do it?
Or is it ok with the current [object File] type on client side? maybe there is another way to upload it to S3 bucket?
I am trying to access google drive using the node client. This will run on a server in a background process without user involvement. In preparation, I have done the following:
Created a service account
Enabled Drive API access on the account whose drive I am accessing
Shared a particular folder in the drive with the service account (called MyFolder below).
I am able to successfully authenticate as the service account and list files inside the directory. However, I am not able to download any of the files. When I try, I apparently get a 403 error. It's kind of buried in the error message but that appears to be the issue. Here is my code:
const fs = require('fs');
const { google } = require('googleapis');
const auth = require('./service-creds.json');

// Authenticates as a service account and downloads the first file found
// inside the shared folder 'MyFolder' to ./myfile.csv.
(async () => {
  // JWT client with full Drive scope; the folder must be shared with the
  // service account for listing/downloading to succeed.
  const jwtClient = new google.auth.JWT(auth.client_email, null,
    auth.private_key, ['https://www.googleapis.com/auth/drive']);
  try {
    await jwtClient.authorize();
    const drive = google.drive('v3');

    // Locate the shared folder by name.
    const res1 = await drive.files.list({
      auth: jwtClient, q: `name = 'MyFolder'`
    });
    const folder = res1.data.files[0];

    // List everything inside the folder.
    const res2 = await drive.files.list({
      auth: jwtClient,
      q: `'${folder.id}' in parents`
    });
    // print out all files under MyFolder
    res2.data.files.forEach(f => console.log(f.name, f.id));

    const dest = fs.createWriteStream('./myfile.csv');
    const file = res2.data.files[0];

    // BUG FIX: files.export only converts native Google Docs formats and
    // returns 403 for ordinary files (text/plain, CSV, ...). Regular files
    // are downloaded with files.get and alt: 'media'.
    const response = await drive.files.get({
      fileId: file.id,
      alt: 'media',
      auth: jwtClient
    }, {
      responseType: 'stream'
    });
    response.data.on('error', err => {
      console.log(err);
    }).on('end', () => {
      console.log('done');
    }).pipe(dest);
  }
  catch (err) {
    console.log('The API returned an error: ', err);
  }
})();
Here is part of the resulting error:
The API returned an error:
... at Gaxios.<anonymous> (/api-test/node_modules/gaxios/build/src/gaxios.js:73:27)
Response {
size: 0,
timeout: 0,
[Symbol(Body internals)]:
{ body:
Gunzip {
_readableState: [Object],
readable: true,
domain: null,
_events: [Object],
_eventsCount: 7,
_maxListeners: undefined,
_writableState: [Object],
writable: true,
allowHalfOpen: true,
_transformState: [Object],
bytesRead: 0,
_opts: [Object],
_chunkSize: 16384,
_flushFlag: 2,
_finishFlushFlag: 2,
_scheduledFlushFlag: 0,
_handle: [Object],
_hadError: false,
_buffer: <Buffer 00 00 00 00 00 00 00 00 34 00 00 00 00 00 00 00 ... >,
_offset: 0,
_level: -1,
_strategy: 0 },
disturbed: false,
error: null },
[Symbol(Response internals)]:
{ url: 'https://www.googleapis.com/drive/v3/files/123abc123abc/export?mimeType=text%2Fplain',
status: 403,
statusText: 'Forbidden',
headers: Headers { [Symbol(map)]: [Object] },
counter: 0 } }
I have not been able to find anything in the error that states why the 403 is being thrown. It appears to be zipped up but I have not been able to successfully unzip any part of it.
You want to download a file of text/plain from Google Drive.
If my understanding is correct, how about this modification?
Modification points:
I think the reason for your issue is that you are downloading a file with the mimeType text/plain using the files.export method of the Drive API.
When Google Docs (Spreadsheet, Document, Slides and so on) files are downloaded, you can do it by the files.export method of Drive API.
When you want to download the files except for Google Docs, please use the files.get method.
When I tried to download the file of text/plain using the files.export method, I could confirm that the same error occurs.
In order to reflect above points, please modify as follows.
Modified script:
From:
const response = await drive.files.export({
fileId: file.id,
mimeType: file.mimeType,
auth: jwtClient
}, {
responseType: 'stream'
});
To:
const response = await drive.files.get({
fileId: file.id,
alt: "media",
auth: jwtClient
}, {
responseType: 'stream'
});
Reference:
Download files
I am using dropbox for node: "dropbox": "^4.0.17" and trying to upload a file.
Here is the example code:
require('dotenv').config();
const fs = require('fs');
const fetch = require('isomorphic-fetch');
const Dropbox = require('dropbox').Dropbox;

const config = { accessToken: process.env.DROPBOX_ACCESS_TOKEN, fetch: fetch };
const dbx = new Dropbox(config);

// Read the file to upload into a Buffer.
const fileContent = fs.readFileSync('full path to some pdf files');

// BUG FIX: filesUpload() takes a FilesCommitInfo object ({ path, contents }),
// not the raw file contents. Passing the Buffer directly makes the SDK send
// the whole file as API-call parameters -- which travel in a request header --
// triggering nginx's "400 Request Header Or Cookie Too Large" error.
dbx.filesUpload({ path: '/some/destination/file/path/and/name.ext', contents: fileContent })
  .then((response) => {
    console.log('response', response);
  })
  .catch((err) => {
    console.log('error', err);
  });
and here is the response:
{ error: '<html>\r\n<head><title>400 Request Header Or Cookie Too Large</title></head>\r\n<body>\r\n<center><h1>400 Bad Request</h1></center>\r\n<center>Request Header Or Cookie Too Large</center>\r\n<hr><center>nginx</center>\r\n</body>\r\n</html>\r\n',
response:
Body {
url: 'https://content.dropboxapi.com/2/files/upload',
status: 400,
statusText: 'Bad Request',
headers: Headers { _headers: [Object] },
ok: false,
body:
PassThrough {
_readableState: [ReadableState],
readable: false,
domain: null,
_events: [Object],
_eventsCount: 4,
_maxListeners: undefined,
_writableState: [WritableState],
writable: false,
allowHalfOpen: true,
_transformState: [Object] },
bodyUsed: true,
size: 0,
timeout: 0,
_raw:
[ <Buffer 3c 68 74 6d 6c 3e 0d 0a 3c 68 65 61 64 3e 3c 74 69 74 6c 65 3e 34 30 30 20 52 65 71 75 65 73 74 20 48 65 61 64 65 72 20 4f 72 20 43 6f 6f 6b 69 65 20 ... > ],
_abort: false,
_bytes: 226 },
status: 400 }
The argument passed to filesUpload should be a FilesCommitInfo, not just the file contents directly. You can find an example of what it should look like here.
So, for your code, instead of:
dbx.filesUpload(fileContent)
you should do something like:
dbx.filesUpload({ path: '/some/destination/file/path/and/name.ext', contents: fileContent})
(The way you currently have it will end up trying to send the entire file contents as the API call parameters, which happen to be sent in a header, causing the error you get.)
I am trying to insert a media document to cloudant couchdb using below below code.
var uuid = require('uuid').v4;

// Reads ./test.jpeg and stores it as an attachment on a new 'media'
// document via a Cloudant/CouchDB multipart insert.
fs.readFile('./test.jpeg', function (err, data) {
  if (err) {
    // BUG FIX: the original `if (!err)` silently swallowed read failures;
    // report them instead of doing nothing.
    console.log('Error: reading ./test.jpeg failed.');
    console.log(err);
    return;
  }
  // Metadata for the document the attachment is stored on.
  var newAttachmentObj = {
    type: 'media',
    media_mime: 'image/jpeg',
    media_type: 'Photo',
    media_filename: 'rabbit1'
  }
  var filename = 'rabbit1';
  var media_mime = 'image/jpeg';
  var attachment_id = uuid();
  var media_data = data;
  console.log(data);
  console.log(newAttachmentObj);
  console.log(attachment_id);
  // Multipart insert: document JSON plus the attachment in a single PUT.
  db.multipart.insert(newAttachmentObj,
    [{ name: filename, data: media_data, content_type: media_mime }], attachment_id,
    function (err, body) {
      // BUG FIX: `body` was previously logged unconditionally before the
      // error check, printing `undefined` on failure and the body twice
      // on success; log it once, in the right branch.
      if (err) {
        console.log('Error: Creating a media doc in cloudant.');
        console.log(err);
        // console.log(JSON.stringify(err));
      } else {
        console.log(body);
      }
    })
});
Document is getting created in the cloudant couchdb and also we can view the uploaded attachment, but the callback function returns an error as shown below.
{ Error: function_clause
at Request._callback (/home/boatman/anoop/forwarding-module/node_modules/cloudant-nano/lib/nano.js:248:15)
at Request.self.callback (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:188:22)
at emitTwo (events.js:125:13)
at Request.emit (events.js:213:7)
at Request.<anonymous> (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:1171:10)
at emitOne (events.js:115:13)
at Request.emit (events.js:210:7)
at IncomingMessage.<anonymous> (/home/boatman/anoop/forwarding-module/node_modules/request/request.js:1091:12)
at Object.onceWrapper (events.js:314:30)
at emitNone (events.js:110:20)
at IncomingMessage.emit (events.js:207:7)
at endReadableNT (_stream_readable.js:1045:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickDomainCallback (internal/process/next_tick.js:218:9)
name: 'Error',
error: 'unknown_error',
reason: 'function_clause',
ref: 944644368,
scope: 'couch',
statusCode: 500,
request:
{ method: 'PUT',
headers: { 'content-type': 'multipart/related' },
uri: 'https://XXXXXX:XXXXXX#account_id-bluemix.cloudant.com/db_media/a73d3788-d661-4944-964b-bcffce0286bd',
multipart: [ [Object], [Object] ] },
headers:
{ 'cache-control': 'must-revalidate',
'content-type': 'application/json',
date: 'Tue, 17 Apr 2018 08:37:28 GMT',
'x-couch-request-id': '5097b3e876',
'x-couch-stack-hash': '944644368',
'x-frame-options': 'DENY',
'strict-transport-security': 'max-age=31536000',
'x-content-type-options': 'nosniff',
'x-cloudant-request-class': 'write',
'x-cloudant-backend': 'bm-cc-uk-04',
via: '1.1 lb1.bm-cc-uk-04 (Glum/1.50.4)',
statusCode: 500,
uri: 'https://XXXXXX:XXXXXX#account_id-bluemix.cloudant.com/db_media/a73d3788-d661-4944-964b-bcffce0286bd' },
errid: 'non_200',
description: 'couch returned 500' }
Please find the image below.
I tried to reproduce your conditions with CouchDB 2.1.1, not Cloudant, therefore it may not be an exact reproduction, but I thought I'd share my results.
I created the following server.js file based on your code with a little modification. I used package nano for CouchDB not cloudant-nano since I'm not using Cloudant:
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"; // Ignore rejection, because the CouchDB SSL certificate is self-signed
var uuid = require('uuid').v4;
const fs = require('fs')
const nano = require('nano')('https://admin:****#192.168.1.106:6984');
// Database 'reproduce' is already created in CouchDB 2.1.1
const db = nano.db.use('reproduce');
// Read the sample image and store it as an attachment on a new 'media' document.
fs.readFile('./test.jpeg', function(err, data) {
if(err){
console.log('err -> ', err)
}else if (!err) {
// Metadata for the document the attachment is stored on.
var newAttachmentObj = {
type: 'media',
media_mime: 'image/jpeg',
media_type: 'Photo',
media_filename: 'rabbit1'
}
var filename = 'rabbit1';
var media_mime = 'image/jpeg';
var attachment_id = uuid();
var media_data = data;
console.log(data);
console.log(newAttachmentObj);
console.log(attachment_id);
// Multipart insert: document JSON plus the attachment in a single PUT.
db.multipart.insert(newAttachmentObj,
[{ name: filename, data: media_data, content_type: media_mime }], attachment_id, function (err, body) {
// NOTE(review): `body` is logged here unconditionally and again in the
// success branch below -- that is why the { ok: true, ... } result
// appears twice in the output captured from this run.
console.log(body);
if (err) {
console.log('Error: Creating a media doc in cloudant.');
console.log(err);
// console.log(JSON.stringify(err));
} else {
console.log(body);
}
})
}
});
I placed a sample test.jpeg in the same directory as my server.js file. Then I ran the code with command $ node server.js and I got the following results:
$ node server.js
<Buffer ff d8 ff e1 00 32 45 78 69 66 00 00 49 49 2a 00 08 00 00 00 01 00 98 82 02 00 0e 00 00 00 1a 00 00 00 00 00 00 00 52 61 77 70 69 78 65 6c 20 4c 74 64 ... >
{ type: 'media',
media_mime: 'image/jpeg',
media_type: 'Photo',
media_filename: 'rabbit1' }
ec6a36d1-952e-4d86-9865-3587c6079fb5
{ ok: true,
id: 'ec6a36d1-952e-4d86-9865-3587c6079fb5',
rev: '1-896eca9e9980509aeaa8539b281c3257' }
{ ok: true,
id: 'ec6a36d1-952e-4d86-9865-3587c6079fb5',
rev: '1-896eca9e9980509aeaa8539b281c3257' }
Obviously, I don't receive the errors you're getting.