Convert Binary Media Data to Buffer in Node.js

I am using a third-party API which returns an image as binary media data. After getting this data, I want to upload it to Google Cloud Storage. To do this, I need to convert the data into a Buffer. I've tried multiple times but failed.
I am using Node.js with the npm request module to call the API that saves the image to Google Cloud Storage.
Here is the code:
var binaryData = data;
var bufferData = new Buffer(data);
request({
  method: "POST",
  url: '/endpoint/upload',
  headers: {
    'cache-control': 'no-cache',
    'content-type': 'multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW'
  },
  formData: {
    filename: {
      value: fileBase64,
      options: {
        filename: 'test.jpg',
        contentType: 'image/jpeg'
      }
    },
  }
}, function(err, response, body){
  console.log(body);
})

Your POST request should follow the template described in the documentation. My POST request looks like this:
const https = require('https');

const req = https.request({
  method: 'POST',
  protocol: 'https:',
  hostname: 'www.googleapis.com',
  path: '/upload/storage/v1/b/[bucket-name]/o?uploadType=media&name=[file-name]',
  headers: {
    'content-type': 'image/png',
    'content-length': Buffer.byteLength(data),
    'authorization': 'Bearer [bearer-token]'
  }
}, (res) => {
  console.log(res.statusCode);
  console.log(res.statusMessage);
  console.log(res.headers);
});

// Send the binary payload and finish the request
req.write(data);
req.end();
It also looks like you’re lacking authentication. You need to use OAuth 2.0 for Google Cloud Storage. Make sure the Cloud Storage JSON API is enabled too.
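If it helps, here is a minimal sketch of an authenticated upload using the official @google-cloud/storage client, which takes care of OAuth 2.0 for you. The bucket name, file name, and key file path below are placeholders:

const { Storage } = require('@google-cloud/storage');

// Credentials come from a service-account key file (placeholder path);
// the client handles the OAuth 2.0 token exchange internally.
const storage = new Storage({ keyFilename: '/path/to/service-account.json' });

async function uploadImage(buffer) {
  // save() accepts a Buffer directly and performs the upload request
  await storage.bucket('my-bucket').file('test.jpg').save(buffer, {
    contentType: 'image/jpeg'
  });
}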

You need to obtain your file as a stream. Here's a useful post that explains how to do that with axios. Once you have downloaded the file on the server, you can read it back as a Buffer with fs.readFile.
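For instance, a minimal sketch of that flow, assuming axios (the URL and temporary file path are placeholders):

const axios = require('axios');
const fs = require('fs');

async function downloadToBuffer(url, tmpPath) {
  // Ask axios for a stream instead of a decoded body
  const res = await axios.get(url, { responseType: 'stream' });

  // Write the stream to disk first...
  await new Promise((resolve, reject) => {
    const out = fs.createWriteStream(tmpPath);
    res.data.pipe(out);
    out.on('finish', resolve);
    out.on('error', reject);
  });

  // ...then read it back as a Buffer
  return fs.promises.readFile(tmpPath);
}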

Related

Forge API: translate InventorParams of {URL}/workitems into a supported format like SVF, DWG

I can't figure out how to translate the model at InventorParams.url into any supported format like SVF, DWG, or OBJ.
I am sending data ({InventorParams.url}) to the /workitems endpoint via axios:
axios({
  method: 'POST',
  url: 'https://developer.api.autodesk.com/da/us-east/v3/workitems',
  headers: {
    'Authorization': 'Bearer ******',
    'content-type': 'application/json'
  },
  data: JSON.stringify({
    activityId: 'BAsBRLiyiaHR1X9eYiAI4ATPmdcuZ5Pf.NamiliftActivity+beta',
    arguments: {
      InventorDoc: {
        url: 'https://developer.api.autodesk.com/oss/v2/signedresources/1a2ac1d9-a8af-4aa2-b9d1-8b0fde21bcf3?region=US',
        pathInZip: 'MasterAssembly.iam'
      },
      InventorParams: {
        url: 'data:application/json,{"ProjectNumber":"12345","ProjectName":"8000-2016","PreparedBy":"Nami","DrawingNumber":"Nami","CommissionNumber":"","ElevatorDesignation":"","ElevatorNumber":"","Logo":"NamiLift","LanguageOptions":"ENG","Standard":"PL400","NumberOfFloor":2,"LengthOfCar":2000,"WidthOfCar":1100,"Pits":true,"PitHeight":50,"ZoneSectionStart":239,"HeightAboveTopElevator":0,"ElectricBoxSide":"a","ElectricBoxFloorNo":"0","ElectricBoxDefault":false,"OuterRoof":false,"OuterRoofSide":"A","WindowsSectionA":"0-0","WindowsSectionB":"0-5000","WindowsSectionC":"0-0","WindowsSectionD":"0-0","DefaultDoorSize":false,"Colorelevator":"RAL 9001","Colordoor":"RAL 9001","Safelinemx2":false,"Safelinemx2gsm":false,"Schoolcontrolassa":false,"Schoolcontrol":false,"Intercom":false,"Callsend":false,"Firealarmdrive":false,"Folderseat":false,"Floorvoiceannouncement":false,"Lsfcable":false,"Telephone":false,"Keyswitch":false,"Ups":false,"Comments":"-","LiftHeightLevel0":0,"DoorSideLevel0":"A","TypeOfDoorLevel0":"A1","DoorHingeLevel0":"Right","DoorSizeLevel0":900,"DoorHeightLevel0":2000,"DoorCloserLevel0":true,"CanopyLevel0":true,"CallBoxLevel0":true,"LiftHeightLevel1":5000,"DoorSideLevel1":"A","TypeOfDoorLevel1":"A1","DoorHingeLevel1":"Right","DoorSizeLevel1":900,"DoorHeightLevel1":2000,"DoorCloserLevel1":false,"CanopyLevel1":false,"CallBoxLevel1":false}'
      },
      OutputPDF: {
        url: fileName.resultZipUrl,
        headers: {
          'Authorization': 'Bearer ' + values.access_token,
          'Content-type': 'application/octet-stream'
        },
        verb: 'put'
      },
      onComplete: {
        verb: 'post',
        url: config.credentials.callback_url + '/api/forge/datamanagement/signanddownload'
      }
    }
  })
});
WorkItem Output
{"status":"pending","stats":{"timeQueued":"2022-02-09T03:31:43.525012Z"},"id":"27fa82758b794111a91025ed69da6fec"}
This is the workitem/:id response for that InventorParams.
The report.txt obtained from the reportUrl of the workitem/:id response shows a GET of the ZIP (under the InventorDoc.pathInZip name) from oss/v2/signedresources. [report.txt contents not reproduced here]
The activity you pasted seems to be only about creating a PDF for the drawing of a given Inventor model (see the OutputPDF output parameter).
InventorParams simply provides a JSON file for the work item; the code in your app bundle can read it and use its values to modify the model accordingly through the Inventor API.
To better understand exactly how the input and output parameters, including the JSON file, are used by the app bundle, please have a look at the tutorial here:
https://learnforge.autodesk.io/#/tutorials/modifymodels
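As an aside, since the work item above is still pending: a minimal sketch for polling the documented GET workitems/:id endpoint until the job finishes (assuming axios and the same bearer token as in the question):

const axios = require('axios');

async function waitForWorkItem(id, token) {
  for (;;) {
    const res = await axios.get(
      `https://developer.api.autodesk.com/da/us-east/v3/workitems/${id}`,
      { headers: { Authorization: `Bearer ${token}` } }
    );
    const { status, reportUrl } = res.data;
    // 'pending' and 'inprogress' are the non-terminal statuses
    if (status !== 'pending' && status !== 'inprogress') {
      return { status, reportUrl };
    }
    // Wait 5 seconds between polls
    await new Promise(resolve => setTimeout(resolve, 5000));
  }
}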

Pipe gzipped content without decompression to response in NodeJS

I am using the node-fetch library, which decompresses responses by default. I want to make a POST request that returns gzipped content and pipe that upstream response straight through to my own response.
The code I am currently using is here:
router.post('/getData', async (request, response) => {
  fetch(`http://url:port/path`, { method: 'POST', headers: { 'Accept-Encoding': 'gzip' }, body: '' })
    .then(data => {
      return data.body.pipe(response);
    });
});
I understand that the node-fetch library decompresses the data by default, but I do not need that; I want to pass the compressed data through directly using streams, like a proxy.
What worked for me was setting compress: false but then adding the header to accept gzip encoding:
fetch(url, {
  compress: false,
  headers: { "accept-encoding": "gzip" },
});
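Putting it together, a minimal sketch of the proxy route under that approach (assuming Express and node-fetch v2; the upstream URL is the placeholder from the question):

const fetch = require('node-fetch');

router.post('/getData', async (request, response) => {
  // compress: false stops node-fetch from gunzipping the body,
  // so the gzipped bytes pass through untouched
  const upstream = await fetch('http://url:port/path', {
    method: 'POST',
    compress: false,
    headers: { 'Accept-Encoding': 'gzip' },
    body: ''
  });

  // Tell the client the payload is still gzipped, then stream it through
  response.set('Content-Encoding', 'gzip');
  upstream.body.pipe(response);
});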

Agora.io: issue with the acquire POST call (REST API) in cloud recording

I am trying to set up cloud recording for an Agora.io video call. According to the Agora docs, the first step is to call the acquire API.
Initially I had an issue with an unescaped character in the URL using axios in Node.js, so I used encodeURI to bypass that error. My request config is as follows:
{
  "url": "https://api.agora.io%E2%80%8B/v1%E2%80%8B/apps%E2%80%8B/xxxxxxx_APPID_xxxx%E2%80%8B/cloud_recording%E2%80%8B/acquire",
  "method": "post",
  "data": "{\"cname\":\"5f30xxxx-xx33-xxa9-adxx-xxxxxxxx\",\"uid\":\"75\",\"clientRequest\":{\"resourceExpiredHour\":24}}",
  "headers": {
    "Accept": "application/json, text/plain, */*",
    "Content-Type": "application/json;charset=utf-8",
    "Authorization": "Basic xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
    "User-Agent": "axios/0.19.2",
    "Content-Length": 102
  },
  "transformRequest": [ null ],
  "transformResponse": [ null ],
  "timeout": 0,
  "xsrfCookieName": "XSRF-TOKEN",
  "xsrfHeaderName": "X-XSRF-TOKEN",
  "maxContentLength": -1
}
I get this response:
Error: Request failed with status code 400
I have enabled cloud recording in the Agora console, but I still get the same error.
I would recommend taking a look at the Agora Postman Collection, which provides properly formatted requests.
Your request URL contains URL-encoded zero-width spaces (%E2%80%8B), most likely picked up when copying the endpoint from the docs, and they make the path invalid.
For example, your request should look like this:
var axios = require('axios');
var data = JSON.stringify({ "cname": "demo", "uid": "527841", "clientRequest": { "resourceExpiredHour": 24 } });
var config = {
  method: 'post',
  url: 'https://api.agora.io/v1/apps/<xxxx_APPID_xxxx>/cloud_recording/acquire',
  headers: {
    'Content-Type': 'application/json',
    'Authorization': 'Basic xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
  },
  data: data
};

axios(config)
  .then(function (response) {
    console.log(JSON.stringify(response.data));
  })
  .catch(function (error) {
    console.log(error);
  });
Here is a guide I've written for getting started with the Postman Collection for Agora's Cloud Recording.
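As a side note, since the bad characters came from copying the endpoint out of the docs, stripping zero-width characters before sending is a cheap safeguard (rawUrl here is just an illustrative name):

// U+200B is the zero-width space that appears as %E2%80%8B once URL-encoded
const cleanUrl = rawUrl.replace(/\u200B/g, '');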

Google Places: how to make autocomplete with componentRestrictions search only within a specific country?

When the user types in my autocomplete search, I want the data coming back from Google's servers to be restricted to a specific country.
I am calling their web service from Node.js like this:
var headers = {
  'User-Agent': 'Super Agent/0.0.1',
  'Content-Type': 'application/x-www-form-urlencoded'
};

// Configure the request
var options = {
  url: "https://maps.googleapis.com/maps/api/place/autocomplete/json",
  method: 'POST',
  headers: headers,
  qs: {
    key: gkey,
    input: input,
    types: ['(cities)'],
    componentRestrictions: { country: 'il' }
  }
};

// Start the request
request(options, function (error, response, body) {
  // returning the body to the frontend
});
Note that I have tried playing with this, but nothing works; I still see worldwide results. I have searched for a solution but found nothing that fixes it.
Google Maps API web services are supposed to be used with the GET method, not POST; the only web service meant to be used with POST is the Geolocation API. Note also that the Places Autocomplete web service expects a components=country:il query parameter (and a plain types string) rather than the JavaScript API's componentRestrictions object.
You should change the code to:
var headers = {
  'User-Agent': 'Super Agent/0.0.1',
  'Content-Type': 'application/x-www-form-urlencoded'
};

// Configure the request
var options = {
  url: "https://maps.googleapis.com/maps/api/place/autocomplete/json",
  method: 'GET',
  headers: headers,
  qs: {
    key: gkey,
    input: input,
    types: '(cities)',        // the web service expects a plain string
    components: 'country:il'  // not the JS API's componentRestrictions object
  }
};

// Start the request
request(options, function (error, response, body) {
  // returning the body to the frontend
});
I hope this helps!

Gzip decompression of HTTP response

Using the request module, I am trying to fetch a response from a web service whose API request carries the following header:
accept-encoding : gzip
and, correspondingly, the response carries:
content-encoding : gzip
When I try to decompress the response (to get the correct readable body) using zlib (referred to here), I am unable to do so.
Code snippet:
var options = {
  url: url,
  qs: params.qparams,
  method: params.method,
  json: params.body,
  headers: {
    'api_key': configkey,
    'Content-Type': 'application/json',
    'Accept-Encoding': 'gzip'
  },
  timeout: constants.request_timeout
};

request(options, function(err, response, body) {
  var encoding = response.headers['content-encoding'];
  if (encoding && encoding.indexOf('gzip') >= 0) {
    zlib.gunzip(body, function(err, dezipped) {
      // ERROR: { [Error: incorrect header check] errno: -3, code: 'Z_DATA_ERROR' }
      var json_string = dezipped.toString('utf-8');
      var json = JSON.parse(json_string);
      console.log('\nJSON ::\n', json);
    });
  } else {
    console.log('\n\nRESPONSE IS NOT GZIPPED!');
  }
});
I am getting an error here (as commented in the code) when using zlib.
I could not figure out where it goes wrong. I tried multiple npm modules such as unzipResponse and compress-buffer, and tried the different approaches for handling gzip suggested in various places.
If someone can help resolve this, I'll be really thankful.
The solution turned out to be adding one more key, encoding: null, to the options object. Without it, request decodes the body into a string, whereas zlib.gunzip needs the raw Buffer:
var options = {
  url: url,
  qs: params.qparams,
  method: params.method,
  json: params.body,
  headers: {
    'api_key': configkey,
    'Content-Type': 'application/json',
    'Accept-Encoding': 'gzip'
  },
  timeout: constants.request_timeout,
  encoding: null
};
If someone has a better approach to the decompression, please add it.
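One such approach: the request module has a built-in gzip option that sends the Accept-Encoding header and decompresses the response transparently, so the manual zlib step disappears (a minimal sketch with otherwise the same options):

var options = {
  url: url,
  qs: params.qparams,
  method: params.method,
  json: params.body,
  headers: {
    'api_key': configkey,
    'Content-Type': 'application/json'
  },
  timeout: constants.request_timeout,
  gzip: true // request handles Accept-Encoding and gunzips the body
};

request(options, function(err, response, body) {
  // body arrives already decompressed (and parsed, since json is set)
  console.log(body);
});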
