What I'm trying to do is make a GET request, store the JSON data in a variable, manipulate it, and then make a PUT request. I can't seem to find documentation on this, so maybe I'm thinking about it wrong. Once I have the variable, I want to do something like the code below. I already have all of the data from the GET request saved to an outputV3.json file.
var fs = require('fs');

var outputJson = fs.readFileSync("outputV3.JSON");
outputJson = JSON.parse(outputJson);

(function () {
    for (let i = 0; i < outputJson.objects.length; i++) {
        let postId = outputJson.objects[i].id.toString();
        let newSlug = outputJson.objects[i].slug.replace("blog/", "");
        let urlToPut = "https://api.hubapi.com/content/api/v2/blog-posts/blogPostId?hapikey=" + process.env.HAPIKEY;
        urlToPut = urlToPut.replace("blogPostId", postId);
        let put_data = JSON.stringify({
            "slug": newSlug
        });
        // this is the part I can't figure out: how do I send put_data
        // as the body of a PUT request to urlToPut?
    }
})();
If you need to stream your data from a file to a request, you should create a read stream from the file and pipe it to the destination:
const fs = require('fs');
const http = require('http');

const dataStream = fs.createReadStream('outputV3.JSON');

const options = {
  hostname: 'www.example.com',
  port: 80,
  path: '/destination',
  method: 'PUT',
  headers: {
    'Content-Type': 'application/json',
  }
};

const req = http.request(options, (res) => {
  // response processing...
});

dataStream.pipe(req);
And if you need more advanced logic while streaming, you should consider putting a custom Transform stream between the readable file stream and the writable request stream.
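For example, here is a minimal sketch of such a Transform (the per-chunk logic is just a placeholder byte counter; swap in whatever processing you actually need):

const { Transform } = require('stream');

// Placeholder transform: counts bytes and passes every chunk through
// unchanged. Replace the body of transform() with your real logic.
let bytesSeen = 0;
const audit = new Transform({
  transform(chunk, encoding, callback) {
    bytesSeen += chunk.length;
    callback(null, chunk); // forward the chunk as-is
  },
  flush(callback) {
    console.log(`streamed ${bytesSeen} bytes to the request`);
    callback();
  }
});

dataStream.pipe(audit).pipe(req);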
If I wanted to use request-promise, could I do something like the following?
let options = {
    uri: 'https://api.hubapi.com/content/api/v2/blog-posts?limit=1000&hapikey=' + process.env.HAPIKEY,
    method: 'GET',
    transform: function (body, response) {
        for (var i = 0; i < body.objects.length; i++) {
            var postId = body.objects[i].id.toString();
            var newSlug = body.objects[i].slug.replace("blog/", "");
        }
    }
};

rp(options)
    .then(function (removedSlug) {
        // ...and then somehow pipe each result into a PUT?
        // .pipe(request.put('https://api.hubapi.com/content/api/v2/blog-posts/blogPostId?hapikey=' + process.env.HAPIKEY))
    });
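Or, written out more fully, would something along these lines work? (Untested sketch; I'm assuming request-promise will parse the GET body with json: true and that the PUT endpoint takes the post id in the path.)

var rp = require('request-promise');

var getOptions = {
    uri: 'https://api.hubapi.com/content/api/v2/blog-posts?limit=1000&hapikey=' + process.env.HAPIKEY,
    json: true // parse the GET response body as JSON
};

rp(getOptions)
    .then(function (body) {
        // one PUT per post, with the "blog/" prefix stripped from the slug
        return Promise.all(body.objects.map(function (post) {
            return rp({
                method: 'PUT',
                uri: 'https://api.hubapi.com/content/api/v2/blog-posts/' + post.id + '?hapikey=' + process.env.HAPIKEY,
                body: { slug: post.slug.replace('blog/', '') },
                json: true
            });
        }));
    })
    .then(function () {
        console.log('all slugs updated');
    })
    .catch(console.error);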
I am reading a JSON file using fs.readFileSync, and for each document obtained I make a REST API call using client.post. Once I get the response, I want to write the received content into another JSON file that is a replica of the input JSON, except for an additional element holding the data received from the client.post call. However, probably because of the async nature of client.post, I am unable to add the element to the output JSON. I am new to Node.js. Can you please help me see where I am going wrong? Below are the code and data.
data:
[
    {
        "ticker": "CLYD"
    },
    {
        "ticker": "EGH"
    }
]
Code:
var fs = require('fs');
var Client = require('node-rest-client').Client;
var data = fs.readFileSync(__dirname + "/data/stocks.json", "utf8");
processData(data);
function processData(data) {
    var obj = JSON.parse(data);
    for (var j = 0; j < obj.length; j++) {
        obj[j].stockInformation = getValuesForTicker(obj[j].ticker.trim());
    }
    var jsonOutput = JSON.stringify(obj, null, '\t');
    fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
};
function getValuesForTicker(ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    var client = new Client();
    var values;
    // set content-type header and data as json in args parameter
    var args = {
        data: { "ticker": ticker },
        headers: { "Content-Type": "application/json", "Accept": "application/json" }
    };
    var responseToRequest = client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
        // parsed response body as js object
        values = JSON.parse(JSON.stringify(data)).price;
    });
    return values;
};
Since getValuesForTicker makes an async call to fetch data, it should invoke a callback (or better, return a promise) once the data is received, rather than return the result directly. Currently undefined is returned, because the function returns before the value is assigned:
function getValuesForTicker(ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    return new Promise(function (resolve, reject) {
        var client = new Client();
        // set content-type header and data as json in args parameter
        var args = {
            data: { "ticker": ticker },
            headers: { "Content-Type": "application/json", "Accept": "application/json" }
        };
        client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
            // parsed response body as js object
            var values = JSON.parse(JSON.stringify(data)).price;
            resolve(values);
        });
    });
}
and to get the data once the async call is done, you will need to call the then function, as below:
getValuesForTicker(obj[j].ticker.trim())
    .then(function (val) {
        obj[j].stockInformation = val;
    })
Considering you are new to Node.js, this will take some getting used to. Take some time to understand callbacks and promises first.
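Note also that processData writes the output file before any of the responses have arrived. One way to handle that is to collect the promises and wait for all of them before writing the file; a rough sketch, reusing the promise-based getValuesForTicker above:

function processData(data) {
    var obj = JSON.parse(data);

    // kick off one request per ticker and keep the promises
    var pending = obj.map(function (item) {
        return getValuesForTicker(item.ticker.trim())
            .then(function (values) {
                item.stockInformation = values;
            });
    });

    // write the output file only once every request has completed
    Promise.all(pending).then(function () {
        var jsonOutput = JSON.stringify(obj, null, '\t');
        fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
    });
}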
Hi, I am new to this domain. I am trying to do a PUT request that sends a JSON file. I have the JSON file created, and I have to perform PUT and POST requests using the URIs. Please can anyone post Node.js code for this? It would be helpful. I created a PUT request file like this:
var fs = require("fs");
var request = require('request');

var jsonPath = fs.readFileSync('filepath');
// String --> Object
var jsonObj = JSON.parse(jsonPath);
//console.log(changedevicename.call()); // changedevicename is not defined in this file

for (var i = 0; i < jsonObj.ipConfig.length; i++) {
    var ipv4URI = jsonObj.ipConfig[i].ipv4; // taking ipv4 from json file
    var ipv6URI = jsonObj.ipConfig[i].ipv6; // taking ipv6 from json file
    console.log(ipv4URI);
    console.log(ipv6URI);
    rest_service();
    //console.log(config[i]);
}

function rest_service() // should I change this or what
{
    //IP = userGivenIP;
    //IP = '192.168.0.1';
    request(
        {
            method: 'PUT',
            url: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + 'IPV6' + 'URI',
            headers: {
                'Content-Type': 'application/json', // check this, I should change this
            },
            //var ip4Json = JSON.parse(body); // check this, I should change this
            //console.log('\n\n'+ body + '\n\n');
        },
        function (error, response, body) // check this, I should change this
        {
            if (error) {
                console.log("printerror", error);
            } else {
                console.log(body);
                console.log("IP disabled");
            }
        });
}
This has to be done dynamically, but I am not sure how to do it for the PUT and POST requests. Please help me out with the code.
Thanks and regards,
Prathamesh
You can add a body parameter to request.
const request = require('request');

const jsonBody = {
  key1: value1,
  key2: value2
};

const headers = {
  authorization: "<token>"
};

const options = {
  method: 'PUT',
  uri: "some-url",
  headers: headers, // headers, if your api requires them
  body: jsonBody,
  json: true // serializes jsonBody and parses the JSON response
};

request(options, function(err, response) {
  // handle err first
  // do stuff with response
});
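Applied to the loop in your question, that could look roughly like this (a sketch: 'filepath' and the ipv4/ipv6 values are the placeholders from your code, and I'm assuming each config entry itself is the JSON payload you want to PUT):

var fs = require('fs');
var request = require('request');

var jsonObj = JSON.parse(fs.readFileSync('filepath', 'utf8'));

jsonObj.ipConfig.forEach(function (entry) {
  var options = {
    method: 'PUT',
    uri: entry.ipv6, // or entry.ipv4, whichever endpoint you are updating
    headers: { 'Content-Type': 'application/json' },
    body: entry,     // the JSON payload for this entry
    json: true
  };

  request(options, function (err, response, body) {
    if (err) {
      console.error('PUT failed for', options.uri, err);
      return;
    }
    console.log('PUT', options.uri, '->', response.statusCode);
  });
});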
You should go through the docs: https://www.npmjs.com/package/request
I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
    url: requestedJobObject.url + 'build',
    method: 'POST',
    port: 8080
};

// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
    var jsonParametersString = JSON.stringify({"parameter": parameters});
    var parameterParam = encodeURIComponent(jsonParametersString);
    parameters.json = parameterParam;
    jobOptions.headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': querystring.stringify(parameters).length
    };
    jobOptions.url += 'WithParameters';
    postData = querystring.stringify(parameters);
}
// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
    var accumulator = '';
    var parsedUrl = u.parse('//' + object.url, true, true);

    var options = {
        hostname: parsedUrl.hostname,
        port: object.port || 8080,
        path: parsedUrl.path,
        method: object.method || 'GET',
        auth: getAuthByHost(parsedUrl.hostname)
    };

    if (object.headers) {
        options.headers = object.headers;
    }

    var response = null;
    var req = http.request(options, function(res) {
        response = res;
        res.on('data', function (data) {
            accumulator = accumulator + data.toString();
            res.resume();
        });
        res.on('close', function () {
            // first assume accumulator is JSON object
            var responseContent;
            try {
                responseContent = JSON.parse(accumulator);
            }
            // if not object, use accumulator as string
            catch (err) {
                responseContent = accumulator;
            }
            callback(responseContent, response.statusCode);
            if (responseCB) {
                responseCB(res);
            }
        });
    });

    req.on('close', function () {
        // first assume accumulator is JSON object
        var responseContent;
        try {
            responseContent = JSON.parse(accumulator);
        }
        catch (err) {
            responseContent = accumulator;
        }
        callback(responseContent, response.statusCode);
        if (responseCB) {
            responseCB(response);
        }
    });

    if (postData) {
        req.write(postData);
    }
    req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
    url: 'jenkinsHostName:8080/jenkins/job/jobName/' + 'build',
    method: 'POST',
    port: 8080
};

var parameter = {"parameter": [{"name":"ref", "value":"origin/master"}]};
var postData;

if (!_.isEmpty(parameter)) {
    var jsonParametersString = JSON.stringify(parameter);
    jobOptions.headers = {
        'Authorization': auth,
        'Content-Type': 'application/x-www-form-urlencoded',
    };
    jobOptions.url += '?token=jobRemoteTriggerToken';
    postData = "json=" + jsonParametersString;
    console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData) ;
It is based on your code. I removed the querystring call; it seemed to return an empty string when applied to the parameters object. I also changed /buildWithParameters to /build; it didn't work the other way.
In addition, verify that when you pass 'Content-Length' in the header it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the json request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
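For illustration, with the array in place, the form-encoded body sent to Jenkins would be built roughly like this (a sketch based on the snippets above):

var parameters = { "name": "ref", "value": "origin/master" };

// Jenkins expects a single "json" form field whose value is an object
// containing a "parameter" array.
var payload = { "parameter": [parameters] };
var postData = 'json=' + encodeURIComponent(JSON.stringify(payload));

console.log(postData);
// json=%7B%22parameter%22%3A%5B%7B%22name%22%3A%22ref%22%2C%22value%22%3A%22origin%2Fmaster%22%7D%5D%7D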
I need to download or process a file from a SOAP-based web service in Node.js.
Can someone suggest how to handle this in Node.js?
I tried the 'node-soap' / 'soap' npm module. It worked for a normal SOAP web service, but not for a binary stream or MTOM-based SOAP web service.
I want to try to answer this... It's quite interesting that, 2 years and 2 months later, I still cannot figure out how to easily solve the same problem.
I'm trying to get the attachment from a response like:
...
headers: { 'cache-control': 'no-cache="set-cookie"',
'content-type': 'multipart/related;boundary="----=_Part_61_425861994.1525782562904";type="application/xop+xml";start="";start-info="text/xml"',
...
body: '------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/xop+xml; charset=utf-8;
type="text/xml"\r\nContent-Transfer-Encoding: 8bit\r\nContent-ID:
\r\n\r\n....\r\n------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/octet-stream\r\nContent-Transfer-Encoding:
binary\r\nContent-ID:
\r\n\r\n�PNG\r\n\u001a\n\u0000\u0000\u0000\rIHDR\u0000\u0000\u0002,\u0000\u0000\u0005�\b\u0006\u0........binary....
I tried ws.js, but couldn't get a solution out of it.
My solution:
var request = require("request");
var bsplit = require('buffer-split')
// it will extract "----=_Part_61_425861994.1525782562904" from the response
function getBoundaryFromResponse(response) {
    var contentType = response.headers['content-type']
    if (contentType && contentType.indexOf('boundary=') != -1) {
        return contentType.split(';')[1].replace('boundary=', '').slice(1, -1)
    }
    return null
}

function splitBufferWithPattern(binaryData, boundary) {
    var b = new Buffer(binaryData),
        delim = new Buffer(boundary),
        result = bsplit(b, delim);
    return result
}
var options = {
    method: 'POST',
    url: 'http://bla.blabal.../file',
    gzip: true,
    headers: {
        SOAPAction: 'downloadFile',
        'Content-Type': 'text/xml;charset=UTF-8'
    },
    body: '<soapenv: ... xml request of the file ... elope>'
};
var data = [];
var buffer = null;
var filename = "test.png"

request(options, function (error, response, body) {
    if (error) throw new Error(error);
    if (filename && buffer) {
        console.log("filename: " + filename)
        console.log(buffer.toString('base64'))
        // after this, we can save the file from base64 ...
    }
})
    .on('data', function (chunk) {
        data.push(chunk)
    })
    .on('end', function () {
        // split on the blank lines between part headers and bodies;
        // index 2 holds the PNG bytes
        var onlyPayload = splitBufferWithPattern(Buffer.concat(data), '\r\n\r\n')
        buffer = onlyPayload[2]
        buffer = splitBufferWithPattern(buffer, '\r\n-')[0]
        console.log('Downloaded.');
    })
I am not sure it will work in most cases. It looks like unstable code to my eyes, so I'm still looking for something better.
Use ws.js
Here is how to fetch the file attachments:
const fs = require('fs')
const ws = require('ws.js')
const { Http, Mtom } = ws

var handlers = [ new Mtom(), new Http() ];

var request = '<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope">' +
    '<s:Body>' +
    '<EchoFiles xmlns="http://tempuri.org/">' +
    '<File1 />' +
    '</EchoFiles>' +
    '</s:Body>' +
    '</s:Envelope>'

var ctx = { request: request
          , contentType: "application/soap+xml"
          , url: "http://localhost:7171/Service/mtom"
          , action: "http://tempuri.org/IService/EchoFiles"
          }

ws.send(handlers, ctx, function(ctx) {
    // read an attachment from the soap response
    var file = ws.getAttachment(ctx, "response", "//*[local-name(.)='File1']")
    // work with the file
    fs.writeFileSync("result.jpg", file)
})
Two limitations:
No basic auth is provided out of the box; a patch is required: https://github.com/yaronn/ws.js/pull/40
If the file name is a URL, you need to apply another patch in mtom.js. Replace:
xpath = "//*[#href='cid:" + encodeURIComponent(id) + "']//parent::*"
with:
xpath = "//*[#href='cid:" + id + "']//parent::*"
I'm attempting to use the ng-file-upload directive to provide file upload functionality in my angular app.
I've got it working for the most part - I can select multiple files and loop through to grab the file name and file types. I just can't seem to figure out where the actual binary data of each file is stored in the file object.
I tried using the approach outlined in this post - AngularJS Upload a file and send it to a DB, but that results in an error that "$q is not defined".
function create_blob(file) {
    var deferred = $q.defer();
    var reader = new FileReader();
    reader.onload = function () {
        deferred.resolve(reader.result);
    };
    reader.readAsDataURL(file);
    return deferred.promise;
}
So then I tried the approach outlined in this post - Send an uploaded image to the server and save it in the server, but again I'm running into an error reading "dataURI.split is not a function".
function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {
        type: mimeString
    });
}
The code I'm using is as follows:
function create_blob(file) {
    var deferred = $q.defer();
    var reader = new FileReader();
    reader.onload = function () {
        deferred.resolve(reader.result);
    };
    reader.readAsDataURL(file);
    return deferred.promise;
}

function dataURItoBlob(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {
        type: mimeString
    });
}

$scope.uploadFiles = function (files) {
    $scope.files = files;
    angular.forEach(files, function (file) {
        if (file && !file.$error) {
            //var reader = new FileReader();
            //console.log(reader.readAsDataURL(file));
            //var binary = create_blob(file);
            var fileBinary = dataURItoBlob(file);
            $http({
                url: root + '/DesktopModules/ServiceProxy/API/NetSuite/InsertCaseFile',
                method: "POST",
                //headers: { 'caseId': id, 'fileName': file.name, fileContent: $.base64.encode(file) }
                headers: { 'caseId': id, 'fileName': file.name, fileContent: fileBinary }
            }).
            success(function (data, status, headers, config) {
                //if (data == true) {
                //    getCase();
                //    $scope.newMessage = "";
                //    //toaster.pop('success', "", "Message succesfully submitted.",0);
                //}
            }).
            error(function (data, status, headers, config) {
            });

            file.upload.progress(function (evt) {
                file.progress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
            });
        }
    });
}
What am I overlooking?
It depends on what format your DB accepts for file upload. If it supports multipart form data, then you can just use
Upload.upload({file: file, url: my/db/url}).then(...);
If it accepts POST requests with the file's binary data as the content of the request (like CouchDB, imgur, ...), then you can do
Upload.http({data: file, url: my/db/url, headers: {'Content-Type': file.type}})...;
If your DB just accepts JSON objects and you want to store the file as a base64 data URL in the database, like this question, then you can do
Upload.dataUrl(file, true).then(function(dataUrl) {
    $http.post(url, {
        fileBase64DataUrl: dataUrl,
        fileName: file.name,
        id: uniqueId
    });
})