How to consume MTOM SOAP web service in node.js? - node.js

I need to download or process a file from a SOAP-based web service in Node.js.
Can someone suggest how to handle this in Node.js?
I tried the 'node-soap' / 'soap' NPM module. It worked for a normal SOAP web service, but not for a binary stream or MTOM-based SOAP web service.

I want to try to answer this... It's quite interesting that, 2 years and 2 months later, I still cannot figure out how to easily solve the same problem.
I'm trying to get the attachment from a response like:
...
headers: { 'cache-control': 'no-cache="set-cookie"',
'content-type': 'multipart/related;boundary="----=_Part_61_425861994.1525782562904";type="application/xop+xml";start="";start-info="text/xml"',
...
body: '------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/xop+xml; charset=utf-8;
type="text/xml"\r\nContent-Transfer-Encoding: 8bit\r\nContent-ID:
\r\n\r\n....\r\n------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/octet-stream\r\nContent-Transfer-Encoding:
binary\r\nContent-ID:
\r\n\r\n�PNG\r\n\u001a\n\u0000\u0000\u0000\rIHDR\u0000\u0000\u0002,\u0000\u0000\u0005�\b\u0006\u0........binary....
I tried ws.js, but that did not solve it for me.
My solution:
var request = require("request");
var bsplit = require('buffer-split');

// it will extract "----=_Part_61_425861994.1525782562904" from the response
function getBoundaryFromResponse(response) {
    var contentType = response.headers['content-type'];
    if (contentType && contentType.indexOf('boundary=') != -1) {
        return contentType.split(';')[1].replace('boundary=', '').slice(1, -1);
    }
    return null;
}

function splitBufferWithPattern(binaryData, boundary) {
    var b = Buffer.from(binaryData),
        delim = Buffer.from(boundary),
        result = bsplit(b, delim);
    return result;
}
var options = {
    method: 'POST',
    url: 'http://bla.blabal.../file',
    gzip: true,
    headers: {
        SOAPAction: 'downloadFile',
        'Content-Type': 'text/xml;charset=UTF-8'
    },
    body: '<soapenv: ... xml request of the file ... elope>'
};
var data = [];
var buffer = null;
var filename = "test.png";

request(options, function (error, response, body) {
    if (error) throw new Error(error);
    if (filename && buffer) {
        console.log("filename: " + filename);
        console.log(buffer.toString('base64'));
        // after this, we can save the file from base64 ...
    }
})
.on('data', function (chunk) {
    data.push(chunk);
})
.on('end', function () {
    // split on blank lines; the third part holds the PNG payload
    var onlyPayload = splitBufferWithPattern(Buffer.concat(data), '\r\n\r\n');
    buffer = onlyPayload[2];
    // cut off the trailing MIME boundary
    buffer = splitBufferWithPattern(buffer, '\r\n-')[0];
    console.log('Downloaded.');
});
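For completeness, once buffer holds the raw PNG bytes there is no need for a base64 round-trip; a small sketch of the final step (assuming fs is required at the top):
var fs = require('fs');
// buffer already contains the raw PNG bytes at this point
fs.writeFileSync(filename, buffer);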
I am not sure it will work in most cases. It looks like fragile code to me, so I'm looking for something better.

Use ws.js
Here is how to fetch the file attachments:
const fs = require('fs')
const ws = require('ws.js')
const { Http, Mtom } = ws

var handlers = [ new Mtom(), new Http() ]
var request = '<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope">' +
                '<s:Body>' +
                  '<EchoFiles xmlns="http://tempuri.org/">' +
                    '<File1 />' +
                  '</EchoFiles>' +
                '</s:Body>' +
              '</s:Envelope>'

var ctx = { request: request
          , contentType: "application/soap+xml"
          , url: "http://localhost:7171/Service/mtom"
          , action: "http://tempuri.org/IService/EchoFiles"
          }
ws.send(handlers, ctx, function(ctx) {
    // read an attachment from the soap response
    var file = ws.getAttachment(ctx, "response", "//*[local-name(.)='File1']")
    // work with the file
    fs.writeFileSync("result.jpg", file)
})
Two limitations:
No basic auth is provided out-of-the-box; a patch is required: https://github.com/yaronn/ws.js/pull/40
If the file name is a URL, you need to apply another patch in mtom.js. Replace:
xpath = "//*[@href='cid:" + encodeURIComponent(id) + "']//parent::*"
with:
xpath = "//*[@href='cid:" + id + "']//parent::*"

Related

Stream data from variable to PUT request

What I've been looking to do is make a GET request, manipulate the JSON data, store it in a variable, and then make a PUT request. I can't seem to find documentation on this. Maybe I am thinking about this wrong. Once I have the variable, I want to do something like the below. I have all of my data from the GET request saved to an outputV3.json file.
var outputJson = fs.readFileSync("outputV3.JSON");
outputJson = JSON.parse(outputJson);
(function () {
    for (let i = 0; i < outputJson.objects.length; i++) {
        let postId = outputJson.objects[i].id.toString();
        let newSlug = outputJson.objects[i].slug.replace("blog/", "");
        let urlToPut = "https://api.hubapi.com/content/api/v2/blog-posts?limit=1000&hapikey=" + process.env.HAPIKEY;
        urlToPut = urlToPut.replace("blogPostId", postId);
        let put_data = JSON.stringify({
            "slug": newSlug
        });
        put_data.put(urlToPut);
    }
});
If you need to stream your data from a file to the request, you should create a read stream from the file and pipe it to the destination:
const fs = require('fs');
const http = require('http');

const dataStream = fs.createReadStream('outputV3.JSON');
const options = {
    hostname: 'www.example.com',
    port: 80,
    path: '/destination',
    method: 'PUT',
    headers: {
        'Content-Type': 'application/json',
    }
};
const req = http.request(options, (res) => {
    // response processing...
});
dataStream.pipe(req);
And if you need more advanced logic for streaming, you should consider putting a custom Transform stream between the readable file stream and the writable request stream, as sketched below.
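A minimal sketch of that idea, reusing dataStream and req from above; the rewrite rule here is made up for illustration:
const { Transform } = require('stream');

// strips the "blog/" prefix from any slug values flowing through the stream
const rewriteSlugs = new Transform({
    transform(chunk, encoding, callback) {
        // note: a real implementation would need to handle matches split across chunk boundaries
        callback(null, chunk.toString().replace(/blog\//g, ''));
    }
});

dataStream.pipe(rewriteSlugs).pipe(req);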
If I wanted to use request-promise, could I do something like the following?
let options = {
    uri: 'https://api.hubapi.com/content/api/v2/blog-posts?limit=1000&hapikey=' + process.env.HAPIKEY,
    method: 'GET',
    transform: function (body, response) {
        return for (var i = 0; i < outputJson.objects.length; i++) {
            var postId = outputJson.objects[i].id.toString();
            var newSlug = outputJson.objects[i].slug.replace("blog/", "");
        }
    }
};
rp(options)
    .then(function (removedSlug) {
        .pipe(request.put('https://api.hubapi.com/content/api/v2/blog-posts/blogPostId?hapikey=' + process.env.HAPIKEY))
    });
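For reference, the transform option in request-promise receives the parsed body and returns whatever .then() should see; a sketch along those lines, reusing the URL and field names from the question:
const rp = require('request-promise');

const options = {
    uri: 'https://api.hubapi.com/content/api/v2/blog-posts?limit=1000&hapikey=' + process.env.HAPIKEY,
    json: true, // parse the response body as JSON
    transform: function (body) {
        // keep only what the later PUT requests need
        return body.objects.map(function (post) {
            return { id: post.id.toString(), slug: post.slug.replace('blog/', '') };
        });
    }
};

rp(options).then(function (posts) {
    // posts is the transformed array; issue one PUT per entry here
});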

NodeJS Read file, make node-rest-client call to get related data and add retrieved response as element in JSON

I am reading a JSON file using fs.readFileSync and, for each document obtained, I am making a REST API call using client.post. Once I get a response, I want to place the received content into another JSON file which is a replica of the input JSON, except for an additional element holding the data received from the client.post call. However, probably because of the async nature of client.post, I am unable to add the element to the output JSON. I am new to NodeJS. Can you please point out what I am missing? Below are the code and data.
data:
[
    {
        "ticker": "CLYD"
    },
    {
        "ticker": "EGH"
    }
]
Code:
var fs = require('fs');
var Client = require('node-rest-client').Client;

var data = fs.readFileSync(__dirname + "/data/stocks.json", "utf8");
processData(data);

function processData (data) {
    var obj = JSON.parse(data);
    for (j = 0; j < obj.length; j++) {
        obj[j].stockInformation = getValuesForTicker(obj[j].ticker.trim());
    }
    var jsonOutput = JSON.stringify(obj, null, '\t');
    fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
};
function getValuesForTicker (ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    var client = new Client();
    var values;
    // set content-type header and data as json in args parameter
    var args = {
        data: { "ticker": ticker },
        headers: { "Content-Type": "application/json", "Accept": "application/json" }
    };
    var responseToRequest = client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
        // parsed response body as js object
        values = JSON.parse(JSON.stringify(data)).price;
    });
    return values;
};
Since getValuesForTicker makes an async call to fetch the data, it should take a callback to be invoked once the data is received (or, better, return a promise) instead of returning the result directly (currently undefined is returned, because the function returns before the value is assigned):
function getValuesForTicker (ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    return new Promise(function (resolve, reject) {
        var client = new Client();
        // set content-type header and data as json in args parameter
        var args = {
            data: { "ticker": ticker },
            headers: { "Content-Type": "application/json", "Accept": "application/json" }
        };
        client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
            // parsed response body as js object
            resolve(JSON.parse(JSON.stringify(data)).price);
        });
    });
}
To get the data once the async call is done, you will need to call the then function, as below:
getValuesForTicker(obj[j].ticker.trim())
    .then(function (val) {
        obj[j].stockInformation = val;
    });
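Putting it together, a minimal sketch of how processData could wait for every ticker before writing the output file (assuming the promise-based getValuesForTicker above):
function processData (data) {
    var obj = JSON.parse(data);
    // fire one request per ticker and collect the pending promises
    var pending = obj.map(function (entry) {
        return getValuesForTicker(entry.ticker.trim()).then(function (price) {
            entry.stockInformation = price;
        });
    });
    // write the file only after every request has resolved
    Promise.all(pending).then(function () {
        var jsonOutput = JSON.stringify(obj, null, '\t');
        fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
    });
}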
Considering you are new to Node.js, this may be hard to grasp at first. Take some time to understand callbacks and promises.

Azure Functions - NodeJS - Response Body as a Stream

I'd like to return a file from Blob Storage when you hit a given Azure Function end-point. This file is binary data.
Per the Azure Storage Blob docs, the most relevant call appears to be the following, since it's the only one that doesn't require writing the file to an interim file:
getBlobToStream
However this call gets the Blob and writes it to a stream.
Is there a way with Azure Functions to use a Stream as the value of res.body so that I can get the Blob Contents from storage and immediately write it to the response?
To add some code, I am trying to get something like this to work:
'use strict';
const azure = require('azure-storage'),
      stream = require('stream');
const BLOB_CONTAINER = 'DeContainer';

module.exports = function(context){
    var file = context.bindingData.file;
    var blobService = azure.createBlobService();
    var outputStream = new stream.Writable();

    blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
        if(error) {
            FileNotFound(context);
        } else {
            context.res = {
                status: 200,
                headers: {
                },
                isRaw: true,
                body : outputStream
            };
            context.done();
        }
    });
}

function FileNotFound(context){
    context.res = {
        status: 404,
        headers: {
            "Content-Type" : "application/json"
        },
        body : { "Message" : "No esta aqui!." }
    };
    context.done();
}
Unfortunately we don't have streaming support implemented in NodeJS just yet - it's on the backlog: https://github.com/Azure/azure-webjobs-sdk-script/issues/1361
If you're not tied to NodeJS and are open to using a C# function instead, you can use the storage SDK object directly in your input bindings and stream the request output, instead of using the intermediate object approach.
While @Matt Manson's answer is definitely correct based on the way I asked my question, the following code snippet might be more useful for someone who stumbles across this question.
While I can't send the Stream to the response body directly, I can use a custom stream which captures the data into a Uint8Array and then sends that to the response body.
NOTE: If the file is REALLY big, this will use a lot of memory.
'use strict';
const azure = require('azure-storage'),
      stream = require('stream');
const BLOB_CONTAINER = 'deContainer';

module.exports = function(context){
    var file = context.bindingData.file;
    var blobService = azure.createBlobService();
    var outputStream = new stream.Writable();
    outputStream.contents = new Uint8Array(0); // Initialize contents.

    // Override the write to store the value to our "contents"
    outputStream._write = function (chunk, encoding, done) {
        var curChunk = new Uint8Array(chunk);
        var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
        tmp.set(this.contents, 0);
        tmp.set(curChunk, this.contents.byteLength);
        this.contents = tmp;
        done();
    };

    blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
        if(error) {
            FileNotFound(context);
        } else {
            context.res = {
                status: 200,
                headers: {
                },
                isRaw: true,
                body : outputStream.contents
            };
            context.done();
        }
    });
}

function FileNotFound(context){
    context.res = {
        status: 404,
        headers: {
            "Content-Type" : "application/json"
        },
        body : { "Message" : "No esta aqui!" }
    };
    context.done();
}
I tried @Doug's solution from the last comment above, with a few minor mods in my Azure function, and so far, after trying 20 different ideas, this is the only one that actually delivered the file to the browser! Thank you, @Doug...
const fs = require("fs");
const stream = require("stream");
...
const AzureBlob = require('#[my_private_artifact]/azure-blob-storage');
const azureStorage = new AzureBlob(params.connectionString);
//Override the write to store the value to our "contents" <-- Doug's solution
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
let azureSpeedResult = await azureStorage.downloadBlobToStream(params.containerName, params.objectId, outputStream);
let headers = {
"Content-Length": azureSpeedResult.size,
"Content-Type": mimeType
};
if (params.action == "download") {
headers["Content-Disposition"] = "attachment; filename=" + params.fileName;
}
context.res = {
status: 200,
headers: headers,
isRaw: true,
body: outputStream.contents
};
context.done();
...
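For reference, a slightly simpler variant of the accumulation approach used in both snippets above is to collect the chunks as Buffers and concatenate them once at the end; this is only a sketch, not code from either answer:
var chunks = [];
var outputStream = new stream.Writable();
outputStream._write = function (chunk, encoding, done) {
    chunks.push(chunk); // each chunk arrives as a Buffer
    done();
};
// after the download completes:
// context.res = { status: 200, isRaw: true, body: Buffer.concat(chunks) };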

how to do put request using node.js and add the json file

Hi, I am new to this domain. I am trying to do a PUT request and add the JSON file. I have the JSON file created, and I have to perform PUT and POST requests using the URIs. Please can anyone post code using Node.js? It would be helpful. I created a PUT request file like this:
var i = 0;
var fs = require("fs");
var request = require('request');

var jsonPath = fs.readFileSync('filepath');
// String --> Object
var jsonObj = JSON.parse(jsonPath);
console.log(changedevicename.call());

for (i = 0; i < jsonObj.ipConfig.length; i++) {
    var ipv4URI = jsonObj.ipConfig[i].ipv4; // taking ipv4 json file
    var ipv6URI = jsonObj.ipConfig[i].ipv6; // taking ipv6 json file
    console.log(ipv4URI);
    console.log(ipv6URI);
    rest_service();
    //console.log(config[i]);
}

function rest_service() // should I change this or what
{
    var i = 0;
    var request = require('request');
    var options = {
        url: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + 'IPV6' + 'URI',
        method: 'PUT',
    }
    {
        //IP = userGivenIP;
        //IP = '192.168.0.1';
        request(
            {
                method: 'PUT',
                url: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + 'IPV6' + 'URI', //
                headers: {
                    'Content-Type': 'application/json', // check this, I should change this
                },
                //var ip4Json = JSON.parse(body); // check this, I should change this
                //console.log('\n\n'+ body + '\n\n');
            },
            function (error, response, body) // check this, I should change this
            {
                if (error != undefined) {
                    console.log(body);
                }
                else {
                    console.log("printerror", error);
                    console.log("IP disabled");
                }
            });
    }
}
This code has to be done dynamically, but I am not getting how to do this for PUT and POST requests. Please help me out. Thanks and regards,
Prathamesh
You can add a body parameter to request.
const jsonBody = {
    key1: value1,
    key2: value2
};
const headers = {
    authorization: "<token>"
};
const options = {
    method: 'PUT',
    uri: "some-url",
    headers: headers, // headers if your api requires
    body: jsonBody,
    json: true
};
request(options, function(err, response) {
    // handle err first
    // do stuff with response
});
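For the case in the question, where the body comes from a JSON file on disk, a minimal sketch along the same lines (the file path and URL are placeholders):
const fs = require('fs');
const request = require('request');

const jsonBody = JSON.parse(fs.readFileSync('filepath', 'utf8')); // placeholder path

const options = {
    method: 'PUT',
    uri: 'http://USERNAME:PASSWORD@host/resource', // placeholder URL with basic auth
    body: jsonBody,
    json: true // serializes the body and sets Content-Type: application/json
};

request(options, function (err, response, body) {
    if (err) {
        return console.error(err);
    }
    console.log(response.statusCode, body);
});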
You should go through the docs: https://www.npmjs.com/package/request

Jenkins Git Plugin does not receive posted Parameters

I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
    url: requestedJobObject.url + 'build',
    method: 'POST',
    port: 8080
};

// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
    var jsonParametersString = JSON.stringify({"parameter": parameters});
    var parameterParam = encodeURIComponent(jsonParametersString);
    parameters.json = parameterParam;
    jobOptions.headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': querystring.stringify(parameters).length
    };
    jobOptions.url += 'WithParameters';
    postData = querystring.stringify(parameters);
}

// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
    var accumulator = '';
    var parsedUrl = u.parse('//' + object.url, true, true);
    var options = {
        hostname: parsedUrl.hostname,
        port: object.port || 8080,
        path: parsedUrl.path,
        method: object.method || 'GET',
        auth: getAuthByHost(parsedUrl.hostname)
    };
    if (object.headers) {
        options.headers = object.headers;
    }
    var response = null;
    var req = http.request(options, function(res) {
        response = res;
        res.on('data', function (data) {
            accumulator = accumulator + data.toString();
            res.resume();
        });
        res.on('close', function () {
            // first assume accumulator is JSON object
            var responseContent;
            try {
                responseContent = JSON.parse(accumulator);
            }
            // if not object, use accumulator as string
            catch (err) {
                responseContent = accumulator;
            }
            callback(responseContent, response.statusCode);
            if (responseCB) {
                responseCB(res);
            }
        });
    });
    req.on('close', function () {
        // first assume accumulator is JSON object
        var responseContent;
        try {
            responseContent = JSON.parse(accumulator);
        }
        catch (err) {
            responseContent = accumulator;
        }
        callback(responseContent, response.statusCode);
        if (responseCB) {
            responseCB(response);
        }
    });
    if (postData) {
        req.write(postData);
    }
    req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';

var jobOptions = {
    url: 'jenkinsHostName:8080/jenkins/job/jobName/' + 'build',
    method: 'POST',
    port: 8080
};

var parameter = {"parameter": [{"name": "ref", "value": "origin/master"}]};
var postData;

if (!_.isEmpty(parameter)) {
    var jsonParametersString = JSON.stringify(parameter);
    jobOptions.headers = {
        'Authorization': auth,
        'Content-Type': 'application/x-www-form-urlencoded',
    };
    jobOptions.url += '?token=jobRemoteTriggerToken';
    postData = "json=" + jsonParametersString;
    console.log("postData = " + postData);
}

var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData);
It is based on your code. I removed the querystring call - it seems that it returned an empty string when performed on the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass the 'Content-Length' header, it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the JSON request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
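Combining the two answers, the form body Jenkins expects would look roughly like this sketch (the token and job URL are placeholders, and URL-encoding the JSON is the safe choice for a form-encoded body):
var parameters = { "name": "ref", "value": "origin/master" };
var jsonParametersString = JSON.stringify({ "parameter": [parameters] }); // note the array
var postData = "json=" + encodeURIComponent(jsonParametersString);
// POST postData to http://jenkinsHost:8080/job/jobName/build?token=jobRemoteTriggerToken
// with header Content-Type: application/x-www-form-urlencoded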
