I am getting an error like "the site cannot be accessed" in my SharePoint-hosted app.
It occurs when I move from one page to another page within the same app. Please help, thanks in advance.
This is the Default.aspx code:
<script>
'use strict';
var appWebUrl, hostWebUrl;
jQuery(document).ready(function () {
// Check for FileReader API (HTML5) support.
if (!window.FileReader) {
alert('This browser does not support the FileReader API.');
}
// Get the add-in web and host web URLs.
appWebUrl = decodeURIComponent(getQueryStringParameter("SPAppWebUrl"));
hostWebUrl = decodeURIComponent(getQueryStringParameter("SPHostUrl"));
});
function getQueryStringParameter(paramToRetrieve) {
var params = document.URL.split("?")[1].split("&");
for (var i = 0; i < params.length; i = i + 1) {
var singleParam = params[i].split("=");
if (singleParam[0] == paramToRetrieve) return singleParam[1];
}
}
function F1()
{
window.location.href=sphosturl+'pages/uploadform.aspx';
}
</script>
<div>
<input type='button' value='clickheretoUploadfile' onclick='F1()'/>
</div>
When the user clicks the "clickheretoUploadfile" button, it redirects to uploadform.aspx.
This is the uploadform.aspx code:
<script>
'use strict';
jQuery(document).ready(function () {
// Check for FileReader API (HTML5) support.
if (!window.FileReader) {
alert('This browser does not support the FileReader API.');
}
});
// Upload the file.
// You can upload files up to 2 GB with the REST API.
function uploadFile() {
// Define the folder path for this example.
var serverRelativeUrlToFolder = '/shared documents';
// Get test values from the file input and text input page controls.
var fileInput = jQuery('#getFile');
var newName = jQuery('#displayName').val();
// Get the server URL.
var serverUrl = _spPageContextInfo.webAbsoluteUrl;
// Initiate method calls using jQuery promises.
// Get the local file as an array buffer.
var getFile = getFileBuffer();
getFile.done(function (arrayBuffer) {
// Add the file to the SharePoint folder.
var addFile = addFileToFolder(arrayBuffer);
addFile.done(function (file, status, xhr) {
// Get the list item that corresponds to the uploaded file.
var getItem = getListItem(file.d.ListItemAllFields.__deferred.uri);
getItem.done(function (listItem, status, xhr) {
// Change the display name and title of the list item.
var changeItem = updateListItem(listItem.d.__metadata);
changeItem.done(function (data, status, xhr) {
alert('file uploaded and updated');
});
changeItem.fail(onError);
});
getItem.fail(onError);
});
addFile.fail(onError);
});
getFile.fail(onError);
// Get the local file as an array buffer.
function getFileBuffer() {
var deferred = jQuery.Deferred();
var reader = new FileReader();
reader.onloadend = function (e) {
deferred.resolve(e.target.result);
}
reader.onerror = function (e) {
deferred.reject(e.target.error);
}
reader.readAsArrayBuffer(fileInput[0].files[0]);
return deferred.promise();
}
// Add the file to the file collection in the Shared Documents folder.
function addFileToFolder(arrayBuffer) {
// Get the file name from the file input control on the page.
var parts = fileInput[0].value.split('\\');
var fileName = parts[parts.length - 1];
// Construct the endpoint.
var fileCollectionEndpoint = String.format(
"{0}/_api/web/getfolderbyserverrelativeurl('{1}')/files" +
"/add(overwrite=true, url='{2}')",
serverUrl, serverRelativeUrlToFolder, fileName);
// Send the request and return the response.
// This call returns the SharePoint file.
return jQuery.ajax({
url: fileCollectionEndpoint,
type: "POST",
data: arrayBuffer,
processData: false,
headers: {
"accept": "application/json;odata=verbose",
"X-RequestDigest": jQuery("#__REQUESTDIGEST").val(),
"content-length": arrayBuffer.byteLength
}
});
}
// Get the list item that corresponds to the file by calling the file's ListItemAllFields property.
function getListItem(fileListItemUri) {
// Send the request and return the response.
return jQuery.ajax({
url: fileListItemUri,
type: "GET",
headers: { "accept": "application/json;odata=verbose" }
});
}
// Change the display name and title of the list item.
function updateListItem(itemMetadata) {
// Define the list item changes. Use the FileLeafRef property to change the display name.
// For simplicity, also use the name as the title.
// The example gets the list item type from the item's metadata, but you can also get it from the
// ListItemEntityTypeFullName property of the list.
var body = String.format("{{'__metadata':{{'type':'{0}'}},'FileLeafRef':'{1}','Title':'{2}'}}",
itemMetadata.type, newName, newName);
// Send the request and return the promise.
// This call does not return response content from the server.
return jQuery.ajax({
url: itemMetadata.uri,
type: "POST",
data: body,
headers: {
"X-RequestDigest": jQuery("#__REQUESTDIGEST").val(),
"content-type": "application/json;odata=verbose",
"content-length": body.length,
"IF-MATCH": itemMetadata.etag,
"X-HTTP-Method": "MERGE"
}
});
}
}
// Display error messages.
function onError(error) {
alert(error.responseText);
}
</script>
<input id="getFile" type="file"/><br />
<input id="displayName" type="text" value="Enter a unique name" /><br />
<input id="addFileButton" type="button" value="Upload" onclick="uploadFile()">
The problem is that when I perform the upload on the Default.aspx page it works fine, but when I redirect from that page to the upload page and perform the upload there, I get the error.
The first question is: where is your "sphosturl" variable defined in the Default.aspx code? I guess you meant "appWebUrl".
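A minimal corrected sketch of F1 (assumptions: the upload page lives under Pages/ in the add-in web, appWebUrl has no trailing slash, and the original query string is carried along so uploadform.aspx can still read the SPAppWebUrl/SPHostUrl tokens):
function F1() {
// "sphosturl" is never defined; use the appWebUrl captured on page load.
// Re-appending window.location.search is an assumption: it keeps the
// ?SPAppWebUrl=...&SPHostUrl=... tokens available after the redirect.
window.location.href = appWebUrl + '/Pages/uploadform.aspx' + window.location.search;
}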
From your code, it seems you want your SharePoint-hosted add-in to upload files to the add-in web, so you must confirm that you have a document library in your app and set the correct location in the "serverRelativeUrlToFolder" variable; otherwise it will throw an Access Denied error. The tested code below is for your reference (I have added the document library to my app; see also the note after the code):
'use strict';
jQuery(document).ready(function () {
// Check for FileReader API (HTML5) support.
if (!window.FileReader) {
alert('This browser does not support the FileReader API.');
}
});
// Upload the file.
// You can upload files up to 2 GB with the REST API.
function uploadFile() {
// Define the folder path for this example.
var serverRelativeUrlToFolder = 'Lists/DL';
// Get test values from the file input and text input page controls.
var fileInput = jQuery('#getFile');
var newName = jQuery('#displayName').val();
// Get the server URL.
var serverUrl = _spPageContextInfo.webAbsoluteUrl;
// Initiate method calls using jQuery promises.
// Get the local file as an array buffer.
var getFile = getFileBuffer();
getFile.done(function (arrayBuffer) {
// Add the file to the SharePoint folder.
var addFile = addFileToFolder(arrayBuffer);
addFile.done(function (file, status, xhr) {
// Get the list item that corresponds to the uploaded file.
var getItem = getListItem(file.d.ListItemAllFields.__deferred.uri);
getItem.done(function (listItem, status, xhr) {
// Change the display name and title of the list item.
var changeItem = updateListItem(listItem.d.__metadata);
changeItem.done(function (data, status, xhr) {
alert('file uploaded and updated');
});
changeItem.fail(onError);
});
getItem.fail(onError);
});
addFile.fail(onError);
});
getFile.fail(onError);
// Get the local file as an array buffer.
function getFileBuffer() {
var deferred = jQuery.Deferred();
var reader = new FileReader();
reader.onloadend = function (e) {
deferred.resolve(e.target.result);
}
reader.onerror = function (e) {
deferred.reject(e.target.error);
}
reader.readAsArrayBuffer(fileInput[0].files[0]);
return deferred.promise();
}
// Add the file to the file collection in the Shared Documents folder.
function addFileToFolder(arrayBuffer) {
// Get the file name from the file input control on the page.
var parts = fileInput[0].value.split('\\');
var fileName = parts[parts.length - 1];
// Construct the endpoint.
var fileCollectionEndpoint = String.format(
"{0}/_api/web/getfolderbyserverrelativeurl('{1}')/files" +
"/add(overwrite=true, url='{2}')",
serverUrl, serverRelativeUrlToFolder, fileName);
// Send the request and return the response.
// This call returns the SharePoint file.
return jQuery.ajax({
url: fileCollectionEndpoint,
type: "POST",
data: arrayBuffer,
processData: false,
headers: {
"accept": "application/json;odata=verbose",
"X-RequestDigest": jQuery("#__REQUESTDIGEST").val(),
"content-length": arrayBuffer.byteLength
}
});
}
// Get the list item that corresponds to the file by calling the file's ListItemAllFields property.
function getListItem(fileListItemUri) {
// Send the request and return the response.
return jQuery.ajax({
url: fileListItemUri,
type: "GET",
headers: { "accept": "application/json;odata=verbose" }
});
}
// Change the display name and title of the list item.
function updateListItem(itemMetadata) {
// Define the list item changes. Use the FileLeafRef property to change the display name.
// For simplicity, also use the name as the title.
// The example gets the list item type from the item's metadata, but you can also get it from the
// ListItemEntityTypeFullName property of the list.
var body = String.format("{{'__metadata':{{'type':'{0}'}},'FileLeafRef':'{1}','Title':'{2}'}}",
itemMetadata.type, newName, newName);
// Send the request and return the promise.
// This call does not return response content from the server.
return jQuery.ajax({
url: itemMetadata.uri,
type: "POST",
data: body,
headers: {
"X-RequestDigest": jQuery("#__REQUESTDIGEST").val(),
"content-type": "application/json;odata=verbose",
"content-length": body.length,
"IF-MATCH": itemMetadata.etag,
"X-HTTP-Method": "MERGE"
}
});
}
}
// Display error messages.
function onError(error) {
alert(error.responseText);
}
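As a side note, if the document library lives in the add-in web, you can derive the folder location from the page context instead of hard-coding it. A small sketch (assuming the 'DL' library used above, and that the add-in web is not at the site collection root):
// _spPageContextInfo.webServerRelativeUrl is the add-in web's server-relative URL.
var serverRelativeUrlToFolder = _spPageContextInfo.webServerRelativeUrl + '/Lists/DL';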
Related
Is it possible to attach a file from Firebase Storage?
I tried the following code but it doesn't work:
var mailgun = require("mailgun-js");
var api_key = 'key-acf9f881e32c85b3c0dad34358507a95';
var DOMAIN = 'sandbox76c6f74ddab14862816390c16f37a272.mailgun.org';
var mailgun = require('mailgun-js')({apiKey: api_key, domain: DOMAIN});
var path = require("path");
var filepath = path.join(`gs://i-m-here-c01f6.appspot.com/Groups/${leaderId}`, 'group_image.jpg');
var data = {
from: 'Excited User <postmaster@sandbox76c6f74ddab14862816390c16f37a272.mailgun.org>',
to: 'rayteamstudio@gmail.com',
subject: 'Complex',
text: 'Group Creation Request',
html: `<p>A user named: ${fromName} wants to create a group.<br />
User ID: ${leaderId}<br />
Group Name: ${groupName}<br />
Group Description: ${groupDescription}<br /><br />
To Accept the request click here:<br />
https://us-central1-i-m-here-c01f6.cloudfunctions.net/acceptOrDenyGroupCreation?leaderID=${leaderId}&requestStatus=approved <br /><br />
To Deny the request click here:<br />
https://us-central1-i-m-here-c01f6.cloudfunctions.net/acceptOrDenyGroupCreation?leaderID=${leaderId}&requestStatus=denied /></p>`,
attachment: filepath
};
mailgun.messages().send(data, function (error, body) {
if(error)
console.log('email err: ',error);
});
Please help.
You can't use a gs://bucket-name/path-to-file URL to download a file from Cloud Storage as if it were an HTTP URL. Instead, you'll have to do one of these:
Use the Cloud Storage SDK to download the file locally, then attach it to your email
Or, use the Cloud Storage SDK to generate a "Signed URL", which will give you an HTTPS URL to the file that can be used to download it (see the sketch after this list).
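For the second option, a minimal sketch using the Firebase Admin SDK (the bucket and file path are taken from the question, leaderId is assumed to be in scope, and the expiry date is an arbitrary assumption):
const admin = require('firebase-admin');
admin.initializeApp();
// Generate a time-limited HTTPS URL for the stored file.
const bucket = admin.storage().bucket('i-m-here-c01f6.appspot.com');
const file = bucket.file(`Groups/${leaderId}/group_image.jpg`);
file.getSignedUrl({ action: 'read', expires: '2030-01-01' }).then(([signedUrl]) => {
// signedUrl is a plain HTTPS link that request() or mailgun can download.
});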
You can get it as a buffer and send it like this:
var request = require('request');
var file = request("https://www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png");
var data = {
from: 'Excited User <me@samples.mailgun.org>',
to: 'serobnic@mail.ru',
subject: 'Hello',
text: 'Testing some Mailgun awesomeness!',
attachment: file
};
mailgun.messages().send(data, function (error, body) {
console.log(body);
});
from here
In the example you've given, you are using the gs://bucket-name/path to download the file from Cloud Storage and send it as an attachment. In order to do so, you would need to use the request dependency as per the docs:
https://github.com/bojand/mailgun-js#attachments
In this case, all you would have to do is:
// var path = require("path");
const request = require('request');
var filepath = request(`gs://i-m-here-c01f6.appspot.com/Groups/${leaderId}`);
If you wanted to get more specific about assigning properties to the file, you can use new mailgun.Attachment(options) to pass in an options parameter that looks something like this:
options: {
data: filepath,
filename: 'name.jpg',
contentType: 'image/jpeg',
knownLength: 2019121,
};
Where
data - can be one of:
a string representing file path to the attachment
a buffer of file data
an instance of Stream which means it is a readable stream.
filename - the file name to be used for the attachment. Default is 'file'
contentType - the content type. Required for case of Stream data. Ex. image/jpeg.
knownLength - the content length in bytes. Required for case of Stream data.
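Putting those options together, a hedged sketch (assuming file already holds a Buffer with the image bytes, e.g. downloaded via the Cloud Storage SDK, and data is the mail object from the question):
var attachment = new mailgun.Attachment({
data: file, // a buffer of file data
filename: 'group_image.jpg',
contentType: 'image/jpeg',
knownLength: file.length // content length in bytes
});
data.attachment = attachment;
mailgun.messages().send(data, function (error, body) {
if (error) console.log('email err: ', error);
});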
That's an example for Nodemailer. It took me a long time to dynamically attach a file from Cloud Storage for Firebase, so hopefully this helps someone out. You can find many similarities.
All the examples are in Angular / TypeScript.
It starts somewhere in the component.html file
<form
#formDirective="ngForm"
[formGroup]="contactForm"
(ngSubmit)="onSubmit(contactForm.value, formDirective)"
>
<mat-form-field>
<ngx-mat-file-input
(change)="uploadFile($event)"
formControlName="fileUploader"
multiple
type="file"
>
</ngx-mat-file-input>
</mat-form-field>
</form>
In component.ts
import { AngularFirestore } from '@angular/fire/firestore';
import {
AngularFireStorage,
AngularFireStorageReference,
AngularFireUploadTask,
} from '@angular/fire/storage';
import {
FormBuilder,
FormGroup,
FormGroupDirective,
Validators,
} from '@angular/forms';
import { throwError } from 'rxjs';
import { catchError, finalize } from 'rxjs/operators';
...
constructor(
private angularFirestore: AngularFirestore,
private angularFireStorage: AngularFireStorage,
private formBuilder: FormBuilder
) {}
public contentType: string[] = [];
public downloadURL: string[] = [];
public fileName: string = '';
public maxFileSize = 20971520;
public contactForm: FormGroup = this.formBuilder.group({
fileUploader: [
'',
Validators.compose([
// Your validators rules...
]),
],
...
});
...
public onSubmit(form: any, formDirective: FormGroupDirective): void {
form.contentType = this.contentType;
form.fileUploader = this.downloadURL;
form.fileName = this.fileName;
this.angularFirestore
.collection(String(process.env.FIRESTORE_COLLECTION_MESSAGES)) // Make sure the environmental variable is a string.
.add(form)
.then(() => {
// Your logic, such as alert...
})
.catch(() => {
// Your error handling logic...
});
}
public uploadFile(event: any): void {
// Iterate through all uploaded files.
for (let i = 0; i < event.target.files.length; i++) {
const file = event.target.files[i]; // Get each uploaded file.
const fileName = file.name + '_' + Date.now(); // It makes sure files with the same name will be uploaded more than once and each of them will have unique ID, showing date (in milliseconds) of the upload.
this.contentType = file.type;
this.fileName = fileName;
// Get file reference.
const fileRef: AngularFireStorageReference = this.angularFireStorage.ref(
fileName
);
// Create upload task.
const task: AngularFireUploadTask = this.angularFireStorage.upload(
fileName,
file,
file.type
);
// Upload file to Cloud Firestore.
task
.snapshotChanges()
.pipe(
finalize(() => {
fileRef.getDownloadURL().subscribe((downloadURL: string) => {
this.angularFirestore
.collection(String(process.env.FIRESTORE_COLLECTION_FILES)) // Make sure the environmental variable is a string.
.add({ downloadURL: downloadURL });
this.downloadURL.push(downloadURL);
});
}),
catchError((error: any) => {
return throwError(error);
})
)
.subscribe();
}
}
That's all frontend, now it's finally time for our backend.
import { DocumentSnapshot } from 'firebase-functions/lib/providers/firestore';
import { EventContext } from 'firebase-functions';
import * as nodemailer from 'nodemailer';
import Mail from 'nodemailer/lib/mailer'; // assumes esModuleInterop; otherwise: import Mail = require('nodemailer/lib/mailer');
async function onCreateSendEmail(
snap: DocumentSnapshot,
_context: EventContext
) {
try {
const contactFormData = snap.data();
// You can use those to debug your code...
console.log('Submitted contact form: ', contactFormData);
console.log('context: ', _context); // This log will be shown in Firebase Functions logs.
const mailTransport: Mail = nodemailer.createTransport({
// Make sure the environmental variables have proper typings.
host: String(process.env.MAIL_HOST),
port: Number(process.env.MAIL_PORT),
auth: {
user: String(process.env.MAIL_ACCOUNT),
pass: String(process.env.MAIL_PASSWORD),
},
tls: {
rejectUnauthorized: false, //! Fix ERROR "Hostname/IP doesn't match certificate's altnames".
},
});
const mailOptions = {
attachments: [
{
contentType: `${contactFormData!.contentType}`,
filename: `${contactFormData!.fileName}`,
path: `${contactFormData!.fileUploader}`,
},
],
... // Your other mails options such as bcc, from, to, subject, html...
};
await mailTransport.sendMail(mailOptions);
} catch (err) {
console.error(err);
}
}
That's more or less all the headache, from frontend to backend, that a developer has to go through to dynamically attach a file to an email with the correct contentType, downloadURL, and fileName. I was missing a complete solution for this case across the web; that's why the front-to-back answer.
Note:
The frontend is handling multiple file uploads on the UI / Cloud Storage for Firebase side, and all the files are uploaded to Firebase. However, only one dynamically added attachment is working fine. I still can't figure out how to handle multiple dynamic file uploads; one possible approach is sketched below.
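A sketch of that approach (not tested: it assumes you change contentType and fileName on the frontend to arrays pushed in step with downloadURL, so the three arrays stay parallel):
// Hypothetical: fileUploader, contentType, and fileName are parallel arrays.
const attachments = contactFormData!.fileUploader.map((url: string, i: number) => ({
contentType: contactFormData!.contentType[i],
filename: contactFormData!.fileName[i],
path: url, // the download URL collected during upload
}));
const mailOptions = { attachments /* ...plus your other mail options... */ };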
I am reading a JSON file using fs.readFileSync, and for each document obtained I am making a REST API call using client.post. Once I get the response, I want to place the received content into another JSON file, which is a replica of the input JSON except for an additional element holding the data received from the client.post call. However, probably because of the async nature of client.post, I am unable to add the element to the output JSON. I am new to Node.js. Can you please point out where I am going wrong? Below are the code and data.
data:
[
{
"ticker": "CLYD"
},
{
"ticker": "EGH"
}
]
Code:
var fs = require('fs');
var Client = require('node-rest-client').Client;
var data = fs.readFileSync(__dirname + "/data/stocks.json", "utf8");
processData(data);
function processData (data) {
var obj = JSON.parse(data);
for (j = 0; j < obj.length; j++) {
obj[j].stockInformation = getValuesForTicker (obj[j].ticker.trim());
}
var jsonOutput = JSON.stringify(obj,null,'\t');
fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
};
function getValuesForTicker (ticker) {
/**
* More details and samples at https://www.npmjs.com/package/node-rest-client
*/
var client = new Client();
var values;
// set content-type header and data as json in args parameter
var args = {
data: { "ticker" : ticker},
headers: { "Content-Type": "application/json", "Accept" : "application/json" }
};
var responseToRequest = client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
// parsed response body as js object
values = JSON.parse(JSON.stringify(data)).price;
});
return values;
};
Since getValuesForTicker makes an async call to fetch data, it should invoke a callback once the data is received (or better, return a promise) rather than return the result directly (currently undefined is returned, because the function returns before the value is assigned):
function getValuesForTicker (ticker) {
/**
* More details and samples at https://www.npmjs.com/package/node-rest-client
*/
return new Promise(function (resolve, reject) {
var client = new Client();
// set content-type header and data as json in args parameter
var args = {
data: { "ticker": ticker },
headers: { "Content-Type": "application/json", "Accept": "application/json" }
};
client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
// parsed response body as js object
var values = JSON.parse(JSON.stringify(data)).price;
resolve(values);
});
});
}
and to get the data once the async call is done, you will need to call the then function as below:
getValuesForTicker(obj[j].ticker.trim())
.then(function(val) {
obj[j].stockInformation = val
})
Considering you are new to Node.js, this can be hard to grasp at first. Take some time to understand callbacks and promises.
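Note that processData also has to wait for all the requests to finish before writing the output file; otherwise writeFileSync runs while stockInformation is still unset. A minimal sketch using Promise.all (URL and field names are the asker's):
function processData (data) {
var obj = JSON.parse(data);
// Fire one request per ticker and remember the pending promises.
var pending = obj.map(function (item) {
return getValuesForTicker(item.ticker.trim()).then(function (val) {
item.stockInformation = val; // closing over "item" avoids the shared loop-index pitfall
});
});
// Write the output only after every response has arrived.
Promise.all(pending).then(function () {
fs.writeFileSync(__dirname + "/data/response.json", JSON.stringify(obj, null, '\t'));
});
}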
I'd like to return a file from Blob Storage when you hit a given Azure Function end-point. This file is binary data.
Per the Azure Storage Blob docs, the most relevant call appears to be the following, since it's the only one that doesn't require writing the file to an interim file:
getBlobToStream
However this call gets the Blob and writes it to a stream.
Is there a way with Azure Functions to use a Stream as the value of res.body so that I can get the Blob Contents from storage and immediately write it to the response?
To add some code, trying to get something like this to work:
'use strict';
const azure = require('azure-storage'),
stream = require('stream');
const BLOB_CONTAINER = 'DeContainer';
module.exports = function(context){
var file = context.bindingData.file;
var blobService = azure.createBlobService();
var outputStream = new stream.Writable();
blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
if(error) {
FileNotFound(context);
} else {
context.res = {
status: 200,
headers: {
},
isRaw: true,
body : outputStream
};
context.done();
}
});
}
function FileNotFound(context){
context.res = {
status: 404,
headers: {
"Content-Type" : "application/json"
},
body : { "Message" : "No esta aqui!."}
};
context.done();
}
Unfortunately we don't have streaming support implemented in NodeJS just yet - it's on the backlog: https://github.com/Azure/azure-webjobs-sdk-script/issues/1361
If you're not tied to NodeJS and are open to using a C# function instead, you can use the storage SDK object directly in your input bindings and stream the request output, instead of using the intermediate object approach.
While @Matt Manson's answer is definitely correct based on the way I asked my question, the following code snippet might be more useful for someone who stumbles across this question.
While I can't send the Stream to the response body directly, I can use a custom stream which captures the data into a Uint8Array, and then sends that to the response body.
NOTE: If the file is REALLY big, this will use a lot of memory.
'use strict';
const azure = require('azure-storage'),
stream = require('stream');
const BLOB_CONTAINER = 'deContainer';
module.exports = function(context){
var file = context.bindingData.file;
var blobService = azure.createBlobService();
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
//Override the write to store the value to our "contents"
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
blobService.getBlobToStream(BLOB_CONTAINER, file, outputStream, function(error, serverBlob) {
if(error) {
FileNotFound(context);
} else {
context.res = {
status: 200,
headers: {
},
isRaw: true,
body : outputStream.contents
};
context.done();
}
});
}
function FileNotFound(context){
context.res = {
status: 404,
headers: {
"Content-Type" : "application/json"
},
body : { "Message" : "No esta aqui!"}
};
context.done();
}
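A slightly leaner variant of the same idea (a sketch under the same bindings as above): collect the chunks in an array and concatenate once at the end, instead of reallocating a Uint8Array on every write.
var chunks = [];
var outputStream = new stream.Writable();
outputStream._write = function (chunk, encoding, done) {
chunks.push(chunk); // keep each Buffer chunk as-is
done();
};
// After getBlobToStream succeeds:
// context.res = { status: 200, isRaw: true, body: Buffer.concat(chunks) };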
I tried @Doug's solution from the last comment above, with a few minor mods in my Azure function, and so far, after trying 20 different ideas, this is the only one that actually delivered the file to the browser! Thank you, @Doug...
const fs = require("fs");
const stream = require("stream");
...
const AzureBlob = require('#[my_private_artifact]/azure-blob-storage');
const azureStorage = new AzureBlob(params.connectionString);
//Override the write to store the value to our "contents" <-- Doug's solution
var outputStream = new stream.Writable();
outputStream.contents = new Uint8Array(0);//Initialize contents.
outputStream._write = function (chunk, encoding, done) {
var curChunk = new Uint8Array(chunk);
var tmp = new Uint8Array(this.contents.byteLength + curChunk.byteLength);
tmp.set(this.contents, 0);
tmp.set(curChunk, this.contents.byteLength);
this.contents = tmp;
done();
};
let azureSpeedResult = await azureStorage.downloadBlobToStream(params.containerName, params.objectId, outputStream);
let headers = {
"Content-Length": azureSpeedResult.size,
"Content-Type": mimeType
};
if (params.action == "download") {
headers["Content-Disposition"] = "attachment; filename=" + params.fileName;
}
context.res = {
status: 200,
headers: headers,
isRaw: true,
body: outputStream.contents
};
context.done();
...
I need to download or process a file from a SOAP-based web service in Node.js.
Can someone suggest how to handle this in Node.js?
I tried the 'node-soap' / 'soap' npm module. It worked for a normal SOAP web service, but not for a binary stream or MTOM-based SOAP web service.
I want to try to answer this... It's quite interesting that 2 years and 2 months later I cannot figure out how to easily solve the same problem.
I'm trying to get the attachment from a response like:
...
headers: { 'cache-control': 'no-cache="set-cookie"',
'content-type': 'multipart/related;boundary="----=_Part_61_425861994.1525782562904";type="application/xop+xml";start="";start-info="text/xml"',
...
body: '------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/xop+xml; charset=utf-8;
type="text/xml"\r\nContent-Transfer-Encoding: 8bit\r\nContent-ID:
\r\n\r\n....\r\n------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/octet-stream\r\nContent-Transfer-Encoding:
binary\r\nContent-ID:
\r\n\r\n�PNG\r\n\u001a\n\u0000\u0000\u0000\rIHDR\u0000\u0000\u0002,\u0000\u0000\u0005�\b\u0006\u0........binary....
I tried ws.js but no solution.
My solution:
var request = require("request");
var bsplit = require('buffer-split')
// it will extract "----=_Part_61_425861994.1525782562904" from the response
function getBoundaryFromResponse(response) {
var contentType = response.headers['content-type']
if (contentType && contentType.indexOf('boundary=') != -1 ) {
return contentType.split(';')[1].replace('boundary=','').slice(1, -1)
}
return null
}
function splitBufferWithPattern(binaryData, boundary) {
var b = new Buffer(binaryData),
delim = new Buffer(boundary),
result = bsplit(b, delim);
return result
}
var options = {
method: 'POST',
url: 'http://bla.blabal.../file',
gzip: true,
headers: {
SOAPAction: 'downloadFile',
'Content-Type': 'text/xml;charset=UTF-8'
},
body: '<soapenv: ... xml request of the file ... elope>'
};
var data = [];
var buffer = null;
var filename = "test.png"
request(options, function (error, response, body) {
if (error) throw new Error(error);
if (filename && buffer) {
console.log("filename: " + filename)
console.log(buffer.toString('base64'))
// after this, we can save the file from base64 ...
}
})
.on('data', function (chunk) {
data.push(chunk)
})
.on('end', function () {
var onlyPayload = splitBufferWithPattern(Buffer.concat(data), '\r\n\r\n') // this will get from PNG
buffer = onlyPayload[2]
buffer = splitBufferWithPattern(buffer, '\r\n-')[0]
console.log('Downloaded.');
})
I am not sure it will work in most cases. It looks like unstable code to my eyes, so I'm looking for something better.
Use ws.js
Here is how to fetch the file attachments:
const fs = require('fs')
const ws = require('ws.js')
const { Http, Mtom } = ws
var handlers = [ new Mtom(), new Http()];
var request = '<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope">' +
'<s:Body>' +
'<EchoFiles xmlns="http://tempuri.org/">' +
'<File1 />' +
'</EchoFiles>' +
'</s:Body>' +
'</s:Envelope>'
var ctx = { request: request
, contentType: "application/soap+xml"
, url: "http://localhost:7171/Service/mtom"
, action: "http://tempuri.org/IService/EchoFiles"
}
ws.send(handlers, ctx, function(ctx) {
//read an attachment from the soap response
var file = ws.getAttachment(ctx, "response", "//*[local-name(.)='File1']")
// work with the file
fs.writeFileSync("result.jpg", file)
})
Two limitations:
No basic auth provided out-of-box, patch required https://github.com/yaronn/ws.js/pull/40
If the file name is a URL, you need to apply another patch in mtom.js. Replace:
xpath = "//*[@href='cid:" + encodeURIComponent(id) + "']//parent::*"
with:
xpath = "//*[@href='cid:" + id + "']//parent::*"
I am using Node.js for file upload. I am trying something like the code below, but I am not getting how to post the data. I need to post the JSON data along with the file. How can I do this?
function myCtrl() {
//an array of files selected
$scope.files = [];
//listen for the file selected event
$scope.$on("fileSelected", function (event, args) {
alert("file selected:")
$scope.$apply(function () {
//add the file object to the scope's files collection
$scope.files.push(args.file);
});
});
//the save method
$scope.save = function(filename) {
$http({
method: 'POST',
url: "http://localhost:3000/uploadfile",
headers: { 'Content-Type': false },
//This method will allow us to change how the data is sent up to the
//server for which we'll need to encapsulate the model data
//in 'FormData'
transformRequest: function (data) {
var formData = new FormData();
formData.append("model", angular.toJson(data.model));
for (var i = 0; i < data.files.length; i++) {
//add each file to the form data and iteratively name them
formData.append("file" + i, data.files[i]);
}
return formData;
},
//Create an object that contains the model and files which will
//be transformed in the above transformRequest method
data: {files: $scope.files }
}).
success(function (data, status, headers, config) {
alert("success!:"+JSON.stringify(data));
}).
error(function (data, status, headers, config) {
alert("failed!:"+JSON.stringify(data));
});
};
}
Here is my angular directive "file-upload", I used to recognize the selected files to upload.
angular.module('myApp', []).directive('file-upload', function () {
return {
scope: true, //create a new scope
link: function (scope, el, attrs) {
el.bind('change', function (event) {
var files = event.target.files;
//iterate files since 'multiple' may be
//specified on the element
for (var i = 0;i<files.length;i++) {
//emit event upward
scope.$emit("fileSelected", { file: files[i] });
}
});
}
};
});
On ng-click I am calling save(), but it is not working. What is the problem?
I don't see any ng-click in your code.
The correct way is to bind to the "change" event, which seems to be what you are doing.
You can use the lightweight angular-file-upload library, which does what you are looking for with a directive.
It also supports IE9 with a Flash polyfill, since IE9 doesn't support FormData.
<script src="angular.min.js"></script>
<script src="angular-file-upload.js"></script>
<div ng-controller="MyCtrl">
<input type="file" ng-file-select="onFileSelect($files)" multiple>
</div>
JS:
//inject angular file upload directive.
angular.module('myApp', ['angularFileUpload']);
var MyCtrl = [ '$scope', '$http', function($scope, $http) {
$scope.onFileSelect = function($files) {
//$files: an array of files selected, each file has name, size, and type.
for (var i = 0; i < $files.length; i++) {
var $file = $files[i];
$http.uploadFile({
url: 'my/upload/url',
file: $file
}).then(function(data, status, headers, config) {
// file is uploaded successfully
console.log(data);
});
}
}
}];