I have to export large amount of data (>100000 documents) from lotus to excel.
I use Java Apache POI and SSJS for this function; however, the server crashes after 4–5000 documents.
Excel cannot be installed on the Lotus Domino server. CSV format is not an option.
Creating more files with predefined limited number of records is not an option.
What can be the proper method/technology for exporting every field in all documents in a view?
// Upper bound on the number of documents exported in the view/FT-search branches.
var maxDocs:Integer=6001;
// Exports Notes documents to an HSSF (.xls) workbook written to c:\Temp, then logs the
// export by attaching the file to a new 'ExcelExport' document and deletes the temp file.
//   docIDArray : note IDs to export; when empty, documents come from view sVieNam
//                (filtered by viewScope.query via FTSearch when a query is present).
//   proFie     : profile document supplying '~'-separated field names/labels/hints.
//   filTip     : tag used in the generated file and sheet names.
//   logEve     : label used in the log document's Subject/categories.
// Returns true when at least one row was written, false on error.
// NOTE(review): '#Explode', '#Name', '#UserName' are presumably the @-functions
// @Explode/@Name/@UserName mangled by formatting — TODO confirm against the original.
// NOTE(review): the whole HSSFWorkbook is built in memory; with >100k documents this is
// the likely cause of the reported server crash (consider SXSSF streaming instead).
function ExcExp(docIDArray:Array, sVieNam:String, proFie, filTip:String,logEve:String){
var sUzenet='xpExcel ExcExp ';
var bRetVal=false;
var docId:String;
var doc:NotesDocument=null;
var tmpDoc:NotesDocument=null;
var aIteNam:array=new Array();
var aIteLab:array=new Array();
var aIteHin:array=new Array();
var sIteNam:String;
var category:String;
var y=0;
// Field names / column labels / hint keys, '~'-separated in the profile document.
aIteNam=#Explode(proFie.getString('fieNam'),'~');
aIteLab=#Explode(proFie.getString('fieLab'),'~');
aIteHin=#Explode(proFie.getString('fieHin'),'~');
var rowInd=new java.lang.Integer(0);
var rowInd1=new java.lang.Integer(3);
try{
var fileName='c:\\Temp\\'+renFile('_'+filTip+'_','xls');
var fileOut = new java.io.FileOutputStream(fileName);
var wb = new org.apache.poi.hssf.usermodel.HSSFWorkbook();
var sheet=wb.createSheet('CRM'+filTip+'Export');
var createHelper = wb.getCreationHelper();
var drawing = sheet.createDrawingPatriarch();
var anchor = createHelper.createClientAnchor();
// Header row: one cell per exported field, each with a hint comment.
var row=sheet.createRow(rowInd);
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
colInd1=new java.lang.Integer(x);
colInd2=new java.lang.Integer(x+5);
var cell=row.createCell(cellInd);
cell.setCellValue(aIteNam[cellInd]);
anchor.setCol1(colInd1);
anchor.setCol2(colInd2);
anchor.setRow1(rowInd);
anchor.setRow2(rowInd1);
var comment = drawing.createCellComment(anchor);
// NOTE(review): 'fieldHint' is not declared in this snippet — presumably a global
// profile document; verify in the surrounding code.
var str = createHelper.createRichTextString(aIteLab[cellInd]+": "+fieldHint.getString(aIteHin[cellInd]));
comment.setString(str);
comment.setAuthor(#Name('[Abbreviate]',#UserName()));
cell.setCellComment(comment);
}
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
sheet.autoSizeColumn(cellInd);
}
// Branch 1: export the explicitly selected documents (by note ID).
if (docIDArray.length>0){
for(y=0;y<docIDArray.length;y++){
docId=docIDArray[y];
doc=database.getDocumentByID(docId);
if (doc!=null){
bRetVal=false;
rowInd=new java.lang.Integer(y+1);
row=sheet.createRow(rowInd);
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
cell=row.createCell(cellInd);
sIteNam=aIteNam[cellInd];
if (doc.hasItem(sIteNam)){
// Type 1 is rich text: needs getFormattedText instead of getItemValueString.
if (doc.getFirstItem(sIteNam).getType()!=1){
cell.setCellValue(doc.getItemValueString(sIteNam));
}else{
cell.setCellValue(doc.getFirstItem(sIteNam).getFormattedText(true, 0,0));
}
}else{
cell.setCellValue('');
}
}
bRetVal=true;
// NOTE(review): empty if — dead code.
if (bRetVal){
}
}
// NOTE(review): unlike the view branches below, 'doc' is never recycle()d here;
// with large selections this leaks Domino backend handles.
}
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
sheet.autoSizeColumn(cellInd);
}
wb.write(fileOut);
}else{
// Branch 2: full-text search the view when the user supplied a query.
if (viewScope.query && viewScope.query.length>0){
bRetVal=false;
var vView=database.getView(sVieNam);
if (vView.FTSearch(viewScope.query,maxDocs)>0){
doc=vView.getFirstDocument();
y=1;
while (doc!=null && y<maxDocs){
// Fetch the successor before recycling the current document.
tmpDoc=vView.getNextDocument(doc);
rowInd=new java.lang.Integer(y);
row=sheet.createRow(rowInd);
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
cell=row.createCell(cellInd);
sIteNam=aIteNam[cellInd];
if (doc.hasItem(sIteNam)){
cell.setCellValue(doc.getItemValueString(sIteNam));
}else{
cell.setCellValue('');
}
}
bRetVal=true;
doc.recycle();
doc=tmpDoc;
y=y+1;
}
}
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
sheet.autoSizeColumn(cellInd);
}
wb.write(fileOut);
}else{
// Branch 3: no selection and no query — export the whole view (capped at maxDocs).
bRetVal=false;
var vView=database.getView(sVieNam);
doc=vView.getFirstDocument();
var y=1;
while (doc!=null && y<maxDocs){
tmpDoc=vView.getNextDocument(doc);
rowInd=new java.lang.Integer(y);
row=sheet.createRow(rowInd);
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
cell=row.createCell(cellInd);
sIteNam=aIteNam[cellInd];
if (doc.hasItem(sIteNam)){
cell.setCellValue(doc.getItemValueString(sIteNam));
}else{
cell.setCellValue('');
}
}
bRetVal=true;
doc.recycle();
doc=tmpDoc;
y=y+1;
}
for (x=0;x<aIteNam.length;x++){
cellInd=new java.lang.Integer(x);
sheet.autoSizeColumn(cellInd);
}
wb.write(fileOut);
}
}
fileOut.close();
// Log the export: create a document with metadata and the .xls as an attachment.
if (y>0){
doc=database.createDocument();
doc.replaceItemValue('Form','ExcelExport');
doc.replaceItemValue('From',#Name('[Abbreviate]',#UserName()));
doc.replaceItemValue('Subject',logEve+' Export');
doc.replaceItemValue('Records',y);
doc.replaceItemValue('categories',logEve);
var rtitem:NotesRichTextItem = doc.createRichTextItem('Body');
rtitem.embedObject(NotesEmbeddedObject.EMBED_ATTACHMENT, fileName,fileName, null); doc.replaceItemValue('fileSize',doc.getFirstItem('Body').getEmbeddedObjects()[0].getFileSize()/1000);
doc.save();
}
delFile(fileName);
}catch(e){
// 'hiba' is Hungarian for 'error'; the message is surfaced via the view scope.
fileOut.close();
delFile(fileName);
bRetVal=false;
sUzenet+=' hiba: '+e;
msgVScope(sUzenet);
}finally{
// NOTE(review): returning from 'finally' swallows any exception rethrown above.
return bRetVal;
}
}
As suggested in the comments:
Move all the code to a managed bean. I suggest you pass the profileDocument, the database, the session and an OutputStream to the function that renders the spreadsheet. This way you can use the class from a command line, an agent or pass back the Sheet directly in the browser response (XAgent style), The command line is actually invaluable for debugging.
Watch your variables and recycle everything properly (check for the shred(base ... moreturi) function on OpenNTF.
Unless you actually want to pass back the spreadsheet in the browser.... use an Agent (with the bean approach you can change your mind anytime)
Let us know how it goes
Related
I have a Notification Service Extension and an AppGroup. I save a photo from camera in the PCL project and copy it to the App Group Container (shared folder).
In the Notification Service Extension I try to download the photo from the App Group container and attach it to the notification but it just does not display in the notification.
I also cannot debug the Service Extension to see what is going. As far as I know that is not possible currently still in Xamarin unless someone can correct me on that please.
Here is the code:
1.in my PCL I save the photo to the AppGroup when a save button is pressed:
// Persists the chosen photo path in app preferences and, on iOS, mirrors the file
// into the shared App Group container so the notification service extension can
// find it later. Disables the save button once the photo has been handled.
private void ButtonSavePhoto_Clicked(object sender, EventArgs e)
{
    if (string.IsNullOrEmpty(photoFilePath))
    {
        // Nothing selected yet — keep the button enabled and do nothing.
        return;
    }
    Preferences.Set(AppConstants.CUSTOM_PICTURE_FILE_PATH, photoFilePath);
    Preferences.Set(AppConstants.CUSTOM_PHOTO_SET_KEY, true);
    if (Device.RuntimePlatform == Device.iOS)
    {
        var photoService = DependencyService.Get<IPhotoService>();
        if (photoService.CopiedFileToAppGroupContainer(photoFilePath))
        {
            var groupPrefs = DependencyService.Get<IGroupUserPrefs>();
            var customPhotoDestPath = photoService.GetAppContainerCustomPhotoFilePath();
            // Path of the custom photo inside the App Group container, read later
            // by the Notification Service Extension.
            groupPrefs.SetStringValueForKey("imageAbsoluteString", customPhotoDestPath);
            // Flag telling the extension to use the custom photo in the push notification.
            groupPrefs.SetBoolValueForKey("isCustomPhotoSet", true);
        }
    }
    buttonSavePhoto.IsEnabled = false;
}
2.in my iOS project, Dependency injection calls this method when pressing save button:
// Copies the given file into the "Pictures" folder of the shared App Group
// container, replacing any existing copy of the well-known custom photo name.
// Returns true on success, false on any failure (missing entitlement or I/O error).
public bool CopiedFileToAppGroupContainer(string filePath)
{
    bool success = false;
    string suiteName = "group.com.company.appName";
    var appGroupContainerUrl = NSFileManager.DefaultManager.GetContainerUrl(suiteName);
    // FIX: GetContainerUrl returns null when the App Group entitlement is missing
    // or misconfigured; the original dereferenced it unconditionally (NRE).
    if (appGroupContainerUrl == null)
    {
        Console.WriteLine("App Group container unavailable for " + suiteName);
        return false;
    }
    var appGroupContainerPath = appGroupContainerUrl.Path;
    var directoryNameInAppGroupContainer = Path.Combine(appGroupContainerPath, "Pictures");
    var filenameDestPath = Path.Combine(directoryNameInAppGroupContainer, AppConstants.CUSTOM_PHOTO_FILENAME);
    try
    {
        // CreateDirectory is a no-op when the folder already exists.
        Directory.CreateDirectory(directoryNameInAppGroupContainer);
        if (File.Exists(filenameDestPath))
        {
            File.Delete(filenameDestPath);
        }
        File.Copy(filePath, filenameDestPath);
        success = true;
    }
    catch (Exception ex)
    {
        // FIX: log the whole exception (keeps the stack trace), not just Message.
        Console.WriteLine(ex);
    }
    return success;
}
Now the path for the photo in the App Group container is:
/private/var/mobile/Containers/Shared/AppGroup/12F209B9-05E9-470C-9C9F-AA959D940302/Pictures/customphoto.jpg
3.Finally in the Notification Service Extension I try to attach the photo to the push notification:
// Customizes an incoming push notification: applies user preferences from the
// shared App Group defaults, optionally attaches the custom photo stored in the
// App Group container, and delivers the content exactly once via contentHandler.
public override void DidReceiveNotificationRequest(UNNotificationRequest request, Action<UNNotificationContent> contentHandler)
{
    ContentHandler = contentHandler;
    BestAttemptContent = (UNMutableNotificationContent)request.Content.MutableCopy();
    string imgPath;
    NSUrl imgUrl;
    string notificationBody = BestAttemptContent.Body;
    string notifBodyInfo = "unknown";
    string suiteName = "group.com.company.appname";
    NSUserDefaults groupUserDefaults = new NSUserDefaults(suiteName, NSUserDefaultsType.SuiteName);
    string pref1_value = groupUserDefaults.StringForKey("user_preference1");
    string[] notificationBodySplitAtDelimiterArray = notificationBody.Split(',');
    userPrefRegion = notificationBodySplitAtDelimiterArray[0];
    bool isCustomAlertSet = groupUserDefaults.BoolForKey("isCustomAlert");
    bool isCustomPhotoSet = groupUserDefaults.BoolForKey("isCustomPhotoSet");
    string alarmPath = isCustomAlertSet == true ? "customalert.wav" : "default_alert.m4a";
    if (isCustomPhotoSet)
    {
        // this is the App Group url of the custom photo saved in PCL
        imgPath = groupUserDefaults.StringForKey("imageAbsoluteString");
    }
    else
    {
        imgPath = null;
    }
    if (imgPath != null)
    {
        // FIX: the stored value is a plain filesystem path ("/private/var/..."), not
        // a URL string. NSUrl.FromString yields a URL without the file:// scheme, so
        // the attachment silently failed; FromFilename builds a proper file URL.
        imgUrl = NSUrl.FromFilename(imgPath);
    }
    else
    {
        imgUrl = null;
    }
    // FIX: the original invoked ContentHandler at the end of the method while the
    // download callback was still running, so the notification was delivered before
    // the attachment was added. Track whether delivery is deferred to the callback.
    bool handlerDeferred = false;
    if (!string.IsNullOrEmpty(pref1_value))
    {
        if (BestAttemptContent.Body.Contains(pref1_value))
        {
            BestAttemptContent.Title = "My Custom Title";
            BestAttemptContent.Subtitle = "My Custom Subtitle";
            BestAttemptContent.Body = "Notification Body";
            BestAttemptContent.Sound = UNNotificationSound.GetSound(alarmPath);
            if (imgUrl != null)
            {
                handlerDeferred = true;
                // copy the image out of the AppGroup Container
                var task = NSUrlSession.SharedSession.CreateDownloadTask(imgUrl, (tempFile, response, error) =>
                {
                    if (error != null || tempFile == null)
                    {
                        // Deliver without the attachment rather than dropping the notification.
                        ContentHandler(BestAttemptContent);
                        return;
                    }
                    var cache = NSSearchPath.GetDirectories(NSSearchPathDirectory.CachesDirectory, NSSearchPathDomain.User, true);
                    var cachesFolder = cache[0];
                    var guid = NSProcessInfo.ProcessInfo.GloballyUniqueString;
                    // FIX: the original concatenated folder + file name without a path
                    // separator, producing an invalid destination path.
                    var cacheFile = cachesFolder + "/" + guid + "customphoto.jpg";
                    var attachmentUrl = NSUrl.CreateFileUrl(cacheFile, false, null);
                    NSError err = null;
                    NSFileManager.DefaultManager.Copy(tempFile, attachmentUrl, out err);
                    if (err == null)
                    {
                        UNNotificationAttachmentOptions options = null;
                        var attachment = UNNotificationAttachment.FromIdentifier("image", attachmentUrl, options, out err);
                        if (attachment != null)
                        {
                            BestAttemptContent.Attachments = new UNNotificationAttachment[] { attachment };
                        }
                    }
                    // Deliver exactly once, after the attachment work has finished.
                    ContentHandler(BestAttemptContent);
                });
                task.Resume();
            }
        }
    }
    else
    {
        pref1_value = "error getting extracting user pref";
    }
    if (!handlerDeferred)
    {
        // finally display customized notification
        ContentHandler(BestAttemptContent);
    }
}
/private/var/mobile/Containers/Shared/AppGroup/12F209B9-05E9-470C-9C9F-AA959D940302/Pictures/customphoto.jpg
In the shared code, when getting the image from the App Group, you can first check whether the file path works in this project:
imgPath = groupUserDefaults.StringForKey("imageAbsoluteString");
If you cannot get the file from this path, you can get the URL from the App Group directly. Here is a sample:
// Resolve the App Group container and build the shared photo's file URL directly,
// instead of relying on the path string stored in the group user defaults.
var FileManager = new NSFileManager();
var appGroupContainer = FileManager.GetContainerUrl("group.com.company.appName");
NSUrl fileURL = appGroupContainer.Append("customphoto.jpg", false);
If fileURL cannot be used directly, you can also convert it to NSData and save it to the local file system. That is also worth a try.
Here is a sample below that grabs from local file system:
// Schedules a local notification carrying an image attachment taken from the
// local file system, firing almost immediately (0.1 s trigger).
public static void Sendlocalnotification()
{
    var localURL = "...";
    // FIX: for a local filesystem path, build a proper file:// URL with
    // FromFilename; FromString only works for already-encoded URL strings.
    NSUrl url = NSUrl.FromFilename(localURL);
    var attachmentID = "image";
    var options = new UNNotificationAttachmentOptions();
    NSError error;
    var attachment = UNNotificationAttachment.FromIdentifier(attachmentID, url, options, out error);
    var content = new UNMutableNotificationContent();
    // FIX: only attach on success — the original put a possibly-null attachment
    // (when FromIdentifier fails) into the Attachments array.
    if (error == null && attachment != null)
    {
        content.Attachments = new UNNotificationAttachment[] { attachment };
    }
    content.Title = "Good Morning ~";
    content.Subtitle = "Pull this notification ";
    content.Body = "reply some message-BY Ann";
    content.CategoryIdentifier = "message";
    var trigger1 = UNTimeIntervalNotificationTrigger.CreateTrigger(0.1, false);
    var requestID = "messageRequest";
    var request = UNNotificationRequest.FromIdentifier(requestID, content, trigger1);
    UNUserNotificationCenter.Current.AddNotificationRequest(request, (err) =>
    {
        if (err != null)
        {
            Console.Write("Notification Error");
        }
    });
}
My goal is to stream a large response to the browser. I need to minimise the memory use of the XPage so that multiple hits don't bring the server down.
The XPages are set up "XAgent" style with rendering=false.
Has anyone got this working?
Attempt 1
According to various HttpServletResponse posts it should be as simple as flushing the writer as you proceed but that wasn't the case. The code below includes a commented section called Experiment1 that I used to deliberately break the response. My logic being that the resetBuffer call should not impact the content I've already "flushed" - but it did. The broken response only included the error message and none of the chunks.There's a good chance that my assumption is wrong of course.The code ran in the afterRenderResponse event.
// XAgent attempt 1 (afterRenderResponse): write 100 chunks through the JSF
// response writer, flushing after each one. Does NOT achieve true streaming —
// see the answer below: XspHttpServletResponse buffers internally.
importPackage(java.util);
importPackage(java.io);
importPackage(java.lang);
var ec=facesContext.getExternalContext();
var response=ec.getResponse();
var request=ec.getRequest();
var writer=facesContext.getResponseWriter();
try{
response.setHeader("Content-Type","text/plain; charset=utf-8");
for(var i=0;i<100;i++){
var s="CHUNKCHUNKCHUNK"+i;
writer.write(s);
writer.flush();
response.flushBuffer();
//EXPERIMENT1
//if(i==50){
//throw new Error("FAIL ON PURPOSE")
//}
}
}catch(e){
//EXPERIMENT1
//response.resetBuffer()
// Build a JSON error payload, preferring a full Java stack trace when available.
var errorObj={};
response.setStatus(500);
var errorMessage = e.message;
if( typeof e.printStackTrace !== 'undefined' ){
var stringWriter = new java.io.StringWriter();
e.printStackTrace( new java.io.PrintWriter( stringWriter ) );
errorMessage = stringWriter.toString();
}
if( typeof e === 'com.ibm.jscript.InterpretException' ){
errorMessage = e.getNode().getTraceString() + '\n\n' + errorMessage;
}
errorObj.status="error";
errorObj.message=errorMessage;
writer.write(toJson(errorObj));
}finally{
// NOTE(review): 'creport' and 'debug' are not declared in this snippet —
// presumably globals from the surrounding XPage; verify before reuse.
if(creport!=null){
if(debug){
creport.close(true);
}else{
creport.closeOnlyIfIssue(true);
}
}
facesContext.responseComplete();
}
Attempt 2
My second attempt was pure desperation. It uses the beforeRenderResponse event and writes the chunks according to the HTTP/1.1 spec. The code below shows the "Transfer-Encoding" header commented out because it results in a zero byte payload. The headers make it through, though.
// XAgent attempt 2 (beforeRenderResponse): hand-roll HTTP/1.1 chunked framing
// (hex length + CRLF + payload + CRLF, terminated by "0\r\n\r\n") on the raw
// output stream. The Transfer-Encoding header is commented out because setting
// it produced a zero-byte payload (the XSP response layer interferes).
importPackage(java.util);
importPackage(java.io);
importPackage(java.lang)
var ec=facesContext.getExternalContext();
var response=ec.getResponse();
var request=ec.getRequest();
var writer=response.getOutputStream();
try{
response.setHeader("Content-Type","text/plain; charset=utf-8");
//response.setHeader("Transfer-Encoding","chunked")
response.setBufferSize(1024);
for(var i=0;i<100;i++){
var s="CHUNKCHUNKCHUNK"+i;
writer.write((Integer.toHexString(s.length())+"\r\n").getBytes("utf-8"));
writer.write((s+"\r\n").getBytes("utf-8"));
writer.flush();
response.flushBuffer();
}
// Zero-length chunk terminates the chunked body.
writer.write("0\r\n\r\n".getBytes("utf-8"))
}catch(e){
// Build a JSON error payload, preferring a full Java stack trace when available.
var errorObj={};
//response.resetBuffer();
response.setStatus(500);
var errorMessage = e.message;
if( typeof e.printStackTrace !== 'undefined' ){
var stringWriter = new java.io.StringWriter();
e.printStackTrace( new java.io.PrintWriter( stringWriter ) );
errorMessage = stringWriter.toString();
}
if( typeof e === 'com.ibm.jscript.InterpretException' ){
errorMessage = e.getNode().getTraceString() + '\n\n' + errorMessage;
}
errorObj.status="error";
errorObj.message=errorMessage;
writer.write(toJson(errorObj).getBytes("utf-8"));
}finally{
facesContext.responseComplete();
}
When accessing the response with
var response=ec.getResponse()
you are getting an instance of com.ibm.xsp.webapp.XspHttpServletResponse. It uses internally a buffer/block-mechanism for a better performance, that's why your first experiment does not work as expected.
But if you access the underlying LCDAdapterHttpServletResponse directly, it is possible to get the full control for the outputstream:
var response=ec.getResponse().getDelegate();
// Working approach: bypass XspHttpServletResponse's internal buffering by going
// through getDelegate() to the underlying LCDAdapterHttpServletResponse, then
// emit HTTP/1.1 chunked framing manually on its writer.
importPackage(java.util);
importPackage(java.io);
importPackage(java.lang);
importPackage(imtk);
var ec=facesContext.getExternalContext();
// getDelegate() grants direct control over the real servlet response/output.
var response=ec.getResponse().getDelegate();
var request=ec.getRequest();
var writer=response.getWriter();
try{
response.setHeader("Content-Type","application/json; charset=utf-8");
response.setHeader("Transfer-Encoding","chunked");
response.flushBuffer();
for(var i=0;i<100;i++){
var s="CHUNK"+i;
// Chunk = hex length line, then the payload line.
writer.println(Integer.toHexString(s.length()));
writer.println(s);
writer.flush();
}
// Zero-length chunk terminates the chunked body.
writer.print("0\r\n");
writer.print("\r\n");
writer.flush();
}catch(e){
// Error is itself sent as a final well-formed chunk with a 500 status.
var errorObj={};
response.setStatus(500);
var errorMessage = e.message;
if( typeof e.printStackTrace !== 'undefined' ){
var stringWriter = new java.io.StringWriter();
e.printStackTrace( new java.io.PrintWriter( stringWriter ) );
errorMessage = stringWriter.toString();
}
if( typeof e === 'com.ibm.jscript.InterpretException' ){
errorMessage = e.getNode().getTraceString() + '\n\n' + errorMessage;
}
errorObj.status="error";
errorObj.message=errorMessage;
var json=toJson(errorObj);
writer.println(Integer.toHexString(json.length()))
writer.println(json);
writer.flush();
writer.print("0\r\n");
writer.print("\r\n");
writer.flush();
}finally{
// NOTE(review): 'creport' and 'debug' are not declared in this snippet —
// presumably globals from the surrounding XPage; verify before reuse.
if(creport!=null){
if(debug){
creport.close(true);
}else{
creport.closeOnlyIfIssue(true);
}
}
facesContext.responseComplete();
}
I am using Rotativa to generate PDF in my "MVC" application. How can I save Rotativa PDF? I need to save the document on a server after all the process is completed.
Code below:
// Renders the PRV request view for the given reference number and employee id.
public ActionResult PRVRequestPdf(string refnum, string emid)
{
    var model = functions.getprvrequest(refnum, emid);
    return View(model);
}
// Returns the PRV request rendered as a PDF via Rotativa.
public ActionResult PDFPRVRequest()
{
    // NOTE(review): prv may be null if the session entry expired — confirm upstream.
    var prv = Session["PRV"] as PRVRequestModel;
    // FIX: removed an unused WkhtmltopdfDriver.ConvertHtml call whose byte[] result
    // was discarded — it generated a second PDF on every request for nothing.
    return new Rotativa.ViewAsPdf("PRVRequestPdf", new { refnum = prv.rheader.request.Referenceno });
}
You can give this a try
// Build the PDF bytes from the action and persist them to fullPath.
var actionResult = new ActionAsPdf("PRVRequestPdf", new { refnum = prv.rheader.request.Referenceno, emid = "Whatever this is" });
var byteArray = actionResult.BuildPdf(ControllerContext);
// FIX: dispose the stream deterministically — the original leaked the file
// handle if Write threw before Close.
using (var fileStream = new FileStream(fullPath, FileMode.Create, FileAccess.Write))
{
    fileStream.Write(byteArray, 0, byteArray.Length);
}
If that doesn't do the trick then, you can follow the answers here
Just make sure if you do it this way not to have PRVRequestPdf return as a PDF View, rather a normal View like you have above (only mention as managed to fall foul of that myself causing lots of fun).
Another useful answer:
I found the solution here
// Build a Rotativa ActionAsPdf with explicit page options, then render to bytes.
// FIX: in the original, the trailing "//some route values)" comment commented out
// the closing parenthesis of the constructor call, so the snippet did not compile.
var actionPDF = new Rotativa.ActionAsPdf("YOUR_ACTION_Method", new { id = ID, lang = strLang }) // some route values
{
    //FileName = "TestView.pdf",
    PageSize = Size.A4,
    PageOrientation = Rotativa.Options.Orientation.Landscape,
    PageMargins = { Left = 1, Right = 1 }
};
byte[] applicationPDFData = actionPDF.BuildPdf(ControllerContext);
This is the original thread
You can achieve this with ViewAsPdf.
// Renders the PRV request as a PDF with Rotativa and writes it to a network
// share, returning a JSON success/failure indicator.
[HttpGet]
public ActionResult SaveAsPdf(string refnum, string emid)
{
    try
    {
        var prv = functions.getprvrequest(refnum, emid);
        ViewAsPdf pdf = new Rotativa.ViewAsPdf("PRVRequestPdf", prv)
        {
            FileName = "Test.pdf",
            // wkhtmltopdf switches: page numbering in the footer.
            CustomSwitches = "--page-offset 0 --footer-center [page] --footer-font-size 8"
        };
        byte[] pdfData = pdf.BuildFile(ControllerContext);
        // FIX: C# verbatim strings use the '@' prefix — '#"..."' does not compile.
        string fullPath = @"\\server\network\path\pdfs\" + pdf.FileName;
        using (var fileStream = new FileStream(fullPath, FileMode.Create, FileAccess.Write))
        {
            fileStream.Write(pdfData, 0, pdfData.Length);
        }
        return Json(new { isSuccessful = true }, JsonRequestBehavior.AllowGet);
    }
    catch (Exception ex)
    {
        //TODO: ADD LOGGING
        return Json(new { isSuccessful = false, error = "Uh oh!" }, JsonRequestBehavior.AllowGet);
        //throw;
    }
}
You can simply try this:
// Compose a per-id file name and write the PDF bytes under App_Data.
var fileName = "my_file_" + id + ".pdf";
var path = Server.MapPath("~/App_Data/" + fileName);
System.IO.File.WriteAllBytes(path, pdfByteArray);
I use this function to watch an array of objects for changes:
$scope.$watch('Data', function (newVal) { /*...*/ }, true);
How can I get an object in which property has been changed so that I can push it in an array?
For example:
// AngularJS demo: a "Data" factory plus a controller that deep-watches the data
// and records a marker string whenever any property changes.
var myApp = angular.module("myApp", []);

myApp.factory("Data", function () {
    // Seed data; returned directly as the singleton service value.
    return [
        { id: 1, property: "Random" },
        { id: 2, property: "Random again" }
    ];
});

var myBigArray = [];

function tableCtrl($scope, Data) {
    $scope.TheData = Data;
    var onDataChanged = function () {
        // Deep watch fires here; Angular does not tell us WHICH object changed.
        myBigArray.push(">>Object in which property has been changed <<<");
    };
    $scope.$watch("TheData", onDataChanged, true);
}
I don't see a way currently in Angular to get the changed object... I suspect you might need to traverse the new array and try to find the differences with the old array...
Edit: Note that this solution turns out to be a bad practice as it is adding a lot of watchers, which is something you do not want because it has a performance penalty.
=======
I eventually came up with this solution:
// Register one deep $watch per array element so the callback closes over the
// specific item that changed. NOTE(review): as the edit above warns, this adds
// a watcher per item — a performance penalty on large arrays.
items.query(function (result) {
_(result).each(function (item, i) {
$scope.items.push(item);
$scope.$watch('items[' + i + ']' , function(){
console.log(item); // This is the item that changed.
}, true);
});
});
There is still no option like this for $watch, but you can use jQuery plugin for that, http://archive.plugins.jquery.com/project/jquery-diff
I implemented undo/redo with AngularJS using $watch, mb this can help
//History Manager Factory
// Undo/redo manager built on a deep $watch of scope.widgetSlider. Each history
// step stores [deep copy of widgetSlider, index of the selected widget].
// 'lock' suppresses pushDo while undo replays a state, so the replay itself is
// not recorded as a new change.
.factory('HistoryManager', function () {
return function(scope) {
this.container = Array();
this.index = -1;
this.lock = false;
//Insert new step into array of steps
this.pushDo = function() {
//we make sure that we have real changes by converting to json,
//and getting rid of all hash changes
if(this.container.length == 0 || (angular.toJson(scope.widgetSlider) != angular.toJson(this.container[this.index][0]))) {
//check if current change didn't came from "undo" change'
if(this.lock) {
return;
}
//Cutting array, from current index, because of new change added
if(this.index < this.container.length-1) {
this.container = this.container.slice(0, this.index+1);
}
var currentStepSlider = angular.copy(scope.widgetSlider);
var selectedWidgetIndex = scope.widgetSlider.widgets.indexOf(scope.widgetCurrent);
//Initialising index, because of new "Do" added
this.index = this.container.length;
this.container.push([currentStepSlider, selectedWidgetIndex]);
// Optional hook invoked after every recorded step.
if (this.onDo) {
this.onDo();
}
}
}
//Upon undo returns previous do
this.undo = function() {
this.lock = true;
if(this.index>0){
this.index--;
scope.widgetSlider = angular.copy(this.container[this.index][0]);
var selectedWidgetIndex = this.container[this.index][1];
scope.widgetCurrent = scope.widgetSlider.widgets[selectedWidgetIndex];
}
this.lock = false;
}
//Upon redo returns next do
// NOTE(review): redo does not set 'lock', so replaying a redo state can itself
// trigger pushDo via the watcher — presumably benign because the JSON compare
// filters identical states; verify.
this.redo = function() {
if(this.index < this.container.length-1) {
this.index++;
scope.widgetSlider = angular.copy(this.container[this.index][0]);
var selectedWidgetIndex = this.container[this.index][1];
scope.widgetCurrent = scope.widgetSlider.widgets[selectedWidgetIndex];
}
}
}
})
;
Being very new to SharePoint coding I have been assigned the task to create a prototype code to upload a file and setting the field values for that file that will show up when opening the sharepoint page with the file.
This has to be done from a remote machine and not the Sharepoint server itself so using the .Net objects for Sharepoint is out the question.
I quickly found out how to upload a file through the Sharepoint Web Service Copy.asmx:
// Uploads C:\Temp\TestFile.doc to the SharePoint "Documents" library via the
// Copy.asmx web service, passing two metadata fields along with the file bytes.
void UploadTestFile() {
    // FIX: C# verbatim strings use the '@' prefix — '#"..."' does not compile.
    var file = @"C:\Temp\TestFile.doc";
    string destinationUrl = "http://mysharepointserver/Documents/"
        + Path.GetFileName(file);
    string[] destinationUrls = { destinationUrl };
    var CopyWS = new Copy.Copy();
    CopyWS.UseDefaultCredentials = true;
    CopyWS.Url = "http://mysharepointserver/_vti_bin/copy.asmx";
    CopyResult[] result;
    byte[] data = File.ReadAllBytes(file);
    // Plain text field — per the discussion below, this is the only field type
    // CopyIntoItems reliably fills in.
    FieldInformation mf1 = new FieldInformation {
        DisplayName = "title",
        InternalName = "title",
        Type = FieldType.Text,
        Value = "Dummy text"
    };
    // NOTE(review): MyTermSet is a taxonomy (lookup-backed) field; CopyIntoItems
    // does not update it — see the follow-up UpdateListItems approach.
    FieldInformation mf2 = new FieldInformation {
        DisplayName = "MyTermSet",
        InternalName = "MyTermSet",
        Type = FieldType.Note,
        Value = "Test; Unit;"
    };
    // "+" as the source URL means the binary payload is supplied inline.
    CopyWS.CopyIntoItems(
        "+",
        destinationUrls,
        new FieldInformation[] { mf1, mf2 },
        data,
        out result);
}
This code easily uploads any file to the target site but only fills the "title" field with info. The field MyTermSet, to which I have already added 3 terms - Test, Unit and Page - will not update with the values "Test;" and "Unit;".
Being very new to Sharepoint and me not grasping all the basics googling has told me that updating "File", "Computed" or "Lookup" fields does not work with the CopyIntoItems method, and MyTermSet being a Taxonomy field is - if I am correct - a Lookup field.
So how do I get MyTermSet updated with the values "Test;" and "Unit;" ?
I would really prefer If someone has a sample code on this. I have followed several hint-links but I am none the wiser. I have found no sample-code on this at all.
Has anyone made a single method that wraps it all? Or another method that takes the destinationUrl from the file upload and updates the Term Set/Taxonomy field?
Puzzling together what I have found so far, I am now able to do as I wanted. But I would really like to be able to get the Taxonomy field GUIDs dynamically and NOT having to explicitly set them myself:
// Uploads FileName to the DocLib SharePoint library via Copy.asmx, then (when
// Fields is supplied) sets field values — including taxonomy fields — through a
// Lists.asmx UpdateListItems CAML batch. Field keys may be internal names or
// display names; taxonomy fields are resolved to their hidden
// "<name>TaxHTField0" / "<name>_0" backing fields.
// NOTE(review): taxonomy term GUIDs must currently be passed by the caller
// ("Term|guid"); resolving them dynamically is the stated open problem.
void UploadTestFile(string FileName, string DocLib, Dictionary<string, string> Fields = null) {
//Upload the file to the target Sharepoint doc lib
string destinationUrl = DocLib + Path.GetFileName(FileName);
string[] destinationUrls = { destinationUrl };
var CopyWS = new Copy.Copy();
CopyWS.UseDefaultCredentials = true;
CopyWS.Url = new Uri(new Uri(DocLib), "/_vti_bin/copy.asmx").ToString();
CopyResult[] result;
var data = File.ReadAllBytes(FileName);
// "+" as the source URL means the binary payload is supplied inline.
CopyWS.CopyIntoItems(
"+",
destinationUrls,
new FieldInformation[0],
data,
out result);
if (Fields == null) return; //Done uploading
//Get the ID and metadata information of the fields
var list = new ListsWS.Lists();
list.UseDefaultCredentials = true;
var localpath = new Uri(DocLib).LocalPath.TrimEnd('/');
var site = localpath.Substring(0, localpath.LastIndexOf("/")); //Get the site of the URL
list.Url = new Uri(new Uri(DocLib), site + "/_vti_bin/lists.asmx").ToString(); //Lists on the right site
FieldInformation[] fiOut;
byte[] filedata;
// Re-fetch the uploaded item to obtain its field metadata (and sanity-check size).
var get = CopyWS.GetItem(destinationUrl, out fiOut, out filedata);
if (data.Length != filedata.Length) throw new Exception("Failed on uploading the document.");
//Dictionary on name and display name
var fieldInfos = fiOut.ToDictionary(x => x.InternalName, x => x);
var fieldInfosByName = new Dictionary<string, FieldInformation>();
foreach (var item in fiOut) {
if (!fieldInfosByName.ContainsKey(item.DisplayName)) {
fieldInfosByName.Add(item.DisplayName, item);
}
}
//Update the document with fielddata - this one can be extended for more than Text and Note fields.
if (!fieldInfos.ContainsKey("ID")) throw new Exception("Could not get the ID of the upload.");
var ID = fieldInfos["ID"].Value; //The ID of the document we just uploaded
XDocument doc = new XDocument(); //Creating XML with updates we need
doc.Add(XElement.Parse("<Batch OnError='Continue' ListVersion='1' ViewName=''/>"));
doc.Element("Batch").Add(XElement.Parse("<Method ID='1' Cmd='Update'/>"));
var methNode = doc.Element("Batch").Element("Method");
//Add ID
var fNode = new XElement("Field");
fNode.SetAttributeValue("Name", "ID");
fNode.Value = ID;
methNode.Add(fNode);
//Loop each field and add each Field
foreach (var field in Fields) {
//Get the field object from name or display name
FieldInformation fi = null;
if (fieldInfos.ContainsKey(field.Key)) {
fi = fieldInfos[field.Key];
}
else if (fieldInfosByName.ContainsKey(field.Key)) {
fi = fieldInfosByName[field.Key];
}
if (fi != null) {
//Fix for taxonomy fields - find the correct field to update
if (fi.Type == FieldType.Invalid && fieldInfos.ContainsKey(field.Key + "TaxHTField0")) {
fi = fieldInfos[field.Key + "TaxHTField0"];
}
else if (fi.Type == FieldType.Invalid && fieldInfosByName.ContainsKey(field.Key + "_0")) {
fi = fieldInfosByName[field.Key + "_0"];
}
fNode = new XElement("Field");
fNode.SetAttributeValue("Name", fi.InternalName);
// Serialize the value per field type; lookup/taxonomy values use the
// "-1;#value" convention expected by UpdateListItems.
switch (fi.Type) {
case FieldType.Lookup:
fNode.Value = "-1;#" + field.Value;
break;
case FieldType.Choice:
case FieldType.Text:
fNode.Value = field.Value;
break;
case FieldType.Note: //TermSet's
var termsetval = "";
var terms = field.Value.Split(';');
foreach (var term in terms) {
termsetval += "-1;#" + term + ";";
}
fNode.Value = termsetval.TrimEnd(';');
break;
default:
//..Unhandled type. Implement if needed.
break;
}
methNode.Add(fNode); //Adds the field to the XML
}
else {
//Field does not exist. No use in uploading.
}
}
//Gets the listname (not sure if it is the full path or just the folder name)
var listname = new Uri(DocLib).LocalPath;
var listcol = list.GetListCollection(); //Get the lists of the site
// Resolve the list GUID by matching the DefaultViewUrl prefix; fall back to the
// raw path when no match is found.
listname = (from XmlNode x
in listcol.ChildNodes
where x.Attributes["DefaultViewUrl"].InnerText.StartsWith(listname, StringComparison.InvariantCultureIgnoreCase)
select x.Attributes["ID"].InnerText).DefaultIfEmpty(listname).First();
//Convert the XML to XmlNode and upload the data
var xmldoc = new XmlDocument();
xmldoc.LoadXml(doc.ToString());
list.UpdateListItems(listname, xmldoc.DocumentElement);
}
Then I call it like this:
// Current (working) call: taxonomy terms must carry their GUIDs explicitly.
// FIX: C# verbatim strings use the '@' prefix — '#"..."' does not compile.
var fields = new Dictionary<string, string>();
fields.Add("Test", "Dummy Text");
fields.Add("MrTermSet", "Page|a4ba29c1-3ed5-47e9-b43f-36bc59c0ea5c;Unit|4237dfbe-22a2-4d90-bd08-09f4a8dd0ada");
UploadTestFile(@"C:\Temp\TestFile2.doc", @"http://mysharepointserver/Documents/", fields);
I would however prefer to call it like this:
// Desired call shape: terms by name only, GUIDs resolved automatically.
// FIX: C# verbatim strings use the '@' prefix — '#"..."' does not compile.
var fields = new Dictionary<string, string>();
fields.Add("Test", "Dummy Text");
fields.Add("MrTermSet", "Page;Unit");
UploadTestFile(@"C:\Temp\TestFile2.doc", @"http://mysharepointserver/Documents/", fields);