Defining Log Analytics data sources via C# - Azure

How can I add the Windows Performance Counters shown in the screenshot below via a C# application?

I found out this can be done via the OperationalInsightsManagementClient. The code below adds a Windows Performance Counter data source to a workspace.
void AddWorkspaceDatasources(string resourceGroupName, string objectName, string counterName)
{
    var client = new OperationalInsightsManagementClient(GetCredentials()) { SubscriptionId = subscriptionId };

    // Second argument is the workspace name; in this environment it happens to match the resource group name.
    var existing = client.DataSources.ListByWorkspace(
        new ODataQuery<DataSourceFilter> { Filter = "kind eq 'WindowsPerformanceCounter'" },
        resourceGroupName,
        resourceGroupName);

    if (!existing.Any(c => (c.Properties as JObject)["objectName"].ToString() == objectName
                        && (c.Properties as JObject)["counterName"].ToString() == counterName))
    {
        var properties = new JObject();
        properties["counterName"] = counterName;
        properties["instanceName"] = "*";
        properties["intervalSeconds"] = 10;
        properties["objectName"] = objectName;
        properties["collectorType"] = "Default";

        client.DataSources.CreateOrUpdate(
            resourceGroupName,
            resourceGroupName, // workspace name
            Regex.Replace(objectName, "[^a-zA-Z0-9]", "") + Regex.Replace(counterName, "[^a-zA-Z0-9]", ""),
            new DataSource
            {
                Kind = "WindowsPerformanceCounter",
                Properties = properties
            });
    }
}
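For reference, a minimal sketch of what GetCredentials might look like, assuming an Azure AD service principal and the ApplicationTokenProvider from Microsoft.Rest.ClientRuntime.Azure.Authentication (the tenant, client and secret values below are placeholders):
//Hypothetical helper - the original GetCredentials is not shown.
//Assumes: using Microsoft.Rest; using Microsoft.Rest.Azure.Authentication;
private static ServiceClientCredentials GetCredentials()
{
    return ApplicationTokenProvider.LoginSilentAsync(
            "<tenant-id>",       //placeholder AAD tenant
            "<client-id>",       //placeholder app registration
            "<client-secret>")   //placeholder client secret
        .GetAwaiter().GetResult();
}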

Related

How do I get disk space statistics for a clustered disk

I have a working program that retrieves disk information such as FreeSpace, TotalSpace etc. from a remote server. The problem is that I cannot get the same statistics for the Clustered Disks configured on the server; the query only returns information for the local disks (logical disks).
I am able to get sizes for the local disk (C:) as below:
public List<Disk> GetEnvironmentStatistics()
{
    var serverIP = Convert.ToString(System.Web.HttpContext.Current.Session["ServerIP"]);
    List<Disk> diskinfo = new List<Disk>();

    //Add a reference to System.Management to access these utilities
    ConnectionOptions options = new ConnectionOptions
    {
        Username = Convert.ToString(System.Web.HttpContext.Current.Session["Username"]),
        Password = Convert.ToString(System.Web.HttpContext.Current.Session["Password"]),
        Authority = Convert.ToString(System.Web.HttpContext.Current.Session["Authority"]),
    };

    //root - root of the tree, cimv2 - the namespace
    ManagementScope scope = new ManagementScope("\\\\" + serverIP + "\\root\\CIMV2", options);
    scope.Connect();

    SelectQuery query = new SelectQuery("Select * from Win32_LogicalDisk");
    ManagementObjectSearcher searcher = new ManagementObjectSearcher(scope, query);
    ManagementObjectCollection queryCollection = searcher.Get();

    foreach (ManagementObject mo in queryCollection)
    {
        Disk disk = new Disk();
        disk.DiskName = mo["Name"].ToString();
        disk.DeviceId = mo["DeviceID"].ToString();
        disk.SystemName = mo["SystemName"].ToString();

        disk.FreeSpace = Convert.ToDecimal(mo["FreeSpace"]);
        var formattedFreeSpace = Helpers.DiskSpaceInGigabytes(disk.FreeSpace ?? 0);
        disk.FreeSpace = Decimal.Truncate(formattedFreeSpace);

        disk.TotalSpace = Convert.ToDecimal(mo["Size"]);
        var formattedTotalSpace = Helpers.DiskSpaceInGigabytes(disk.TotalSpace ?? 0);
        disk.TotalSpace = Decimal.Truncate(formattedTotalSpace);

        disk.UsedSpace = disk.TotalSpace - disk.FreeSpace;
        var HDPercentageUsed = 100 - (100 * disk.FreeSpace / disk.TotalSpace);
        disk.PercentageUsed = Convert.ToInt32(HDPercentageUsed);

        diskinfo.Add(disk);
    }
    return diskinfo;
}
I have logged into the server and noted that the other disks appear as Clustered Disks.
I have researched Clustered Disks and Cluster Shared Volumes a bit, but the only guides I have found involve PowerShell scripting, like this one: ClusteredSharedVolume Disk Space.
My question: how can I write a WMI query that also retrieves information for Clustered Disks? More precisely, how can I adapt the query from the linked guide for my needs:
$resources = Get-WmiObject -Namespace root\MSCluster MSCluster_Resource -Filter "Type='Physical Disk'"
$resources | foreach {
    $res = $_
    $disks = $res.GetRelated("MSCluster_Disk")
    $disks | foreach {
        $_.GetRelated("MSCluster_DiskPartition") |
            select @{N="Name"; E={$res.Name}}, @{N="Status"; E={$res.State}}, Path, VolumeLabel, TotalSize, FreeSpace
    }
} | ft
The solution requires invoking a PowerShell script from C#:
1. Add a System.Management.Automation reference
This DLL enables access to the PowerShell hosting utilities. The packages available on NuGet seemed to be ahead of my target framework and were not being recognized, so I had to add the reference manually via References >> Add Reference >> Browse >> C:\Windows\assembly\GAC_MSIL\System.Management.Automation\1.0.0.0__31bf3856ad364e35
Using PowerShell, I can get disk information for all Clustered Disks with the command:
get-WmiObject win32_logicaldisk -Computername (ComputerName or IPAddress here)
2. Invoke this command in code
I added a helper method that checks whether the remote host has any Clustered Disks. If it does, the PowerShell script is used; if not, the normal WMI query is used:
public List<Disk> GetEnvironmentStatistics()
{
    List<Disk> diskinfo = new List<Disk>();
    var serverIP = Convert.ToString(System.Web.HttpContext.Current.Session["ServerIP"]);
    var clusterDisksStatus = CheckForClusteredDisks(serverIP);

    if (Helpers.HasClusteredDisks(clusterDisksStatus))
    {
        string getClusterSharedVolumesStatistics = "get-WmiObject win32_logicaldisk -Computername " + serverIP;
        PowerShell ps = PowerShell.Create();
        ps.AddScript(getClusterSharedVolumesStatistics);
        var results = ps.Invoke();
        foreach (var psobject in results)
        {
            if (psobject != null)
            {
                Disk clusteredDisk = new Disk();
                clusteredDisk.DiskName = Convert.ToString(psobject.Members["DeviceID"].Value);

                clusteredDisk.FreeSpace = Convert.ToDecimal(psobject.Members["FreeSpace"].Value);
                var formattedFreeSpace = Helpers.DiskSpaceInGigabytes(clusteredDisk.FreeSpace ?? 0);
                clusteredDisk.FreeSpace = Decimal.Truncate(formattedFreeSpace);

                clusteredDisk.TotalSpace = Convert.ToDecimal(psobject.Members["Size"].Value);
                var formattedTotalSpace = Helpers.DiskSpaceInGigabytes(clusteredDisk.TotalSpace ?? 0);
                clusteredDisk.TotalSpace = Decimal.Truncate(formattedTotalSpace);

                clusteredDisk.UsedSpace = clusteredDisk.TotalSpace - clusteredDisk.FreeSpace;
                clusteredDisk.VolumeName = Convert.ToString(psobject.Members["VolumeName"].Value);
                diskinfo.Add(clusteredDisk);
            }
        }
    }
    else
    {
        //Add a reference to System.Management to access these utilities
        ConnectionOptions options = new ConnectionOptions
        {
            Username = Convert.ToString(System.Web.HttpContext.Current.Session["Username"]),
            Password = Convert.ToString(System.Web.HttpContext.Current.Session["Password"]),
            Authority = Convert.ToString(System.Web.HttpContext.Current.Session["Authority"]),
        };

        //root - root of the tree, cimv2 - the namespace
        ManagementScope scope = new ManagementScope("\\\\" + serverIP + "\\root\\CIMV2", options);
        scope.Connect();

        SelectQuery query = new SelectQuery("Select * from Win32_LogicalDisk");
        ManagementObjectSearcher searcher = new ManagementObjectSearcher(scope, query);
        ManagementObjectCollection queryCollection = searcher.Get();
        try
        {
            foreach (ManagementObject mo in queryCollection)
            {
                Disk disk = new Disk();
                disk.DiskName = mo["Name"].ToString();
                disk.DeviceId = mo["DeviceID"].ToString();
                disk.SystemName = mo["SystemName"].ToString();

                disk.FreeSpace = Convert.ToDecimal(mo["FreeSpace"]);
                var formattedFreeSpace = Helpers.DiskSpaceInGigabytes(disk.FreeSpace ?? 0);
                disk.FreeSpace = Decimal.Truncate(formattedFreeSpace);

                disk.TotalSpace = Convert.ToDecimal(mo["Size"]);
                var formattedTotalSpace = Helpers.DiskSpaceInGigabytes(disk.TotalSpace ?? 0);
                disk.TotalSpace = Decimal.Truncate(formattedTotalSpace);

                disk.UsedSpace = disk.TotalSpace - disk.FreeSpace;
                var HDPercentageUsed = 100 - (100 * disk.FreeSpace / disk.TotalSpace);
                disk.PercentageUsed = Convert.ToInt32(HDPercentageUsed);

                diskinfo.Add(disk);
            }
        }
        catch (DivideByZeroException ex)
        {
            ExceptionLogger.SendErrorToText(ex);
        }
    }
    return diskinfo;
}
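The CheckForClusteredDisks method is not shown in the answer. A plausible sketch, assuming it connects with the caller's default credentials to the remote host's root\MSCluster namespace (which only exists on cluster nodes) and returns 1 when clustered physical-disk resources are found:
//Hypothetical implementation - the original is not shown.
//Assumes root\MSCluster is only present on cluster nodes and the caller has access.
public int CheckForClusteredDisks(string serverIP)
{
    try
    {
        var scope = new ManagementScope("\\\\" + serverIP + "\\root\\MSCluster");
        scope.Connect();
        var query = new SelectQuery("SELECT * FROM MSCluster_Resource WHERE Type = 'Physical Disk'");
        using (var searcher = new ManagementObjectSearcher(scope, query))
        {
            //1 = clustered disks present, 0 = none (matches Helpers.HasClusteredDisks below)
            return searcher.Get().Count > 0 ? 1 : 0;
        }
    }
    catch (ManagementException)
    {
        //Namespace not found - the host is not part of a cluster
        return 0;
    }
}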
And the helper method used for the check:
public static bool HasClusteredDisks(int status)
{
    const int hasClusteredDisks = 1;
    return status == hasClusteredDisks;
}
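As an alternative to shelling out to PowerShell, the MSCluster query from the question could also be run directly through System.Management against the root\MSCluster namespace. A rough sketch, assuming the connecting account has permissions on the cluster and using the property names from the PowerShell example:
//Sketch only - queries cluster disk partitions directly via WMI instead of PowerShell.
public List<Disk> GetClusteredDiskStatistics(string serverIP, ConnectionOptions options)
{
    var disks = new List<Disk>();
    var scope = new ManagementScope("\\\\" + serverIP + "\\root\\MSCluster", options);
    scope.Connect();

    var query = new SelectQuery("SELECT * FROM MSCluster_DiskPartition");
    using (var searcher = new ManagementObjectSearcher(scope, query))
    {
        foreach (ManagementObject partition in searcher.Get())
        {
            var disk = new Disk();
            disk.DiskName = Convert.ToString(partition["Path"]);
            disk.VolumeName = Convert.ToString(partition["VolumeLabel"]);
            //TotalSize and FreeSpace are the properties used in the PowerShell example;
            //their units may differ from Win32_LogicalDisk, so verify before formatting.
            disk.TotalSpace = Convert.ToDecimal(partition["TotalSize"]);
            disk.FreeSpace = Convert.ToDecimal(partition["FreeSpace"]);
            disk.UsedSpace = disk.TotalSpace - disk.FreeSpace;
            disks.Add(disk);
        }
    }
    return disks;
}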

Create Folder and Update Title and Custom Field in SharePoint Online

I am trying to create a folder in SharePoint Online, based on some tutorials. The problem is that creating the folder does not set the "Title" column value.
I want to create the folder and also update the "Title" column.
Here is the code for creating the folder:
public string CreateDocumentLibrary(string siteUrl, string relativePath)
{
    //bool responseResult = false;
    string resultUpdate = string.Empty;
    string responseResult = string.Empty;

    if (siteUrl != _siteUrl)
    {
        _siteUrl = siteUrl;
        Uri spSite = new Uri(siteUrl);
        _spo = SpoAuthUtility.Create(spSite, _username, WebUtility.HtmlEncode(_password), false);
    }

    string odataQuery = "_api/web/folders";
    byte[] content = ASCIIEncoding.ASCII.GetBytes(@"{ '__metadata': { 'type': 'SP.Folder' }, 'ServerRelativeUrl': '" + relativePath + "'}");
    string digest = _spo.GetRequestDigest();
    Uri url = new Uri(String.Format("{0}/{1}", _spo.SiteUrl, odataQuery));

    // Set X-RequestDigest
    var webRequest = (HttpWebRequest)HttpWebRequest.Create(url);
    webRequest.Headers.Add("X-RequestDigest", digest);

    // Send a JSON OData request to the SPO REST services to create the folder.
    byte[] result = HttpHelper.SendODataJsonRequest(
        url,
        "POST", // creating data through the REST API uses the POST verb
        content,
        webRequest,
        _spo // pass in the helper object that allows us to make authenticated calls to SPO rest services
    );

    string response = Encoding.UTF8.GetString(result, 0, result.Length);
    if (response != null)
    {
        //responseResult = true;
        responseResult = response;
    }
    return responseResult;
}
I have already tried to use CAML, but the problem is that the SharePoint list is large, so I got a "prohibited access" error related to the list view threshold.
Please help.
Refer to the code below to update the folder name and Title.
function renameFolder(webUrl, listTitle, itemId, name)
{
    var itemUrl = webUrl + "/_api/Web/Lists/GetByTitle('" + listTitle + "')/Items(" + itemId + ")";
    var itemPayload = {};
    itemPayload['__metadata'] = {'type': getItemTypeForListName(listTitle)};
    itemPayload['Title'] = name;
    itemPayload['FileLeafRef'] = name;

    var additionalHeaders = {};
    additionalHeaders["X-HTTP-Method"] = "MERGE";
    additionalHeaders["If-Match"] = "*";

    return executeJson(itemUrl, "POST", additionalHeaders, itemPayload);
}

function getItemTypeForListName(name) {
    return "SP.Data." + name.charAt(0).toUpperCase() + name.slice(1) + "ListItem";
}
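Since the question is in C#, here is a rough sketch of the same MERGE update issued from server-side code, reusing the question's SpoAuthUtility (_spo) and HttpHelper helpers. The list title, item id and entity type name are assumptions to adapt for your list:
// Sketch only - mirrors the JavaScript MERGE call using the question's helpers.
// Assumes _spo is already authenticated and HttpHelper.SendODataJsonRequest behaves
// as in CreateDocumentLibrary above (including the JSON content type).
public void UpdateFolderTitle(string listTitle, int itemId, string title)
{
    string odataQuery = string.Format("_api/web/lists/GetByTitle('{0}')/items({1})", listTitle, itemId);
    Uri url = new Uri(String.Format("{0}/{1}", _spo.SiteUrl, odataQuery));

    // The entity type is usually "SP.Data.<ListName>ListItem"; verify it for your list.
    string payload = "{ '__metadata': { 'type': 'SP.Data." + listTitle + "ListItem' }, 'Title': '" + title + "' }";
    byte[] content = Encoding.UTF8.GetBytes(payload);

    var webRequest = (HttpWebRequest)HttpWebRequest.Create(url);
    webRequest.Headers.Add("X-RequestDigest", _spo.GetRequestDigest());
    webRequest.Headers.Add("X-HTTP-Method", "MERGE"); // update instead of create
    webRequest.Headers.Add("If-Match", "*");          // overwrite regardless of version

    HttpHelper.SendODataJsonRequest(url, "POST", content, webRequest, _spo);
}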

How do you add diagnostics to an Azure cloud service using just the Microsoft.WindowsAzure.Management libraries?

I don't want PowerShell involved at all. I can create the cloud service just fine, and I have my diagnostics config file in the root of the worker role. How do you turn on the extension, though?
Found it out myself:
var etcs = cloudClient.HostedServices.ListAvailableExtensions();
var et = etcs.FirstOrDefault(p => p.Type == "PaaSDiagnostics");

cloudClient.HostedServices.AddExtension("agent1", new Microsoft.WindowsAzure.Management.Compute.Models.HostedServiceAddExtensionParameters()
{
    Type = et.Type,
    ProviderNamespace = et.ProviderNameSpace,
    Id = "testext",
    Version = et.Version,
    PublicConfiguration = File.ReadAllText(@"PubConfig.xml"),
    PrivateConfiguration = "<?xml version=\"1.0\" encoding=\"utf-8\"?><PrivateConfig xmlns=\"http://schemas.microsoft.com/ServiceHosting/2010/10/DiagnosticsConfiguration\"><StorageAccount name=\"store\" key=\"" + ks.SecondaryKey + "\"></StorageAccount></PrivateConfig>"
});

var id = cloudClient.Deployments.Create("agent1", Microsoft.WindowsAzure.Management.Compute.Models.DeploymentSlot.Production, new Microsoft.WindowsAzure.Management.Compute.Models.DeploymentCreateParameters()
{
    Name = "test",
    Configuration = File.ReadAllText(@"ServiceConfiguration.Cloud.cscfg"),
    PackageUri = blob.Uri,
    Label = "Test",
    StartDeployment = true,
    ExtensionConfiguration = new Microsoft.WindowsAzure.Management.Compute.Models.ExtensionConfiguration()
    {
        AllRoles = new[] { new Microsoft.WindowsAzure.Management.Compute.Models.ExtensionConfiguration.Extension()
        {
            Id = "testext",
            State = "Enable"
        }}
    }
});
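The snippet references cloudClient, ks and blob without showing how they are created. A minimal sketch of the missing setup, assuming certificate-based management credentials and that the .cspkg has already been uploaded to a blob (the subscription id, certificate and account names are placeholders):
// Sketch only - how the objects used above might be created.
// Assumes a management certificate registered on the subscription.
var creds = new CertificateCloudCredentials(
    "<subscription-id>",                                      // placeholder
    new X509Certificate2(@"management.pfx", "<pfx-password>")); // placeholder

var cloudClient = new ComputeManagementClient(creds);

// Storage key for the PrivateConfig above (ks.SecondaryKey).
var storageClient = new StorageManagementClient(creds);
var ks = storageClient.StorageAccounts.GetKeys("store");

// 'blob' is assumed to be a CloudBlockBlob to which the .cspkg was uploaded beforehand.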

NetSuite SuiteTalk API - Get Inventory Details

I'm using the SuiteTalk (API) service for NetSuite to retrieve a list of Assemblies. I need to load the InventoryDetails fields on the results to view the serial/lot numbers assigned to the items. This is the current code that I'm using, but those fields still come back as NULL in the results, although I can see the other fields for the AssemblyBuild object. How do I get the inventory details (serial/lot numbers) to return on a transaction search?
public static List<AssemblyBuildResult> Get()
{
    var listAssemblyBuilds = new List<AssemblyBuildResult>();
    var service = Service.Context();

    var ts = new TransactionSearch();
    var tsb = new TransactionSearchBasic();
    var sfType = new SearchEnumMultiSelectField
    {
        @operator = SearchEnumMultiSelectFieldOperator.anyOf,
        operatorSpecified = true,
        searchValue = new string[] { "_assemblyBuild" }
    };
    tsb.type = sfType;
    ts.basic = tsb;
    ts.inventoryDetailJoin = new InventoryDetailSearchBasic();

    // perform the search
    var response = service.search(ts);
    response.pageSizeSpecified = true;

    // Process response
    if (response.status.isSuccess)
    {
        // Process the records returned in the response
        // Get more records with pagination
        if (response.totalRecords > 0)
        {
            for (var x = 1; x <= response.totalPages; x++)
            {
                var records = response.recordList;
                foreach (var t in records)
                {
                    var ab = (AssemblyBuild) t;
                    listAssemblyBuilds.Add(GetAssemblyBuildsResult(ab));
                }
                if (response.pageIndex < response.totalPages)
                {
                    response = service.searchMoreWithId(response.searchId, x + 1);
                }
            }
        }
    }
    // Parse and return NetSuite WorkOrder into assembly WorkOrderResult list
    return listAssemblyBuilds;
}
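One detail worth highlighting in the solution that follows: by default SuiteTalk returns body fields only, which is commonly why joined detail data comes back empty. The accepted answer sets the search preferences on the service proxy before searching, e.g.:
// Ask NetSuite for full records instead of body fields only; without this,
// joined/detail fields tend to be omitted from search results.
service.searchPreferences = new SearchPreferences { bodyFieldsOnly = false };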
After much pain and suffering, I was able to solve this problem with the following code:
/// <summary>
/// Returns List of AssemblyBuilds from NetSuite
/// </summary>
/// <returns></returns>
public static List<AssemblyBuildResult> Get(string id = "", bool getDetails = false)
{
    // Object to populate and return results
    var listAssemblyBuilds = new List<AssemblyBuildResult>();

    // Initiate Service and SavedSearch (TransactionSearchAdvanced)
    var service = Service.Context();
    var tsa = new TransactionSearchAdvanced
    {
        savedSearchScriptId = "customsearch_web_assemblysearchmainlist"
    };

    // Filter by ID if specified
    if (id != "")
    {
        tsa.criteria = new TransactionSearch()
        {
            basic = new TransactionSearchBasic()
            {
                internalId = new SearchMultiSelectField
                {
                    @operator = SearchMultiSelectFieldOperator.anyOf,
                    operatorSpecified = true,
                    searchValue = new[] {
                        new RecordRef() {
                            type = RecordType.assemblyBuild,
                            typeSpecified = true,
                            internalId = id
                        }
                    }
                }
            }
        };
    }

    // Construct custom columns to return
    var tsr = new TransactionSearchRow();
    var tsrb = new TransactionSearchRowBasic();

    var orderIdCols = new SearchColumnSelectField[1];
    var orderIdCol = new SearchColumnSelectField();
    orderIdCols[0] = orderIdCol;
    tsrb.internalId = orderIdCols;

    var tranDateCols = new SearchColumnDateField[1];
    var tranDateCol = new SearchColumnDateField();
    tranDateCols[0] = tranDateCol;
    tsrb.tranDate = tranDateCols;

    var serialNumberCols = new SearchColumnStringField[1];
    var serialNumberCol = new SearchColumnStringField();
    serialNumberCols[0] = serialNumberCol;
    tsrb.serialNumbers = serialNumberCols;

    // Perform the Search
    tsr.basic = tsrb;
    tsa.columns = tsr;
    var response = service.search(tsa);

    // Process response
    if (response.status.isSuccess)
    {
        var searchRows = response.searchRowList;
        if (searchRows != null && searchRows.Length >= 1)
        {
            foreach (SearchRow t in searchRows)
            {
                var transactionRow = (TransactionSearchRow)t;
                listAssemblyBuilds.Add(GetAssemblyBuildsResult(transactionRow, getDetails));
            }
        }
    }
    // Parse and return NetSuite WorkOrder into assembly WorkOrderResult list
    return listAssemblyBuilds;
}
private static string GetAssemblyBuildLotNumbers(string id)
{
    var service = Service.Context();
    var serialNumbers = "";
    var tsa = new TransactionSearchAdvanced
    {
        savedSearchScriptId = "customsearch_web_assemblysearchlineitems"
    };

    service.searchPreferences = new SearchPreferences { bodyFieldsOnly = false };

    tsa.criteria = new TransactionSearch()
    {
        basic = new TransactionSearchBasic()
        {
            internalId = new SearchMultiSelectField
            {
                @operator = SearchMultiSelectFieldOperator.anyOf,
                operatorSpecified = true,
                searchValue = new[] {
                    new RecordRef() {
                        type = RecordType.assemblyBuild,
                        typeSpecified = true,
                        internalId = id
                    }
                }
            }
        }
    };

    // Construct custom columns to return
    var tsr = new TransactionSearchRow();
    var tsrb = new TransactionSearchRowBasic();

    var orderIdCols = new SearchColumnSelectField[1];
    var orderIdCol = new SearchColumnSelectField();
    orderIdCols[0] = orderIdCol;
    tsrb.internalId = orderIdCols;

    var serialNumberCols = new SearchColumnStringField[1];
    var serialNumberCol = new SearchColumnStringField();
    serialNumberCols[0] = serialNumberCol;
    tsrb.serialNumbers = serialNumberCols;

    tsr.basic = tsrb;
    tsa.columns = tsr;

    var response = service.search(tsa);
    if (response.status.isSuccess)
    {
        var searchRows = response.searchRowList;
        if (searchRows != null && searchRows.Length >= 1)
        {
            foreach (SearchRow t in searchRows)
            {
                var transactionRow = (TransactionSearchRow)t;
                if (transactionRow.basic.serialNumbers != null)
                {
                    return transactionRow.basic.serialNumbers[0].searchValue;
                }
            }
        }
    }
    return serialNumbers;
}
private static AssemblyBuildResult GetAssemblyBuildsResult(TransactionSearchRow tsr, bool getDetails)
{
    if (tsr != null)
    {
        var assemblyInfo = new AssemblyBuildResult
        {
            NetSuiteId = tsr.basic.internalId[0].searchValue.internalId,
            ManufacturedDate = tsr.basic.tranDate[0].searchValue,
            SerialNumbers = tsr.basic.serialNumbers[0].searchValue
        };

        // If selected, this will do additional NetSuite queries to get detailed data (slower)
        if (getDetails)
        {
            // Look up Lot Number
            assemblyInfo.LotNumber = GetAssemblyBuildLotNumbers(tsr.basic.internalId[0].searchValue.internalId);
        }
        return assemblyInfo;
    }
    return null;
}
What I learned about pulling data from NetSuite:
- Using Saved Searches is the best way to pull data that doesn't automatically come through in the API objects.
- It is barely supported.
- Don't specify an ID on the Saved Search; specify criteria in the TransactionSearch to get a single record.
- You need to specify which columns to actually pull down; NetSuite doesn't just send you the data from a Saved Search automatically.
- You cannot view data in a Saved Search that contains a Grouping.
- In the Saved Search, use the criteria Main Line = true/false to read data from the main record (top of the UI screen) versus the line items (bottom of the screen).
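For context, a short illustrative sketch of how the methods above might be called (the internal id value is a placeholder):
// Illustrative usage of the helpers above.
var allBuilds = Get();                                  // all assembly builds from the saved search
var oneBuild = Get(id: "12345", getDetails: true);      // "12345" is a placeholder internal id
foreach (var build in oneBuild)
{
    Console.WriteLine("{0} {1} {2}", build.NetSuiteId, build.ManufacturedDate, build.LotNumber);
}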

SharePoint: How to upload files with metadata, including Taxonomy fields, through web services

Being very new to SharePoint coding, I have been assigned the task of creating prototype code to upload a file and set the field values for that file that show up when opening the SharePoint page with the file.
This has to be done from a remote machine, not the SharePoint server itself, so using the server-side .NET objects for SharePoint is out of the question.
I quickly found out how to upload a file through the SharePoint web service Copy.asmx:
void UploadTestFile() {
    var file = @"C:\Temp\TestFile.doc";
    string destinationUrl = "http://mysharepointserver/Documents/"
        + Path.GetFileName(file);
    string[] destinationUrls = { destinationUrl };

    var CopyWS = new Copy.Copy();
    CopyWS.UseDefaultCredentials = true;
    CopyWS.Url = "http://mysharepointserver/_vti_bin/copy.asmx";

    CopyResult[] result;
    byte[] data = File.ReadAllBytes(file);

    FieldInformation mf1 = new FieldInformation {
        DisplayName = "title",
        InternalName = "title",
        Type = FieldType.Text,
        Value = "Dummy text"
    };
    FieldInformation mf2 = new FieldInformation {
        DisplayName = "MyTermSet",
        InternalName = "MyTermSet",
        Type = FieldType.Note,
        Value = "Test; Unit;"
    };

    CopyWS.CopyIntoItems(
        "+",
        destinationUrls,
        new FieldInformation[] { mf1, mf2 },
        data,
        out result);
}
This code easily uploads any file to the target site, but it only fills the "title" field. The MyTermSet field, to which I have already added three terms (Test, Unit and Page), will not update with the values "Test;" and "Unit;".
Being very new to SharePoint and not yet grasping all the basics, my googling tells me that updating "File", "Computed" or "Lookup" fields does not work with the CopyIntoItems method, and MyTermSet, being a Taxonomy field, is (if I am correct) a Lookup field.
So how do I get MyTermSet updated with the values "Test;" and "Unit;"?
I would really appreciate sample code for this. I have followed several hint-links but I am none the wiser; I have found no sample code on this at all.
Has anyone made a single method that wraps it all? Or another method that takes the destinationUrl from the file upload and updates the Term Set/Taxonomy field?
Puzzling together what I have found so far, I am now able to do what I wanted. But I would really like to be able to get the Taxonomy field GUIDs dynamically instead of having to set them explicitly myself:
void UploadTestFile(string FileName, string DocLib, Dictionary<string, string> Fields = null) {
    //Upload the file to the target Sharepoint doc lib
    string destinationUrl = DocLib + Path.GetFileName(FileName);
    string[] destinationUrls = { destinationUrl };
    var CopyWS = new Copy.Copy();
    CopyWS.UseDefaultCredentials = true;
    CopyWS.Url = new Uri(new Uri(DocLib), "/_vti_bin/copy.asmx").ToString();
    CopyResult[] result;
    var data = File.ReadAllBytes(FileName);
    CopyWS.CopyIntoItems(
        "+",
        destinationUrls,
        new FieldInformation[0],
        data,
        out result);
    if (Fields == null) return; //Done uploading

    //Get the ID and metadata information of the fields
    var list = new ListsWS.Lists();
    list.UseDefaultCredentials = true;
    var localpath = new Uri(DocLib).LocalPath.TrimEnd('/');
    var site = localpath.Substring(0, localpath.LastIndexOf("/")); //Get the site of the URL
    list.Url = new Uri(new Uri(DocLib), site + "/_vti_bin/lists.asmx").ToString(); //Lists on the right site

    FieldInformation[] fiOut;
    byte[] filedata;
    var get = CopyWS.GetItem(destinationUrl, out fiOut, out filedata);
    if (data.Length != filedata.Length) throw new Exception("Failed on uploading the document.");

    //Dictionary on name and display name
    var fieldInfos = fiOut.ToDictionary(x => x.InternalName, x => x);
    var fieldInfosByName = new Dictionary<string, FieldInformation>();
    foreach (var item in fiOut) {
        if (!fieldInfosByName.ContainsKey(item.DisplayName)) {
            fieldInfosByName.Add(item.DisplayName, item);
        }
    }

    //Update the document with fielddata - this one can be extended for more than Text and Note fields.
    if (!fieldInfos.ContainsKey("ID")) throw new Exception("Could not get the ID of the upload.");
    var ID = fieldInfos["ID"].Value; //The ID of the document we just uploaded

    XDocument doc = new XDocument(); //Creating XML with updates we need
    doc.Add(XElement.Parse("<Batch OnError='Continue' ListVersion='1' ViewName=''/>"));
    doc.Element("Batch").Add(XElement.Parse("<Method ID='1' Cmd='Update'/>"));
    var methNode = doc.Element("Batch").Element("Method");

    //Add ID
    var fNode = new XElement("Field");
    fNode.SetAttributeValue("Name", "ID");
    fNode.Value = ID;
    methNode.Add(fNode);

    //Loop each field and add each Field
    foreach (var field in Fields) {
        //Get the field object from name or display name
        FieldInformation fi = null;
        if (fieldInfos.ContainsKey(field.Key)) {
            fi = fieldInfos[field.Key];
        }
        else if (fieldInfosByName.ContainsKey(field.Key)) {
            fi = fieldInfosByName[field.Key];
        }
        if (fi != null) {
            //Fix for taxonomy fields - find the correct field to update
            if (fi.Type == FieldType.Invalid && fieldInfos.ContainsKey(field.Key + "TaxHTField0")) {
                fi = fieldInfos[field.Key + "TaxHTField0"];
            }
            else if (fi.Type == FieldType.Invalid && fieldInfosByName.ContainsKey(field.Key + "_0")) {
                fi = fieldInfosByName[field.Key + "_0"];
            }
            fNode = new XElement("Field");
            fNode.SetAttributeValue("Name", fi.InternalName);
            switch (fi.Type) {
                case FieldType.Lookup:
                    fNode.Value = "-1;#" + field.Value;
                    break;
                case FieldType.Choice:
                case FieldType.Text:
                    fNode.Value = field.Value;
                    break;
                case FieldType.Note: //TermSet's
                    var termsetval = "";
                    var terms = field.Value.Split(';');
                    foreach (var term in terms) {
                        termsetval += "-1;#" + term + ";";
                    }
                    fNode.Value = termsetval.TrimEnd(';');
                    break;
                default:
                    //..Unhandled type. Implement if needed.
                    break;
            }
            methNode.Add(fNode); //Adds the field to the XML
        }
        else {
            //Field does not exist. No use in uploading.
        }
    }

    //Gets the listname (not sure if it is the full path or just the folder name)
    var listname = new Uri(DocLib).LocalPath;
    var listcol = list.GetListCollection(); //Get the lists of the site
    listname = (from XmlNode x
                in listcol.ChildNodes
                where x.Attributes["DefaultViewUrl"].InnerText.StartsWith(listname, StringComparison.InvariantCultureIgnoreCase)
                select x.Attributes["ID"].InnerText).DefaultIfEmpty(listname).First();

    //Convert the XML to XmlNode and upload the data
    var xmldoc = new XmlDocument();
    xmldoc.LoadXml(doc.ToString());
    list.UpdateListItems(listname, xmldoc.DocumentElement);
}
Then I call it like this:
var fields = new Dictionary<string, string>();
fields.Add("Test", "Dummy Text");
fields.Add("MrTermSet", "Page|a4ba29c1-3ed5-47e9-b43f-36bc59c0ea5c;Unit|4237dfbe-22a2-4d90-bd08-09f4a8dd0ada");
UploadTestFile(@"C:\Temp\TestFile2.doc", @"http://mysharepointserver/Documents/", fields);
I would however prefer to call it like this:
var fields = new Dictionary<string, string>();
fields.Add("Test", "Dummy Text");
fields.Add("MrTermSet", "Page;Unit");
UploadTestFile(@"C:\Temp\TestFile2.doc", @"http://mysharepointserver/Documents/", fields);
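One possible way to resolve the term GUIDs dynamically, so the second calling style could work, is the taxonomy web service at /_vti_bin/TaxonomyClientService.asmx. This is only a sketch under that assumption; the proxy class and enum names, and the exact GetTermsByLabel signature, depend on the generated web reference and should be treated as assumptions:
// Sketch only - looks up terms by label via the taxonomy web service so that
// "Page;Unit" style input could be translated to "Page|<guid>;Unit|<guid>".
// "TaxonomyWS" is an assumed web reference name; verify the proxy members it generates.
string GetTermXmlByLabel(string siteUrl, string label) {
    var taxonomyWS = new TaxonomyWS.Taxonomywebservice();
    taxonomyWS.UseDefaultCredentials = true;
    taxonomyWS.Url = new Uri(new Uri(siteUrl), "/_vti_bin/TaxonomyClientService.asmx").ToString();

    // Returns raw XML describing the matching terms, including their GUIDs; the attribute
    // names in the response are abbreviated, so inspect it once to build a label-to-GUID map.
    return taxonomyWS.GetTermsByLabel(
        label,                                 // the term label, e.g. "Page"
        1033,                                  // LCID
        TaxonomyWS.StringMatchOption.ExactMatch,
        10,                                    // max results
        null,                                  // no term id filter
        false);                                // do not add the term if it is missing
}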
