Changing connection data in a PowerPivot file - Excel

So I've run into an interesting problem. I need to specify a custom WHERE clause in a PowerPivot query, and I have to change it based on external conditions. I would like to edit the file and save a copy. Any idea how to do this? I opened the PowerPivot file as binary, but it appears encrypted...

You can go to Existing Connections and make the update there. If you open the same data source (SQL, SSRS, or anything else) again instead of changing the parameters on the existing connection, it will hurt performance, because PowerPivot will treat those as separate connections.

The solution was to open the Excel workbook as a Zip (using the Package class).
If you are looking to modify the queries, you can. The file at /xl/customData/item1.data is a backup of the PowerPivot database (which is just an Analysis Services database running in VertiPaq mode) used to process queries. You need to restore that file to an SSAS instance running in VertiPaq mode. Once that's done, script the queries as an ALTER script, modify the scripts (in this case, replacing @projectId with my actual project ID), then run them against the database. Once all this is done, back the database up and put the backup back into the Excel workbook. That modifies the queries.
The connection data is stored in the /xl/connections.xml file. Open that up, modify it, and replace it. Repack it all up again, and you have a workbook again.
Here's the code I wrote. You will have to call the methods as you need (see the usage sketch after the listing). The basic idea is there, though...
const string DBName = "Testing";
const string OriginalBackupPath = @"\\MyLocation\BKUP.abf";
const string ModifiedBackupPath = @"\\MyLocation\BKUPAfter.abf";
const string ServerPath = @"machineName\powerpivot";
private static readonly Server srv = new Server();
private static readonly Scripter scripter = new Scripter();
private static Database db;
private static byte[] GetPackagePartContents(string packagePath, string partPath)
{
var pack = Package.Open(packagePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
var part = pack.GetPart(new Uri(partPath, UriKind.Relative));
var stream = part.GetStream();
var b = new byte[stream.Length];
stream.Read(b, 0, b.Length);
stream.Flush();
stream.Close();
pack.Flush();
pack.Close();
return b;
}
private static void WritePackagePartContents(string packagePath, string partPath, byte[] contents)
{
var uri = new Uri(partPath, UriKind.Relative);
var pack = Package.Open(packagePath, FileMode.OpenOrCreate, FileAccess.ReadWrite);
var part = pack.GetPart(uri);
var type = part.ContentType;
pack.DeletePart(uri);
pack.CreatePart(uri, type);
part = pack.GetPart(uri);
var stream = part.GetStream();
stream.Write(contents, 0, contents.Length);
stream.Flush();
stream.Close();
pack.Flush();
pack.Close();
}
private static void RestoreBackup(string server, string dbName, string backupPath)
{
srv.Connect(server);
if (srv.Databases.FindByName(dbName) != null) { srv.Databases.FindByName(dbName).Drop(); srv.Update(); }
srv.Restore(backupPath, dbName, true);
srv.Update();
srv.Refresh();
}
private static void WriteContentsToFile(byte[] contents, string filePath)
{
var fileStream = File.Open(filePath, FileMode.OpenOrCreate, FileAccess.Write);
fileStream.Write(contents, 0, contents.Length);
fileStream.Flush();
fileStream.Close();
}
private static byte[] ReadContentsFromFile(string filePath)
{
var fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read);
var b = new byte[fileStream.Length];
fileStream.Read(b, 0, b.Length);
fileStream.Flush();
fileStream.Close();
return b;
}
private static XDocument GetAlterScript(MajorObject obj)
{
var stream = new MemoryStream();
var streamWriter = XmlWriter.Create(stream);
scripter.ScriptAlter(new[] { obj }, streamWriter, false);
streamWriter.Flush();
streamWriter.Close();
stream.Flush();
stream.Position = 0;
var b = new byte[stream.Length];
stream.Read(b, 0, b.Length);
var alterString = new string(Encoding.UTF8.GetString(b).Where(c => c != '\uFEFF').ToArray()); // strip the UTF-8 BOM that XmlWriter emits
var alter = XDocument.Parse(alterString);
stream.Close();
return alter;
}
private static void ExecuteScript(string script)
{
srv.Execute(script);
srv.Update();
db.Process();
db.Refresh();
}
private static void ProcessPowerpointQueries(string bookUrl, string projectId)
{
byte[] b = GetPackagePartContents(bookUrl, "/xl/customData/item1.data");
WriteContentsToFile(b, OriginalBackupPath);
RestoreBackup(ServerPath, DBName, OriginalBackupPath);
db = srv.Databases.GetByName(DBName); // assign the static field so ExecuteScript can use it
var databaseView = db.DataSourceViews.FindByName("Sandbox");
var databaseViewAlter = GetAlterScript(databaseView);
var cube = db.Cubes.FindByName("Sandbox");
var measureGroup = cube.MeasureGroups.FindByName("Query");
var partition = measureGroup.Partitions.FindByName("Query");
var partitionAlter = GetAlterScript(partition);
var regex = new Regex(@"\s@projectid=\w*[ ,]");
var newDatabaseViewAlter = databaseViewAlter.ToString().Replace(regex.Match(databaseViewAlter.ToString()).Value.Trim(',', ' '), @"@projectid=" + projectId);
ExecuteScript(newDatabaseViewAlter);
var newPartitionAlter = partitionAlter.ToString().Replace(regex.Match(partitionAlter.ToString()).Value.Trim(',', ' '), @"@projectid=" + projectId);
ExecuteScript(newPartitionAlter);
db.Backup(ModifiedBackupPath, true);
WritePackagePartContents(bookUrl, @"/xl/customData/item1.data", ReadContentsFromFile(ModifiedBackupPath));
db.Drop();
srv.Disconnect();
}
private static void ProcessWorkbookLinks(string bookUrl, string newCoreUrl)
{
var connectionsFile = GetPackagePartContents(bookUrl, @"/xl/connections.xml");
var connectionsXml = Encoding.UTF8.GetString(connectionsFile);
connectionsXml = connectionsXml.Replace(
new Regex(@"Data Source=\S*;").Match(connectionsXml).Value.Trim(';'), @"Data Source=" + newCoreUrl);
// Encode as UTF-8; converting char-by-char to bytes would corrupt any non-ASCII characters.
WritePackagePartContents(bookUrl, @"/xl/connections.xml", Encoding.UTF8.GetBytes(connectionsXml.Replace(@"https://server/site/", newCoreUrl)));
}
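For example, a minimal driver for these two methods might look like this (the workbook path, project id, and new data-source URL are placeholders for your own values):
static void Main()
{
    // Hypothetical inputs; the workbook must not be open in Excel while it is repacked.
    const string workbookPath = @"C:\Reports\MyWorkbook.xlsx";

    // Rewrite the embedded PowerPivot queries, then repoint the workbook's connections.
    ProcessPowerpointQueries(workbookPath, "12345");
    ProcessWorkbookLinks(workbookPath, @"https://newserver/newsite/");
}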

Related

Azure SDK with SSIS

I am looking at different options to connect to Azure Data Lake Storage from an SSIS 2019 script task, and one of the options is the Azure .NET SDK. But I am having a hard time using it in the script task. I'm trying to make it work with the code below, but no luck yet.
C# Code:
static string path = null;
static ScriptMain()
{
AppDomain.CurrentDomain.AssemblyResolve += new ResolveEventHandler(CurrentDomain_AssemblyResolve);
}
static System.Reflection.Assembly CurrentDomain_AssemblyResolve(object sender, ResolveEventArgs args)
{
if (args.Name.Contains("dllname"))
{
return System.Reflection.Assembly.LoadFile(System.IO.Path.Combine(path, "dllname.dll"));
}
return null;
}
Please guide me on connecting to Azure via an SSIS script task.
C# Code:
static async Task Main(string[] args)
{
//Connect to Azure Storage Account
var applicationId = "applicationId";
var secretKey = "secretKey";
var tenantId = "tenantId";
var storageAccountName = "myStorageAccountName";
var filesystem = "myFilesystemName";
var mypath = "my/AzureFolder/Path";
var client = DLStorageManagementClient.CreateClient(applicationId, secretKey, tenantId, storageAccountName);
var isFileSystemCreated = await client.CreateFilesystemAsync(filesystem);
var isDirectoryCreated = await client.CreateDirectoryAsync(filesystem, mypath);
string tmpFile = Path.GetTempFileName();
string fileName = HttpUtility.UrlEncode(Path.GetFileName(tmpFile));
File.WriteAllText(tmpFile, $"this is sample file content for {tmpFile}");
var isFileCreated = await client.CreateFileAsync(filesystem, mypath, fileName, new FileStream(tmpFile, FileMode.Open, FileAccess.Read));
var stream = new MemoryStream();
var isFileDownloaded = await client.DownloadFileAsync(filesystem, $"{mypath}/{fileName}", stream);
if (isFileDownloaded.IsSuccessStatusCode)
{
var contentString = UTF8Encoding.UTF8.GetString(stream.ToArray());
Console.WriteLine(contentString);
}
var isFileDeleted = await client.DeleteFileOrDirectoryAsync(filesystem, mypath, true);
var isFileSystemDeleted = await client.DeleteFilesystemAsync(filesystem);
}
Error:
Error CS1983 The return type of an async method must be void, Task, Task<T>, a task-like type, IAsyncEnumerable<T>, or IAsyncEnumerator<T>
Error CS0161 'ScriptMain.Main(string[])': not all code paths return a value
The Azure Feature Pack for SSIS is intended to let you do this without using .NET code. And remember, you can always read and write to ADLS Gen2 using the Blob Storage APIs.
For your script task to work, you would need to download and extract the NuGet packages to a location the SSIS package can read in the AssemblyResolve event. You can do this right from your code if you want, but it's really a hack, e.g.:
using System;
using System.Net;
using System.IO.Compression;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Collections.Generic;
/// <summary>
/// ScriptMain is the entry point class of the script. Do not change the name, attributes,
/// or parent of this class.
/// </summary>
[Microsoft.SqlServer.Dts.Tasks.ScriptTask.SSISScriptTaskEntryPointAttribute]
public partial class ScriptMain : Microsoft.SqlServer.Dts.Tasks.ScriptTask.VSTARTScriptObjectModelBase
{
static WebClient wc = new WebClient();
class NuGetEntry
{
public string AssemblyName { get; set; }
public string PackageUri { get; set; }
public string dllEntryPath { get; set; }
}
static ScriptMain()
{
var nugetPackageList = new List<NuGetEntry>()
{
new NuGetEntry()
{
AssemblyName= "Microsoft.Azure.Storage.Blob, Version=11.1.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35",
PackageUri= @"https://www.nuget.org/api/v2/package/Microsoft.Azure.Storage.Blob/11.1.3",
dllEntryPath= @"lib/net452/Microsoft.Azure.Storage.Blob.dll"
},
new NuGetEntry()
{
AssemblyName= "Microsoft.Azure.Storage.Common, Version=11.1.3.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35",
PackageUri= @"https://www.nuget.org/api/v2/package/Microsoft.Azure.Storage.Common/11.1.3",
dllEntryPath= @"lib/net452/Microsoft.Azure.Storage.Common.dll"
}
};
var nugetPackages = nugetPackageList.ToDictionary(e => e.AssemblyName, e => e);
AppDomain.CurrentDomain.AssemblyResolve += (s, a) =>
{
if (nugetPackages.ContainsKey(a.Name))
{
var pe = nugetPackages[a.Name];
var dllName = Path.GetFileName(pe.dllEntryPath);
var localFileName = Path.Combine(Path.GetTempPath(), dllName);
if (File.Exists(localFileName))
{
var asm = Assembly.LoadFile(localFileName);
return asm;
}
using (var pkg = wc.OpenRead(pe.PackageUri))
{
using (var zip = new ZipArchive(pkg))
{
//var entries = zip.Entries;
var dllStream = zip.GetEntry(pe.dllEntryPath).Open();
using (var fs = File.OpenWrite(localFileName))
{
dllStream.CopyTo(fs);
}
var asm = Assembly.LoadFile(localFileName);
return asm;
}
}
}
return null;
};
}
}
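As for the compiler errors quoted above: the SSIS script task entry point must remain public void Main(), so it cannot be marked async or return a Task. A common workaround (a sketch, not part of the answer above) is to move the async calls into a helper and block on it:
public void Main()
{
    try
    {
        // SSIS requires a synchronous void Main(), so block on the async helper here.
        DoWorkAsync().GetAwaiter().GetResult();
        Dts.TaskResult = (int)ScriptResults.Success;
    }
    catch (Exception)
    {
        Dts.TaskResult = (int)ScriptResults.Failure;
    }
}

// Hypothetical helper holding the async ADLS calls from the question.
private static async Task DoWorkAsync()
{
    // ... CreateFilesystemAsync, CreateFileAsync, etc. go here ...
}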

Send DataSet data email via attachment Excel File xls ( Not Creating Excel File ) C#

I want to send DataSet data as an Excel file attachment in an email in C#, but I don't want to create the Excel file physically. It should be possible with a MemoryStream, but I couldn't get it working.
Another problem: I want to set the Excel file's encoding, because the data may contain Russian or Turkish special characters.
Please help me...
Here is my sample code...
var response = HttpContext.Response;
response.Clear();
response.Charset = "utf-8";
response.ContentEncoding = System.Text.Encoding.Default;
GridView excelGridView = new GridView();
excelGridView.DataSource = InfoDataSet;
excelGridView.DataBind();
var excelStringWriter = new StringWriter();
HtmlTextWriter htw = new HtmlTextWriter(excelStringWriter);
excelGridView.RenderControl(htw);
byte[] ExcelData = emailEncoding.GetBytes(excelStringWriter.ToString());
MemoryStream ms = new MemoryStream(ExcelData);
mailMessage.Attachments.Add(new Attachment(ms, excelFileName, "application/ms-excel"));
Here is another one, simple and easy, with an Excel attachment:
public string SendMail(string LastId)
{
SqlConnection con = new SqlConnection(ConfigurationManager.ConnectionStrings["con"].ConnectionString);
SqlCommand cmd = new SqlCommand("sp_GetMailData", con);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@LastID", LastId);
con.Open();
string result = "0";
string temptext = "";
SqlDataAdapter da = new SqlDataAdapter(cmd);
DataTable dt=new DataTable();
da.Fill(dt);
//ExportToSpreadsheet(dt,"My sheet");
GridView gv = new GridView();
gv.DataSource = dt;
gv.DataBind();
AttachandSend(gv);
con.Close();
return result.ToString();
}
public void AttachandSend(GridView gv)
{
StringWriter stw = new StringWriter();
HtmlTextWriter hw = new HtmlTextWriter(stw);
gv.RenderControl(hw);
System.Text.Encoding Enc = System.Text.Encoding.ASCII;
byte[] mBArray = Enc.GetBytes(stw.ToString());
System.IO.MemoryStream mAtt = new System.IO.MemoryStream(mBArray, false);
System.Net.Mail.MailMessage mailMessage = new System.Net.Mail.MailMessage();
MailAddress address = new MailAddress("xxxxxxxxxxxxx", "Admin");
mailMessage.Attachments.Add(new Attachment(mAtt, "sales.xls"));
mailMessage.Body = "Hi PFA";
mailMessage.From = address;
mailMessage.To.Add("xxxxxxxxxxxx");
mailMessage.Subject = "xxxxxxxxxxxxxx";
mailMessage.IsBodyHtml = true;
var smtp = new SmtpClient();
smtp.Send(mailMessage);
}
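One caveat on the snippet above: Encoding.ASCII will mangle the Russian or Turkish characters the question asks about. A sketch of an encoding-safe variant for building the attachment (UTF-8, with the charset declared on the content type):
// Encode the rendered HTML as UTF-8 so non-ASCII characters survive.
byte[] bytes = System.Text.Encoding.UTF8.GetBytes(stw.ToString());
var attachmentStream = new System.IO.MemoryStream(bytes, false);

// Declare the charset so the mail client and Excel decode it correctly.
var contentType = new System.Net.Mime.ContentType("application/ms-excel") { CharSet = "utf-8" };
mailMessage.Attachments.Add(new Attachment(attachmentStream, contentType) { Name = "sales.xls" });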
Here is your solution
private static Stream DataTableToStream(DataTable table)
{
const string semiColon = ";";
var ms = new MemoryStream();
var sw = new StreamWriter(ms);
foreach (DataColumn column in table.Columns)
{
sw.Write(column.ColumnName);
sw.Write(semiColon);
}
sw.Write(Environment.NewLine);
foreach (DataRow row in table.Rows)
{
for (int i = 0; i < table.Columns.Count; i++)
{
sw.Write(row[i].ToString().Replace(semiColon, string.Empty));
sw.Write(semiColon);
}
sw.Write(Environment.NewLine);
}
sw.Flush(); // push buffered text into the MemoryStream before returning it
return ms;
}
private static MailMessage CreateMail(string from,
string to,
string subject,
string body,
string attname,
Stream tableStream)
{
// using System.Net.Mail
var mailMsg = new MailMessage(from, to, subject, body);
tableStream.Position = 0;
mailMsg.Attachments.Add(
new Attachment(tableStream, attname, CsvContentType));
return mailMsg;
}
private const string CsvContentType = "application/ms-excel";
private static void ExportToSpreadsheetInternal(Stream tableStream, string name)
{
HttpContext context = HttpContext.Current;
context.Response.Clear();
context.Response.ContentType = CsvContentType;
context.Response.AppendHeader(
"Content-Disposition"
, "attachment; filename=" + name + ".xls");
tableStream.Position = 0;
tableStream.CopyTo(context.Response.OutputStream);
context.Response.End();
}
public static void ExportToSpreadsheet(DataTable table, string name)
{
var stream = DataTableToStream(table);
var mailMsg = CreateMail("from@ddd.com",
"to@ddd.com",
"spread",
"the spread",
name,
stream);
//ExportToSpreadsheetInternal(stream, name);
// send the mailMsg with SmtpClient (config in your web.config)
var smtp = new SmtpClient();
smtp.Send(mailMsg);
}
Call this method
ExportToSpreadsheet(DataTable table, string name)

How to save Rotativa PDF on server

I am using Rotativa to generate PDFs in my MVC application. How can I save the Rotativa PDF? I need to save the document on the server after all the processing is completed.
Code below:
public ActionResult PRVRequestPdf(string refnum,string emid)
{
var prv = functions.getprvrequest(refnum, emid);
return View(prv);
}
public ActionResult PDFPRVRequest()
{
var prv = Session["PRV"] as PRVRequestModel;
byte[] pdfByteArray = Rotativa.WkhtmltopdfDriver.ConvertHtml("Rotativa", "Approver", "PRVRequestPdf");
return new Rotativa.ViewAsPdf("PRVRequestPdf", new { refnum = prv.rheader.request.Referenceno });
}
You can give this a try
var actionResult = new ActionAsPdf("PRVRequestPdf", new { refnum = prv.rheader.request.Referenceno, emid = "Whatever this is" });
var byteArray = actionResult.BuildPdf(ControllerContext);
var fileStream = new FileStream(fullPath, FileMode.Create, FileAccess.Write);
fileStream.Write(byteArray, 0, byteArray.Length);
fileStream.Close();
If that doesn't do the trick, then you can follow the answers here.
Just make sure, if you do it this way, not to have PRVRequestPdf return a PDF view, but rather a normal view like you have above (I only mention it because I managed to fall foul of that myself, causing lots of fun).
Another useful answer:
I found the solution here
var actionPDF = new Rotativa.ActionAsPdf("YOUR_ACTION_Method", new { id = ID, lang = strLang }) // some route values
{
//FileName = "TestView.pdf",
PageSize = Rotativa.Options.Size.A4,
PageOrientation = Rotativa.Options.Orientation.Landscape,
PageMargins = { Left = 1, Right = 1 }
};
byte[] applicationPDFData = actionPDF.BuildPdf(ControllerContext);
This is the original thread
You can achieve this with ViewAsPdf.
[HttpGet]
public ActionResult SaveAsPdf(string refnum, string emid)
{
try
{
var prv = functions.getprvrequest(refnum, emid);
ViewAsPdf pdf = new Rotativa.ViewAsPdf("PRVRequestPdf", prv)
{
FileName = "Test.pdf",
CustomSwitches = "--page-offset 0 --footer-center [page] --footer-font-size 8"
};
byte[] pdfData = pdf.BuildFile(ControllerContext);
string fullPath = @"\\server\network\path\pdfs\" + pdf.FileName;
using (var fileStream = new FileStream(fullPath, FileMode.Create, FileAccess.Write))
{
fileStream.Write(pdfData, 0, pdfData.Length);
}
return Json(new { isSuccessful = true }, JsonRequestBehavior.AllowGet);
}
catch (Exception ex)
{
//TODO: ADD LOGGING
return Json(new { isSuccessful = false, error = "Uh oh!" }, JsonRequestBehavior.AllowGet);
//throw;
}
}
You can simply try this:
var fileName = string.Format("my_file_{0}.pdf", id);
var path = Server.MapPath("~/App_Data/" + fileName);
System.IO.File.WriteAllBytes(path, pdfByteArray );
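Putting the pieces above together, a minimal action that builds the PDF in memory and saves it server-side might look like this (a sketch; the file-name scheme is arbitrary):
public ActionResult SavePdf(string refnum, string emid)
{
    var prv = functions.getprvrequest(refnum, emid);

    // Build the PDF bytes without streaming them to the browser.
    var pdf = new Rotativa.ViewAsPdf("PRVRequestPdf", prv);
    byte[] pdfByteArray = pdf.BuildFile(ControllerContext);

    // Persist under App_Data (any server-writable folder works).
    var path = Server.MapPath("~/App_Data/" + string.Format("prv_{0}.pdf", refnum));
    System.IO.File.WriteAllBytes(path, pdfByteArray);

    return View(prv);
}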

With OrmLite, is there a way to automatically update table schema when my POCO is modified?

Can OrmLite recognize differences between my POCO and my schema and automatically add (or remove) columns as necessary to force the schema to remain in sync with my POCO?
If this ability doesn't exist, is there a way for me to query the db for the table schema, so that I can manually perform the syncing? I found this, but I'm using the version of OrmLite that installs with ServiceStack, and for the life of me I cannot find a namespace that has the TableInfo classes.
I created an extension method to automatically add missing columns to my tables. Been working great so far. Caveat: the code for getting the column names is SQL Server specific.
namespace System.Data
{
public static class IDbConnectionExtensions
{
private static List<string> GetColumnNames(IDbConnection db, string tableName)
{
var columns = new List<string>();
using (var cmd = db.CreateCommand())
{
cmd.CommandText = "exec sp_columns " + tableName;
var reader = cmd.ExecuteReader();
while (reader.Read())
{
var ordinal = reader.GetOrdinal("COLUMN_NAME");
columns.Add(reader.GetString(ordinal));
}
reader.Close();
}
return columns;
}
public static void AlterTable<T>(this IDbConnection db) where T : new()
{
var model = ModelDefinition<T>.Definition;
// just create the table if it doesn't already exist
if (db.TableExists(model.ModelName) == false)
{
db.CreateTable<T>(overwrite: false);
return;
}
// find each of the missing fields
var columns = GetColumnNames(db, model.ModelName);
var missing = ModelDefinition<T>.Definition.FieldDefinitions
.Where(field => columns.Contains(field.FieldName) == false)
.ToList();
// add a new column for each missing field
foreach (var field in missing)
{
var alterSql = string.Format("ALTER TABLE {0} ADD {1} {2}",
model.ModelName,
field.FieldName,
db.GetDialectProvider().GetColumnTypeDefinition(field.FieldType)
);
Console.WriteLine(alterSql);
db.ExecuteSql(alterSql);
}
}
}
}
No, there is no current support for auto-migration of RDBMS schemas vs. POCOs in ServiceStack's OrmLite.
There are currently a few threads being discussed in OrmLite's issues that explore the different ways to add this.
Here is a slightly modified version of the code from cornelha, adapted to work with PostgreSQL. I removed this fragment
//private static List<string> GetColumnNames(object poco)
//{
// var list = new List<string>();
// foreach (var prop in poco.GetType().GetProperties())
// {
// list.Add(prop.Name);
// }
// return list;
//}
and used IOrmLiteDialectProvider.NamingStrategy.GetTableName and IOrmLiteDialectProvider.NamingStrategy.GetColumnName methods to convert table and column names from PascalNotation to this_kind_of_notation used by OrmLite when creating tables in PostgreSQL.
public static class IDbConnectionExtensions
{
private static List<string> GetColumnNames(IDbConnection db, string tableName, IOrmLiteDialectProvider provider)
{
var columns = new List<string>();
using (var cmd = db.CreateCommand())
{
cmd.CommandText = getCommandText(tableName, provider);
var tbl = new DataTable();
tbl.Load(cmd.ExecuteReader());
for (int i = 0; i < tbl.Columns.Count; i++)
{
columns.Add(tbl.Columns[i].ColumnName);
}
}
return columns;
}
private static string getCommandText(string tableName, IOrmLiteDialectProvider provider)
{
if (provider == PostgreSqlDialect.Provider)
return string.Format("select * from {0} limit 1", tableName);
else return string.Format("select top 1 * from {0}", tableName);
}
public static void AlterTable<T>(this IDbConnection db, IOrmLiteDialectProvider provider) where T : new()
{
var model = ModelDefinition<T>.Definition;
var table = new T();
var namingStrategy = provider.NamingStrategy;
// just create the table if it doesn't already exist
var tableName = namingStrategy.GetTableName(model.ModelName);
if (db.TableExists(tableName) == false)
{
db.CreateTable<T>(overwrite: false);
return;
}
// find each of the missing fields
var columns = GetColumnNames(db, model.ModelName, provider);
var missing = ModelDefinition<T>.Definition.FieldDefinitions
.Where(field => columns.Contains(namingStrategy.GetColumnName(field.FieldName)) == false)
.ToList();
// add a new column for each missing field
foreach (var field in missing)
{
var columnName = namingStrategy.GetColumnName(field.FieldName);
var alterSql = string.Format("ALTER TABLE {0} ADD COLUMN {1} {2}",
tableName,
columnName,
db.GetDialectProvider().GetColumnTypeDefinition(field.FieldType)
);
Console.WriteLine(alterSql);
db.ExecuteSql(alterSql);
}
}
}
I implemented an UpdateTable function. The basic idea is:
1. Rename the current table in the database.
2. Let OrmLite create the new schema.
3. Copy the relevant data from the old table to the new one.
4. Drop the old table.
Github Repo: https://github.com/peheje/Extending-NServiceKit.OrmLite
Condensed code:
public interface ISqlProvider
{
string RenameTableSql(string currentName, string newName);
string GetColumnNamesSql(string tableName);
string InsertIntoSql(string intoTableName, string fromTableName, string commaSeparatedColumns);
string DropTableSql(string tableName);
}
public static class DbUpdate
{
public static void UpdateTable<T>(IDbConnection connection, ISqlProvider sqlProvider) where T : new()
{
connection.CreateTableIfNotExists<T>();
var model = ModelDefinition<T>.Definition;
string tableName = model.Name;
string tableNameTmp = tableName + "Tmp";
string renameTableSql = sqlProvider.RenameTableSql(tableName, tableNameTmp);
connection.ExecuteNonQuery(renameTableSql);
connection.CreateTable<T>();
string getModelColumnsSql = sqlProvider.GetColumnNamesSql(tableName);
var modelColumns = connection.SqlList<string>(getModelColumnsSql);
string getDbColumnsSql = sqlProvider.GetColumnNamesSql(tableNameTmp);
var dbColumns = connection.SqlList<string>(getDbColumnsSql);
List<string> activeFields = dbColumns.Where(dbColumn => modelColumns.Contains(dbColumn)).ToList();
string activeFieldsCommaSep = ListToCommaSeparatedString(activeFields);
string insertIntoSql = sqlProvider.InsertIntoSql(tableName, tableNameTmp, activeFieldsCommaSep);
connection.ExecuteSql(insertIntoSql);
string dropTableSql = sqlProvider.DropTableSql(tableNameTmp);
//connection.ExecuteSql(dropTableSql); //maybe you want to clean up yourself, else uncomment
}
private static String ListToCommaSeparatedString(List<String> source)
{
var sb = new StringBuilder();
for (int i = 0; i < source.Count; i++)
{
sb.Append(source[i]);
if (i < source.Count - 1)
{
sb.Append(", ");
}
}
return sb.ToString();
}
}
MySql implementation:
public class MySqlProvider : ISqlProvider
{
public string RenameTableSql(string currentName, string newName)
{
return "RENAME TABLE `" + currentName + "` TO `" + newName + "`;";
}
public string GetColumnNamesSql(string tableName)
{
return "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '" + tableName + "';";
}
public string InsertIntoSql(string intoTableName, string fromTableName, string commaSeparatedColumns)
{
return "INSERT INTO `" + intoTableName + "` (" + commaSeparatedColumns + ") SELECT " + commaSeparatedColumns + " FROM `" + fromTableName + "`;";
}
public string DropTableSql(string tableName)
{
return "DROP TABLE `" + tableName + "`;";
}
}
Usage:
using (var db = dbFactory.OpenDbConnection())
{
DbUpdate.UpdateTable<SimpleData>(db, new MySqlProvider());
}
Haven't tested with FKs. Can't handle renaming properties.
I needed to implement something similar and found the post by Scott very helpful. I decided to make a small change that makes it much more agnostic. Since I only use Sqlite and MSSQL, I made the getCommandText method very simple, but it can be extended. I used a simple DataTable to get the columns. This solution works perfectly for my requirements.
public static class IDbConnectionExtensions
{
private static List<string> GetColumnNames(IDbConnection db, string tableName,IOrmLiteDialectProvider provider)
{
var columns = new List<string>();
using (var cmd = db.CreateCommand())
{
cmd.CommandText = getCommandText(tableName, provider);
var tbl = new DataTable();
tbl.Load(cmd.ExecuteReader());
for (int i = 0; i < tbl.Columns.Count; i++)
{
columns.Add(tbl.Columns[i].ColumnName);
}
}
return columns;
}
private static string getCommandText(string tableName, IOrmLiteDialectProvider provider)
{
if(provider == SqliteDialect.Provider)
return string.Format("select * from {0} limit 1", tableName);
else return string.Format("select top 1 * from {0}", tableName);
}
private static List<string> GetColumnNames(object poco)
{
var list = new List<string>();
foreach (var prop in poco.GetType().GetProperties())
{
list.Add(prop.Name);
}
return list;
}
public static void AlterTable<T>(this IDbConnection db, IOrmLiteDialectProvider provider) where T : new()
{
var model = ModelDefinition<T>.Definition;
var table = new T();
// just create the table if it doesn't already exist
if (db.TableExists(model.ModelName) == false)
{
db.CreateTable<T>(overwrite: false);
return;
}
// find each of the missing fields
var columns = GetColumnNames(db, model.ModelName,provider);
var missing = ModelDefinition<T>.Definition.FieldDefinitions
.Where(field => columns.Contains(field.FieldName) == false)
.ToList();
// add a new column for each missing field
foreach (var field in missing)
{
var alterSql = string.Format("ALTER TABLE {0} ADD {1} {2}",
model.ModelName,
field.FieldName,
db.GetDialectProvider().GetColumnTypeDefinition(field.FieldType)
);
Console.WriteLine(alterSql);
db.ExecuteSql(alterSql);
}
}
}
So I took user44's answer and modified the AlterTable method to make it a bit more efficient.
Instead of looping and running one SQL query per field/column, I merge it all into one statement with some simple text parsing (MySQL syntax!).
public static void AlterTable<T>(this IDbConnection db, IOrmLiteDialectProvider provider) where T : new()
{
var model = ModelDefinition<T>.Definition;
var table = new T();
var namingStrategy = provider.NamingStrategy;
// just create the table if it doesn't already exist
var tableName = namingStrategy.GetTableName(model.ModelName);
if (db.TableExists(tableName) == false)
{
db.CreateTable<T>(overwrite: false);
return;
}
// find each of the missing fields
var columns = GetColumnNames(db, model.ModelName, provider);
var missing = ModelDefinition<T>.Definition.FieldDefinitions
.Where(field => columns.Contains(namingStrategy.GetColumnName(field.FieldName)) == false)
.ToList();
string alterSql = "";
string addSql = "";
// add a new column for each missing field
foreach (var field in missing)
{
var alt = db.GetDialectProvider().ToAddColumnStatement(typeof(T), field); // Should be made more efficient, one query for all changes instead of many
int index = alt.IndexOf("ADD ");
alterSql = alt.Substring(0, index);
addSql += alt.Substring(alt.IndexOf("ADD COLUMN")).Replace(";", "") + ", ";
}
if (addSql.Length > 2)
addSql = addSql.Substring(0, addSql.Length - 2);
string fullSql = alterSql + addSql;
Console.WriteLine(fullSql);
db.ExecuteSql(fullSql);
}

running stored procedures into own model with servicestack ormlite

Are there any examples of running a stored procedure with ServiceStack MVC using OrmLite? mythz? I've seen this block of code:
var results = new List<EnergyCompare>
{dbFactory.Exec(dbCmd =>
{
dbCmd.CommandType = CommandType.StoredProcedure;
dbCmd.Parameters.Add(new SqlParameter("@id", 1));
dbCmd.CommandText = "GetAuthorById";
return dbCmd.ExecuteReader().ConvertTo<EnergyCompare>();
}
)};
but it came with a note on the Google Groups thread that it never worked!
I can also write this:
using(var db = new SwitchWizardDb())
{
var results2 = db.dbCmd.ExecuteProcedure()
}
but I'm not sure how to complete this with parameters, and the source code I looked at said it was obsolete?
Thanks
Looks like ServiceStack.ORMLite has been updated to make this easier:
List<Poco> results = db.SqlList<Poco>("EXEC GetAnalyticsForWeek 1");
List<Poco> results = db.SqlList<Poco>("EXEC GetAnalyticsForWeek @weekNo", new { weekNo = 1 });
List<int> results = db.SqlList<int>("EXEC GetTotalsForWeek 1");
List<int> results = db.SqlList<int>("EXEC GetTotalsForWeek @weekNo", new { weekNo = 1 });
This example is on the front page of the github repo.
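For completeness, Poco here is just a plain class whose public properties match the columns the procedure returns; a hypothetical shape:
// Hypothetical result type; property names must match the
// column names returned by GetAnalyticsForWeek.
public class Poco
{
    public int Id { get; set; }
    public int WeekNo { get; set; }
    public decimal Total { get; set; }
}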
Well, I figured it was best to roll my own handler, so I have created this. Any thoughts would be most welcome, especially on how I could pass over params in some kind of Func or something.
I have a main class to deal with easy access to my connection object:
public class DatabaseNameSp : IDisposable
{
private readonly SqlConnection _spConn = new SqlConnection(DatabaseNameSp.dbConString);
public readonly SqlCommand SpCmd;
public DatabaseNameSp(string procedureName)
{
_spConn.Open();
SpCmd = new SqlCommand
{
Connection = _spConn,
CommandType = CommandType.StoredProcedure,
CommandText = procedureName
};
}
public void Dispose()
{
_spConn.Close();
SpCmd.Dispose();
}
}
usage:
using (var db = new DatabaseNameSp("procedureName"))
{
db.SpCmd.Parameters.Add(new SqlParameter("@Id", 1));
var rdr = db.SpCmd.ExecuteReader(CommandBehavior.CloseConnection);
var results = new List<CustomDTO>();
while (rdr.Read())
{
results.Add(new CustomDTO { Name = rdr["name"].ToString(), Id = rdr["id"].ToString() });
}
return new CustomDTOResponse { Results = results };
}
Any thoughts? Thanks!
Here is an example of running a stored procedure with OrmLite that may help you:
IList<MyDTO> myList = DbFactory.Run(dbCnx =>
{
using (var dbCmd = dbCnx.CreateCommand())
{
dbCmd.CommandType = CommandType.StoredProcedure;
dbCmd.CommandText = "mySchema.myStoredProc";
dbCmd.Parameters.Add(new SqlParameter("@param1", val1));
dbCmd.Parameters.Add(new SqlParameter("@param2", val2));
var r = dbCmd.ExecuteReader();
return r.ConvertToList<MyDTO>();
}
});
To simply run a stored procedure with no data returned:
public class ComsManager : Dbase
{
private IDbConnection dbConn;
private IDbConnection dbConn;
public ComsManager()
{
dbConn = Dbase.GetConnection();
}
public void Housekeeping()
{
using(var dbCmd = dbConn.CreateCommand())
dbConn.Exec(res => { dbCmd.CommandType = CommandType.StoredProcedure; dbCmd.CommandText = "SP_housekeeping"; dbCmd.ExecuteNonQuery(); });
}
}
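On newer OrmLite versions, the same fire-and-forget call can also go through the SQL API shown earlier, which avoids the manual command plumbing (a sketch, assuming the procedure takes no parameters):
using (var db = dbFactory.OpenDbConnection())
{
    // Executes the procedure and discards any result set.
    db.ExecuteSql("EXEC SP_housekeeping");
}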
