I have a pretty normal join that I create via JoinSqlBuilder:
var joinSqlBuilder = new JoinSqlBuilder<ProductWithManufacturer, Product>()
.Join<Product, Manufacturer>(sourceColumn: p => p.ManufacturerId,
destinationColumn: mf => mf.Id,
sourceTableColumnSelection: p => new { ProductId = p.Id, ProductName = p.Name },
destinationTableColumnSelection: m => new { ManufacturerId = m.Id, ManufacturerName = m.Name })
Of course, the join created by this could potentially return a lot of rows, so I want to use paging, preferably on the server side. However, I cannot find anything in the JoinSqlBuilder that would let me do this. Am I missing something, or does JoinSqlBuilder not support this (yet)?
If you aren't using MS SQL Server, I think the following will work:
var sql = joinSqlBuilder.ToSql();
var data = this.Select<ProductWithManufacturer>(
q => q.Select(sql)
.Limit(skip,rows)
);
If you are working with MS SQL Server, it will most likely blow up on you. I am working to merge a more elegant solution similar to this into JoinSqlBuilder. The following is a quick and dirty method to accomplish what you want.
I created the following extension class:
public static class Extension
{
private static string ToSqlWithPaging<TResult, TTarget>(
this JoinSqlBuilder<TResult, TTarget> bldr,
string orderColumnName,
int limit,
int skip)
{
var sql = bldr.ToSql();
return string.Format(@"
SELECT * FROM (
SELECT ROW_NUMBER() OVER (ORDER BY [{0}]) As RowNum, *
FROM (
{1}
)as InnerResult
)as RowConstrainedResult
WHERE RowNum > {2} AND RowNum <= {3}
", orderColumnName, sql, skip, skip + limit);
}
public static string ToSqlWithPaging<TResult, TTarget>(
this JoinSqlBuilder<TResult, TTarget> bldr,
Expression<Func<TResult, object>> orderSelector,
int limit,
int skip)
{
var member = orderSelector.Body as MemberExpression;
if (member == null)
throw new ArgumentException(
"TResult selector refers to a non member."
);
var propInfo = member.Member as PropertyInfo;
if (propInfo == null)
throw new ArgumentException(
"TResult selector refers to a field, it must be a property."
);
var orderSelectorName = propInfo.Name;
return ToSqlWithPaging(bldr, orderSelectorName, limit, skip);
}
}
It is applied as follows:
List<Entity> GetAllEntities(int limit, int skip)
{
var bldr = GetJoinSqlBuilderFor<Entity>();
var sql = bldr.ToSqlWithPaging(
entity => entity.Id,
limit,
skip);
return this.Db.Select<Entity>(sql);
}
With the native Amazon .NET library, a batch get looks like this:
var batch = context.CreateBatchGet<MyClass>();
batch.AddKey("hashkey1");
batch.AddKey("hashkey2");
batch.AddKey("hashkey3");
batch.Execute();
var result = batch.Results;
Now I'm testing ServiceStack.Aws, but I couldn't find how to do the same thing. I've tried the following; both attempts failed.
//1st try
var q1 = db.FromQueryIndex<MyClass>(x => x.room_id == "hashkey1" || x.room_id == "hashkey2"||x.room_id == "hashkey3");
var result = db.Query(q1);
//2nd try
var result = db.GetItems<MyClass>(new string[]{"hashkey1","hashkey2","hashkey3"});
In both cases, it threw an exception that says
Additional information: Invalid operator used in KeyConditionExpression: OR
Please help me. Thanks!
Using GetItems should work as seen with this Live Example on Gistlyn:
public class MyClass
{
public string Id { get; set; }
public string Content { get; set; }
}
db.RegisterTable<MyClass>();
db.DeleteTable<MyClass>(); // Delete existing MyClass Table (if any)
db.InitSchema(); // Creates MyClass DynamoDB Table
var items = 5.Times(i => new MyClass { Id = $"hashkey{i}", Content = $"Content {i}" });
db.PutItems(items);
var dbItems = db.GetItems<MyClass>(new[]{ "hashkey1","hashkey2","hashkey3" });
"Saved Items: {0}".Print(dbItems.Dump());
If your Item has both a Hash and a Range Key you'll need to use the GetItems<T>(IEnumerable<DynamoId> ids) API, e.g:
var dbItems = db.GetItems<MyClass>(new[]{
new DynamoId("hashkey1","rangekey1"),
new DynamoId("hashkey2","rangekey3"),
new DynamoId("hashkey3","rangekey4"),
});
Query all Items with same HashKey
If you want to fetch all items with the same HashKey you need to create a DynamoDB Query as seen with this Live Gistlyn Example:
// (assumes MyClass also defines a RangeKey property)
var items = 5.Times(i => new MyClass {
    Id = $"hashkey{i%2}", RangeKey = $"rangekey{i}", Content = $"Content {i}" });
db.PutItems(items);
var rows = db.FromQuery<MyClass>(x => x.Id == "hashkey1").Exec().ToArray();
rows.PrintDump();
I recently upgraded from Azure SDK 1 to SDK 2 and I converted the following code:
public IEnumerable<AnonymousSessionEntity> GetAnonymousSessions(Guid visitorId, Guid customerId, int connectionId, MergeOption mergeOption = MergeOption.AppendOnly)
{
try
{
var partitionKey = AzureUtil.CombineToKey(customerId, connectionId);
_tableStorageServiceContext.MergeOption = mergeOption;
_tableStorageServiceContext.ResolveType = (unused) => typeof(AnonymousSessionEntity);
var query = from s in _tableStorageServiceContext.CreateQuery<AnonymousSessionEntity>(TableStorageServiceContext.AnonymousSessionEntityName)
.Where(s => s.PartitionKey == partitionKey && s.RowKey.CompareTo(AzureUtil.GetRowKeyTimeLimit(90)) <= 0 && s.VisitorId == visitorId)
select s;
CloudTableQuery<AnonymousSessionEntity> cloudTableQuery = query.AsTableServiceQuery<AnonymousSessionEntity>();
IEnumerable<AnonymousSessionEntity> anonymousSessions = cloudTableQuery.Execute();
return anonymousSessions;
}
catch (DataServiceQueryException e)
{
if (e.Response.StatusCode == (int)HttpStatusCode.NotFound) return null;
else throw;
}
}
into the following:
public IEnumerable<AnonymousSessionEntity> GetAnonymousSessions(Guid visitorId, Guid customerId, int connectionId)
{
var partitionKey = AzureUtil.CombineToKey(customerId, connectionId);
var pkFilter = TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, partitionKey);
var rkFilter = TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.LessThan, AzureUtil.GetRowKeyTimeLimit(90));
var visitorIdFilter = TableQuery.GenerateFilterConditionForGuid("VisitorId", QueryComparisons.Equal, visitorId);
var combinedFilter = string.Format("({0}) {1} ({2}) {3} ({4})", pkFilter, TableOperators.And, rkFilter, TableOperators.And, visitorIdFilter);
var table = GetCloudTable(TableStorageDataSource.AnonymousSessionEntityName);
var rangeQuery = new TableQuery<AnonymousSessionEntity>().Where(combinedFilter);
var result = table.ExecuteQuery<AnonymousSessionEntity>(rangeQuery);
return result;
}
As you can see, I have a filter on the PartitionKey, and also on the RowKey and on a custom field (called "VisitorId"). This query worked great with SDK 1, but the new version is very slow. In fact, it's so slow that it leads me to believe that Azure is doing a full table scan (I stopped counting after more than a minute). I have a few million rows in this table, so I absolutely have to avoid table scans.
For testing purposes, I removed the filter on the "VisitorId" field in the v2 query and it performs very fast: probably 2-3 seconds.
Is there a way to build a fast query with the following criteria:
Exact Match on the PartitionKey
Range filter on the RowKey
Exact match on a custom field
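For reference, the same three-part filter can also be composed with the SDK's TableQuery.CombineFilters helper instead of string.Format. This is only a sketch of an equivalent way to build the filter shown above; it does not by itself change how the Table service evaluates the non-key VisitorId condition:
// Sketch: same filter as above, built with TableQuery.CombineFilters.
// partitionKey, visitorId and AzureUtil.GetRowKeyTimeLimit come from the question's code.
var keyFilter = TableQuery.CombineFilters(
    TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, partitionKey),
    TableOperators.And,
    TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.LessThan, AzureUtil.GetRowKeyTimeLimit(90)));
var combinedFilter = TableQuery.CombineFilters(
    keyFilter,
    TableOperators.And,
    TableQuery.GenerateFilterConditionForGuid("VisitorId", QueryComparisons.Equal, visitorId));
var rangeQuery = new TableQuery<AnonymousSessionEntity>().Where(combinedFilter);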
I know that ServiceStack.OrmLite is set up as a 1:1 mapping between a POCO and a database table. I have a situation where I will have groups of tables of the same structure, created as necessary. I am trying to find a way to keep using the IDbConnection while specifying the table name in CRUD operations.
Something like
using(var db = _conn.OpenDbConnection()){
db.SaveAll(objList, "DIFFERENT_TABLE");
}
I was easily able to work around creating and deleting the tables. I am hoping that I can make use of the ExpressionVisitor or something else to change the table name before the statement is executed. One of the requirements of the project is that it be database agnostic, which is why I am trying not to write the SQL manually.
Solutions
Here are a couple of functions that I ended up creating if anyone out there wants some more examples.
public static List<T> SelectTable<T>(this IDbConnection conn, string tableName) {
var stmt = ModelDefinition<T>.Definition.SqlSelectAllFromTable;
stmt = stmt.Replace(ModelDefinition<T>.Definition.Name, tableName.FmtTable());
return conn.Select<T>(stmt);
}
public static List<T> SelectTableFmt<T>(this IDbConnection conn, string tableName, string sqlFilter,
params object[] filterParams) {
var stmt = conn.GetDialectProvider().ToSelectStatement(typeof (T), sqlFilter, filterParams);
stmt = stmt.Replace(ModelDefinition<T>.Definition.Name, tableName.FmtTable());
return conn.Select<T>(stmt);
}
public static void InsertTable<T>(this IDbConnection conn, T obj, string tablename) {
var stmt = conn.GetDialectProvider().ToInsertRowStatement(null, obj);
stmt = stmt.Replace(obj.GetType().Name, tablename.FmtTable());
conn.ExecuteSql(stmt);
}
public static int SaveAll<T>(this IDbConnection conn, string tablename, IEnumerable<T> objs) {
var saveRows = objs.ToList();
var firstRow = saveRows.FirstOrDefault();
if (Equals(firstRow, default(T))) return 0;
var defaultIdValue = firstRow.GetId().GetType().GetDefaultValue();
var idMap = defaultIdValue != null
? saveRows.Where(x => !defaultIdValue.Equals(x.GetId())).ToSafeDictionary(x => x.GetId())
: saveRows.Where(x => x.GetId() != null).ToSafeDictionary(x => x.GetId());
var existingRowsMap = conn.SelectByIds<T>(tablename, idMap.Keys).ToDictionary(x => x.GetId());
var modelDef = ModelDefinition<T>.Definition;
var dialectProvider = conn.GetDialectProvider();
var rowsAdded = 0;
using (var dbTrans = conn.OpenTransaction()) {
foreach (var obj in saveRows) {
var id = obj.GetId();
if (id != defaultIdValue && existingRowsMap.ContainsKey(id)) {
var updStmt = dialectProvider.ToUpdateRowStatement(obj);
updStmt = updStmt.Replace(obj.GetType().Name, tablename.FmtTable());
conn.ExecuteSql(updStmt);
}
else {
if (modelDef.HasAutoIncrementId) {}
var stmt = dialectProvider.ToInsertRowStatement(null, obj);
stmt = stmt.Replace(obj.GetType().Name, tablename.FmtTable());
conn.ExecuteSql(stmt);
rowsAdded++;
}
}
dbTrans.Commit();
}
return rowsAdded;
}
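For illustration, the extensions above could be used roughly like this (the POCO and the table name here are hypothetical, only meant to show the call shapes):
// Hypothetical POCO; any OrmLite-mapped type with an Id works the same way.
public class Measurement {
    public int Id { get; set; }
    public double Value { get; set; }
}
using (var db = _conn.OpenDbConnection()) {
    db.InsertTable(new Measurement { Value = 1.23 }, "MEASUREMENTS_2014_06");        // hypothetical table name
    var all = db.SelectTable<Measurement>("MEASUREMENTS_2014_06");
    var filtered = db.SelectTableFmt<Measurement>("MEASUREMENTS_2014_06", "Value > {0}", 1.0);
    var saved = db.SaveAll("MEASUREMENTS_2014_06", all);                             // custom SaveAll from above
}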
OrmLite supports specifying the table name for Update and Delete operations. Unfortunately the examples in the readme here have yet to be updated. This is the required format:
UPDATE:
db.UpdateFmt(table: "Person", set: "FirstName = {0}".Fmt("JJ"), where: "LastName = {0}".Fmt("Hendrix"));
DELETE:
db.DeleteFmt(table: "Person", where: "Age = {0}".Fmt(27));
The methods you need can be found here. You should be able to use .Exec to handle reading and insert operations.
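For reading against an alternate table, a raw .Exec call could look roughly like this (a sketch only; "DIFFERENT_TABLE" and the Name column are placeholders from the question, not a real schema):
// Sketch: raw read via .Exec, bypassing the POCO-to-table-name mapping.
var names = db.Exec(dbCmd => {
    dbCmd.CommandText = "SELECT Name FROM DIFFERENT_TABLE";   // placeholder table/column
    var results = new List<string>();
    using (var reader = dbCmd.ExecuteReader())
    {
        while (reader.Read())
            results.Add(reader.GetString(0));
    }
    return results;
});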
The new API for ServiceStack.OrmLite dictates that when calling e.g. a stored procedure, you should use either SqlScalar or SqlList, like this:
List<Poco> results = db.SqlList<Poco>("EXEC GetAnalyticsForWeek 1");
List<Poco> results = db.SqlList<Poco>("EXEC GetAnalyticsForWeek #weekNo", new { weekNo = 1 });
List<int> results = db.SqlList<int>("EXEC GetTotalsForWeek 1");
List<int> results = db.SqlList<int>("EXEC GetTotalsForWeek #weekNo", new { weekNo = 1 });
However, the named parameters don't work: you HAVE to respect the order of the parameters in the SP. I think this is because the SP is executed with CommandType = CommandType.Text instead of CommandType.StoredProcedure, and the parameters are added via dbCmd.Parameters.Add(). Because the CommandType is Text, the parameters apparently need to be embedded in the SQL string itself; the names given to Parameters.Add() are ignored.
An example:
CREATE PROCEDURE [dbo].[sproc_WS_SelectScanFeedScanRecords]
#JobNo int = 0
,#SyncStatus int = -1
AS
BEGIN
SET NOCOUNT ON;
SELECT
FSR.ScanId
, FSR.JobNo
, FSR.BatchNo
, FSR.BagNo
, FSR.ScanType
, FSR.ScanDate
, FSR.ScanTime
, FSR.ScanStatus
, FSR.SyncStatus
, FSR.JobId
FROM dbo.SCAN_FeedScanRecords FSR
WHERE ((FSR.JobNo = #JobNo) OR (#JobNo = 0) OR (ISNULL(#JobNo,1) = 1))
AND ((FSR.SyncStatus = #SyncStatus) OR (#SyncStatus = -1) OR (ISNULL(#SyncStatus,-1) = -1))
END
When calling this SP like this:
db.SqlList<ScanRecord>("EXEC sproc_WS_SelectScanFeedScanRecords #SyncStatus",new {SyncStatus = 1});
It returns all records with JobNo = 1 instead of SyncStatus = 1, because it ignores the parameter name and binds the value by the order in which the parameters are defined in the SP.
I have to call it like this:
db.SqlList<ScanRecord>("EXEC sproc_WS_SelectScanFeedScanRecords #SyncStatus=1");
Is this expected behavior? I think it defeats the purpose of anonymous-type parameters if I can't trust the naming.
TIA
Bo
My solution was to roll my own methods for stored procedures. If people find them handy, I could add them to the project.
public static void StoredProcedure(this IDbConnection dbConn, string storedprocedure, object anonType = null)
{
dbConn.Exec(dbCmd =>
{
dbCmd.CommandType = CommandType.StoredProcedure;
dbCmd.CommandText = storedprocedure;
dbCmd.SetParameters(anonType, true);
dbCmd.ExecuteNonQuery();
});
}
public static T StoredProcedureScalar<T>(this IDbConnection dbConn, string storedprocedure, object anonType = null)
{
return dbConn.Exec(dbCmd =>
{
dbCmd.CommandType = CommandType.StoredProcedure;
dbCmd.CommandText = storedprocedure;
dbCmd.SetParameters(anonType, true);
using (IDataReader reader = dbCmd.ExecuteReader())
return GetScalar<T>(reader);
});
}
public static List<T> StoredProcedureList<T>(this IDbConnection dbConn, string storedprocedure, object anonType = null)
{
return dbConn.Exec(dbCmd =>
{
dbCmd.CommandType = CommandType.StoredProcedure;
dbCmd.CommandText = storedprocedure;
dbCmd.SetParameters(anonType, true);
using (var dbReader = dbCmd.ExecuteReader())
return IsScalar<T>()
? dbReader.GetFirstColumn<T>()
: dbReader.ConvertToList<T>();
});
}
They are just modified versions of SqlScalar and SqlList, plus ExecuteNonQuery.
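With these in place, the stored procedure from the question can be called with a named anonymous-type argument; a usage sketch, assuming the extensions above are in scope:
// Parameters are now bound by name, because CommandType is StoredProcedure.
var records = db.StoredProcedureList<ScanRecord>(
    "sproc_WS_SelectScanFeedScanRecords",
    new { SyncStatus = 1 });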
I am building an ASP.NET site on .NET Framework 4.0, and I am stuck on the method that is supposed to call a .cs class and get the query result back. Here are the method call and the method:
1: Method call from the aspx.cs page:
helper cls = new helper();
var query = cls.GetQuery(GroupID,emailCap);
2: Method in helper class:
public IQueryable<VariablesForIQueryble> GetQuery(int incomingGroupID, int incomingEmailCap)
{
var ctx = new some_connection_Connection();
ObjectSet<Members1> members = ctx.Members11;
ObjectSet<groupMember> groupMembers = ctx.groupMembers;
var query = from m in members
join gm in groupMembers on m.MemberID equals gm.MemID
where (gm.groupID == incomingGroupID) && (m.EmailCap == incomingEmailCap)
select new VariablesForIQueryble(m.MemberID, m.MemberFirst, m.MemberLast, m.MemberEmail, m.ValidEmail, m.EmailCap);
//select new {m.MemberID, m.MemberFirst, m.MemberLast, m.MemberEmail, m.ValidEmail, m.EmailCap};
return query ;
}
I tried the above code with IEnumerable too, without any luck. This is the code for the VariablesForIQueryble class:
3: The class itself, for taking the anonymous type and casting it to proper types:
public class VariablesForIQueryble
{
private int _emailCap;
public int EmailCap
{
get { return _emailCap; }
set { _emailCap = value; }
}....................................
4: and a constructor:
public VariablesForIQueryble(int memberID, string memberFirst, string memberLast, string memberEmail, int? validEmail, int? emailCap)
{
this.EmailCap = (int) emailCap;
.........................
}
I can't seem to get the query result back. First it complained about the anonymous type, so I made a class after reading this: link text; and now it tells me that constructors with parameters are not supported. Now, I am an intermediate developer: is there an easy solution to this, or do I have to move my query back to the .aspx.cs page?
If you want to project to a specific .NET type like this, you will need to force the query to actually execute using either .AsEnumerable() or .ToList(), and then use .Select() against LINQ to Objects.
You could leave your original anonymous type in to specify what you want back from the database, then call .ToList() on it and then .Select(...) to reproject.
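A sketch of what that could look like with the query from the question (names taken from the constructor shown above):
// The anonymous projection runs in the database; the reprojection into the
// parameterized-constructor type happens in memory after AsEnumerable().
var result = (from m in members
              join gm in groupMembers on m.MemberID equals gm.MemID
              where gm.groupID == incomingGroupID && m.EmailCap == incomingEmailCap
              select new { m.MemberID, m.MemberFirst, m.MemberLast, m.MemberEmail, m.ValidEmail, m.EmailCap })
             .AsEnumerable()
             .Select(x => new VariablesForIQueryble(
                 x.MemberID, x.MemberFirst, x.MemberLast, x.MemberEmail, x.ValidEmail, x.EmailCap))
             .ToList();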
You can also clean up your code somewhat by using an Entity Association between Groups and Members using a FK association in the database. Then the query becomes a much simpler:
var result = ctx.Members11.Include("Group").Where(m => m.Group.groupID == incomingGroupID && m.EmailCap == incomingEmailCap);
You still have the issue of having to do a select to specify which columns to return and then calling .ToList() to force execution before reprojecting to your new type.
Another alternative is to create a view in your database and import that as an Entity into the Entity Designer.
Used reflection to solve the problem:
A: The query, no longer using the custom-made "VariablesForIQueryble" class:
//Method in helper class
public IEnumerable GetQuery(int incomingGroupID, int incomingEmailCap)
{
var ctx = new some_Connection();
ObjectSet<Members1> members = ctx.Members11;
ObjectSet<groupMember> groupMembers = ctx.groupMembers;
var query = from m in members
join gm in groupMembers on m.MemberID equals gm.MemID
where ((gm.groupID == incomingGroupID) && (m.EmailCap == incomingEmailCap)) //select m;
select new { m.MemberID, m.MemberFirst, m.MemberLast, m.MemberEmail, m.ValidEmail, m.EmailCap };
//select new VariablesForIQueryble (m.MemberID, m.MemberFirst, m.MemberLast, m.MemberEmail, m.ValidEmail, m.EmailCap);
//List<object> lst = new List<object>();
//foreach (var i in query)
//{
// lst.Add(i.MemberEmail);
//}
//return lst;
//return query.Select(x => new{x.MemberEmail,x.MemberID,x.ValidEmail,x.MemberFirst,x.MemberLast}).ToList();
return query;
}
B: Code to catch the objects and convert them using reflection:
helper cls = new helper();
var query = cls.GetQuery(GroupID,emailCap);
if (query != null)
{
foreach (var objRow in query)
{
System.Type type = objRow.GetType();
int memberId = (int)type.GetProperty("MemberID").GetValue(objRow, null);
string memberEmail = (string)type.GetProperty("MemberEmail").GetValue(objRow, null);
    }
}
else
{
    // something else....
}