After registering my connections, I want to profile them. With the code below, I only profile the main connection (guepard).
public static IDbConnectionFactory RegisterConnections(this Container self, bool enableProfiler)
{
var dbFactory = new OrmLiteConnectionFactory(ConfigurationManager.ConnectionStrings["guepard"].ConnectionString, SqlServer2008Dialect.Provider);
self.Register<IDbConnectionFactory>(
c =>
{
var cs = ConfigurationManager.ConnectionStrings;
dbFactory.RegisterConnection("gecko-log", cs["gecko-log"].ConnectionString, SqlServerDialect.Provider);
dbFactory.RegisterConnection("ksmpro", cs["ksmpro"].ConnectionString, SqlServer2012Dialect.Provider);
dbFactory.RegisterConnection("gestion-stock", cs["gestion-stock"].ConnectionString, SqlServerDialect.Provider);
dbFactory.RegisterConnection("planning", cs["planning"].ConnectionString, SqlServerDialect.Provider);
dbFactory.RegisterConnection("febus", cs["febus"].ConnectionString, SqlServerDialect.Provider);
if (enableProfiler)
dbFactory.ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current);
return dbFactory;
}
);
return dbFactory;
}
I don't know how to profile each connection.
Thank you for your time.
You can either register an OrmLiteConnectionFactory with the ConnectionFilter set, e.g.:
dbFactory.RegisterConnection("gecko-log",
new OrmLiteConnectionFactory(cs["gecko-log"].ConnectionString,
SqlServerDialect.Provider,
setGlobalDialectProvider: false) {
ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current)
}
);
Or go through each NamedConnection factory after registering them and set the ConnectionFilter, e.g.:
OrmLiteConnectionFactory.NamedConnections.Values
.Each(f => f.ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current));
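Applied to the RegisterConnections method in the question, the second option could look roughly like this (a sketch, assuming it runs after all the RegisterConnection calls inside the factory lambda):
// Sketch only: after every named connection has been registered, apply the
// profiler filter to the main factory and to each named factory.
if (enableProfiler)
{
    dbFactory.ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current);
    OrmLiteConnectionFactory.NamedConnections.Values
        .Each(f => f.ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current));
}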
Related
I've migrated my app to ASP.NET Core and now I have a random issue with validation tokens.
1. The first issue is that users randomly receive:
An error was encountered while handling the remote login. Correlation failed.
The problem is that if I go and test it myself, it works.
2. The second problem is that when a user receives the email confirmation token and clicks the link in the email, they get
invalid token
so they can't confirm their email.
At first I thought the issue was with UseCookiePolicy, but I've disabled it.
Startup.cs
namespace Flymark.Online.Web
{
public class Startup
{
private readonly IHostingEnvironment _env;
public Startup(IHostingEnvironment env)
{
_env = env;
var builder = new ConfigurationBuilder()
.SetBasePath(env.ContentRootPath)
.AddJsonFile("appsettings.json", true, true)
.AddJsonFile($"appsettings.{env.EnvironmentName}.json", true)
.AddEnvironmentVariables();
Configuration = builder.Build();
}
public IConfigurationRoot Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
// Configure SnapshotCollector from application settings
services.Configure<SnapshotCollectorConfiguration>(
Configuration.GetSection(nameof(SnapshotCollectorConfiguration)));
// Add SnapshotCollector telemetry processor.
services.AddSingleton<ITelemetryProcessorFactory>(sp => new SnapshotCollectorTelemetryProcessorFactory(sp));
services.AddApplicationInsightsTelemetryProcessor<TelemetryFilter>();
services.AddSingleton<ITelemetryInitializer, AppInsightsInitializer>();
services.AddCors();
var decompressionOptions = new RequestDecompressionOptions();
decompressionOptions.UseDefaults();
services.AddRequestDecompression(decompressionOptions);
FlymarkAppSettings.Init(Configuration, _env.EnvironmentName);
var storageUri = new Uri(Configuration.GetValue<string>("Flymark:DataProtectionStorageUrl"));
//Get a reference to a container to use for the sample code, and create it if it does not exist.
var container = new CloudBlobClient(storageUri).GetContainerReference("data-protection");
services.AddDataProtection()
.SetApplicationName("Flymark.Online")
.PersistKeysToAzureBlobStorage(container, "data-protection.xml");
services.AddDetection();
services.AddAutoMapper();
services.AddWebMarkupMin(
options =>
{
options.AllowMinificationInDevelopmentEnvironment = true;
options.AllowCompressionInDevelopmentEnvironment = true;
})
.AddHtmlMinification(o =>
{
o.ExcludedPages = new List<IUrlMatcher>
{
new WildcardUrlMatcher("/scripts/*")
};
o.MinificationSettings.AttributeQuotesRemovalMode = HtmlAttributeQuotesRemovalMode.KeepQuotes;
o.MinificationSettings.EmptyTagRenderMode = HtmlEmptyTagRenderMode.NoSlash;
o.MinificationSettings.RemoveOptionalEndTags = false;
})
.AddXmlMinification()
.AddHttpCompression();
services.Configure<CookiePolicyOptions>(options =>
{
// This lambda determines whether user consent for non-essential cookies is needed for a given request.
options.CheckConsentNeeded = context => true;
options.MinimumSameSitePolicy = SameSiteMode.Lax;
});
services
.AddScoped<UserStore<ApplicationUser, IdentityRole<int>, FlymarkContext, int, IdentityUserClaim<int>,
IdentityUserRole<int>, IdentityUserLogin<int>, IdentityUserToken<int>, IdentityRoleClaim<int>>,
ApplicationUserStore>();
services.AddScoped<UserManager<ApplicationUser>, FlymarkUserManager>();
services.AddScoped<RoleManager<IdentityRole<int>>, ApplicationRoleManager>();
services.AddScoped<SignInManager<ApplicationUser>, ApplicationSignInManager>();
services
.AddScoped<RoleStore<IdentityRole<int>, FlymarkContext, int, IdentityUserRole<int>,
IdentityRoleClaim<int>>, ApplicationRoleStore>();
services.TryAddSingleton<IHttpContextAccessor, HttpContextAccessor>();
services.AddIdentity<ApplicationUser, IdentityRole<int>>(
o =>
{
o.User.RequireUniqueEmail = true;
})
.AddUserStore<ApplicationUserStore>()
.AddUserManager<FlymarkUserManager>()
.AddRoleStore<ApplicationRoleStore>()
.AddRoleManager<ApplicationRoleManager>()
.AddSignInManager<ApplicationSignInManager>()
.AddClaimsPrincipalFactory<FlymarkClaimsPrincipalFactory>()
.AddDefaultTokenProviders();
services.AddSingleton<ILoggerFactory, LoggerFactory>(sp =>
new LoggerFactory(
sp.GetRequiredService<IEnumerable<ILoggerProvider>>(),
sp.GetRequiredService<IOptionsMonitor<LoggerFilterOptions>>()
)
);
services.Configure<ApiBehaviorOptions>(options => { options.SuppressModelStateInvalidFilter = true; });
services.AddMemoryCache();
services.AddSingleton<IEmailSender, FlymarkEmailSender>();
services.AddMvc(o =>
{
o.Conventions.Add(new FlymarkAsyncConvention());
o.AllowValidatingTopLevelNodes = false;
o.AllowEmptyInputInBodyModelBinding = true;
})
.SetCompatibilityVersion(CompatibilityVersion.Version_2_2)
.AddJsonOptions(opt =>
{
opt.SerializerSettings.DateFormatString = "dd/MM/yyyy";
opt.SerializerSettings.NullValueHandling = NullValueHandling.Ignore;
var resolver = opt.SerializerSettings.ContractResolver;
if (resolver == null) return;
if (resolver is DefaultContractResolver res) res.NamingStrategy = null;
});
services.Configure<IdentityOptions>(options =>
{
// Default Password settings.
options.Password.RequireDigit = false;
options.Password.RequireLowercase = false;
options.Password.RequireNonAlphanumeric = false;
options.Password.RequireUppercase = false;
options.Password.RequiredLength = 6;
options.Password.RequiredUniqueChars = 1;
options.Lockout.MaxFailedAccessAttempts = 20;
});
services
.AddAuthorization(options =>
{
options.DefaultPolicy = new AuthorizationPolicyBuilder()
.AddAuthenticationSchemes(OAuthValidationDefaults.AuthenticationScheme,
IdentityConstants.ApplicationScheme)
.RequireAuthenticatedUser()
.Build();
});
services.AddAuthentication()
.AddExternalAuthProviders(Configuration)
.AddFlymarkOpenIdConnectServer()
.AddOAuthValidation(OAuthValidationDefaults.AuthenticationScheme);
services.Configure<SecurityStampValidatorOptions>(options =>
{
// This is the key to control how often validation takes place
options.ValidationInterval = TimeSpan.FromMinutes(15);
});
services.ConfigureApplicationCookie(config =>
{
config.LoginPath = "/Identity/Account/LogIn";
config.AccessDeniedPath = "/Identity/Account/LogIn";
config.SlidingExpiration = true;
config.Events.OnRedirectToLogin = OnRedirectToLoginAsync;
});
}
private Task OnRedirectToLoginAsync(RedirectContext<CookieAuthenticationOptions> context)
{
if (context.HttpContext.Request.Path.Value.Contains("/api"))
context.Response.StatusCode = 401;
else
context.Response.Redirect(context.RedirectUri);
return Task.CompletedTask;
}
public void ConfigureContainer(ContainerBuilder builder)
{
builder.RegisterSource(new AnyConcreteTypeNotAlreadyRegisteredSource());
//builder.RegisterApiControllers(Assembly.GetExecutingAssembly());
builder.RegisterModule(new FlymarkDalDiModule
{
Configuration = Configuration
});
builder.RegisterModule(new DbDiModule(FlymarkAppSettings.Instance.DbContextConnection,
FlymarkAppSettings.Instance.StorageConnectionString));
builder.RegisterModule<FlymarkWebDiModule>();
}
private CultureInfo CreateCulture(string key)
{
return new CultureInfo(key)
{
NumberFormat = {NumberDecimalSeparator = "."},
DateTimeFormat = {ShortDatePattern = "dd/MM/yyyy"}
};
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env,
ILoggerFactory loggerFactory, IMapper mapper)
{
#if DEBUG
mapper.ConfigurationProvider.AssertConfigurationIsValid();
#endif
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
app.UseDatabaseErrorPage();
app.UseStaticFiles(new StaticFileOptions
{
OnPrepareResponse = context =>
{
context.Context.Response.Headers.Add("Cache-Control", "no-cache, no-store");
context.Context.Response.Headers.Add("Expires", "-1");
}
});
}
else
{
app.UseExceptionHandler("/Error/Error500");
app.UseStaticFiles();
}
app.UseCors(builder =>
{
builder.AllowAnyOrigin()
.AllowAnyMethod()
.AllowCredentials()
.SetPreflightMaxAge(TimeSpan.FromMinutes(5))
.AllowAnyHeader();
});
app.UseRequestDecompression();
app.UseLegacyTokenContentTypeFixMiddleware();
var supportedCultures = new[]
{
CreateCulture("en"),
CreateCulture("ru"),
CreateCulture("uk")
};
app.UseFlymarkExceptionMiddleware();
app.UseCookiePolicy();
app
.UseAuthentication()
.UseDomainMiddleware()
.UseRequestLocalization(new RequestLocalizationOptions
{
DefaultRequestCulture = new RequestCulture("en"),
SupportedCultures = supportedCultures,
SupportedUICultures = supportedCultures
})
.UseWebMarkupMin();
app.Use(async (ctx, next) =>
{
await next();
if (ctx.Response.StatusCode == 404 && !ctx.Response.HasStarted)
{
//Re-execute the request so the user gets the error page
var originalPath = ctx.Request.Path.Value;
ctx.Items["originalPath"] = originalPath;
ctx.Request.Path = "/error/error404";
await next();
}
});
app
.UseMvc(routes =>
{
routes.MapRoute(
"areaRoute",
"{area:exists}/{controller=Dashboard}/{action=Index}/{id?}");
routes.MapRoute(
"default",
"{controller=Home}/{action=Index}/{id?}");
});
}
}
}
I am generating the URL for the email confirmation like this:
var code = await _userManager.GenerateEmailConfirmationTokenAsync(user);
var callbackUrl = Url.Page("/Account/ConfirmEmail",
null,
new {userId = user.Id, code = code.ToBase64String()},
returnDomainUrl.Scheme,
returnDomainUrl.Host);
I also thought that it could be AngularJS (I still have it on my page), but it's not loaded at /signin-facebook since that route is handled by middleware.
I think the issue is somewhere in Data Protection, since I am hitting it both on login and in the confirmation email.
I also tried to Base64-encode the email token, but it didn't help; besides, I think the URL is encoded automatically by Url.Page.
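For what it's worth, the confirmation endpoint has to undo exactly the same encoding before calling ConfirmEmailAsync. A minimal sketch of what I mean (FromBase64String is assumed to be the counterpart of the ToBase64String extension used above; it is not part of Identity itself):
// Sketch of a ConfirmEmail page handler: decode the token with the exact
// counterpart of the encoding used when the callback URL was generated.
public async Task<IActionResult> OnGetAsync(int userId, string code)
{
    var user = await _userManager.FindByIdAsync(userId.ToString());
    if (user == null)
        return NotFound();

    // FromBase64String() is assumed to be the inverse of ToBase64String() above.
    var token = code.FromBase64String();

    var result = await _userManager.ConfirmEmailAsync(user, token);
    return result.Succeeded ? (IActionResult)Page() : BadRequest();
}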
Finally, after weeks of investigation, I found the issue.
When a user registers, I send an email and an SMS. The user then confirms the SMS, which triggers an update of the security stamp. Later, when the user clicks to confirm the email, it fails because the security stamp no longer matches the one embedded in the token.
Moving the sending of the confirmation email to after the phone number is confirmed solved half of my problem.
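In code, the fix is just a reordering. A rough sketch (the surrounding method and BuildConfirmationLink are mine; the UserManager calls are standard Identity APIs):
// Sketch: generate the email confirmation token only after the phone
// confirmation has updated the security stamp, so the stamp baked into the
// token still matches when the user clicks the link.
public async Task ConfirmPhoneAsync(ApplicationUser user, string smsCode)
{
    // Confirming the phone number changes the security stamp.
    var result = await _userManager.ChangePhoneNumberAsync(user, user.PhoneNumber, smsCode);
    if (!result.Succeeded)
        return;

    // Only now generate and send the email confirmation token.
    var emailToken = await _userManager.GenerateEmailConfirmationTokenAsync(user);
    await _emailSender.SendEmailAsync(user.Email, "Confirm your email",
        BuildConfirmationLink(user.Id, emailToken)); // BuildConfirmationLink: hypothetical helper
}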
Most probably the token validation failed because the tokens are generated in one domain and validated in another domain.
In ASP.NET this can be solved by having the same machineKey in both domains' web.config files.
For ASP.NET Core you can replace the machineKey as described here, so that you have the same cryptographic settings in both domains.
See: Replace the ASP.NET machineKey in ASP.NET Core
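In ASP.NET Core the equivalent of a shared machineKey is a shared Data Protection configuration: every app that has to unprotect the other's tokens must use the same key ring and the same application name. A minimal sketch (the application name and blob container are placeholders; the Startup in the question already does something very similar):
// Sketch: identical Data Protection setup in each application.
services.AddDataProtection()
    .SetApplicationName("SharedAppName")                   // must match across apps
    .PersistKeysToAzureBlobStorage(container, "keys.xml"); // same key ring across apps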
I have a group called "Building Residents". In that group I have 2 groups for every apartment:
- Apt1a_Renters
- Apt1a_Owners
- Apt2a_Renters
- Apt2a_Owners
etc, etc...
A Person can be in the Apt2a_Owners group but also in the Apt1a_Renters group.
I want to query the "Building Residents" group for all of its member users, but when I do that, I just get back the groups inside it.
Is there any way, using a single query, to list all the building residents without bringing back duplicates (same person in apt2a_owners and apt1a_renters)?
The Azure AD Graph REST API can only get the direct members of a group. To get the member users recursively, we need to implement it ourselves. Here is a C# code sample using the Azure AD Graph client library for your reference:
public void testGetMembersRecursively()
{
var accessToken="";
var tenantId="";
var groupName="";
var client = GraphHelper.CreateGraphClient(accessToken,tenantId);
var group = (Group)client.Groups.ExecuteAsync().Result.CurrentPage.First(g => g.DisplayName ==groupName);
var groupFetcher = client.Groups.GetByObjectId(group.ObjectId);
List<string> users = new List<string>();
GetMembersRecursively(groupFetcher, users);
Console.WriteLine(String.Join("\n", users.Distinct<string>().ToArray()));
}
public void GetMembersRecursively(Microsoft.Azure.ActiveDirectory.GraphClient.IGroupFetcher groupFetcher, List<string> users)
{
var membersResult = groupFetcher.Members.ExecuteAsync().Result;
AddMember(membersResult, users);
while (membersResult.MorePagesAvailable)
{
membersResult = membersResult.GetNextPageAsync().Result;
AddMember(membersResult, users);
}
}
public void AddMember(IPagedCollection<IDirectoryObject> membersResult, List<string> users)
{
var members = membersResult.CurrentPage;
foreach (var obj in members)
{
var _user = obj as Microsoft.Azure.ActiveDirectory.GraphClient.User;
if (_user != null)
users.Add(_user.DisplayName);
else
{
var groupMember = obj as Microsoft.Azure.ActiveDirectory.GraphClient.Group;
if (groupMember != null)
{
// GetGroupById (not shown) is assumed to return an IGroupFetcher for the nested group,
// e.g. via client.Groups.GetByObjectId(groupMember.ObjectId).
GetMembersRecursively(GetGroupById(groupMember.ObjectId), users);
}
}
}
}
GraphHelper class:
class GraphHelper
{
public static ActiveDirectoryClient CreateGraphClient(string accessToken, string tenantId)
{
string graphResourceId = "https://graph.windows.net";
Uri servicePointUri = new Uri(graphResourceId);
Uri serviceRoot = new Uri(servicePointUri, tenantId);
ActiveDirectoryClient activeDirectoryClient = new ActiveDirectoryClient(serviceRoot, async () => await Task.FromResult(accessToken));
return activeDirectoryClient;
}
}
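Since the same person can appear in several nested groups, note that the sample de-duplicates on DisplayName, which is not guaranteed to be unique. A small wrapper I would add (the method name is mine) that walks the group and returns a de-duplicated list:
// Sketch: walk the group recursively and de-duplicate the result. If two
// different residents could share a display name, collect _user.UserPrincipalName
// (or ObjectId) in AddMember instead, so Distinct() is keyed on something unique.
public List<string> GetDistinctResidents(Microsoft.Azure.ActiveDirectory.GraphClient.IGroupFetcher groupFetcher)
{
    var users = new List<string>();
    GetMembersRecursively(groupFetcher, users);
    return users.Distinct().ToList();
}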
I am writing a simple application that contains a database of items. The items have a type, manufacturer, model, and a few other properties. I have implemented three UIPickerViews with MvxPickerViewModels, as outlined in N=19 of the N+1 series for MvvmCross. There is one UIPickerView/MvxPickerViewModel for each of the type, the manufacturer, and the model (only one is ever on the screen at a time).
However, if I update the ItemsSource data for a MvxPickerViewModel, the rows that were already visible in the UIPickerView do not refresh until they are scrolled off the screen. The N=19 example does not update the list of items in the UIPickerView, so it isn't clear whether the problem exists there too. Have I made a mistake, or has anyone else experienced this? Is there a workaround?
public override void ViewDidLoad ()
{
base.ViewDidLoad ();
NavigationController.NavigationBarHidden = true;
var comparableTableViewSource = new MvxStandardTableViewSource(ComparableLV);
ComparableLV.Source = comparableTableViewSource;
var ManufacturerPicker = new UIPickerView();
var manufacturerPickerModel = new MvxPickerViewModel(ManufacturerPicker);
ManufacturerPicker.Model = manufacturerPickerModel;
ManufacturerPicker.ShowSelectionIndicator = true;
ManufacturerTextField.InputView = ManufacturerPicker;
var ModelPicker = new UIPickerView();
var modelPickerModel = new MvxPickerViewModel(ModelPicker);
ModelPicker.Model = modelPickerModel;
ModelPicker.ShowSelectionIndicator = true;
ModelTextField.InputView = ModelPicker;
var TypePicker = new UIPickerView();
var typePickerModel = new MvxPickerViewModel(TypePicker);
TypePicker.Model = typePickerModel;
TypePicker.ShowSelectionIndicator = true;
TypeTextField.InputView = TypePicker;
var set = this.CreateBindingSet<FirstView, FirstViewModel>();
set.Bind(comparableTableViewSource).For(s => s.ItemsSource).To(vm => vm.Comparables);
set.Bind(manufacturerPickerModel).For(p => p.ItemsSource).To(vm => vm.Manufacturers);
set.Bind(manufacturerPickerModel).For(p => p.SelectedItem).To(vm => vm.SelectedManufacturer);
set.Bind(ManufacturerTextField).To(vm => vm.SelectedManufacturer);
set.Bind(modelPickerModel).For(p => p.ItemsSource).To(vm => vm.Models);
set.Bind(modelPickerModel).For(p => p.SelectedItem).To(vm => vm.SelectedModel);
set.Bind(ModelTextField).To(vm => vm.SelectedModel);
set.Bind(typePickerModel).For(p => p.ItemsSource).To(vm => vm.Types);
set.Bind(typePickerModel).For(p => p.SelectedItem).To(vm => vm.SelectedType);
set.Bind(TypeTextField).To(vm => vm.SelectedType);
set.Apply();
var g = new UITapGestureRecognizer(() => {
HornTextField.ResignFirstResponder();
ManufacturerTextField.ResignFirstResponder();
ModelTextField.ResignFirstResponder();
});
View.AddGestureRecognizer(g);
}
Looking at MvxPickerViewModel.cs, I'm suspicious that there is no call to ReloadAllComponents (or to ReloadComponent(0)) when the ItemsSource itself changes, although there is a call when the collection changes internally.
As a workaround, perhaps try a subclass like:
public class MyPickerViewModel
: MvxPickerViewModel
{
private readonly UIPickerView _pickerView;
public MyPickerViewModel(UIPickerView pickerView)
: base(pickerView)
{
_pickerView = pickerView;
}
[MvxSetToNullAfterBinding]
public override IEnumerable ItemsSource
{
get { return base.ItemsSource; }
set
{
base.ItemsSource = value;
if (value != null)
_pickerView.ReloadComponent(0);
}
}
}
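If the subclass does the trick, wiring it up is a drop-in change in ViewDidLoad, e.g. for the manufacturer picker from the question (only the view-model type changes; the bindings stay exactly the same):
// Sketch: swap MvxPickerViewModel for the reloading subclass.
var ManufacturerPicker = new UIPickerView();
var manufacturerPickerModel = new MyPickerViewModel(ManufacturerPicker);
ManufacturerPicker.Model = manufacturerPickerModel;
ManufacturerPicker.ShowSelectionIndicator = true;
ManufacturerTextField.InputView = ManufacturerPicker;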
Would also be great to get a fix back into MvvmCross...
Given the following OrmLite configuration from the documentation, what is the best way to configure the ConnectionFilter for each of the named connections?
var dbFactory = new OrmLiteConnectionFactory(
"Data Source=host;Initial Catalog=RobotsMaster;Integrated Security=SSPI", //Connection String
SqlServerDialect.Provider);
dbFactory.Run(db => db.CreateTable<MasterRecord>(overwrite:false));
NoOfShards.Times(i => {
var namedShard = "robots-shard" + i;
dbFactory.RegisterConnection(namedShard,
"~/App_Data/{0}.sqlite".Fmt(shardId).MapAbsolutePath(), //Connection String
SqliteDialect.Provider);
dbFactory.OpenDbConnection(namedShard).Run(db => db.CreateTable<Robot>(overwrite:false));
});
Currently, I am using this instead of RegisterConnection.
OrmLiteConnectionFactory.NamedConnections[namedShard] = new OrmLiteConnectionFactory("~/App_Data/{0}.sqlite".Fmt(namedShard).MapAbsolutePath(), true, SqliteDialect.Provider, true) {
ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current)
};
Could we change RegisterConnection to something like
public void RegisterConnection(string connectionKey, string connectionString, IOrmLiteDialectProvider dialectProvider, bool autoDisposeConnection = true)
{
RegisterConnection(connectionKey, new OrmLiteConnectionFactory(connectionString, autoDisposeConnection, dialectProvider, autoDisposeConnection));
}
public void RegisterConnection(string connectionKey, OrmLiteConnectionFactory ormLiteConnectionFactory)
{
NamedConnections[connectionKey] = ormLiteConnectionFactory;
}
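With an overload like that, the registration and its profiler filter could be done in a single call, along the lines of (a sketch of the proposed API, reusing the shard example above):
// Sketch: register the named connection together with its ConnectionFilter.
dbFactory.RegisterConnection(namedShard,
    new OrmLiteConnectionFactory(
        "~/App_Data/{0}.sqlite".Fmt(namedShard).MapAbsolutePath(),
        true, SqliteDialect.Provider, true)
    {
        ConnectionFilter = x => new ProfiledDbConnection(x, Profiler.Current)
    });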
Submitted a patch to ServiceStack.OrmLite. https://github.com/ServiceStack/ServiceStack.OrmLite/pull/212
I want to post some request values alongside the multipart/form-data file contents. In the old API you could use PostFileWithRequest:
[Test]
public void Can_POST_upload_file_using_ServiceClient_with_request()
{
IServiceClient client = new JsonServiceClient(ListeningOn);
var uploadFile = new FileInfo("~/TestExistingDir/upload.html".MapProjectPath());
var request = new FileUpload{CustomerId = 123, CustomerName = "Foo"};
var response = client.PostFileWithRequest<FileUploadResponse>(ListeningOn + "/fileuploads", uploadFile, request);
var expectedContents = new StreamReader(uploadFile.OpenRead()).ReadToEnd();
Assert.That(response.FileName, Is.EqualTo(uploadFile.Name));
Assert.That(response.ContentLength, Is.EqualTo(uploadFile.Length));
Assert.That(response.Contents, Is.EqualTo(expectedContents));
Assert.That(response.CustomerName, Is.EqualTo("Foo"));
Assert.That(response.CustomerId, Is.EqualTo(123));
}
I can't find any such method in the new API, nor any overloads on client.Post() which suggest that this is still possible. Does anyone know if this is a feature that was dropped?
Update
As @mythz points out, the feature wasn't dropped. I had made the mistake of not casting the client:
private IRestClient CreateRestClient()
{
return new JsonServiceClient(WebServiceHostUrl);
}
[Test]
public void Can_WebRequest_POST_upload_binary_file_to_save_new_file()
{
var restClient = (JsonServiceClient)CreateRestClient(); // this cast was missing
var fileToUpload = new FileInfo(@"D:/test/test.avi");
var beforeHash = this.Hash(fileToUpload);
var response = restClient.PostFileWithRequest<FilesResponse>("files/UploadedFiles/", fileToUpload, new TestRequest() { Echo = "Test"});
var uploadedFile = new FileInfo(FilesRootDir + "UploadedFiles/test.avi");
var afterHash = this.Hash(uploadedFile);
Assert.That(beforeHash, Is.EqualTo(afterHash));
}
private string Hash(FileInfo file)
{
using (var md5 = MD5.Create())
{
using (var stream = file.OpenRead())
{
var bytes = md5.ComputeHash(stream);
return BitConverter.ToString(bytes).Replace("-", "").ToLower();
}
}
}
None of the old API was removed from the C# Service Clients, only new APIs were added.
The way you process an uploaded file inside a service also hasn't changed.
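For reference, a minimal sketch of the receiving service (the DTO and property names are taken from the test above; the service itself is illustrative): the posted DTO is deserialized as usual and the uploaded file is read from Request.Files.
// Sketch of the server side: the request DTO arrives populated and the file
// itself is available via Request.Files.
public class FileUploadService : Service
{
    public object Post(FileUpload request)
    {
        var uploadedFile = Request.Files[0]; // the multipart file part

        return new FileUploadResponse
        {
            FileName = uploadedFile.FileName,
            ContentLength = uploadedFile.ContentLength,
            Contents = new StreamReader(uploadedFile.InputStream).ReadToEnd(),
            CustomerId = request.CustomerId,     // values posted alongside the file
            CustomerName = request.CustomerName,
        };
    }
}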