// Maps a single table block (data + trailer) from the backing file and validates it.
// Trailer layout is BlockTrailerSize bytes after the block data: presumably a 1-byte
// compression tag followed by a masked 4-byte CRC32 — the "+ 1" offsets below assume
// the CRC sits right after the tag byte; TODO confirm against the block writer.
public Block(StorageOptions storageOptions, ReadOptions readOptions, BlockHandle handle, FileData fileData)
{
    try
    {
        _handle = handle;
        _storageOptions = storageOptions;
        _fileData = fileData;

        // Reject handles pointing past the end of the mapped file (either the start
        // or the end of data + trailer would fall outside the file).
        if (handle.Position > fileData.Size || (handle.Position + handle.Count + BlockTrailerSize) > fileData.Size)
            throw new CorruptedDataException("The specified accessor is beyond the bounds of the provided mappedFile");

        // Map the block data plus its trailer in one accessor.
        _accessor = _fileData.File.CreateAccessor(handle.Position, handle.Count + BlockTrailerSize);

        if (readOptions.VerifyChecksums)
        {
            // Stored CRC is masked on disk; compare against the CRC computed over
            // the block data plus the tag byte.
            var crc = Crc.Unmask(_accessor.ReadInt32(handle.Count + 1));
            var actualCrc = CalculateActualCrc(handle.Count + 1); // data + tag
            if (crc != actualCrc)
                throw new CorruptedDataException("block checksum mismatch");
        }

        // The last int of the block data is the restart count; the restart array
        // (one int per restart point) sits immediately before it.
        RestartsCount = _accessor.ReadInt32(handle.Count - sizeof(int));
        RestartsOffset = handle.Count - (RestartsCount * sizeof(int)) - sizeof(int);
        // NOTE(review): this check only fires if RestartsOffset wraps around, which
        // suggests it is an unsigned type — confirm; if it is a signed int a corrupt
        // RestartsCount would yield a negative offset that passes this check.
        if (RestartsOffset > handle.Count)
            throw new CorruptedDataException("restart offset wrapped around");
    }
    catch (Exception)
    {
        // Constructor failed after acquiring resources — release them, then rethrow.
        Dispose();
        throw;
    }
}
/// <summary>
/// Base constructor: unwraps the configured <see cref="StorageOptions"/> for derived storages.
/// </summary>
/// <param name="optionsAccessor">Options accessor supplied by DI; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="optionsAccessor"/> is null.</exception>
protected BaseStorage(IOptions<StorageOptions> optionsAccessor)
{
    var accessor = optionsAccessor ?? throw new ArgumentNullException(nameof(optionsAccessor));
    options = accessor.Value;
}
/// <summary>
/// Adds a sink that writes log events as documents using DocSet.
/// </summary>
/// <param name="loggerConfiguration">The logger configuration.</param>
/// <param name="connectionsStringName">The connectionsString name.</param>
/// <param name="schemaName">The name of the database schema.</param>
/// <param name="restrictedToMinimumLevel">The minimum log event level required in order to write an event to the sink.</param>
/// <param name="batchPostingLimit">The maximum number of events to post in a single batch.</param>
/// <param name="period">The time to wait between checking for event batches.</param>
/// <param name="formatProvider">Supplies culture-specific formatting information, or null.</param>
/// <param name="propertiesAsTags">The properties as tags.</param>
/// <param name="propertiesWhiteList">The properties filter.</param>
/// <param name="options">The options.</param>
/// <param name="indexMap">The index map.</param>
/// <param name="enableDocSetLogging">if set to <c>true</c> [enable document set logging]. Currently unused; kept for API compatibility.</param>
/// <returns>Logger configuration, allowing configuration to continue.</returns>
/// <exception cref="ArgumentNullException">A required parameter is null.</exception>
public static LoggerConfiguration DocSet(
    this LoggerSinkConfiguration loggerConfiguration,
    string connectionsStringName,
    string schemaName = null,
    LogEventLevel restrictedToMinimumLevel = LevelAlias.Minimum,
    int batchPostingLimit = 50,
    TimeSpan? period = null,
    IFormatProvider formatProvider = null,
    IEnumerable<string> propertiesAsTags = null,
    IEnumerable<string> propertiesWhiteList = null,
    IStorageOptions options = null,
    List<IIndexMap<LogEvent>> indexMap = null,
    bool enableDocSetLogging = false)
{
    if (loggerConfiguration == null) throw new ArgumentNullException(nameof(loggerConfiguration));
    if (connectionsStringName == null) throw new ArgumentNullException(nameof(connectionsStringName));

    // Resolve every defaulted argument up front so both the happy path and the
    // fallback path below use identical values.
    var storageOptions = options ?? new StorageOptions(
        new ConnectionStrings().Get(connectionsStringName), schemaName: schemaName);
    var maps = indexMap ?? new List<IIndexMap<LogEvent>>
    {
        new IndexMap<LogEvent>(nameof(LogEvent.Level), i => i.Level),
        new IndexMap<LogEvent>(nameof(LogEvent.Timestamp), i => i.Timestamp.ToString("s"))
    };
    var tags = propertiesAsTags ?? new[] {"CorrelationId", "App" /*, "SourceContext"*/};
    var whiteList = propertiesWhiteList ?? new[] {/*"CorrelationId",*/ "App", "SourceContext" /*"Message", "DocSetKey"*/};
    var batchPeriod = period ?? DocSetSink.DefaultPeriod;

    try
    {
        var docStorage = new DocStorage<LogEvent>(
            new SqlConnectionFactory(), storageOptions, new SqlBuilder(),
            new JsonNetSerializer(), null /*new Md5Hasher()*/, maps);
        return loggerConfiguration.Sink(
            new DocSetSink(docStorage, batchPostingLimit, batchPeriod, formatProvider, tags, whiteList),
            restrictedToMinimumLevel);
    }
    catch (DbException)
    {
        // could not connect to the db, use a null docstorage instead
        return loggerConfiguration.Sink(
            new DocSetSink(null, batchPostingLimit, batchPeriod, formatProvider, tags, whiteList),
            restrictedToMinimumLevel);
    }
}
/// <summary>
/// Creates, initializes and tracks a fresh test storage instance.
/// Defaults to in-memory storage options and an in-memory file system.
/// </summary>
/// <param name="storageOptions">Options to use, or null for defaults.</param>
/// <param name="fileSystem">File system to use, or null for an in-memory one.</param>
/// <returns>The initialized storage, also registered for later cleanup.</returns>
public async Task<Storage> NewStorageAsync(StorageOptions storageOptions = null, FileSystem fileSystem = null)
{
    var options = storageOptions ?? new StorageOptions();

    // Timestamp + GUID so concurrently-created test storages never share a name.
    var name = $"TestStorage-{DateTime.Now:yyyy-MM-dd,HH-mm-ss}-{Guid.NewGuid()}";

    var state = new StorageState(name, options)
    {
        FileSystem = fileSystem ?? new InMemoryFileSystem(name)
    };
    var storage = new Storage(state);
    await storage.InitAsync();

    // Track for teardown by the owning fixture.
    storages.Add(storage);
    return storage;
}
// Aggregate root of a database instance: wires up perf counters, cancellation,
// comparators and all runtime components (memtable, table cache, version set,
// compactor, snapshotter).
// NOTE(review): construction order matters — the components created at the end
// receive `this` and may read the fields assigned above them; do not reorder.
public StorageState(string name, StorageOptions options)
{
    _perfCounters = new PerfCounters(name);
    _cancellationTokenSource = new CancellationTokenSource();
    CancellationToken = _cancellationTokenSource.Token;
    Options = options;
    InternalKeyComparator = new InternalKeyComparator(options.Comparator);
    DatabaseName = name;
    Lock = new AsyncLock();
    FileSystem = new FileSystem(DatabaseName);
    MemTable = new MemTable(this);
    TableCache = new TableCache(this);
    VersionSet = new VersionSet(this);
    Compactor = new BackgroundCompactor(this);
    Snapshooter = new Snapshooter(this);
}
/// <summary>
/// Verifies Delete semantics: deleting by key+tag removes only the matching
/// entry, deleting by key alone removes every entry for that key.
/// </summary>
public void DeleteTest(string connectionName, string databaseName, string schemaName)
{
    // Arrange: fresh storage with the standard test index map.
    var options = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName);
    var connectionFactory = new SqlConnectionFactory();
    var indexMap = TestDocumentIndexMap;
    var storage = new DocStorage<TestDocument>(connectionFactory, options, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), indexMap);
    storage.Initialize();
    storage.Reset();
    Assert.That(storage.Count(), Is.EqualTo(0));

    // Seed three distinct documents: key1+en-US, key2+en-US, key1+en-GB.
    // BUG FIX: the original created doc2 and doc3 but upserted doc1 for all three
    // entries, leaving doc2/doc3 unused — each key now stores its own document.
    var doc1 = new Fixture().Create<TestDocument>();
    storage.Upsert("key1", doc1, new[] {"en-US"});
    var doc2 = new Fixture().Create<TestDocument>();
    storage.Upsert("key2", doc2, new[] {"en-US"});
    var doc3 = new Fixture().Create<TestDocument>();
    storage.Upsert("key1", doc3, new[] {"en-GB"});
    Assert.That(storage.Count(), Is.EqualTo(3));

    // Act + assert: tag-scoped delete removes only key1 + en-US.
    var result1 = storage.Delete("key1", new[] {"en-US"});
    Assert.That(result1, Is.EqualTo(StorageAction.Deleted));
    Assert.That(storage.Count(), Is.EqualTo(2));

    // Key-only delete removes all remaining entries with key1.
    var result2 = storage.Delete("key1");
    Assert.That(result2, Is.EqualTo(StorageAction.Deleted));
    Assert.That(storage.Count(), Is.EqualTo(1));

    // Key-only delete removes all entries with key2, leaving the store empty.
    var result3 = storage.Delete("key2");
    Assert.That(result3, Is.EqualTo(StorageAction.Deleted));
    Assert.That(storage.Count(), Is.EqualTo(0));
}
/// <summary>
/// Benchmark defaults. The -1 values are sentinels resolved by the benchmark
/// runner — TODO confirm their exact meaning against the runner's code.
/// </summary>
public BenchmarkOptions()
{
    // Pull the storage defaults so the benchmark batches match production.
    var storageDefaults = new StorageOptions();

    Num = 1000000;
    Reads = -1;
    Threads = 1;
    ValueSize = 100;
    Histogram = false;
    WriteBatchSize = storageDefaults.WriteBatchSize;
    CacheSize = -1;
    BloomBits = -1;
    UseExistingDatabase = false;
    DatabaseName = null;

    Stress = false;
    StressReaders = 2;
    StressSmallWriters = 4;
    StressLargeWriters = 1;

    Benchmarks = new List<string>
    {
        "fillseq",
        "fillrandom",
        "fillsync",
        "overwrite",
        "readrandom",
        "readrandom", // Extra run to allow previous compactions to quiesce
        "readseq",
        "readreverse",
        "compact",
        "readrandom",
        "readseq",
        "readreverse",
        "fill100K",
        "crc32c",
        "acquireload"
    };
}
/// <summary>
/// DI constructor: unwraps the configured <see cref="StorageOptions"/>.
/// </summary>
/// <param name="storageOptions">Options accessor supplied by DI.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="storageOptions"/> is null.</exception>
public CdnHelper(IOptions<StorageOptions> storageOptions)
{
    // Fail fast with a clear exception on a missing registration instead of a
    // NullReferenceException (matches the guard style used by sibling classes).
    if (storageOptions == null) throw new ArgumentNullException(nameof(storageOptions));
    _storageOptions = storageOptions.Value;
}
// Passthrough constructor: file-based log writer; all setup happens in the base class.
public FileLogWriterImpl(StorageOptions options, ILogger log) : base(options, log) { }
// Passthrough constructor: forwards storage and EF context options to the base context.
public LotteryOrderingDbContext(StorageOptions storageOptions, DbContextOptions <LotteryOrderingDbContext> options) : base(storageOptions, options) { }
// Snapshot-archive storage: captures dependencies and pre-builds all SQL against
// the configured archive table. The "(@Data)::json" cast indicates PostgreSQL —
// NOTE(review): confirm the unquoted "Index" column name is valid in the target
// dialect/version.
public ArchiveStorage(IServiceProvider serviceProvider, ISerializer serializer, StorageOptions config)
{
    logger = serviceProvider.GetService <ILogger <ArchiveStorage <PrimaryKey, StateType> > >();
    this.serializer = serializer;
    this.config = config;

    // Table name comes from configuration; it is interpolated into the SQL text,
    // while all values go through named parameters.
    var tableName = config.SnapshotArchiveTable;
    deleteSql = $"DELETE FROM {tableName} where id=@Id";
    deleteAllSql = $"DELETE FROM {tableName} where stateid=@StateId";
    getByIdSql = $"select * FROM {tableName} where id=@Id";
    getListByStateIdSql = $"select Id,StartVersion,EndVersion,StartTimestamp,EndTimestamp,Index,EventIsCleared FROM {tableName} where stateid=@StateId";
    // Latest archive entry = highest index for the state.
    getLatestByStateIdSql = $"select Id,StartVersion,EndVersion,StartTimestamp,EndTimestamp,Index,EventIsCleared FROM {tableName} where stateid=@StateId order by index desc limit 1";
    insertSql = $"INSERT into {tableName}(Id,stateid,StartVersion,EndVersion,StartTimestamp,EndTimestamp,Index,EventIsCleared,data,IsOver,Version)VALUES(@Id,@StateId,@StartVersion,@EndVersion,@StartTimestamp,@EndTimestamp,@Index,@EventIsCleared,(@Data)::json,@IsOver,@Version)";
    updateOverSql = $"update {tableName} set IsOver=@IsOver where stateid=@StateId";
    updateEventIsClearSql = $"update {tableName} set EventIsCleared=true where id=@Id";
}
// Reads every configuration section up front, mirrors each one into its typed
// options class (swagger, storage, cors, jwt, worker, tencentCloud, signature,
// authorize) and finally aggregates all captured option objects into AppOptions.
// NOTE(review): each callback both binds the whole section via Configure<T> AND
// copies individual values by hand — presumably intentional belt-and-braces, but
// worth confirming the duplication is needed.
public override void PreConfigureServices(ServiceConfigurationContext context) { var configuration = context.Services.GetConfiguration(); var swagger = new SwaggerOptions(); var storage = new StorageOptions(); var cors = new CorsOptions(); var jwt = new JwtOptions(); var worker = new WorkerOptions(); var signature = new SignatureOptions(); var tencentCloud = new TencentCloudOptions(); var authorize = new AuthorizeOptions(); PreConfigure <SwaggerOptions>(options => { var swaggerOption = configuration.GetSection("swagger"); Configure <SwaggerOptions>(swaggerOption); options.Version = swaggerOption.GetValue <string>(nameof(options.Version)); options.Name = swaggerOption.GetValue <string>(nameof(options.Name)); options.Title = swaggerOption.GetValue <string>(nameof(options.Title)); options.Description = swaggerOption.GetValue <string>(nameof(options.Description)); options.RoutePrefix = swaggerOption.GetValue <string>(nameof(options.RoutePrefix)); options.DocumentTitle = swaggerOption.GetValue <string>(nameof(options.DocumentTitle)); swagger = options; }); PreConfigure <StorageOptions>(options => { var storageOption = configuration.GetSection("storage"); Configure <StorageOptions>(storageOption); options.Mongodb = storageOption.GetValue <string>(nameof(options.Mongodb)); options.RedisIsEnabled = storageOption.GetValue <bool>(nameof(options.RedisIsEnabled)); options.Redis = storageOption.GetValue <string>(nameof(options.Redis)); storage = options; }); PreConfigure <CorsOptions>(options => { var corsOption = configuration.GetSection("cors"); Configure <CorsOptions>(corsOption); options.PolicyName = corsOption.GetValue <string>(nameof(options.PolicyName)); options.Origins = corsOption.GetValue <string>(nameof(options.Origins)); cors = options; }); PreConfigure <JwtOptions>(options => { var jwtOption = configuration.GetSection("jwt"); Configure <JwtOptions>(jwtOption); options.Issuer = jwtOption.GetValue <string>(nameof(options.Issuer)); options.Audience = 
// (continues) jwt Audience/SigningKey, then worker, tencentCloud and signature sections.
jwtOption.GetValue <string>(nameof(options.Audience)); options.SigningKey = jwtOption.GetValue <string>(nameof(options.SigningKey)); jwt = options; }); PreConfigure <WorkerOptions>(options => { var workerOption = configuration.GetSection("worker"); Configure <WorkerOptions>(workerOption); options.IsEnabled = workerOption.GetValue <bool>(nameof(options.IsEnabled)); options.Cron = workerOption.GetValue <string>(nameof(options.Cron)); worker = options; }); PreConfigure <TencentCloudOptions>(options => { var tencentCloudOption = configuration.GetSection("tencentCloud"); Configure <TencentCloudOptions>(tencentCloudOption); options.SecretId = tencentCloudOption.GetValue <string>(nameof(options.SecretId)); options.SecretKey = tencentCloudOption.GetValue <string>(nameof(options.SecretKey)); tencentCloud = options; }); PreConfigure <SignatureOptions>(options => { var signatureOption = configuration.GetSection("signature"); options.Path = signatureOption.GetValue <string>(nameof(options.Path)); foreach (var item in signatureOption.GetSection(nameof(options.Urls)).GetChildren()) { options.Urls.Add(item.GetValue <string>("url"), item.GetValue <string>("param")); } signature = options; Configure <SignatureOptions>(item => { item.Path = signature.Path; item.Urls = signature.Urls; }); }); PreConfigure <AuthorizeOptions>(options => { var authorizeOption = configuration.GetSection("authorize"); var githubOption = authorizeOption.GetSection("github"); var giteeOption = authorizeOption.GetSection("gitee"); var alipayOption = authorizeOption.GetSection("alipay"); var dingtalkOption = authorizeOption.GetSection("dingtalk"); var microsoftOption = authorizeOption.GetSection("microsoft"); var weiboOptions = authorizeOption.GetSection("weibo"); var qqOptions = authorizeOption.GetSection("qq"); Configure <AuthorizeOptions>(authorizeOption); Configure <GithubOptions>(githubOption); Configure <GiteeOptions>(giteeOption); Configure <AlipayOptions>(alipayOption); Configure 
// (continues) per-provider OAuth options: ClientId/Secret/RedirectUrl/Scope copied from config.
<DingtalkOptions>(dingtalkOption); Configure <MicrosoftOptions>(microsoftOption); Configure <WeiboOptions>(weiboOptions); Configure <QQOptions>(qqOptions); options.Github = new GithubOptions { ClientId = githubOption.GetValue <string>(nameof(options.Github.ClientId)), ClientSecret = githubOption.GetValue <string>(nameof(options.Github.ClientSecret)), RedirectUrl = githubOption.GetValue <string>(nameof(options.Github.RedirectUrl)), Scope = githubOption.GetValue <string>(nameof(options.Github.Scope)) }; options.Gitee = new GiteeOptions { ClientId = giteeOption.GetValue <string>(nameof(options.Gitee.ClientId)), ClientSecret = giteeOption.GetValue <string>(nameof(options.Gitee.ClientSecret)), RedirectUrl = giteeOption.GetValue <string>(nameof(options.Gitee.RedirectUrl)), Scope = giteeOption.GetValue <string>(nameof(options.Gitee.Scope)) }; options.Alipay = new AlipayOptions { AppId = alipayOption.GetValue <string>(nameof(options.Alipay.AppId)), RedirectUrl = alipayOption.GetValue <string>(nameof(options.Alipay.RedirectUrl)), Scope = alipayOption.GetValue <string>(nameof(options.Alipay.Scope)), PrivateKey = alipayOption.GetValue <string>(nameof(options.Alipay.PrivateKey)), PublicKey = alipayOption.GetValue <string>(nameof(options.Alipay.PublicKey)) }; options.Dingtalk = new DingtalkOptions { AppId = dingtalkOption.GetValue <string>(nameof(options.Dingtalk.AppId)), AppSecret = dingtalkOption.GetValue <string>(nameof(options.Dingtalk.AppSecret)), RedirectUrl = dingtalkOption.GetValue <string>(nameof(options.Dingtalk.RedirectUrl)), Scope = dingtalkOption.GetValue <string>(nameof(options.Dingtalk.Scope)) }; options.Microsoft = new MicrosoftOptions { ClientId = microsoftOption.GetValue <string>(nameof(options.Microsoft.ClientId)), ClientSecret = microsoftOption.GetValue <string>(nameof(options.Microsoft.ClientSecret)), RedirectUrl = microsoftOption.GetValue <string>(nameof(options.Microsoft.RedirectUrl)), Scope = microsoftOption.GetValue 
// (continues) weibo + qq providers, then AppOptions aggregation of all captured option objects.
<string>(nameof(options.Microsoft.Scope)) }; options.Weibo = new WeiboOptions { ClientId = weiboOptions.GetValue <string>(nameof(options.Weibo.ClientId)), ClientSecret = weiboOptions.GetValue <string>(nameof(options.Weibo.ClientSecret)), RedirectUrl = weiboOptions.GetValue <string>(nameof(options.Weibo.RedirectUrl)), Scope = weiboOptions.GetValue <string>(nameof(options.Weibo.Scope)) }; options.QQ = new QQOptions { ClientId = qqOptions.GetValue <string>(nameof(options.QQ.ClientId)), ClientSecret = qqOptions.GetValue <string>(nameof(options.QQ.ClientSecret)), RedirectUrl = qqOptions.GetValue <string>(nameof(options.QQ.RedirectUrl)), Scope = qqOptions.GetValue <string>(nameof(options.QQ.Scope)) }; authorize = options; }); PreConfigure <AppOptions>(options => { options.Swagger = swagger; options.Storage = storage; options.Cors = cors; options.Jwt = jwt; options.Worker = worker; options.Signature = signature; options.TencentCloud = tencentCloud; options.Authorize = authorize; Configure <AppOptions>(item => { item.Swagger = swagger; item.Storage = storage; item.Cors = cors; item.Jwt = jwt; item.Worker = worker; item.Signature = signature; item.TencentCloud = tencentCloud; item.Authorize = authorize; }); }); }
/// <summary>
/// DI constructor: unwraps the configured <see cref="StorageOptions"/>.
/// </summary>
/// <param name="storageOptions">Options accessor supplied by DI.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="storageOptions"/> is null.</exception>
public ConfigureProviderOptions(IOptions<StorageOptions> storageOptions)
{
    // Fail fast on a missing registration instead of a NullReferenceException.
    if (storageOptions == null) throw new ArgumentNullException(nameof(storageOptions));
    this.storageOptions = storageOptions.Value;
}
// Convenience overload: delegates to the full constructor with a null middle argument.
public BaseSerializer(StorageOptions options, int indexColumn) : this(options, null, indexColumn) { }
// Verifies take-size clamping: with no explicit take, LoadValues returns
// DefaultTakeSize (3) rows; an explicit take of 10 is clamped to MaxTakeSize (5).
// Seeds 20 rows via MassInsertTest mid-test, so statement order matters here.
// NOTE(review): nothing asserts the TimestampDescending sort order — the loop
// only logs the rows; consider adding an ordering assertion.
public void PagingAndSortingTest(string connectionName, string databaseName, string schemaName) { var options = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName) { DefaultTakeSize = 3, MaxTakeSize = 5, DefaultSortColumn = SortColumn.TimestampDescending }; var connectionFactory = new SqlConnectionFactory(); var indexMap = TestDocumentIndexMap; var storage = new DocStorage<TestDocument>(connectionFactory, options, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), indexMap); MiniProfiler.Start(); var mp = MiniProfiler.Current; MassInsertTest(connectionName, databaseName, schemaName, 20, true); var docs1 = storage.LoadValues(new[] { "en-US" }).ToList(); Assert.That(docs1, Is.Not.Null); Assert.That(docs1.Any(), Is.True); Assert.That(docs1.Count, Is.EqualTo(options.DefaultTakeSize)); foreach(var doc in docs1) Log.Debug($"doc1: {doc.Id}, {doc.Name}"); var docs2 = storage.LoadValues(new[] { "en-US" }, skip:0, take:10).ToList(); Assert.That(docs2, Is.Not.Null); Assert.That(docs2.Any(), Is.True); Assert.That(docs2.Count, Is.EqualTo(options.MaxTakeSize)); foreach (var doc in docs2) Log.Debug($"doc2: {doc.Id}, {doc.Name}"); Log.Debug($"trace: {mp.RenderPlainText()}"); MiniProfiler.Stop(); }
// Passthrough constructor: forwards options and type metadata to the base serializer.
public Serializer(StorageOptions options, TypeInfo typeInfo) : base(options, typeInfo) { }
// Passthrough constructor: forwards storage and EF context options to the base context.
public ScheduleDbContext(StorageOptions storageOptions, DbContextOptions <ScheduleDbContext> options) : base(storageOptions, options) { }
// Exercises Upsert with three key shapes (string, Guid, numeric) per iteration,
// then repeatedly loads values and checks the post-insert count. Most Asserts
// are commented out — this reads as a profiling/smoke test rather than a strict
// assertion test. The long doc1.Name literal deliberately contains non-ASCII
// text (including an embedded line break) to exercise encoding round-trips —
// do not "fix" its characters.
public void InsertTest(string connectionName, string databaseName, string schemaName) { var options = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName); //var options = new StorageOptions(new ConnectionStrings().Get("XtricateTestSqlLocalDb"), databaseName: databaseName, schemaName: "StorageTests"); //var connectionFactory = new SqlLocalDbConnectionFactory(); var connectionFactory = new SqlConnectionFactory(); var indexMap = TestDocumentIndexMap; var storage = new DocStorage<TestDocument>(connectionFactory, options, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), indexMap); MiniProfiler.Start(); var mp = MiniProfiler.Current; storage.Reset(); var preCount = storage.Count(new[] {"en-US"}); Log.Debug($"pre count: {preCount}"); var key = DateTime.Now.Epoch() + new Random().Next(10000, 99999); for (var i = 1; i < 5; i++) { Log.Debug($"+{i}"); using (mp.Step("insert " + i)) { var doc1 = new Fixture().Create<TestDocument>(); //doc1.Name = "Routing wide joints (≥ 4 mm, e.g. between natural stone tiles)öoäa®r¼4èe"; doc1.Name = "NEU!Kreissägeblätter Expert for Steel NOUVEAU˜!Lames de scies circulaires Expert for Steel °˛˝˙°ˆˇ! 
˘ Expert for Steel"; var result1 = storage.Upsert("key1", doc1, new[] {"en-US"}); // Assert.That(result1, Is.EqualTo(StorageAction.Updated)); //} //using (mp.Step("upsert string")) //{ var result2 = storage.Upsert(Guid.NewGuid(), new Fixture().Create<TestDocument>(), new[] {"en-US"}); //Assert.That(result2, Is.EqualTo(StorageAction.Inserted)); //} //using (mp.Step("upsert int")) //{ var result3 = storage.Upsert(key + i, new Fixture().Create<TestDocument>(), new[] {"en-US"}); //Assert.That(result3, Is.EqualTo(StorageAction.Inserted)); } } for (var i = 1; i <= 5; i++) { using (mp.Step("load " + i)) { var result = storage.LoadValues(new[] {"en-US"}).Take(100); //Assert.That(result, Is.Not.Null); //Assert.That(result, Is.Not.Empty); Log.Debug($"loaded count: {result.Count()}"); Log.Debug($"first: {result.FirstOrDefault().Id}"); //result.ForEach(r => Trace.Write(r.Id)); //result.ForEach(r => Assert.That(r, Is.Not.Null)); result.ForEach(r => Log.Debug(r.Name)); } } using (mp.Step("post count")) { var postCount = storage.Count(new[] {"en-US"}); Log.Debug($"post count: {postCount}"); //Assert.That(storage.Count(), Is.GreaterThan(preCount)); } Log.Debug($"trace: {mp.RenderPlainText()}"); MiniProfiler.Stop(); }
/// <summary>Copies the configured comparator's name onto the version edit.</summary>
private static void AddMetadata(VersionEdit edit, StorageOptions options)
    => edit.SetComparatorName(options.Comparator.Name);
// Bulk-seeds `docCount` documents under sequential numeric keys (epoch + random
// base + i) with staggered timestamps (now + i minutes); optionally resets the
// store first. Used both standalone and as a seeding step by other tests, so it
// has no assertions — it only logs pre/post counts and the profiler trace.
public void MassInsertTest(string connectionName, string databaseName, string schemaName, int docCount, bool reset) { var options = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName); var connectionFactory = new SqlConnectionFactory(); var indexMap = TestDocumentIndexMap; var storage = new DocStorage<TestDocument>(connectionFactory, options, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), indexMap); MiniProfiler.Start(); var mp = MiniProfiler.Current; if (reset) storage.Reset(); Log.Debug($"pre count: {storage.Count(new[] {"en-US"})}"); var key = DateTime.Now.Epoch() + new Random().Next(10000, 99999); for (var i = 1; i <= docCount; i++) { Log.Debug($"+{i}"); var doc = new Fixture().Create<TestDocument>(); doc.Id = i; storage.Upsert(key + i, doc, new[] {"en-US"}, timestamp: DateTime.Now.AddMinutes(i)); } Log.Debug($"post count: {storage.Count(new[] {"en-US"})}"); Log.Debug($"trace: {mp.RenderPlainText()}"); MiniProfiler.Stop(); }
/// <summary>
/// Class XtrmAddons Net Application Serializable Preferences constructor.
/// Initializes the special-directories lookup and default storage options.
/// </summary>
public Preferences() { SpecialDirectories = new SpecialDirectories(); Storage = new StorageOptions(); }
// DI constructor: captures the note storage service and unwraps the configured
// storage options. No null guards — DI is trusted to supply both dependencies.
public Handler(IStorageService <NoteEntity> storageService, IOptions <StorageOptions> storageOptions) { _storageService = storageService; _storageOptions = storageOptions.Value; }
/// <summary>
/// Indexes the registered storage providers by name for later resolution and
/// unwraps the configured options.
/// </summary>
/// <param name="storageProviders">All registered providers; names must be unique.</param>
/// <param name="options">Options accessor supplied by DI.</param>
public StorageFactory(IEnumerable<IStorageProvider> storageProviders, IOptions<StorageOptions> options)
{
    // Duplicate provider names fail here, at wiring time, rather than at lookup time.
    this.storageProviders = storageProviders.ToDictionary(provider => provider.Name, provider => provider);
    this.options = options.Value;
}
/// <summary>
/// DI constructor: fails fast with <see cref="ArgumentNullException"/> when either
/// the options accessor (or its value) or the logger is missing.
/// </summary>
public ImagesController(IOptions<StorageOptions> options, ILogger<ImagesController> logger)
{
    var resolvedOptions = options?.Value ?? throw new ArgumentNullException(nameof(options));
    var resolvedLogger = logger ?? throw new ArgumentNullException(nameof(logger));

    _options = resolvedOptions;
    _logger = resolvedLogger;
}
// Stream reader: base handles the input stream (the `true` flag's meaning is
// defined by the base class — presumably "leave open" or "owns stream"; confirm
// there). Locally prepares an empty file header and a serializer for T.
public Reader(StorageOptions options, Stream input) : base(options, input, true) { _fileHeader = new Header(); _generator = new Serializer <T>(options, Type); }
// DI constructor: unwraps the configured storage options. No null guard — DI is
// trusted to supply the accessor.
public FileUploadActivity(IOptions <StorageOptions> options) { _options = options.Value; }
// DI constructor: captures the hosting environment and unwraps the configured
// storage options.
public FileService(IWebHostEnvironment env, IOptions <StorageOptions> storageOptions) { _env = env; _storageOptions = storageOptions.Value; }
/// <summary>
/// Registers the storage service, splitting the flat <paramref name="storageOptions"/>
/// into the two focused option types the service consumes (account credentials and
/// container names).
/// </summary>
/// <param name="services">The service collection being configured.</param>
/// <param name="storageOptions">Source values copied into the typed options.</param>
/// <returns>The same service collection, for chaining.</returns>
public static IServiceCollection AddStorageService(this IServiceCollection services, StorageOptions storageOptions)
{
    services.Configure<AccountOptions>(account =>
    {
        account.AccountKey = storageOptions.AccountKey;
        account.AccountName = storageOptions.AccountName;
    });

    services.Configure<ContainerOptions>(containers =>
    {
        containers.ProfileImageContainerName = storageOptions.ProfileImageContainerName;
        containers.PostImageContainerName = storageOptions.PostImageContainerName;
        containers.MeasureImageContainerName = storageOptions.MeasureImageContainerName;
    });

    services.AddTransient<IStorageService, StorageService>();
    return services;
}
// DI constructor with fail-fast guards: every dependency (face app, storage
// options, logger) must be registered, otherwise throws ArgumentNullException.
public FaceDetectionFunction(FaceApp faceApp, IOptions <StorageOptions> storageOptions, ILogger <FunctionDetect> logger) { _faceApp = faceApp ?? throw new ArgumentNullException(nameof(faceApp)); _storageOptions = storageOptions?.Value ?? throw new ArgumentNullException(nameof(storageOptions)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); }
// Captures the storage options for derived clients. No validation — callers may
// pass null; confirm downstream code tolerates that.
public BaseStorageClient(StorageOptions options) { Options = options; }
// Command action: persists all storage options, then signals completion.
// NOTE(review): SaveAll is assumed to be synchronous and non-throwing here — a
// thrown exception would skip EndAction; confirm that is acceptable.
protected override void OnExecute() { StorageOptions.SaveAll(); EndAction(); }
/// <summary> /// Initializes a new instance of the <see cref="AzureBlobStorage" /> class. /// </summary> /// <param name="storageOptions">The options for the storage.</param> public AzureBlobStorage(IOptions <StorageOptions> storageOptions) { this.storageOptions = storageOptions.Value; } // no null guard: a missing registration surfaces as a NullReferenceException here
// Captures the storage options directly (not via IOptions) for this handler.
public Handler(StorageOptions storageOptions) { _storageOptions = storageOptions; }
/// <summary>
/// Resolves the write endpoints for a call: the method-level WriteEndpoints list
/// wins, then the storage-level list; otherwise falls back to a single endpoint
/// chosen by specificity (method WriteEndpoint, method Endpoint, storage
/// WriteEndpoint, storage Endpoint).
/// </summary>
/// <param name="methodOptions">Per-method overrides; may be null.</param>
/// <param name="storageOptions">Storage-wide defaults; may be null.</param>
/// <returns>The endpoint list to write to; the fallback list may contain a single null entry when nothing is configured.</returns>
private IEnumerable<EndpointOptions> GetWriteEndpoints(MethodOptions methodOptions, StorageOptions storageOptions)
{
    // BUG FIX: the original condition used && (required BOTH levels to define
    // WriteEndpoints before using either), so a list configured on only one
    // level was silently ignored and the single-endpoint fallback ran instead.
    // The ?? chain expresses the intended precedence directly.
    var endpoints = methodOptions?.WriteEndpoints ?? storageOptions?.WriteEndpoints;
    if (endpoints != null)
    {
        return endpoints;
    }

    // No list configured at either level: fall back to one endpoint, most
    // specific setting first (preserves the original fallback order).
    return new List<EndpointOptions>
    {
        methodOptions?.WriteEndpoint ?? methodOptions?.Endpoint ?? storageOptions?.WriteEndpoint ?? storageOptions?.Endpoint
    };
}
// Serilog sink registration for DocSet. Null defaults are materialized inline
// (StorageOptions from the named connection string; Level/Timestamp index maps);
// if the database is unreachable at configuration time (DbException) the sink is
// registered with a null doc storage so logging setup never fails the host.
// NOTE(review): enableDocSetLogging is accepted but never read in this body.
/// <summary> /// Adds a sink that writes log events as documents using DocSet. /// </summary> /// <param name="loggerConfiguration">The logger configuration.</param> /// <param name="connectionsStringName">The connectionsString name.</param> /// <param name="schemaName">The name of the database schema.</param> /// <param name="restrictedToMinimumLevel">The minimum log event level required in order to write an event to the sink.</param> /// <param name="batchPostingLimit">The maximum number of events to post in a single batch.</param> /// <param name="period">The time to wait between checking for event batches.</param> /// <param name="formatProvider">Supplies culture-specific formatting information, or null.</param> /// <param name="propertiesAsTags">The properties as tags.</param> /// <param name="propertiesWhiteList">The properties filter.</param> /// <param name="options">The options.</param> /// <param name="indexMap">The index map.</param> /// <param name="enableDocSetLogging">if set to <c>true</c> [enable document set logging].</param> /// <returns> /// Logger configuration, allowing configuration to continue. 
/// </returns> /// <exception cref="System.ArgumentNullException"> /// </exception> /// <exception cref="ArgumentNullException">A required parameter is null.</exception> public static LoggerConfiguration DocSet( this LoggerSinkConfiguration loggerConfiguration, string connectionsStringName, string schemaName = null, LogEventLevel restrictedToMinimumLevel = LevelAlias.Minimum, int batchPostingLimit = 50, TimeSpan?period = null, IFormatProvider formatProvider = null, IEnumerable <string> propertiesAsTags = null, IEnumerable <string> propertiesWhiteList = null, IStorageOptions options = null, List <IIndexMap <LogEvent> > indexMap = null, bool enableDocSetLogging = false) { if (loggerConfiguration == null) { throw new ArgumentNullException(nameof(loggerConfiguration)); } if (connectionsStringName == null) { throw new ArgumentNullException(nameof(connectionsStringName)); } if (options == null) { options = new StorageOptions( new ConnectionStrings().Get(connectionsStringName), schemaName: schemaName); } if (indexMap == null) { indexMap = new List <IIndexMap <LogEvent> > { new IndexMap <LogEvent>(nameof(LogEvent.Level), i => i.Level), new IndexMap <LogEvent>(nameof(LogEvent.Timestamp), i => i.Timestamp.ToString("s")) } } ; try { return(loggerConfiguration.Sink( new DocSetSink( new DocStorage <LogEvent>(new SqlConnectionFactory(), options, new SqlBuilder(), new JsonNetSerializer(), null /*new Md5Hasher()*/, indexMap), batchPostingLimit, period ?? DocSetSink.DefaultPeriod, formatProvider, propertiesAsTags ?? new[] { "CorrelationId", "App" /*, "SourceContext"*/ }, propertiesWhiteList ?? new[] { /*"CorrelationId",*/ "App", "SourceContext" /*"Message", "DocSetKey"*/ }), restrictedToMinimumLevel)); } catch (DbException) { // could not connect to the db, use a null docstorage instead return(loggerConfiguration.Sink( new DocSetSink( null, batchPostingLimit, period ?? DocSetSink.DefaultPeriod, formatProvider, propertiesAsTags ?? 
// (continues) same default tag/whitelist arrays as the happy path above.
new[] { "CorrelationId", "App" /*, "SourceContext"*/ }, propertiesWhiteList ?? new[] { /*"CorrelationId",*/ "App", "SourceContext" /*"Message", "DocSetKey"*/ }), restrictedToMinimumLevel)); } } }
/// <summary>
/// Round-trips binary payloads through the doc storage: uploads a file stream
/// under a key, reads it back, writes it to disk, then overwrites the key with
/// a document and expects StorageAction.Updated.
/// NOTE(review): depends on local fixture files under c:\tmp — machine-specific.
/// </summary>
public void InsertDataTest(string connectionName, string databaseName, string schemaName)
{
    // Arrange: fresh storage with the standard test index map.
    var options = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName);
    var connectionFactory = new SqlConnectionFactory();
    var indexMap = TestDocumentIndexMap;
    var storage = new DocStorage<TestDocument>(connectionFactory, options, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), indexMap);
    MiniProfiler.Start();
    var mp = MiniProfiler.Current;
    storage.Reset();
    var preCount = storage.Count(new[] {"en-US"});
    Log.Debug($"pre count: {preCount}");

    // Round-trip a binary payload under key1, then overwrite it with a document.
    var key1 = DateTime.Now.Epoch() + new Random().Next(10000, 99999);
    // FIX: dispose the source stream (the original leaked the FileStream).
    using (var inStream1 = File.OpenRead(@"c:\tmp\cat.jpg"))
    {
        storage.Upsert(key1, inStream1, new[] { "en-US" });
    }
    var outStreams1 = storage.LoadData(key1, new[] {"en-US"});
    Assert.That(outStreams1, Is.Not.Null);
    Assert.That(outStreams1.Any());
    foreach (var outStream in outStreams1)
    {
        Assert.That(outStream, Is.Not.Null);
        File.WriteAllBytes($@"c:\tmp\cat_{key1}.jpg", outStream.ToBytes());
    }
    var result1 = storage.Upsert(key1, new Fixture().Create<TestDocument>(), new[] { "en-US" });
    Assert.That(result1, Is.EqualTo(StorageAction.Updated));

    // Same round-trip for a text payload under key2.
    var key2 = DateTime.Now.Epoch() + new Random().Next(10000, 99999);
    using (var inStream2 = File.OpenRead(@"c:\tmp\test.log"))
    {
        storage.Upsert(key2, inStream2, new[] { "en-US" });
    }
    var outStreams2 = storage.LoadData(key2, new[] { "en-US" });
    Assert.That(outStreams2, Is.Not.Null);
    Assert.That(outStreams2.Any());
    foreach (var outStream in outStreams2)
    {
        Assert.That(outStream, Is.Not.Null);
        // BUG FIX: the original wrote this payload to test_{key1}.log even though
        // it was loaded for key2 — the output file is now named after key2.
        File.WriteAllBytes($@"c:\tmp\test_{key2}.log", outStream.ToBytes());
    }
    var result2 = storage.Upsert(key2, new Fixture().Create<TestDocument>(), new[] { "en-US" });
    Assert.That(result2, Is.EqualTo(StorageAction.Updated));
}
/// <summary> /// Initializes a new instance of the <see cref="LocalStorage" /> class. /// </summary> /// <param name="storageOptions">The options for the storage.</param> public LocalStorage(IOptions <StorageOptions> storageOptions) { this.storageOptions = storageOptions.Value; } // no null guard: a missing registration surfaces as a NullReferenceException here
/// <summary>
/// Inserts one document, then exercises every lookup path — by key, by equality and
/// Contains criteria, with a SQL-delimiter probe, and by timestamp range — profiling
/// each path five times with MiniProfiler.
/// </summary>
/// <param name="connectionName">Name of the connection string to resolve.</param>
/// <param name="databaseName">Target database name.</param>
/// <param name="schemaName">Target schema name.</param>
public void FindTest(string connectionName, string databaseName, string schemaName)
{
    var storageOptions = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName)
    {
        BufferedLoad = false
    };
    var storage = new DocStorage<TestDocument>(new SqlConnectionFactory(), storageOptions, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), TestDocumentIndexMap);

    MiniProfiler.Start();
    var profiler = MiniProfiler.Current;
    Log.Debug($"pre count: {storage.Count(new[] {"en-US"})}");

    var docKey = DateTime.Now.Epoch() + new Random().Next(10000, 99999) + "c";
    var docName = "NEWNAME" + docKey;
    var firstSku = "";

    using (profiler.Step("insert "))
    {
        var document = new Fixture().Create<TestDocument>();
        document.Name = docName;
        firstSku = document.Skus.FirstOrDefault().Sku;

        dynamic dynamicDoc = document;
        dynamicDoc.Dyn = "dynamic property";

        var insertResult = storage.Upsert(docKey, document, new[] { "en-US" });
        Assert.That(insertResult, Is.EqualTo(StorageAction.Inserted));
        Log.Debug("newDoc: " + document.Name);
    }

    var documentCount = storage.Count(new[] { "en-US" });
    var allKeys = storage.LoadKeys(new[] { "en-US" }).ToList();
    Assert.That(allKeys, Is.Not.Null);
    Assert.That(allKeys.Any(), Is.True);
    Assert.That(documentCount, Is.EqualTo(allKeys.Count()));

    5.Times(attempt =>
    {
        using (profiler.Step("find no non-existing by SKU criteria/tags " + attempt))
        {
            var filters = new List<Criteria> { new Criteria("sku", CriteriaOperator.Eq, "XYZ_SKU") };
            var found = storage.LoadValues(new[] { "en-US" }, filters).ToList();
            Assert.That(found, Is.Null.Or.Empty);
        }
    });

    5.Times(attempt =>
    {
        using (profiler.Step("find by KEY/tags " + attempt))
        {
            var found = storage.LoadValues(docKey, new[] { "en-US" }).ToList();
            Assert.That(found, Is.Not.Null);
            Assert.That(found.Any(), Is.True);
            Assert.That(found.FirstOrDefault().Name, Is.EqualTo(docName));
        }
    });

    5.Times(attempt =>
    {
        using (profiler.Step("find by NAME criteria/tags " + attempt))
        {
            var filters = new List<Criteria> { new Criteria("name", CriteriaOperator.Eq, docName) };
            var found = storage.LoadValues(new[] { "en-US" }, filters).ToList();
            Assert.That(found, Is.Not.Null);
            Assert.That(found.Any(), Is.True);
            Assert.That(found.FirstOrDefault().Name, Is.EqualTo(docName));
        }
    });

    5.Times(attempt =>
    {
        // Probe with a raw quote to confirm criteria values are not string-concatenated into SQL.
        using (profiler.Step("find by NAME with sql delimiter character " + attempt))
        {
            var filters = new List<Criteria> { new Criteria("name", CriteriaOperator.Eq, "'") };
            var found = storage.LoadValues(new[] { "en-US" }, filters).ToList();
            Assert.That(found, Is.Null.Or.Empty);
        }
    });

    5.Times(attempt =>
    {
        using (profiler.Step("find by SKU criteria/tags " + attempt))
        {
            var filters = new List<Criteria> { new Criteria("sku", CriteriaOperator.Contains, firstSku) };
            var found = storage.LoadValues(new[] { "en-US" }, filters).ToList();
            Assert.That(found, Is.Not.Null);
            Assert.That(found.Any(), Is.True);
            Assert.That(found.FirstOrDefault().Skus.FirstOrDefault().Sku, Is.EqualTo(firstSku));
        }
    });

    5.Times(attempt =>
    {
        using (profiler.Step("find by timestamp " + attempt))
        {
            var found = storage.LoadValues(new[] { "en-US" }, fromDateTime: DateTime.Now.AddMonths(-1), tillDateTime: DateTime.Now).ToList();
            Assert.That(found, Is.Not.Null);
            Assert.That(found.Any(), Is.True);
        }
    });

    Log.Debug($"trace: {profiler.RenderPlainText()}");
    MiniProfiler.Stop();
}
/// <summary>
/// Verifies that resetting a freshly constructed storage leaves it with zero documents.
/// </summary>
/// <param name="connectionName">Name of the connection string to resolve.</param>
/// <param name="databaseName">Target database name.</param>
/// <param name="schemaName">Target schema name.</param>
public void InitializeTest(string connectionName, string databaseName, string schemaName)
{
    var storageOptions = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName);
    var docStorage = new DocStorage<TestDocument>(new SqlConnectionFactory(), storageOptions, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), TestDocumentIndexMap);

    docStorage.Reset();

    Assert.That(docStorage.Count(), Is.EqualTo(0));
}
/// <summary>
/// Registers the application's health checks, tagging each so that individual jobs
/// ("AllJobs", "EventPosts", "Critical", …) can be gated on the checks relevant to them,
/// and blocks startup until the startup actions complete.
/// </summary>
/// <returns>The health-checks builder, for further configuration by the caller.</returns>
// NOTE(review): the cacheOptions/messageBusOptions/metricOptions/storageOptions/queueOptions
// parameters are not referenced in this body — confirm whether they are still needed.
private static IHealthChecksBuilder RegisterHealthChecks(IServiceCollection services, CacheOptions cacheOptions, MessageBusOptions messageBusOptions, MetricOptions metricOptions, StorageOptions storageOptions, QueueOptions queueOptions)
{
    services.AddStartupActionToWaitForHealthChecks();

    return services.AddHealthChecks()
        .AddCheckForStartupActionsComplete()
        // Core infrastructure.
        .AddAutoNamedCheck<ElasticsearchHealthCheck>("Critical")
        .AddAutoNamedCheck<CacheHealthCheck>("Critical")
        .AddAutoNamedCheck<StorageHealthCheck>("EventPosts", "AllJobs")
        // Queues.
        .AddAutoNamedCheck<QueueHealthCheck<EventPost>>("EventPosts", "AllJobs")
        .AddAutoNamedCheck<QueueHealthCheck<EventUserDescription>>("EventUserDescriptions", "AllJobs")
        .AddAutoNamedCheck<QueueHealthCheck<EventNotificationWorkItem>>("EventNotifications", "AllJobs")
        .AddAutoNamedCheck<QueueHealthCheck<WebHookNotification>>("WebHooks", "AllJobs")
        .AddAutoNamedCheck<QueueHealthCheck<MailMessage>>("AllJobs")
        .AddAutoNamedCheck<QueueHealthCheck<WorkItemData>>("WorkItem", "AllJobs")
        // Background jobs.
        .AddAutoNamedCheck<CloseInactiveSessionsJob>("AllJobs")
        .AddAutoNamedCheck<DailySummaryJob>("AllJobs")
        .AddAutoNamedCheck<DownloadGeoIPDatabaseJob>("AllJobs")
        .AddAutoNamedCheck<MaintainIndexesJob>("AllJobs")
        .AddAutoNamedCheck<RetentionLimitsJob>("AllJobs")
        .AddAutoNamedCheck<StackEventCountJob>("AllJobs");
}
/// <summary>
/// Resolves the endpoint to use for a write, from most to least specific:
/// method write endpoint, method endpoint, storage write endpoint, storage endpoint.
/// </summary>
/// <returns>The first configured endpoint in that order, or null if none is set.</returns>
private EndpointOptions FindWriteEndpoint(MethodOptions methodOptions, StorageOptions storageOptions)
{
    var endpoint = methodOptions?.WriteEndpoint;
    if (endpoint == null)
    {
        endpoint = methodOptions?.Endpoint;
    }
    if (endpoint == null)
    {
        endpoint = storageOptions?.WriteEndpoint;
    }
    if (endpoint == null)
    {
        endpoint = storageOptions?.Endpoint;
    }
    return endpoint;
}
/// <summary>
/// Verifies Upsert semantics: the first write on a key reports Inserted, the second
/// reports Updated, and reloading the key returns exactly one document carrying the
/// latest name.
/// </summary>
/// <param name="connectionName">Name of the connection string to resolve.</param>
/// <param name="databaseName">Target database name.</param>
/// <param name="schemaName">Target schema name.</param>
public void UpsertTest(string connectionName, string databaseName, string schemaName)
{
    var storageOptions = new StorageOptions(new ConnectionStrings().Get(connectionName), databaseName: databaseName, schemaName: schemaName);
    var storage = new DocStorage<TestDocument>(new SqlConnectionFactory(), storageOptions, new SqlBuilder(), new JsonNetSerializer(), new Md5Hasher(), TestDocumentIndexMap);

    MiniProfiler.Start();
    var profiler = MiniProfiler.Current;
    storage.Reset();

    var docKey = DateTime.Now.Epoch() + new Random().Next(10000, 99999);

    using (profiler.Step("insert "))
    {
        var document = new Fixture().Create<TestDocument>();

        var insertResult = storage.Upsert(docKey, document, new[] { "en-US" });
        Assert.That(insertResult, Is.EqualTo(StorageAction.Inserted));
        Log.Debug("newDoc: " + document.Name);

        document.Name = Guid.NewGuid().ToString();
        var updateResult = storage.Upsert(docKey, document, new[] { "en-US" });
        Assert.That(updateResult, Is.EqualTo(StorageAction.Updated));
        Log.Debug("newDoc: " + document.Name);

        var reloaded = storage.LoadValues(docKey, new[] { "en-US" }).ToList();
        Assert.That(reloaded, Is.Not.Null);
        Assert.That(reloaded.Any(), Is.True);
        Assert.That(reloaded.Count(), Is.EqualTo(1));
        Assert.That(reloaded.First().Name, Is.Not.Null);
        Assert.That(reloaded.First().Name, Is.EqualTo(document.Name));
        Log.Debug("updatedDoc: " + reloaded.First().Name);
    }

    Log.Debug($"trace: {profiler.RenderPlainText()}");
    MiniProfiler.Stop();
}
/// <summary>
/// Lists blobs across every configured read endpoint in parallel and maps them to
/// file metadata.
/// </summary>
/// <param name="methodOptions">Per-method options used to resolve the read endpoints.</param>
/// <param name="storageOptions">Storage-level options used to resolve the read endpoints.</param>
/// <param name="listOptions">Optional listing filter passed to each storage; null lists everything.</param>
/// <returns>The combined metadata from all endpoints.</returns>
public async Task<ICollection<FileMetadatas>> ListFilesAsync(MethodOptions methodOptions, StorageOptions storageOptions, ListOptions listOptions = null)
{
    ICollection<EndpointOptions> endpoints = GetReadEndpoints(methodOptions, storageOptions).ToList();
    _logger.LogInformation("List files in directories {@directory}", endpoints.Select(e => e.Path));

    IEnumerable<IBlobStorage> storages = endpoints.Select(e => _storageProvider.GetStorage(e.Provider));
    IEnumerable<Task<IReadOnlyCollection<BlobId>>> readTasks = storages.Select(x => x.ListAsync(listOptions));

    // Materialize once: the original kept `blobs` as a deferred query that was
    // enumerated twice (structured log + return), and included a redundant
    // identity Select (`x.Select(t => t)`) inside the SelectMany.
    List<BlobId> blobs = (await Task.WhenAll(readTasks)).SelectMany(x => x).ToList();
    _logger.LogDebug("Listed files {@blobs}", blobs);

    return blobs.Select(x => x.ToFileMetadata()).ToList();
}
/// <summary>
/// Creates a storage root for a <see cref="Uri"/> by converting it to its Android
/// equivalent and delegating to the Android-URI overload.
/// </summary>
/// <param name="context">Android context used by the underlying provider.</param>
/// <param name="uri">The location to open, as a .NET URI.</param>
/// <param name="storageOptions">Optional storage options; null uses defaults.</param>
public static StorageRoot CreateStorage(Context context, Uri uri, StorageOptions storageOptions = null)
{
    var androidUri = AndroidUriFromUri(uri);
    return CreateStorage(context, androidUri, storageOptions);
}
/// <summary>
/// Opens the underlying storage: closes any current database unless an existing one
/// is being reused, builds the storage options from the configured settings, then
/// creates and initializes the storage.
/// </summary>
private async Task OpenAsync()
{
    // Reuse the existing database only when explicitly requested; otherwise start clean.
    if (!options.UseExistingDatabase)
        Close();

    Debug.Assert(!string.IsNullOrEmpty(options.DatabaseName));
    Debug.Assert(storage == null);

    // A negative BloomBits setting disables the bloom filter entirely.
    BloomFilterPolicy bloomFilter = null;
    if (options.BloomBits >= 0)
        bloomFilter = new BloomFilterPolicy(options.BloomBits);

    var storageOptions = new StorageOptions
    {
        CreateIfMissing = !options.UseExistingDatabase,
        WriteBatchSize = options.WriteBatchSize,
        FilterPolicy = bloomFilter,
    };

    // Zero or negative cache size leaves the storage default in place.
    if (options.CacheSize > 0)
    {
        storageOptions.CacheSizeInMegabytes = options.CacheSize;
    }

    storage = new Storage(options.DatabaseName, storageOptions);
    await storage.InitAsync();
}
/// <summary>
/// Creates a storage root backed by a SAF provider for the given Android URI.
/// </summary>
/// <param name="context">Android context used by the provider.</param>
/// <param name="androidUri">The Android location to open.</param>
/// <param name="storageOptions">Optional storage options; null uses defaults.</param>
// NOTE(review): "SafStorgeProvider" (sic) is the type's declared name elsewhere in
// the project; renaming it here would break compilation.
public static StorageRoot CreateStorage(Context context, Android.Net.Uri androidUri, StorageOptions storageOptions = null)
    => new StorageRoot(new SafStorgeProvider(context, androidUri), storageOptions);