public Examples()
{
    // Nevermore is driven entirely by this configuration object: JSON
    // serialization behaviour and much more hang off it. We keep it minimal.
    var configuration = new RelationalStoreConfiguration(ConnectionString);

    // Every document map must be registered up front so Nevermore knows how
    // each document type is persisted.
    configuration.DocumentMaps.Register(new PersonMap());

    // The store is created once, at application start-up, and shared.
    store = new RelationalStore(configuration);

    // Nevermore sits on top of SQL Server, so the schema has to exist first.
    ExecuteSql(@"
CREATE TABLE [Person] (
  [Id] NVARCHAR(50) NOT NULL CONSTRAINT [PK__Id] PRIMARY KEY CLUSTERED,
  [FirstName] NVARCHAR(20) NOT NULL,
  [LastName] NVARCHAR(200) NULL,
  [Email] NVARCHAR(200) NOT NULL,
  [JSON] NVARCHAR(MAX) NOT NULL
)

ALTER TABLE [Person] ADD CONSTRAINT [UQ_UniquePersonEmail] UNIQUE([Email])
");
}
/// <summary>
/// Wires up the collaborators this factory needs; all dependencies are
/// required and simply captured into fields.
/// </summary>
public VariableManifestFactory(ILog log, IDeploymentManifestFactory deploymentManifestFactory, IWebPortalConfigurationStore webPortalConfigurationStore, IRelationalStore store)
{
    _store = store;
    _webPortalConfigurationStore = webPortalConfigurationStore;
    _deploymentManifestFactory = deploymentManifestFactory;
    _log = log;
}
public override void SetUp()
{
    base.SetUp();

    var configuration = new RelationalStoreConfiguration(ConnectionString);
    configuration.DocumentMaps.Register(new BigObjectMap(Format));
    store = new RelationalStore(configuration);
    readTransaction = store.BeginReadTransaction();

    using var writeTransaction = store.BeginWriteTransaction();

    // Fixed seed so every benchmark run inserts exactly the same data.
    var random = new Random(42);

    // Short random base64 token (3 random bytes -> 4 base64 characters).
    string RandomString()
    {
        var bytes = new byte[3];
        random.NextBytes(bytes);
        return Convert.ToBase64String(bytes);
    }

    // Scale the number of history entries with the target document size.
    var entries = Enumerable.Range(1, DocumentSize / 256).Select(n => new BigObjectHistoryEntry
    {
        Id = Guid.NewGuid(),
        Comment = RandomString(),
        LuckyNumbers = Enumerable.Range(0, random.Next(130, 330)).ToArray(),
        Date = DateTime.Today.AddDays(n)
    });

    writeTransaction.Insert(new BigObject
    {
        Id = "BigObject-1",
        History = entries.OfType<object>().ToList()
    });
    writeTransaction.Commit();
}
public override void SetUp()
{
    base.SetUp();

    // Build a store that knows about Customer documents, then open the
    // read transaction the benchmark/test methods will share.
    var configuration = new RelationalStoreConfiguration(ConnectionString);
    configuration.DocumentMaps.Register(new CustomerMap());
    store = new RelationalStore(configuration);
    transaction = store.BeginReadTransaction();
}
public override void SetUp()
{
    base.SetUp();

    var configuration = new RelationalStoreConfiguration(ConnectionString);
    configuration.DocumentMaps.Register(new CustomerMap());
    store = new RelationalStore(configuration);
    transaction = store.BeginReadTransaction();

    // Capture every customer id in a random order; sorting by a fresh Guid
    // per element is a cheap shuffle for LINQ-to-Objects.
    allIdsRandomlySorted = transaction.Query<Customer>()
        .ToList()
        .Select(customer => customer.Id)
        .OrderByDescending(_ => Guid.NewGuid())
        .ToList();
}
/// <summary>
/// Runs all embedded DbUp migration scripts (from both the Nevermore assembly
/// and the integration-test assembly) against the store's database.
/// </summary>
/// <param name="store">The store whose connection string identifies the target database.</param>
/// <exception cref="InvalidOperationException">Thrown when any migration script fails.</exception>
public void Migrate(IRelationalStore store)
{
    // Hoist the connection string: it is used both as the upgrade target and
    // to derive the database name variable made available to scripts.
    var connectionString = store.ConnectionString;
    var databaseName = new SqlConnectionStringBuilder(connectionString).InitialCatalog;

    var upgrader = DeployChanges.To
        .SqlDatabase(connectionString)
        .WithScriptsAndCodeEmbeddedInAssembly(typeof(RelationalStore).GetTypeInfo().Assembly)
        .WithScriptsAndCodeEmbeddedInAssembly(typeof(IntegrationTestDatabase).GetTypeInfo().Assembly)
        .LogScriptOutput()
        .WithVariable("databaseName", databaseName)
        .LogTo(_log)
        .Build();

    var result = upgrader.PerformUpgrade();
    if (!result.Successful)
    {
        // CA2201: throw a specific exception type rather than bare Exception.
        // InvalidOperationException derives from Exception, so existing
        // catch (Exception) handlers still work; the inner exception is kept.
        throw new InvalidOperationException("Database migration failed: " + result.Error.GetErrorSummary(), result.Error);
    }
}
/// <summary>
/// Tracks key allocation for a single collection: which store to allocate
/// from, which collection the keys belong to, and how many to reserve at once.
/// </summary>
public Allocation(IRelationalStore store, string collectionName, int blockSize)
{
    this.blockSize = blockSize;
    this.collectionName = collectionName;
    this.store = store;
}
/// <summary>
/// Creates an allocator that reserves ids from the given store in blocks of
/// <paramref name="blockSize"/> to reduce database round-trips.
/// </summary>
public KeyAllocator(IRelationalStore store, int blockSize)
{
    this.blockSize = blockSize;
    this.store = store;
}
public override void SetUp()
{
    base.SetUp();

    var configuration = new RelationalStoreConfiguration(ConnectionString);
    configuration.DocumentMaps.Register(new BigObjectMap(JsonStorageFormat.TextOnly));
    store = new RelationalStore(configuration);
    readTransaction = store.BeginReadTransaction();

    using var writer = store.BeginWriteTransaction();

    // Pre-slice one shared history list per size bucket so inserts reuse them.
    var history = GenerateHistory();
    var historyOf1 = history.Take(1).ToList();
    var historyOf10 = history.Take(10).ToList();
    var historyOf100 = history.Take(100).ToList();
    var historyOf500 = history.Take(500).ToList();

    // Fixed seed keeps the size distribution identical across runs.
    var random = new Random(42);
    for (var i = 0; i < 1000; i++)
    {
        var doc = new BigObject { Name = "Document " + i };

        // Skew document sizes: ~70% tiny, ~15% small, ~10% medium, rest large.
        // random.Next(1, 100) yields 1..99, so the final branch catches 95..99.
        var distribution = random.Next(1, 100);
        if (distribution < 70)
        {
            doc.History = historyOf1;
        }
        else if (distribution < 85)
        {
            doc.History = historyOf10;
        }
        else if (distribution < 95)
        {
            doc.History = historyOf100;
        }
        else
        {
            doc.History = historyOf500;
        }

        writer.Insert(doc, new InsertOptions { CommandTimeout = TimeSpan.FromSeconds(180) });

        if (i % 100 == 0)
        {
            Console.WriteLine($"Inserted: {i} history: {doc.History.Count} rand: {distribution}");
        }
    }

    // Print a histogram of JSON payload sizes so runs can be sanity-checked.
    foreach (var item in writer.Stream<(long? Bucket, int? Count)>(
        "select len([JSON]) as Bucket, count(*) from BigObject group by len([JSON]) order by len([JSON])"))
    {
        Console.WriteLine($"{item.Bucket} bytes: {item.Count} documents");
    }

    writer.Commit();
}
/// <summary>
/// Captures the store plus the set of initializers that will be run against
/// it when initialization is triggered.
/// </summary>
public StoreInitializer(IRelationalStore store, IInitializeRelationalStore[] initializers)
{
    this.initializers = initializers;
    this.store = store;
}