// Creates a batcher that accumulates gallery export packages and hands them
// to the supplied catalog writer once the configured batch size is reached.
public GalleryExportBatcher(int batchSize, CatalogWriter writer)
{
    _writer = writer;
    _batchSize = batchSize;

    // Start with an empty batch and a zeroed running total.
    Total = 0;
    _currentBatch = new List<GalleryExportPackage>();
}
// Creates a batcher that accumulates catalog items and hands them to the
// supplied catalog writer once the configured batch size is reached.
public CatalogBatcher(int batchSize, CatalogWriter writer)
{
    _writer = writer;
    _batchSize = batchSize;

    // Start with an empty batch and a zeroed running total.
    Total = 0;
    _currentBatch = new List<CatalogItem>();
}
// Writes a package-delete entry into the local test catalog, then replays
// the catalog through the resolver delete collector so the default resolver
// feed applies the deletion.
public static async Task Test0Async()
{
    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\test",
        Container = "test",
        BaseAddress = "http://localhost:8000/"
    };

    // first save the delete into the catalog
    CatalogContext context = new CatalogContext();
    using (CatalogWriter writer = new CatalogWriter(storage, context, 1000, true))
    {
        //writer.Add(new DeletePackageCatalogItem("Test.Metadata.Service", "1.0.0"));
        writer.Add(new DeletePackageCatalogItem("Test.Metadata.Service", "2.0.0"));
        //writer.Add(new DeletePackageCatalogItem("Test.Metadata.Service", "3.0.0"));

        // Commit with UtcNow, consistent with the other tests in this file;
        // collectors compare commit timestamps against cursors, so local time
        // can skew ordering across machines/time zones.
        await writer.Commit(DateTime.UtcNow);
    }

    // second perform that delete on the various feeds - in this case the default resolver feed
    ResolverDeleteCollector collector = new ResolverDeleteCollector(storage);
    await collector.Run(new Uri("http://localhost:8000/test/catalog/index.json"), DateTime.MinValue);
}
// Loads every *.xml nuspec under a local folder into the catalog, committing
// after every file (BatchSize == 1) and logging each commit number.
public static async Task Test0Async()
{
    string nuspecs = @"c:\data\nuget\versions";

    // NOTE(review): the original base-address argument was redacted
    // ("http://*****:*****") and did not compile; reconstructed from the
    // localhost pattern used elsewhere in this file — confirm the real value.
    Storage storage = new FileStorage("http://localhost:8000/", @"c:\data\site\full");

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable (see the other tests) — dispose it.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 10))
    {
        const int BatchSize = 1;
        int i = 0;
        int commitCount = 0;

        DirectoryInfo directoryInfo = new DirectoryInfo(nuspecs);
        foreach (FileInfo fileInfo in directoryInfo.EnumerateFiles("*.xml"))
        {
            writer.Add(new NuspecPackageCatalogItem(fileInfo.FullName));

            if (++i % BatchSize == 0)
            {
                await writer.Commit(DateTime.UtcNow);
                Console.WriteLine("commit number {0}", commitCount++);
            }
        }

        // Final commit flushes any items left over from a partial batch.
        await writer.Commit(DateTime.UtcNow);
        Console.WriteLine("commit number {0}", commitCount++);
    }
}
// Wires up a catalog updater with its writer, checksum records, and HTTP
// client, seeding the tunable batch sizes from the class defaults.
public CatalogUpdater(CatalogWriter writer, ChecksumRecords checksums, CollectorHttpClient client)
{
    _client = client;
    _checksums = checksums;
    _writer = writer;

    // Batch sizes start at the defaults; callers may override via the
    // corresponding properties before running an update.
    CatalogAddBatchSize = DefaultCatalogAddBatchSize;
    DatabaseChecksumBatchSize = DefaultDatabaseChecksumBatchSize;
}
// Commits three groups of test items at fixed historical timestamps, then
// runs the test collector from several cursor positions to verify that only
// items newer than the cursor are collected.
public static async Task Test0Async()
{
    //Storage storage = new FileStorage
    //{
    //    Path = @"c:\data\site\test",
    //    Container = "test",
    //    BaseAddress = "http://localhost:8000/"
    //};

    Storage storage = new AzureStorage(
        CloudStorageAccount.Parse("AccountName=nuget3;AccountKey=;DefaultEndpointsProtocol=https"),
        "test");

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable (used with using elsewhere in this file) —
    // dispose it once all three commits are done.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 4, false))
    {
        string[] first = { "john", "paul", "ringo", "george" };
        foreach (string item in first)
        {
            writer.Add(new TestCatalogItem(item));
        }
        await writer.Commit(new DateTime(2010, 12, 25, 12, 0, 0));

        string[] second = { "jimmy", "robert" };
        foreach (string item in second)
        {
            writer.Add(new TestCatalogItem(item));
        }
        await writer.Commit(new DateTime(2011, 12, 25, 12, 0, 0));

        string[] third = { "john-paul", "john" };
        foreach (string item in third)
        {
            writer.Add(new TestCatalogItem(item));
        }
        await writer.Commit(new DateTime(2012, 12, 25, 12, 0, 0));
    }

    // collection...
    Uri index = storage.ResolveUri("index.json");

    TestItemCollector collector = new TestItemCollector();

    // Cursor after the 2012 commit: expect only the third group.
    Console.WriteLine("----------------");
    await collector.Run(index, new DateTime(2012, 10, 31, 12, 0, 0));

    // Cursor after the 2011 commit: expect the second and third groups.
    Console.WriteLine("----------------");
    await collector.Run(index, new DateTime(2011, 10, 31, 12, 0, 0));

    // MinValue cursor: expect everything. (Return value was unused.)
    Console.WriteLine("----------------");
    await collector.Run(index, new CollectorCursor(DateTime.MinValue));
}
// Loads nuspec files into the catalog using a scripted sequence of commit
// sizes (commitSize[n] items per commit), stopping once the script runs out.
public static async Task Test1Async()
{
    string nuspecs = @"c:\data\nuget\nuspecs";

    // NOTE(review): the original base-address argument was redacted
    // ("http://*****:*****") and did not compile; reconstructed from the
    // localhost pattern used elsewhere in this file — confirm the real value.
    Storage storage = new FileStorage("http://localhost:8000/", @"c:\data\site\full");

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 200))
    {
        int total = 0;

        //int[] commitSize = { 50, 40, 25, 50, 10, 30, 40, 5, 400, 30, 10, 20, 40, 50, 90, 70, 50, 50, 50, 50, 60, 70 };
        int[] commitSize =
        {
            200, 200, 200, 200, 200,
            //200, 200, 200, 200, 200,
            //200, 200, 200, 200, 200,
            //200, 200, 200, 200, 200,
            //200, 200, 200, 200, 200,
            //200, 200, 200, 200, 200
        };

        int i = 0;
        int commitCount = 0;

        DirectoryInfo directoryInfo = new DirectoryInfo(nuspecs);
        foreach (FileInfo fileInfo in directoryInfo.EnumerateFiles("*.xml"))
        {
            // Stop once every scripted commit has been performed.
            if (commitCount == commitSize.Length)
            {
                break;
            }

            writer.Add(new NuspecPackageCatalogItem(fileInfo.FullName));
            total++;

            if (++i == commitSize[commitCount])
            {
                await writer.Commit(DateTime.UtcNow);
                Console.WriteLine("commit number {0}", commitCount);
                commitCount++;
                i = 0;
            }
        }

        // NOTE(review): if the directory runs out of files mid-batch, the
        // items added since the last commit are never committed — confirm
        // that is intentional for this test.
        Console.WriteLine("total: {0}", total);
    }
}
// Exports package ranges from the gallery database into a local file-backed
// catalog, capped at three SQL chunks for test purposes.
public static void Test0()
{
    const int SqlChunkSize = 8000;
    string sqlConnectionString = ""; // TODO(review): supply a real connection string before running

    const int CatalogBatchSize = 1000;
    const int CatalogMaxPageSize = 1000;

    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\export2",
        Container = "export2",
        BaseAddress = "http://localhost:8000/"
    };

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, new CatalogContext(), CatalogMaxPageSize))
    {
        GalleryExportBatcher batcher = new GalleryExportBatcher(CatalogBatchSize, writer);

        int lastHighestPackageKey = 0;
        int count = 0;
        while (true)
        {
            Tuple<int, int> range = GalleryExport.GetNextRange(sqlConnectionString, lastHighestPackageKey, SqlChunkSize);

            // (0, 0) signals the export is exhausted.
            if (range.Item1 == 0 && range.Item2 == 0)
            {
                break;
            }

            // Test cap: only process the first three chunks.
            if (count++ == 3)
            {
                break;
            }

            Console.WriteLine("{0} {1}", range.Item1, range.Item2);

            GalleryExport.FetchRange(sqlConnectionString, range, batcher);
            lastHighestPackageKey = range.Item2;
        }

        // Flush the final partial batch before reporting the total.
        batcher.Complete();
        Console.WriteLine(batcher.Total);
    }
}
// Loads every *.xml nuspec from a local folder into the catalog in batches
// of 1000, logging a commit number after each commit.
public static async Task Test0Async()
{
    string nuspecs = @"c:\data\nuspecs";

    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\pub",
        Container = "pub",
        BaseAddress = "http://localhost:8000/"
    };

    //Storage storage = new AzureStorage
    //{
    //    AccountName = "nuget3",
    //    AccountKey = "",
    //    Container = "pub",
    //    BaseAddress = "http://nuget3.blob.core.windows.net"
    //};

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 1000))
    {
        const int BatchSize = 1000;
        int i = 0;
        int commitCount = 0;

        DirectoryInfo directoryInfo = new DirectoryInfo(nuspecs);
        foreach (FileInfo fileInfo in directoryInfo.EnumerateFiles("*.xml"))
        {
            writer.Add(new NuspecPackageCatalogItem(fileInfo));

            if (++i % BatchSize == 0)
            {
                // UtcNow for commit timestamps, consistent with the other
                // tests; collectors compare these against cursors.
                await writer.Commit(DateTime.UtcNow);
                Console.WriteLine("commit number {0}", commitCount++);
            }
        }

        // Final commit flushes any items left over from a partial batch.
        await writer.Commit(DateTime.UtcNow);
        Console.WriteLine("commit number {0}", commitCount++);
    }
}
// Writes a registration-delete entry into the catalog, then replays the
// catalog through the resolver delete collector so the default resolver
// feed applies the deletion.
public static async Task Test1Async()
{
    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\full",
        Container = "full",
        BaseAddress = "http://localhost:8000/"
    };

    // first save the delete into the catalog
    CatalogContext context = new CatalogContext();
    using (CatalogWriter writer = new CatalogWriter(storage, context, 1000, true))
    {
        writer.Add(new DeleteRegistrationCatalogItem("abc"));

        // Commit with UtcNow, consistent with the other tests in this file;
        // collectors compare commit timestamps against cursors, so local
        // time can skew ordering across machines/time zones.
        await writer.Commit(DateTime.UtcNow);
    }

    // second perform that delete on the various feeds - in this case the default resolver feed
    ResolverDeleteCollector collector = new ResolverDeleteCollector(storage);
    await collector.Run(new Uri("http://localhost:8000/full/catalog/index.json"), DateTime.MinValue);
}
// Builds a statistics catalog by pulling download-statistics batches from
// the database and committing every 100 batches, plus a final flush.
// lastKey advances through the source table via the ref parameter; a null
// batch signals the data is exhausted.
public static async Task CreateStatisticsCatalogAsync(Storage storage, string connectionString)
{
    const int BatchSize = 100;
    int i = 0;

    using (CatalogWriter writer = new CatalogWriter(storage, new CatalogContext(), 500))
    {
        int lastKey = 0;

        // (Removed an `iterations` counter that was incremented but never read.)
        while (true)
        {
            DateTime minDownloadTimeStamp;
            DateTime maxDownloadTimeStamp;

            JArray batch = GetNextBatch(connectionString, ref lastKey, out minDownloadTimeStamp, out maxDownloadTimeStamp);
            if (batch == null)
            {
                break;
            }

            writer.Add(new StatisticsCatalogItem(batch, lastKey.ToString(), minDownloadTimeStamp, maxDownloadTimeStamp));

            if (++i % BatchSize == 0)
            {
                await writer.Commit();
            }
        }

        // Final commit flushes any items left over from a partial batch.
        await writer.Commit();
    }
}
// Drains the _nupkgs queue into the catalog in batches of _batchSize,
// pipelining: while batch N is being committed asynchronously, batch N+1 is
// loaded in parallel. Each batch's items are only handed to the writer after
// the previous commit completes, so the writer is never touched concurrently.
protected override void RunCore()
{
    Config.Catalog.LocalFolder.Create();

    int total = _nupkgs.Count;
    Log("Processing " + total + " nupkgs");

    ParallelOptions options = new ParallelOptions();
    options.MaxDegreeOfParallelism = 8;

    // Tracks the in-flight commit from the previous iteration (null on the first pass).
    Task commitTask = null;

    using (var writer = new CatalogWriter(Config.Catalog.Storage, new CatalogContext()))
    {
        while (_nupkgs.Count > 0)
        {
            Queue<PackageCatalogItem> currentBatch = new Queue<PackageCatalogItem>(_batchSize);

            // create the batch: pick the item type from the file extension
            // (.nupkg vs anything else, which is treated as a nuspec)
            while (currentBatch.Count < _batchSize && _nupkgs.Count > 0)
            {
                string file = _nupkgs.Dequeue();

                if (file.EndsWith(".nupkg", StringComparison.OrdinalIgnoreCase))
                {
                    currentBatch.Enqueue(new NupkgCatalogItem(file));
                }
                else
                {
                    currentBatch.Enqueue(new NuspecPackageCatalogItem(file));
                }
            }

            // process the nupkgs and nuspec files in parallel; Load() is done
            // up-front so the single-threaded writer.Add loop below stays cheap
            Parallel.ForEach(currentBatch, options, nupkg => { nupkg.Load(); });

            // wait for the previous commit to finish before adding more
            // (the writer must not be mutated while a commit is in flight)
            if (commitTask != null)
            {
                commitTask.Wait();
            }

            // add everything from the queue
            foreach (PackageCatalogItem item in currentBatch)
            {
                writer.Add(item);
            }

            // commit asynchronously; the next iteration overlaps its load phase
            // with this commit
            commitTask = Task.Run(async () => await writer.Commit(DateTime.UtcNow));

            ProgressUpdate(total - _nupkgs.Count, total);
        }

        // wait for the final commit before disposing the writer
        if (commitTask != null)
        {
            commitTask.Wait();
        }
    }
}
// Commits one group of items, collects with a MinValue cursor, then commits
// a second group and re-collects from the returned cursor to verify the
// collector only sees the new items on the second pass.
public static async Task Test1Async()
{
    // NOTE(review): the original base-address argument was redacted
    // ("http://*****:*****") and did not compile; reconstructed from the
    // localhost pattern used elsewhere in this file — confirm the real value.
    Storage storage = new FileStorage("http://localhost:8000/test", @"c:\data\site\test");

    //Storage storage = new AzureStorage
    //{
    //    AccountName = "",
    //    AccountKey = "",
    //    Container = "test",
    //    BaseAddress = "http://nuget3.blob.core.windows.net"
    //};

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 4, false))
    {
        string[] first = { "john", "paul", "ringo", "george" };
        foreach (string item in first)
        {
            writer.Add(new TestCatalogItem(item));
        }
        await writer.Commit(new DateTime(2010, 12, 25, 12, 0, 0));

        string baseAddress = storage.BaseAddress + "/";
        Uri index = new Uri(baseAddress + "catalog/index.json");

        TestItemCollector collector = new TestItemCollector();

        // First pass: MinValue cursor collects everything committed so far.
        CollectorCursor cursor = await collector.Run(index, DateTime.MinValue);

        string[] second = { "jimmy", "robert", "john-paul", "john" };
        foreach (string item in second)
        {
            writer.Add(new TestCatalogItem(item));
        }
        await writer.Commit(new DateTime(2011, 12, 25, 12, 0, 0));

        // Second pass: resuming from the cursor should only see the new items.
        cursor = await collector.Run(index, cursor);
    }
}
// Rebuilds the catalog from scratch: deletes any existing catalog folder,
// then populates a fresh file-backed catalog either from the gallery
// database (when a connection string is supplied) or from a folder of
// .nupkg/.nuspec files, using the same pipelined-commit pattern as RunCore.
public void Rebuild(RebuildArgs args)
{
    const int batchSize = 2000;

    if (Directory.Exists(args.CatalogFolder))
    {
        Console.WriteLine("Catalog folder exists. Deleting!");
        Directory.Delete(args.CatalogFolder, recursive: true);
    }

    // Load storage
    Storage storage = new FileStorage(args.BaseAddress, args.CatalogFolder);

    using (var writer = new CatalogWriter(storage, new CatalogContext()))
    {
        if (!String.IsNullOrEmpty(args.DatabaseConnection))
        {
            // Database-driven rebuild: walk package-key ranges until the
            // export reports (0, 0), meaning the table is exhausted.
            // NOTE(review): sync-over-async (.Result/.Wait) throughout —
            // presumably acceptable for this command-line tool.
            var batcher = new GalleryExportBatcher(batchSize, writer);
            int lastHighest = 0;
            while (true)
            {
                var range = GalleryExport.GetNextRange(
                    args.DatabaseConnection,
                    lastHighest,
                    batchSize).Result;

                if (range.Item1 == 0 && range.Item2 == 0)
                {
                    break;
                }

                Console.WriteLine("Writing packages with Keys {0}-{1} to catalog...", range.Item1, range.Item2);

                GalleryExport.WriteRange(
                    args.DatabaseConnection,
                    range,
                    batcher).Wait();

                lastHighest = range.Item2;
            }
            batcher.Complete().Wait();
        }
        else if (!String.IsNullOrEmpty(args.NuPkgFolder))
        {
            Stopwatch timer = new Stopwatch();
            timer.Start();

            // files are sorted by GetFiles
            Queue<string> files = new Queue<string>(Directory.GetFiles(args.NuPkgFolder, "*.nu*", SearchOption.TopDirectoryOnly)
                .Where(s => s.EndsWith(".nupkg", StringComparison.OrdinalIgnoreCase) || s.EndsWith(".nuspec", StringComparison.OrdinalIgnoreCase)));

            int total = files.Count;

            ParallelOptions options = new ParallelOptions();
            options.MaxDegreeOfParallelism = 8;

            // Tracks the in-flight commit from the previous iteration.
            Task commitTask = null;

            while (files.Count > 0)
            {
                Queue<PackageCatalogItem> currentBatch = new Queue<PackageCatalogItem>(batchSize);

                // create the batch: item type is chosen from the file extension
                while (currentBatch.Count < batchSize && files.Count > 0)
                {
                    string file = files.Dequeue();

                    if (file.EndsWith(".nupkg", StringComparison.OrdinalIgnoreCase))
                    {
                        currentBatch.Enqueue(new NupkgCatalogItem(file));
                    }
                    else
                    {
                        currentBatch.Enqueue(new NuspecPackageCatalogItem(file));
                    }
                }

                // process the nupkgs and nuspec files in parallel
                Parallel.ForEach(currentBatch, options, nupkg => { nupkg.Load(); });

                // wait for the previous commit to finish before adding more
                // (the writer must not be mutated while a commit is in flight)
                if (commitTask != null)
                {
                    commitTask.Wait();
                }

                // add everything from the queue
                foreach (PackageCatalogItem item in currentBatch)
                {
                    writer.Add(item);
                }

                // commit asynchronously; the next iteration's load phase
                // overlaps with this commit
                commitTask = Task.Run(async () => await writer.Commit(DateTime.UtcNow));

                Console.WriteLine("committing {0}/{1}", total - files.Count, total);
            }

            // wait for the final commit before disposing the writer
            if (commitTask != null)
            {
                commitTask.Wait();
            }

            timer.Stop();

            Console.ForegroundColor = ConsoleColor.Green;
            Console.WriteLine("Committed {0} catalog items in {1}", total, timer.Elapsed);
            Console.ResetColor();
        }
    }
}
// Performs three commits an hour apart and, after each, reads the last
// commit timestamp and item count back out of storage to verify that the
// commit metadata round-trips correctly.
public static async Task Test4Async()
{
    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\test",
        Container = "test",
        BaseAddress = "http://localhost:8000/"
    };

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 4))
    {
        string[] names1 = { "a", "b", "c", "d", "e" };
        string[] names2 = { "f", "g", "h" };
        string[] names3 = { "i", "j", "k" };

        DateTime timeStamp = DateTime.UtcNow;

        foreach (string name in names1)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(timeStamp);
        Console.WriteLine("commit #1 timeStamp {0}", await CatalogWriter.GetLastCommitTimeStamp(storage));
        Console.WriteLine("commit #1 count {0}", await CatalogWriter.GetCount(storage));

        // Advance the clock so each commit has a distinct timestamp.
        timeStamp = timeStamp.AddHours(1);

        foreach (string name in names2)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(timeStamp);
        Console.WriteLine("commit #2 timeStamp {0}", await CatalogWriter.GetLastCommitTimeStamp(storage));
        Console.WriteLine("commit #2 count {0}", await CatalogWriter.GetCount(storage));

        timeStamp = timeStamp.AddHours(1);

        foreach (string name in names3)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(timeStamp);
        Console.WriteLine("commit #3 timeStamp {0}", await CatalogWriter.GetLastCommitTimeStamp(storage));
        Console.WriteLine("commit #3 count {0}", await CatalogWriter.GetCount(storage));
    }
}
// Performs three commits, each carrying a different commit-user-data
// dictionary, and after each reads the user data back from storage to
// verify it round-trips and reflects the most recent commit.
public static async Task Test3Async()
{
    Storage storage = new FileStorage
    {
        Path = @"c:\data\site\test",
        Container = "test",
        BaseAddress = "http://localhost:8000/"
    };

    CatalogContext context = new CatalogContext();

    // CatalogWriter is IDisposable — dispose it like the other tests do.
    using (CatalogWriter writer = new CatalogWriter(storage, context, 1000))
    {
        string[] names1 = { "a", "b", "c", "d", "e" };
        string[] names2 = { "f", "g", "h" };
        string[] names3 = { "i", "j", "k" };

        foreach (string name in names1)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(new Dictionary<string, string> { { "prop1", "value1.1" }, { "prop2", "value2.1" } });
        Console.WriteLine("commit user data #1");
        foreach (KeyValuePair<string, string> items in await CatalogWriter.GetCommitUserData(storage))
        {
            Console.WriteLine("{0} {1}", items.Key, items.Value);
        }

        foreach (string name in names2)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(new Dictionary<string, string> { { "prop1", "value1.2" }, { "prop2", "value2.2" } });
        Console.WriteLine("commit user data #2");
        foreach (KeyValuePair<string, string> items in await CatalogWriter.GetCommitUserData(storage))
        {
            Console.WriteLine("{0} {1}", items.Key, items.Value);
        }

        foreach (string name in names3)
        {
            writer.Add(new TestCatalogItem(name));
        }
        await writer.Commit(new Dictionary<string, string> { { "prop1", "value1.3" }, { "prop2", "value2.3" } });
        Console.WriteLine("commit user data #3");
        foreach (KeyValuePair<string, string> items in await CatalogWriter.GetCommitUserData(storage))
        {
            Console.WriteLine("{0} {1}", items.Key, items.Value);
        }
    }
}