/// <summary>
/// Wires the command up with the reader that supplies catalog entries and the
/// processor that writes them to the database.
/// </summary>
public CatalogToDatabaseCommand(
    CatalogReader catalogReader,
    CatalogToDatabaseProcessor processor)
{
    _processor = processor;
    _catalogReader = catalogReader;
}
/// <summary>
/// Round-trips a mixed catalog (book, newspaper, patent) through an XML file
/// and verifies one identifying field of each element type.
/// </summary>
public void LibraryManagementTest_TestAll()
{
    // Arrange: write all three element types to a single catalog file.
    var filepath = path + @"TestCatalog.xml";
    IEnumerable<ICatalogElement> catalog = new List<ICatalogElement>() { book, paper, patent };
    CatalogWriter.WriteDocument(filepath, catalog);

    // Act: read the catalog back from disk.
    IEnumerable<ICatalogElement> readCatalog = CatalogReader.readFrom(filepath);

    Book bookResult = null;
    Newspaper paperResult = null;
    Patent patentResult = null;

    // BUG FIX: the original iterated the in-memory list that was just written,
    // so the deserialized 'readCatalog' was never inspected and the round-trip
    // was not actually exercised by the assertions below.
    foreach (var element in readCatalog)
    {
        if (element is Newspaper) { paperResult = (Newspaper)element; }
        if (element is Book) { bookResult = (Book)element; }
        if (element is Patent) { patentResult = (Patent)element; }
    }

    // Assert: key fields survived the write/read round trip.
    Assert.AreEqual(patentResult.Name.name, patent.Name.name);
    Assert.AreEqual(bookResult.ISBN, book.ISBN);
    Assert.AreEqual(paperResult.ISSN, paper.ISSN);
}
/// <summary>
/// Wires the command up with the reader that supplies catalog entries and the
/// processor that extracts nuspecs from them.
/// </summary>
public CatalogToNuspecsCommand(
    CatalogReader catalogReader,
    CatalogToNuspecsProcessor processor)
{
    _processor = processor;
    _catalogReader = catalogReader;
}
/// <summary>
/// Reads the full nuget.org catalog, keeps only the latest stable (non
/// pre-release, non-deleted) version of each package, filters out obvious
/// spam ids, and loads package info for each entry in parallel.
/// </summary>
/// <returns>Package info for every surviving catalog entry; entries for which
/// <c>GetPackageInfo</c> returned null are dropped.</returns>
private static async Task<List<PackageInfo>> LoadAllDependenciesAsync()
{
    // Allow many simultaneous HTTP connections to the feed.
    ServicePointManager.DefaultConnectionLimit = 64;

    var feed = new Uri("https://api.nuget.org/v3/index.json");

    using (var catalog = new CatalogReader(feed, TimeSpan.FromDays(1)))
    {
        Console.WriteLine("Loading latest package IDs");

        // The documentation lies: this retrieves all entries. Ignore pre-release,
        // and find the latest version for each package.
        var catalogEntries = (await catalog.GetFlattenedEntriesAsync())
            .Where(p => !p.Version.IsPrerelease)
            .Where(p => !p.IsDelete)
            .GroupBy(p => p.Id)
            // Spam... there must be a better way of detecting this.
            .Where(g => !g.Key.Contains("1-800") &&
                        !g.Key.ToLowerInvariant().Contains("-phone-") &&
                        !g.Key.ToLowerInvariant().Contains("-number-"))
            // Keep only the newest version within each package id group.
            .Select(g => g.OrderByDescending(p => p.Version).First())
            .ToList();

        Console.WriteLine($"Loading dependencies for {catalogEntries.Count} packages");

        // Fan the per-package lookups out across 16 parallel workers and drop
        // any lookups that failed (returned null).
        return (catalogEntries
            .AsParallel()
            .WithDegreeOfParallelism(16)
            .Select(GetPackageInfo)
            .Where(p => p != null)
            .ToList());
    }
}
/// <summary>
/// Deserializes this instruction's header: one byte for the unary arithmetic
/// op and one byte for the operand's numeric type, each range-checked.
/// </summary>
public override void ReadHeader(TagRepository rpa, CatalogReader catalog, HighMethodBodyParseContext methodBody, HighCfgNodeHandle[] cfgNodes, List<HighSsaRegister> ssaRegisters, CodeLocationTag baseLocation, bool haveDebugInfo, BinaryReader reader)
{
    m_arithOp = (NumberUnaryArithOp)reader.ReadByte();
    // NOTE(review): the value originates from ReadByte(), so the `< 0` arm can
    // never fire; the upper-bound check is the effective validation.
    if (m_arithOp < 0 || m_arithOp >= NumberUnaryArithOp.NumHighUnaryArithOpTypes)
        throw new Exception("Invalid unary arithmetic op");

    m_arithType = (NumberArithType)reader.ReadByte();
    if (m_arithType < 0 || m_arithType >= NumberArithType.NumHighArithTypes)
        throw new Exception("Invalid arith type");
}
/// <summary>
/// Pushes 10 packages in separate commits, then queries a commit-time range
/// and verifies that the start timestamp is excluded while the end timestamp
/// is included.
/// </summary>
public async Task VerifyStartTimeIsExclusiveAndEndTimeIsInclusive()
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);

        const int packageCount = 10;

        // Small page size so the catalog spans multiple pages.
        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, catalogPageSize: 2, log: log);

        // Push each package separately (each push gets its own commit).
        foreach (var i in Enumerable.Range(0, packageCount))
        {
            var nupkgFolder = Path.Combine(nupkgsFolder, i.ToString());
            TestNupkg.Save(nupkgFolder, new TestNupkg($"Package{i}", "1.0.0"));
            await CatalogReaderTestHelpers.PushPackagesAsync(workingDir, nupkgFolder, baseUri, log);
        }

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var allEntries = await catalogReader.GetEntriesAsync();

            // All commit timestamps, oldest first.
            var timestamps = allEntries
                .OrderBy(x => x.CommitTimeStamp)
                .Select(x => x.CommitTimeStamp)
                .ToList();

            // Range: start at index 2, end at index 7 (packageCount - 3).
            var start = timestamps[2];
            var end = timestamps[packageCount - 3];

            // Act
            var entries = await catalogReader.GetEntriesAsync(start, end, CancellationToken.None);

            // Assert: indices 3..7 — start excluded, end included.
            Assert.Equal(
                timestamps.Skip(3).Take(5),
                entries.Select(x => x.CommitTimeStamp));
            // Sanity check: the range math above requires unique timestamps.
            Assert.Equal(packageCount, timestamps.Distinct().Count());
        }
    }
}
/// <summary>
/// Wires up the queue with its catalog reader and entry processor, and creates
/// the backing task queue with a single worker driving <c>WorkAsync</c>.
/// </summary>
public CatalogProcessorQueue(
    CatalogReader catalogReader,
    ICatalogEntriesProcessor processor)
{
    _processor = processor;
    _catalogReader = catalogReader;
    _taskQueue = new TaskQueue<Work>(workerCount: 1, workAsync: WorkAsync);
}
/// <summary>
/// Creates a catalog reader pointed at the configured upstream index, with
/// HTTP caching disabled so every read goes straight to the source.
/// </summary>
public NuGetCatalogReader(ILogger<NuGetCatalogReader> logger, CacheOptions config)
{
    _log = logger;

    // Bypass the local NuGet HTTP cache entirely.
    var cacheContext = new SourceCacheContext();
    cacheContext.NoCache = true;
    cacheContext.DirectDownload = true;

    reader = new CatalogReader(config.UpstreamIndex, null, cacheContext, TimeSpan.Zero, new NuGetLoggerAdapter<NuGetCatalogReader>(_log));
}
/// <summary>
/// Creates a catalog reader for the configured v3 index URL, with HTTP caching
/// disabled so every read goes straight to the source.
/// </summary>
public NuGetCatalogReader(ICachingProxyConfig config)
{
    // Bypass the local NuGet HTTP cache entirely.
    var cacheContext = new SourceCacheContext();
    cacheContext.NoCache = true;
    cacheContext.DirectDownload = true;

    reader = new CatalogReader(new System.Uri(config.V3NugetIndexSource), null, cacheContext, TimeSpan.Zero, new Log4NetLoggerAdapter(_log));
}
/// <summary>
/// Pushes the same package three times and verifies that the raw entry list
/// records every add/remove while the flattened view and package set collapse
/// to the single surviving package.
/// </summary>
public async Task VerifyEditsAreIgnoredInFlattenedViewAsync()
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);

        var packageA = new TestNupkg("a", "1.0.0");
        TestNupkg.Save(nupkgsFolder, packageA);

        // Create and push
        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, log);

        // 2nd push
        await CatalogReaderTestHelpers.PushPackagesAsync(workingDir, nupkgsFolder, baseUri, log);

        // 3rd push
        await CatalogReaderTestHelpers.PushPackagesAsync(workingDir, nupkgsFolder, baseUri, log);

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        // Act
        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var entries = await catalogReader.GetEntriesAsync();
            var flatEntries = await catalogReader.GetFlattenedEntriesAsync();
            var set = await catalogReader.GetPackageSetAsync();

            var entry = entries.FirstOrDefault();

            // Assert
            // 3 adds, 2 removes
            Assert.Equal(5, entries.Count);
            // Flattened view and package set keep only the final state.
            Assert.Equal(1, flatEntries.Count);
            Assert.Equal(1, set.Count);

            Assert.Equal("a", entry.Id);
            Assert.Equal("1.0.0", entry.Version.ToNormalizedString());
        }
    }
}
/// <summary>
/// A reader that never performed any work must tolerate being disposed —
/// including an explicit Dispose followed by the using block's implicit one.
/// </summary>
public void DisposeDoesNotThrowWhenNothingHasBeenDone()
{
    // Arrange: a reader pointed at a feed that is never contacted.
    var root = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
    var indexUri = Sleet.UriUtility.CreateUri(root.AbsoluteUri + "index.json");

    using (var catalogReader = new CatalogReader(indexUri))
    {
        // Act & Assert
        catalogReader.Dispose(); // If this does not throw, we're good!
    }
}
/// <summary>
/// Pushes one package carrying a full SemVer 2.0.0 version (4-part, pre-release
/// labels, build metadata) and verifies every public property exposed on its
/// catalog entry.
/// </summary>
public async Task VerifyCatalogEntryPropertiesAsync()
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);

        var packageA = new TestNupkg("a", "1.0.0.1-RC.1.2.b0.1+meta.blah.1");
        TestNupkg.Save(nupkgsFolder, packageA);

        // Create and push
        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, log);

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        // Act
        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var entries = await catalogReader.GetEntriesAsync();
            var entry = entries.FirstOrDefault();

            // Assert
            Assert.Equal("a", entry.Id);
            // Normalization strips the +meta.blah.1 build metadata.
            Assert.Equal("1.0.0.1-RC.1.2.b0.1", entry.Version.ToNormalizedString());
            Assert.NotEmpty(entry.CommitId);
            Assert.True(DateTimeOffset.MinValue < entry.CommitTimeStamp);
            // File names use the lowercased version string.
            Assert.Equal("a.1.0.0.1-rc.1.2.b0.1", entry.FileBaseName);
            Assert.True(entry.IsAddOrUpdate);
            Assert.False(entry.IsDelete);
            Assert.True(await entry.IsListedAsync());
            // Resource URIs derived from the feed's service index.
            Assert.Equal("https://localhost:8080/testFeed/flatcontainer/a/1.0.0.1-rc.1.2.b0.1/a.1.0.0.1-rc.1.2.b0.1.nupkg", entry.NupkgUri.AbsoluteUri);
            Assert.Equal("https://localhost:8080/testFeed/flatcontainer/a/1.0.0.1-rc.1.2.b0.1/a.nuspec", entry.NuspecUri.AbsoluteUri);
            Assert.Equal("https://localhost:8080/testFeed/flatcontainer/a/index.json", entry.PackageBaseAddressIndexUri.AbsoluteUri);
            Assert.Equal("https://localhost:8080/testFeed/registration/a/1.0.0.1-rc.1.2.b0.1.json", entry.PackageRegistrationUri.AbsoluteUri);
            Assert.Equal("https://localhost:8080/testFeed/registration/a/index.json", entry.RegistrationIndexUri.AbsoluteUri);
            Assert.Equal("nuget:PackageDetails", string.Join("|", entry.Types));
            Assert.StartsWith("https://localhost:8080/testFeed/catalog/data/", entry.Uri.AbsoluteUri);
        }
    }
}
/// <summary>
/// Ensures the catalog is loaded into <c>State</c>; no-op when a non-empty
/// catalog is already present. Loaded entries are ordered newest-first by
/// <c>ModifiedDate</c>.
/// </summary>
private async Task LoadCatalog()
{
    // Already loaded with at least one entry — nothing to do.
    if ((State?.Catalog != null) && (State.Catalog.Metadatas.Count > 0))
    {
        return;
    }

    // NOTE(review): if State itself is null the guard above falls through and
    // State.AppConfig below throws NullReferenceException — confirm State is
    // always initialized before this method runs.
    Catalog catalog = await CatalogReader.Read(
        State.AppConfig.CatalogPath);

    // Most recently modified entries first.
    catalog.Metadatas = catalog.Metadatas
        .OrderByDescending(m => m.ModifiedDate).ToList();

    State.UpdateCatalog(this, catalog);
}
/// <summary>
/// Loads book records from the named XML resource.
/// </summary>
/// <param name="resourceName">Resource to read; defaulted when empty and any
/// ".xml" extension is stripped.</param>
/// <param name="useSampleData">When true, the sample data stream is used.</param>
/// <returns>The parsed list of <see cref="BookRecDTO"/> records.</returns>
public List<BookRecDTO> FetchBookData(string resourceName, bool useSampleData)
{
    // Apply the default name / strip the ".xml" extension if included.
    resourceName = NormalizeResourceName(resourceName);

    // Open the XML data stream and parse it into DTOs.
    // (This runs synchronously — the original comment about an "asynchronous
    // task" did not match the code.)
    Stream xmlStream = GetXmlDataStream(resourceName, useSampleData);
    CatalogReader reader = new CatalogReader();
    return reader.FetchBookData(xmlStream);
}
/// <summary>
/// Builds a <c>Projection</c> by combining the database catalog (from
/// <paramref name="connectionString"/>) with the mapping file.
/// </summary>
/// <param name="diagnosticsCallback">Sink for diagnostics emitted while building.</param>
/// <param name="mappingFile">Path to the mapping file.</param>
/// <param name="connectionString">SQL Server connection string.</param>
/// <returns>The built projection.</returns>
/// <exception cref="ConnectionStringFormatException">
/// Thrown when the connection string cannot be parsed.
/// </exception>
public static async Task<Projection> BuildAsync(IDiagnosticsCallback diagnosticsCallback, string mappingFile, string connectionString)
{
    string dataSource;
    string database;

    try
    {
        // FIX: the SqlConnection was previously created and never disposed.
        // It is only needed to parse the connection string for the diagnostics
        // scope label, so scope it tightly and dispose it immediately.
        using (var connection = new SqlConnection(connectionString))
        {
            dataSource = connection.DataSource;
            database = connection.Database;
        }
    }
    catch (InvalidOperationException)
    {
        // NOTE(review): a malformed connection string typically surfaces as
        // ArgumentException from the SqlConnection ctor — confirm this catch
        // type actually fires for the failure mode it is meant to translate.
        throw new ConnectionStringFormatException(connectionString);
    }

    var diagnosticsCallbackScope = new DiagnosticsCallbackScope(
        diagnosticsCallback,
        $"{Path.GetFileName(mappingFile)} <-> [{dataSource}].[{database}] >");

    var catalogReader = new CatalogReader(connectionString, diagnosticsCallback);
    var mappingReader = new MappingReader(mappingFile, diagnosticsCallback);
    var modelReader = new ProjectionBuilder(catalogReader, mappingReader, diagnosticsCallbackScope);

    return await modelReader.BuildAsync();
}
/// <summary>
/// Reads the sample XML catalog next to the project root and prints every
/// element, then waits for Enter before exiting.
/// </summary>
static void Main(string[] args)
{
    // Sample files available here: CatalogWriterExample.xml, CatalogSample.xml
    var basePath = AppDomain.CurrentDomain.SetupInformation.ApplicationBase + @"..\..\";

    IEnumerable<ICatalogElement> catalog = CatalogReader.readFrom(basePath + @"CatalogSample.xml");
    foreach (var element in catalog)
    {
        Console.WriteLine(element.ToString());
    }

    // Keep the console window open.
    Console.ReadLine();
}
/// <summary>
/// Round-trips a single-book catalog through an XML file and verifies the
/// title and ISBN survive.
/// </summary>
public void LibraryManagementTest_TestBook()
{
    // Arrange: a catalog containing just the book fixture.
    var filepath = path + @"TestBook.xml";
    IEnumerable<ICatalogElement> toWrite = new List<ICatalogElement>() { book };

    // Act: write, then read back from disk.
    CatalogWriter.WriteDocument(filepath, toWrite);
    IEnumerable<ICatalogElement> roundTripped = CatalogReader.readFrom(filepath);

    Book resultBook = null;
    foreach (var element in roundTripped)
    {
        resultBook = (Book)element;
    }

    // Assert: key fields survived the round trip.
    Assert.AreEqual(resultBook.Title.name, book.Title.name);
    Assert.AreEqual(resultBook.ISBN, book.ISBN);
}
/// <summary>
/// Round-trips a single-patent catalog through an XML file and verifies the
/// name and registration number survive.
/// </summary>
public void LibraryManagementTest_TestPatent()
{
    // Arrange: a catalog containing just the patent fixture.
    var filepath = path + @"TestPatent.xml";
    IEnumerable<ICatalogElement> toWrite = new List<ICatalogElement>() { patent };

    // Act: write, then read back from disk.
    CatalogWriter.WriteDocument(filepath, toWrite);
    IEnumerable<ICatalogElement> roundTripped = CatalogReader.readFrom(filepath);

    Patent result = null;
    foreach (var element in roundTripped)
    {
        result = (Patent)element;
    }

    // Assert: key fields survived the round trip.
    Assert.AreEqual(result.Name.name, patent.Name.name);
    Assert.AreEqual(result.RegistrationNumber, patent.RegistrationNumber);
}
/// <summary>
/// Round-trips a single-newspaper catalog through an XML file and verifies the
/// name and ISSN survive.
/// </summary>
public void LibraryManagementTest_TestNewspaper()
{
    // Arrange: a catalog containing just the newspaper fixture.
    var filepath = path + @"TestNewspaper.xml";
    IEnumerable<ICatalogElement> toWrite = new List<ICatalogElement>() { paper };

    // Act: write, then read back from disk.
    CatalogWriter.WriteDocument(filepath, toWrite);
    IEnumerable<ICatalogElement> roundTripped = CatalogReader.readFrom(filepath);

    Newspaper result = null;
    foreach (var element in roundTripped)
    {
        result = (Newspaper)element;
    }

    // Assert: key fields survived the round trip.
    Assert.AreEqual(result.Name.name, paper.Name.name);
    Assert.AreEqual(result.ISSN, paper.ISSN);
}
/// <summary>
/// Verifies that every URL-backed resource exposed on a catalog entry can be
/// fetched (returns non-null) against a local test feed.
/// </summary>
public async Task GetCatalogEntryVerifyUrlsCanBeOpenedAsJsonAsync()
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);

        var packageA = new TestNupkg("a", "1.0.0.1-RC.1.2.b0.1+meta.blah.1");
        TestNupkg.Save(nupkgsFolder, packageA);

        // Create and push
        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, log);

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        // Act
        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var entries = await catalogReader.GetEntriesAsync();
            var entry = entries.FirstOrDefault();

            // Assert
            // NOTE(review): GetNupkgAsync is asserted twice — possibly a
            // deliberate repeat-call/caching check, possibly a copy-paste
            // duplicate; confirm intent before removing.
            (await entry.GetNupkgAsync()).Should().NotBeNull();
            (await entry.GetNupkgAsync()).Should().NotBeNull();
            (await entry.GetNuspecAsync()).Should().NotBeNull();
            (await entry.GetPackageBaseAddressIndexUriAsync()).Should().NotBeNull();
            (await entry.GetPackageDetailsAsync()).Should().NotBeNull();
            (await entry.GetPackageRegistrationUriAsync()).Should().NotBeNull();
            (await entry.GetRegistrationIndexUriAsync()).Should().NotBeNull();
        }
    }
}
/// <summary>
/// Creates a feed with no packages and verifies that all three catalog views
/// (entries, flattened entries, package set) are empty.
/// </summary>
public async Task VerifyNoEntriesWhenReadingAnEmptyCatalogAsync()
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);

        // Feed is created but no packages are ever pushed.
        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, log);

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        // Act
        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var entries = await catalogReader.GetEntriesAsync();
            var flatEntries = await catalogReader.GetFlattenedEntriesAsync();
            var set = await catalogReader.GetPackageSetAsync();

            // Assert
            Assert.Empty(entries);
            Assert.Empty(flatEntries);
            Assert.Empty(set);
        }
    }
}
/// <summary>
/// Shared harness for download-mode tests: builds a one-package feed, reads
/// its first catalog entry, then hands the entry and a scratch download folder
/// to the supplied act/assert callback.
/// </summary>
/// <param name="actAndAssertAsync">Callback receiving (downloadFolder, entry).</param>
private static async Task VerifyDownloadMode(
    Func<string, CatalogEntry, Task> actAndAssertAsync)
{
    // Arrange
    using (var cache = new LocalCache())
    using (var cacheContext = new SourceCacheContext())
    using (var workingDir = new TestFolder())
    {
        var log = new TestLogger();
        var baseUri = Sleet.UriUtility.CreateUri("https://localhost:8080/testFeed/");
        var feedFolder = Path.Combine(workingDir, "feed");
        var nupkgsFolder = Path.Combine(workingDir, "nupkgs");
        var downloadFolder = Path.Combine(workingDir, "download");
        Directory.CreateDirectory(feedFolder);
        Directory.CreateDirectory(nupkgsFolder);
        Directory.CreateDirectory(downloadFolder);

        var packageA = new TestNupkg("a", "1.0.0");
        TestNupkg.Save(nupkgsFolder, packageA);

        await CatalogReaderTestHelpers.CreateCatalogAsync(workingDir, feedFolder, nupkgsFolder, baseUri, log);

        var feedUri = Sleet.UriUtility.CreateUri(baseUri.AbsoluteUri + "index.json");
        var httpSource = CatalogReaderTestHelpers.GetHttpSource(cache, feedFolder, baseUri);

        // Act
        using (var catalogReader = new CatalogReader(feedUri, httpSource, cacheContext, TimeSpan.FromMinutes(1), log))
        {
            var entries = await catalogReader.GetEntriesAsync();
            var entry = entries.FirstOrDefault();

            // Delegate the actual act + assert to the caller.
            await actAndAssertAsync(downloadFolder, entry);
        }
    }
}
/// <summary>
/// This instruction serializes no header payload, so there is nothing to read;
/// the override exists only to satisfy the abstract base contract.
/// </summary>
public override void ReadHeader(TagRepository rpa, CatalogReader catalog, HighMethodBodyParseContext methodBody, HighCfgNodeHandle[] cfgNodes, List<HighSsaRegister> ssaRegisters, CodeLocationTag baseLocation, bool haveDebugInfo, BinaryReader reader)
{
}
/// <summary>
/// Stores the catalog reader used to fetch the latest catalog commit.
/// </summary>
public LatestCatalogCommitFetcher(CatalogReader catalogReader) => _catalogReader = catalogReader;
/// <summary>
/// Deserializes this instruction's header: a method spec, a constraint type
/// spec, and the parameter count.
/// </summary>
public override void ReadHeader(TagRepository rpa, CatalogReader catalog, HighMethodBodyParseContext methodBody, HighCfgNodeHandle[] cfgNodes, List<HighSsaRegister> ssaRegisters, CodeLocationTag baseLocation, bool haveDebugInfo, BinaryReader reader)
{
    m_methodSpec = catalog.GetMethodSpec(reader.ReadUInt32());
    m_constraintType = catalog.GetTypeSpec(reader.ReadUInt32());
    // Only the slot count is read here; the array is left filled with nulls.
    // NOTE(review): presumably the parameter registers are resolved by a later
    // pass — confirm against the caller.
    m_parameters = new HighSsaRegister[reader.ReadUInt32()];
}
/// <summary>
/// Configures the "list" command: prints every package id/version in a v3
/// feed, optionally limited to a commit-time range (-s exclusive start,
/// -e inclusive end). Returns 0 on success, 1 on any logged error.
/// </summary>
private static void Run(CommandLineApplication cmd, HttpSource httpSource, ILogger log)
{
    cmd.Description = "List packages from a v3 source.";

    var start = cmd.Option("-s|--start", "Beginning of the commit time range. Packages commited AFTER this time will be included.", CommandOptionType.SingleValue);
    var end = cmd.Option("-e|--end", "End of the commit time range. Packages commited at this time will be included.", CommandOptionType.SingleValue);
    var verbose = cmd.Option("-v|--verbose", "Write out additional network call information.", CommandOptionType.NoValue);

    var argRoot = cmd.Argument(
        "[root]",
        "V3 feed index.json URI",
        multipleValues: false);

    cmd.HelpOption(Constants.HelpOption);

    cmd.OnExecute(async () =>
    {
        try
        {
            // The feed URL is mandatory and must point at index.json.
            if (string.IsNullOrEmpty(argRoot.Value))
            {
                throw new ArgumentException("Provide the full http url to a v3 nuget feed.");
            }

            var index = new Uri(argRoot.Value);

            if (!index.AbsolutePath.EndsWith("/index.json", StringComparison.OrdinalIgnoreCase))
            {
                throw new ArgumentException($"Invalid feed url: '{argRoot.Value}'. Provide the full http url to a v3 nuget feed.");
            }

            // Default to the feed's full history when no range is given.
            var startTime = DateTimeOffset.MinValue;
            var endTime = DateTimeOffset.UtcNow;

            if (start.HasValue())
            {
                startTime = DateTimeOffset.Parse(start.Value());
            }

            if (end.HasValue())
            {
                endTime = DateTimeOffset.Parse(end.Value());
            }

            // Raise console verbosity only when --verbose was passed.
            if (log is ConsoleLogger consoleLogger)
            {
                if (verbose.HasValue())
                {
                    consoleLogger.VerbosityLevel = LogLevel.Information;
                }
                else
                {
                    consoleLogger.VerbosityLevel = LogLevel.Minimal;
                }
            }

            // CatalogReader
            using (var cacheContext = new SourceCacheContext())
            using (var catalogReader = new CatalogReader(index, httpSource, cacheContext, TimeSpan.Zero, log))
            {
                var entries = await catalogReader.GetFlattenedEntriesAsync(startTime, endTime, CancellationToken.None);

                // Stable output order: id (case-insensitive), then version.
                foreach (var entry in entries
                         .OrderBy(e => e.Id, StringComparer.OrdinalIgnoreCase)
                         .ThenBy(e => e.Version))
                {
                    log.LogMinimal($"{entry.Id} {entry.Version.ToFullString()}");
                }
            };

            return (0);
        }
        catch (Exception ex)
        {
            ExceptionUtils.LogException(ex, log);
        }

        return (1);
    });
}
/// <summary>
/// Configures the "mirror" command: downloads every nupkg from a v3 feed's
/// catalog (from the saved cursor up to now minus a safety delay) into one or
/// more local folders, in batches with throttled parallelism, updating the
/// cursor after each batch so the mirror can resume.
/// </summary>
private static void Run(CommandLineApplication cmd, HttpSource httpSource, ILogger consoleLog)
{
    cmd.Description = "Mirror nupkgs to a folder.";

    // Command-line options.
    var output = cmd.Option("-o|--output", "Output directory for nupkgs.", CommandOptionType.SingleValue);
    var folderFormat = cmd.Option("--folder-format", "Output folder format. Defaults to v3. Options: (v2|v3)", CommandOptionType.SingleValue);
    var ignoreErrors = cmd.Option("--ignore-errors", "Continue on errors.", CommandOptionType.NoValue);
    var delay = cmd.Option("--delay", "Avoid downloading the very latest packages on the feed to avoid errors. This value is in minutes. Default: 10", CommandOptionType.SingleValue);
    var maxThreadsOption = cmd.Option("--max-threads", "Maximum number of concurrent downloads. Default: 8", CommandOptionType.SingleValue);
    var verbose = cmd.Option("--verbose", "Output additional network information.", CommandOptionType.NoValue);
    var includeIdOption = cmd.Option("-i|--include-id", "Include only these package ids or wildcards. May be provided multiple times.", CommandOptionType.MultipleValue);
    var excludeIdOption = cmd.Option("-e|--exclude-id", "Exclude these package ids or wildcards. May be provided multiple times.", CommandOptionType.MultipleValue);
    var additionalOutput = cmd.Option("--additional-output", "Additional output directory for nupkgs. The output path with the most free space will be used.", CommandOptionType.MultipleValue);
    var onlyLatestVersion = cmd.Option("--latest-only", "Include only the latest version of that package in the result", CommandOptionType.NoValue);

    var argRoot = cmd.Argument(
        "[root]",
        "V3 feed index.json URI",
        multipleValues: false);

    cmd.HelpOption(Constants.HelpOption);

    cmd.OnExecute(async () =>
    {
        var timer = new Stopwatch();
        timer.Start();

        // The feed URL is mandatory and must point at index.json.
        if (string.IsNullOrEmpty(argRoot.Value))
        {
            throw new ArgumentException("Provide the full http url to a v3 nuget feed.");
        }

        var index = new Uri(argRoot.Value);

        if (!index.AbsolutePath.EndsWith("/index.json", StringComparison.OrdinalIgnoreCase))
        {
            throw new ArgumentException($"Invalid feed url: '{argRoot.Value}'. Provide the full http url to a v3 nuget feed. For nuget.org use: https://api.nuget.org/v3/index.json");
        }

        // Create root
        var outputPath = Directory.GetCurrentDirectory();

        if (output.HasValue())
        {
            outputPath = output.Value();
        }

        var tmpCachePath = Path.Combine(outputPath, ".tmp");

        // Primary output plus any --additional-output folders.
        var storagePaths = new HashSet<DirectoryInfo>() { new DirectoryInfo(outputPath) };

        if (additionalOutput.Values?.Any() == true)
        {
            storagePaths.UnionWith(additionalOutput.Values.Select(e => new DirectoryInfo(e)));
        }

        // Create all output folders
        foreach (var path in storagePaths)
        {
            path.Create();
        }

        // Hold back entries newer than (now - delay); default 10 minutes.
        var delayTime = TimeSpan.FromMinutes(10);
        if (delay.HasValue())
        {
            if (int.TryParse(delay.Value(), out int x))
            {
                var delayMinutes = Math.Max(0, x);
                delayTime = TimeSpan.FromMinutes(delayMinutes);
            }
            else
            {
                throw new ArgumentException("Invalid --delay value. This must be an integer.");
            }
        }

        // Parallel download limit; default 8, minimum 1.
        var maxThreads = 8;
        if (maxThreadsOption.HasValue())
        {
            if (int.TryParse(maxThreadsOption.Value(), out int x))
            {
                maxThreads = Math.Max(1, x);
            }
            else
            {
                throw new ArgumentException("Invalid --max-threads value. This must be an integer.");
            }
        }

        var batchSize = 64;
        var outputRoot = new DirectoryInfo(outputPath);
        var outputFilesInfo = new FileInfo(Path.Combine(outputRoot.FullName, "updatedFiles.txt"));
        // Start each run with a fresh change log.
        FileUtility.Delete(outputFilesInfo.FullName);

        // Folder layout: v3 ({id}/{version}/...) is the default.
        var useV3Format = true;
        if (folderFormat.HasValue())
        {
            switch (folderFormat.Value().ToLowerInvariant())
            {
                case "v2":
                    useV3Format = false;
                    break;

                case "v3":
                    useV3Format = true;
                    break;

                default:
                    throw new ArgumentException($"Invalid {folderFormat.LongName} value: '{folderFormat.Value()}'.");
            }
        }

        // Resume from the saved cursor; stop short of "now" by the delay window.
        var start = MirrorUtility.LoadCursor(outputRoot);
        var end = DateTimeOffset.UtcNow.Subtract(delayTime);
        var token = CancellationToken.None;
        var mode = DownloadMode.OverwriteIfNewer;

        var errorLogPath = Path.Combine(outputPath, "lastRunErrors.txt");
        FileUtility.Delete(errorLogPath);

        // Loggers
        // source -> deep -> file -> Console
        var log = new FileLogger(consoleLog, LogLevel.Error, errorLogPath);
        var deepLogger = new FilterLogger(log, LogLevel.Error);

        // Init
        log.LogInformation($"Mirroring {index.AbsoluteUri} -> {outputPath}");

        var formatName = useV3Format ? "{id}/{version}/{id}.{version}.nupkg" : "{id}/{id}.{version}.nupkg";
        log.LogInformation($"Folder format:\t{formatName}");
        log.LogInformation($"Cursor:\t\t{Path.Combine(outputPath, "cursor.json")}");
        log.LogInformation($"Change log:\t{outputFilesInfo.FullName}");
        log.LogInformation($"Error log:\t{errorLogPath}");
        log.LogInformation("Range start:\t" + start.ToString("o"));
        log.LogInformation("Range end:\t" + end.ToString("o"));
        log.LogInformation($"Batch size:\t{batchSize}");
        log.LogInformation($"Threads:\t{maxThreads}");

        // CatalogReader
        using (var cacheContext = new SourceCacheContext())
        {
            cacheContext.SetTempRoot(tmpCachePath);

            using (var catalogReader = new CatalogReader(index, httpSource, cacheContext, TimeSpan.Zero, deepLogger))
            {
                // Clear old cache files
                catalogReader.ClearCache();

                // Find the most recent entry for each package in the range
                // Order by oldest first
                IEnumerable<CatalogEntry> entryQuery = (await catalogReader
                    .GetFlattenedEntriesAsync(start, end, token));

                // Remove all but includes if given
                if (includeIdOption.HasValue())
                {
                    var regex = includeIdOption.Values.Select(s => MirrorUtility.WildcardToRegex(s)).ToArray();
                    entryQuery = entryQuery.Where(e => regex.Any(r => r.IsMatch(e.Id)));
                }

                // Remove all excludes if given
                if (excludeIdOption.HasValue())
                {
                    var regex = excludeIdOption.Values.Select(s => MirrorUtility.WildcardToRegex(s)).ToArray();
                    entryQuery = entryQuery.Where(e => regex.All(r => !r.IsMatch(e.Id)));
                }

                // --latest-only: collapse each id to its highest version.
                if (onlyLatestVersion.HasValue())
                {
                    entryQuery = entryQuery.GroupBy(x => x.Id).Select(y => y.OrderByDescending(z => z.Version).First());
                }

                var toProcess = new Queue<CatalogEntry>(entryQuery.OrderBy(e => e.CommitTimeStamp));

                log.LogInformation($"Catalog entries found: {toProcess.Count}");

                var done = new List<CatalogEntry>(batchSize);
                var complete = 0;
                var total = toProcess.Count;
                var totalDownloads = 0;

                // Download files
                var tasks = new List<Task<NupkgResult>>(maxThreads);

                // Rolling window of recent batch timings, used for the ETA.
                var batchTimersMax = 20;
                var batchTimers = new Queue<Tuple<Stopwatch, int>>(batchTimersMax);

                // Download with throttling
                while (toProcess.Count > 0)
                {
                    // Create batches
                    var batch = new Queue<CatalogEntry>(batchSize);
                    var files = new List<string>();
                    var batchTimer = new Stopwatch();
                    batchTimer.Start();

                    while (toProcess.Count > 0 && batch.Count < batchSize)
                    {
                        batch.Enqueue(toProcess.Dequeue());
                    }

                    while (batch.Count > 0)
                    {
                        // Keep at most maxThreads downloads in flight.
                        if (tasks.Count == maxThreads)
                        {
                            await CompleteTaskAsync(files, tasks, done);
                        }

                        var entry = batch.Dequeue();

                        // Pick the download routine matching the folder layout.
                        Func<CatalogEntry, Task<FileInfo>> getNupkg = null;
                        if (useV3Format)
                        {
                            getNupkg = (e) => DownloadNupkgV3Async(e, storagePaths, mode, log, deepLogger, token);
                        }
                        else
                        {
                            getNupkg = (e) => DownloadNupkgV2Async(e, storagePaths, mode, log, token);
                        }

                        // Queue download task
                        tasks.Add(Task.Run(async () => await RunWithRetryAsync(entry, ignoreErrors.HasValue(), getNupkg, log, token)));
                    }

                    // Wait for all batch downloads
                    while (tasks.Count > 0)
                    {
                        await CompleteTaskAsync(files, tasks, done);
                    }

                    files = files.Where(e => e != null).ToList();

                    // Write out new files
                    using (var newFileWriter = new StreamWriter(new FileStream(outputFilesInfo.FullName, FileMode.Append, FileAccess.Write)))
                    {
                        foreach (var file in files)
                        {
                            newFileWriter.WriteLine(file);
                        }
                    }

                    complete += done.Count;
                    totalDownloads += files.Count;
                    batchTimer.Stop();
                    batchTimers.Enqueue(new Tuple<Stopwatch, int>(batchTimer, done.Count));

                    // Trim the timing window to the last batchTimersMax batches.
                    while (batchTimers.Count > batchTimersMax)
                    {
                        batchTimers.Dequeue();
                    }

                    // Update cursor
                    var newestCommit = GetNewestCommit(done, toProcess);
                    if (newestCommit != null)
                    {
                        log.LogMinimal($"================[batch complete]================");
                        log.LogMinimal($"Processed:\t\t{complete} / {total}");
                        log.LogMinimal($"Batch downloads:\t{files.Count}");
                        log.LogMinimal($"Batch time:\t\t{batchTimer.Elapsed}");
                        log.LogMinimal($"Updating cursor.json:\t{newestCommit.Value.ToString("o")}");

                        // ETA from the rolling average of seconds per entry.
                        var rate = batchTimers.Sum(e => e.Item1.Elapsed.TotalSeconds) / Math.Max(1, batchTimers.Sum(e => e.Item2));
                        var timeLeft = TimeSpan.FromSeconds(rate * (total - complete));

                        var timeLeftString = string.Empty;
                        if (timeLeft.TotalHours >= 1)
                        {
                            timeLeftString = $"{(int)timeLeft.TotalHours} hours";
                        }
                        else if (timeLeft.TotalMinutes >= 1)
                        {
                            timeLeftString = $"{(int)timeLeft.TotalMinutes} minutes";
                        }
                        else
                        {
                            timeLeftString = $"{(int)timeLeft.TotalSeconds} seconds";
                        }

                        log.LogMinimal($"Estimated time left:\t{timeLeftString}");
                        log.LogMinimal($"================================================");

                        MirrorUtility.SaveCursor(outputRoot, newestCommit.Value);
                    }

                    done.Clear();

                    // Free up space
                    catalogReader.ClearCache();
                }

                // Set cursor to end time
                MirrorUtility.SaveCursor(outputRoot, end);

                timer.Stop();

                var plural = totalDownloads == 1 ? "" : "s";
                log.LogMinimal($"Downloaded {totalDownloads} nupkg{plural} in {timer.Elapsed.ToString()}.");
            }
        }

        return (0);
    });
}
/// <summary>
/// Deserializes this instruction's header: the declaring type spec, the field
/// name, and whether the field access is static.
/// </summary>
public override void ReadHeader(TagRepository rpa, CatalogReader catalog, HighMethodBodyParseContext methodBody, HighCfgNodeHandle[] cfgNodes, List<HighSsaRegister> ssaRegisters, CodeLocationTag baseLocation, bool haveDebugInfo, BinaryReader reader)
{
    m_type = catalog.GetTypeSpec(reader.ReadUInt32());
    m_fieldName = catalog.GetString(reader.ReadUInt32());
    m_isStatic = reader.ReadBoolean();
}
/// <summary>
/// Deserializes this instruction's header: a compare operation byte, an
/// operand numeric-type byte (both range-checked), and the two int results
/// selected by the true/false branches.
/// </summary>
public override void ReadHeader(TagRepository rpa, CatalogReader catalog, HighMethodBodyParseContext methodBody, HighCfgNodeHandle[] cfgNodes, List<HighSsaRegister> ssaRegisters, CodeLocationTag baseLocation, bool haveDebugInfo, BinaryReader reader)
{
    m_operation = (NumberCompareOperation)reader.ReadByte();
    // NOTE(review): the value originates from ReadByte(), so the `< 0` arm can
    // never fire; the upper-bound check is the effective validation.
    if (m_operation < 0 || m_operation >= NumberCompareOperation.NumHighCompareTypes)
        throw new Exception("Invalid compare operation");

    m_numberType = (NumberArithType)reader.ReadByte();
    if (m_numberType < 0 || m_numberType >= NumberArithType.NumHighArithTypes)
        throw new Exception("Invalid arithmetic type");

    m_trueValue = reader.ReadInt32();
    m_falseValue = reader.ReadInt32();
}