// RSS feed of the most recently published stable (non-prerelease) packages.
// NOTE: page/pageSize are accepted for route compatibility but not yet used
// to page the feed (the newer overload elsewhere implements paging).
public ActionResult Feed(int? page, int? pageSize)
{
    var siteRoot = EnsureTrailingSlash(Configuration.GetSiteRoot(useHttps: false));

    // Listed stable packages, newest first.
    IQueryable<Package> packageVersions = packageSvc
        .GetPackagesForListing(includePrerelease: false)
        .OrderByDescending(p => p.Published);

    SyndicationFeed feed = new SyndicationFeed("Chocolatey", "Chocolatey Packages", new Uri(siteRoot));
    feed.Copyright = new TextSyndicationContent("Chocolatey copyright FerventCoder Software, Packages copyright original owner(s), Products copyright original author(s).");
    feed.Language = "en-US";

    List<SyndicationItem> items = new List<SyndicationItem>();
    foreach (Package package in packageVersions.ToList())
    {
        string title = string.Format("{0} ({1})", package.PackageRegistration.Id, package.Version);
        var galleryUrl = siteRoot + "packages/" + package.PackageRegistration.Id + "/" + package.Version;

        // Item id is "<Id>.<Version>"; publish date mirrors the package's publish time.
        SyndicationItem item = new SyndicationItem(
            title,
            package.Summary,
            new Uri(galleryUrl),
            package.PackageRegistration.Id + "." + package.Version,
            package.Published);
        item.PublishDate = package.Published;
        items.Add(item);
    }

    // BUG FIX: this previously called First().Published, which threw
    // InvalidOperationException until at least one package was published
    // (the old comment said "this will bomb"). Fall back to the current
    // time for an empty feed instead.
    var mostRecentPackage = packageVersions.FirstOrDefault();
    feed.LastUpdatedTime = mostRecentPackage == null ? DateTime.Now : mostRecentPackage.Published;
    feed.Items = items;

    return new RSSActionResult { Feed = feed };
}
// Builds a single OR-of-all-criteria predicate over the space-separated
// search terms and applies it to the query.
public static IQueryable<Package> Search(this IQueryable<Package> source, string searchTerm)
{
    // Guard: a null/blank search must not filter at all. Previously a blank
    // term still built criteria for the empty string and constrained the
    // query; this also matches the other Search overload in this codebase.
    if (String.IsNullOrWhiteSpace(searchTerm))
    {
        return source;
    }

    // Split the search terms by spaces
    var terms = searchTerm.Split();

    // Build a list of expressions for each term
    var expressions = new List<LambdaExpression>();
    foreach (var criteria in searchCriteria)
    {
        foreach (var term in terms)
        {
            expressions.Add(criteria(term));
        }
    }

    // Build a giant or statement using the bodies of the lambdas
    var body = expressions.Select(p => p.Body).Aggregate(Expression.OrElse);

    // Now build the final predicate
    var parameterExpr = Expression.Parameter(typeof(Package));

    // Fix up the body to use our parameter expression
    body = new ParameterExpressionReplacer(parameterExpr).Visit(body);

    // Build the final predicate
    var predicate = Expression.Lambda<Func<Package, bool>>(body, parameterExpr);

    // Apply it to the query
    return source.Where(predicate);
}
// Tokenizes the raw search string and returns the parsed search terms:
// each entry is either a field-scoped term (field + term/phrase) or a
// bare term/phrase. Terms that fail to parse are silently skipped.
public List<NuGetSearchTerm> Parse(string searchTerm)
{
    _tokenizer = new Tokenizer(searchTerm);
    var results = new List<NuGetSearchTerm>();

    for (var next = _tokenizer.Peek(); next != TokenType.Eof; next = _tokenizer.Peek())
    {
        var term = new NuGetSearchTerm();

        // Field tokens ("field:") get field-scoped parsing; everything else
        // is a plain term or quoted phrase.
        var parsed = next == TokenType.Field ? ParseField(term) : ParseTermOrPhrase(term);
        if (parsed)
        {
            results.Add(term);
        }
    }

    return results;
}
// Counts the Elmah errors logged in the last hour and appends the
// (timestamp, count) datapoint to today's ErrorRate blob.
public override void ExecuteCommand()
{
    // BUG FIX: the credentials were previously run through string.Format
    // with no arguments, which throws FormatException if the connection
    // string ever contains '{' or '}'. Use the value directly.
    TableErrorLog log = new TableErrorLog(ElmahAccountCredentials);
    List<ErrorLogEntry> entities = new List<ErrorLogEntry>();
    log.GetErrors(0, 500, entities); // assumes a max of 500 errors per hour

    // Capture "now" once so both ends of the one-hour window agree
    // (previously DateTime.UtcNow was re-evaluated per comparison).
    DateTime utcNow = DateTime.UtcNow;
    DateTime windowStart = utcNow.AddHours(-1);
    int count = entities.Count(entity =>
        entity.Error.Time.ToUniversalTime() > windowStart &&
        entity.Error.Time.ToUniversalTime() < utcNow);

    ReportHelpers.AppendDatatoBlob(
        StorageAccount,
        "ErrorRate" + string.Format("{0:MMdd}", DateTime.Now) + ".json",
        new Tuple<string, string>(String.Format("{0:HH:mm}", DateTime.Now), count.ToString()),
        50,
        ContainerName);
}
// Runs the failover-DC background checks and publishes the combined
// results as a JSON report blob.
public override void ExecuteCommand()
{
    sqlQueryForDbAge = string.Format("select create_date from sys.databases where name = '{0}'", DbName);

    // Load alert thresholds from the shared configuration blob.
    thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(
        ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

    // Each tuple is (check name, check result).
    var jobOutputs = new List<Tuple<string, string>>
    {
        Tuple.Create("SyncPackagesToFailoverDC", CheckLagBetweenDBAndBlob()),
        Tuple.Create("ImportCompletionStatus", CheckForInCompleteDBImport()),
    };

    JArray reportObject = ReportHelpers.GetJson(jobOutputs);
    ReportHelpers.CreateBlob(StorageAccount, "RunBackgroundCheckForFailoverDCReport.json", ContainerName, "application/json", ReportHelpers.ToStream(reportObject));
}
// Initializes every property to an empty value so consumers of a freshly
// constructed report never need to null-check.
public StatisticsPackagesReport()
{
    Id = String.Empty;
    Total = String.Empty;
    Columns = Enumerable.Empty<string>();
    Dimensions = new List<StatisticsDimension>();
    Facts = new List<StatisticsFact>();
    Rows = new List<StatisticsPackagesItemViewModel>();
    Table = new List<StatisticsPivot.TableEntry[]>();
}
// Runs the worker-job background checks and publishes the combined results
// as a JSON report blob.
public override void ExecuteCommand()
{
    thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(
        ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

    // Each tuple is (job name, check result).
    var jobOutputs = new List<Tuple<string, string>>
    {
        Tuple.Create("PackageStatics", CheckoutForPackageStatics()),
        //Tuple.Create("PurgePackageStatistics", CheckForPurgePackagStatisticsJob()),
        Tuple.Create("HandleQueuedPackageEdits", CheckForHandleQueuedPackageEditJob()),
        // "BackupPackages" check disabled temporarily: ListBlobs on the
        // ng-backups container is currently erroring.
        //Tuple.Create("BackupPackages", CheckForBackupPackagesJob()),
    };

    JArray reportObject = ReportHelpers.GetJson(jobOutputs);
    ReportHelpers.CreateBlob(StorageAccount, "RunBackGroundChecksForWorkerJobsReport.json", ContainerName, "application/json", ReportHelpers.ToStream(reportObject));
}
// Builds an OR predicate over the space-separated search terms. Terms
// prefixed "id:", "author:" or "tag:" are matched against that one field;
// everything else is matched against the full criteria set.
public static IQueryable<Package> Search(this IQueryable<Package> source, string searchTerm)
{
    if (String.IsNullOrWhiteSpace(searchTerm))
    {
        return source;
    }

    // Split the search terms by spaces
    var terms = searchTerm.Split();

    // Build a list of expressions for each term
    var expressions = new List<LambdaExpression>();
    foreach (var term in terms)
    {
        var localSearchTerm = term.to_lower();

        // BUG FIX: "id:" was previously a stand-alone if (not part of the
        // else-if chain), so an id-scoped term ALSO fell through to the
        // generic else branch and was matched — with its "id:" prefix
        // intact — against every criterion. One chain fixes that.
        if (localSearchTerm.StartsWith("id:"))
        {
            expressions.Add(idCriteria(localSearchTerm.Replace("id:", string.Empty)));
        }
        else if (localSearchTerm.StartsWith("author:"))
        {
            expressions.Add(authorCriteria(localSearchTerm.Replace("author:", string.Empty)));
        }
        else if (localSearchTerm.StartsWith("tag:"))
        {
            expressions.Add(tagCriteria(localSearchTerm.Replace("tag:", string.Empty)));
        }
        else
        {
            foreach (var criteria in searchCriteria)
            {
                expressions.Add(criteria(localSearchTerm));
            }
        }
    }

    //todo this becomes an AND
    // Build a giant or statement using the bodies of the lambdas
    var body = expressions.Select(p => p.Body).Aggregate(Expression.OrElse);

    // Now build the final predicate
    var parameterExpr = Expression.Parameter(typeof(Package));

    // Fix up the body to use our parameter expression
    body = new ParameterExpressionReplacer(parameterExpr).Visit(body);

    // Build the final predicate
    var predicate = Expression.Lambda<Func<Package, bool>>(body, parameterExpr);

    // Apply it to the query
    return source.Where(predicate);
}
// RSS feed of the most recently published stable (non-prerelease) packages.
// Optional paging: when both page and pageSize are given, skip = page *
// pageSize (so page is effectively 0-based); pageSize alone just truncates.
public ActionResult Feed(int? page, int? pageSize)
{
    var siteRoot = EnsureTrailingSlash(Configuration.GetSiteRoot(useHttps: false));

    // Listed stable packages, newest first.
    IQueryable<Package> packageVersions = packageSvc
        .GetPackagesForListing(includePrerelease: false)
        .OrderByDescending(p => p.Published);

    if (page != null && pageSize != null)
    {
        int skip = page.GetValueOrDefault() * pageSize.GetValueOrDefault(1);
        packageVersions = packageVersions.Skip(skip).Take(pageSize.GetValueOrDefault(1));
    }
    else if (pageSize != null)
    {
        packageVersions = packageVersions.Take(pageSize.GetValueOrDefault(1));
    }

    SyndicationFeed feed = new SyndicationFeed("Chocolatey", "Chocolatey Packages", new Uri(siteRoot));
    feed.Copyright = new TextSyndicationContent("Chocolatey copyright RealDimensions Software, LLC, Packages copyright original maintainer(s), Products copyright original author(s).");
    feed.Language = "en-US";

    List<SyndicationItem> items = new List<SyndicationItem>();
    foreach (Package package in packageVersions.ToList().OrEmptyListIfNull())
    {
        string title = string.Format("{0} ({1})", package.PackageRegistration.Id, package.Version);
        var galleryUrl = siteRoot + "packages/" + package.PackageRegistration.Id + "/" + package.Version;

        // Item id is "<Id>.<Version>"; publish date mirrors the package's
        // publish time.
        SyndicationItem item = new SyndicationItem(
            title,
            package.Summary,
            new Uri(galleryUrl),
            package.PackageRegistration.Id + "." + package.Version,
            package.Published
            );
        item.PublishDate = package.Published;
        items.Add(item);
    }

    // Fall back to "now" when the feed is empty or the query fails, so an
    // empty gallery still produces a valid feed (the older version of this
    // action threw here).
    try
    {
        var mostRecentPackage = packageVersions.FirstOrDefault();
        feed.LastUpdatedTime = mostRecentPackage == null ? DateTime.Now : mostRecentPackage.Published;
    }
    catch (Exception)
    {
        feed.LastUpdatedTime = DateTime.Now;
    }

    feed.Items = items;

    return new RSSActionResult { Feed = feed };
}
// Recursively populates OrderedNext on each level of the tree: the children
// of Next, sorted by Total in descending order.
private static void AddOrderedNext(Level level)
{
    // Leaf level: nothing to order.
    if (level.Next == null)
    {
        return;
    }

    // Copy the child entries and sort the copy, largest Total first.
    var orderedNext = new List<KeyValuePair<string, Level>>(level.Next);
    orderedNext.Sort((x, y) => y.Value.Total.CompareTo(x.Value.Total));
    level.OrderedNext = orderedNext;

    // Recurse so every descendant gets its own ordered view.
    foreach (var child in level.Next)
    {
        AddOrderedNext(child.Value);
    }
}
// Supplies theory data: (folder name, permissions flag) pairs, or just the
// folder names when IncludePermissions is off.
public override IEnumerable<object[]> GetData(MethodInfo testMethod)
{
    var folderNames = new List<object[]>
    {
        new object[] { Constants.PackagesFolderName, true },
        new object[] { Constants.UploadsFolderName, false }
    };

    if (IncludePermissions)
    {
        return folderNames;
    }

    // Strip the permissions flag, keeping only the first (folder name) element.
    return folderNames.Select(fn => new[] { fn.ElementAt(0) }).ToList();
}
// Writes the given packages into the Lucene index. When updating an
// existing index, stale documents for the affected registrations are
// deleted first so the new documents fully replace them.
private static void AddPackages(List<PackageIndexEntity> packages, bool creatingIndex)
{
    if (!creatingIndex)
    {
        // Delete every existing document belonging to a registration we are
        // about to (re)write.
        var packagesToDelete = packages
            .Select(p => p.PackageRegistrationKey)
            .Distinct()
            .Select(key => new Term("PackageRegistrationKey", key.ToString(CultureInfo.InvariantCulture)))
            .ToArray();
        indexWriter.DeleteDocuments(packagesToDelete);
    }

    // As per http://stackoverflow.com/a/3894582. The IndexWriter is CPU bound, so we can try and write multiple packages in parallel.
    // The IndexWriter is thread safe and is primarily CPU-bound.
    Parallel.ForEach(packages, AddPackage);

    indexWriter.Commit();
}
// Refreshes Elmah errors for the last N hours, sends alert mail for
// critical (severity 0) errors whose hourly occurrence count crosses the
// configured error/warning thresholds, and persists the detailed error
// list as a JSON blob. Alerts fire only when LastNHours == 1 (the hourly run).
// NOTE: "Occurecnes" is a [sic] property name on the ElmahError model.
public override void ExecuteCommand()
{
    AlertThresholds thresholds = new JavaScriptSerializer().Deserialize<AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
    List<ElmahError> listOfErrors = new List<ElmahError>();
    RefreshElmahError RefreshExecute = new RefreshElmahError(StorageAccount, ContainerName, LastNHours, ElmahAccountCredentials);
    listOfErrors = RefreshExecute.ExecuteRefresh();

    foreach (ElmahError error in listOfErrors)
    {
        // Severity 0 = critical; only these can trigger alert mail.
        if (error.Severity == 0)
        {
            // Error threshold is checked first, so an error-level alert
            // suppresses the warning-level one for the same error.
            if (error.Occurecnes > thresholds.ElmahCriticalErrorPerHourAlertErrorThreshold && LastNHours == 1)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Error: Elmah Error Alert activated for {0}", error.Error),
                    Details = String.Format("Number of {0} exceeded Error threshold limit during the last hour.Threshold error count per hour : {1}, Events recorded in the last hour: {2}", error.Error, thresholds.ElmahCriticalErrorPerHourAlertErrorThreshold, error.Occurecnes.ToString()),
                    AlertName = string.Format("Error: Elmah Error Alert for {0}", error.Error),
                    Component = "Web Server",
                    Level = "Error"
                }.ExecuteCommand();
            }
            else if (error.Occurecnes > thresholds.ElmahCriticalErrorPerHourAlertWarningThreshold && LastNHours == 1)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Warning: Elmah Error Alert activated for {0}", error.Error),
                    Details = String.Format("Number of {0} exceeded Warning threshold limit during the last hour.Threshold error count per hour : {1}, Events recorded in the last hour: {2}", error.Error, thresholds.ElmahCriticalErrorPerHourAlertWarningThreshold, error.Occurecnes.ToString()),
                    AlertName = string.Format("Warning: Elmah Error Alert for {0}", error.Error),
                    Component = "Web Server",
                    Level = "Warning"
                }.ExecuteCommand();
            }
        }
    }

    // Persist the full detail list regardless of whether any alerts fired.
    var json = new JavaScriptSerializer().Serialize(listOfErrors);
    ReportHelpers.CreateBlob(StorageAccount, "ElmahErrorsDetailed" + LastNHours.ToString() + "hours.json", ContainerName, "application/json",
        ReportHelpers.ToStream(json));
}
// View model for a user's profile page: identity fields plus a paged view
// of all the user's packages. pageIndex is 0-based.
public UserProfileModel(User user, List<PackageViewModel> allPackages, int pageIndex, int pageSize, UrlHelper url)
{
    Username = user.Username;
    EmailAddress = user.EmailAddress;
    UnconfirmedEmailAddress = user.UnconfirmedEmailAddress;

    AllPackages = allPackages;
    TotalPackages = allPackages.Count;
    PackagePage = pageIndex;
    PackagePageSize = pageSize;
    TotalPackageDownloadCount = AllPackages.Sum(p => p.TotalDownloadCount);

    // Ceiling division: pages needed to show every package.
    PackagePageTotalCount = (TotalPackages + PackagePageSize - 1) / PackagePageSize;

    Pager = new PreviousNextPagerViewModel<PackageViewModel>(
        allPackages,
        pageIndex,
        PackagePageTotalCount,
        page => url.User(user, page));

    PagedPackages = AllPackages
        .Skip(PackagePageSize * pageIndex)
        .Take(PackagePageSize)
        .ToList();
}
// Fetches Elmah errors in the (start, end) window, groups them by message,
// tags each group with a severity (1 = known non-critical, 0 = critical)
// and an admin detail link, and returns one ElmahError per group.
public List<ElmahError> GetElmahError(DateTime start, DateTime end)
{
    if (StorageAccount == null) StorageAccount = CloudStorageAccount.Parse(ConnectionString);
    List<string> nonCriticalErrorDictionary = new JavaScriptSerializer().Deserialize<List<string>>(Load(StorageAccount, "Configuration.ElmahNonCriticalErrors.json", ContainerName));

    // BUG FIX: the credentials were previously run through string.Format
    // with no arguments (FormatException if they contain braces); pass them
    // through unchanged.
    TableErrorLog log = new TableErrorLog(ElmahAccountCredentials);
    List<ErrorLogEntry> entities = new List<ErrorLogEntry>();

    // BUG FIX: TimeSpan.Hours is only the hours *component* (0-23), so for
    // windows longer than a day too few rows were fetched. TotalHours spans
    // the whole interval.
    int lasthours = (int)DateTime.Now.Subtract(start).TotalHours + 1;
    log.GetErrors(0, 500 * lasthours, entities); //retrieve n * LastNHours errors assuming a max of 500 errors per hour.

    List<ElmahError> listOfErrors = new List<ElmahError>();
    //Get the error from Last N hours.
    if (entities.Any(entity => entity.Error.Time.ToUniversalTime() > start.ToUniversalTime() && entity.Error.Time.ToUniversalTime() < end.ToUniversalTime()))
    {
        entities = entities.Where(entity => entity.Error.Time.ToUniversalTime() > start.ToUniversalTime() && entity.Error.Time.ToUniversalTime() < end.ToUniversalTime()).ToList();
        var elmahGroups = entities.GroupBy(item => item.Error.Message);

        //Group the error based on exception and send alerts if critical errors exceed the thresold values.
        foreach (IGrouping<string, ErrorLogEntry> errorGroups in elmahGroups)
        {
            Console.WriteLine(errorGroups.Key.ToString() + " " + errorGroups.Count());
            int severity = 0;
            if (nonCriticalErrorDictionary.Any(item => errorGroups.Key.ToString().Contains(item)))
            {
                severity = 1; //sev 1 is low pri and sev 0 is high pri.
            }

            // QA containers link to the int test site's error viewer.
            string link = "https://www.nuget.org/Admin/Errors.axd/detail?id={0}";
            if (ContainerName.Contains("qa"))
            {
                link = "https://int.nugettest.org/Admin/Errors.axd/detail?id={0}";
            }

            //for severity, assume all refresh error, severity = 0
            listOfErrors.Add(new ElmahError(errorGroups.Key.ToString(), errorGroups.Count(), errorGroups.Min(item => item.Error.Time.ToLocalTime()), errorGroups.Max(item => item.Error.Time.ToLocalTime()), string.Format(link, errorGroups.First().Id), errorGroups.First().Error.Detail, severity));
        }
    }

    return listOfErrors;
}
// Combines several disposables into one. Disposing the result disposes
// every token; all individual failures are collected and rethrown together
// as a single AggregateException so no token is skipped.
public static IDisposable All(params IDisposable[] tokens)
{
    return new DisposableAction(() =>
    {
        var failures = new List<Exception>();

        foreach (var token in tokens)
        {
            try
            {
                token.Dispose();
            }
            catch (Exception ex)
            {
                failures.Add(ex);
            }
        }

        if (failures.Count > 0)
        {
            throw new AggregateException(failures);
        }
    });
}
// Indexes the latest version of each package; any non-latest version just
// has its existing document removed from the index.
private static void AddPackages(IndexWriter indexWriter, List<PackageIndexEntity> packages)
{
    foreach (var package in packages)
    {
        // Older entry for this package: remove it and move on.
        if (package.Key != package.LatestKey)
        {
            var keyTerm = new Term("Key", package.Key.ToString(CultureInfo.InvariantCulture));
            indexWriter.DeleteDocuments(new TermQuery(keyTerm));
            continue;
        }

        var document = new Document();

        // "Key" is the only stored field (used to map hits back to the DB);
        // the rest are analyzed for searching but not stored.
        document.Add(new Field("Key", package.Key.ToString(CultureInfo.InvariantCulture), Field.Store.YES, Field.Index.NO));
        document.Add(new Field("Id-Exact", package.Id, Field.Store.NO, Field.Index.ANALYZED));
        document.Add(new Field("Description", package.Description, Field.Store.NO, Field.Index.ANALYZED));

        // Index each camel-case segment of the Id separately so partial id
        // matches work.
        foreach (var idToken in CamelCaseTokenize(package.Id))
        {
            document.Add(new Field("Id", idToken, Field.Store.NO, Field.Index.ANALYZED));
        }

        if (!String.IsNullOrEmpty(package.Title))
        {
            document.Add(new Field("Title", package.Title, Field.Store.NO, Field.Index.ANALYZED));
        }

        if (!String.IsNullOrEmpty(package.Tags))
        {
            document.Add(new Field("Tags", package.Tags, Field.Store.NO, Field.Index.ANALYZED));
        }

        document.Add(new Field("Author", package.Authors, Field.Store.NO, Field.Index.ANALYZED));
        indexWriter.AddDocument(document);
    }
}
// Appends one batch of documents to the index and commits with metadata
// carrying forward the last-edits-index-time and the highest package key.
private static void AddToIndex(Lucene.Net.Store.Directory directory, List<IndexDocumentData> rangeToIndex, TextWriter log, PerfEventTracker perfTracker)
{
    log.WriteLine("begin AddToIndex");
    int highestPackageKey = -1;
    using (IndexWriter indexWriter = CreateIndexWriter(directory, create: false))
    {
        // Just write the document to index. No Facet.
        foreach (IndexDocumentData data in rangeToIndex)
        {
            indexWriter.AddDocument(CreateLuceneDocument(data));
        }

        highestPackageKey = rangeToIndex.Max(i => i.Package.Key);

        log.WriteLine("about to commit {0} packages", rangeToIndex.Count);

        // BUG FIX: indexing CommitUserData (a Dictionary-backed map) throws
        // KeyNotFoundException when the key is absent, so the old null check
        // could never run. TryGetValue handles the missing-key case too.
        IDictionary<string, string> commitUserData = indexWriter.GetReader().CommitUserData;
        string lastEditsIndexTime;
        if (!commitUserData.TryGetValue("last-edits-index-time", out lastEditsIndexTime) || lastEditsIndexTime == null)
        {
            // this should never happen but if it did Lucene would throw
            lastEditsIndexTime = DateTime.MinValue.ToString();
        }

        indexWriter.Commit(CreateCommitMetadata(lastEditsIndexTime, highestPackageKey, rangeToIndex.Count, "add"));

        log.WriteLine("commit done");
    }
    log.WriteLine("end AddToIndex");
}
// Creates a user with the given name and an empty credential collection,
// so callers can add credentials without a null check.
public User(string username)
{
    Username = username;
    Credentials = new List<Credential>();
}
// Assembles the background job list (indexer-provided jobs plus the
// statistics and work-item cleanup jobs), wires up the web-farm
// coordinator, and starts the job manager with failure logging to Elmah.
private static void BackgroundJobsPostStart()
{
    var jobs = new List<IJob>();

    // Let the indexing service, when configured, contribute its jobs first.
    var indexer = DependencyResolver.Current.GetService<IIndexingService>();
    if (indexer != null)
    {
        indexer.RegisterBackgroundJobs(jobs);
    }

    jobs.Add(new UpdateStatisticsJob(TimeSpan.FromMinutes(5), () => new EntitiesContext(), timeout: TimeSpan.FromMinutes(5)));
    jobs.Add(new WorkItemCleanupJob(TimeSpan.FromDays(1), () => new EntitiesContext(), timeout: TimeSpan.FromDays(4)));

    // Coordinate across web-farm nodes via the shared work-item table.
    var jobCoordinator = new WebFarmJobCoordinator(new EntityWorkItemRepository(() => new EntitiesContext()));

    _jobManager = new JobManager(jobs, jobCoordinator)
    {
        RestartSchedulerOnFailure = true
    };
    _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
    _jobManager.Start();
}
// Verifies that creating a package whose flattened dependency string
// exceeds Int16.MaxValue characters fails with the expected EntityException.
private async Task WillThrowIfTheNuGetPackageDependenciesIsLongerThanInt16MaxValue()
{
    // Arrange: 5000 dependencies comfortably exceed the column limit.
    var service = CreateService();
    var versionSpec = VersionRange.Parse("[1.0]");
    var numDependencies = 5000;

    var packageDependencies = Enumerable.Range(0, numDependencies)
        .Select(i => new NuGet.Packaging.Core.PackageDependency("dependency" + i, versionSpec))
        .ToList();

    var nugetPackage = CreateNuGetPackage(packageDependencyGroups: new[]
    {
        new PackageDependencyGroup(new NuGetFramework("net40"), packageDependencies),
    });

    // Act / Assert
    var ex = await Assert.ThrowsAsync<EntityException>(
        async () => await service.CreatePackageAsync(nugetPackage.Object, new PackageStreamMetadata(), null));
    Assert.Equal(String.Format(Strings.NuGetPackagePropertyTooLong, "Dependencies", Int16.MaxValue), ex.Message);
}
// Creates or updates the Package entity for this registration from the
// uploaded nupkg: enforces moderation status rules for resubmissions,
// copies metadata, and fully replaces the authors / supported frameworks /
// dependencies / files child collections. Does NOT commit the package
// itself; the caller is responsible for persisting it.
private Package CreatePackageFromNuGetPackage(PackageRegistration packageRegistration, IPackage nugetPackage)
{
    // Existing row for this exact version => this is a resubmission.
    var package = packageRegistration.Packages
        .Where(pv => pv.Version == nugetPackage.Version.ToString())
        .SingleOrDefault();

    if (package != null)
    {
        switch (package.Status)
        {
            // Rejected versions can never be resubmitted.
            case PackageStatusType.Rejected :
                throw new EntityException(
                    string.Format(
                        "This package has been {0} and can no longer be submitted.",
                        package.Status.GetDescriptionOrValue().ToLower()));
            case PackageStatusType.Submitted :
                //continue on
                break;
            default :
                // Any other status (approved, exempted, ...) is a duplicate.
                throw new EntityException(
                    "A package with identifier '{0}' and version '{1}' already exists.", packageRegistration.Id, package.Version);
        }
    }

    var now = DateTime.UtcNow;
    var packageFileStream = nugetPackage.GetStream();

    //if new package versus updating an existing package.
    if (package == null) package = new Package();

    // Core metadata copied straight from the nupkg.
    package.Version = nugetPackage.Version.ToString();
    package.Description = nugetPackage.Description;
    package.ReleaseNotes = nugetPackage.ReleaseNotes;
    package.RequiresLicenseAcceptance = nugetPackage.RequireLicenseAcceptance;
    package.HashAlgorithm = Constants.Sha512HashAlgorithmId;
    package.Hash = cryptoSvc.GenerateHash(packageFileStream.ReadAllBytes());
    package.PackageFileSize = packageFileStream.Length;
    package.Created = now;
    package.Language = nugetPackage.Language;
    package.LastUpdated = now;
    package.Published = now;
    package.Copyright = nugetPackage.Copyright;
    package.IsPrerelease = !nugetPackage.IsReleaseVersion();

    // Every (re)submission starts unlisted and back in the moderation queue.
    package.Listed = false;
    package.Status = PackageStatusType.Submitted;
    package.SubmittedStatus = PackageSubmittedStatusType.Ready;
    package.ApprovedDate = null;

    // A previously reviewed version counts as an update, not a fresh submit.
    if (package.ReviewedDate.HasValue) package.SubmittedStatus = PackageSubmittedStatusType.Updated;

    //we don't moderate prereleases
    if (package.IsPrerelease)
    {
        package.Listed = true;
        package.Status = PackageStatusType.Exempted;
    }

    // Trusted registrations skip moderation and are auto-approved/listed.
    if (packageRegistration.IsTrusted)
    {
        package.Listed = true;
        package.Status = PackageStatusType.Approved;
        package.ReviewedDate = now;
        package.ApprovedDate = now;
    }

    // URL-ish and text fields are stored as empty strings rather than nulls.
    package.IconUrl = nugetPackage.IconUrl == null ? string.Empty : nugetPackage.IconUrl.ToString();
    package.LicenseUrl = nugetPackage.LicenseUrl == null ? string.Empty : nugetPackage.LicenseUrl.ToString();
    package.ProjectUrl = nugetPackage.ProjectUrl == null ? string.Empty : nugetPackage.ProjectUrl.ToString();
    package.ProjectSourceUrl = nugetPackage.ProjectSourceUrl == null ? string.Empty : nugetPackage.ProjectSourceUrl.ToString();
    package.PackageSourceUrl = nugetPackage.PackageSourceUrl == null ? string.Empty : nugetPackage.PackageSourceUrl.ToString();
    package.DocsUrl = nugetPackage.DocsUrl == null ? string.Empty : nugetPackage.DocsUrl.ToString();
    package.MailingListUrl = nugetPackage.MailingListUrl == null ? string.Empty : nugetPackage.MailingListUrl.ToString();
    package.BugTrackerUrl = nugetPackage.BugTrackerUrl == null ? string.Empty : nugetPackage.BugTrackerUrl.ToString();
    package.Summary = nugetPackage.Summary ?? string.Empty;
    package.Tags = nugetPackage.Tags ?? string.Empty;
    package.Title = nugetPackage.Title ?? string.Empty;

    // Replace-all strategy for each child collection: delete existing rows,
    // commit the deletes, then re-add from the nupkg.
    foreach (var item in package.Authors.OrEmptyListIfNull().ToList())
    {
        packageAuthorRepo.DeleteOnCommit(item);
    }
    packageAuthorRepo.CommitChanges();
    foreach (var author in nugetPackage.Authors)
    {
        package.Authors.Add(
            new PackageAuthor
            {
                Name = author
            });
    }

    foreach (var item in package.SupportedFrameworks.OrEmptyListIfNull().ToList())
    {
        packageFrameworksRepo.DeleteOnCommit(item);
    }
    packageFrameworksRepo.CommitChanges();
    // If any framework short name maps to null, skip storing frameworks
    // entirely (the set could not be represented faithfully).
    var supportedFrameworks = GetSupportedFrameworks(nugetPackage).Select(fn => fn.ToShortNameOrNull()).ToArray();
    if (!supportedFrameworks.AnySafe(sf => sf == null))
    {
        foreach (var supportedFramework in supportedFrameworks)
        {
            package.SupportedFrameworks.Add(
                new PackageFramework
                {
                    TargetFramework = supportedFramework
                });
        }
    }

    foreach (var item in package.Dependencies.OrEmptyListIfNull().ToList())
    {
        packageDependenciesRepo.DeleteOnCommit(item);
    }
    packageDependenciesRepo.CommitChanges();
    foreach (var dependencySet in nugetPackage.DependencySets)
    {
        if (dependencySet.Dependencies.Count == 0)
        {
            // Empty group: record a framework-only marker row.
            package.Dependencies.Add(
                new PackageDependency
                {
                    Id = null,
                    VersionSpec = null,
                    TargetFramework = dependencySet.TargetFramework.ToShortNameOrNull()
                });
        }
        else
        {
            foreach (var dependency in dependencySet.Dependencies.Select(
                d => new { d.Id, d.VersionSpec, dependencySet.TargetFramework }))
            {
                package.Dependencies.Add(
                    new PackageDependency
                    {
                        Id = dependency.Id,
                        VersionSpec = dependency.VersionSpec == null ? null : dependency.VersionSpec.ToString(),
                        TargetFramework = dependency.TargetFramework.ToShortNameOrNull()
                    });
            }
        }
    }

    foreach (var item in package.Files.OrEmptyListIfNull().ToList())
    {
        packageFilesRepo.DeleteOnCommit(item);
    }
    packageFilesRepo.CommitChanges();
    foreach (var packageFile in nugetPackage.GetFiles().OrEmptyListIfNull())
    {
        var filePath = packageFile.Path;
        var fileContent = " ";

        // Text content is only captured for extensions whitelisted in config.
        IList<string> extensions = new List<string>();
        var approvedExtensions = Configuration.ReadAppSettings("PackageFileTextExtensions");
        if (!string.IsNullOrWhiteSpace(approvedExtensions))
        {
            foreach (var extension in approvedExtensions.Split(',', ';'))
            {
                extensions.Add("." + extension);
            }
        }

        try
        {
            var extension = Path.GetExtension(filePath);
            if (extension != null)
            {
                if (extensions.Contains(extension)) fileContent = packageFile.GetStream().ReadToEnd();
                else if (extension.Equals(".exe", StringComparison.InvariantCultureIgnoreCase))
                {
                    // Executables store md5/sha1 fingerprints instead of content.
                    var bytes = packageFile.GetStream().ReadAllBytes();
                    var md5Hash = BitConverter.ToString(Convert.FromBase64String(cryptoSvc.GenerateHash(bytes, "MD5")))
                        .Replace("-", string.Empty);
                    var sha1Hash = BitConverter.ToString(Convert.FromBase64String(cryptoSvc.GenerateHash(bytes, "SHA1")))
                        .Replace("-", string.Empty);
                    fileContent = string.Format("md5: {0} | sha1: {1}", md5Hash, sha1Hash);
                }
            }
        }
        catch (Exception ex)
        {
            // Log but swallow the exception
            ErrorSignal.FromCurrentContext().Raise(ex);
        }

        package.Files.Add(
            new PackageFile
            {
                FilePath = filePath,
                FileContent = fileContent,
            });
    }

    // Denormalized columns for quick display/search.
    package.FlattenedAuthors = package.Authors.Flatten();
    package.FlattenedDependencies = package.Dependencies.Flatten();

    return package;
}
// Lists packages for browsing/search. When moderatorQueue is set, shows the
// moderation backlog (resubmitted, unreviewed, waiting-for-maintainer) and
// pads the tail of the results with the regular listing by download count.
public virtual ActionResult ListPackages(string q, string sortOrder = null, int page = 1, bool prerelease = true, bool moderatorQueue = false)
{
    if (page < 1) page = 1;

    IQueryable<Package> packageVersions = packageSvc.GetPackagesForListing(prerelease);
    IEnumerable<Package> packagesToShow = new List<Package>();

    if (moderatorQueue)
    {
        var unknownStatus = PackageStatusType.Unknown.GetDescriptionOrValue();
        //This is going to be fun. Unknown status ones would be listed, but sometimes a few might slip through the cracks if a maintainer unlists a package.
        // A user can just email us to catch those though.
        packageVersions = packageVersions.Where(p => !p.IsPrerelease).Where(p => p.StatusForDatabase == unknownStatus || p.StatusForDatabase == null);
    }

    q = (q ?? "").Trim();

    if (String.IsNullOrEmpty(sortOrder))
    {
        // Determine the default sort order. If no query string is specified, then the sortOrder is DownloadCount
        // If we are searching for something, sort by relevance.
        sortOrder = q.IsEmpty() ? Constants.PopularitySortOrder : Constants.RelevanceSortOrder;
    }

    int totalHits = 0;
    int updatedPackagesCount = 0;
    int unreviewedPackagesCount = 0;
    int waitingPackagesCount = 0;
    var searchFilter = GetSearchFilter(q, sortOrder, page, prerelease);

    if (moderatorQueue)
    {
        var submittedPackages = packageSvc.GetSubmittedPackages().ToList();

        var updatedStatus = PackageSubmittedStatusType.Updated.ToString();
        var readyStatus = PackageSubmittedStatusType.Ready.ToString();
        var waitingStatus = PackageSubmittedStatusType.Waiting.ToString();

        //var resubmittedPackages = submittedPackages.Where(p => p.ReviewedDate.HasValue && p.Published > p.ReviewedDate).OrderBy(p => p.Published).ToList();
        var resubmittedPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == updatedStatus).OrderBy(p => p.Published).ToList();
        updatedPackagesCount = resubmittedPackages.Count;

        // A null SubmittedStatusForDatabase is treated the same as "Ready".
        var unreviewedPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == readyStatus || p.SubmittedStatusForDatabase == null).OrderBy(p => p.Published).ToList();
        unreviewedPackagesCount = unreviewedPackages.Count;

        //var waitingForMaintainerPackages = submittedPackages.Where(p => p.ReviewedDate >= p.Published).OrderByDescending(p => p.ReviewedDate).ToList();
        var waitingForMaintainerPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == waitingStatus).OrderByDescending(p => p.ReviewedDate).ToList();
        waitingPackagesCount = waitingForMaintainerPackages.Count;

        // Queue ordering: resubmitted, then unreviewed, then waiting.
        packagesToShow = resubmittedPackages.Union(unreviewedPackages).Union(waitingForMaintainerPackages);

        totalHits = packagesToShow.Count() + packageVersions.Count();

        // Once the requested page reaches past the queue itself, pad with
        // the regular listing ordered by download count.
        if ((searchFilter.Skip + searchFilter.Take) >= packagesToShow.Count()) packagesToShow = packagesToShow.Union(packageVersions.OrderByDescending(pv => pv.DownloadCount));

        packagesToShow = packagesToShow.Skip(searchFilter.Skip).Take(searchFilter.Take);
    }
    else packagesToShow = searchSvc.Search(packageVersions, searchFilter, out totalHits).ToList();

    if (page == 1 && !packagesToShow.Any())
    {
        // In the event the index wasn't updated, we may get an incorrect count.
        totalHits = 0;
    }

    // View model uses a 0-based page index (page - 1).
    var viewModel = new PackageListViewModel(
        packagesToShow, q, sortOrder, totalHits, page - 1, Constants.DefaultPackageListPageSize, Url, prerelease, moderatorQueue, updatedPackagesCount, unreviewedPackagesCount, waitingPackagesCount);

    ViewBag.SearchTerm = q;

    return View("~/Views/Packages/ListPackages.cshtml", viewModel);
}
// Flattens the per-version / per-client "Items" hierarchy of a statistics
// report into a list of facts (dimensions + download count).
// Throws StatisticsReportNotFoundException when the report has no "Items".
private static IList<StatisticsFact> CreateFacts(JObject data)
{
    IList<StatisticsFact> facts = new List<StatisticsFact>();
    JToken itemsToken;

    // Check if the "Items" exist before trying to access them.
    if (!data.TryGetValue("Items", out itemsToken))
    {
        throw new StatisticsReportNotFoundException();
    }

    // Use the token we already looked up instead of re-indexing data["Items"].
    foreach (JObject perVersion in itemsToken)
    {
        string version = (string)perVersion["Version"];

        foreach (JObject perClient in perVersion["Items"])
        {
            var clientName = (string)perClient["ClientName"];
            var clientVersion = (string)perClient["ClientVersion"];

            // "Operation" is optional in the report schema; default to "unknown".
            var operation = "unknown";
            JToken opt;
            if (perClient.TryGetValue("Operation", out opt))
            {
                operation = (string)opt;
            }

            var downloads = (int)perClient["Downloads"];
            facts.Add(new StatisticsFact(CreateDimensions(version, clientName, clientVersion, operation), downloads));
        }
    }

    return facts;
}
// Loads the recent-popularity detail report for a package and filters its
// facts down to one version. Returns null when input is missing, the report
// blob doesn't exist, or a known transient/parse failure occurs.
public async Task<StatisticsPackagesReport> GetPackageVersionDownloadsByClient(string packageId, string packageVersion)
{
    try
    {
        if (string.IsNullOrEmpty(packageId) || string.IsNullOrEmpty(packageVersion))
        {
            return null;
        }

        // Report blob names are lower-cased by convention.
        var reportName = string.Format(CultureInfo.CurrentCulture, RecentpopularityDetailBlobNameFormat, StatisticsReportName.RecentPopularityDetail_, packageId).ToLowerInvariant();
        var reportContent = await _reportService.Load(reportName);
        if (reportContent == null)
        {
            return null;
        }

        var content = JObject.Parse(reportContent.Content);
        var report = new StatisticsPackagesReport
        {
            LastUpdatedUtc = reportContent.LastUpdatedUtc
        };

        // Keep only the facts belonging to the requested version.
        var facts = new List<StatisticsFact>();
        foreach (var fact in CreateFacts(content))
        {
            if (fact.Dimensions["Version"] == packageVersion)
            {
                facts.Add(fact);
            }
        }

        report.Facts = facts;
        return report;
    }
    catch (NullReferenceException e)
    {
        // NOTE(review): catching NullReferenceException can mask real bugs;
        // kept as-is for behavior parity.
        QuietLog.LogHandledException(e);
        return null;
    }
    catch (JsonReaderException e)
    {
        QuietLog.LogHandledException(e);
        return null;
    }
    catch (StorageException e)
    {
        QuietLog.LogHandledException(e);
        return null;
    }
    catch (ArgumentException e)
    {
        QuietLog.LogHandledException(e);
        return null;
    }
}
// helper functions
// Loads and converts the added and updated packages into a single
// package-key -> IndexDocumentData map.
// NOTE(review): `deletes` is accepted but never used here — presumably
// handled by the caller; confirm before removing the parameter.
public static IDictionary<int, IndexDocumentData> LoadDocumentData(string connectionString, List<int> adds, List<int> updates, List<int> deletes, IDictionary<int, IEnumerable<string>> feeds, IDictionary<int, int> checksums, TextWriter log = null)
{
    log = log ?? DefaultTraceWriter;

    IDictionary<int, IndexDocumentData> packages = new Dictionary<int, IndexDocumentData>();

    // Adds first, then updates; both are folded into the same map.
    List<Package> addsPackages = GalleryExport.GetPackages(connectionString, adds, log, verbose: false);
    foreach (IndexDocumentData documentData in MakeIndexDocumentData(addsPackages, feeds, checksums))
    {
        packages.Add(documentData.Package.Key, documentData);
    }

    List<Package> updatesPackages = GalleryExport.GetPackages(connectionString, updates, log, verbose: false);
    foreach (IndexDocumentData documentData in MakeIndexDocumentData(updatesPackages, feeds, checksums))
    {
        packages.Add(documentData.Package.Key, documentData);
    }

    return packages;
}
// Feeds the documents into the index in slices of at most
// MaxDocumentsPerCommit, committing after each slice.
private static void AddPackagesToIndex(List<IndexDocumentData> indexDocumentData, Lucene.Net.Store.Directory directory, TextWriter log, PerfEventTracker perfTracker)
{
    log.WriteLine("About to add {0} packages", indexDocumentData.Count);

    int total = indexDocumentData.Count;
    for (int start = 0; start < total; start += MaxDocumentsPerCommit)
    {
        // The final slice may be shorter than MaxDocumentsPerCommit.
        int sliceSize = Math.Min(MaxDocumentsPerCommit, total - start);
        AddToIndex(directory, indexDocumentData.GetRange(start, sliceSize), log, perfTracker);
    }
}
// Builds the per-client-IP report (average time-taken and hit count) from the day's
// IIS W3C logs via LogParser, then appends each row as JSON to the report blob.
// Fixes: removed unused standardError/standardOutput locals and the throwaway initial
// list assigned to ipDetails; hoisted the JavaScriptSerializer out of the loop so it
// is allocated once instead of once per row.
private void CreateIPDetailsReport(DirectoryInfo info)
{
    string query = string.Format(@"select c-ip, avg(time-taken), count(*) from {0}\*{1}*.log GROUP BY c-ip", info.FullName, ReportDate);
    List<IISIPDetails> ipDetails = InvokeLogParserProcessForIPDetails(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF", 3);

    if (ipDetails.Count > 0)
    {
        string blobName = "IISIPDetails" + ReportDate + ".json";
        var serializer = new JavaScriptSerializer(); // one serializer for all rows
        int count = 0;
        foreach (IISIPDetails detail in ipDetails)
        {
            var json = serializer.Serialize(detail);
            ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple<string, string>(count.ToString(), json), ipDetails.Count, ContainerName);
            count++;
        }
    }
}
/// <summary>
/// Lists packages for the gallery, either as a normal search/browse page or, when
/// <paramref name="moderatorQueue"/> is set, as the moderation queue (submitted
/// packages grouped by their submitted status, with counts for each bucket).
/// Fixes: removed a redundant re-trim of <c>q</c> (it is already normalized to a
/// non-null trimmed string at method entry) and replaced a non-short-circuit
/// <c>&amp;</c> with <c>&amp;&amp;</c> in the queue-padding condition.
/// </summary>
public virtual ActionResult ListPackages(string q, string sortOrder = null, int page = 1, bool prerelease = false, bool moderatorQueue = false)
{
    if (page < 1) page = 1;
    // Normalize the query once; q is non-null and trimmed from here on.
    q = (q ?? string.Empty).Trim();

    IQueryable<Package> packageVersions = packageSvc.GetPackagesForListing(prerelease);
    IEnumerable<Package> packagesToShow = new List<Package>();

    if (moderatorQueue)
    {
        var unknownStatus = PackageStatusType.Unknown.GetDescriptionOrValue();
        //This is going to be fun. Unknown status ones would be listed, but sometimes a few might slip through the cracks if a maintainer unlists a package.
        // A user can just email us to catch those though.
        packageVersions = packageVersions.Where(p => !p.IsPrerelease).Where(p => p.StatusForDatabase == unknownStatus || p.StatusForDatabase == null);
    }

    if (String.IsNullOrEmpty(sortOrder))
    {
        // Determine the default sort order. If no query string is specified, then the sortOrder is DownloadCount
        // If we are searching for something, sort by relevance.
        sortOrder = q.IsEmpty() ? Constants.PopularitySortOrder : Constants.RelevanceSortOrder;
    }

    int totalHits = 0;
    int updatedPackagesCount = 0;
    int respondedPackagesCount = 0;
    int unreviewedPackagesCount = 0;
    int waitingPackagesCount = 0;
    var searchFilter = GetSearchFilter(q, sortOrder, page, prerelease);

    if (moderatorQueue)
    {
        var submittedPackages = packageSvc.GetSubmittedPackages(useCache: !Request.IsAuthenticated).ToList();

        var updatedStatus = PackageSubmittedStatusType.Updated.ToString();
        var respondedStatus = PackageSubmittedStatusType.Responded.ToString();
        var readyStatus = PackageSubmittedStatusType.Ready.ToString();
        var pendingStatus = PackageSubmittedStatusType.Pending.ToString();
        var waitingStatus = PackageSubmittedStatusType.Waiting.ToString();

        //var resubmittedPackages = submittedPackages.Where(p => p.ReviewedDate.HasValue && p.Published > p.ReviewedDate).OrderBy(p => p.Published).ToList();
        var resubmittedPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == updatedStatus).OrderBy(p => p.Published).ToList();
        updatedPackagesCount = resubmittedPackages.Count;

        var respondedPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == respondedStatus).OrderBy(p => p.LastUpdated).ToList();
        respondedPackagesCount = respondedPackages.Count;

        var unreviewedPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == readyStatus).OrderBy(p => p.Published).ToList();
        unreviewedPackagesCount = unreviewedPackages.Count;

        // Pending (or legacy null-status) packages count toward the unreviewed bucket.
        var pendingAutoReviewPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == pendingStatus || p.SubmittedStatusForDatabase == null).OrderBy(p => p.Published).ToList();
        unreviewedPackagesCount += pendingAutoReviewPackages.Count;

        //var waitingForMaintainerPackages = submittedPackages.Where(p => p.ReviewedDate >= p.Published).OrderByDescending(p => p.ReviewedDate).ToList();
        var waitingForMaintainerPackages = submittedPackages.Where(p => p.SubmittedStatusForDatabase == waitingStatus).OrderByDescending(p => p.ReviewedDate).ToList();
        waitingPackagesCount = waitingForMaintainerPackages.Count;

        packagesToShow = resubmittedPackages.Union(respondedPackages).Union(unreviewedPackages).Union(pendingAutoReviewPackages).Union(waitingForMaintainerPackages);

        if (!string.IsNullOrWhiteSpace(q))
        {
            packagesToShow = packagesToShow.AsQueryable().Search(q).ToList();
        }

        switch (searchFilter.SortProperty)
        {
            case SortProperty.DisplayName:
                packagesToShow = packagesToShow.OrderBy(p => p.Title);
                break;
            case SortProperty.Recent:
                packagesToShow = packagesToShow.OrderByDescending(p => p.Published);
                break;
            default:
                //do not change the search order
                break;
        }

        totalHits = packagesToShow.Count() + packageVersions.Count();

        // When not searching and the queue fits within the page window, pad the
        // page out with the regular listing ordered by download count.
        if ((searchFilter.Skip + searchFilter.Take) >= packagesToShow.Count() && string.IsNullOrWhiteSpace(q))
            packagesToShow = packagesToShow.Union(packageVersions.OrderByDescending(pv => pv.PackageRegistration.DownloadCount).ToList());

        packagesToShow = packagesToShow.Skip(searchFilter.Skip).Take(searchFilter.Take);
    }
    else
    {
        var results = searchSvc.Search(searchFilter);

        var cacheTime = DateTime.UtcNow.AddSeconds(30);
        // fetch most common query from cache to relieve load on the search service
        if (string.IsNullOrEmpty(q) && page == 1)
        {
            cacheTime = DateTime.UtcNow.AddMinutes(10);
        }

        totalHits = 0;
        int.TryParse(Cache.Get(
            string.Format(
                "searchResultsHits-{0}-{1}-{2}-{3}-{4}",
                searchFilter.SearchTerm.to_lower(),
                searchFilter.IncludePrerelease,
                searchFilter.Skip,
                searchFilter.SortProperty.to_string(),
                searchFilter.SortDirection.to_string()),
            cacheTime,
            () => results.Hits.to_string()), out totalHits);

        packagesToShow = Cache.Get(
            string.Format(
                "searchResults-{0}-{1}-{2}-{3}-{4}",
                searchFilter.SearchTerm.to_lower(),
                searchFilter.IncludePrerelease,
                searchFilter.Skip,
                searchFilter.SortProperty.to_string(),
                searchFilter.SortDirection.to_string()),
            cacheTime,
            () => results.Data.ToList());
    }

    if (page == 1 && !packagesToShow.Any())
    {
        // In the event the index wasn't updated, we may get an incorrect count.
        totalHits = 0;
    }

    var viewModel = new PackageListViewModel(
        packagesToShow,
        q,
        sortOrder,
        totalHits,
        page - 1,
        Constants.DefaultPackageListPageSize,
        Url,
        prerelease,
        moderatorQueue,
        updatedPackagesCount,
        unreviewedPackagesCount,
        waitingPackagesCount,
        respondedPackagesCount);

    ViewBag.SearchTerm = q;

    return View("~/Views/Packages/ListPackages.cshtml", viewModel);
}
// Collects the background jobs this instance should run and starts the job manager
// (only when at least one job was registered).
private static void BackgroundJobsPostStart(IAppConfiguration configuration)
{
    var jobs = new List<IJob>();

    // Let the indexing service contribute its own jobs, if one is registered.
    var indexer = Container.Kernel.TryGet<IIndexingService>();
    if (indexer != null)
    {
        indexer.RegisterBackgroundJobs(jobs, configuration);
    }

    // Without a dedicated worker, the statistics update runs in-process.
    if (!configuration.HasWorker)
    {
        jobs.Add(new UpdateStatisticsJob(
            TimeSpan.FromMinutes(5),
            () => new EntitiesContext(configuration.SqlConnectionString, readOnly: false),
            timeout: TimeSpan.FromMinutes(5)));
    }

    if (configuration.CollectPerfLogs)
    {
        jobs.Add(CreateLogFlushJob());
    }

    if (jobs.AnySafe())
    {
        _jobManager = new JobManager(jobs, new NuGetJobCoordinator())
        {
            RestartSchedulerOnFailure = true
        };
        _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
        _jobManager.Start();
    }
}
// Runs LogParser.exe with the given arguments, parses its CSV output into
// response-time rows, and returns the 15 slowest entries by average time taken.
// Fixes: the Process instance (IDisposable — it owns OS handles and the redirected
// stream) was never disposed; used string[].Length instead of the LINQ Count()
// extension; removed a throwaway list that was immediately reassigned.
private List<IISResponseTimeDetails> InvokeLogParserProcessForResponseTime(string arguments, int numFields)
{
    List<IISResponseTimeDetails> responseTimeDetails = new List<IISResponseTimeDetails>();

    using (Process logParserProcess = new Process())
    {
        ProcessStartInfo startInfo = new ProcessStartInfo(Path.Combine(Environment.CurrentDirectory, "LogParser.exe"));
        startInfo.Arguments = arguments;
        startInfo.RedirectStandardError = true;
        startInfo.RedirectStandardOutput = true;
        startInfo.RedirectStandardInput = true;
        startInfo.UseShellExecute = false;
        logParserProcess.StartInfo = startInfo;
        logParserProcess.Start();

        StreamReader reader = logParserProcess.StandardOutput;
        reader.ReadLine(); //this has the field names, ignore it.
        while (!reader.EndOfStream)
        {
            string line = reader.ReadLine();
            string[] fields = line.Split(',');
            // Only well-formed rows with the expected column count are parsed.
            if (fields.Length == numFields)
            {
                responseTimeDetails.Add(new IISResponseTimeDetails(fields[0], Convert.ToInt32(fields[1])));
            }
        }

        logParserProcess.WaitForExit();
    }

    return responseTimeDetails.OrderByDescending(x => x.AvgTimeTakenInMilliSeconds).Take(15).ToList();
}