// MLE does not work well when the item responses are either all correct or all incorrect. See the bottom section on page 354 of Ayala for a discussion of this. // In this case, we use the strategy described in the paragraph spanning pages 378-379 of Ayala. private bool IsZeroVarianceReponsePattern(List<int> responseVector) { bool allResponsesIncorrect = responseVector.TrueForAll(x => x == 0); bool allResponsesCorrect = responseVector.TrueForAll(x => x == 1); return allResponsesCorrect || allResponsesIncorrect; }
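A minimal sketch of how this guard is typically used (hypothetical; EstimateThetaForZeroVariancePattern and MleEstimate are assumed names, not from the source):

// Hypothetical caller: fall back to the Ayala pp. 378-379 strategy when the
// response pattern has zero variance, otherwise run the usual MLE estimate.
private double EstimateTheta(List<int> responseVector)
{
    if (IsZeroVarianceReponsePattern(responseVector))
        return EstimateThetaForZeroVariancePattern(responseVector); // assumed fallback
    return MleEstimate(responseVector); // assumed standard MLE routine
}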
internal static GroupType GetGroupType(List<CUEToolsLocalDBEntry> group) { if (group.Count < 2) return GroupType.Single; if (!group.TrueForAll(i => i.OffsetSafeCRC != null)) return GroupType.Unverified; if (!group.TrueForAll(i => i.OffsetSafeCRC.DifferByOffset(group[0].OffsetSafeCRC))) return GroupType.Different; if (!group.TrueForAll(i => i.OffsetSafeCRC == group[0].OffsetSafeCRC)) return GroupType.Offsetted; return GroupType.Equal; }
public IfStmt(Position position, List<ConditionalStmt>/*!!*/ conditions) : base(position) { Debug.Assert(conditions != null && conditions.Count > 0); Debug.Assert(conditions.TrueForAll(delegate(ConditionalStmt stmt) { return stmt != null; })); this.conditions = conditions; }
/* * 1. TrueForAll() - Returns true or false depending on whether every element in the list matches the condition defined by the specified predicate. * * 2. AsReadOnly() - Returns a read-only wrapper for the current collection. Use this method if you don't want the client to modify the collection. * i.e. add or remove any element from the collection. The ReadOnlyCollection will not have methods to add or remove items from the collection. * You can only read items from the collection. * * 3. TrimExcess() - Sets the capacity to the actual number of elements in the list, if that number is less than a threshold value. * * According to MSDN: * This method can be used to minimize a collection's memory overhead if no new elements will be added to the collection. * The cost of reallocation and copying a large List<T> can be considerable. * So the TrimExcess() method does nothing if the list is at more than 90% of capacity. * This avoids incurring a large reallocation cost for a relatively small gain. The current threshold is 90%, but this could change in the future. */ static void Main(string[] args) { Customer customer1 = new Customer() { ID = 101, Name = "Anand Dev", Salary = 4000 }; Customer customer2 = new Customer() { ID = 102, Name = "Nanhi", Salary = 8000 }; Customer customer3 = new Customer() { ID = 103, Name = "Praveen", Salary = 6000 }; List<Customer> listCustomers = new List<Customer>(); listCustomers.Add(customer1); listCustomers.Add(customer3); listCustomers.Add(customer2); #region 1. TrueForAll() method. bool trueForAll = listCustomers.TrueForAll(x => x.Salary > 4000); Console.WriteLine("Are all salaries greater than 4000 = {0}", trueForAll); #endregion #region 2. AsReadOnly() method. ReadOnlyCollection<Customer> listReadOnlyCustomers = listCustomers.AsReadOnly(); Console.WriteLine("Number of items = " + listReadOnlyCustomers.Count); #endregion #region 3. TrimExcess() method. List<Customer> listTrimCstomers = new List<Customer>(100); listTrimCstomers.AddRange(listCustomers); Console.WriteLine("\nCapacity before trim = " + listTrimCstomers.Capacity); listTrimCstomers.TrimExcess(); Console.WriteLine("Capacity after trim = " + listTrimCstomers.Capacity); #endregion Console.ReadKey(); }
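A small illustrative check of the 90% threshold described above (values chosen for illustration, not from the source): TrimExcess() only reallocates when the element count is sufficiently below the current capacity.

// Illustrative only: a list at roughly 90% of its capacity is left alone,
// while a mostly-empty list gets its capacity trimmed down to its Count.
List<int> nearlyFull = new List<int>(10);
for (int i = 0; i < 9; i++) nearlyFull.Add(i);   // 9 of 10 slots in use
nearlyFull.TrimExcess();
Console.WriteLine(nearlyFull.Capacity);          // still 10; trimming is skipped near the threshold

List<int> mostlyEmpty = new List<int>(100);
for (int i = 0; i < 9; i++) mostlyEmpty.Add(i);  // far below the threshold
mostlyEmpty.TrimExcess();
Console.WriteLine(mostlyEmpty.Capacity);         // trimmed down to 9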
/// <summary> /// Processes the list of events asynchronously. /// </summary> /// <param name="evs">List of events.</param> /// <returns>Returns <c>True</c>, if all events have been consumed; otherwise returns <c>False.</c></returns> public async Task<bool> ProcessEventsAsync(IEnumerable<BaseEvent> evs) { var results = new List<bool>(); using (var uow = this._uowm.CreateInstance<SampleDbContext>()) { uow.BeginTransaction(); try { foreach (var ev in evs) { var handlers = this.GetHandlers(ev); foreach (var handler in handlers) { var result = await handler.ProcessAsync(ev); results.Add(result); } } uow.Commit(); } catch { uow.Rollback(); results.Add(false); throw; } } return await Task.FromResult(results.TrueForAll(p => p)); }
public ScriptInventory(string scriptPath, bool deepSearch) { this.scriptPath = Directory.GetCurrentDirectory(); if (!string.IsNullOrEmpty(scriptPath)) { this.scriptPath = Path.Combine(this.scriptPath, scriptPath); } List<Statement> unsupportedStatements = new List<Statement>(); foreach (string fileName in Directory.GetFiles(this.scriptPath, "*.sql", deepSearch ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)) { unsupportedStatements.Clear(); using (TextReader reader = new StreamReader(fileName, true)) { try { ProcessSingleScript(reader, unsupportedStatements.Add); } catch (ParseException ex) { ex.FileName = fileName; throw; } } if (unsupportedStatements.Count > 0) { // only files which have no DDL as "unsupported statements" are assumed to be setup scripts if (unsupportedStatements.TrueForAll(statement => !(statement is DdlStatement))) { foreach (Statement statement in unsupportedStatements) { AddAdditionalSetupStatement(statement); } } else { Trace.WriteLine(string.Format("Script {0} contains {1} unsupported statements", fileName, unsupportedStatements.Count)); } } } AdditionalSetupStatementSetSchemaOverride(); }
public static Event Compound(this Event ev, Action<Event> action) { List<Event> events = new List<Event> (); Event comp = Event.Create(); action(comp); ev.Started += (args) => { Event e = comp; do { events.Add(e); e.StartEx(true, true); Event t = e.Next; e.Detach(); e = t; }while(e != null); }; ev.Updated += (args) => { return events.TrueForAll(e=>!e.IsRunningSingular); }; return ev.Extend (); }
static void Main(string[] args) { var lista = new List<int>(); lista.AddRange(new int[]{36,30,8,4,2}); bool pares = lista.TrueForAll(n => n%2 == 0); // This is equivalent to what appears in: I) pares = lista.TrueForAll(EsPar); // I) if (pares) { Console.WriteLine("All the numbers in lista are even."); } else { Console.WriteLine("Not all the numbers in lista are even."); } Console.ReadKey(); }
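EsPar is referenced above but not defined in the snippet; a minimal sketch of a compatible predicate (assumed, matching the lambda n => n % 2 == 0):

// Assumed helper: the method-group call lista.TrueForAll(EsPar) needs a method
// compatible with Predicate<int>, i.e. one that takes an int and returns bool.
static bool EsPar(int n)
{
    return n % 2 == 0;
}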
public void ClearRemovesAllStreams(List<string> ids) { List<IConnection<User, string>> connections = ids.Select(id => Connect(id)).ToList(); Clear(); ids.TrueForAll(id => false == ExistsFor(id)).Should().BeTrue(); }
public BulkPutRequest(string bucketName, List<Ds3Object> objects) { this.BucketName = bucketName; this.Objects = objects; if (!objects.TrueForAll(obj => obj.Size.HasValue)) { throw new Ds3RequestException(Resources.ObjectsMissingSizeException); } QueryParams.Add("operation", "start_bulk_put"); }
public override VetoResult AllowPut(string key, RavenJObject document, RavenJObject metadata, TransactionInformation transactionInformation) { if (Database.Name != null && Database.Name != Constants.SystemDatabase) return VetoResult.Allowed; if (key.StartsWith(RavenDatabasesPrefix, StringComparison.InvariantCultureIgnoreCase) == false) return VetoResult.Allowed; var tempPermission = metadata[Constants.AllowBundlesChange]; if (tempPermission != null) metadata.Remove(Constants.AllowBundlesChange); // this is a temp marker so do not persist this medatada var bundlesChangesAllowed = tempPermission != null && tempPermission.Value<string>() .Equals("true", StringComparison.InvariantCultureIgnoreCase); if (bundlesChangesAllowed) return VetoResult.Allowed; var existingDbDoc = Database.Documents.Get(key, transactionInformation); if (existingDbDoc == null) return VetoResult.Allowed; var currentDbDocument = existingDbDoc.DataAsJson.JsonDeserialization<DatabaseDocument>(); var currentBundles = new List<string>(); string value; if (currentDbDocument.Settings.TryGetValue(Constants.ActiveBundles, out value)) currentBundles = value.GetSemicolonSeparatedValues(); var newDbDocument = document.JsonDeserialization<DatabaseDocument>(); var newBundles = new List<string>(); if (newDbDocument.Settings.TryGetValue(Constants.ActiveBundles, out value)) newBundles = value.GetSemicolonSeparatedValues(); if (currentBundles.Count == newBundles.Count) return VetoResult.Allowed; if (currentBundles.Count == 0) return VetoResult.Allowed; if (currentBundles.TrueForAll(x => newBundles.Contains(x))) return VetoResult.Allowed; return VetoResult.Deny( "You should not change 'Raven/ActiveBundles' setting for a database. This setting should be set only once when a database is created. " + "If you really need to override it you have to specify {\"" + Constants.AllowBundlesChange + "\": true} in metadata of a database document every time when you send it." + Environment.NewLine + "Current: " + string.Join("; ", currentBundles) + Environment.NewLine + "New: " + string.Join("; '", newBundles)); }
private static int SolveProblem() { var primes = new List<int> {2, 3, 5, 7, 11, 13}; for (var i = 15; primes.Count < 10001; i += 2) { var i1 = i; if(primes.TrueForAll(x => i1%x != 0)) primes.Add(i1); } return primes.Max(); }
public ListEx(Position p, List<Expression>/*!*/ lvalues, Expression rvalue) : base(p) { Debug.Assert(lvalues != null /*&& rvalue != null*/); // rvalue can be determined during runtime in case of list in list. Debug.Assert(lvalues.TrueForAll(delegate(Expression lvalue) { return lvalue == null || lvalue is VarLikeConstructUse || lvalue is ListEx; })); this.LValues = lvalues; this.RValue = rvalue; }
public bool IsMatch(string filename) { var matches = new List<bool>(); if (Path != null) { matches.Add(Regex.IsMatch(filename, PathPattern)); } if (Name != null) { matches.Add(Regex.IsMatch(filename, NamePattern)); } return matches.Any() && matches.TrueForAll(m => m); }
public bool Archive() { var result = new List<bool>(); var applications = _applicationRepository.GetApplicationsOutsideCurrentWindow(); foreach (var application in applications.GenerateStatuses(_statusGeneratorFactory, _pipelinePositionGenerator)) { if (!application.PipelinePosition.IsActive()) { result.Add(_applicationRepository.Archive(application.ApplicationReference)); } } return result.TrueForAll(x => x); }
public static void Initialize() { var tables = new List<ClilocTable>(Tables.Values); //bool noFind = false; Core.DataDirectories.TakeWhile(path => !tables.TrueForAll(t => t.Loaded)) .Where(path => !String.IsNullOrWhiteSpace(path)) .ForEach( path => Parallel.ForEach( Tables, kvp => { if (kvp.Value.Loaded) { return; } string file = "Cliloc." + kvp.Key.ToString().ToLower(); string stub = IOUtility.GetSafeFilePath(path + "/" + file, true); if (!File.Exists(stub)) { Console.WriteLine("WARNING: {0} not found!", file); //CSOptions.ToConsole("WARNING: {0} not found!", file); //noFind = true; return; } Console.WriteLine("SUCCESS: {0} processed!", file); kvp.Value.Load(new FileInfo(stub)); })); /*if (noFind) { CSOptions.ToConsole( "WARNING: One or more required cliloc files could not be loaded, any features that rely on this service will not work as expected and/or may cause a fatal exception!"); }*/ tables.Clear(); tables.TrimExcess(); }
private static void CSConfig() { CommandUtility.Register("ExportCliloc", AccessLevel.Administrator, ExportCommand); var tables = new List<ClilocTable>(Tables.Values); //bool noFind = false; Core.DataDirectories.TakeWhile(path => !tables.TrueForAll(t => t.Loaded)) .Where(path => !String.IsNullOrWhiteSpace(path)) .ForEach( path => Parallel.ForEach( Tables, kvp => { if (kvp.Value.Loaded) { return; } var file = "Cliloc." + kvp.Key.ToString().ToLower(); var stub = IOUtility.GetSafeFilePath(path + "/" + file, true); if (!File.Exists(stub)) { //CSOptions.ToConsole("WARNING: {0} not found!", file); //noFind = true; return; } kvp.Value.Load(new FileInfo(stub)); })); /*if (noFind) { CSOptions.ToConsole( "WARNING: One or more required cliloc files could not be loaded, any features that rely on this service will not work as expected and/or may cause a fatal exception!"); }*/ tables.Free(true); }
/// <summary> /// Generate zone points using Poisson sampling /// </summary> /// <param name="count"></param> /// <param name="landBounds"></param> /// <param name="density"></param> /// <returns></returns> protected override Vector2[] GeneratePoints(int count, Bounds2i landBounds, Vector2 density) { var checkedPoints = new List<Vector2>(); var uncheckedPoints = new List<Vector2>(); //Generate start point var zoneCenterX = Random.Range((float)landBounds.Min.X, landBounds.Max.X); var zoneCenterY = Random.Range((float)landBounds.Min.Z, landBounds.Max.Z); var startPoint = new Vector2(zoneCenterX, zoneCenterY); uncheckedPoints.Add(startPoint); //Generate point around first unchecked while (uncheckedPoints.Any()) { var processedPoint = uncheckedPoints.First(); uncheckedPoints.RemoveAt(0); for (int i = 0; i < 10; i++) { var r = Random.Range(density.x + 0.1f, density.y); var a = Random.Range(0, 2*Mathf.PI); var newPoint = processedPoint + new Vector2(r * Mathf.Cos(a), r*Mathf.Sin(a)); if (landBounds.Contains((Vector2i) newPoint)) { if(checkedPoints.TrueForAll(p => Vector2.SqrMagnitude(p - newPoint) > density.x * density.x) && uncheckedPoints.TrueForAll(p => Vector2.SqrMagnitude(p - newPoint) > density.x * density.x)) uncheckedPoints.Add(newPoint); } } checkedPoints.Add(processedPoint); if (checkedPoints.Count >= count) break; } return checkedPoints.ToArray(); }
public void SetData(List<PartyMember> memberList) { if (memberList.TrueForAll(_member => _member.IsFullData)) { if(m_members == null || m_members.Count == 0) { ((EOGame)Game).Hud.SetStatusLabel(DATCONST2.STATUS_LABEL_TYPE_INFORMATION, DATCONST2.STATUS_LABEL_PARTY_YOU_JOINED); ((EOGame)Game).Hud.AddChat(ChatTabs.System, "", World.GetString(DATCONST2.STATUS_LABEL_PARTY_YOU_JOINED), ChatType.PlayerParty, ChatColor.PM); } Visible = true; m_numMembers.Text = string.Format("{0}", memberList.Count); m_members = memberList; m_mainIsLeader = m_members.FindIndex(_member => _member.IsLeader && _member.ID == World.Instance.MainPlayer.ActiveCharacter.ID) >= 0; m_scrollBar.UpdateDimensions(memberList.Count); m_buttons.Clear(); foreach (PartyMember member in m_members) { _addRemoveButtonForMember(member); } } else { //update HP only // ReSharper disable once ForCanBeConvertedToForeach for (int i = 0; i < memberList.Count; ++i) { int ndx = m_members.FindIndex(_member => _member.ID == memberList[i].ID); PartyMember member = m_members[ndx]; member.SetPercentHealth(memberList[i].PercentHealth); m_members[ndx] = member; } } }
internal void EventTimer_Tick(object sender, EventArgs e) { if (_ShutdownProcess == null) { _Main.ClearTable(); List<bool> readyItems = new List<bool>(_Events.Count); foreach (Event item in _Events) { bool ready = item.Ready(); int index = _Main.AddEvent(item); _Main.SetReadyState(index, ready); readyItems.Add(ready); } //if all elements in the list are 'true' if (readyItems.TrueForAll(item => { return item; })) this.InitiateShutdown(); } }
public DicomFile Serialize(SegmentationSerializerCallback callback) { Platform.CheckForNullReference(callback, "callback"); Debug.Assert(!_segDocument.Saved, "Cannot serialize previously saved document"); // TODO: validate that all Segs are coming from the same study! IPresentationImage firstIPresentationImage = null; if (_segDocument.Segs != null) { var oneSeg = _segDocument.Segs.FirstOrDefault(item => item != null && item.SegGraphics != null && item.SegGraphics.OfType<PolygonalRoiSegGraphic>().Any()); if (oneSeg != null) { var polyGraphic = oneSeg.SegGraphics.OfType<PolygonalRoiSegGraphic>().First(); firstIPresentationImage = callback(polyGraphic.ImageSopInstanceUid, polyGraphic.ImageFrameNumber); } } var firstImageSopProvider = firstIPresentationImage as IImageSopProvider; if (firstImageSopProvider == null) return null; var sourceSop = firstImageSopProvider.ImageSop; // source of the common DICOM attributes var dicomFile = _sopInstanceFactory.CreateFile(sourceSop); // NOTE. These modules are initialized by the factory: // patient IE // - PatientModule // - ClinicalTrialSubjectModule // study IE // - GeneralStudyModule // - PatientStudyModule // - ClinicalTrialStudyModule // equipment IE // - GeneralEquipmentModule // Data values const int instanceNumber = 1; var contentDateTime = DateTime.Now; var segDocumentIod = new SegmentationDocumentIod(dicomFile.DataSet); // PatientModule var patientModule = segDocumentIod.PatientModuleIod; // patientModule.PatientBreedDescription = null; // bug in CC code patientModule.DicomAttributeProvider[DicomTags.PatientBreedDescription].SetEmptyValue(); // GeneralEquipmentModule var generalEquipmentModuleIod = segDocumentIod.GeneralEquipmentModuleIod; generalEquipmentModuleIod.DeviceSerialNumber = EnvironmentUtilities.MachineIdentifier; // GeneralSeriesModule var srcGeneralSeriesModuleIod = new GeneralSeriesModuleIod(sourceSop.DataSource); var generalSeriesModuleIod = segDocumentIod.GeneralSeriesModuleIod; generalSeriesModuleIod.SeriesDescription = _segDocument.SeriesDescription; generalSeriesModuleIod.SeriesNumber = _segDocument.SeriesNumber; generalSeriesModuleIod.Modality = Modality.Seg; generalSeriesModuleIod.SeriesInstanceUid = DicomUid.GenerateUid().UID; generalSeriesModuleIod.Laterality = srcGeneralSeriesModuleIod.Laterality; generalSeriesModuleIod.SeriesDateTime = _segDocument.CreationDateTime; generalSeriesModuleIod.PerformingPhysiciansName = srcGeneralSeriesModuleIod.PerformingPhysiciansName; generalSeriesModuleIod.PerformingPhysicianIdentificationSequence = srcGeneralSeriesModuleIod.PerformingPhysicianIdentificationSequence; generalSeriesModuleIod.ProtocolName = srcGeneralSeriesModuleIod.ProtocolName; { // General Description Code Sequence is missing from the GeneralSeriesModuleIod implementation var seriesDescriptionCodeSequence = new CodeSequenceMacro { CodeValue = "113076", CodeMeaning = "Segmentation", CodingSchemeDesignator = "DCM" }; var result = new[] { seriesDescriptionCodeSequence.DicomSequenceItem }; generalSeriesModuleIod.DicomAttributeProvider[DicomTags.SeriesDescriptionCodeSequence].Values = result; } string userDicomName = null; if (_segDocument.UserInfo != null && !string.IsNullOrEmpty(_segDocument.UserInfo.Name)) { userDicomName = FormatDicomName(_segDocument.UserInfo.Name); if (userDicomName != null) generalSeriesModuleIod.OperatorsName = userDicomName; // NOTE: Login name is being ignored for now } generalSeriesModuleIod.BodyPartExamined = srcGeneralSeriesModuleIod.BodyPartExamined; generalSeriesModuleIod.PatientPosition = 
srcGeneralSeriesModuleIod.PatientPosition; generalSeriesModuleIod.RequestAttributesSequence = srcGeneralSeriesModuleIod.RequestAttributesSequence; //generalSeriesModuleIod.AnatomicalOrientationType = srcGeneralSeriesModuleIod.AnatomicalOrientationType; // Not needed // FrameOfReferenceModule var srcFrameOfReferenceModuleIod = new FrameOfReferenceModuleIod(sourceSop.DataSource); segDocumentIod.FrameOfReferenceModuleIod.FrameOfReferenceUid = srcFrameOfReferenceModuleIod.FrameOfReferenceUid; segDocumentIod.FrameOfReferenceModuleIod.PositionReferenceIndicator = srcFrameOfReferenceModuleIod.PositionReferenceIndicator; // Initialize Segmentation Image Module first var segmentationImageModuleIod = segDocumentIod.SegmentationImageModuleIod; segmentationImageModuleIod.InitializeAttributes(); // General Image Module and Segmentation Image Module var srcGeneralImageModuleIod = new GeneralImageModuleIod(sourceSop.DataSource); var generalImageModuleIod = segDocumentIod.GeneralImageModuleIod; generalImageModuleIod.InstanceNumber = instanceNumber; generalImageModuleIod.PatientOrientation = srcGeneralImageModuleIod.PatientOrientation; generalImageModuleIod.ContentDateTime = contentDateTime; generalImageModuleIod.ImageType = "DERIVED\\PRIMARY"; generalImageModuleIod.AcquisitionNumber = srcGeneralImageModuleIod.AcquisitionNumber; generalImageModuleIod.AcquisitionDateTime = srcGeneralImageModuleIod.AcquisitionDateTime; generalImageModuleIod.QualityControlImage = srcGeneralImageModuleIod.QualityControlImage; generalImageModuleIod.BurnedInAnnotation = srcGeneralImageModuleIod.BurnedInAnnotation; generalImageModuleIod.RecognizableVisualFeatures = srcGeneralImageModuleIod.RecognizableVisualFeatures; generalImageModuleIod.LossyImageCompression = srcGeneralImageModuleIod.LossyImageCompression.HasValue && srcGeneralImageModuleIod.LossyImageCompression.Value; generalImageModuleIod.LossyImageCompressionMethod = srcGeneralImageModuleIod.LossyImageCompressionMethod; generalImageModuleIod.LossyImageCompressionRatio = srcGeneralImageModuleIod.LossyImageCompressionRatio; generalImageModuleIod.IrradiationEventUid = srcGeneralImageModuleIod.IrradiationEventUid; // Image Pixel Module and Segmentation Image Module var srcImagePixelModule = new ImagePixelMacroIod(sourceSop.DataSource); var imagePixelModule = segDocumentIod.ImagePixelModuleIod; imagePixelModule.Rows = srcImagePixelModule.Rows; // same height as the the image imagePixelModule.Columns = srcImagePixelModule.Columns; // same width as the image //imagePixelModule.PixelAspectRatio = srcImagePixelModule.PixelAspectRatio; // same as the image // Continue initialization of non-default values for the Segmentation Image Module segmentationImageModuleIod.ContentLabel = SanitizeDicomCsValue(_segDocument.ContentLabel); if (!string.IsNullOrEmpty(userDicomName)) segmentationImageModuleIod.ContentCreatorsName = userDicomName; segmentationImageModuleIod.SegmentationType = SegmentationType.BINARY; // Per segmentation and per frame item initialization var docHasOneFrame = _segDocument.Segs.Where(item => item != null && item.SegGraphics != null).Sum(seg => seg.SegGraphics.OfType<PolygonalRoiSegGraphic>().Count()) == 1; var docHasOneSeg = _segDocument.Segs.Count(item => item != null && item.SegGraphics != null && item.SegGraphics.OfType<PolygonalRoiSegGraphic>().Any()) == 1; var multiFrameDimensionsModuleIod = segDocumentIod.MultiFrameDimensionModuleIod; multiFrameDimensionsModuleIod.InitializeAttributes(); var segmentSequenceItems = new List<SegmentSequence>(); var 
dimensionIndexSequenceItems = new List<DimensionIndexSequenceItem>(); var dimensionOrganizationSequenceItems = new List<DimensionOrganizationSequenceItem>(); var multiFrameFunctionalGroupsModuleIod = segDocumentIod.MultiFrameFunctionalGroupsModuleIod; multiFrameFunctionalGroupsModuleIod.InitializeAttributes(); var perFrameFunctionalGroupSequenceItems = new List<FunctionalGroupsSequenceItem>(); var frameBytesList = new List<byte[]>(); // list of pixel data for each frame var seriesUidToSopClassUidToSopInstanceUid = new Dictionary<string, Dictionary<string, HashSet<string>>>(); var segmentNumber = 0; foreach (var seg in _segDocument.Segs) { segmentNumber++; Debug.Assert(segmentNumber == 1, "We're only supposed to create one Segment per document for now"); // Segment Sequence initialization var segmentSequenceItemIod = segmentationImageModuleIod.CreateSegmentSequence(); segmentSequenceItemIod.SegmentNumber = segmentNumber; segmentSequenceItemIod.SegmentLabel = seg.Label; segmentSequenceItemIod.SegmentDescription = seg.Description; segmentSequenceItemIod.SegmentAlgorithmType = "MANUAL"; #region Category, Type, Anatomic Region, Anatomic Region Modifier var selectedCategory = seg.SelectedCategory; if (selectedCategory != null) { // Category var segmentedPropertyCategoryCodeSequenceItem = segmentSequenceItemIod.CreateSegmentedPropertyCategoryCodeSequence(); segmentedPropertyCategoryCodeSequenceItem.CodeValue = selectedCategory.CodeValue; segmentedPropertyCategoryCodeSequenceItem.CodeMeaning = selectedCategory.CodeMeaning; segmentedPropertyCategoryCodeSequenceItem.CodingSchemeDesignator = selectedCategory.CodingSchemeDesignator; //if (!string.IsNullOrEmpty(selectedCategory.CodingSchemeVersion)) // segmentedPropertyCategoryCodeSequenceItem.CodingSchemeVersion = selectedCategory.CodingSchemeVersion; segmentSequenceItemIod.SegmentedPropertyCategoryCodeSequence = new[] { segmentedPropertyCategoryCodeSequenceItem }; // Type if (selectedCategory.SelectedType != null) { var segmentedPropertyTypeCodeSequenceItem = segmentSequenceItemIod.CreateSegmentedPropertyTypeCodeSequence(); segmentedPropertyTypeCodeSequenceItem.CodeValue = selectedCategory.SelectedType.CodeValue; segmentedPropertyTypeCodeSequenceItem.CodeMeaning = selectedCategory.SelectedType.CodeMeaning; segmentedPropertyTypeCodeSequenceItem.CodingSchemeDesignator = selectedCategory.SelectedType.CodingSchemeDesignator; //if (!string.IsNullOrEmpty(selectedCategory.SelectedType.CodingSchemeVersion)) // segmentedPropertyTypeCodeSequenceItem.CodingSchemeVersion = selectedCategory.SelectedType.CodingSchemeVersion; // Type modifier if (selectedCategory.SelectedType.SelectedTypeModifier != null) { var segmentedPropertyTypeModifierCodeSequenceItem = new CodeSequenceMacro(); segmentedPropertyTypeModifierCodeSequenceItem.CodeValue = selectedCategory.SelectedType.SelectedTypeModifier.CodeValue; segmentedPropertyTypeModifierCodeSequenceItem.CodeMeaning = selectedCategory.SelectedType.SelectedTypeModifier.CodeMeaning; segmentedPropertyTypeModifierCodeSequenceItem.CodingSchemeDesignator = selectedCategory.SelectedType.SelectedTypeModifier.CodingSchemeDesignator; //if (!string.IsNullOrEmpty(selectedCategory.SelectedType.SelectedTypeModifier.CodingSchemeVersion)) // segmentedPropertyTypeModifierCodeSequenceItem.CodingSchemeVersion = selectedCategory.SelectedType.SelectedTypeModifier.CodingSchemeVersion; segmentedPropertyTypeCodeSequenceItem.SegmentedPropertyTypeModifierCodeSequence = new[] {segmentedPropertyTypeModifierCodeSequenceItem}; } 
segmentSequenceItemIod.SegmentedPropertyTypeCodeSequence = new[] { segmentedPropertyTypeCodeSequenceItem }; } // Anatomic Region var selectedAnatomicRegion = selectedCategory.SelectedAnatomicRegion; if (selectedAnatomicRegion != null) { var anatomicRegionSequenceItem = segmentSequenceItemIod.CreateAnatomicRegionSequence(); anatomicRegionSequenceItem.CodeValue = selectedAnatomicRegion.CodeValue; anatomicRegionSequenceItem.CodeMeaning = selectedAnatomicRegion.CodeMeaning; anatomicRegionSequenceItem.CodingSchemeDesignator = selectedAnatomicRegion.CodingSchemeDesignator; //if (!string.IsNullOrEmpty(selectedAnatomicRegion.CodingSchemeVersion)) // anatomicRegionSequenceItem.CodingSchemeVersion = selectedAnatomicRegion.CodingSchemeVersion; // Anatomic region Modifier if (selectedAnatomicRegion.SelectedAnatomicRegionModifier != null) { var anatomicRegionModifierSequenceItem = new CodeSequenceMacro(); anatomicRegionModifierSequenceItem.CodeValue = selectedAnatomicRegion.SelectedAnatomicRegionModifier.CodeValue; anatomicRegionModifierSequenceItem.CodeMeaning = selectedAnatomicRegion.SelectedAnatomicRegionModifier.CodeMeaning; anatomicRegionModifierSequenceItem.CodingSchemeDesignator = selectedAnatomicRegion.SelectedAnatomicRegionModifier.CodingSchemeDesignator; //if (!string.IsNullOrEmpty(selectedAnatomicRegion.SelectedAnatomicRegionModifier.CodingSchemeVersion)) // anatomicRegionModifierSequenceItem.CodingSchemeVersion = selectedAnatomicRegion.SelectedAnatomicRegionModifier.CodingSchemeVersion; anatomicRegionSequenceItem.AnatomicRegionModifierSequence = new[] { anatomicRegionModifierSequenceItem }; } segmentSequenceItemIod.AnatomicRegionSequence = new[] { anatomicRegionSequenceItem }; } } #endregion segmentSequenceItemIod.RecomendedDisplayCIELabValue = LabColorHelpers.RgbColorToCIELabColor(seg.Color); segmentSequenceItems.Add(segmentSequenceItemIod); // Dimension Organization Sequence item var dimensionOrganizationUid = DicomUid.GenerateUid().UID; var dimensionOrganizationSequenceItem = multiFrameDimensionsModuleIod.CreateDimensionOrganizationSequenceItem(); dimensionOrganizationSequenceItem.DimensionOrganizationUid = dimensionOrganizationUid; dimensionOrganizationSequenceItems.Add(dimensionOrganizationSequenceItem); // Dimension Index Sequence items var dimensionIndexSequenceItem1 = multiFrameDimensionsModuleIod.CreateDimensionIndexSequenceItem(); dimensionIndexSequenceItem1.DimensionIndexPointer = DicomTags.StackId; dimensionIndexSequenceItem1.FunctionalGroupPointer = DicomTags.FrameContentSequence; dimensionIndexSequenceItem1.DimensionOrganizationUid = dimensionOrganizationUid; dimensionIndexSequenceItem1.DimensionDescriptionLabel = "Stack ID"; dimensionIndexSequenceItems.Add(dimensionIndexSequenceItem1); var dimensionIndexSequenceItem2 = multiFrameDimensionsModuleIod.CreateDimensionIndexSequenceItem(); dimensionIndexSequenceItem2.DimensionIndexPointer = DicomTags.InStackPositionNumber; dimensionIndexSequenceItem2.FunctionalGroupPointer = DicomTags.FrameContentSequence; dimensionIndexSequenceItem2.DimensionOrganizationUid = dimensionOrganizationUid; dimensionIndexSequenceItem2.DimensionDescriptionLabel = "In Stack Position Number"; dimensionIndexSequenceItems.Add(dimensionIndexSequenceItem2); var inStackPositionIndex = 0; var presentationImagePolygons = new Dictionary<IPresentationImage, List<PolygonalRoiSegGraphic>>(); foreach (var polygonalSegGraphic in seg.SegGraphics.OfType<PolygonalRoiSegGraphic>()) { var poly = polygonalSegGraphic.PolygonalRoiGraphic.Roi as PolygonalRoi; if (poly != null) 
{ var currentPresentationImage = callback(polygonalSegGraphic.ImageSopInstanceUid, polygonalSegGraphic.ImageFrameNumber); if (presentationImagePolygons.ContainsKey(currentPresentationImage)) presentationImagePolygons[currentPresentationImage].Add(polygonalSegGraphic); else presentationImagePolygons.Add(poly.PresentationImage, new List<PolygonalRoiSegGraphic> { polygonalSegGraphic }); } else { Debug.Assert(false, "Encountered non-polygonal graphic during segmentation serialization"); } } foreach (var presentationImage in presentationImagePolygons.Keys) { var currentImageSopProvider = presentationImage as IImageSopProvider; if (presentationImage == null) { Debug.Assert(false, "Failed to get IImageSopProvider for the current Segmentation graphic"); continue; } Debug.Assert(presentationImagePolygons[presentationImage].FirstOrDefault().ImageFrameNumber == currentImageSopProvider.Frame.FrameNumber, "Stored frame number must match with the current SOP Instance's value"); #region PerFrameFunctionalGroupSequenceItem // Initialize Per Frame Functional Groups here and groups var perFrameFunctionalGroupSequenceItem = multiFrameFunctionalGroupsModuleIod.CreatePerFrameFunctionalGroupsSequence(); if (!docHasOneSeg) { // Pixel Measures Functional Group (per frame) InitializePixelMeasureFunctionalGroup(perFrameFunctionalGroupSequenceItem, currentImageSopProvider.Frame); // Initialize Segmentation Functional Group (per frame) InitializeSegmentationFunctionalGroup(perFrameFunctionalGroupSequenceItem, segmentNumber); // Plane Orientation (Patient) Functional Group InitializePlaneOrientationPatientFunctionalGroup(perFrameFunctionalGroupSequenceItem, currentImageSopProvider.Frame.ImageOrientationPatient); } if (!docHasOneFrame) { // Plain Position Patient Functional Group (per frame) InitializePlanePositionPatientFunctionalGroup(perFrameFunctionalGroupSequenceItem, currentImageSopProvider.Frame.ImagePositionPatient); // Derivation Image Functional Group (per frame) InitializeDerivationImageFunctionalGroup(perFrameFunctionalGroupSequenceItem, currentImageSopProvider.ImageSop, currentImageSopProvider.Frame.FrameNumber); } else { Debug.Assert(firstImageSopProvider.ImageSop.SeriesInstanceUid == currentImageSopProvider.Frame.SeriesInstanceUid && firstImageSopProvider.ImageSop.SopInstanceUid == currentImageSopProvider.ImageSop.SopInstanceUid, "initial image reference and the single image reference must be the same"); } // Initialize Frame Content Functional Group InitializeFrameContentFunctionalGroup(perFrameFunctionalGroupSequenceItem, segmentNumber, ++inStackPositionIndex); perFrameFunctionalGroupSequenceItems.Add(perFrameFunctionalGroupSequenceItem); #endregion PerFrameFunctionalGroupSequenceItem // Store referenced image info in a dictionary for later use { var currentSeriesInstanceUid = currentImageSopProvider.ImageSop.SeriesInstanceUid; var currentSopClassUid = currentImageSopProvider.ImageSop.SopClassUid; var currentSopInstanceUid = currentImageSopProvider.ImageSop.SopInstanceUid; if (!seriesUidToSopClassUidToSopInstanceUid.ContainsKey(currentSeriesInstanceUid)) seriesUidToSopClassUidToSopInstanceUid.Add(currentSeriesInstanceUid, new Dictionary<string, HashSet<string>>()); var sopClassToSopInstanceDic = seriesUidToSopClassUidToSopInstanceUid[currentSeriesInstanceUid]; if (!sopClassToSopInstanceDic.ContainsKey(currentSopClassUid)) sopClassToSopInstanceDic.Add(currentSopClassUid, new HashSet<string>()); sopClassToSopInstanceDic[currentSopClassUid].Add(currentSopInstanceUid); } var polygons = new 
List<IList<PointF>>(); // Get frame's pixel data here foreach (var polygonalSegGraphic in presentationImagePolygons[presentationImage]) { var poly = polygonalSegGraphic.PolygonalRoiGraphic.Roi as PolygonalRoi; if (poly != null) { polygons.Add(poly.Polygon.Vertices); } else { Debug.Assert(false, "Encountered non-polygonal graphic during segmentation serialization"); } } var grayscalePixelData = CreateFramePixelData(presentationImage, polygons); frameBytesList.Add(grayscalePixelData.Raw); } } segmentationImageModuleIod.SegmentSequence = segmentSequenceItems.ToArray(); // Per Frame Functional Groups module multiFrameFunctionalGroupsModuleIod.PerFrameFunctionalGroupsSequence = perFrameFunctionalGroupSequenceItems.ToArray(); #region SharedFunctionalGroupSequence // Shared Functional Group Sequence Item var sharedFunctionalGroupSequenceItem = multiFrameFunctionalGroupsModuleIod.CreateSharedFunctionalGroupsSequence(); if (docHasOneSeg) { Debug.Assert(segmentNumber == 1, "This is for a single segment only"); // Pixel Measures Functional Group (shared) InitializePixelMeasureFunctionalGroup(sharedFunctionalGroupSequenceItem, firstImageSopProvider.Frame); // Initialize Segmentation Functional Group (shared) InitializeSegmentationFunctionalGroup(sharedFunctionalGroupSequenceItem, segmentNumber); // Plane Orientation (Patient) Functional Group InitializePlaneOrientationPatientFunctionalGroup(sharedFunctionalGroupSequenceItem, firstImageSopProvider.Frame.ImageOrientationPatient); } if (docHasOneFrame) { // Plain Position Patient Functional Group InitializePlanePositionPatientFunctionalGroup(sharedFunctionalGroupSequenceItem, firstImageSopProvider.Frame.ImagePositionPatient); // Derivation Image Functional Group InitializeDerivationImageFunctionalGroup(sharedFunctionalGroupSequenceItem, firstImageSopProvider.ImageSop, firstImageSopProvider.Frame.FrameNumber); } multiFrameFunctionalGroupsModuleIod.SharedFunctionalGroupsSequence = sharedFunctionalGroupSequenceItem; #endregion SharedFunctionalGroupSequence // Multi-frame Dimensions module multiFrameDimensionsModuleIod.DimensionIndexSequence = dimensionIndexSequenceItems.ToArray(); multiFrameDimensionsModuleIod.DimensionOrganizationSequence = dimensionOrganizationSequenceItems.ToArray(); multiFrameDimensionsModuleIod.DimensionOrganizationType = "3D"; // Multi-frame Functional Groups module multiFrameFunctionalGroupsModuleIod.SharedFunctionalGroupsSequence = sharedFunctionalGroupSequenceItem; multiFrameFunctionalGroupsModuleIod.PerFrameFunctionalGroupsSequence = perFrameFunctionalGroupSequenceItems.ToArray(); multiFrameFunctionalGroupsModuleIod.NumberOfFrames = perFrameFunctionalGroupSequenceItems.Count; // Specimen Module var srcSpecimenModuleIod = new SpecimenModuleIod(sourceSop.DataSource); var specimenModuleIod = segDocumentIod.SpecimenModuleIod; //specimenModuleIod.ContainerIdentifier = srcSpecimenModuleIod.ContainerIdentifier; specimenModuleIod.IssuerOfTheContainterIdentifier = srcSpecimenModuleIod.IssuerOfTheContainterIdentifier; specimenModuleIod.AlternateContainerIdentifierSequence = srcSpecimenModuleIod.AlternateContainerIdentifierSequence; specimenModuleIod.ContainerTypeCodeSequence = srcSpecimenModuleIod.ContainerTypeCodeSequence; //specimenModuleIod.ContainerDescription = srcSpecimenModuleIod.ContainerDescription; specimenModuleIod.ContainerComponentSequence = srcSpecimenModuleIod.ContainerComponentSequence; specimenModuleIod.SpecimenDescriptionSequence = srcSpecimenModuleIod.SpecimenDescriptionSequence; // Common Instance Reference Module var 
referencedSeriesSequenceItems = new List<ReferencedSeriesSequenceIod>(); foreach ( var seriesToSopClassToSopInstanceDic in seriesUidToSopClassUidToSopInstanceUid.Where(seriesToSopClassToSopInstanceDic => seriesToSopClassToSopInstanceDic.Value != null)) { var referencedSopInstances = new List<ReferencedInstanceSequenceIod>(); foreach (var sopClassToSopInstanceDic in seriesToSopClassToSopInstanceDic.Value.Where(sopClassToSopInstanceDic => sopClassToSopInstanceDic.Value != null)) { referencedSopInstances.AddRange(sopClassToSopInstanceDic.Value.Select(sopInstanceUid => new ReferencedInstanceSequenceIod { ReferencedSopClassUid = sopClassToSopInstanceDic.Key, ReferencedSopInstanceUid = sopInstanceUid })); } if (referencedSopInstances.Count > 0) { referencedSeriesSequenceItems.Add(new ReferencedSeriesSequenceIod { SeriesInstanceUid = seriesToSopClassToSopInstanceDic.Key, ReferencedInstanceSequence = referencedSopInstances.ToArray() }); } } if (referencedSeriesSequenceItems.Count > 0) { var commonInstanceReferenceModuleIod = segDocumentIod.CommonInstanceReferenceModuleIod; commonInstanceReferenceModuleIod.InitializeAttributes(); commonInstanceReferenceModuleIod.ReferencedSeriesSequence = referencedSeriesSequenceItems.ToArray(); } // SOP Common Module var srcSopCommonModuleIod = new SopCommonModuleIod(sourceSop.DataSource); var sopCommonModuleIod = segDocumentIod.SopCommonModuleIod; sopCommonModuleIod.SopClass = SopClass.SegmentationStorage; sopCommonModuleIod.SopInstanceUid = DicomUid.GenerateUid().UID; //sopCommonModuleIod.SpecificCharacterSet = "UTF-8"; // TBD -it's ISO_IR 192 by default sopCommonModuleIod.InstanceCreationDateTime = contentDateTime; sopCommonModuleIod.InstanceCreatorUid = InstanceCreatorUid; sopCommonModuleIod.TimezoneOffsetFromUtc = contentDateTime.ToString("zzz", DateTimeFormatInfo.InvariantInfo); //sopCommonModuleIod.LongitudinalTemporalInformationModified = srcSopCommonModuleIod.LongitudinalTemporalInformationModified; // has a bug in CC // Pixel data { Debug.Assert(frameBytesList.TrueForAll(bytes => bytes.Length == frameBytesList[0].Length), "Allocated buffers for all frames must be of the same size"); var byteBuffer = new byte[frameBytesList[0].Length * frameBytesList.Count]; using (var stream = new MemoryStream(byteBuffer)) { foreach (var frameBytes in frameBytesList) stream.Write(frameBytes, 0, frameBytes.Length); } // Byte Packing // TODO FIXME: we can do in-place byte packing without allocating the second array! var packetBuffer = new byte[(int) Math.Ceiling(byteBuffer.Length/8.0)]; var numberOfFullBytes = byteBuffer.Length/8; for (var i = 0; i < numberOfFullBytes; i++) { var newByte = packetBuffer[i]; for (var y = 0; y < 8; y++) { var bitMask = (byte) (1 << y); newByte = (byte) ((byteBuffer[8*i + y] & 0xFF) > 0 ? newByte | bitMask : newByte & ~bitMask); } packetBuffer[i] = newByte; } // last byte(s) TODO VK: handle padding for non-even number of bytes. make sure padded bits are initialized to 0 if (numberOfFullBytes < packetBuffer.Length) { // Pack leftover bytes ( < 8) Debug.Assert(packetBuffer.Length - numberOfFullBytes == 1, "Wrong destination bytes count during packing"); Debug.Assert(byteBuffer.Length - numberOfFullBytes*8 < 8, "Wrong leftover bytes count during packing"); var newByte = packetBuffer[packetBuffer.Length - 1]; for (var y = numberOfFullBytes * 8; y < byteBuffer.Length; y++) { var bitMask = (byte) (1 << (y%8)); newByte = (byte) ((byteBuffer[y] & 0xFF) > 0 ? 
newByte | bitMask : newByte & ~bitMask); } packetBuffer[packetBuffer.Length - 1] = newByte; } var pdAttribute = new DicomAttributeOW(DicomTags.PixelData); using (var stream = pdAttribute.AsStream()) { stream.Write(packetBuffer, 0, packetBuffer.Length); } multiFrameFunctionalGroupsModuleIod.DicomAttributeProvider[DicomTags.PixelData] = pdAttribute; } dicomFile.MediaStorageSopClassUid = SopClass.SegmentationStorageUid; dicomFile.MediaStorageSopInstanceUid = segDocumentIod.SopInstanceUid; // Update the original document with new values _segDocument.SeriesInstanceUid = segDocumentIod.SeriesInstanceUid; _segDocument.SopInstanceUid = segDocumentIod.SopInstanceUid; return dicomFile; }
private static bool ValidatePassengers(List<Passenger> passengers, out string errorMessage) { string message = string.Empty; bool boolResponse = passengers.TrueForAll(x => { bool response = false; if (x.DateOfBirth > DateTime.Today) message = "Birth Date cannot be greater than today."; else if (string.IsNullOrEmpty(x.FirstName) || x.FirstName.Length < 2) message = "First Name null or invalid. Name should be at least 2 characters long."; else if (string.IsNullOrEmpty(x.LastName) || x.LastName.Length < 2) message = "Last Name null or invalid. Name should be at least 2 characters long."; else if (string.IsNullOrEmpty(x.Gender) || !(string.Equals(x.Gender, "Male", StringComparison.OrdinalIgnoreCase) || string.Equals(x.Gender, "Female", StringComparison.OrdinalIgnoreCase))) message = "Invalid Gender. Allowed values are 'Male' and 'Female'."; else response = true; return response; }); errorMessage = message; return boolResponse; }
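Worth noting about the pattern above: TrueForAll stops at the first element whose predicate returns false, so errorMessage describes only the first invalid passenger. A small illustrative call (the sample data and the Passenger property setters are assumed):

// Assumed sample data: both passengers are invalid, but only the first
// failure is reported because TrueForAll short-circuits on the first false.
var passengers = new List<Passenger>
{
    new Passenger { FirstName = "A", LastName = "Valid", Gender = "Male", DateOfBirth = new DateTime(1990, 1, 1) },
    new Passenger { FirstName = "Ok", LastName = "B", Gender = "Female", DateOfBirth = new DateTime(1985, 5, 5) }
};
string error;
bool allValid = ValidatePassengers(passengers, out error);
// allValid == false; error holds the first-name message for the first passenger only.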
public static void TestMaxItemsPerTask(int maxConcurrency, int maxItemsPerTask, bool completeBeforeTaskWait) { //Create a custom TaskScheduler with specified max concurrency (TrackingTaskScheduler is defined in Common\tools\CommonUtils\TPLTestSchedulers.cs) TrackingTaskScheduler scheduler = new TrackingTaskScheduler(maxConcurrency); //We need to use the custom scheduler to achieve the results. As a by-product, we test to ensure custom schedulers are supported ConcurrentExclusiveSchedulerPair schedPair = new ConcurrentExclusiveSchedulerPair(scheduler, maxConcurrency, maxItemsPerTask); TaskFactory readers = new TaskFactory(schedPair.ConcurrentScheduler); //get reader and writer schedulers TaskFactory writers = new TaskFactory(schedPair.ExclusiveScheduler); //These are threadlocals to ensure that no concurrency side effects occur ThreadLocal<int> itemsExecutedCount = new ThreadLocal<int>(); //Track the items executed by CEScheduler Task ThreadLocal<int> schedulerIDInsideTask = new ThreadLocal<int>(); //Used to store the Scheduler ID observed by a Task Executed by CEScheduler Task //Work done by both reader and writer tasks Action work = () => { //Get the id of the parent Task (which is the task created by the scheduler). Each task run by the scheduler task should //see the same SchedulerID value since they are run on the same thread int id = ((TrackingTaskScheduler)scheduler).SchedulerID.Value; if (id == schedulerIDInsideTask.Value) { //since ids match, this is one more Task being executed by the CEScheduler Task itemsExecutedCount.Value = ++itemsExecutedCount.Value; //This does not need to be thread safe since we are looking to ensure that only n number of tasks were executed and not the order //in which they were executed. Also asserting inside the thread is fine since we just want the test to be marked as failure Assert.True(itemsExecutedCount.Value <= maxItemsPerTask, string.Format("itemsExecutedCount={0} cant be greater than maxValue={1}. Parent TaskID={2}", itemsExecutedCount, maxItemsPerTask, id)); } else { //Since ids dont match, this is the first Task being executed in the CEScheduler Task schedulerIDInsideTask.Value = id; //cache the scheduler ID seen by the thread, so other tasks running in same thread can see this itemsExecutedCount.Value = 1; } //Give enough time for a Task to stay around, so that other tasks will be executed by the same CEScheduler Task //or else the CESchedulerTask will die and each Task might get executed by a different CEScheduler Task. 
This does not affect the //verifications, but its increases the chance of finding a bug if the maxItemPerTask is not respected new ManualResetEvent(false).WaitOne(20); }; List<Task> taskList = new List<Task>(); int maxConcurrentTasks = maxConcurrency * maxItemsPerTask * 5; int maxExclusiveTasks = maxConcurrency * maxItemsPerTask * 2; // Schedule Tasks in both concurrent and exclusive mode for (int i = 0; i < maxConcurrentTasks; i++) taskList.Add(readers.StartNew(work)); for (int i = 0; i < maxExclusiveTasks; i++) taskList.Add(writers.StartNew(work)); if (completeBeforeTaskWait) { schedPair.Complete(); schedPair.Completion.Wait(); Assert.True(taskList.TrueForAll(t => t.IsCompleted), "All tasks should have completed for scheduler to complete"); } //finally wait for all of the tasks, to ensure they all executed properly Task.WaitAll(taskList.ToArray()); if (!completeBeforeTaskWait) { schedPair.Complete(); schedPair.Completion.Wait(); Assert.True(taskList.TrueForAll(t => t.IsCompleted), "All tasks should have completed for scheduler to complete"); } }
bool IsFlush(List<Pair<int, string>> cards) { var suit = cards.First().Second; return cards.TrueForAll(c => c.Second == suit); }
public void RaciveHartbeat(AppendEntryMessage appendEntry) { if (_serverIdentifier.Equals(appendEntry.Leader)) return; if (_options.UseLogging) _logger.LogInformation($"Processing {nameof(RaciveHartbeat)} \n\t\t Leader {appendEntry.Leader} \n\t\t Term {appendEntry.Term}"); // If the request is not in our term, reject it if (appendEntry.Term < _election.CurrentTerm) { _entryReply.OnNext(new AppendEntryResultMessage() { Term = _election.CurrentTerm, Success = false, }); return; } // Update to follow the new term if (appendEntry.Term > _election.CurrentTerm) { _election.CurrentTerm = appendEntry.Term; _election.VotedFor = appendEntry.Leader; _state?.OnNext(ServerStateType.Follower); _entryReply.OnNext(new AppendEntryResultMessage() { Term = _election.CurrentTerm, Success = false, }); return; } _hartbeat.Reset(); var term = _election.CurrentTerm; var outOfSync = false; if (_logReplication.Contains(appendEntry.PreviousLogIndex)) { // If the appendEntry from the leader is in sync with us, the follower if (_logReplication.Term(appendEntry.PreviousLogIndex) == appendEntry.PreviousLogTerm) { // If we need to roll back the logs var lastApplied = appendEntry.PreviousLogIndex + 1; if (lastApplied != _logReplication.LastApplied) { _logReplication.Rollback(lastApplied); } // We can start processing the logs var successes = new List<bool>(); if (appendEntry.Entries != null) { foreach (var log in appendEntry.Entries) { successes.Add(_logReplication.Append(log)); } } // Confirm the log append _entryReply.OnNext(new AppendEntryResultMessage() { Term = term, LogIndex = _logReplication.LastApplied, LogTerm = _logReplication.LastTerm, Success = successes.TrueForAll(p => p), From = _serverIdentifier }); } else { outOfSync = true; } } else { outOfSync = true; } // Reject the append entry as the leader is out of sync with the follower if (outOfSync) { // Keep moving the log index back until the follower and the leader agree on log consistency. // This lets the leader overwrite the follower's logs to bring it back into full consistency. var previousIndex = _logReplication.LastApplied; if (appendEntry.PreviousLogIndex <= previousIndex) { previousIndex = appendEntry.PreviousLogIndex - 1; } var previousTerm = _logReplication.Term(previousIndex); // Update the leader with the last index and term of a successful append _entryReply.OnNext(new AppendEntryResultMessage() { Term = term, LogIndex = previousIndex, LogTerm = previousTerm, Success = false, From = _serverIdentifier }); } var logsToProcess = _logReplication.ToCommit(appendEntry.LeaderCommit, appendEntry.Term); // Process the logs to commit as a task so as not to block the RPC ProcessLogs(logsToProcess); }
List<TextWord> ParseLine(IDocument document) { List<TextWord> words = new List<TextWord>(); HighlightColor markNext = null; currentOffset = 0; currentLength = 0; UpdateSpanStateVariables(); int currentLineLength = currentLine.Length; int currentLineOffset = currentLine.Offset; for (int i = 0; i < currentLineLength; ++i) { char ch = document.GetCharAt(currentLineOffset + i); switch (ch) { case '\n': case '\r': PushCurWord(document, ref markNext, words); ++currentOffset; break; case ' ': PushCurWord(document, ref markNext, words); if (activeSpan != null && activeSpan.Color.HasBackground) { words.Add(new TextWord.SpaceTextWord(activeSpan.Color)); } else { words.Add(TextWord.Space); } ++currentOffset; break; case '\t': PushCurWord(document, ref markNext, words); if (activeSpan != null && activeSpan.Color.HasBackground) { words.Add(new TextWord.TabTextWord(activeSpan.Color)); } else { words.Add(TextWord.Tab); } ++currentOffset; break; default: { // handle escape characters char escapeCharacter = '\0'; if (activeSpan != null && activeSpan.EscapeCharacter != '\0') { escapeCharacter = activeSpan.EscapeCharacter; } else if (activeRuleSet != null) { escapeCharacter = activeRuleSet.EscapeCharacter; } if (escapeCharacter != '\0' && escapeCharacter == ch) { // we found the escape character if (activeSpan != null && activeSpan.End != null && activeSpan.End.Length == 1 && escapeCharacter == activeSpan.End[0]) { // the escape character is a end-doubling escape character // it may count as escape only when the next character is the escape, too if (i + 1 < currentLineLength) { if (document.GetCharAt(currentLineOffset + i + 1) == escapeCharacter) { currentLength += 2; PushCurWord(document, ref markNext, words); ++i; continue; } } } else { // this is a normal \-style escape ++currentLength; if (i + 1 < currentLineLength) { ++currentLength; } PushCurWord(document, ref markNext, words); ++i; continue; } } // highlight digits if (!inSpan && (Char.IsDigit(ch) || (ch == '.' 
&& i + 1 < currentLineLength && Char.IsDigit(document.GetCharAt(currentLineOffset + i + 1)))) && currentLength == 0) { bool ishex = false; bool isfloatingpoint = false; if (ch == '0' && i + 1 < currentLineLength && Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)) == 'X') { // hex digits const string hex = "0123456789ABCDEF"; ++currentLength; ++i; // skip 'x' ++currentLength; ishex = true; while (i + 1 < currentLineLength && hex.IndexOf(Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1))) != -1) { ++i; ++currentLength; } } else { ++currentLength; while (i + 1 < currentLineLength && Char.IsDigit(document.GetCharAt(currentLineOffset + i + 1))) { ++i; ++currentLength; } } if (!ishex && i + 1 < currentLineLength && document.GetCharAt(currentLineOffset + i + 1) == '.') { isfloatingpoint = true; ++i; ++currentLength; while (i + 1 < currentLineLength && Char.IsDigit(document.GetCharAt(currentLineOffset + i + 1))) { ++i; ++currentLength; } } if (i + 1 < currentLineLength && Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)) == 'E') { isfloatingpoint = true; ++i; ++currentLength; if (i + 1 < currentLineLength && (document.GetCharAt(currentLineOffset + i + 1) == '+' || document.GetCharAt(currentLine.Offset + i + 1) == '-')) { ++i; ++currentLength; } while (i + 1 < currentLine.Length && Char.IsDigit(document.GetCharAt(currentLineOffset + i + 1))) { ++i; ++currentLength; } } if (i + 1 < currentLine.Length) { char nextch = Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)); if (nextch == 'F' || nextch == 'M' || nextch == 'D') { isfloatingpoint = true; ++i; ++currentLength; } } if (!isfloatingpoint) { bool isunsigned = false; if (i + 1 < currentLineLength && Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)) == 'U') { ++i; ++currentLength; isunsigned = true; } if (i + 1 < currentLineLength && Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)) == 'L') { ++i; ++currentLength; if (!isunsigned && i + 1 < currentLineLength && Char.ToUpper(document.GetCharAt(currentLineOffset + i + 1)) == 'U') { ++i; ++currentLength; } } } words.Add(new TextWord(document, currentLine, currentOffset, currentLength, DigitColor, false)); currentOffset += currentLength; currentLength = 0; continue; } // Check for SPAN ENDs if (inSpan) { if (activeSpan.End != null && activeSpan.End.Length > 0) { if (MatchExpr(currentLine, activeSpan.End, i, document, activeSpan.IgnoreCase)) { PushCurWord(document, ref markNext, words); string regex = GetRegString(currentLine, activeSpan.End, i, document); currentLength += regex.Length; words.Add(new TextWord(document, currentLine, currentOffset, currentLength, activeSpan.EndColor, false)); currentOffset += currentLength; currentLength = 0; i += regex.Length - 1; currentSpanStack.Pop(); UpdateSpanStateVariables(); continue; } } } // check for SPAN BEGIN if (activeRuleSet != null) { foreach (Span span in activeRuleSet.Spans) { if ((!span.IsBeginSingleWord || currentLength == 0) && (!span.IsBeginStartOfLine.HasValue || span.IsBeginStartOfLine.Value == (currentLength == 0 && words.TrueForAll(delegate(TextWord textWord) { return textWord.Type != TextWordType.Word; }))) && MatchExpr(currentLine, span.Begin, i, document, activeRuleSet.IgnoreCase)) { PushCurWord(document, ref markNext, words); string regex = GetRegString(currentLine, span.Begin, i, document); if (!OverrideSpan(regex, document, words, span, ref i)) { currentLength += regex.Length; words.Add(new TextWord(document, currentLine, currentOffset, currentLength, span.BeginColor, false)); 
currentOffset += currentLength; currentLength = 0; i += regex.Length - 1; if (currentSpanStack == null) { currentSpanStack = new SpanStack(); } currentSpanStack.Push(span); span.IgnoreCase = activeRuleSet.IgnoreCase; UpdateSpanStateVariables(); } goto skip; } } } // check if the char is a delimiter if (activeRuleSet != null && (int)ch < 256 && activeRuleSet.Delimiters[(int)ch]) { PushCurWord(document, ref markNext, words); if (currentOffset + currentLength +1 < currentLine.Length) { ++currentLength; PushCurWord(document, ref markNext, words); goto skip; } } ++currentLength; skip: continue; } } } PushCurWord(document, ref markNext, words); OnParsedLine(document, currentLine, words); return words; }
public static bool HasAllTags(List<string> tags){ return tags.TrueForAll (tag => HasTag (tag)); //TrueForAll returns true if the list has no elements. }
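As the comment notes, TrueForAll is vacuously true for an empty list, so HasAllTags returns true when called with no tags; a small illustrative check (not from the source):

// Vacuous truth: with no elements, the predicate is never invoked, so the result is true.
var empty = new List<string>();
Console.WriteLine(empty.TrueForAll(tag => false)); // prints True
Console.WriteLine(HasAllTags(new List<string>())); // also True, even though no tag was checked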
private static void TestDescriptorIsExceptionSafeCore(DiagnosticDescriptor descriptor) { var localizableTitle = descriptor.Title; var localizableMessage = descriptor.MessageFormat; var localizableDescription = descriptor.Description; // Verify exceptions from LocalizableResourceString don't go unhandled. var title = localizableTitle.ToString(); var message = localizableMessage.ToString(); var description = localizableDescription.ToString(); // Verify exceptions from LocalizableResourceString are raised if OnException is set. var exceptions = new List<Exception>(); var handler = new EventHandler<Exception>((sender, ex) => exceptions.Add(ex)); localizableTitle.OnException += handler; localizableMessage.OnException += handler; localizableDescription.OnException += handler; // Access and evaluate localizable fields. var unused1 = localizableTitle.ToString(); var unused2 = localizableMessage.ToString(); var unused3 = localizableDescription.ToString(); Assert.Equal(3, exceptions.Count); // Verify DiagnosticAnalyzer.SupportedDiagnostics is also exception safe. var analyzer = new MyAnalyzer(descriptor); var exceptionDiagnostics = new List<Diagnostic>(); Action<Exception, DiagnosticAnalyzer, Diagnostic> onAnalyzerException = (ex, a, diag) => exceptionDiagnostics.Add(diag); var analyzerExecutor = AnalyzerExecutor.CreateForSupportedDiagnostics(onAnalyzerException, AnalyzerManager.Instance); var descriptors = AnalyzerManager.Instance.GetSupportedDiagnosticDescriptors(analyzer, analyzerExecutor); Assert.Equal(1, descriptors.Length); Assert.Equal(descriptor.Id, descriptors[0].Id); // Access and evaluate localizable fields. unused1 = descriptors[0].Title.ToString(); unused2 = descriptors[0].MessageFormat.ToString(); unused3 = descriptors[0].Description.ToString(); // Verify logged analyzer exception diagnostics. Assert.Equal(3, exceptionDiagnostics.Count); Assert.True(exceptionDiagnostics.TrueForAll(AnalyzerExecutor.IsAnalyzerExceptionDiagnostic)); }
private ToolStripMenuItem GeneratePoolFilterItem(Pool p) { List<ToolStripMenuItem> subItems = new List<ToolStripMenuItem>(); foreach (Host h in p.Connection.Cache.Hosts) { var hostItem = GenerateFilterItem(h, h.uuid); hostItem.Checked = HostCheckStates.ContainsKey(h.uuid); subItems.Add(hostItem); } var poolItem = GenerateFilterItem(p, p.uuid); poolItem.DropDownItems.AddRange(subItems.ToArray()); poolItem.CheckState = subItems.TrueForAll(item => item.Checked) ? CheckState.Checked : subItems.TrueForAll(item => !item.Checked) ? CheckState.Unchecked : CheckState.Indeterminate; poolItem.DropDownItemClicked += poolItem_DropDownItemClicked; return poolItem; }
private static bool IsAnonymousUserAuthorizedForMultipleSecurityActions(SecurityActions securityRequests, bool isPrivateAlbum, IGallerySettings gallerySettings, SecurityActionsOption secActionsOption) { // There are multiple security actions in securityAction enum. Iterate through each one and determine if the user // has permission for it. List<bool> authResults = new List<bool>(); foreach (SecurityActions securityAction in SecurityActionEnumHelper.ParseSecurityAction(securityRequests)) { authResults.Add(IsAnonymousUserAuthorizedForSingleSecurityAction(securityAction, isPrivateAlbum, gallerySettings)); } if (secActionsOption == SecurityActionsOption.RequireAll) { return (authResults.Count > 0 ? authResults.TrueForAll(delegate(bool value) { return value; }) : false); } else if (secActionsOption == SecurityActionsOption.RequireOne) { return authResults.Contains(true); } else { throw new InvalidEnumArgumentException("secActionsOption", (int)secActionsOption, typeof(SecurityActionsOption)); } }