/// <summary>
/// Adds a node with the given name, doing nothing when a node
/// with that name is already present.
/// </summary>
/// <param name="name">Single-character name of the node.</param>
public void TryAddNode(char name)
{
    bool alreadyPresent = Nodes.Any(existing => existing.Name == name);
    if (alreadyPresent)
    {
        return;
    }

    Nodes.Add(new Node(name));
}
/// <summary>
/// Add node to the Graph. The node is skipped when another node
/// with the same Id already exists.
/// </summary>
/// <param name="node">Node to add.</param>
public void AddNode(Node<T> node)
{
    bool duplicate = Nodes.Any(existing => existing.Id == node.Id);
    if (duplicate)
    {
        return;
    }

    Nodes.Add(node);
}
/// <summary>
/// Registers the built-in default handler for every node type that occurs
/// in the process. The handler map is only replaced when every node type
/// is known, so a failure leaves the previous handlers untouched.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the process contains a node type with no default handler.
/// </exception>
public void SetDefaultHandlers()
{
    var defaultNodeHandlers = new Dictionary<string, INodeHandler>()
    {
        { "startEvent", new DefaultStartHandler() },
        { "endEvent", new DefaultEndHandler() },
        { "task", new DefaultTaskHandler() },
        { "sequenceFlow", new DefaultSequenceHandler() },
        { "businessRuleTask", new DefaultBusinessRuleHandler() },
        { "exclusiveGateway", new DefaultExclusiveGatewayHandler() },
        { "inclusiveGateway", new DefaultInclusiveGatewayHandler() },
        { "scriptTask", new DefaultScriptTaskHandler() }
    };

    // Build the map first; assign to the field only after every node type
    // has been resolved, matching the original all-or-nothing behaviour.
    var handlers = new Dictionary<string, INodeHandler>();
    foreach (string nodeType in Nodes.Values.Select(n => n.NodeType).Distinct())
    {
        // TryGetValue avoids the original's separate All() pass plus
        // double dictionary lookup.
        if (!defaultNodeHandlers.TryGetValue(nodeType, out var handler))
        {
            // InvalidOperationException is more specific than bare Exception
            // and still caught by any existing catch (Exception) callers.
            throw new InvalidOperationException("Process contains an unknown node type");
        }

        handlers.Add(nodeType, handler);
    }

    nodeHandlers = handlers;
}
/// <summary>
/// Returns a list of ISerializable items which exist in the preloaded
/// trace data but do not exist in the current CallSite data.
/// The historical trace data is wiped afterwards so it cannot be
/// re-used by a future update.
/// </summary>
/// <returns>The orphaned serializables; empty when there is no historical data.</returns>
internal IList<ISerializable> GetOrphanedSerializablesAndClearHistoricalTraceData()
{
    var orphans = new List<ISerializable>();
    if (historicalTraceData == null)
    {
        return orphans;
    }

    // A guid present in the historical trace data with no matching node
    // in the workspace means its serializables are orphaned.
    foreach (var nodeData in historicalTraceData)
    {
        var nodeGuid = nodeData.Key;
        bool nodeStillExists = Nodes.Any(n => n.GUID == nodeGuid);
        if (!nodeStillExists)
        {
            orphans.AddRange(
                nodeData.Value.SelectMany(CallSite.GetAllSerializablesFromSingleRunTraceData));
        }
    }

    // Reconciliation complete — drop the historical data.
    historicalTraceData = null;
    return orphans;
}
/// <summary>
/// Take a taxi to the node selected by number. Does nothing when the
/// node number is unknown.
/// </summary>
/// <param name="parNodeNumber">The node number.</param>
public void SelectNodeByNumber(int parNodeNumber)
{
    bool nodeKnown = Nodes.Any(x => x.NodeNumber == parNodeNumber);
    if (nodeKnown)
    {
        Lua.Instance.Execute($"TakeTaxiNode({parNodeNumber})");
    }
}
/// <summary>
/// Returns the data of the lowest common ancestor of the two given values,
/// or an error message when either value is not in the tree.
/// </summary>
/// <param name="node1">First node value.</param>
/// <param name="node2">Second node value.</param>
public string Lca(int node1, int node2)
{
    // Short-circuits: the second lookup only runs when the first succeeds.
    if (!Nodes.Any(n => n.Equals(node1)) || !Nodes.Any(n => n.Equals(node2)))
    {
        return "Nodes are not present in the tree.";
    }

    return CalculateLca(Root, node1, node2).Data.ToString();
}
// Lays out all loaded nodes left-to-right, assigning X from the previous
// node's right edge and Y based on whether node/visual heights are uniform.
// Always ends by calling UpdateVisual(), even when no node has a visual yet.
public void ArrangeNodes()
{
    // Only arrange once every node has a materialized visual.
    if (Nodes.All(x => x.Visual != null))
    {
        // NOTE(review): filter uses lowercase 'nodes' while the guard uses
        // 'Nodes' — presumably the same collection via a property/backing
        // field pair; confirm they cannot diverge.
        var loadedNodes = nodes.Where(x => x.Visual != null).ToList();
        //check if nodes have uniform height
        bool uniformNodeHeights = loadedNodes.Select(x => x.Height).Distinct().Count() == 1;
        var visuals = loadedNodes.Select(x => x.Visual).ToList();
        bool uniformVisualHeights = visuals.Select(x => x.Height).Distinct().Count() == 1;
        double tallestNodeHeight = loadedNodes.Max(x => x.Height);
        // NOTE(review): tallestNode and totalHeight are computed but never
        // used below — dead locals or unfinished layout logic; confirm.
        INodeViewModel tallestNode = loadedNodes.First(x => x.Height == tallestNodeHeight);
        double tallestVisualHeight = visuals.Max(x => x.ActualHeight);
        var tallestVisual = visuals.First(x => x.ActualHeight == tallestVisualHeight);
        var totalHeight = tallestNodeHeight / 2 + tallestVisualHeight;
        double xPos = 0;
        foreach (var node in loadedNodes)
        {
            node.X = xPos;
            if (!uniformNodeHeights && !uniformVisualHeights)
            {
                // Center the node vertically against the tallest node.
                node.Y = tallestNodeHeight / 2 - node.Height / 2;
            }
            else if (!uniformVisualHeights)
            {
                // NOTE(review): both branches assign the same value — either
                // the tallestVisual case was meant to differ, or the
                // condition is redundant. Confirm intended behaviour.
                if (node.Visual == tallestVisual)
                {
                    node.Y = 0;
                }
                else
                {
                    node.Y = 0;
                }
            }
            else if (!uniformNodeHeights)
            {
                // NOTE(review): intentionally empty? Nodes in this case keep
                // whatever Y they already had — verify this is deliberate.
            }
            else
            {
                node.Y = 0;
            }
            // Next node starts where this node's right edge ends.
            xPos = node.RightEdgeViewModel.X + node.RightEdgeViewModel.Width;
        }
    }
    UpdateVisual();
}
/// <summary>
/// Deletes the node with the given ID together with all arcs
/// attached to it (incoming and outgoing).
/// </summary>
/// <param name="nodeID">ID of the node to delete.</param>
/// <exception cref="ArgumentException">Thrown when no node with the given ID exists.</exception>
public void DeleteNode(int nodeID)
{
    bool exists = Nodes.Any(x => x.ID == nodeID);
    if (!exists)
    {
        throw new ArgumentException(ErrMsg + "Мы пытаемся удалить несуществующую вершину");
    }

    // First remove every arc touching the node.
    var attachedArcs = Arcs.Where(x => x.From == nodeID || x.To == nodeID).ToList();
    for (int i = attachedArcs.Count - 1; i >= 0; i--)
    {
        DeleteArc(attachedArcs[i]);
    }

    // Then remove the node itself and mark the graph dirty.
    Nodes.Remove(Nodes.Single(x => x.ID == nodeID));
    IsChanged = true;
}
/// <summary>
/// Toggles visibility of the selected nodes' elements: hides them when
/// more nodes are visible than hidden, shows them otherwise. Only acts
/// when toggling is allowed and the selection is homogeneous (all group
/// nodes or all element nodes).
/// </summary>
public virtual void ToggleVisibility()
{
    if (!CanToggleVisibility())
    {
        return;
    }

    bool homogeneousSelection =
        Nodes.All(x => x is ElementGroupNode) ||
        Nodes.All(x => x is ProjectElementNode);
    if (!homogeneousSelection)
    {
        return;
    }

    var elements = GetChildHierarchy().OfType<ProjectElementNode>().Select(x => x.Element);
    var states = Nodes.Select(x => x.GetVisibilityState()).ToList();
    int hiddenCount = states.Count(s => s == VisibilityState.Hidden ||
                                        s == VisibilityState.HiddenNotVisible);
    int visibleCount = states.Count - hiddenCount;

    // Majority rules: hide when the visible nodes outnumber the hidden ones.
    Manager.SetElementsHidden(elements, visibleCount > hiddenCount);
}
/// <summary>
/// Rebuilds the cached poses of all movable nodes, with positions and
/// up vectors expressed in the entity's local space.
/// </summary>
private void CacheMovableData()
{
    // Nothing to cache without a definition or any live node.
    if (Nodes == null || Definition == null || Nodes.All(x => x == null))
    {
        return;
    }

    _movableNodeData.Clear();
    int count = Math.Min(Definition.Nodes.Count, Nodes.Length);
    for (var i = 0; i < count; i++)
    {
        var node = Nodes[i];
        if (!Definition.Nodes[i].Movable || node == null)
        {
            continue;
        }

        var inv = Entity.PositionComp.WorldMatrixInvScaled;
        _movableNodeData[(uint)i] = new MyObjectBuilder_BendyComponent.NodePose
        {
            Index = (uint)i,
            Position = (Vector3)Vector3D.Transform(node.Position, inv),
            Up = (Vector3)Vector3D.TransformNormal(node.Up, inv)
        };
    }
}
/// <summary>
/// Builds a human-readable dump of the network: its inputs, outputs
/// (with their connected inputs) and connectors. A network consisting
/// only of container nodes (chests) gets a short placeholder line.
/// </summary>
/// <returns>The formatted description of the network.</returns>
public string Print()
{
    StringBuilder graph = new StringBuilder();

    if (Nodes.All(n => n is ContainerNode))
    {
        // Only chests — no topology worth printing.
        graph.Append($"Network {ID} is only chests.");
        return graph.ToString();
    }

    graph.Append("\n----------------------------");
    // Fixed typo in original output: "Priting" -> "Printing".
    graph.Append($"\nPrinting Network [{ID}] {GetHashCode()}: \n");
    graph.Append("Networks: \n");

    graph.Append("Inputs: \n");
    foreach (InputPipeNode input in Inputs)
    {
        // Chained Append avoids building an intermediate concatenated string.
        graph.Append(input.Print()).Append(", ");
    }
    graph.Append("\n");

    graph.Append("Outputs: \n");
    foreach (OutputPipeNode output in Outputs)
    {
        graph.Append(output.Print()).Append(", \n");
        foreach (InputPipeNode input in output.ConnectedInputs.Keys)
        {
            // NOTE(review): header repeats for every connected input;
            // kept as-is in case the exact format is relied upon.
            graph.Append("Output Connected Inputs: \n");
            graph.Append(input.Print()).Append(" | ");
        }
        graph.Append("\n");
    }

    graph.Append("Connectors: \n");
    foreach (ConnectorPipeNode conn in Connectors)
    {
        graph.Append(conn.Print()).Append(", ");
    }
    graph.Append("\n");

    return graph.ToString();
}
/// <summary>
/// True when every node has reached its final state.
/// </summary>
public bool IsSolved()
{
    return !Nodes.Any(node => !node.IsFinal());
}
/// <summary>
/// Checks whether every node in the row shares one color: all white or
/// all black. An empty row vacuously counts as single-colored.
/// </summary>
private bool IsWholeRowInOneColor()
{
    bool Uniform(NodeState state) => Nodes.All(n => n.State == state);

    return Uniform(NodeState.White) || Uniform(NodeState.Black);
}
/// <summary>
/// Loads the graph's nodes and transitions from the given XML element and
/// validates the result: at most one node may be marked initial, and every
/// transition must reference existing source and target nodes. Transitions
/// without a description inherit their target node's description.
/// </summary>
/// <param name="element">XML element containing "node" and "edge" children.</param>
/// <param name="keyMapping">Key mapping forwarded to node/transition factories.</param>
/// <exception cref="FormatException">
/// Thrown when several nodes are declared initial, or a transition references
/// an unknown source or target node.
/// </exception>
public override void Load(XElement element, KeyMapping keyMapping)
{
    base.Load(element, keyMapping);

    // Hoisted: the original resolved the default namespace twice.
    var ns = element.GetDefaultNamespace().NamespaceName;

    Nodes = element
        .Elements(XName.Get("node", ns))
        .Select(x => Node.Create(x, keyMapping))
        .ToArray();

    Transitions = element
        .Elements(XName.Get("edge", ns))
        .Select(x => Transition.Create(x, keyMapping))
        .Distinct(new TransitionEqualityComparer())
        .ToArray();

    var initialNodes = Nodes
        .Where(n => n.IsInitial)
        .ToArray();

    if (initialNodes.Length == 1)
    {
        InitialNode = initialNodes[0];
    }
    else if (initialNodes.Length > 1)
    {
        throw new FormatException("Several nodes are declared as initial, however there can be at most one.");
    }

    // Reject transitions that are not properly connected to known nodes.
    foreach (var t in Transitions)
    {
        if (Nodes.All(n => n.Identifier != t.Source))
        {
            throw new FormatException(string.Format("Source of transition '{0}' is invalid.", t.Description));
        }

        if (Nodes.All(n => n.Identifier != t.Target))
        {
            throw new FormatException(string.Format("Target of transition '{0}' is invalid.", t.Description));
        }

        // Fall back to the target node's description when the edge has none.
        if (t.Description == null)
        {
            t.UpdateDescription(Nodes.First(n => n.Identifier == t.Target).Description);
        }
    }

    // Removed: a large commented-out block that checked for duplicate
    // transitions sharing source and description (dead code).
}
// Adds a directed edge from 'from' to 'to' carrying 'data'. Self-edges are
// silently ignored. If the new edge closes a cycle, the node feeding the
// cycle's entry point gets IsLoop = true. With CheckArguments enabled,
// null/unknown nodes and duplicate edges throw; otherwise duplicates are
// silently skipped.
public void AddDirectedEdge(IChainNode<TN> from, IChainNode<TN> to, TE data)
{
    if (CheckArguments)
    {
        if (from == null)
        {
            throw new ArgumentNullException(nameof(from));
        }
        if (to == null)
        {
            throw new ArgumentNullException(nameof(to));
        }
        // Both endpoints must already be registered in the graph.
        if (Nodes.All(x => x.Id != from.Id))
        {
            throw new UnknownNodeException(from.Id);
        }
        if (Nodes.All(x => x.Id != to.Id))
        {
            throw new UnknownNodeException(to.Id);
        }
    }
    if (from.Id == to.Id)
    {
        //skip infinity loop
        return;
    }
    var forwardNodeIds = new List<string>();
    var backwardNodeIds = new List<string>();
    var isLoop = false;
    forwardNodeIds.Add(to.Id);
    // Walk forward from 'to'; reaching 'from' means this edge closes a cycle.
    VisitNodes(to, node =>
    {
        forwardNodeIds.Add(node.Id);
        if (node.Id == from.Id)
        {
            isLoop = true;
        }
    });
    if (isLoop)
    {
        //search for nodes in loop
        backwardNodeIds.Add(from.Id);
        VisitNodesBack(from, node =>
        {
            backwardNodeIds.Add(node.Id);
        });
        // Nodes reachable both forward from 'to' and backward from 'from'
        // are the members of the cycle.
        var loopIds = forwardNodeIds.Union(backwardNodeIds)
            .Where(x => forwardNodeIds.Contains(x) && backwardNodeIds.Contains(x))
            .Distinct().ToList();
        //try to find entry - it should be node nearest to head
        var heads = GetHeads();
        // 'deep' records the smallest visit depth per loop node; nodes seen
        // at more than one depth are collected in multiInIds.
        var deep = new Dictionary<string, int>();
        var multiInIds = new List<string>();
        foreach (var head in heads)
        {
            var lvl = 0;
            VisitNodes(head, node =>
            {
                lvl++;
                if (loopIds.Contains(node.Id))
                {
                    if (deep.ContainsKey(node.Id))
                    {
                        deep[node.Id] = Math.Min(deep[node.Id], lvl);
                        multiInIds.Add(node.Id);
                    }
                    else
                    {
                        deep.Add(node.Id, lvl);
                    }
                }
            });
        }
        // If the only multi-entry node is 'from' itself, treat 'to' as the
        // entry candidate with top priority (depth -1).
        if (multiInIds.Count == 1 && multiInIds[0] == from.Id)
        {
            multiInIds.Add(to.Id);
            if (deep.ContainsKey(to.Id))
            {
                deep[to.Id] = -1;
            }
            else
            {
                deep.Add(to.Id, -1);
            }
        }
        // No multi-entry node found at all: fall back to 'to' as the entry.
        if (!multiInIds.Any())
        {
            multiInIds.Add(to.Id);
            if (deep.ContainsKey(to.Id))
            {
                deep[to.Id] = -1;
            }
            else
            {
                deep.Add(to.Id, -1);
            }
        }
        // The loop entry is the candidate nearest to a head (minimum depth).
        var minLvl = deep.Where(x => multiInIds.Contains(x.Key)).Select(x => x.Value).Min();
        var loopEntry = this[deep.Where(x => multiInIds.Contains(x.Key)).First(x => x.Value == minLvl).Key];
        // Mark the loop node that feeds the entry; if no existing edge inside
        // the loop targets the entry, the new edge's source is the feeder.
        // NOTE(review): Single() throws if several loop edges target the
        // entry — presumably impossible by construction; confirm.
        var nodeToEntry = Edges.Any(x => loopIds.Contains(x.FromId) && x.ToId == loopEntry.Id) ?
                          Edges.Single(x => loopIds.Contains(x.FromId) && x.ToId == loopEntry.Id).FromId : from.Id;
        this[nodeToEntry].IsLoop = true;
    }
    if (CheckArguments)
    {
        if (Edges.Any(x => x.FromId == from.Id && x.ToId == to.Id))
        {
            throw new DuplicateEdgeException(from.Id, to.Id);
        }
    }
    else
    {
        //check and skip
        if (Edges.Any(x => x.FromId == from.Id && x.ToId == to.Id))
        {
            return;
        }
    }
    Edges.Add(new EdgeType<TE>(from.Id, to.Id, data));
}
/// <summary>
/// True when there are no nodes at all, or when every node is itself empty.
/// </summary>
public bool IsEmpty()
{
    if (Nodes == null || Nodes.Length == 0)
    {
        return true;
    }

    return Nodes.All(node => node.IsEmpty());
}
// Serializes the model: relocation-table header (non-field objects only),
// then node list, material list, morpher mesh count and the extension
// block, all via scheduled offset-aligned writes that are flushed at the
// end for non-field objects. Statement order here is the binary layout —
// do not reorder.
internal override void WriteContent(EndianBinaryWriter writer, IOContext context)
{
    if (!context.IsFieldObject)
    {
        // TODO: implement this properly
        writer.OffsetPositions.Clear();
    }

    var start = writer.Position;

    if (!context.IsFieldObject)
    {
        // Relocation table needs this base offset
        writer.PushBaseOffset(start + 16);

        // Write relocation table last (lowest priority)
        writer.ScheduleWriteOffsetAligned(-1, 16, () =>
        {
            // Encode & write relocation table
            var encodedRelocationTable =
                RelocationTableEncoding.Encode(writer.OffsetPositions.Select(x => (int)x).ToList(), (int)writer.BaseOffset);
            writer.Write(encodedRelocationTable);

            // Kind of a hack here, but we need to write the relocation table size after the offset
            // Seeing as we have the offset positions required to encode the relocation table only at the very end
            // I can't really think of a better solution
            var end = writer.Position;
            writer.SeekBegin(start + 4);
            writer.Write(encodedRelocationTable.Length);
            writer.SeekBegin(end);
        });
    }
    else
    {
        // Field objects carry no relocation table: two placeholder zeros.
        writer.Write(0);
        writer.Write(0);
    }

    writer.Align(16);

    // Node list: count followed by each node (node gets (index, Nodes) as context).
    writer.ScheduleWriteOffsetAligned(16, () =>
    {
        writer.Write(Nodes.Count);
        writer.Align(16);
        for (int i = 0; i < Nodes.Count; i++)
        {
            writer.WriteObject(Nodes[i], (i, Nodes));
        }
    });

    // Material list: count followed by each material.
    writer.ScheduleWriteOffsetAligned(16, () =>
    {
        writer.Write(Materials.Count);
        for (int i = 0; i < Materials.Count; i++)
        {
            writer.WriteObject(Materials[i], i);
        }
    });

    // Total number of meshes (across all node geometries) whose type has morphers.
    var morpherMeshCount = Nodes.Where(x => x.Geometry != null && x.Geometry.Meshes?.Count > 0)
                                .Sum(x => x.Geometry.Meshes.Count(y => MeshTypeTraits.HasMorphers(y.Type)));
    writer.Write(morpherMeshCount);

    // Extension block.
    writer.ScheduleWriteOffsetAligned(16, () =>
    {
        // NDNM is a special case as we stored the data in the nodes themselves.
        bool noNodeNames = Nodes.All(x => x.Name == null);
        if (!noNodeNames)
        {
            WriteExtension(writer, ModelExtensionIdentifier.NodeName, () =>
            {
                // Each entry: null-terminated name, 4-byte alignment, node index.
                for (int i = 0; i < Nodes.Count; i++)
                {
                    writer.Write(Nodes[i].Name, StringBinaryFormat.NullTerminated);
                    writer.Align(4);
                    writer.Write(i);
                }
            });
        }

        foreach (var extension in Extensions)
        {
            WriteExtension(writer, extension.Identifier, () => writer.WriteObject(extension));
        }

        // Write dummy end extension
        writer.Write(0);
        writer.Write(0);
        writer.Align(16);
    });

    if (!context.IsFieldObject)
    {
        // TODO(TGE): implement this properly
        writer.PerformScheduledWrites();
    }
}
/// <summary>
/// True when the point satisfies every node's Phi test within a small
/// negative tolerance.
/// </summary>
public bool Contains(Vector2 point)
{
    // Small slack below zero to absorb floating-point error.
    const double tolerance = -1e-5;
    return Nodes.All(node => node.Phi(point) >= tolerance);
}