private void CreateRoom(Vector3Int origin)
{
    var roomPositions = new HashSet<Vector3Int>();
    var freePositions = new UniqueQueue<Vector3Int>();

    freePositions.Enqueue(origin);

    var isSpace = false;

    // Breadth-first search of the connected tiles that are not occupied.
    while (!freePositions.IsEmpty)
    {
        if (freePositions.TryDequeue(out Vector3Int position))
        {
            roomPositions.Add(position);

            Vector3Int[] neighbors = MetaUtils.GetNeighbors(position, null);
            for (var i = 0; i < neighbors.Length; i++)
            {
                Vector3Int neighbor = neighbors[i];
                if (metaTileMap.IsSpaceAt(neighbor, true))
                {
                    Vector3Int worldPosition = MatrixManager.LocalToWorldInt(neighbor, MatrixManager.Get(matrix.Id));

                    // If MatrixManager says the neighboring position is space, the whole room is connected to space.
                    // Otherwise another matrix is blocking off the connection to space.
                    if (MatrixManager.IsSpaceAt(worldPosition, true))
                    {
                        isSpace = true;
                    }
                }
                else if (metaTileMap.IsAtmosPassableAt(position, neighbor, true))
                {
                    // If the neighbor position is not yet a room in the meta data layer and not in the room
                    // positions list, add it to the positions that still need to be checked.
                    if (!roomPositions.Contains(neighbor) && !metaDataLayer.IsRoomAt(neighbor))
                    {
                        freePositions.Enqueue(neighbor);
                    }
                }
            }
        }
    }

    AssignType(roomPositions, isSpace ? NodeType.Space : NodeType.Room);
    SetupNeighbors(roomPositions);
}
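Every snippet on this page leans on the same small UniqueQueue<T> surface: an Enqueue that reports duplicates, Dequeue/TryDequeue, Count, and in some variants IsEmpty, Contains, a Seen collection, a capacity constructor, and a canonicalizing Enqueue overload. The following is a minimal sketch of that contract, inferred purely from these call sites rather than taken from any one of the real implementations. In particular, the semantics around forgetting items are an assumption: the wind queue below re-enqueues dequeued nodes, which implies its variant forgets items on Dequeue, while this sketch remembers them forever, matching the Closure() usage.

using System;
using System.Collections.Generic;

public class UniqueQueue<T> : IEnumerable<T>
{
    private readonly Queue<T> _queue;
    private readonly HashSet<T> _seen;

    public UniqueQueue(int capacity = 0)
    {
        _queue = new Queue<T>(capacity);
        _seen = new HashSet<T>();
    }

    public int Count => _queue.Count;
    public bool IsEmpty => _queue.Count == 0;
    public bool Contains(T item) => _seen.Contains(item);

    // Everything ever enqueued; the Closure() example below returns this.
    public IEnumerable<T> Seen => _seen;

    // Returns false (and queues nothing) when the item was already seen.
    // Null is rejected with InvalidOperationException, matching the
    // Enqueue_Null_InvalidOperationException test below.
    public bool Enqueue(T item)
    {
        if (item == null)
        {
            throw new InvalidOperationException("null items cannot be queued");
        }
        if (!_seen.Add(item))
        {
            return false;
        }
        _queue.Enqueue(item);
        return true;
    }

    // Canonicalizing overload used by ToDfa below: on a duplicate it hands
    // back the instance that was queued first. Relies on
    // HashSet<T>.TryGetValue (.NET Core 2.0+ / .NET Standard 2.1).
    public bool Enqueue(T item, out T canonical)
    {
        if (Enqueue(item))
        {
            canonical = item;
            return true;
        }
        _seen.TryGetValue(item, out canonical);
        return false;
    }

    // Throws InvalidOperationException when empty, like Queue<T>.Dequeue.
    public T Dequeue() => _queue.Dequeue();

    public bool TryDequeue(out T item)
    {
        if (_queue.Count == 0)
        {
            item = default(T);
            return false;
        }
        item = _queue.Dequeue();
        return true;
    }

    // Enumerates pending (not yet dequeued) items; lets LINQ's Any() work,
    // as in the FindPath example below.
    public IEnumerator<T> GetEnumerator() => _queue.GetEnumerator();
    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() => GetEnumerator();
}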
public void AddPlayer(MatchPlayer player)
{
    lock (m_queue)
    {
        m_queue.Enqueue(player);
    }
}
public int QueueObjects(IList<T> objects)
{
    if (objects == null || objects.Count == 0)
    {
        return 0;
    }

    Log.To.NoDomain.V(TAG, "QueueObjects called with {0} objects", objects.Count);
    int added = 0;
    foreach (var obj in objects)
    {
        if (_inbox.Enqueue(obj))
        {
            added++;
        }

        // Flush immediately once the inbox reaches capacity.
        if (_inbox.Count >= _options.Capacity)
        {
            ScheduleWithDelay(TimeSpan.Zero);
        }
    }

    ScheduleWithDelay(DelayToUse());
    return added;
}
private void Event_OnChunkDirty(Vec3i chunkCoord, IWorldChunk chunk, bool isNewChunk)
{
    if (isNewChunk || !mapSink.IsOpened)
    {
        return;
    }

    if (!loadedMapData.ContainsKey(new Vec2i(chunkCoord.X, chunkCoord.Z)))
    {
        return;
    }

    lock (chunksToGenLock)
    {
        chunksToGen.Enqueue(new Vec2i(chunkCoord.X, chunkCoord.Z));
    }
}
public void Enqueue_QueueString_Queued()
{
    UniqueQueue<string> queue = new UniqueQueue<string>();

    queue.Enqueue("abc");

    Assert.Equal(1, queue.Count);
}
public IEnumerable<State> Closure()
{
    var once = new UniqueQueue<State>();
    once.Enqueue(this);

    while (once.Count > 0)
    {
        var state = once.Dequeue();
        foreach (var transition in state.EpsilonTransitions)
        {
            once.Enqueue(transition.Target);
        }
    }

    // Every state that was ever enqueued is part of the epsilon-closure.
    return once.Seen;
}
private void Event_OnChunkDirty(Vec3i chunkCoord, IWorldChunk chunk, EnumChunkDirtyReason reason)
{
    if (reason == EnumChunkDirtyReason.NewlyCreated || !mapSink.IsOpened)
    {
        return;
    }

    if (!loadedMapData.ContainsKey(new Vec2i(chunkCoord.X, chunkCoord.Z)))
    {
        return;
    }

    lock (chunksToGenLock)
    {
        chunksToGen.Enqueue(new Vec2i(chunkCoord.X, chunkCoord.Z));
    }
}
public void UniqueQueueDequeue()
{
    UniqueQueue<int> queue = new UniqueQueue<int>();
    queue.Enqueue(10);

    Assert.That(queue.Dequeue(), Is.EqualTo(10));
    Assert.Throws<InvalidOperationException>(() => queue.Dequeue());
}
public void Enqueue_Null_InvalidOperationException()
{
    UniqueQueue<string> queue = new UniqueQueue<string>();

    Assert.Throws<InvalidOperationException>(() => { queue.Enqueue(null); });
}
public void Dequeue_QueuedString_Dequeued()
{
    UniqueQueue<string> queue = new UniqueQueue<string>();
    queue.Enqueue("abc");

    Assert.Equal("abc", queue.Dequeue());
    Assert.Equal(0, queue.Count);
}
private void CreateRoom(Vector3Int origin)
{
    var roomPositions = new HashSet<Vector3Int>();
    var freePositions = new UniqueQueue<Vector3Int>();

    freePositions.Enqueue(origin);

    var isSpace = false;

    while (!freePositions.IsEmpty)
    {
        if (freePositions.TryDequeue(out Vector3Int position))
        {
            roomPositions.Add(position);

            Vector3Int[] neighbors = MetaUtils.GetNeighbors(position);
            for (var i = 0; i < neighbors.Length; i++)
            {
                Vector3Int neighbor = neighbors[i];
                if (metaTileMap.IsSpaceAt(neighbor))
                {
                    Vector3 worldPosition = transform.TransformPoint(neighbor + Vector3Int.one);
                    worldPosition.z = 0;

                    if (MatrixManager.IsSpaceAt(worldPosition.RoundToInt()))
                    {
                        isSpace = true;
                    }
                }
                else if (metaTileMap.IsAtmosPassableAt(neighbor))
                {
                    if (!roomPositions.Contains(neighbor) && !metaDataLayer.IsRoomAt(neighbor))
                    {
                        freePositions.Enqueue(neighbor);
                    }
                }
            }
        }
    }

    AssignType(roomPositions, isSpace ? NodeType.Space : NodeType.Room);
    SetupNeighbors(roomPositions);
}
static void Main(string[] args)
{
    string seed = "http://web.archive.org/web/20100101163446/http://www.fool.com/";
    int downloadCounter = 0;
    UniqueQueue<string> urlQueue = new UniqueQueue<string>();
    WebClient client = new WebClient();

    if (!Directory.Exists(DOWNLOADS_FOLDER))
    {
        Directory.CreateDirectory(DOWNLOADS_FOLDER);
    }

    urlQueue.Enqueue(seed);
    while (urlQueue.Count > 0 && downloadCounter < DOWNLOAD_LIMIT)
    {
        string url = urlQueue.Dequeue();
        Console.WriteLine("Downloading from {0}", url);
        try
        {
            string html = client.DownloadString(url);
            string filename = (url.Split('/').Last().Length > 0
                ? url.Split('/').Last()
                : DateTime.Now.Ticks.ToString() + ".html");
            using (StreamWriter outfile = new StreamWriter(Path.Combine(DOWNLOADS_FOLDER, filename)))
            {
                outfile.Write(html);
            }

            IEnumerable<string> links = GetLinks(html);
            foreach (var link in links)
            {
                urlQueue.Enqueue(link);
            }

            seen[url] = true;
            downloadCounter++;
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    }

    Console.WriteLine("{0} URLs downloaded, {1} URLs remaining in queue", downloadCounter, urlQueue.Count);
    Console.ReadLine();
}
private bool TryImmediateSnowUpdate(WeatherSimulationRegion simregion, IServerMapChunk mc, Vec2i chunkCoord, IWorldChunk[] chunksCol)
{
    UpdateSnowLayerChunk dummy = new UpdateSnowLayerChunk() { Coords = chunkCoord };

    lock (updateSnowLayerQueue)
    {
        if (updateSnowLayerQueue.Contains(dummy))
        {
            return false;
        }
    }

    double nowTotalHours = ws.api.World.Calendar.TotalHours;
    // Let's wait until WeatherSimulationRegion is done updating.
    if (nowTotalHours - simregion.LastUpdateTotalHours > 1)
    {
        return false;
    }

    UpdateSnowLayerChunk ch = GetSnowUpdate(simregion, mc, chunkCoord, chunksCol);
    if (ch == null)
    {
        // Given a full set of chunksCol, the only path that returns null is newCount == 0.
        return true;
    }

    if (ch.SetBlocks.Count == 0)
    {
        return true;
    }

    cuba.SetChunks(chunkCoord, chunksCol);
    processBlockUpdates(mc, ch, cuba);
    cuba.Commit();

    lock (updateSnowLayerQueue)
    {
        updateSnowLayerQueue.Enqueue(dummy);
    }

    return true;
}
public void AddWindEvent(MetaDataNode node, Vector2Int windDirection, float pressureDifference)
{
    if (node != MetaDataNode.None
        && pressureDifference > AtmosConstants.MinWindForce
        && windDirection != Vector2Int.zero)
    {
        node.WindForce = pressureDifference;
        node.WindDirection = windDirection;
        winds.Enqueue(node);
    }
}
public void AddWindEvent(MetaDataNode node, Vector2Int windDirection, float pressureDifference)
{
    if (node != MetaDataNode.None
        && pressureDifference > AtmosConstants.MinWindForce
        && windDirection != Vector2Int.zero)
    {
        node.WindForce = pressureDifference;
        node.WindDirection = windDirection;
        winds.Enqueue(node);

        Logger.LogTraceFormat(LogAddingWindyNode, Category.Atmos, node.Position.To2Int(), windDirection, pressureDifference);
    }
}
/// <summary> /// /// </summary> /// <param name="item"></param> public void Recycle(T item) { if (item == null) { throw new ArgumentNullException(); } if (!m_items.Enqueue(item)) { throw new InvalidOperationException("This item has already been recycled"); } }
/// <summary> /// /// </summary> /// <param name="capacity">Preallocated elements in the pool. OnRecycle is called when the instance is preallocated</param> public Pool(int capacity = 0) { m_items = new UniqueQueue <T>(capacity); // preallocate instances for (int i = 0; i < capacity; i++) { var item = m_allocator.CreateNew(); item.OnRecycle(); m_items.Enqueue(item); } }
public void TryDequeue_WithOneQueued_DequeueSuccessful()
{
    UniqueQueue<string> queue = new UniqueQueue<string>();
    queue.Enqueue("abc");

    bool result = queue.TryDequeue(out string str);

    Assert.Equal("abc", str);
    Assert.True(result);
    Assert.Equal(0, queue.Count);
}
public static FA ToDfa(FA nfa)
{
    if (nfa.Final == null)
    {
        EnsureDfa(nfa);
        return nfa;
    }

    var once = new UniqueQueue<Closure>();
    var start = new Closure(nfa.Start, nfa.Final);
    once.Enqueue(start);

    while (once.Count > 0)
    {
        var closure = once.Dequeue();
        var transitions = closure.UnambiguateTransitions();
        foreach (var transition in transitions)
        {
            var terminal = transition.Key;
            var targets = transition.Value;
            var targetClosure = new Closure(targets, nfa.Final);

            // On a duplicate, Enqueue hands back the closure that was queued
            // first, so every transition targets the canonical instance.
            once.Enqueue(targetClosure, out targetClosure);

            var target = targetClosure.DfaState;
            closure.DfaState.Add(Integers.From(terminal), target);
        }
    }

    var dfa = From(start.DfaState);
    EnsureDfa(dfa);
    return dfa;
}
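ToDfa leans on the canonicalizing overload: when an equal Closure has already been queued, Enqueue reports false and hands back the first instance, so every transition targets one DfaState per state set. Below is a self-contained toy demonstration of that contract, assuming the UniqueQueue<T> sketch near the top of this page (it is not taken from the source library):

using System;

var once = new UniqueQueue<string>();
string first = new string('a', 1);                        // a fresh, non-interned "a"
once.Enqueue(first, out string canonical1);               // true: first time "a" is seen
once.Enqueue(new string('a', 1), out string canonical2);  // false: equal duplicate, nothing queued

// The stored instance wins, so both callers end up holding the same object.
Console.WriteLine(ReferenceEquals(first, canonical2));    // True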
private void RefreshCommand(object state)
{
    string APIKey = Properties.Constants.CONFIGURATION["APIKey"];
    string url = $"https://api.telegram.org/{APIKey}/getUpdates";
    if (lastUpdateId != 0)
    {
        url += $"?offset={lastUpdateId}";
    }

    //Console.WriteLine($"Offset: {lastUpdateId} Request URL: {url}");
    try
    {
        string response = Synchronizer.RunSync(new Func<Task<string>>(async () => await VisitAsync(url)));
        var responseJson = JObject.Parse(response);
        if (responseJson.Value<bool>("ok"))
        {
            var Commands = responseJson["result"] as JArray;
            //Console.WriteLine($"{Commands.Count} results");
            foreach (JObject command in Commands)
            {
                //Console.WriteLine($"Received command: {command}");
                int offset = command.Value<int>("update_id");
                if (lastUpdateId == offset)
                {
                    // Already processed; the next iteration may hold a new update.
                    continue;
                }

                lastUpdateId = offset;
                // Update the file too.
                File.WriteAllText(Properties.Constants.LAST_UPDATE_ID, JsonConvert.SerializeObject(lastUpdateId));
                commandsList.Enqueue(command);
            }
        }
    }
    catch (HttpRequestException ex)
    {
        // TODO: Will this resolve the timeout exception?
        Console.WriteLine($"Web visit failed with {ex}");
    }
}
public void Write(uint address, byte val)
{
    if (RangedRegions.TryGetValue((byte)(address >> 24), out RangedMemoryRegion rangedRegion)
        && rangedRegion.IsValidAddress(address))
    {
        rangedRegion.Write(address, val);
    }
    else if (AuxiliaryMap.TryGetValue(address, out IMemoryRegion region))
    {
        if (region is IMmioRegion mmioRegion)
        {
            DirtyRegions.Enqueue(mmioRegion);
        }

        region.Write(address, val);
    }
    else
    {
        throw new Exception($"invalid write addr 0x{address:x8}");
    }
}
public static List<string> PrepareAndFindPath(int n, int i_start, int j_start, int i_end, int j_end, out bool isAnswer)
{
    var startNode = $"{i_start}-{j_start}";
    var visited = new HashSet<string>();
    var notVisited = new UniqueQueue<string>();
    notVisited.Enqueue(startNode);

    var shortestPaths = new Dictionary<string, Path>()
    {
        { startNode, new Path() { cost = 0, PreviousNode = startNode, Move = "" } }
    };

    isAnswer = FindPath(n, i_end, j_end, shortestPaths, visited, notVisited);
    if (!isAnswer)
    {
        return new List<string>();
    }

    var endNode = $"{i_end}-{j_end}";
    var cost = 0;
    var path = new List<string>();
    var currentNode = endNode;

    // Walk the predecessor chain back from the end node, then reverse it.
    while (currentNode != startNode)
    {
        cost += shortestPaths[currentNode].cost;
        path.Add(shortestPaths[currentNode].Move);
        currentNode = shortestPaths[currentNode].PreviousNode;
    }

    path.Reverse();
    return path;
}
private void Update()
{
    timePassed += Time.deltaTime;
    timePassed2 += Time.deltaTime;

    if (timePassed2 >= 0.1)
    {
        int count = winds.Count;
        if (count > 0)
        {
            for (int i = 0; i < count; i++)
            {
                if (winds.TryDequeue(out var windyNode))
                {
                    foreach (var pushable in matrix.Get<PushPull>(windyNode.Position, true))
                    {
                        float correctedForce = windyNode.WindForce / (int)pushable.Pushable.Size;
                        if (correctedForce >= AtmosConstants.MinPushForce)
                        {
                            if (pushable.Pushable.IsTileSnap)
                            {
                                byte pushes = (byte)Mathf.Clamp((int)correctedForce / 10, 1, 10);
                                for (byte j = 0; j < pushes; j++)
                                {
                                    // Convert the push to world coords, because WindDirection is in local coords.
                                    pushable.QueuePush((transform.rotation * windyNode.WindDirection.To3Int()).To2Int(),
                                        Random.Range((float)(correctedForce * 0.8), correctedForce));
                                }
                            }
                            else
                            {
                                pushable.Pushable.Nudge(new NudgeInfo
                                {
                                    OriginPos = pushable.Pushable.ServerPosition,
                                    Trajectory = (Vector2)windyNode.WindDirection,
                                    SpinMode = SpinMode.None,
                                    SpinMultiplier = 1,
                                    InitialSpeed = correctedForce,
                                });
                            }
                        }
                    }

                    windyNode.WindForce = 0;
                    windyNode.WindDirection = Vector2Int.zero;
                }
            }
        }

        timePassed2 = 0;
    }

    if (timePassed < 0.5)
    {
        return;
    }

    foreach (MetaDataNode node in hotspots.Values.ToArray())
    {
        if (node.Hotspot != null)
        {
            if (node.Hotspot.Process())
            {
                if (node.Hotspot.Volume > 0.95 * node.GasMix.Volume)
                {
                    for (var i = 0; i < node.Neighbors.Length; i++)
                    {
                        MetaDataNode neighbor = node.Neighbors[i];
                        if (neighbor != null)
                        {
                            ExposeHotspot(node.Neighbors[i].Position, node.GasMix.Temperature * 0.85f, node.GasMix.Volume / 4);
                        }
                    }
                }

                tileChangeManager.UpdateTile(node.Position, TileType.Effects, "Fire");
            }
            else
            {
                RemoveHotspot(node);
            }
        }
    }

    // Check whether chemical fog fx need to be applied, and if so, add them. If not, remove them.
    int addFogCount = addFog.Count;
    if (addFogCount > 0)
    {
        for (int i = 0; i < addFogCount; i++)
        {
            if (addFog.TryDequeue(out var addFogNode))
            {
                // Make sure the tile currently isn't on fire. If it is on fire, we don't want to overwrite the fire effect.
                if (!hotspots.ContainsKey(addFogNode.Position))
                {
                    tileChangeManager.UpdateTile(addFogNode.Position, TileType.Effects, "PlasmaAir");
                }
                // If the tile is on fire but there is still plasma on it, requeue the tile so we can try again.
                else if (!removeFog.Contains(addFogNode))
                {
                    addFog.Enqueue(addFogNode);
                }
            }
        }
    }

    // Similar to above, but for removing chemical fog fx.
    int removeFogCount = removeFog.Count;
    if (removeFogCount > 0)
    {
        for (int i = 0; i < removeFogCount; i++)
        {
            if (removeFog.TryDequeue(out var removeFogNode))
            {
                // Make sure the tile isn't on fire, as we don't want to delete fire effects here.
                // If it is on fire, nothing else needs to happen: the system managing fire removes
                // all effects from the tile after the fire burns out.
                if (!hotspots.ContainsKey(removeFogNode.Position))
                {
                    tileChangeManager.RemoveTile(removeFogNode.Position, LayerType.Effects);
                }
            }
        }
    }

    timePassed = 0;
}
// Adds a tile to the remove-fog-effect queue.
// Called by AtmosSimulation.
public void RemoveFogEvent(MetaDataNode node)
{
    removeFog.Enqueue(node);
}
public static bool FindPath(int size, int iEnd, int jEnd,
    Dictionary<string, Path> paths, HashSet<string> visited, UniqueQueue<string> notVisited)
{
    while (notVisited.Any())
    {
        var currentNode = notVisited.Dequeue();
        var ijCurrent = currentNode.Split('-').Select(x => int.Parse(x)).ToList();
        var iStart = ijCurrent[0];
        var jStart = ijCurrent[1];

        if (iStart == iEnd && jStart == jEnd)
        {
            return true;
        }

        visited.Add(currentNode);
        if (!paths.ContainsKey(currentNode))
        {
            paths.Add(currentNode, new Path());
        }

        foreach (var move in _movesMap)
        {
            var newIStart = move.Value.Item1 + iStart;
            var newJStart = move.Value.Item2 + jStart;
            var newNode = $"{newIStart}-{newJStart}";

            if (newIStart >= size || newIStart < 0
                || newJStart >= size || newJStart < 0
                || visited.Contains(newNode))
            {
                continue;
            }

            if (!paths.ContainsKey(newNode))
            {
                paths.Add(newNode, new Path());
            }

            notVisited.Enqueue(newNode);

            // Relax the edge: every move costs 1; guard against int.MaxValue overflow.
            var newCost = paths[currentNode].cost == int.MaxValue ? int.MaxValue : paths[currentNode].cost + 1;
            var cost = paths[newNode].cost;
            if (newCost < cost)
            {
                paths[newNode].cost = newCost;
                paths[newNode].PreviousNode = currentNode;
                paths[newNode].Move = move.Key;
            }
        }
    }

    return false;
}
public void UpdateSnowLayerOffThread(WeatherSimulationRegion simregion, IServerMapChunk mc, Vec2i chunkPos)
{
    #region Tyrons brain cloud
    // Trick 1: Each x/z coordinate gets a "snow accum" threshold by using a locational random (murmurhash3). Once that threshold is reached, spawn snow. If it's doubled, spawn a 2nd layer of snow. => Patchy "fade in" of snow \o/
    // Trick 2: We store a region-wide snow accum value for the ground level and the map ceiling level. We can now interpolate between those values for each Y-coordinate \o/
    // Trick 3: We loop through each x/z block in a separate thread, then hand over "place snow" tasks to the main thread
    // Trick 4: Let's pre-gen 50 random shuffles for every x/z coordinate of a chunk. Loop through the region chunks, check which one is loaded and select one random shuffle from the list, then iterate over every x/z coord

    // Trick 5: Snowed-over blocks:
    // - New VSMC util: "Automatically try to add a snow cover to all horizontal faces"
    // - New block property: SnowCoverableShape.
    // - Block.OnJsonTesselation adds the snow cover shape to the sourceMesh!!

    // Trick 6: Turn cloud patterns into a "dumb slave system". They are visual information only, so let's make them follow internal mechanisms.
    // - Create a precipitation perlin noise generator. If the precipitation value goes above or below a certain value, we force the cloud pattern system to adapt to a fitting pattern
    // => We gain easy-to-probe, deterministic precipitation values!!
    // => We gain the ability to do unloaded-chunk snow accumulation and unloaded-chunk farmland rain-wetness accum

    // Trick 6 v2.0:
    // Rain clouds are simply overlaid onto the normal clouds.

    // Questions:
    // - Q1: When should it hail now?
    // - Q2: How is particle size determined?
    // - Q3: When should there be thunder?
    // - Q4: How to control the precipitation by command?

    // A1/A3: What if we read the slope of precipitation change? If there is a drastic increase of rainfall, launch:
    //   a. a wind + thunder event
    //   b. a thunder event
    //   c. rarely, a hail event
    //   d. extra rarely, a thunder + hail event
    // A2: Particle size is determined by precipitation intensity

    // Trick 7 v2.0:
    // - Hail and thunder are also triggered by a perlin noise generator. That way I don't need to care about event range.

    // A4: /weather setprecip [auto or 0..1]

    // - Q5: How do we overlay rain clouds onto the normal clouds?
    //   Q5a: Will they be hardcoded? Or configurable?
    //   Q5b: How does the overlay work? Lerp?
    //   Q5c: Rain cloud intensity should relate to precip level. How? Lerp from zero to max rain clouds? Multiple cloud configs and lerp between them?
    // - A5a: Configurable
    //   A5b: Lerp.
    //   A5c: A single max rain cloud config seems sufficient

    // TODO:
    // 1. Rain cloud overlay
    // 2. Snow accum
    // 3. Hail, thunder perlin noise
    // 4. Done?

    // Idea 8:
    // - F**K the region-based weather sim.
    // - Generate cloud patterns like you generate terrain from landforms
    // - Which is grid-based indices, neatly abstracted with LerpedIndex2DMap and nicely shaped with domain warping
    // - Give it enough padding to ensure domain warping does not go out of bounds
    // - Every 2-3 minutes regenerate this map in a separate thread; the cloud renderer lerps between the old and the new map.
    // - Since the basic indices input is grid-based, we can cycle those individually through time

    // For a future version:
    // Hm. Maybe one noise generator for cloud coverage?
    // => Gain the ability to affect local temperature based on cloud coverage
    // Hm. Or maybe one noise generator for each cloud pattern?
    // => Gain the ability for small-scale and very large-scale cloud patterns

    // Maybe even completely ditch per-region simulation?
    // => Gain the ability for migrating weather patterns
    // But then what will determine the cloud pattern?

    // Region-less concept:
    // Take an LCGRandom. Use xpos and zpos+((int)totalDays)/5 for coords
    // Iterate over every player
    // - iterate over a 20x20 chunk area around it (or max view dist + 5 chunks)
    // - domain warp x/z coords. Use those coords to init the position seed on lcgrand. Get a random value
    // - store in a LerpedWeightedIndex2DMap
    // Iterate over every cloud tile
    // - read cloud pattern data from the map

    // Snow accum needs to take the existing world information into account, i.e. the current snow level
    // We should probably
    // - Store snow accumulation as a float value in mapchunkdata as Dictionary<BlockPos, float>
    // - Every 3 seconds or so, "commit" that snow accum into actual snow layer blocks, i.e. if accum >= 1 then add one snow layer and do accum -= 1
    #endregion

    UpdateSnowLayerChunk ch = new UpdateSnowLayerChunk()
    {
        Coords = chunkPos
    };

    // Let's wait until we're done with the current job for this chunk.
    if (updateSnowLayerQueue.Contains(ch))
    {
        return;
    }

    double nowTotalHours = ws.api.World.Calendar.TotalHours;
    // Let's wait until WeatherSimulationRegion is done updating.
    if (nowTotalHours - simregion.LastUpdateTotalHours > 1)
    {
        return;
    }

    byte[] data = mc.GetData("lastSnowAccumUpdateTotalHours");
    double lastSnowAccumUpdateTotalHours = data == null ? 0 : SerializerUtil.Deserialize<double>(data);
    double startTotalHours = lastSnowAccumUpdateTotalHours;

    int reso = WeatherSimulationRegion.snowAccumResolution;

    SnowAccumSnapshot sumsnapshot = new SnowAccumSnapshot()
    {
        //SumTemperatureByRegionCorner = new API.FloatDataMap3D(reso, reso, reso),
        SnowAccumulationByRegionCorner = new API.FloatDataMap3D(reso, reso, reso)
    };
    float[] sumdata = sumsnapshot.SnowAccumulationByRegionCorner.Data;

    if (simregion == null)
    {
        return;
    }

    // Can't grow bigger than one full snow block.
    float max = ws.GeneralConfig.SnowLayerBlocks.Count + 0.5f;

    int len = simregion.SnowAccumSnapshots.Length;
    int i = simregion.SnowAccumSnapshots.Start;
    int newCount = 0;

    lock (WeatherSimulationRegion.lockTest)
    {
        while (len-- > 0)
        {
            SnowAccumSnapshot hoursnapshot = simregion.SnowAccumSnapshots[i];
            i = (i + 1) % simregion.SnowAccumSnapshots.Length;

            if (hoursnapshot == null || lastSnowAccumUpdateTotalHours >= hoursnapshot.TotalHours)
            {
                continue;
            }

            float[] snowaccumdata = hoursnapshot.SnowAccumulationByRegionCorner.Data;
            for (int j = 0; j < snowaccumdata.Length; j++)
            {
                sumdata[j] = GameMath.Clamp(sumdata[j] + snowaccumdata[j], -max, max);
            }

            lastSnowAccumUpdateTotalHours = Math.Max(lastSnowAccumUpdateTotalHours, hoursnapshot.TotalHours);
            newCount++;
        }
    }

    if (newCount == 0)
    {
        return;
    }

    bool ignoreOldAccum = false;
    if (lastSnowAccumUpdateTotalHours - startTotalHours >= sapi.World.Calendar.DaysPerYear * sapi.World.Calendar.HoursPerDay)
    {
        ignoreOldAccum = true;
    }

    ch = UpdateSnowLayer(sumsnapshot, ignoreOldAccum, mc, chunkPos);

    if (ch != null)
    {
        //Console.WriteLine("{0} snaps used for {1}/{2}", newCount, chunkPos.X, chunkPos.Y);
        ch.LastSnowAccumUpdateTotalHours = lastSnowAccumUpdateTotalHours;
        ch.Coords = chunkPos.Copy();

        lock (updateSnowLayerQueueLock)
        {
            updateSnowLayerQueue.Enqueue(ch);
        }
    }
}
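The final bullets of the brain-cloud region spell out a concrete commit rule: keep fractional accumulation per block, and whenever it reaches 1, convert one whole unit into a snow layer block. Below is a minimal sketch of just that rule, assuming the Dictionary<BlockPos, float> storage the comment proposes; AddSnowLayerAt is a hypothetical stand-in invented here, not an actual game API.

using System.Collections.Generic;
using System.Linq;
using Vintagestory.API.MathTools;

// Hypothetical sketch of the commit rule from the region comment above.
void CommitSnowAccum(Dictionary<BlockPos, float> snowAccum)
{
    foreach (BlockPos pos in snowAccum.Keys.ToList()) // snapshot the keys: values are mutated below
    {
        float accum = snowAccum[pos];
        while (accum >= 1f)
        {
            AddSnowLayerAt(pos); // hypothetical helper: place or stack one snow layer block
            accum -= 1f;
        }
        snowAccum[pos] = accum;
    }
}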
public void AddBlockToUpdateQueue(byte col, byte row, byte depth)
{
    nextBlockUpdateQueue.Enqueue(new byte[] { col, row, depth });
}
public void addFloorToQueue(int i)
{
    floorQueue.Enqueue(i);
    Debug.Log("i is " + i + ", floorQueue count is " + floorQueue.Count);
}
public void AddToUpdateList(MetaDataNode node)
{
    updateList.Enqueue(node);
}
protected virtual void GetOutput(bool forceUpdate)
{
    int loops = 0;
    UniqueQueue<Guid> queue = new UniqueQueue<Guid>();
    UniqueQueue<Guid> outputQueue = new UniqueQueue<Guid>();

    // This is a new update loop, so make sure gates and chips are not dirty.
    foreach (Chip chip in Chips)
    {
        chip.Dirty.SetAll(false);
    }

    foreach (Gate gate in Gates)
    {
        gate.SetClean();
    }

    // Update input wires.
    foreach (Wire wire in WireDict[ID])
    {
        if (forceUpdate || FirstRun || (OldInput[wire.FromIndex] != Input[wire.FromIndex]))
        {
            if (wire.IsChip)
            {
                Chip chip = Chips[wire.CircuitIndex];
                chip.SetInputBit(wire.ToIndex, Input[wire.FromIndex] ^ wire.InvertValue);
                chip.Update(forceUpdate);
                if (forceUpdate || FirstRun || chip.IsDirty())
                {
                    queue.Enqueue(chip.ID);
                }
            }
            else
            {
                Gate gate = Gates[wire.CircuitIndex];
                gate.SetInputBit(wire.ToIndex, Input[wire.FromIndex] ^ wire.InvertValue);
                gate.Update(ScrubOutput);
                if (forceUpdate || FirstRun || gate.IsDirty())
                {
                    queue.Enqueue(gate.ID);
                }
            }
        }
    }

    // Update internal components.
    while (queue.Count > 0)
    {
        // Stop infinite loops from continuing.
        loops++;
        if (loops >= 100)
        {
            Debug.Log("Infinite loop, breaking");
            return;
        }

        Guid guid = queue.Dequeue();
        BitArray FromValues;
        BitArray FromDirty;

        // The output of the previous gate.
        Gate previousGate = FindGate(guid);
        if (previousGate != null)
        {
            FromValues = new BitArray(1, previousGate.Output);
            FromDirty = new BitArray(1, previousGate.IsDirty());
        }
        else
        {
            Chip c = FindChip(guid);
            FromValues = c.Output;
            FromDirty = c.Dirty;
        }

        foreach (Wire wire in WireDict[guid])
        {
            if (wire.IsChip)
            {
                if (wire.CircuitIndex == -1)
                {
                    outputQueue.Enqueue(guid);
                    Output[wire.ToIndex] = FromValues[wire.FromIndex] ^ wire.InvertValue;
                    //Debug.Log("Updated output: " + wire.ToIndex);
                }
                else if (FromDirty[wire.FromIndex])
                {
                    Chip chip = Chips[wire.CircuitIndex];
                    chip.SetInputBit(wire.ToIndex, FromValues[wire.FromIndex] ^ wire.InvertValue);
                    chip.Update(false);
                    //Debug.Log("Updated chip: " + (char)(Chips.FindIndex(x => x.ID == guid) + 65) + "->" + (char)(wire.CircuitIndex + 65));
                    if (chip.IsDirty())
                    {
                        queue.Enqueue(chip.ID);
                    }
                }
            }
            else if (FromDirty[wire.FromIndex])
            {
                Gate gate = Gates[wire.CircuitIndex];
                gate.SetInputBit(wire.ToIndex, FromValues[wire.FromIndex] ^ wire.InvertValue);
                gate.Update(ScrubOutput);
                //Debug.Log("Updated gate: " + (char)(wire.CircuitIndex + 65));
                if (forceUpdate || FirstRun || gate.IsDirty())
                {
                    queue.Enqueue(gate.ID);
                }
            }
        }
    }

    // Update output wires.
    while (outputQueue.Count > 0)
    {
        Guid guid = outputQueue.Dequeue();
        BitArray FromValues;

        Gate gate = FindGate(guid);
        if (gate != null)
        {
            FromValues = new BitArray(1, gate.Output);
        }
        else
        {
            FromValues = FindChip(guid).Output;
        }

        foreach (Wire wire in WireDict[guid])
        {
            if (wire.IsChip && wire.CircuitIndex == -1)
            {
                Output[wire.ToIndex] = FromValues[wire.FromIndex] ^ wire.InvertValue;
            }
        }
    }
}