/// <summary>
/// Fills the grid row for this favorites-folder node: name, folder icon,
/// comment column, and highlights the node.
/// </summary>
public void GetData(NodeInfo nodeInfo, CellInfo[] cellData)
{
    cellData[(int)FavoritesGridColumns.Name].Text = Name;
    // "Full" folder icon when the folder contains links, empty-folder icon otherwise.
    cellData[(int)FavoritesGridColumns.Name].ImageIndex =
        Links.Count > 0
            ? FavoritesDummyForm.Instance.FolderStartIndex
            : FavoritesDummyForm.Instance.EmptyFolderStartIndex;
    nodeInfo.Highlight = true;
    cellData[(int)FavoritesGridColumns.Comment].Text = Comment;
}
/// <summary>
/// Builds the calendar cell info for a day, attaching a tooltip with the
/// trainer and/or client count when calendar data exists for that date.
/// </summary>
protected override CellInfo GetCellInfo(DateTime dt)
{
    CellInfo ci = new CellInfo(dt);

    if (m_calendarData == null)
        return ci;

    // Look for the single calendar row matching this exact date.
    string filter = String.Format("calDate='{0}-{1}-{2}'", ci.date.Year, ci.date.Month, ci.date.Day);
    DataRow[] rows = m_calendarData.Select(filter);
    if (rows.Length != 1)
        return ci;

    if (showTrainers)
        ci.sTip += String.Format("Trainer: {0}", rows[0]["trainer"]);

    if (showClientCount)
    {
        if (showTrainers)
            ci.sTip += "\n";
        ci.sTip += String.Format("Clients:{0}", rows[0]["clientsCount"]);
    }

    return ci;
}
public ushort nSize; // Size of the cell content on the main b-tree page

/// <summary>
/// Value equality: both cell indexes must be in range, the referenced cell
/// bytes must match, and every decoded field must be identical.
/// </summary>
public bool Equals(CellInfo ci)
{
    // An out-of-range cell index can never compare equal.
    if (ci.CellID >= ci.Cells.Length || CellID >= Cells.Length)
        return false;

    return ci.Cells[ci.CellID] == Cells[CellID]
        && ci.nKey == nKey
        && ci.nData == nData
        && ci.nPayload == nPayload
        && ci.nHeader == nHeader
        && ci.nLocal == nLocal
        && ci.iOverflow == iOverflow
        && ci.nSize == nSize;
}
public byte[] pCell; // Pointer to the start of cell content
#endregion Fields

#region Methods

/// <summary>
/// Value equality over the referenced cell byte and all parsed cell fields.
/// </summary>
public bool Equals(CellInfo ci)
{
    // Reject comparisons where either cell index is out of range.
    if (ci.iCell >= ci.pCell.Length || iCell >= pCell.Length)
        return false;

    return ci.pCell[ci.iCell] == pCell[iCell]
        && ci.nKey == nKey
        && ci.nData == nData
        && ci.nPayload == nPayload
        && ci.nHeader == nHeader
        && ci.nLocal == nLocal
        && ci.iOverflow == iOverflow
        && ci.nSize == nSize;
}
/// <summary>
/// Fills the outbox summary row: description + icon, and a "new/rate/download"
/// count triple where zero counts are shown as empty segments (e.g. "3//1").
/// Highlights the node when any queue is non-empty.
/// </summary>
void IGetData.GetData(NodeInfo nodeInfo, CellInfo[] cellData)
{
    var om = _provider.GetRequiredService<IOutboxManager>();

    // Read each collection count once instead of nine separate property walks.
    var newMessages = om.NewMessages.Count;
    var rateMarks = om.RateMarks.Count;
    var downloadTopics = om.DownloadTopics.Count;

    cellData[0].Text = Description;
    cellData[0].ImageIndex = ImageIndex;
    cellData[1].Text =
        $"{(newMessages == 0 ? string.Empty : newMessages.ToString())}/{(rateMarks == 0 ? string.Empty : rateMarks.ToString())}/{(downloadTopics == 0 ? string.Empty : downloadTopics.ToString())}";
    nodeInfo.Highlight = newMessages > 0 || rateMarks > 0 || downloadTopics > 0;
}
/// <summary>Fills the outbox grid row for a pending topic re-download entry.</summary>
void IGetData.GetData(NodeInfo nodeInfo, CellInfo[] cellData)
{
    // Subject column carries the hint; the forum column shows source plus icon.
    cellData[OutboxManager.SubjectColun].Text = Hint;
    cellData[OutboxManager.ForumColumn].Text = Source;
    cellData[OutboxManager.ForumColumn].ImageIndex = OutboxImageManager.RegetTopicImageIndex;
    cellData[OutboxManager.AddInfoColumn].Text = "ID = " + MessageID;
}
/// <summary>
/// If the given cell spills onto an overflow page, records a pointer-map
/// entry for the first overflow page. No-op when a prior error is pending.
/// </summary>
internal void ptrmapPutOvflPtr(byte[] pCell, ref RC pRC)
{
    // Bail out immediately if an earlier operation already failed.
    if (pRC != 0)
        return;

    Debug.Assert(pCell != null);

    var cellInfo = new CellInfo();
    btreeParseCellPtr(pCell, ref cellInfo);
    Debug.Assert((cellInfo.nData + (this.HasIntKey ? 0 : cellInfo.nKey)) == cellInfo.nPayload);

    // Only cells with an overflow chain need a pointer-map entry.
    if (cellInfo.iOverflow == 0)
        return;

    Pgno ovfl = ConvertEx.Get4(pCell, cellInfo.iOverflow);
    this.Shared.ptrmapPut(ovfl, PTRMAP.OVERFLOW1, this.ID, ref pRC);
}
/// <summary>
/// Parses the cell starting at <paramref name="cellID"/> in <paramref name="cell"/>
/// and fills <paramref name="info"/> with the decoded header/payload layout
/// (key, data length, local vs. overflow split, total size).
/// </summary>
internal void btreeParseCellPtr(byte[] cell, int cellID, ref CellInfo info)
{
    var nPayload = (uint)0; // Number of bytes of cell payload
    Debug.Assert(MutexEx.Held(Shared.Mutex));
    if (info.Cells != cell)
        info.Cells = cell;
    info.CellID = cellID;
    Debug.Assert(Leaf == 0 || Leaf == 1);
    var n = (ushort)ChildPtrSize; // Number bytes in cell content header
    Debug.Assert(n == 4 - 4 * Leaf); // Interior pages carry a 4-byte child pointer; leaves carry none
    if (HasIntKey)
    {
        // Table b-tree cell: optional data-length varint, then the 64-bit rowid key.
        if (HasData != 0)
            n += (ushort)ConvertEx.GetVarint4(cell, (uint)(cellID + n), out nPayload);
        else
            nPayload = 0;
        n += (ushort)ConvertEx.GetVarint9L(cell, (uint)(cellID + n), out info.nKey);
        info.nData = nPayload;
    }
    else
    {
        // Index b-tree cell: a single varint gives the key length, which is the payload.
        info.nData = 0;
        n += (ushort)ConvertEx.GetVarint4(cell, (uint)(cellID + n), out nPayload);
        info.nKey = nPayload;
    }
    info.nPayload = nPayload;
    info.nHeader = n;
    if (Check.LIKELY(nPayload <= this.MaxLocal))
    {
        // This is the (easy) common case where the entire payload fits on the local page. No overflow is required.
        if ((info.nSize = (ushort)(n + nPayload)) < 4)
            info.nSize = 4; // A cell is never smaller than 4 bytes
        info.nLocal = (ushort)nPayload;
        info.iOverflow = 0;
    }
    else
    {
        // If the payload will not fit completely on the local page, we have to decide how much to store locally and how much to spill onto
        // overflow pages. The strategy is to minimize the amount of unused space on overflow pages while keeping the amount of local storage
        // in between minLocal and maxLocal.
        // Warning: changing the way overflow payload is distributed in any way will result in an incompatible file format.
        var minLocal = (int)MinLocal; // Minimum amount of payload held locally
        var maxLocal = (int)MaxLocal; // Maximum amount of payload held locally
        var surplus = (int)(minLocal + (nPayload - minLocal) % (Shared.UsableSize - 4)); // Overflow payload available for local storage
        info.nLocal = (surplus <= maxLocal ? (ushort)surplus : (ushort)minLocal);
        info.iOverflow = (ushort)(info.nLocal + n); // Offset of the 4-byte overflow page number within the cell
        info.nSize = (ushort)(info.iOverflow + 4);
    }
}
/// <summary>
/// Builds a CellInfo snapshot from a CellController.
/// </summary>
/// <param name="cell">The cell to snapshot.</param>
public static CellInfo ToCellInfo(CellController cell)
{
    var res = new CellInfo { IndexCell = cell.Index };

    var item = cell.Item;
    if (item != null)
    {
        // Copy the item-related fields only when the cell holds an item.
        res.IndexItem = item.BaseItem.Index;
        res.Modification = item.Modification;
        res.RarityItem = (int)item.RarityItem;
    }

    return res;
}
/// <summary>
/// Owner-draw handler for the feature combo box: draws the item's icon (if any)
/// and its caption, indented by nesting level, bold when highlighted.
/// </summary>
private void ComboBoxDrawItem(object sender, DrawItemEventArgs e)
{
    e.DrawBackground();
    if (e.Index < 0)
        return;

    var bounds = e.Bounds;
    var cont = (FeatureContainer)_comboBox.Items[e.Index];
    bounds.X += _leftMargin + cont.Level * Config.Instance.ForumDisplayConfig.GridIndent;

    var cd = new CellInfo[5];
    var ni = new NodeInfo(_comboBox.ForeColor, _comboBox.BackColor, _comboBox.Font, false);
    ((IGetData)cont.Feature).GetData(ni, cd);

    if (cd[0].Image != null)
    {
        e.Graphics.DrawImage(
            cd[0].Image,
            bounds.X,
            bounds.Y,
            cd[0].Image.Width,
            cd[0].Image.Height);
        bounds.X += cd[0].Image.Width;
    }

    // GDI+ objects created here must be disposed after painting (the original
    // leaked a SolidBrush and a Font per paint). System brushes and the combo's
    // own font are shared and must NOT be disposed.
    SolidBrush ownedBrush = null;
    Font ownedFont = null;
    try
    {
        Brush brush =
            (e.State & DrawItemState.Selected) == 0
                ? _comboBox.DroppedDown
                    ? (ownedBrush = new SolidBrush(ni.ForeColor))
                    : SystemBrushes.ControlText
                : SystemBrushes.HighlightText;

        Font font = ni.Highlight
            ? (ownedFont = new Font(_comboBox.Font, FontStyle.Bold))
            : _comboBox.Font;

        e.Graphics.DrawString(cont.Feature.ToString(), font, brush, bounds);
    }
    finally
    {
        ownedBrush?.Dispose();
        ownedFont?.Dispose();
    }
}
public bool isIncrblobHandle; // True if this cursor is an incr. io handle
#endif

#region Methods

/// <summary>
/// Resets every cursor field to its default/unset state, detaching the cursor
/// from its list, key info, root page, and any cached key or overflow data.
/// </summary>
public void Clear()
{
    pNext = null;
    pPrev = null;
    pKeyInfo = null;
    pgnoRoot = 0;
    cachedRowid = 0;
    info = new CellInfo(); // Fresh, un-parsed cell info
    wrFlag = 0;
    atLast = 0;
    validNKey = false;
    eState = 0;
    pKey = null;
    nKey = 0;
    skipNext = 0;
#if !SQLITE_OMIT_INCRBLOB
    // Incremental-blob fields only exist when the feature is compiled in.
    isIncrblobHandle = false;
    aOverflow = null;
#endif
    iPage = 0;
}
/// <summary>
/// Updates hover state from the mouse position: tooltippable elements take
/// priority over destroyable cells. Does nothing in Build mode or over UI.
/// </summary>
private void CheckHovered()
{
    if (currentMode == Mode.Build)
    {
        return;
    }

    hoverInfo = null;
    tooltippable = null;

    // Ignore world hits while the pointer is over a UI element.
    if (EventSystem.current.IsPointerOverGameObject())
    {
        return;
    }

    Vector3 mouseWorld = mainCamera.ScreenToWorldPoint(Input.mousePosition);
    var cellCoord = (Vector2Int)WorldGrid.Instance.grid.WorldToCell(mouseWorld);

    foreach (GridElement element in WorldGrid.Instance.ElementsAtPosition(cellCoord))
    {
        // Tooltips win over cell info — check them first.
        Tooltippable tip = element.GetComponent<Tooltippable>();
        if (tip != null)
        {
            tooltippable = tip;
            currentMode = Mode.Other;
            return;
        }

        CellInfo cell = element.GetComponent<CellInfo>();
        if (cell != null)
        {
            hoverInfo = cell;
            currentMode = Mode.Destroy;
            return;
        }
    }
}
//Click on cell
/// <summary>
/// Handles a click at grid position <paramref name="pos"/>: places the currently
/// selected cell type if the player can afford it and the target tile is free.
/// Overlay cells require an existing frame tile underneath; blank cells go on the frame layer.
/// </summary>
public void ClickOnCell(Vector3Int pos)
{
    //Get the clicked on cell.
    CellInfo ci = cellInfos[selectedCell];
    //If we are adding any cell on the overlay layer.
    if (ci.type != CellType.BLANK_CELL)
    {
        // NOTE(review): this branch reads BeehiveManager.bm.frameTilemap while the
        // blank-cell branch below uses the local frameTilemap field — confirm both
        // refer to the same tilemap.
        if (BeehiveManager.bm.frameTilemap.GetTile(pos) != null && beehive.currentHoney >= ci.buildCost && overlayTilemap.GetTile <Tile>(pos) == null)
        {
            beehive.addHoney(-ci.buildCost); // Spend the build cost
            PlaceTile(ci.type, pos);
        }
    }
    else
    {
        //If we are adding a new hive cell (a honeycomb hexagon), add to the frame layer.
        if (beehive.currentHoney >= ci.buildCost && frameTilemap.GetTile <Tile>(pos) == null)
        {
            beehive.addHoney(-ci.buildCost); // Spend the build cost
            PlaceTile(ci.type, pos);
        }
    }
}
/// <summary>
/// Initial generation of the board where all cells are dead: pass one creates
/// and initialises every cell, pass two wires each cell to its neighbours
/// (neighbours require the full board to exist first).
/// </summary>
public void setup()
{
    board = new CellInfo[sizeX, sizeY];
    timer = 0;

    // Pass 1: create and initialise every cell.
    for (int x = 0; x < sizeX; x++)
    {
        for (int y = 0; y < sizeY; y++)
        {
            var cell = new CellInfo();
            cell.setLocation(new Vector2(x, y));
            cell.setup(cellPrefab);
            board[x, y] = cell;
        }
    }

    // Pass 2: connect neighbours once the whole board is populated.
    for (int x = 0; x < sizeX; x++)
    {
        for (int y = 0; y < sizeY; y++)
        {
            board[x, y].setNeighbours(genNeighbourList(x, y));
        }
    }
}
public ushort nSize; // Size of the cell content on the main b-tree page

/// <summary>
/// Two parsed cells are equal when the referenced cell bytes and every
/// decoded field match; out-of-range indexes never compare equal.
/// </summary>
public bool Equals(CellInfo ci)
{
    if (ci.CellID >= ci.Cells.Length || CellID >= Cells.Length)
    {
        return false;
    }

    return ci.Cells[ci.CellID] == Cells[CellID] &&
           ci.nKey == nKey &&
           ci.nData == nData &&
           ci.nPayload == nPayload &&
           ci.nHeader == nHeader &&
           ci.nLocal == nLocal &&
           ci.iOverflow == iOverflow &&
           ci.nSize == nSize;
}
/// <summary>
/// Equality over the identifying cell-tower fields (MCC, MNC, LAC, CID).
/// Requires the exact same runtime type — subclasses are never equal.
/// </summary>
public override bool Equals(object obj)
{
    if (ReferenceEquals(this, obj))
    {
        return true;
    }

    if (obj == null || obj.GetType() != GetType())
    {
        return false;
    }

    var other = (CellInfo)obj;
    return Equals(this.MCC, other.MCC) &&
           Equals(this.MNC, other.MNC) &&
           Equals(this.LAC, other.LAC) &&
           Equals(this.CID, other.CID);
}
public u16 nSize; /* Size of the cell content on the main b-tree page */

/* Two parsed cells are equal when the referenced cell byte and every decoded
** field match; out-of-range cell indexes never compare equal. */
public bool Equals(CellInfo ci)
{
    if (ci.iCell >= ci.pCell.Length || iCell >= this.pCell.Length)
    {
        return false;
    }

    return ci.pCell[ci.iCell] == this.pCell[iCell] &&
           ci.nKey == this.nKey &&
           ci.nData == this.nData &&
           ci.nPayload == this.nPayload &&
           ci.nHeader == this.nHeader &&
           ci.nLocal == this.nLocal &&
           ci.iOverflow == this.iOverflow &&
           ci.nSize == this.nSize;
}
/// <summary>
/// Returns the first empty ("null") neighbor wall around <paramref name="cellInfo"/>,
/// probing directions clockwise starting from up; returns null when none is empty.
/// </summary>
public WallInfo GetGapWall(CellInfo cellInfo)
{
    // Probe order matters — keep the original clockwise-from-up sequence.
    CellDirType[] probeOrder =
    {
        CellDirType.up,
        CellDirType.right_up,
        CellDirType.right_down,
        CellDirType.down,
        CellDirType.left_down,
        CellDirType.left_up,
    };

    foreach (CellDirType dir in probeOrder)
    {
        WallInfo wall = GetNeighborWallByDir(cellInfo, dir);
        if (wall != null && wall.IsNull())
        {
            return wall;
        }
    }

    return null;
}
/// <summary>
/// Builds a plan (list of nodes) to reach a target position from an origin position.
/// Expands the search tree until a solution is found or possibilities are exhausted.
/// </summary>
public List <Node> setPlan(BoardInfo boardInfo, CellInfo currentPos, Vector2Int goal) // Creates a plan to reach a destination position from an origin position
{
    List <Node> plan = new List <Node>();

    // INITIAL NODE
    Node firstNode = new Node(currentPos, goal); // Create the first node of the list, located at the origin
    nodes.Add(firstNode);
    visitedNodes.Add(firstNode);

    while (finalNode == null && nodes.Count > 0)
    {
        expandNode(boardInfo, goal); // Expand the tree until a solution is found or the possibilities are exhausted
    }

    if (nodes.Count == 0) // No possible path exists
    {
        Debug.Log("No se ha podido encontrar un camino hacia el objetivo");
    }
    else
    {
        Node next = finalNode;
        while (next != null) // Walk finalNode's parents successively to build the plan back to the origin
        {
            plan.Add(next);
            next = next.getParent();
        }
        plan.RemoveAt(plan.Count - 1); // The initial node is the character's own position,
                                       // so it is discarded — the player should not move to its own cell
    }

    // Clean up the auxiliary variables used while building the plan
    finalNode = null;
    nodes.Clear();
    visitedNodes.Clear();

    return(plan);
}
/// <summary>
/// Ensures the frame is fully loaded before delegating cell rendering
/// to the base implementation.
/// </summary>
public override void GetData(NodeInfo nodeInfo, CellInfo[] cellData)
{
    if (_partiallyLoaded)
    {
        LoadFrame(this);
    }

    base.GetData(nodeInfo, cellData);
}
/// <summary>Fills the outbox grid row for an outgoing message.</summary>
void IGetData.GetData(NodeInfo nodeInfo, CellInfo[] cellData)
{
    cellData[OutboxManager.SubjectColun].Text = Subject;
    cellData[OutboxManager.AddInfoColumn].Text = UserNick;
    cellData[OutboxManager.ForumColumn].Text = ForumName;
    cellData[OutboxManager.ForumColumn].ImageIndex = GetImage();
}
/// <summary>
/// Computes the total on-page size of a cell without fully parsing it.
/// In DEBUG builds the result is cross-checked against a full parse.
/// </summary>
static ushort cellSizePtr(MemPage page, byte[] cell)
{
#if DEBUG
    // The value returned by this function should always be the same as the (CellInfo.nSize) value found by doing a full parse of the
    // cell. If SQLITE_DEBUG is defined, an assert() at the bottom of this function verifies that this invariant is not violated.
    var debuginfo = new CellInfo();
    btreeParseCellPtr(page, cell, ref debuginfo);
#else
    var debuginfo = new CellInfo();
#endif
    var iter = page.ChildPtrSize;
    uint size = 0;
    if (page.IntKey)
    {
        if (page.HasData)
            // NOTE(review): this overload reads the varint from offset 0 rather than from
            // iter (cf. the commented-out alternative) — confirm the two-arg GetVarint32
            // overload's offset semantics against the non-intkey branch below.
            iter += ConvertEx.GetVarint32(cell, out size); // iter += ConvertEx.GetVarint32(iter, out size);
        else
            size = 0;
        // pIter now points at the 64-bit integer key value, a variable length integer. The following block moves pIter to point at the first byte
        // past the end of the key value.
        int end = iter + 9; // end = &pIter[9];
        while (((cell[iter++]) & 0x80) != 0 && iter < end) { } // while ((iter++) & 0x80 && iter < end);
    }
    else
        iter += ConvertEx.GetVarint32(cell, iter, out size); //pIter += getVarint32( pIter, out nSize );
    if (size > page.MaxLocal)
    {
        // Payload spills to overflow pages: only part of it is stored locally.
        int minLocal = page.MinLocal;
        size = (uint)(minLocal + (size - minLocal) % (page.Bt.UsableSize - 4));
        if (size > page.MaxLocal)
            size = (uint)minLocal;
        size += 4; // Account for the 4-byte overflow page number
    }
    size += (uint)iter; // size += (uint32)(iter - cell);
    // The minimum size of any cell is 4 bytes.
    if (size < 4)
        size = 4;
    Debug.Assert(size == debuginfo.Size);
    return (ushort)size;
}
/// <summary>
/// C#-specific wrapper: copies up to 13 bytes of the cell header into a scratch
/// buffer and parses it to obtain the cell's size.
/// </summary>
static ushort cellSizePtr(MemPage page, uint cell_) // For C#
{
    var info = new CellInfo();
    var cell2 = new byte[13]; // Minimum Size = (2 bytes of Header or (4) Child Pointer) + (maximum of) 9 bytes data
    // NOTE(review): cell_ is unsigned, so "cell_ < 0" is always false and the
    // overflow-cell branch below is unreachable — confirm whether cell_ was meant
    // to be a signed int (negative values indexing page.Ovfls).
    if (cell_ < 0) // Overflow Cell
        Buffer.BlockCopy(page.Ovfls[-(cell_ + 1)].Cell, 0, cell2, 0, cell2.Length < page.Ovfls[-(cell_ + 1)].Cell.Length ? cell2.Length : page.Ovfls[-(cell_ + 1)].Cell.Length);
    else if (cell_ >= page.Data.Length + 1 - cell2.Length)
        // Near the end of the page: copy only the remaining bytes.
        Buffer.BlockCopy(page.Data, (int)cell_, cell2, 0, (int)(page.Data.Length - cell_));
    else
        Buffer.BlockCopy(page.Data, (int)cell_, cell2, 0, cell2.Length);
    btreeParseCellPtr(page, cell2, ref info);
    return info.Size;
}
// Parses the i-th cell of the page (located via findCell) into info.
static void parseCell(MemPage page, uint cell_, ref CellInfo info) =>
    btreeParseCellPtr(page, findCell(page, cell_), ref info);
/// <summary>
/// Parses the cell starting at <paramref name="cellIdx"/> within <paramref name="cell"/>
/// and fills <paramref name="info"/> with the decoded layout: key, data length,
/// local/overflow split, and total on-page size.
/// </summary>
static void btreeParseCellPtr(MemPage page, byte[] cell, uint cellIdx, ref CellInfo info)
{
    Debug.Assert(MutexEx.Held(page.Bt.Mutex));
    if (info.Cell != cell)
        info.Cell = cell;
    info.Cell_ = cellIdx;
    ushort n = page.ChildPtrSize; // Number bytes in cell content header
    Debug.Assert(n == (page.Leaf ? 0 : 4)); // Interior cells begin with a 4-byte child pointer
    uint payloadLength = 0; // Number of bytes of cell payload
    if (page.IntKey)
    {
        // Table b-tree cell: optional data-length varint, then the 64-bit rowid key.
        if (page.HasData)
            n += (ushort)ConvertEx.GetVarint32(cell, cellIdx + n, out payloadLength);
        else
            payloadLength = 0;
        n += (ushort)ConvertEx.GetVarint(cell, cellIdx + n, out info.Key);
        info.Data = payloadLength;
    }
    else
    {
        // Index b-tree cell: one varint gives the key length, which is the payload.
        info.Data = 0;
        n += (ushort)ConvertEx.GetVarint32(cell, cellIdx + n, out payloadLength);
        info.Key = payloadLength;
    }
    info.Payload = payloadLength;
    info.Header = n;
    if (payloadLength <= page.MaxLocal)
    {
        // This is the (easy) common case where the entire payload fits on the local page. No overflow is required.
        if ((info.Size = (ushort)(n + payloadLength)) < 4)
            info.Size = 4; // A cell is never smaller than 4 bytes
        info.Local = (ushort)payloadLength;
        info.Overflow = 0;
    }
    else
    {
        // If the payload will not fit completely on the local page, we have to decide how much to store locally and how much to spill onto
        // overflow pages. The strategy is to minimize the amount of unused space on overflow pages while keeping the amount of local storage
        // in between minLocal and maxLocal.
        //
        // Warning: changing the way overflow payload is distributed in any way will result in an incompatible file format.
        int minLocal = page.MinLocal; // Minimum amount of payload held locally
        int maxLocal = page.MaxLocal; // Maximum amount of payload held locally
        int surplus = (int)(minLocal + (payloadLength - minLocal) % (page.Bt.UsableSize - 4)); // Overflow payload available for local storage
        ASSERTCOVERAGE(surplus == maxLocal);
        ASSERTCOVERAGE(surplus == maxLocal + 1);
        if (surplus <= maxLocal)
            info.Local = (ushort)surplus;
        else
            info.Local = (ushort)minLocal;
        info.Overflow = (ushort)(info.Local + n); // Offset of the 4-byte overflow page number
        info.Size = (ushort)(info.Overflow + 4);
    }
}
/// <summary>
/// Offline A* in three phases: FASE1 seeds the open list with the start node,
/// FASE2 expands nodes (ordered by f) until the goal is reached or the node
/// budget is exhausted, FASE3 replays the computed route one step per call.
/// </summary>
public override Locomotion.MoveDirection GetNextMove(BoardInfo boardInfo, CellInfo currentPos, CellInfo[] goals)
{
    if (faseactual == Fases.FASE1)
    {
        // Phase 1: initialise the start node and move to the search phase.
        posicionIncial = new Nodo(currentPos, posicionIncial, Locomotion.MoveDirection.None, currentPos.WalkCost);
        posicionIncial.g = 0;
        posicionIncial.h_distanciaManhattan = posicionIncial.heuristic(posicionIncial.estado.GetPosition, boardInfo.Exit.GetPosition);
        posicionIncial.nodoPadre = null;
        abierta.Add(posicionIncial);
        faseactual = Fases.FASE2;
    }
    if (faseactual == Fases.FASE2)
    {
        Nodo NodoActual = null;
        // While the search has not finished, i.e. the goal node has not been found
        while (abierta.Count != 0 && !listaCompleta)
        {
            // Current node: take the open node with the lowest f
            abierta = abierta.OrderBy(node => node.f).ToList();
            NodoActual = abierta[0];
            abierta.Remove(NodoActual);
            cerrada.Add(NodoActual);
            if (NodoActual.esMeta(goals[0]))
            {
                // Goal reached: build the route and switch to the replay phase.
                listaCompleta = true;
                ruta = TakeRout(NodoActual);
                Debug.Log("Nodos expandidos: " + currentNodos);
                faseactual = Fases.FASE3;
            }
            else
            {
                // Expand successors that are not already in the closed list.
                var sucesores = NodoActual.ExpandirOffline(boardInfo);
                foreach (var vecino in sucesores)
                {
                    if (!cerrada.Contains(vecino))
                    {
                        vecino.nodoPadre = NodoActual;
                        vecino.h_distanciaManhattan = vecino.heuristic(vecino.estado.GetPosition, boardInfo.Exit.GetPosition);
                        vecino.g = vecino.estado.WalkCost + vecino.nodoPadre.g;
                        abierta.Add(vecino);
                        currentNodos++;
                    }
                }
            }
            if (currentNodos > maxNodos)
            {
                // Node budget exhausted: abort the search.
                listaCompleta = true;
            }
        }
    }
    if (faseactual == Fases.FASE3 && currentNodos < maxNodos)
    {
        // Phase 3: pop the next move from the end of the route.
        var currentMove = ruta[ruta.Count - 1];
        ruta.RemoveAt(ruta.Count - 1);
        return(currentMove);
    }
    return(Locomotion.MoveDirection.None);
}
/// <summary>
/// Records the explored directions for the navmesh cell at <paramref name="packedCoord"/>.
/// Unknown cells trigger a debug assertion and are otherwise ignored.
/// </summary>
public void SetExplored(ulong packedCoord, Base6Directions.DirectionFlags directionFlags)
{
    CellInfo info;
    if (m_cellInfos.TryGetValue(packedCoord, out info))
    {
        info.ExploredDirections = directionFlags;
        // Written back — presumably CellInfo is a value type, so the copy must
        // be stored again; confirm against the CellInfo declaration.
        m_cellInfos[packedCoord] = info;
    }
    else
    {
        Debug.Assert(false, "Could not find navmesh cell info for setting explored directions!");
    }
}
/// <summary>
/// Returns true when movement from <paramref name="blankCell"/> in direction
/// <paramref name="dir"/> is blocked by a "blind" barrier: out-of-bounds,
/// missing/blind/monster-held neighbor cell, closed cover, an unwalkable
/// non-blank cell, or an impassable wall.
/// </summary>
private static bool HasBlindBarrier(CellInfo blankCell, CellDirType dir)
{
    int cellX, cellY, wallX, wallY, wallN;
    bool isHump = blankCell.IsHump();
    // Compute the neighbor cell coordinates; "hump" cells shift the row for diagonals.
    switch (dir)
    {
    case CellDirType.left_up:
        cellX = blankCell.posX - 1;
        if (isHump)
        {
            cellY = blankCell.posY - 1;
        }
        else
        {
            cellY = blankCell.posY;
        }
        break;

    case CellDirType.right_up:
        cellX = blankCell.posX + 1;
        if (isHump)
        {
            cellY = blankCell.posY - 1;
        }
        else
        {
            cellY = blankCell.posY;
        }
        break;

    default:
        cellX = blankCell.posX;
        cellY = blankCell.posY - 1;
        break;
    }
    wallX = blankCell.posX;
    wallY = blankCell.posY;
    wallN = (int)dir;
    CellInfo cellInfo = CellModel.Instance.GetCellByPos(cellX, cellY);
    // NOTE(review): arguments are passed as (wallY, wallX, wallN) — confirm
    // GetWallByPos really takes row before column.
    WallInfo wallInfo = WallModel.Instance.GetWallByPos(wallY, wallX, wallN);
    // Above the top edge with a passable wall: not a barrier.
    if (cellY < 0 && wallInfo.CanPass())
    {
        return(false);
    }
    // Outside the battle width: always a barrier.
    if (cellX < 0 || cellX >= BattleModel.Instance.crtBattle.battle_width)
    {
        return(true);
    }
    if (cellInfo == null)
    {
        return(true);
    }
    if (cellInfo.isBlindBlank)
    {
        return(true);
    }
    if (cellInfo.isMonsterHold)
    {
        return(true);
    }
    // Closed cover blocks movement.
    bool isCoverOpen = CoverModel.Instance.IsOpen(cellInfo.posX, cellInfo.posY);
    if (!isCoverOpen)
    {
        return(true);
    }
    // A non-blank cell that cannot be moved onto is a barrier.
    if (cellInfo.isBlank == false && !cellInfo.CanMove())
    {
        return(true);
    }
    if (!wallInfo.CanPass())
    {
        return(true);
    }
    return(false);
}
// Vertical interpolation factor: lerps between the cell's corner and top distances.
static protected float VcLerpT(CellInfo info, float lerpToDist) => LerpT(info.cornerDist, info.topDist, lerpToDist);
// Horizontal interpolation factor: lerps between the cell's corner and right distances.
static protected float HzLerpT(CellInfo info, float lerpToDist) => LerpT(info.cornerDist, info.rightDist, lerpToDist);
// Computes the vertices for one cell via the lerped calculator, applying the
// per-cell height sample: heightOffset + heightScale * heights[index].
public void CalculateVertices(int index, CellInfo info, CellVertices verts, NativeArray <float3> vertices)
=> LerpedVertexCalculator.CalculateVertices(index, info, verts, vertices, colNum, heightOffset + heightScale * heights[index], cellSize, lerpToEdge);
// Computes the vertices for one cell at the flat heightOffset (no per-cell height sample).
public void CalculateVertices(int index, CellInfo info, CellVertices verts, NativeArray <float3> vertices)
=> CalculateVertices(index, info, verts, vertices, colNum, heightOffset, cellSize, lerpToEdge);
/// <summary>
/// Stub — intended to count consecutive cells matching <paramref name="mask"/>
/// starting at <paramref name="j0"/>, up to <paramref name="Lmax"/>; not implemented yet.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public int GetNoOfSimilarConsecutiveCells(CellInfo mask, int j0, int Lmax)
{
    throw new NotImplementedException();
}
/// <summary>
/// Exports one sheet as JSON objects (one per data row) to <paramref name="sw"/>.
/// Rows 0/1/2 hold field description/name/type; data rows start at row 3.
/// Rows and fields whose description starts with '#' are skipped, as are empty values.
/// </summary>
private void ExportSheet(System.Data.DataTable sheet, StreamWriter sw)
{
    int cellCount = sheet.Columns.Count;

    // Column 0 is the row-marker column, so field metadata starts at column 1
    // (cellInfos[0] is intentionally left unused).
    CellInfo[] cellInfos = new CellInfo[cellCount];
    for (int i = 1; i < cellCount; i++)
    {
        string fieldDesc = GetCellString(sheet, 0, i);
        string fieldName = GetCellString(sheet, 1, i);
        string fieldType = GetCellString(sheet, 2, i);
        cellInfos[i] = new CellInfo() { Name = fieldName, Type = fieldType, Desc = fieldDesc };
    }

    for (int i = 3; i < sheet.Rows.Count; ++i)
    {
        if (GetCellString(sheet, i, 0).StartsWith("#"))
        {
            continue;
        }

        StringBuilder sb = new StringBuilder();
        sb.Append("{");

        // Track whether any field has been emitted yet. The previous `j > 1`
        // check emitted a leading comma whenever the first column(s) were
        // skipped or empty, producing malformed JSON like {,"b":1}.
        bool firstField = true;
        for (int j = 1; j < cellCount; ++j)
        {
            string desc = cellInfos[j].Desc.ToLower();
            if (desc.StartsWith("#"))
            {
                continue;
            }

            string fieldValue = GetCellString(sheet, i, j);
            if (fieldValue == "")
            {
                continue;
            }

            if (!firstField)
            {
                sb.Append(",");
            }
            firstField = false;

            string fieldName = cellInfos[j].Name;
            // Normalize the id column name for MongoDB-style "_id".
            if (fieldName == "Id" || fieldName == "_id")
            {
                fieldName = "_id";
            }

            string fieldType = cellInfos[j].Type;
            sb.Append($"\"{fieldName}\":{Convert(fieldType, fieldValue)}");
        }
        sb.Append("}");

        // Newline between rows, but not after the last one.
        if (i != sheet.Rows.Count - 1)
        {
            sb.Append("\n");
        }
        sw.Write(sb.ToString());
    }
}
/// <summary>
/// Draws an alternating (Bisque) background behind every even record row,
/// then lets the base renderer draw the cell content on top.
/// </summary>
/// <param name="drawingcontext">Target drawing context.</param>
/// <param name="rowinfo">Row being rendered.</param>
/// <param name="cellinfo">Cell being rendered.</param>
protected override void OnRenderCell(DrawingContext drawingcontext, RowInfo rowinfo, CellInfo cellinfo)
{
    var index = dataGrid.View.Records.IndexOfRecord(rowinfo.Record);
    // Rect is shifted down 0.5px — presumably to keep the fill off the grid line; confirm.
    var rect = new Rect((cellinfo.CellRect).X, (cellinfo.CellRect).Y + 0.5, (cellinfo.CellRect).Width, (cellinfo.CellRect).Height);
    if (index % 2 == 0)
    {
        drawingcontext.DrawGeometry(new SolidColorBrush(Colors.Bisque), new Pen(), new RectangleGeometry(rect));
    }
    base.OnRenderCell(drawingcontext, rowinfo, cellinfo);
}
/// <summary>
/// Removes the cached navmesh cell at <paramref name="packedCoord"/>, dropping
/// all component→cell bookkeeping and releasing its component index range.
/// </summary>
public void ClearCell(ulong packedCoord, ref CellInfo cellInfo)
{
    Debug.Assert(m_cellInfos.ContainsKey(packedCoord), "Could not find navmesh cell info for clearing!");

    // Drop the containing-cell record of every component this cell owned.
    for (int componentIndex = 0; componentIndex < cellInfo.ComponentNum; ++componentIndex)
    {
        bool removed = m_componentCells.Remove(cellInfo.StartingIndex + componentIndex);
        Debug.Assert(removed, "Inconsistency! Couldn't remove information about cell of a cached navmesh component cell");
    }

    m_cellInfos.Remove(packedCoord);
    DeallocateComponentStartingIndex(cellInfo.StartingIndex, cellInfo.ComponentNum);
}
// Convenience overload: parse the cell starting at offset 0 of the buffer.
static void btreeParseCellPtr(MemPage page, byte[] cell, ref CellInfo info) =>
    btreeParseCellPtr(page, cell, 0, ref info);
/// <summary>
/// Seeds the path search with <paramref name="location"/> at zero cost and
/// enqueues it with its heuristic estimate; off-map locations are ignored.
/// </summary>
public void AddInitialCell(CPos location)
{
    if (!self.World.Map.Contains(location))
        return;

    CellInfo[location] = new CellInfo(0, location, false);
    Queue.Add(new PathDistance(Heuristic(location), location));
}
/// <summary>
/// Integrity-check one b-tree page: verifies cells, key ordering, overflow
/// chains, child-page depth, parent min/max key bounds, and complete byte
/// coverage of the page. Returns the page depth + 1, or 0 on error/skip.
/// </summary>
static int checkTreePage(IntegrityCk check, Pid pageID, string parentContext, ref long parentMinKey, bool hasParentMinKey, ref long parentMaxKey, bool hasParentMaxKey)
{
    var msg = new StringBuilder(100);
    msg.AppendFormat("Page {0}: ", pageID);

    // Check that the page exists
    var bt = check.Bt;
    var usableSize = (int)bt.UsableSize;
    if (pageID == 0) return 0;
    if (checkRef(check, pageID, parentContext)) return 0;
    RC rc;
    MemPage page = new MemPage();
    if ((rc = btreeGetPage(bt, pageID, ref page, false)) != RC.OK)
    {
        checkAppendMsg(check, msg.ToString(), "unable to get the page. error code=%d", rc);
        return 0;
    }

    // Clear MemPage.isInit to make sure the corruption detection code in btreeInitPage() is executed.
    page.IsInit = false;
    if ((rc = btreeInitPage(page)) != RC.OK)
    {
        Debug.Assert(rc == RC.CORRUPT); // The only possible error from InitPage
        checkAppendMsg(check, msg.ToString(), "btreeInitPage() returns error code %d", rc);
        releasePage(page);
        return 0;
    }

    // Check out all the cells.
    Pid id;
    uint i;
    int depth = 0;
    long minKey = 0;
    long maxKey = 0;
    for (i = 0U; i < page.Cells && check.MaxErrors != 0; i++)
    {
        // Check payload overflow pages
        msg.AppendFormat("On tree page {0} cell {1}: ", pageID, i);
        uint cell_ = findCell(page, i);
        var info = new CellInfo();
        btreeParseCellPtr(page, cell_, ref info);
        uint sizeCell = info.Data;
        if (!page.IntKey) sizeCell += (uint)info.Key;
        // For intKey pages, check that the keys are in order.
        else if (i == 0) minKey = maxKey = info.Key;
        else
        {
            if (info.Key <= maxKey) checkAppendMsg(check, msg.ToString(), "Rowid %lld out of order (previous was %lld)", info.Key, maxKey);
            maxKey = info.Key;
        }
        Debug.Assert(sizeCell == info.Payload);
        if (sizeCell > info.Local) //&& pCell[info.iOverflow]<=&pPage.aData[pBt.usableSize]
        {
            // Cell spills to an overflow chain: verify the chain length and pointer map.
            int pages = (int)(sizeCell - info.Local + usableSize - 5) / (usableSize - 4);
            Pid ovflID = ConvertEx.Get4(page.Data, cell_ + info.Overflow);
#if !OMIT_AUTOVACUUM
            if (bt.AutoVacuum) checkPtrmap(check, ovflID, PTRMAP.OVERFLOW1, pageID, msg.ToString());
#endif
            checkList(check, false, ovflID, pages, msg.ToString());
        }

        // Check sanity of left child page.
        if (!page.Leaf)
        {
            id = (Pid)ConvertEx.Get4(page.Data, cell_);
#if !OMIT_AUTOVACUUM
            if (bt.AutoVacuum) checkPtrmap(check, id, PTRMAP.BTREE, pageID, msg.ToString());
#endif
            int depth2;
            if (i == 0) depth2 = checkTreePage(check, id, msg.ToString(), ref minKey, true, ref _nullRef_, false);
            else depth2 = checkTreePage(check, id, msg.ToString(), ref minKey, true, ref maxKey, true);
            // All children of a b-tree page must sit at the same depth.
            if (i > 0 && depth2 != depth) checkAppendMsg(check, msg, "Child page depth differs");
            depth = depth2;
        }
    }
    if (!page.Leaf)
    {
        // The right-most child pointer lives in the page header, not in a cell.
        id = (Pid)ConvertEx.Get4(page.Data, page.HdrOffset + 8);
        msg.AppendFormat("On page {0} at right child: ", pageID);
#if !OMIT_AUTOVACUUM
        if (bt.AutoVacuum) checkPtrmap(check, id, PTRMAP.BTREE, pageID, msg.ToString());
#endif
        if (page.Cells == 0) checkTreePage(check, id, msg.ToString(), ref _nullRef_, false, ref _nullRef_, false);
        else checkTreePage(check, id, msg.ToString(), ref _nullRef_, false, ref maxKey, true);
    }

    // For intKey leaf pages, check that the min/max keys are in order with any left/parent/right pages.
    if (page.Leaf && page.IntKey)
    {
        // if we are a left child page
        if (hasParentMinKey)
        {
            // if we are the left most child page
            if (!hasParentMaxKey)
            {
                if (maxKey > parentMinKey) checkAppendMsg(check, msg, "Rowid %lld out of order (max larger than parent min of %lld)", maxKey, parentMinKey);
            }
            else
            {
                if (minKey <= parentMinKey) checkAppendMsg(check, msg, "Rowid %lld out of order (min less than parent min of %lld)", minKey, parentMinKey);
                if (maxKey > parentMaxKey) checkAppendMsg(check, msg, "Rowid %lld out of order (max larger than parent max of %lld)", maxKey, parentMaxKey);
                parentMinKey = maxKey;
            }
        }
        // else if we're a right child page
        else if (hasParentMaxKey)
        {
            if (minKey <= parentMaxKey) checkAppendMsg(check, msg, "Rowid %lld out of order (min less than parent max of %lld)", minKey, parentMaxKey);
        }
    }

    // Check for complete coverage of the page
    byte[] data = page.Data;
    uint hdr = page.HdrOffset;
    byte[] hit = PCache.PageAlloc2((int)bt.PageSize);
    if (hit == null) check.MallocFailed = true;
    else
    {
        uint contentOffset = ConvertEx.Get2nz(data, hdr + 5);
        Debug.Assert(contentOffset <= usableSize); // Enforced by btreeInitPage()
        Array.Clear(hit, (int)contentOffset, (int)(usableSize - contentOffset));
        // NOTE(review): z is uint, so "z >= 0" is always true — when z reaches 0 the
        // decrement wraps to uint.MaxValue and hit[z] will throw. The intent (per the
        // C original) is memset(hit, 1, contentOffset); confirm and fix the loop bound.
        { for (uint z = contentOffset - 1; z >= 0; z--) hit[z] = 1; }// memset(hit, 1, contentOffset);
        uint cells = ConvertEx.Get2(data, hdr + 3);
        // Leaf pages have an 8-byte header (no right-child pointer), interior pages 12 bytes.
        uint cellStart = hdr + 12 - 4 * (page.Leaf ? 1U : 0U);
        for (i = 0; i < cells; i++)
        {
            // Mark every byte covered by each cell; 65536 flags an unparsable cell pointer.
            var sizeCell = 65536U;
            uint pc = ConvertEx.Get2(data, cellStart + i * 2);
            if (pc <= usableSize - 4) sizeCell = cellSizePtr(page, data, pc);
            if ((int)(pc + sizeCell - 1) >= usableSize) checkAppendMsg(check, (string)null, "Corruption detected in cell %d on page %d", i, pageID);
            else for (var j = (int)(pc + sizeCell - 1); j >= pc; j--) hit[j]++;
        }
        // Walk the freeblock list and mark its bytes as covered too.
        i = ConvertEx.Get2(data, hdr + 1);
        while (i > 0)
        {
            Debug.Assert(i <= usableSize - 4); // Enforced by btreeInitPage()
            uint size = ConvertEx.Get2(data, i + 2);
            Debug.Assert(i + size <= usableSize); // Enforced by btreeInitPage()
            uint j;
            for (j = i + size - 1; j >= i; j--) hit[j]++;
            j = ConvertEx.Get2(data, i);
            Debug.Assert(j == 0 || j > i + size); // Enforced by btreeInitPage()
            Debug.Assert(j <= usableSize - 4); // Enforced by btreeInitPage()
            i = j;
        }
        // Every usable byte must be covered exactly once; count the uncovered ones.
        uint cnt;
        for (i = cnt = 0; i < usableSize; i++)
        {
            if (hit[i] == 0) cnt++;
            else if (hit[i] > 1)
            {
                checkAppendMsg(check, (string)null, "Multiple uses for byte %d of page %d", i, pageID);
                break;
            }
        }
        // Uncovered bytes must match the fragmented-bytes count stored in the header.
        if (cnt != data[hdr + 7]) checkAppendMsg(check, (string)null, "Fragmentation of %d bytes reported as %d on page %d", cnt, data[hdr + 7], pageID);
    }
    PCache.PageFree2(ref hit);
    releasePage(page);
    return depth + 1;
}
/// <summary>
/// Updates the value of an existing key (cell) in the given project's data table.
/// The third argument is interpreted, in order, as:
///   1. a file name when prefixed with '&lt;' (the file's content becomes the value),
///   2. the name or 1-based number of a predefined geometric primitive,
///   3. otherwise a literal value.
/// </summary>
/// <param name="projectId">Id of the project containing the key.</param>
/// <param name="keyId">Id of the key (cell) to update.</param>
/// <param name="fileNameOrValueOrGeometry">'&lt;'-prefixed file name, primitive name/number, or literal value.</param>
/// <exception cref="Exception">Thrown when no key with <paramref name="keyId"/> exists.</exception>
public static void SetKey(string projectId, string keyId, string fileNameOrValueOrGeometry)
{
    CheckForInitAndLoginSDK();
    Project project = GetProjectById(projectId);
    CellInfo key = project.DataTable.Cells.FirstOrDefault(c => c.CellId == keyId);
    if (key == null)
    {
        throw new Exception(string.Format("Key by id: '{0}' not found.", keyId));
    }
    string valueForSet;
    SourceOfData sourceOfData = SourceOfData.Primitive;
    string name = string.Empty;
    // Checking parameter as a file name: a leading '<' marks "read value from file".
    string fileName = string.Empty;
    if (fileNameOrValueOrGeometry.StartsWith("<") && fileNameOrValueOrGeometry.Length > 1)
    {
        // Drop the '<' marker.
        fileName = fileNameOrValueOrGeometry.Substring(1);
        if (!FileHelper.IsFileExists(ref fileName))
        {
            // Fixed message grammar ("is not exists" -> "does not exist").
            Console.WriteLine("File '{0}' does not exist", fileName);
        }
        valueForSet = File.ReadAllText(fileName);
        sourceOfData = SourceOfData.File;
    }
    else
    {
        name = fileNameOrValueOrGeometry.ToUpper();
        // Check whether the parameter names a predefined geometric primitive.
        if (!GeometricPrimitives.TryGetValue(name, out valueForSet))
        {
            // Check whether the parameter is a 1-based index into the primitives.
            int numberPrimitive;
            if (int.TryParse(name, out numberPrimitive)
                && numberPrimitive >= 1 // BUGFIX: "0" or negatives previously reached ElementAt(-1) and threw
                && numberPrimitive <= GeometricPrimitives.Count)
            {
                valueForSet = GeometricPrimitives.Values.ElementAt(numberPrimitive - 1);
                name = GeometricPrimitives.Keys.ElementAt(numberPrimitive - 1);
            }
            else
            {
                // Fall back to treating the parameter as a literal value.
                valueForSet = fileNameOrValueOrGeometry;
                sourceOfData = SourceOfData.Value;
            }
        }
    }
    var serializedValueStr = DataSerializer.Serialize(valueForSet);
    Stream stream = StreamUtils.GenerateStreamFromString(serializedValueStr);
    CellInfo updatingKey = project.DataTable.SetCell(key.CellId, stream, key.ClientMetadata);
    switch (sourceOfData)
    {
        case SourceOfData.File:
            Console.WriteLine("Key '{0}' was updated from file: '{1}'", updatingKey.ClientMetadata.Label, fileName);
            break;
        case SourceOfData.Primitive:
            Console.WriteLine("Key '{0}' was updated by predefined geometric primitive '{1}': ", updatingKey.ClientMetadata.Label, name);
            // Fixed typos: "vivsit" -> "visit", "this changes" -> "these changes".
            Console.WriteLine(@"Please visit https://flux.io, project - '{0}', key - '{1}' to review these changes.", project.Name, key.ClientMetadata.Label);
            break;
        case SourceOfData.Value:
            Console.WriteLine("Key '{0}' was updated with value: '{1}'", updatingKey.ClientMetadata.Label, valueForSet);
            break;
    }
}
static void btreeParseCell(MemPage page, uint cell, ref CellInfo info) { parseCell(page, cell, ref info); }
private bool ProcessRegionInfo(StreamWriter s, PacketTypes p, RegionInfo ri) { switch (p) { case PacketTypes.REQUEST: { //Get Cell Type Info if ((ri.regionDataTypes & WorldConstants.REGION_INFO_CELL) != 0) { CellInfo c = new CellInfo(PacketTypes.REQUESTED); List <WorldCell> region = new List <WorldCell>(WorldInterface.GetRegionCells(ri.x1, ri.x2, ri.y1, ri.y2)); int count = region.Count; foreach (WorldCell wc in region) { c.cellType = (byte)wc.WorldCellType; c.remainingCells = --count; // I don't even know if this is necessary anymore... c.x = wc.X; c.y = wc.Y; s(NetHelpers.ConvertStructToBytes(c)); } } //Get Object Info if ((ri.regionDataTypes & WorldConstants.REGION_INFO_OBJECT) != 0 || (ri.regionDataTypes & WorldConstants.REGION_INFO_PLAYER) != 0) { bool obj = (ri.regionDataTypes & WorldConstants.REGION_INFO_OBJECT) != 0; bool ply = (ri.regionDataTypes & WorldConstants.REGION_INFO_PLAYER) != 0; ObjectInfo o = new ObjectInfo(PacketTypes.REQUESTED); foreach (var wobject in WorldInterface.GetRegionObjects(ri.x1, ri.x2, ri.y1, ri.y2)) { if (wobject == null) { continue; } var pos = wobject.GetPosition(); if (wobject.ObjectType == WorldObjectTypes.PLAYER && ply) { var vel = ((Player)wobject).GetVelocity(); o.id = wobject.Id; o.x = pos.x; o.y = pos.y; o.vx = vel.x; o.vy = vel.y; o.objecttype = WorldConstants.TYPE_PLAYER; s(NetHelpers.ConvertStructToBytes(o)); } else if (obj) { o.id = wobject.Id; o.x = pos.x; o.y = pos.y; if (wobject.ObjectType == WorldObjectTypes.MARKER) { o.objecttype = WorldConstants.TYPE_MARKER; } else if (wobject.ObjectType == WorldObjectTypes.MOVEABLE) { var vel = ((Moveable)wobject).GetVelocity(); o.objecttype = WorldConstants.TYPE_MOVEABLE; o.vx = vel.x; o.vy = vel.y; } else { o.objecttype = WorldConstants.TYPE_GENERIC; } s(NetHelpers.ConvertStructToBytes(o)); } } } return(true); } } return(false); }
static ushort cellSizePtr(MemPage page, byte[] cell, uint offset_) // For C# { var info = new CellInfo(); info.Cell = C._alloc(cell.Length); Buffer.BlockCopy(cell, (int)offset_, info.Cell, 0, (int)(cell.Length - offset_)); btreeParseCellPtr(page, info.Cell, ref info); return info.Size; }
// was:btreeParseCellPtr private void btreeParseCellPtr(int cellID, ref CellInfo info) { btreeParseCellPtr(Data, cellID, ref info); }
static void ptrmapPutOvflPtr(MemPage page, byte[] cell, ref RC rcRef) { if (rcRef != RC.OK) return; Debug.Assert(cell != null); var info = new CellInfo(); btreeParseCellPtr(page, cell, ref info); Debug.Assert((info.Data + (page.IntKey ? 0 : info.Key)) == info.Payload); if (info.Overflow != 0) { Pid ovfl = ConvertEx.Get4(cell, info.Overflow); ptrmapPut(page.Bt, ovfl, PTRMAP.OVERFLOW1, page.ID, ref rcRef); } }
/// <summary>
/// Redistributes the cells of the page at index iParentIdx of pParent and up to
/// two of its siblings so that all siblings end up roughly equally full
/// (SQLite's "balance siblings" algorithm, ported from btree.c). Divider cells
/// are dropped from the parent and re-inserted as needed; pointer-map entries
/// are fixed up for auto-vacuum databases. Returns RC.OK or an error code.
/// </summary>
internal static RC balance_nonroot(MemPage pParent, int iParentIdx, byte[] aOvflSpace, int isRoot)
{
    var apOld = new MemPage[NB];      // pPage and up to two siblings
    var apCopy = new MemPage[NB];     // Private copies of apOld[] pages
    var apNew = new MemPage[NB + 2];  // pPage and up to NB siblings after balancing
    var apDiv = new int[NB - 1];      // Divider cells in pParent
    var cntNew = new int[NB + 2];     // Index in aCell[] of cell after i-th page
    var szNew = new int[NB + 2];      // Combined size of cells place on i-th page
    var szCell = new ushort[1];       // Local size of all cells in apCell[]
    BtShared pBt;                     // The whole database
    int nCell = 0;                    // Number of cells in apCell[]
    int nMaxCells = 0;                // Allocated size of apCell, szCell, aFrom.
    int nNew = 0;                     // Number of pages in apNew[]
    ushort leafCorrection;            // 4 if pPage is a leaf. 0 if not
    int leafData;                     // True if pPage is a leaf of a LEAFDATA tree
    int usableSpace;                  // Bytes in pPage beyond the header
    int pageFlags;                    // Value of pPage.aData[0]
    int subtotal;                     // Subtotal of bytes in cells on one page
    int iOvflSpace = 0;               // First unused byte of aOvflSpace[]
    //int szScratch;                  // Size of scratch memory requested
    byte[][] apCell = null;           // All cells begin balanced
    //
    pBt = pParent.Shared;
    Debug.Assert(MutexEx.Held(pBt.Mutex));
    Debug.Assert(Pager.IsPageWriteable(pParent.DbPage));
#if false
    Btree.TRACE("BALANCE: begin page %d child of %d\n", pPage.pgno, pParent.pgno);
#endif
    // At this point pParent may have at most one overflow cell. And if this overflow cell is present, it must be the cell with
    // index iParentIdx. This scenario comes about when this function is called (indirectly) from sqlite3BtreeDelete().
    Debug.Assert(pParent.NOverflows == 0 || pParent.NOverflows == 1);
    Debug.Assert(pParent.NOverflows == 0 || pParent.Overflows[0].Index == iParentIdx);
    // Find the sibling pages to balance. Also locate the cells in pParent that divide the siblings. An attempt is made to find NN siblings on
    // either side of pPage. More siblings are taken from one side, however, if there are fewer than NN siblings on the other side. If pParent
    // has NB or fewer children then all children of pParent are taken.
    // This loop also drops the divider cells from the parent page. This way, the remainder of the function does not have to deal with any
    // overflow cells in the parent page, since if any existed they will have already been removed.
    int nOld;  // Number of pages in apOld[]
    int nxDiv; // Next divider slot in pParent.aCell[]
    var i = pParent.NOverflows + pParent.Cells;
    if (i < 2)
    {
        nxDiv = 0;
        nOld = i + 1;
    }
    else
    {
        nOld = 3;
        if (iParentIdx == 0)
        {
            nxDiv = 0;
        }
        else if (iParentIdx == i)
        {
            nxDiv = i - 2;
        }
        else
        {
            nxDiv = iParentIdx - 1;
        }
        i = 2;
    }
    var pRight = ((i + nxDiv - pParent.NOverflows) == pParent.Cells ? pParent.HeaderOffset + 8 : pParent.FindCell(i + nxDiv - pParent.NOverflows)); // Location in parent of right-sibling pointer
    var pgno = (Pgno)ConvertEx.Get4(pParent.Data, pRight);
    var rc = RC.OK;
    // Walk the chosen siblings right-to-left (i counts down), loading each page
    // and capturing/dropping the divider cell to its left in the parent.
    while (true)
    {
        rc = pBt.getAndInitPage(pgno, ref apOld[i]);
        if (rc != RC.OK)
        {
            goto balance_cleanup;
        }
        nMaxCells += 1 + apOld[i].Cells + apOld[i].NOverflows;
        if (i-- == 0)
        {
            break;
        }
        if (i + nxDiv == pParent.Overflows[0].Index && pParent.NOverflows != 0)
        {
            // The divider is the parent's (single) overflow cell.
            apDiv[i] = 0;
            pgno = ConvertEx.Get4(pParent.Overflows[0].Cell, apDiv[i]);
            szNew[i] = pParent.cellSizePtr(apDiv[i]);
            pParent.NOverflows = 0;
        }
        else
        {
            apDiv[i] = pParent.FindCell(i + nxDiv - pParent.NOverflows);
            pgno = ConvertEx.Get4(pParent.Data, apDiv[i]);
            szNew[i] = pParent.cellSizePtr(apDiv[i]);
            // Drop the cell from the parent page. apDiv[i] still points to the cell within the parent, even though it has been dropped.
            // This is safe because dropping a cell only overwrites the first four bytes of it, and this function does not need the first
            // four bytes of the divider cell. So the pointer is safe to use later on.
            //
            // Unless SQLite is compiled in secure-delete mode. In this case, the dropCell() routine will overwrite the entire cell with zeroes.
            // In this case, temporarily copy the cell into the aOvflSpace[] buffer. It will be copied out again as soon as the aSpace[] buffer
            // is allocated.
            //if (pBt.secureDelete)
            //{
            //  int iOff = (int)(apDiv[i]) - (int)(pParent.aData); //SQLITE_PTR_TO_INT(apDiv[i]) - SQLITE_PTR_TO_INT(pParent.aData);
            //  if( (iOff+szNew[i])>(int)pBt->usableSize )
            //  {
            //    rc = SQLITE_CORRUPT_BKPT();
            //    Array.Clear(apOld[0].aData,0,apOld[0].aData.Length); //memset(apOld, 0, (i + 1) * sizeof(MemPage*));
            //    goto balance_cleanup;
            //  }
            //  else
            //  {
            //    memcpy(&aOvflSpace[iOff], apDiv[i], szNew[i]);
            //    apDiv[i] = &aOvflSpace[apDiv[i] - pParent.aData];
            //  }
            //}
            pParent.dropCell(i + nxDiv - pParent.NOverflows, szNew[i], ref rc);
        }
    }
    // Make nMaxCells a multiple of 4 in order to preserve 8-byte alignment
    nMaxCells = (nMaxCells + 3) & ~3;
    // Allocate space for memory structures
    apCell = MallocEx.sqlite3ScratchMalloc(apCell, nMaxCells);
    if (szCell.Length < nMaxCells)
    {
        Array.Resize(ref szCell, nMaxCells);
    }
    // Load pointers to all cells on sibling pages and the divider cells into the local apCell[] array. Make copies of the divider cells
    // into space obtained from aSpace1[] and remove the the divider Cells from pParent.
    // If the siblings are on leaf pages, then the child pointers of the divider cells are stripped from the cells before they are copied
    // into aSpace1[]. In this way, all cells in apCell[] are without child pointers. If siblings are not leaves, then all cell in
    // apCell[] include child pointers. Either way, all cells in apCell[] are alike.
    // leafCorrection: 4 if pPage is a leaf. 0 if pPage is not a leaf.
    // leafData: 1 if pPage holds key+data and pParent holds only keys.
    leafCorrection = (ushort)(apOld[0].Leaf * 4);
    leafData = apOld[0].HasData;
    int j;
    for (i = 0; i < nOld; i++)
    {
        // Before doing anything else, take a copy of the i'th original sibling The rest of this function will use data from the copies rather
        // that the original pages since the original pages will be in the process of being overwritten.
        var pOld = apCopy[i] = apOld[i].Clone();
        var limit = pOld.Cells + pOld.NOverflows;
        if (pOld.NOverflows > 0 || true) // "|| true": this port always takes the copying branch
        {
            for (j = 0; j < limit; j++)
            {
                Debug.Assert(nCell < nMaxCells);
                var iFOFC = pOld.FindOverflowCell(j);
                szCell[nCell] = pOld.cellSizePtr(iFOFC);
                // Copy the Data Locally
                if (apCell[nCell] == null)
                {
                    apCell[nCell] = new byte[szCell[nCell]];
                }
                else if (apCell[nCell].Length < szCell[nCell])
                {
                    Array.Resize(ref apCell[nCell], szCell[nCell]);
                }
                if (iFOFC < 0) // Overflow Cell
                {
                    Buffer.BlockCopy(pOld.Overflows[-(iFOFC + 1)].Cell, 0, apCell[nCell], 0, szCell[nCell]);
                }
                else
                {
                    Buffer.BlockCopy(pOld.Data, iFOFC, apCell[nCell], 0, szCell[nCell]);
                }
                nCell++;
            }
        }
        else
        {
            // Dead branch in this port (see "|| true" above); kept for parity with the C source.
            var aData = pOld.Data;
            var maskPage = pOld.MaskPage;
            var cellOffset = pOld.CellOffset;
            for (j = 0; j < limit; j++)
            {
                Debugger.Break();
                Debug.Assert(nCell < nMaxCells);
                apCell[nCell] = FindCellv2(aData, maskPage, cellOffset, j);
                szCell[nCell] = pOld.cellSizePtr(apCell[nCell]);
                nCell++;
            }
        }
        // Append the divider cell to the right of this sibling (non-leafdata trees only).
        if (i < nOld - 1 && 0 == leafData)
        {
            var sz = (ushort)szNew[i];
            var pTemp = MallocEx.sqlite3Malloc(sz + leafCorrection);
            Debug.Assert(nCell < nMaxCells);
            szCell[nCell] = sz;
            Debug.Assert(sz <= pBt.MaxLocal + 23);
            Buffer.BlockCopy(pParent.Data, apDiv[i], pTemp, 0, sz);
            if (apCell[nCell] == null || apCell[nCell].Length < sz)
            {
                Array.Resize(ref apCell[nCell], sz);
            }
            Buffer.BlockCopy(pTemp, leafCorrection, apCell[nCell], 0, sz);
            Debug.Assert(leafCorrection == 0 || leafCorrection == 4);
            szCell[nCell] = (ushort)(szCell[nCell] - leafCorrection);
            if (0 == pOld.Leaf)
            {
                Debug.Assert(leafCorrection == 0);
                Debug.Assert(pOld.HeaderOffset == 0);
                // The right pointer of the child page pOld becomes the left pointer of the divider cell
                Buffer.BlockCopy(pOld.Data, 8, apCell[nCell], 0, 4); //memcpy( apCell[nCell], ref pOld.aData[8], 4 );
            }
            else
            {
                Debug.Assert(leafCorrection == 4);
                if (szCell[nCell] < 4)
                {
                    // Do not allow any cells smaller than 4 bytes.
                    szCell[nCell] = 4;
                }
            }
            nCell++;
        }
    }
    // Figure out the number of pages needed to hold all nCell cells. Store this number in "k". Also compute szNew[] which is the total
    // size of all cells on the i-th page and cntNew[] which is the index in apCell[] of the cell that divides page i from page i+1.
    // cntNew[k] should equal nCell.
    // Values computed by this block:
    //   k: The total number of sibling pages
    //   szNew[i]: Spaced used on the i-th sibling page.
    //   cntNew[i]: Index in apCell[] and szCell[] for the first cell to
    //              the right of the i-th sibling page.
    //   usableSpace: Number of bytes of space available on each sibling.
    usableSpace = (int)pBt.UsableSize - 12 + leafCorrection;
    int k;
    for (subtotal = k = i = 0; i < nCell; i++)
    {
        Debug.Assert(i < nMaxCells);
        subtotal += szCell[i] + 2;
        if (subtotal > usableSpace)
        {
            szNew[k] = subtotal - szCell[i];
            cntNew[k] = i;
            if (leafData != 0)
            {
                i--;
            }
            subtotal = 0;
            k++;
            if (k > NB + 1)
            {
                rc = SysEx.SQLITE_CORRUPT_BKPT();
                goto balance_cleanup;
            }
        }
    }
    szNew[k] = subtotal;
    cntNew[k] = nCell;
    k++;
    // The packing computed by the previous block is biased toward the siblings on the left side. The left siblings are always nearly full, while the
    // right-most sibling might be nearly empty. This block of code attempts to adjust the packing of siblings to get a better balance.
    //
    // This adjustment is more than an optimization. The packing above might be so out of balance as to be illegal. For example, the right-most
    // sibling might be completely empty. This adjustment is not optional.
    for (i = k - 1; i > 0; i--)
    {
        var szRight = szNew[i];     // Size of sibling on the right
        var szLeft = szNew[i - 1];  // Size of sibling on the left
        var r = cntNew[i - 1] - 1;  // Index of right-most cell in left sibling
        var d = r + 1 - leafData;   // Index of first cell to the left of right sibling
        Debug.Assert(d < nMaxCells);
        Debug.Assert(r < nMaxCells);
        while (szRight == 0 || szRight + szCell[d] + 2 <= szLeft - (szCell[r] + 2))
        {
            szRight += szCell[d] + 2;
            szLeft -= szCell[r] + 2;
            cntNew[i - 1]--;
            r = cntNew[i - 1] - 1;
            d = r + 1 - leafData;
        }
        szNew[i] = szRight;
        szNew[i - 1] = szLeft;
    }
    // Either we found one or more cells (cntnew[0])>0) or pPage is a virtual root page. A virtual root page is when the real root
    // page is page 1 and we are the only child of that page.
    Debug.Assert(cntNew[0] > 0 || (pParent.ID == 1 && pParent.Cells == 0));
    Btree.TRACE("BALANCE: old: %d %d %d ", apOld[0].ID, (nOld >= 2 ? apOld[1].ID : 0), (nOld >= 3 ? apOld[2].ID : 0));
    // Allocate k new pages. Reuse old pages where possible.
    if (apOld[0].ID <= 1)
    {
        rc = SysEx.SQLITE_CORRUPT_BKPT();
        goto balance_cleanup;
    }
    pageFlags = apOld[0].Data[0];
    for (i = 0; i < k; i++)
    {
        var pNew = new MemPage();
        if (i < nOld)
        {
            pNew = apNew[i] = apOld[i];
            apOld[i] = null;
            rc = Pager.Write(pNew.DbPage);
            nNew++;
            if (rc != RC.OK)
            {
                goto balance_cleanup;
            }
        }
        else
        {
            Debug.Assert(i > 0);
            rc = pBt.allocateBtreePage(ref pNew, ref pgno, pgno, 0);
            if (rc != 0)
            {
                goto balance_cleanup;
            }
            apNew[i] = pNew;
            nNew++;
            // Set the pointer-map entry for the new sibling page.
#if !SQLITE_OMIT_AUTOVACUUM
            if (pBt.AutoVacuum)
#else
            if (false)
#endif
            {
                pBt.ptrmapPut(pNew.ID, PTRMAP.BTREE, pParent.ID, ref rc);
                if (rc != RC.OK)
                {
                    goto balance_cleanup;
                }
            }
        }
    }
    // Free any old pages that were not reused as new pages.
    while (i < nOld)
    {
        apOld[i].freePage(ref rc);
        if (rc != RC.OK)
        {
            goto balance_cleanup;
        }
        apOld[i].releasePage();
        apOld[i] = null;
        i++;
    }
    // Put the new pages in accending order. This helps to keep entries in the disk file in order so that a scan
    // of the table is a linear scan through the file. That in turn helps the operating system to deliver pages
    // from the disk more rapidly.
    // An O(n^2) insertion sort algorithm is used, but since n is never more than NB (a small constant), that should
    // not be a problem.
    // When NB==3, this one optimization makes the database about 25% faster for large insertions and deletions.
    for (i = 0; i < k - 1; i++)
    {
        var minV = (int)apNew[i].ID;
        var minI = i;
        for (j = i + 1; j < k; j++)
        {
            if (apNew[j].ID < (uint)minV)
            {
                minI = j;
                minV = (int)apNew[j].ID;
            }
        }
        if (minI > i)
        {
            var pT = apNew[i];
            apNew[i] = apNew[minI];
            apNew[minI] = pT;
        }
    }
    Btree.TRACE("new: %d(%d) %d(%d) %d(%d) %d(%d) %d(%d)\n", apNew[0].ID, szNew[0], (nNew >= 2 ? apNew[1].ID : 0), (nNew >= 2 ? szNew[1] : 0), (nNew >= 3 ? apNew[2].ID : 0), (nNew >= 3 ? szNew[2] : 0), (nNew >= 4 ? apNew[3].ID : 0), (nNew >= 4 ? szNew[3] : 0), (nNew >= 5 ? apNew[4].ID : 0), (nNew >= 5 ? szNew[4] : 0));
    Debug.Assert(Pager.IsPageWriteable(pParent.DbPage));
    ConvertEx.Put4L(pParent.Data, pRight, apNew[nNew - 1].ID);
    // Evenly distribute the data in apCell[] across the new pages. Insert divider cells into pParent as necessary.
    j = 0;
    for (i = 0; i < nNew; i++)
    {
        // Assemble the new sibling page.
        MemPage pNew = apNew[i];
        Debug.Assert(j < nMaxCells);
        pNew.zeroPage(pageFlags);
        pNew.assemblePage(cntNew[i] - j, apCell, szCell, j);
        Debug.Assert(pNew.Cells > 0 || (nNew == 1 && cntNew[0] == 0));
        Debug.Assert(pNew.NOverflows == 0);
        j = cntNew[i];
        // If the sibling page assembled above was not the right-most sibling, insert a divider cell into the parent page.
        Debug.Assert(i < nNew - 1 || j == nCell);
        if (j < nCell)
        {
            Debug.Assert(j < nMaxCells);
            var pCell = apCell[j];
            var sz = szCell[j] + leafCorrection;
            var pTemp = MallocEx.sqlite3Malloc(sz);
            if (pNew.Leaf == 0)
            {
                Buffer.BlockCopy(pCell, 0, pNew.Data, 8, 4);
            }
            else if (leafData != 0)
            {
                // If the tree is a leaf-data tree, and the siblings are leaves, then there is no divider cell in apCell[]. Instead, the divider
                // cell consists of the integer key for the right-most cell of the sibling-page assembled above only.
                var info = new CellInfo();
                j--;
                pNew.btreeParseCellPtr(apCell[j], ref info);
                pCell = pTemp;
                sz = 4 + ConvertEx.PutVarint9L(pCell, 4, (ulong)info.nKey);
                pTemp = null;
            }
            else
            {
                //------------ pCell -= 4;
                var _pCell_4 = MallocEx.sqlite3Malloc(pCell.Length + 4);
                Buffer.BlockCopy(pCell, 0, _pCell_4, 4, pCell.Length);
                pCell = _pCell_4;
                // Obscure case for non-leaf-data trees: If the cell at pCell was previously stored on a leaf node, and its reported size was 4
                // bytes, then it may actually be smaller than this (see btreeParseCellPtr(), 4 bytes is the minimum size of
                // any cell). But it is important to pass the correct size to insertCell(), so reparse the cell now.
                // Note that this can never happen in an SQLite data file, as all cells are at least 4 bytes. It only happens in b-trees used
                // to evaluate "IN (SELECT ...)" and similar clauses.
                if (szCell[j] == 4)
                {
                    Debug.Assert(leafCorrection == 4);
                    sz = pParent.cellSizePtr(pCell);
                }
            }
            iOvflSpace += sz;
            Debug.Assert(sz <= pBt.MaxLocal + 23);
            Debug.Assert(iOvflSpace <= (int)pBt.PageSize);
            pParent.insertCell(nxDiv, pCell, sz, pTemp, pNew.ID, ref rc);
            if (rc != RC.OK)
            {
                goto balance_cleanup;
            }
            Debug.Assert(Pager.IsPageWriteable(pParent.DbPage));
            j++;
            nxDiv++;
        }
    }
    Debug.Assert(j == nCell);
    Debug.Assert(nOld > 0);
    Debug.Assert(nNew > 0);
    if ((pageFlags & Btree.PTF_LEAF) == 0)
    {
        Buffer.BlockCopy(apCopy[nOld - 1].Data, 8, apNew[nNew - 1].Data, 8, 4);
    }
    if (isRoot != 0 && pParent.Cells == 0 && pParent.HeaderOffset <= apNew[0].FreeBytes)
    {
        // The root page of the b-tree now contains no cells. The only sibling page is the right-child of the parent. Copy the contents of the
        // child page into the parent, decreasing the overall height of the b-tree structure by one. This is described as the "balance-shallower"
        // sub-algorithm in some documentation.
        // If this is an auto-vacuum database, the call to copyNodeContent() sets all pointer-map entries corresponding to database image pages
        // for which the pointer is stored within the content being copied.
        // The second Debug.Assert below verifies that the child page is defragmented (it must be, as it was just reconstructed using assemblePage()). This
        // is important if the parent page happens to be page 1 of the database image. */
        Debug.Assert(nNew == 1);
        Debug.Assert(apNew[0].FreeBytes == (ConvertEx.Get2(apNew[0].Data, 5) - apNew[0].CellOffset - apNew[0].Cells * 2));
        copyNodeContent(apNew[0], pParent, ref rc);
        apNew[0].freePage(ref rc);
    }
    else
#if !SQLITE_OMIT_AUTOVACUUM
        if (pBt.AutoVacuum)
#else
        if (false)
#endif
    {
        // Fix the pointer-map entries for all the cells that were shifted around. There are several different types of pointer-map entries that need to
        // be dealt with by this routine. Some of these have been set already, but many have not. The following is a summary:
        // 1) The entries associated with new sibling pages that were not siblings when this function was called. These have already
        //    been set. We don't need to worry about old siblings that were moved to the free-list - the freePage() code has taken care
        //    of those.
        // 2) The pointer-map entries associated with the first overflow page in any overflow chains used by new divider cells. These
        //    have also already been taken care of by the insertCell() code.
        // 3) If the sibling pages are not leaves, then the child pages of cells stored on the sibling pages may need to be updated.
        // 4) If the sibling pages are not internal intkey nodes, then any overflow pages used by these cells may need to be updated
        //    (internal intkey nodes never contain pointers to overflow pages).
        // 5) If the sibling pages are not leaves, then the pointer-map entries for the right-child pages of each sibling may need
        //    to be updated.
        // Cases 1 and 2 are dealt with above by other code. The next block deals with cases 3 and 4 and the one after that, case 5. Since
        // setting a pointer map entry is a relatively expensive operation, this code only sets pointer map entries for child or overflow pages that have
        // actually moved between pages.
        var pNew = apNew[0];
        var pOld = apCopy[0];
        var nOverflow = pOld.NOverflows;
        var iNextOld = pOld.Cells + nOverflow;
        var iOverflow = (nOverflow != 0 ? pOld.Overflows[0].Index : -1);
        j = 0; // Current 'old' sibling page
        k = 0; // Current 'new' sibling page
        for (i = 0; i < nCell; i++)
        {
            var isDivider = 0;
            while (i == iNextOld)
            {
                // Cell i is the cell immediately following the last cell on old sibling page j. If the siblings are not leaf pages of an
                // intkey b-tree, then cell i was a divider cell.
                pOld = apCopy[++j];
                iNextOld = i + (0 == leafData ? 1 : 0) + pOld.Cells + pOld.NOverflows;
                if (pOld.NOverflows != 0)
                {
                    nOverflow = pOld.NOverflows;
                    iOverflow = i + (0 == leafData ? 1 : 0) + pOld.Overflows[0].Index;
                }
                isDivider = 0 == leafData ? 1 : 0;
            }
            Debug.Assert(nOverflow > 0 || iOverflow < i);
            Debug.Assert(nOverflow < 2 || pOld.Overflows[0].Index == pOld.Overflows[1].Index - 1);
            Debug.Assert(nOverflow < 3 || pOld.Overflows[1].Index == pOld.Overflows[2].Index - 1);
            if (i == iOverflow)
            {
                isDivider = 1;
                if (--nOverflow > 0)
                {
                    iOverflow++;
                }
            }
            if (i == cntNew[k])
            {
                // Cell i is the cell immediately following the last cell on new sibling page k. If the siblings are not leaf pages of an
                // intkey b-tree, then cell i is a divider cell.
                pNew = apNew[++k];
                if (leafData == 0)
                {
                    continue;
                }
            }
            Debug.Assert(j < nOld);
            Debug.Assert(k < nNew);
            // If the cell was originally divider cell (and is not now) or an overflow cell, or if the cell was located on a different sibling
            // page before the balancing, then the pointer map entries associated with any child or overflow pages need to be updated.
            if (isDivider != 0 || pOld.ID != pNew.ID)
            {
                if (leafCorrection == 0)
                {
                    pBt.ptrmapPut(ConvertEx.Get4(apCell[i]), PTRMAP.BTREE, pNew.ID, ref rc);
                }
                if (szCell[i] > pNew.MinLocal)
                {
                    pNew.ptrmapPutOvflPtr(apCell[i], ref rc);
                }
            }
        }
        if (leafCorrection == 0)
        {
            for (i = 0; i < nNew; i++)
            {
                var key = ConvertEx.Get4(apNew[i].Data, 8);
                pBt.ptrmapPut(key, PTRMAP.BTREE, apNew[i].ID, ref rc);
            }
        }
#if false
        // The ptrmapCheckPages() contains Debug.Assert() statements that verify that all pointer map pages are set correctly. This is helpful while
        // debugging. This is usually disabled because a corrupt database may cause an Debug.Assert() statement to fail.
        ptrmapCheckPages(apNew, nNew);
        ptrmapCheckPages(pParent, 1);
#endif
    }
    Debug.Assert(pParent.HasInit);
    Btree.TRACE("BALANCE: finished: old=%d new=%d cells=%d\n", nOld, nNew, nCell);
    // Cleanup before returning.
balance_cleanup:
    MallocEx.sqlite3ScratchFree(apCell);
    for (i = 0; i < nOld; i++)
    {
        apOld[i].releasePage();
    }
    for (i = 0; i < nNew; i++)
    {
        apNew[i].releasePage();
    }
    return (rc);
}
protected override void GetDataExt(CellInfo[] aryCellData) { aryCellData[ExtInfoColumn].Text = GetFormattedRatingForReplies(); }
internal RC clearCell(int pCell) { Debug.Assert(MutexEx.Held(this.Shared.Mutex)); var info = new CellInfo(); btreeParseCellPtr(pCell, ref info); if (info.iOverflow == 0) { return(RC.OK); // No overflow pages. Return without doing anything } var pBt = this.Shared; var ovflPgno = (Pgno)ConvertEx.Get4(this.Data, pCell + info.iOverflow); Debug.Assert(pBt.UsableSize > 4); var ovflPageSize = (uint)(pBt.UsableSize - 4); var nOvfl = (int)((info.nPayload - info.nLocal + ovflPageSize - 1) / ovflPageSize); Debug.Assert(ovflPgno == 0 || nOvfl > 0); RC rc; while (nOvfl-- != 0) { Pgno iNext = 0; MemPage pOvfl = null; if (ovflPgno < 2 || ovflPgno > pBt.btreePagecount()) { // 0 is not a legal page number and page 1 cannot be an overflow page. Therefore if ovflPgno<2 or past the end of the file the database must be corrupt. return(SysEx.SQLITE_CORRUPT_BKPT()); } if (nOvfl != 0) { rc = pBt.getOverflowPage(ovflPgno, out pOvfl, out iNext); if (rc != RC.OK) { return(rc); } } if ((pOvfl != null || ((pOvfl = pBt.btreePageLookup(ovflPgno)) != null)) && Pager.GetPageRefCount(pOvfl.DbPage) != 1) { // There is no reason any cursor should have an outstanding reference to an overflow page belonging to a cell that is being deleted/updated. // So if there exists more than one reference to this page, then it must not really be an overflow page and the database must be corrupt. // It is helpful to detect this before calling freePage2(), as freePage2() may zero the page contents if secure-delete mode is // enabled. If this 'overflow' page happens to be a page that the caller is iterating through or using in some other way, this // can be problematic. rc = SysEx.SQLITE_CORRUPT_BKPT(); } else { rc = pBt.freePage2(pOvfl, ovflPgno); } if (pOvfl != null) { Pager.Unref(pOvfl.DbPage); } if (rc != RC.OK) { return(rc); } ovflPgno = iNext; } return(RC.OK); }
// Performs one pass of the board's gravity/refill step: refills blank cells on the
// top row, lets items fall straight down through passable walls, and slides items
// in diagonally where a straight fall is blocked. Recurses until no cell moved,
// unless 'single' is true (then exactly one pass is performed).
// 'isDeductStep' is forwarded to FillNewItem — presumably it charges a move/step
// cost when refilling; confirm against CellModel.FillNewItem.
private static void SingleMove(bool single = false, bool isDeductStep = false)
{
    bool change = false;
    CellModel.Instance.lineCells.Sort();
    //fill
    // Refill pass: top-row blanks (posY == 0) pull a brand-new item in from above.
    int n = CellModel.Instance.lineCells.Count - 1;
    for (; n >= 0; n--)
    {
        CellInfo blankTopCell = CellModel.Instance.lineCells[n];
        bool isCoverOpen = CoverModel.Instance.IsOpen(blankTopCell.posX, blankTopCell.posY);
        if (blankTopCell.isMonsterHold == false && blankTopCell.posY == 0 && isCoverOpen)
        {
            WallInfo topWall = WallModel.Instance.GetWallByPos(blankTopCell.posY, blankTopCell.posX, (int)CellDirType.up);
            if (topWall.CanPass())
            {
                CellInfo addCell = CellModel.Instance.FillNewItem(blankTopCell, n, isDeductStep);
                CellModel.Instance.AddMoveAnim(addCell, false);
            }
        }
    }
    // Movement pass: for each blank cell, try a straight fall first, then a diagonal slide.
    // The loop breaks after the first successful move; the tail recursion below re-runs the pass.
    int i;
    for (i = 0; i < CellModel.Instance.lineCells.Count; i++)
    {
        CellInfo blankCell = CellModel.Instance.lineCells[i];
        bool isCoverOpen = CoverModel.Instance.CanMoveIn(blankCell.posX, blankCell.posY);
        bool hasFall = false;
        if (blankCell.isMonsterHold == false && isCoverOpen)
        {
            if (blankCell.posY > 0 || (blankCell.posY == 0 && blankCell.IsHump() == false))
            {
                //fall
                WallInfo topWall = WallModel.Instance.GetWallByPos(blankCell.posY, blankCell.posX, (int)CellDirType.up);
                CellInfo topCell = CellModel.Instance.GetCellByPos(blankCell.posX, blankCell.posY - 1);
                if (topWall.CanPass())
                {
                    if (topCell != null && topCell.CanMove())
                    {
                        // The item above drops into this blank; swap both model and cell state.
                        CellModel.Instance.SwitchPos(blankCell, topCell);
                        blankCell.SwitchPos(topCell);
                        CellModel.Instance.AddMoveAnim(topCell, false);
                        hasFall = true;
                        change = true;
                        break;
                    }
                }
                //Slid
                if (hasFall == false)
                {
                    if (topCell == null && blankCell.posY != 0)
                    {
                        continue;
                    }
                    if (topCell != null && topCell.isBlank)
                    {
                        //isCoverOpen = CoverModel.Instance.IsOpen(topCell.posX, topCell.posY);
                        // A plain blank above means gravity will fill this cell later;
                        // only blind-blanks or monster-held cells allow sliding in sideways.
                        if (topCell.isBlindBlank == false && topCell.isMonsterHold == false)
                        {
                            continue;
                        }
                    }
                    CellInfo slidCell = FindSlider(blankCell);
                    if (slidCell != null && slidCell.CanMove())
                    {
                        // Choose the diagonal wall matching the slider's side.
                        int posn = (int)CellDirType.left_up;
                        if (slidCell.posX > blankCell.posX)
                        {
                            posn = (int)CellDirType.right_up;
                        }
                        WallInfo slidWall = WallModel.Instance.GetWallByPos(blankCell.posY, blankCell.posX, posn);
                        if (slidWall.CanPass())
                        {
                            CellModel.Instance.SwitchPos(slidCell, blankCell);
                            slidCell.SwitchPos(blankCell);
                            CellModel.Instance.AddMoveAnim(slidCell, true);
                            change = true;
                            break;
                        }
                    }
                }
            }
        }
    }
    // Keep settling until the board is stable (unless caller asked for a single pass).
    if (change && single == false)
    {
        SingleMove(false, isDeductStep);
    }
}
internal void btreeParseCellPtr(byte[] cell, ref CellInfo info) { btreeParseCellPtr(cell, 0, ref info); }
public void MarkExplored(ulong otherCell, Base6Directions.Direction direction) { CellInfo info = new CellInfo(); if (m_cellInfos.TryGetValue(otherCell, out info)) { info.ExploredDirections |= Base6Directions.GetDirectionFlag(direction); m_cellInfos[otherCell] = info; } else { Debug.Assert(false, "Inconsistency: Cannot find cell info to mark explored information!"); } }
public override Locomotion.MoveDirection GetNextMove(BoardInfo boardInfo, CellInfo currentPos, CellInfo[] goals) { // si la Stack no está vacía, hacer siguiente movimient if (...) { // devuelve siguiente movimient (pop the Stack) } // calcular camino, devuelve resultado de A* var searchResult = Search(boardInfo, currentPos, goals);//lista de nodos //currentPlan lista de moviminetos // recorre searchResult and copia el camino a currentPlan while (searchResult.Parent != null) { currentPlan.Push(searchResult.ProducedBy);//ProducedBy = direccion en la que avanzo searchResult = searchResult.Parent; } // returns next move (pop Stack) if (currentPlan.Any()) { return(currentPlan.Pop()); } return(Locomotion.MoveDirection.None); //var val = Random.Range(0, 4); //if (val == 0) return Locomotion.MoveDirection.Up; //if (val == 1) return Locomotion.MoveDirection.Down; //if (val == 2) return Locomotion.MoveDirection.Left; //return Locomotion.MoveDirection.Right; }
public bool TryGetCell(ulong packedCoord, out CellInfo cellInfo) { return m_cellInfos.TryGetValue(packedCoord, out cellInfo); }
/// <summary>
/// Builds and runs the "auto skill" animation sequence: each newly earned skill entity
/// flies from its seed position into the nearest blank cell around the last touched cell,
/// then lands with a scale pop. Wrapped in 200ms waits on both ends.
/// </summary>
private void PlayPutAutoSkill() {
    rootAction = new OrderAction();
    WaitActor waitActor = new WaitActor(200);
    rootAction.AddNode(waitActor);
    List<SkillEntityInfo> skillEntitys = SkillModel.Instance.GetNewSkillEntitys();
    fightUI.ShowSkill();
    if (lastTouchCell != null && skillEntitys.Count > 0) {
        ParallelAction parallelAction = new ParallelAction();
        // holdIndex advances across skills so two skills never pick the same hole
        int holdIndex = 0;
        bool lastIsHump = lastTouchCell.IsHump();
        for (int i = 0; i < skillEntitys.Count; i++) {
            OrderAction skillOrder = new OrderAction();
            SkillEntityInfo skillEntity = skillEntitys[i];
            Vector2 addPos = new Vector2();
            // Probe candidate holes around lastTouchCell until a blank cell is found.
            // NOTE(review): 19 appears to be the number of hole levels FightConst supports — confirm.
            for (int h = holdIndex; h < 19; h++) {
                Vector2 holePos = FightConst.GetHoleByLevel(h, lastIsHump);
                Vector2 checkPos = new Vector2(lastTouchCell.posX + holePos.x, lastTouchCell.posY - holePos.y);
                CellInfo checkCell = CellModel.Instance.GetCellByPos((int)checkPos.x, (int)checkPos.y);
                if (checkCell != null && checkCell.isBlank) {
                    addPos = checkPos;
                    holdIndex = h + 1;
                    break;
                }
            }
            // Place the skill item into the chosen cell and record the throw in the model
            CellInfo addItem = CellModel.Instance.AddItem(skillEntity.seed.config_cell_item.id, (int)addPos.x, (int)addPos.y);
            SkillModel.Instance.ThrowSkillEntity(skillEntity, addItem);
            GameObject itemObj = CreateCellItem(addItem);
            itemObj.transform.SetParent(effectLayer.transform, false);
            Vector2 toPos = PosUtil.GetFightCellPos(addItem.posX, addItem.posY);
            // Start the visual at the seed position, then fly it to the target cell
            PosUtil.SetCellPos(itemObj.transform, skillEntity.seed.seed_x, skillEntity.seed.seed_y, 1.0f);
            rootAction.AddNode(new PlaySoundActor("Useskill"));
            rootAction.AddNode(new ShowEffectActor(itemObj.transform, "effect_skill_fly"));
            rootAction.AddNode(new MoveActor((RectTransform)itemObj.transform, new Vector3(toPos.x, toPos.y, 0), 1200));
            // Landing: reparent out of the effect layer, pop scale 1.2 -> 1
            skillOrder.AddNode(new SetLayerActor(itemObj.transform, transform));
            skillOrder.AddNode(new PlaySoundActor("PutAutoSkill"));
            skillOrder.AddNode(new ClearEffectActor(itemObj.transform, "effect_skill_fly"));
            skillOrder.AddNode(new ScaleActor((RectTransform)itemObj.transform, new Vector3(1.2f, 1.2f, 0), 0.2f));
            skillOrder.AddNode(new ScaleActor((RectTransform)itemObj.transform, new Vector3(1, 1, 0), 0.1f));
            parallelAction.AddNode(skillOrder);
        }
        rootAction.AddNode(parallelAction);
    }
    waitActor = new WaitActor(200);
    rootAction.AddNode(waitActor);
    ExecuteAction(FightStadus.auto_skill);
}
public void CloseAndCacheCell(ref ClosedCellInfo output) { Debug.Assert(m_cellOpen, "Closing a cell in TriangleComponentMapping, but no cell is open!"); CellInfo info = new CellInfo(); bool reallocateIndices = true; if (m_cellInfos.TryGetValue(m_cellCoord, out info)) { output.NewCell = false; output.OldComponentNum = info.ComponentNum; output.OldStartingIndex = info.StartingIndex; if (info.ComponentNum == m_componentNum) { reallocateIndices = false; info.ComponentNum = output.OldComponentNum; info.StartingIndex = output.OldStartingIndex; } } else { output.NewCell = true; } if (reallocateIndices) { info.ComponentNum = m_componentNum; info.StartingIndex = AllocateComponentStartingIndex(m_componentNum); // Remove containing cell information from the old cell indices if (output.NewCell == false) { DeallocateComponentStartingIndex(output.OldStartingIndex, output.OldComponentNum); for (int i = 0; i < output.OldComponentNum; i++) { m_componentCells.Remove(output.OldStartingIndex + i); } } // Save information about containing cell to the newly allocated cell indices for (int i = 0; i < info.ComponentNum; ++i) { m_componentCells[info.StartingIndex + i] = m_cellCoord; } } m_cellInfos[m_cellCoord] = info; output.ComponentNum = info.ComponentNum; output.ExploredDirections = info.ExploredDirections; output.StartingIndex = info.StartingIndex; m_components = null; m_componentNum = 0; m_cellOpen = false; }
/// <summary>
/// Unlocks monsters (normal or jet mode), builds the corresponding animation tree
/// (release effects, cell changes, monster scale/rotation, cover updates) and, in the
/// non-jet path, additionally ticks cell/cover timers before executing the action.
/// </summary>
/// <param name="isJet">True when triggered by a jet; changes both the unlock query and the final fight state.</param>
private void UnlockMonster(bool isJet = false) {
    List<int> unLockIds = MonsterModel.Instance.UnLock(isJet);
    MonsterModel.Instance.BackUpUnLockMonster(unLockIds);
    rootAction = new OrderAction();
    for (int j = 0; j < unLockIds.Count; j++) {
        int monsterRunId = unLockIds[j];
        FightMonsterItem monsterItem = monsterLayer.GetItemByRunId(monsterRunId);
        // Lift the monster into the effect layer while it animates
        rootAction.AddNode(new SetLayerActor(monsterItem.transform, effectLayer.transform));
        CellInfo monsterCell = new CellInfo();
        monsterCell.posX = monsterItem.monsterInfo.posX;
        monsterCell.posY = monsterItem.monsterInfo.posY;
        if (isJet) {
            if (monsterItem.monsterInfo.IsNull()) {
                // Fully consumed monster: rotate it towards its gap wall before it disappears
                CellDirType dirType = WallModel.Instance.GetGapWallDir(monsterCell);
                int zrotate = PosUtil.GetRotateByDir(dirType);
                rootAction.AddNode(new RoatateActor((RectTransform)monsterItem.transform, new Vector3(0, 0, zrotate), 0.25f));
            }
            else {
                rootAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(1.15f, 1.15f, 1f), 0.2f));
            }
        }
        // Cells released by this monster: show a line effect from monster to cell, then morph the cell
        List<CellInfo> releaseList = MonsterModel.Instance.ReleaseList(monsterRunId);
        if (releaseList.Count > 0) {
            ParallelAction paralle = new ParallelAction();
            for (int i = 0; i < releaseList.Count; i++) {
                CellInfo cellInfo = releaseList[i];
                FightCellItem item = GetItemByRunId(cellInfo.runId);
                if (item == null) {
                    GameObject itemObj = CreateCellItem(cellInfo);
                    item = itemObj.GetComponent<FightCellItem>();
                }
                OrderAction order = new OrderAction();
                order.AddNode(new PlaySoundActor("Refresh"));
                order.AddNode(new ShowEffectLineActor(effectLayer, cellInfo, monsterCell, monsterItem.monsterInfo.releaseId));
                order.AddNode(new ScaleActor((RectTransform)item.transform, new Vector3(0, 0, 0), 0.1f));
                order.AddNode(new ChangeCellActor(item, cellInfo, monsterItem.monsterInfo.releaseId));
                paralle.AddNode(order);
            }
            rootAction.AddNode(paralle);
        }
        if (monsterItem.monsterInfo.IsNull()) {
            // Monster is done: pop then shrink to nothing (first one shrinks slower)
            rootAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(1.25f, 1.25f, 0), 0.15f));
            if (j == 0) {
                rootAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(0, 0, 0), 0.25f));
            }
            else {
                rootAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(0, 0, 0), 0.15f));
            }
        }
        else {
            // Monster survives: restore scale, refresh its cover + progress, drop back into monster layer
            rootAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(1, 1, 1), 0.05f));
            CoverInfo coverInfo = CoverModel.Instance.GetCoverByPos(monsterItem.monsterInfo.posY, monsterItem.monsterInfo.posX);
            FightCoverItem coverItem = coverLayer.GetItemByRunId(coverInfo.runId);
            rootAction.AddNode(new ChangeCoverActor(coverLayer, coverItem, coverInfo));
            rootAction.AddNode(new ProgressMonsterActor(monsterItem, monsterItem.monsterInfo.progress));
            rootAction.AddNode(new SetLayerActor(monsterItem.transform, monsterLayer.transform));
        }
    }
    if (isJet) {
        ExecuteAction(FightStadus.jet_monster);
    }
    else {
        // Non-jet unlocks also advance timed cells and covers in one parallel batch
        ParallelAction paralleTimer = new ParallelAction();
        List<CellInfo> timerCells = CellModel.Instance.Timing();
        for (int i = 0; i < timerCells.Count; i++) {
            CellInfo cellInfo = timerCells[i];
            FightCellItem item = GetItemByRunId(cellInfo.runId);
            if (item != null) {
                OrderAction order = new OrderAction();
                order.AddNode(new PlaySoundActor("Refresh"));
                order.AddNode(new ChangeCellActor(item, cellInfo));
                if (cellInfo.isBlank) {
                    // Timed-out cell became blank: shrink and destroy its view
                    order.AddNode(new ScaleActor((RectTransform)item.transform, new Vector3(0, 0, 0), 0.2f));
                    order.AddNode(new DestroyActor(item.gameObject));
                }
                paralleTimer.AddNode(order);
            }
        }
        List<CoverInfo> timerCovers = CoverModel.Instance.Timing();
        for (int i = 0; i < timerCovers.Count; i++) {
            CoverInfo coverInfo = timerCovers[i];
            FightCoverItem item = coverLayer.GetItemByRunId(coverInfo.runId);
            OrderAction order = new OrderAction();
            order.AddNode(new PlaySoundActor("Refresh"));
            order.AddNode(new ChangeCoverActor(coverLayer, item, coverInfo));
            if (coverInfo.IsNull()) {
                order.AddNode(new ScaleActor((RectTransform)item.transform, new Vector3(0, 0, 0), 0.2f));
                order.AddNode(new DestroyActor(item.gameObject));
                // A removed cover must refresh its neighbours; a configured neighbour
                // interrupts the cover flow so it can be re-shown afterwards
                List<CoverInfo> covers = CoverModel.Instance.GetNeighbors(coverInfo);
                for (int n = 0; n < covers.Count; n++) {
                    CoverInfo cover = covers[n];
                    if (cover != null) {
                        item = coverLayer.GetItemByRunId(cover.runId);
                        order.AddNode(new ChangeCoverActor(coverLayer, item, cover));
                        if (cover.config != null) {
                            coverFlowInterrupt = true;
                        }
                    }
                }
            }
            paralleTimer.AddNode(order);
        }
        rootAction.AddNode(paralleTimer);
        if (coverFlowInterrupt) {
            rootAction.AddNode(new FuncActor(coverLayer.ShowList));
            // TODO (translated): cover flow after a burst can leave extra spider webs
        }
        ExecuteAction(FightStadus.unlock_monster);
    }
}
CellLayer<CellInfo> InitCellInfo() { CellLayer<CellInfo> result = null; var mapSize = new Size(self.World.Map.MapSize.X, self.World.Map.MapSize.Y); // HACK: Uses a static cache so that double-ended searches (which have two PathSearch instances) // can implicitly share data. The PathFinder should allocate the CellInfo array and pass it // explicitly to the things that need to share it. while (CellInfoPool.Count > 0) { var cellInfo = GetFromPool(); if (cellInfo.Size != mapSize || cellInfo.Shape != self.World.Map.TileShape) { Log.Write("debug", "Discarding old pooled CellInfo of wrong size."); continue; } result = cellInfo; break; } if (result == null) result = new CellLayer<CellInfo>(self.World.Map); foreach (var cell in self.World.Map.Cells) result[cell] = new CellInfo(int.MaxValue, cell, false); return result; }
private void Crawl() { rootAction = new OrderAction(); if (!isDeductStep) { MonsterModel.Instance.Crawl(); List <MonsterCrawlInfo> crawAnims = MonsterModel.Instance.crawAnims; rootAction = new OrderAction(); ParallelAction paralle = new ParallelAction(); for (int i = 0; i < crawAnims.Count; i++) { MonsterCrawlInfo crawAnim = crawAnims[i]; OrderAction orderAction = new OrderAction(); paralle.AddNode(orderAction); FightMonsterItem monsterItem = monsterLayer.GetItemByRunId(crawAnim.monster.runId); for (int j = 0; j < crawAnim.pathCells.Count; j++) { CellInfo pathCell = crawAnim.pathCells[j]; Vector2 toPos = PosUtil.GetFightCellPos(pathCell.posX, pathCell.posY); float zrotate = 0; if (j > 0) { Vector2 fromPos = PosUtil.GetFightCellPos(crawAnim.pathCells[j - 1].posX, crawAnim.pathCells[j - 1].posY); zrotate = PosUtil.VectorAngle(new Vector2(fromPos.x, fromPos.y), new Vector2(toPos.x, toPos.y)); } else { Vector2 anchoredPos = ((RectTransform)monsterItem.transform).anchoredPosition; zrotate = PosUtil.VectorAngle(new Vector2(anchoredPos.x, anchoredPos.y), new Vector2(toPos.x, toPos.y)); } orderAction.AddNode(new RotationActor((RectTransform)monsterItem.transform, zrotate)); float speed = 600; orderAction.AddNode(new MoveActor((RectTransform)monsterItem.transform, new Vector3(toPos.x, toPos.y, 0), speed)); if (pathCell.isBlank == false) { FightCellItem cellItem = GetItemByRunId(pathCell.runId); pathCell.SetConfig((int)crawAnim.monster.releaseList[0].id); pathCell.changer = 0; orderAction.AddNode(new ChangeCellActor(cellItem, pathCell)); } } if (crawAnim.roadEnd) { orderAction.AddNode(new ScaleActor((RectTransform)monsterItem.transform, new Vector3(0, 0, 0), 0.3f)); orderAction.AddNode(new ChangeMonsterActor(monsterItem, crawAnim.monster)); } } rootAction.AddNode(paralle); } ExecuteAction(FightStadus.crawl); }
private void ExportSheet(ISheet sheet, StreamWriter sw) { int cellCount = sheet.GetRow(3).LastCellNum; CellInfo[] cellInfos = new CellInfo[cellCount]; for (int i = 2; i < cellCount; i++) { string fieldDesc = GetCellString(sheet, 2, i); string fieldName = GetCellString(sheet, 3, i); string fieldType = GetCellString(sheet, 4, i); cellInfos[i] = new CellInfo() { Name = fieldName, Type = fieldType, Desc = fieldDesc }; } for (int i = 5; i <= sheet.LastRowNum; ++i) { if (GetCellString(sheet, i, 2) == "") { continue; } StringBuilder sb = new StringBuilder(); sb.Append("{"); IRow row = sheet.GetRow(i); for (int j = 2; j < cellCount; ++j) { string desc = cellInfos[j].Desc.ToLower(); if (desc.StartsWith("#")) { continue; } // s开头表示这个字段是服务端专用 if (desc.StartsWith("s") && this.isClient) { continue; } // c开头表示这个字段是客户端专用 if (desc.StartsWith("c") && !this.isClient) { continue; } string fieldValue = GetCellString(row, j); if (fieldValue == "") { throw new Exception($"sheet: {sheet.SheetName} 中有空白字段 {i},{j}"); } if (j > 2) { sb.Append(","); } string fieldName = cellInfos[j].Name; if (fieldName == "Id" || fieldName == "_id") { if (this.isClient) { fieldName = "Id"; } else { fieldName = "_id"; } } string fieldType = cellInfos[j].Type; sb.Append($"\"{fieldName}\":{Convert(fieldType, fieldValue)}"); } sb.Append("}"); sw.WriteLine(sb.ToString()); } }
/// <summary>
/// Builds and runs the "refresh" animation for type-five cells: either every uncovered
/// type-five cell on the board (when <paramref name="cells"/> is null) or exactly the
/// cells supplied. In changer mode items flip (180° then 360°); otherwise they shrink,
/// move back to their grid position, and grow back.
/// </summary>
/// <param name="waitmillisecond">Optional delay prepended to the animation.</param>
/// <param name="cells">Explicit cells to refresh; null means scan the whole board (with cover check).</param>
/// <param name="fightStadus">Fight state to execute; FightStadus.changer switches to the flip animation.</param>
private void Refresh(int waitmillisecond = 0, List<CellInfo> cells = null, FightStadus fightStadus = FightStadus.prop_refresh)
{
    bool hasRefresh = false;
    rootAction = new OrderAction();
    ParallelAction scale1 = new ParallelAction();
    ParallelAction movos = new ParallelAction();
    ParallelAction scale2 = new ParallelAction();
    if (cells == null)
    {
        // Whole-board scan: only uncovered type-five cells participate
        for (int i = 0; i < CellModel.Instance.allCells.Count; i++)
        {
            List<CellInfo> xCells = CellModel.Instance.allCells[i];
            for (int j = 0; j < xCells.Count; j++)
            {
                CellInfo cellInfo = xCells[j];
                CoverInfo coverInfo = CoverModel.Instance.GetCoverByPos(cellInfo.posY, cellInfo.posX);
                if (cellInfo.isBlank == false && cellInfo.config.cell_type == (int)CellType.five && coverInfo.IsNull())
                {
                    hasRefresh = true;
                    AddRefreshAnimation(cellInfo, fightStadus, scale1, movos, scale2);
                }
            }
        }
    }
    else
    {
        // Explicit cell list: no cover check (matches the original behavior)
        for (int j = 0; j < cells.Count; j++)
        {
            CellInfo cellInfo = cells[j];
            if (cellInfo.isBlank == false && cellInfo.config.cell_type == (int)CellType.five)
            {
                hasRefresh = true;
                AddRefreshAnimation(cellInfo, fightStadus, scale1, movos, scale2);
            }
        }
    }
    if (waitmillisecond > 0)
    {
        rootAction.AddNode(new WaitActor(waitmillisecond));
    }
    if (hasRefresh)
    {
        rootAction.AddNode(new PlaySoundActor("Refresh"));
    }
    rootAction.AddNode(scale1);
    rootAction.AddNode(movos);
    rootAction.AddNode(scale2);
    ExecuteAction(fightStadus);
}

// Adds the three-phase refresh animation for one cell (previously duplicated verbatim
// in both branches of Refresh): flip 180°/360° in changer mode, otherwise
// shrink -> move to grid position -> grow back.
private void AddRefreshAnimation(CellInfo cellInfo, FightStadus fightStadus, ParallelAction scale1, ParallelAction movos, ParallelAction scale2)
{
    FightCellItem item = GetItemByRunId(cellInfo.runId);
    if (fightStadus == FightStadus.changer)
    {
        item.transform.localRotation = Quaternion.identity;
        scale1.AddNode(new RoatateActor((RectTransform)item.transform, new Vector3(0, 0, 180), 0.2f));
        scale2.AddNode(new RoatateActor((RectTransform)item.transform, new Vector3(0, 0, 360), 0.2f));
    }
    else
    {
        scale1.AddNode(new ScaleActor((RectTransform)item.transform, new Vector3(0, 0, 0), 0.3f));
        Vector2 toPos = PosUtil.GetFightCellPos(cellInfo.posX, cellInfo.posY);
        movos.AddNode(new MoveActor((RectTransform)item.transform, new Vector3(toPos.x, toPos.y, 0), 0, 0.1f));
        scale2.AddNode(new ScaleActor((RectTransform)item.transform, new Vector3(1, 1, 1), 0.3f));
    }
}