//C++ TO C# CONVERTER WARNING: The original C++ declaration of the following method implementation was not found:
// Tries to extend a single-move PV by one ply using the transposition table:
// plays pv[0], probes the TT, and appends the stored move if it is legal.
// Returns true if a second move (usable as a ponder move) was appended.
// NOTE(review): `new MoveList <GenType.LEGAL>(pos)` is C++ template syntax the
// converter left behind; this will not compile as C# — confirm the ported
// MoveList API (likely `new MoveList(pos, GenType.LEGAL)`) before use.
public bool extract_ponder_from_tt(Position pos)
{
    StateInfo st = new StateInfo();
    bool ttHit;

    // Only called when the PV holds exactly the best move.
    Debug.Assert(pv.size() == 1);

    if (!pv[0]) // presumably an overloaded "is MOVE_NONE" test — confirm
    {
        return(false);
    }

    pos.do_move(pv[0], st, pos.gives_check(pv[0]));
    TTEntry tte = GlobalMembersTt.TT.probe(pos.key(), ref ttHit);
    if (ttHit)
    {
        Move m = tte.move(); // Local copy to be SMP safe
        // Append the TT move only if it is actually legal in the new position.
        if (new MoveList <GenType.LEGAL>(pos).contains(m))
        {
            pv.push_back(m);
        }
    }
    // Restore the position before returning.
    pos.undo_move(pv[0]);
    return(pv.size() > 1);
}
/** Constructor. Creates an empty transposition table with 2^log2Size slots. */
public TranspositionTable(int log2Size)
{
    int size = 1 << log2Size;
    table = new TTEntry[size];
    for (int idx = 0; idx < size; idx++)
    {
        // Every slot starts out as an explicitly empty entry.
        TTEntry slot = new TTEntry();
        slot.key = 0;
        slot.depthSlot = 0;
        slot.type = TTEntry.T_EMPTY;
        table[idx] = slot;
    }
    // Sentinel returned by probe() when no entry matches.
    emptySlot = new TTEntry();
    emptySlot.type = TTEntry.T_EMPTY;
    generation = 0;
}
/// <summary>
/// Return true if this object is more valuable than the other, false otherwise.
/// </summary>
/// <param name="other">Entry to compare against.</param>
/// <param name="currGen">The table's current generation counter.</param>
/// <returns>True when this entry should be preferred over <paramref name="other"/>.</returns>
public bool betterThan(TTEntry other, int currGen)
{
    bool thisCurrent = generation == currGen;
    bool otherCurrent = other.generation == currGen;
    if (thisCurrent != otherCurrent)
    {
        return thisCurrent; // entries from the current generation outrank stale ones
    }

    bool thisExact = type == T_EXACT;
    bool otherExact = other.type == T_EXACT;
    if (thisExact != otherExact)
    {
        return thisExact; // an exact score beats a lower/upper bound
    }

    int myDepth = getDepth();
    int theirDepth = other.getDepth();
    if (myDepth != theirDepth)
    {
        return myDepth > theirDepth; // deeper search results are more trustworthy
    }

    return false; // otherwise, pretty much equally valuable
}
/// <summary>
/// Constructor. Creates an empty transposition table with numEntries slots.
/// </summary>
/// <param name="log2Size">Base-2 logarithm of the number of slots.</param>
public TranspositionTable(int log2Size)
{
    int numEntries = 1 << log2Size;
    table = new TTEntry[numEntries];
    for (int i = 0; i < table.Length; i++)
    {
        // Fill the table with explicitly empty entries.
        TTEntry fresh = new TTEntry();
        fresh.key = 0;
        fresh.depthSlot = 0;
        fresh.type = TTEntry.T_EMPTY;
        table[i] = fresh;
    }
    // Sentinel handed out when a probe misses.
    emptySlot = new TTEntry();
    emptySlot.type = TTEntry.T_EMPTY;
    generation = 0;
}
/// <summary>
/// Retrieve an entry from the hash table corresponding to "pos".
/// Checks both candidate slots for the key.
/// </summary>
/// <param name="key">Zobrist key of the position.</param>
/// <returns>The matching entry, or the shared empty sentinel on a miss.</returns>
public TTEntry probe(ulong key)
{
    // First candidate slot.
    TTEntry candidate = table[h0(key)];
    if (candidate.key == key)
    {
        return(candidate);
    }
    // Second candidate slot; fall back to the empty sentinel on a miss.
    candidate = table[h1(key)];
    return(candidate.key == key ? candidate : emptySlot);
}
/// <summary>
/// Writing an entry and then probing with the same hash must succeed and
/// return that entry with all of its fields intact.
/// </summary>
public void FirstRecordOfAHashGetsReturned()
{
    // Arrange
    ulong hash = 0x12_34_56_78_9A_BC_DE_F1;
    TransposTable tt = new TransposTable(8);
    TTEntry orig = new TTEntry(hash, new Square(4, 5), 6, 7, TTFlag.Exact);
    tt.Write(orig);

    // Act
    TTEntry returned;
    bool retVal = tt.TryGetValue(hash, out returned);

    // Assert — use IsTrue instead of AreEqual(true, ...) for a clearer
    // failure message, then verify the entry round-tripped field by field.
    Assert.IsTrue(retVal);
    Assert.AreEqual(orig.Hash, returned.Hash);
    Assert.AreEqual(orig.BestMove, returned.BestMove);
    Assert.AreEqual(orig.Depth, returned.Depth);
    Assert.AreEqual(orig.Score, returned.Score);
    Assert.AreEqual(orig.Flag, returned.Flag);
}
/// <summary>
/// Extract a list of PV moves, starting from "rootPos" and first move "m".
/// Follows hash-table moves until an empty entry, an illegal stored move,
/// or a position repetition is reached.
/// </summary>
/// <param name="rootPos">Root position (not modified; a copy is used).</param>
/// <param name="m">First move of the line.</param>
/// <returns>The list of PV moves, beginning with <paramref name="m"/>.</returns>
public List <Move> extractPVMoves(Position rootPos, Move m)
{
    Position pos = new Position(rootPos);
    m = new Move(m);
    List <Move> pv = new List <Move>();
    UndoInfo undo = new UndoInfo();
    List <ulong> visited = new List <ulong>();
    MoveGen gen = new MoveGen();
    while (true)
    {
        pv.Add(m);
        pos.makeMove(m, undo);
        ulong hash = pos.zobristHash();
        if (visited.Contains(hash))
        {
            break; // repetition — stop extending the line
        }
        visited.Add(hash);
        TTEntry entry = probe(pos.historyHash());
        if (entry.type == TTEntry.T_EMPTY)
        {
            break; // no further hash move available
        }
        m = new Move(0, 0, 0);
        entry.getMove(m);
        // Verify the stored move is actually legal in this position.
        MoveGen.MoveList legal = gen.pseudoLegalMoves(pos);
        MoveGen.RemoveIllegal(pos, legal);
        bool found = false;
        for (int i = 0; i < legal.size && !found; i++)
        {
            found = legal.m[i].equals(m);
        }
        if (!found)
        {
            break; // stale or corrupted hash move
        }
    }
    return(pv);
}
/// <summary>
/// Print hash table statistics: number of unused slots, number of entries
/// belonging to the current generation, and a depth histogram (depths 0-19).
/// </summary>
public void printStats()
{
    int unused = 0;
    int thisGen = 0;
    const int maxDepth = 20;
    List <int> depHist = new List <int>();
    for (int i = 0; i < maxDepth; i++)
    {
        depHist.Add(0);
    }
    for (int i = 0; i < table.Length; i++)
    {
        TTEntry ent = table[i];
        if (ent.type == TTEntry.T_EMPTY)
        {
            unused++;
        }
        else
        {
            if (ent.generation == generation)
            {
                thisGen++;
            }
            int dp = ent.getDepth();
            if (dp < maxDepth)
            {
                depHist[dp]++;
            }
        }
    }
    SystemHelper.println("Hash stats: unused:" + unused.ToString() + " thisGen:" + thisGen.ToString());
    for (int i = 0; i < maxDepth; i++)
    {
        // BUG FIX: i.ToString("##") renders 0 as an empty string ('#' is an
        // optional digit placeholder), so the depth-0 row printed with no
        // index. PadLeft(2) right-aligns in 2 columns for all depths,
        // matching the original "%2d"-style formatting.
        SystemHelper.println(i.ToString().PadLeft(2) + " " + depHist[i].ToString());
    }
}
/// <summary>
/// Two hashes mapping to the same table index: a deeper entry must replace a
/// shallower one, and a shallower entry must NOT replace a deeper one.
/// </summary>
public void IndexCollision_HigherDepth_Remains()
{
    // Arrange: two keys that collide on the table index (same low bytes).
    ulong hash1 = 0x12_34_56_78_9A_BC_DE_F1;
    ulong hash2 = 0xFF_FF_FF_FF_9A_BC_DE_F1;
    TransposTable tt = new TransposTable(8);
    TTEntry orig = new TTEntry(hash1, new Square(4, 5), 6, 7, TTFlag.Exact);
    tt.Write(orig);
    TTEntry higherDepth = new TTEntry(hash2, new Square(4, 5), 60, 7, TTFlag.Exact);
    tt.Write(higherDepth); // should overwrite: depth 60 > 6

    // Assert: the deeper entry replaced the shallow one...
    TTEntry returned;
    bool retVal = tt.TryGetValue(hash2, out returned);
    Assert.IsTrue(retVal);
    Assert.AreEqual(higherDepth.Hash, returned.Hash);
    Assert.AreEqual(higherDepth.Depth, returned.Depth);

    // ...and a later shallow write must not evict it. The original test only
    // checked retVal here; also pin the entry's contents so a wrongful
    // overwrite cannot slip through.
    tt.Write(orig); // should not overwrite: depth 6 < 60
    retVal = tt.TryGetValue(hash2, out returned);
    Assert.IsTrue(retVal);
    Assert.AreEqual(higherDepth.Hash, returned.Hash);
    Assert.AreEqual(higherDepth.Depth, returned.Depth);
}
public byte type; // entry type: exact score, lower bound, or upper bound

#endregion Fields

#region Methods

/** Return true if this object is more valuable than the other, false otherwise. */
public bool betterThan(TTEntry other, int currGen)
{
    bool sameGenAsCurrent = generation == currGen;
    if (sameGenAsCurrent != (other.generation == currGen))
    {
        return sameGenAsCurrent; // stale-generation entries are worth less
    }

    bool exact = type == T_EXACT;
    if (exact != (other.type == T_EXACT))
    {
        return exact; // exact scores beat lower/upper bounds
    }

    if (getDepth() != other.getDepth())
    {
        return getDepth() > other.getDepth(); // prefer deeper searches
    }

    return false; // otherwise, pretty much equally valuable
}
// search<>() is the main search function for both PV and non-PV nodes and for
// normal and SplitPoint nodes. When called just after a split point the search
// is simpler because we have already probed the hash table, done a null move
// search, and searched the first move before splitting, so we don't have to
// repeat all this work again. We also don't need to store anything to the hash
// table here: This is taken care of after we return from the split point.
//
// NT        : node type (Root / PV / NonPV).
// SpNode    : true when resuming work below an existing split point.
// pos       : position to search; moves are undone before returning.
// ss        : search-stack wrapper centred on the current ply.
// alpha/beta: search window bounds.
// depth     : remaining search depth.
// cutNode   : node where a fail-high is expected (affects reductions).
// Returns the best value found within the (alpha, beta) window.
private static ValueT search(NodeType NT, bool SpNode, Position pos, StackArrayWrapper ss, ValueT alpha, ValueT beta, Depth depth, bool cutNode)
{
    Utils.WriteToLog($"search(NT={(int) NT}, SpNode={(SpNode ? 1 : 0)}, pos={pos.key()}, ss, alpha={alpha}, beta={beta}, depth={(int) depth}, cutNode={(cutNode ? 1 : 0)})");
    var RootNode = NT == NodeType.Root;
    var PvNode = RootNode || NT == NodeType.PV;
    Debug.Assert(-Value.VALUE_INFINITE <= alpha && alpha < beta && beta <= Value.VALUE_INFINITE);
    Debug.Assert(PvNode || (alpha == beta - 1));
    Debug.Assert(depth > Depth.DEPTH_ZERO);

    var st = new StateInfo();
    TTEntry tte;
    SplitPoint splitPoint = null;
    ulong posKey = 0;
    MoveT ttMove, move, excludedMove, bestMove;
    ValueT bestValue, value, ttValue, eval;
    bool ttHit;
    int moveCount = 0;
    int quietCount = 0;

    // Cached views into the search stack around the current ply.
    var stack = ss[ss.current];
    var stackPlus1 = ss[ss.current + 1];
    var stackPlus2 = ss[ss.current + 2];
    var stackMinus1 = ss[ss.current - 1];
    var stackMinus2 = ss[ss.current - 2];

    // Step 1. Initialize node
    var thisThread = pos.this_thread();
    bool inCheck = pos.checkers() != 0;

    if (SpNode)
    {
        // Resume a split point: state comes from the shared SplitPoint object.
        splitPoint = stack.splitPoint;
        bestMove = Move.Create(splitPoint.bestMove);
        bestValue = Value.Create(splitPoint.bestValue);
        tte = new TTEntry();
        ttMove = excludedMove = Move.MOVE_NONE;
        ttValue = Value.VALUE_NONE;
        Debug.Assert(splitPoint.bestValue > -Value.VALUE_INFINITE && splitPoint.moveCount > 0);
        goto moves_loop;
    }

    moveCount = quietCount = stack.moveCount = 0;
    bestValue = -Value.VALUE_INFINITE;
    stack.ply = stackMinus1.ply + 1;

    // Used to send selDepth info to GUI
    if (PvNode && thisThread.maxPly < stack.ply)
        thisThread.maxPly = stack.ply;

    if (!RootNode)
    {
        // Step 2. Check for aborted search and immediate draw
        if (Signals.stop || pos.is_draw() || stack.ply >= _.MAX_PLY)
            return stack.ply >= _.MAX_PLY && !inCheck ? Eval.evaluate(false, pos) : DrawValue[pos.side_to_move()];

        // Step 3. Mate distance pruning. Even if we mate at the next move our score
        // would be at best mate_in(ss.ply+1), but if alpha is already bigger because
        // a shorter mate was found upward in the tree then there is no need to search
        // because we will never beat the current alpha. Same logic but with reversed
        // signs applies also in the opposite condition of being mated instead of giving
        // mate. In this case return a fail-high score.
        alpha = Value.Create(Math.Max(Value.mated_in(stack.ply), alpha));
        beta = Value.Create(Math.Min(Value.mate_in(stack.ply + 1), beta));
        if (alpha >= beta)
            return alpha;
    }

    Debug.Assert(0 <= stack.ply && stack.ply < _.MAX_PLY);
    stack.currentMove = stack.ttMove = stackPlus1.excludedMove = bestMove = Move.MOVE_NONE;
    stackPlus1.skipEarlyPruning = false;
    stackPlus1.reduction = Depth.DEPTH_ZERO;
    stackPlus2.killers0 = stackPlus2.killers1 = Move.MOVE_NONE;

    // Step 4. Transposition table lookup
    // We don't want the score of a partial search to overwrite a previous full search
    // TT value, so we use a different position key in case of an excluded move.
    excludedMove = stack.excludedMove;
    posKey = excludedMove != 0 ? pos.exclusion_key() : pos.key();
    tte = TranspositionTable.probe(posKey, out ttHit);
    stack.ttMove = ttMove = RootNode ? RootMoves[(int) PVIdx].pv[0] : ttHit ? tte.move() : Move.MOVE_NONE;
    ttValue = ttHit ? value_from_tt(tte.value(), stack.ply) : Value.VALUE_NONE;

    // At non-PV nodes we check for a fail high/low. We don't prune at PV nodes
    if (!PvNode && ttHit && tte.depth() >= depth && ttValue != Value.VALUE_NONE // Only in case of TT access race
        && (ttValue >= beta ? (tte.bound() & Bound.BOUND_LOWER) != 0 : (tte.bound() & Bound.BOUND_UPPER) != 0))
    {
        stack.currentMove = ttMove; // Can be Move.MOVE_NONE

        // If ttMove is quiet, update killers, history, counter move on TT hit
        if (ttValue >= beta && ttMove != 0 && !pos.capture_or_promotion(ttMove))
            update_stats(pos, ss, ttMove, depth, null, 0);
        return ttValue;
    }

    // Step 4a. Tablebase probe
    if (!RootNode && Tablebases.Cardinality != 0)
    {
        var piecesCnt = pos.count(PieceType.ALL_PIECES, Color.WHITE) + pos.count(PieceType.ALL_PIECES, Color.BLACK);
        if (piecesCnt <= Tablebases.Cardinality && (piecesCnt < Tablebases.Cardinality || depth >= Tablebases.ProbeDepth) && pos.rule50_count() == 0)
        {
            var found = 0;
            var v = Tablebases.probe_wdl(pos, ref found);
            if (found != 0)
            {
                Tablebases.Hits++;
                var drawScore = Tablebases.UseRule50 ? 1 : 0;
                // Map the WDL result onto a mate-bounded score.
                value = v < -drawScore ? -Value.VALUE_MATE + _.MAX_PLY + stack.ply : v > drawScore ? Value.VALUE_MATE - _.MAX_PLY - stack.ply : Value.VALUE_DRAW + 2*v*drawScore;
                tte.save(posKey, value_to_tt(value, stack.ply), Bound.BOUND_EXACT, new Depth(Math.Min(_.MAX_PLY - Depth.ONE_PLY_C, (int)depth + 6*Depth.ONE_PLY_C)), Move.MOVE_NONE, Value.VALUE_NONE, TranspositionTable.generation());
                return value;
            }
        }
    }

    // Step 5. Evaluate the position statically
    if (inCheck)
    {
        stack.staticEval = Value.VALUE_NONE;
        goto moves_loop;
    }
    if (ttHit)
    {
        // Never assume anything on values stored in TT
        if ((stack.staticEval = eval = tte.eval()) == Value.VALUE_NONE)
            eval = stack.staticEval = Eval.evaluate(false, pos);

        // Can ttValue be used as a better position evaluation?
        if (ttValue != Value.VALUE_NONE)
            if ((tte.bound() & (ttValue > eval ? Bound.BOUND_LOWER : Bound.BOUND_UPPER)) != 0)
                eval = ttValue;
    }
    else
    {
        eval = stack.staticEval = stackMinus1.currentMove != Move.MOVE_NULL ? Eval.evaluate(false, pos) : -stackMinus1.staticEval + 2*Eval.Tempo;
        tte.save(posKey, Value.VALUE_NONE, Bound.BOUND_NONE, Depth.DEPTH_NONE, Move.MOVE_NONE, stack.staticEval, TranspositionTable.generation());
    }
    if (stack.skipEarlyPruning)
        goto moves_loop;

    // Step 6. Razoring (skipped when in check)
    if (!PvNode && depth < 4*Depth.ONE_PLY && eval + razor_margin(depth) <= alpha && ttMove == Move.MOVE_NONE)
    {
        if (depth <= Depth.ONE_PLY_C && eval + razor_margin(3*Depth.ONE_PLY) <= alpha)
            return qsearch(NodeType.NonPV, false, pos, ss, alpha, beta, Depth.DEPTH_ZERO);
        var ralpha = alpha - razor_margin(depth);
        var v = qsearch(NodeType.NonPV, false, pos, ss, ralpha, ralpha + 1, Depth.DEPTH_ZERO);
        if (v <= ralpha)
            return v;
    }

    // Step 7. Futility pruning: child node (skipped when in check)
    if (!RootNode && depth < 7*Depth.ONE_PLY && eval - futility_margin(depth) >= beta && eval < Value.VALUE_KNOWN_WIN // Do not return unproven wins
        && pos.non_pawn_material(pos.side_to_move())!=0)
        return eval - futility_margin(depth);

    // Step 8. Null move search with verification search (is omitted in PV nodes)
    if (!PvNode && depth >= 2*Depth.ONE_PLY_C && eval >= beta && pos.non_pawn_material(pos.side_to_move())!=0)
    {
        stack.currentMove = Move.MOVE_NULL;
        Debug.Assert(eval - beta >= 0);

        // Null move dynamic reduction based on depth and value
        var R = ((823 + 67*depth)/256 + Math.Min((eval - beta)/Value.PawnValueMg, 3))*(int) Depth.ONE_PLY;
        pos.do_null_move(st);
        stackPlus1.skipEarlyPruning = true;
        var nullValue = depth - R < Depth.ONE_PLY ? -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, Depth.DEPTH_ZERO) : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, depth - R, !cutNode);
        stackPlus1.skipEarlyPruning = false;
        pos.undo_null_move();
        if (nullValue >= beta)
        {
            // Do not return unproven mate scores
            if (nullValue >= Value.VALUE_MATE_IN_MAX_PLY)
                nullValue = beta;
            if (depth < 12*Depth.ONE_PLY && Math.Abs(beta) < Value.VALUE_KNOWN_WIN)
                return nullValue;

            // Do verification search at high depths
            stack.skipEarlyPruning = true;
            var v = depth - R < Depth.ONE_PLY ? qsearch(NodeType.NonPV, false, pos, ss, beta - 1, beta, Depth.DEPTH_ZERO) : search(NodeType.NonPV, false, pos, ss, beta - 1, beta, depth - R, false);
            stack.skipEarlyPruning = false;
            if (v >= beta)
                return nullValue;
        }
    }

    // Step 9. ProbCut (skipped when in check)
    // If we have a very good capture (i.e. SEE > seeValues[captured_piece_type])
    // and a reduced search returns a value much above beta, we can (almost) safely
    // prune the previous move.
    if (!PvNode && depth >= 5*Depth.ONE_PLY_C && Math.Abs(beta) < Value.VALUE_MATE_IN_MAX_PLY)
    {
        var rbeta = Value.Create(Math.Min(beta + 200, Value.VALUE_INFINITE));
        var rdepth = depth - 4*Depth.ONE_PLY;
        Debug.Assert(rdepth >= Depth.ONE_PLY_C);
        Debug.Assert(stackMinus1.currentMove != Move.MOVE_NONE);
        Debug.Assert(stackMinus1.currentMove != Move.MOVE_NULL);
        var mp2 = new MovePicker(pos, ttMove, History, CounterMovesHistory, Value.PieceValue[(int) Phase.MG][pos.captured_piece_type()]);
        var ci2 = new CheckInfo(pos);
        while ((move = mp2.next_move(false)) != Move.MOVE_NONE)
            if (pos.legal(move, ci2.pinned))
            {
                stack.currentMove = move;
                pos.do_move(move, st, pos.gives_check(move, ci2));
                value = -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -rbeta, -rbeta + 1, rdepth, !cutNode);
                pos.undo_move(move);
                if (value >= rbeta)
                    return value;
            }
    }

    // Step 10. Internal iterative deepening (skipped when in check)
    if (depth >= (PvNode ? 5*Depth.ONE_PLY_C : 8*Depth.ONE_PLY_C) && ttMove == 0 && (PvNode || stack.staticEval + 256 >= beta))
    {
        var d = depth - 2*Depth.ONE_PLY - (PvNode ? Depth.DEPTH_ZERO : depth/4);
        stack.skipEarlyPruning = true;
        search(PvNode ? NodeType.PV : NodeType.NonPV, false, pos, ss, alpha, beta, d, true);
        stack.skipEarlyPruning = false;
        tte = TranspositionTable.probe(posKey, out ttHit);
        ttMove = ttHit ? tte.move() : Move.MOVE_NONE;
    }

moves_loop: // When in check and at SpNode search starts from here

    var prevMoveSq = Move.to_sq(stackMinus1.currentMove);
    var countermove = Countermoves.table[pos.piece_on(prevMoveSq), prevMoveSq];
    var mp = new MovePicker(pos, ttMove, depth, History, CounterMovesHistory, countermove, ss);
    var ci = new CheckInfo(pos);
    value = bestValue; // Workaround a bogus 'uninitialized' warning under gcc
    var improving = stack.staticEval >= stackMinus2.staticEval || stack.staticEval == Value.VALUE_NONE || stackMinus2.staticEval == Value.VALUE_NONE;
    var singularExtensionNode = !RootNode && !SpNode
        && depth >= 8*Depth.ONE_PLY_C
        && ttMove != Move.MOVE_NONE
        /* && ttValue != Value.VALUE_NONE Already implicit in the next condition */
        && Math.Abs(ttValue) < Value.VALUE_KNOWN_WIN
        && excludedMove == 0 // Recursive singular search is not allowed
        && ((tte.bound() & Bound.BOUND_LOWER) != 0)
        && tte.depth() >= depth - 3*Depth.ONE_PLY_C;
    var quietsSearched = new MoveT[64];

    // Step 11. Loop through moves
    // Loop through all pseudo-legal moves until no moves remain or a beta cutoff occurs
    while ((move = mp.next_move(SpNode)) != Move.MOVE_NONE)
    {
        Utils.WriteToLog($"mp.next_move = {(int) move}");
        Debug.Assert(Move.is_ok(move));
        if (move == excludedMove)
            continue;

        // At root obey the "searchmoves" option and skip moves not listed in Root
        // Move List. As a consequence any illegal move is also skipped. In MultiPV
        // mode we also skip PV moves which have been already searched.
        if (RootNode && RootMoves.All(rootMove => rootMove.pv[0] != move))
            continue;

        if (SpNode)
        {
            // Shared counter cannot be decremented later if the move turns out to be illegal
            if (!pos.legal(move, ci.pinned))
                continue;
            stack.moveCount = moveCount = ++splitPoint.moveCount;
            ThreadHelper.lock_release(splitPoint.spinLock);
        }
        else
            stack.moveCount = ++moveCount;

        if (RootNode)
        {
            Signals.firstRootMove = (moveCount == 1);
            if (thisThread == ThreadPool.main() && TimeManagement.elapsed() > 3000)
                Output.WriteLine(
                    $"info depth {depth/Depth.ONE_PLY} currmove {UCI.move(move, pos.is_chess960())} currmovenumber {moveCount + PVIdx}");
        }

        if (PvNode)
            stackPlus1.pv = new List<MoveT>();

        var extension = Depth.DEPTH_ZERO;
        var captureOrPromotion = pos.capture_or_promotion(move);
        // Cheap check detection for normal moves; full test otherwise.
        var givesCheck = Move.type_of(move) == MoveType.NORMAL && ci.dcCandidates == 0 ? Bitboard.AndWithSquare(ci.checkSquares[Piece.type_of(pos.piece_on(Move.from_sq(move)))], Move.to_sq(move))!=0 : pos.gives_check(move, ci);

        // Step 12. Extend checks
        if (givesCheck && pos.see_sign(move) >= Value.VALUE_ZERO)
            extension = Depth.ONE_PLY;

        // Singular extension search. If all moves but one fail low on a search of
        // (alpha-s, beta-s), and just one fails high on (alpha, beta), then that move
        // is singular and should be extended. To verify this we do a reduced search
        // on all the other moves but the ttMove and if the result is lower than
        // ttValue minus a margin then we extend the ttMove.
        if (singularExtensionNode && move == ttMove && extension == 0 && pos.legal(move, ci.pinned))
        {
            var rBeta = ttValue - 2*depth/Depth.ONE_PLY;
            stack.excludedMove = move;
            stack.skipEarlyPruning = true;
            value = search(NodeType.NonPV, false, pos, ss, rBeta - 1, rBeta, depth/2, cutNode);
            stack.skipEarlyPruning = false;
            stack.excludedMove = Move.MOVE_NONE;
            if (value < rBeta)
                extension = Depth.ONE_PLY;
        }

        // Update the current move (this must be done after singular extension search)
        var newDepth = depth - Depth.ONE_PLY + extension;

        // Step 13. Pruning at shallow depth
        if (!RootNode && !captureOrPromotion && !inCheck && !givesCheck && !pos.advanced_pawn_push(move) && bestValue > Value.VALUE_MATED_IN_MAX_PLY)
        {
            // Move count based pruning
            if (depth < 16*Depth.ONE_PLY && moveCount >= FutilityMoveCounts[improving ? 1 : 0, depth])
            {
                if (SpNode)
                    ThreadHelper.lock_grab(splitPoint.spinLock);
                continue;
            }
            var predictedDepth = newDepth - reduction(PvNode, improving, depth, moveCount);

            // Futility pruning: parent node
            if (predictedDepth < 7*Depth.ONE_PLY)
            {
                var futilityValue = stack.staticEval + futility_margin(predictedDepth) + 256;
                if (futilityValue <= alpha)
                {
                    bestValue = Value.Create(Math.Max(bestValue, futilityValue));
                    if (SpNode)
                    {
                        ThreadHelper.lock_grab(splitPoint.spinLock);
                        if (bestValue > splitPoint.bestValue)
                            splitPoint.bestValue = bestValue;
                    }
                    continue;
                }
            }

            // Prune moves with negative SEE at low depths
            if (predictedDepth < 4*Depth.ONE_PLY && pos.see_sign(move) < Value.VALUE_ZERO)
            {
                if (SpNode)
                    ThreadHelper.lock_grab(splitPoint.spinLock);
                continue;
            }
        }

        // Speculative prefetch as early as possible
        //prefetch(TT.first_entry(pos.key_after(move)));

        // Check for legality just before making the move
        if (!RootNode && !SpNode && !pos.legal(move, ci.pinned))
        {
            stack.moveCount = --moveCount;
            continue;
        }
        stack.currentMove = move;

        // Step 14. Make the move
        pos.do_move(move, st, givesCheck);

        // Step 15. Reduced depth search (LMR). If the move fails high it will be
        // re-searched at full depth.
        bool doFullDepthSearch;
        if (depth >= 3*Depth.ONE_PLY_C && moveCount > 1 && !captureOrPromotion && move != stack.killers0 && move != stack.killers1)
        {
            stack.reduction = reduction(PvNode, improving, depth, moveCount);
            if ((!PvNode && cutNode) || (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] < Value.VALUE_ZERO && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] <= Value.VALUE_ZERO))
                stack.reduction += Depth.ONE_PLY;
            if (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO)
                stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));

            // Decrease reduction for moves that escape a capture
            if (stack.reduction > 0 && Move.type_of(move) == MoveType.NORMAL && Piece.type_of(pos.piece_on(Move.to_sq(move))) != PieceType.PAWN && pos.see(Move.make_move(Move.to_sq(move), Move.from_sq(move))) < Value.VALUE_ZERO)
                stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));
            var d = new Depth(Math.Max(newDepth - (int)stack.reduction, Depth.ONE_PLY_C));
            if (SpNode)
                alpha = Value.Create(splitPoint.alpha);
            value = -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, d, true);
            doFullDepthSearch = (value > alpha && stack.reduction != Depth.DEPTH_ZERO);
            stack.reduction = Depth.DEPTH_ZERO;
        }
        else
            doFullDepthSearch = !PvNode || moveCount > 1;

        // Step 16. Full depth search, when LMR is skipped or fails high
        if (doFullDepthSearch)
        {
            if (SpNode)
                alpha = Value.Create(splitPoint.alpha);
            value = newDepth < Depth.ONE_PLY ? givesCheck ? -qsearch(NodeType.NonPV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO) : -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO) : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, newDepth, !cutNode);
        }

        // For PV nodes only, do a full PV search on the first move or after a fail
        // high (in the latter case search only if value < beta), otherwise let the
        // parent node fail low with value <= alpha and to try another move.
        if (PvNode && (moveCount == 1 || (value > alpha && (RootNode || value < beta))))
        {
            stackPlus1.pv = new List<MoveT>() { Move.MOVE_NONE };
            stackPlus1.pv[0] = Move.MOVE_NONE;
            value = newDepth < Depth.ONE_PLY ? givesCheck ? -qsearch(NodeType.PV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO) : -qsearch(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO) : -search(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, newDepth, false);
        }

        // Step 17. Undo move
        pos.undo_move(move);
        Debug.Assert(value > -Value.VALUE_INFINITE && value < Value.VALUE_INFINITE);

        // Step 18. Check for new best move
        if (SpNode)
        {
            ThreadHelper.lock_grab(splitPoint.spinLock);
            bestValue = Value.Create(splitPoint.bestValue);
            alpha = Value.Create(splitPoint.alpha);
        }

        // Finished searching the move. If a stop or a cutoff occurred, the return
        // value of the search cannot be trusted, and we return immediately without
        // updating best move, PV and TT.
        if (Signals.stop || thisThread.cutoff_occurred())
            return Value.VALUE_ZERO;

        if (RootNode)
        {
            var rm = RootMoves.Find(rootmove => rootmove.pv[0] == move);

            // PV move or new best move ?
            if (moveCount == 1 || value > alpha)
            {
                rm.score = value;
                var firstEntry = rm.pv[0];
                rm.pv.Clear();
                rm.pv.Add(firstEntry);
                Debug.Assert(stackPlus1.pv != null);
                foreach (var m in stackPlus1.pv.TakeWhile(m => m != Move.MOVE_NONE))
                {
                    rm.pv.Add(m);
                }

                // We record how often the best move has been changed in each
                // iteration. This information is used for time management: When
                // the best move changes frequently, we allocate some more time.
                if (moveCount > 1)
                    ++BestMoveChanges;
            }
            else
                // All other moves but the PV are set to the lowest value: this is
                // not a problem when sorting because the sort is stable and the
                // move position in the list is preserved - just the PV is pushed up.
                rm.score = -Value.VALUE_INFINITE;
        }

        if (value > bestValue)
        {
            bestValue = Value.Create(SpNode ? splitPoint.bestValue = value : value);
            if (value > alpha)
            {
                // If there is an easy move for this position, clear it if unstable
                if (PvNode && EasyMove.get(pos.key()) != 0 && (move != EasyMove.get(pos.key()) || moveCount > 1))
                    EasyMove.clear();
                bestMove = Move.Create(SpNode ? splitPoint.bestMove = move : move);
                if (PvNode && !RootNode) // Update pv even in fail-high case
                    update_pv(SpNode ? splitPoint.ss[ss.current].pv : stack.pv, move, stackPlus1.pv);
                if (PvNode && value < beta) // Update alpha! Always alpha < beta
                    alpha = Value.Create(SpNode ? splitPoint.alpha = value : value);
                else
                {
                    Debug.Assert(value >= beta); // Fail high
                    if (SpNode)
                        splitPoint.cutoff = true;
                    break;
                }
            }
        }

        if (!SpNode && !captureOrPromotion && move != bestMove && quietCount < 64)
            quietsSearched[quietCount++] = move;

        // Step 19. Check for splitting the search
        if (!SpNode && ThreadPool.threads.Count >= 2 && depth >= ThreadPool.minimumSplitDepth && (thisThread.activeSplitPoint == null || !thisThread.activeSplitPoint.allSlavesSearching || (ThreadPool.threads.Count > _.MAX_SLAVES_PER_SPLITPOINT && Bitcount.popcount_Full(thisThread.activeSplitPoint.slavesMask) == _.MAX_SLAVES_PER_SPLITPOINT)) && thisThread.splitPointsSize < _.MAX_SPLITPOINTS_PER_THREAD)
        {
            Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < beta);
            thisThread.split(pos, ss, alpha, beta, ref bestValue, ref bestMove, depth, moveCount, mp, NT, cutNode);
            if (Signals.stop || thisThread.cutoff_occurred())
                return Value.VALUE_ZERO;
            if (bestValue >= beta)
                break;
        }
    }

    if (SpNode)
        return bestValue;

    // Following condition would detect a stop or a cutoff set only after move
    // loop has been completed. But in this case bestValue is valid because we
    // have fully searched our subtree, and we can anyhow save the result in TT.
    /* if (Signals.stop || thisThread.cutoff_occurred()) return VALUE_DRAW; */

    // Step 20. Check for mate and stalemate
    // All legal moves have been searched and if there are no legal moves, it
    // must be mate or stalemate. If we are in a singular extension search then
    // return a fail low score.
    if (moveCount == 0)
        bestValue = excludedMove != 0 ? alpha : inCheck ? Value.mated_in(stack.ply) : DrawValue[pos.side_to_move()];

    // Quiet best move: update killers, history and countermoves
    else if (bestMove != 0 && !pos.capture_or_promotion(bestMove))
        update_stats(pos, ss, bestMove, depth, quietsSearched, quietCount);

    // Bonus for prior countermove that caused the fail low
    else if (bestMove==0)
    {
        if (Move.is_ok(stackMinus2.currentMove) && Move.is_ok(stackMinus1.currentMove) && pos.captured_piece_type()==0 && !inCheck && depth >= 3*Depth.ONE_PLY_C)
        {
            var bonus = Value.Create((depth/Depth.ONE_PLY)*(depth/Depth.ONE_PLY));
            var prevSq = Move.to_sq(stackMinus1.currentMove);
            var prevPrevSq = Move.to_sq(stackMinus2.currentMove);
            var flMoveCmh = CounterMovesHistory.table[pos.piece_on(prevPrevSq), prevPrevSq];
            flMoveCmh.updateCMH(pos.piece_on(prevSq), prevSq, bonus);
        }
    }

    // Store the result of this node in the transposition table.
    tte.save(posKey, value_to_tt(bestValue, stack.ply), bestValue >= beta ? Bound.BOUND_LOWER : PvNode && bestMove!=0 ? Bound.BOUND_EXACT : Bound.BOUND_UPPER, depth, bestMove, stack.staticEval, TranspositionTable.generation());
    Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < Value.VALUE_INFINITE);
    return bestValue;
}
/// <summary>
/// Extract the PV starting from pos, using hash entries, both exact scores and bounds.
/// </summary>
/// <param name="pos">Start position; the caller's instance is not modified (a copy is used).</param>
/// <returns>Space-separated move string; "&lt;"/"&gt;" prefixes mark bound entries.</returns>
public string extractPV(Position pos)
{
    pos = new Position(pos); // work on a private copy
    string result = "";
    bool isFirst = true;
    TTEntry entry = probe(pos.historyHash());
    UndoInfo undo = new UndoInfo();
    List <ulong> seenHashes = new List <ulong>();
    bool repetition = false;
    while (entry.type != TTEntry.T_EMPTY)
    {
        // Bound marker: "<" for upper-bound entries, ">" for lower-bound ones.
        string prefix;
        if (entry.type == TTEntry.T_LE)
        {
            prefix = "<";
        }
        else if (entry.type == TTEntry.T_GE)
        {
            prefix = ">";
        }
        else
        {
            prefix = "";
        }
        Move move = new Move(0, 0, 0);
        entry.getMove(move);
        // Only follow the hash move if it is legal in the current position.
        MoveGen gen = new MoveGen();
        MoveGen.MoveList legal = gen.pseudoLegalMoves(pos);
        MoveGen.RemoveIllegal(pos, legal);
        bool isLegal = false;
        for (int i = 0; i < legal.size; i++)
        {
            if (legal.m[i].equals(move))
            {
                isLegal = true;
                break;
            }
        }
        if (!isLegal)
        {
            break; // stale or corrupted hash move
        }
        string moveStr = TextIO.moveTostring(pos, move, false);
        if (repetition)
        {
            break;
        }
        if (!isFirst)
        {
            result += " ";
        }
        result += prefix + moveStr;
        pos.makeMove(move, undo);
        if (seenHashes.Contains(pos.zobristHash()))
        {
            repetition = true; // emit one more move, then stop next iteration
        }
        seenHashes.Add(pos.zobristHash());
        entry = probe(pos.historyHash());
        isFirst = false;
    }
    return(result);
}
// Insert a position into the two-way (cuckoo-style) transposition table.
// key       : Zobrist key of the position.
// sm        : move to store, carrying its score in sm.score.
// type      : entry type (T_EXACT / T_GE / T_LE).
// ply       : current search ply, used to adjust mate scores on store.
// depth     : search depth of the result (clamped to >= 0).
// evalScore : static evaluation to cache alongside the entry.
public void Insert(ulong key, Move sm, int type, int ply, int depth, int evalScore)
{
    if (depth < 0)
    {
        depth = 0;
    }
    int idx0 = h0(key);
    int idx1 = h1(key);
    TTEntry ent = table[idx0];
    byte hashSlot = 0;
    // Prefer whichever of the two candidate slots already holds this key.
    if (ent.key != key)
    {
        ent = table[idx1];
        hashSlot = 1;
    }
    if (ent.key != key)
    {
        // Neither slot holds the key: pick the less valuable slot as victim.
        if (table[idx1].betterThan(table[idx0], generation))
        {
            ent = table[idx0];
            hashSlot = 0;
        }
        // If the victim is still valuable, try to relocate it to its
        // alternate slot before overwriting (cuckoo-hashing displacement).
        if (ent.valuable(generation))
        {
            int altEntIdx = (ent.getHashSlot() == 0) ? h1(ent.key) : h0(ent.key);
            if (ent.betterThan(table[altEntIdx], generation))
            {
                // Copy the victim field-by-field into its alternate slot.
                TTEntry altEnt = table[altEntIdx];
                altEnt.key = ent.key;
                altEnt.move = ent.move;
                altEnt.score = ent.score;
                altEnt.depthSlot = ent.depthSlot;
                altEnt.generation = (byte)ent.generation;
                altEnt.type = ent.type;
                altEnt.setHashSlot(1 - ent.getHashSlot());
                altEnt.evalScore = ent.evalScore;
            }
        }
    }
    // Do not replace a deeper result of the same type with a shallower one,
    // unless the new bound is strictly tighter.
    bool doStore = true;
    if ((ent.key == key) && (ent.getDepth() > depth) && (ent.type == type))
    {
        if (type == TTEntry.T_EXACT)
        {
            doStore = false;
        }
        else if ((type == TTEntry.T_GE) && (sm.score <= ent.score))
        {
            doStore = false;
        }
        else if ((type == TTEntry.T_LE) && (sm.score >= ent.score))
        {
            doStore = false;
        }
    }
    if (doStore)
    {
        // Keep the old stored move when the new one is a null move
        // (from == to) for the same key.
        if ((ent.key != key) || (sm.from != sm.to))
        {
            ent.setMove(sm);
        }
        ent.key = key;
        ent.setScore(sm.score, ply); // ply-adjusts mate scores
        ent.setDepth(depth);
        ent.generation = (byte)generation;
        ent.type = (byte)type;
        ent.setHashSlot(hashSlot);
        ent.evalScore = (short)evalScore;
    }
}
/// <summary>
/// Converts the GTFS "stop_times" table into database entities (TripType,
/// StopEntry, TTEntry, TripTimeType, TimeEntry), compressing trips that share
/// the same stop/time sequence into shared lists. Afterwards assigns shapes
/// and names to trip types, names unknown routes, removes empty trips/routes,
/// and logs compression statistics.
/// Assumes stop_times records are grouped by trip_id; throws
/// InvalidDataException when a trip's records are interrupted.
/// </summary>
private void convertStopTimesTable()
{
    //sdb.RegisterTableType(typeof(TripTypeHeadsign));

    // Maps each distinct stop/time sequence to the trip ids sharing it.
    var set = new SortedDictionary<StopTimeList, List<string>>();
    // Minutes between departure and arrival at each stop (dwell times), for stats.
    var timeDiffs = new List<int>();
    // Tracks already-finished trips to detect non-contiguous trip sequences.
    var seenTrips = new HashSet<string>();
    using (var table = gtfs.GetTable("stop_times"))
    {
        string lastTrip = null;
        StopTimeList curList = null;
        TimeSpan startTime = new TimeSpan();
        foreach (var record in table.Records)
        {
            string trip = record["trip_id"];
            string arrivalTime = record["arrival_time"];
            string departureTime = record["departure_time"];
            if (trip != lastTrip)
            {
                // New trip begins: flush the finished trip's stop list first.
                if (lastTrip != null)
                {
                    if (!set.Keys.Contains(curList))
                    {
                        set.Add(curList, new List<String>());
                    }
                    set[curList].Add(lastTrip);
                }
                startTime = parseGtfsTime(arrivalTime);
                getTrip[trip].StartTime = startTime;
                curList = new StopTimeList(getRouteOfTrip[trip]);
                lastTrip = trip;
                if (seenTrips.Contains(trip))
                {
                    throw new InvalidDataException("Trip contained in an interrupted sequence.");
                }
                seenTrips.Add(trip);
            }
            TimeSpan curTime = parseGtfsTime(arrivalTime);
            TimeSpan curEndTime = parseGtfsTime(departureTime);
            // GTFS times can run past midnight; normalize so the stop time
            // is never before the trip's start time.
            while (curTime < startTime)
            {
                curTime += TimeSpan.FromDays(1);
            }
            // Store stop + offset-from-trip-start + per-stop headsign.
            curList.Add(Tuple.Create(getStop[record["stop_id"]], curTime - startTime, record["stop_headsign"]));
            timeDiffs.Add((int)(curEndTime - curTime).TotalMinutes);
            calculatePercent(table);
        }
        // Flush the very last trip of the table.
        if (lastTrip != null)
        {
            if (!set.Keys.Contains(curList))
            {
                set.Add(curList, new List<String>());
            }
            set[curList].Add(lastTrip);
        }
    }
    foreach (var pair in set)
    {
        pair.Key.CreateLists(pair.Value.Select(str => getTrip[str]).ToList());
    }

    // One TripType per distinct stop list; remember the mapping for the
    // time-list pass below.
    var getTripType = new Dictionary<StopList, TripType>();
    foreach (var stopList in StopTimeList.StopLists)
    {
        TripType tt = new TripType { Route = stopList.Key.Route };
        tt.HeadsignEntries = new List<TripTypeHeadsign>();
        tt.Route.TripTypes = addToList(tt, tt.Route.TripTypes);
        sdb.AddEntity(tt);
        getTripType[stopList.Value] = tt;
        string prevName = tt.Route.Name;
        int pos = 0;
        foreach (var stopAndName in stopList.Key)
        {
            StopEntry se = new StopEntry { Stop = stopAndName.Item1, TripType = tt };
            tt.StopEntries = addToList(se, tt.StopEntries);
            TTEntry te = new TTEntry { Position = pos, Stop = stopAndName.Item1, TripType = tt };
            sdb.AddEntity(se);
            sdb.AddEntity(te);
            // Record a headsign entry only where the headsign changes
            // (run-length encoding of headsigns along the stop list).
            if (stopAndName.Item2 != null && stopAndName.Item2 != prevName)
            {
                TripTypeHeadsign he = new TripTypeHeadsign { Headsign = stopAndName.Item2, StartIndex = (short)pos, TripType = tt };
                tt.HeadsignEntries = addToList(he, tt.HeadsignEntries);
                //sdb.AddEntity(he);
                prevName = stopAndName.Item2;
            }
            pos++;
        }
    }

    // One TripTimeType per distinct time list, linked to its TripType.
    foreach (var timeList in StopTimeList.TimeLists)
    {
        TripTimeType ttt = new TripTimeType { TripType = getTripType[timeList.Key.Base] };
        ttt.TripType.TripTimeTypes = addToList(ttt, ttt.TripType.TripTimeTypes);
        sdb.AddEntity(ttt);
        foreach (var timeEntry in timeList.Key)
        {
            TimeEntry te = new TimeEntry { Time = (short)timeEntry.TotalMinutes, TripTimeType = ttt };
            sdb.AddEntity(te);
            ttt.TimeEntries = addToList(te, ttt.TimeEntries);
        }
        foreach (var trip in timeList.Key.Trips)
        {
            trip.TripTimeType = ttt;
            ttt.Trips = addToList(trip, ttt.Trips);
            //if (ttt.TripType.Shape == null)
            //    ttt.TripType.Shape = getShapeOfTrip[trip];
            //else if (ttt.TripType.Shape != getShapeOfTrip[trip])
            //    throw new InvalidDataException();
        }
    }

    // Drop trips that never appeared in stop_times.txt.
    var emptyTrips = sdb.GetTable<Trip>().Where(t => t.TripTimeType == null).ToList();
    // NOTE(review): "Log" (capital) here vs. "log" everywhere else in this
    // method — confirm both overloads exist or unify the casing.
    Log(0, "Trips missing from stop_times.txt count: " + emptyTrips.Count);
    sdb.RemoveEntityAll(emptyTrips);

    // Setting shapes to TripTypes: pick the most common shape among the
    // type's trips; the placeholder nullShape always loses the vote.
    int wrongShapeTrips = 0;
    foreach (var tt in sdb.GetTable<TripType>())
    {
        var tripAndShapes = tt.Trips.Select(trip => new { Trip = trip, Shape = getShapeOfTrip[trip] }).ToList();
        tt.Shape = tripAndShapes.GroupBy(x => x.Shape).MaxBy(x => x.Key == nullShape ? int.MinValue : x.Count()).Key;
        wrongShapeTrips += tripAndShapes.Where(x => x.Shape != tt.Shape).Count();
    }
    log(0, "Shapes set to triptypes. Trips with wrong shape: " + wrongShapeTrips * 100 / getTrip.Count + "%");

    // Setting names to TripTypes: the most frequent non-empty trip name.
    int wrongNameTrips = 0;
    foreach (var tt in sdb.GetTable<TripType>())
    {
        var ttNames = tt.Trips.Select(trip => getNameOfTrip[trip]).Except(new string[] { null, "" }).ToList();
        if (ttNames.Any())
        {
            tt.Name = ttNames.MostFrequent();
            wrongNameTrips += ttNames.Except(new string[] { tt.Name }).Count();
        }
    }
    log(0, "Names set to triptypes. Trips with wrong name: " + wrongNameTrips * 100 / getTrip.Count + "%");

    // Setting unknown trip-type names: fall back to the headsign covering
    // the most stops, or to the last stop's name when there is no headsign.
    var unknownTripTypes = sdb.GetTable<TripType>().Where(tt => tt.Name == "" || tt.Name == null).ToList();
    foreach (var tt in unknownTripTypes)
    {
        if (tt.HeadsignEntries.Any())
        {
            // Weight each headsign by the number of stops it spans
            // (distance to the next headsign entry, or to the end).
            var names = new Dictionary<string, int>();
            for (int i = 0; i < tt.HeadsignEntries.Count; i++)
            {
                var headsignEntry = tt.HeadsignEntries[i];
                int size = (i < tt.HeadsignEntries.Count - 1 ? tt.HeadsignEntries[i + 1].StartIndex : tt.StopEntries.Count) - headsignEntry.StartIndex;
                int oldValue = 0;
                names.TryGetValue(headsignEntry.Headsign, out oldValue);
                names[headsignEntry.Headsign] = oldValue + size;
            }
            tt.Name = names.MaxBy(n => n.Value).Key;
        }
        else
        {
            var stoplist = sdb.GetTable<StopEntry>().Where(se => se.TripType == tt);
            tt.Name = stoplist.Last().Stop.Name;
        }
    }
    var unknownRoutes = sdb.GetTable<Route>().Where(r => r.Name == "Unknown" || r.Name == "" || r.Name == null).ToList();
    foreach (var route in unknownRoutes)
    {
        var triptypes = sdb.GetTable<TripType>().Where(tt => tt.Route == route).ToList();
        if (triptypes.Count == 0)
        {
            // Route with no trip types carries no information; drop it.
            sdb.RemoveEntity(route);
        }
        else
        {
            // Name the route after the trip-type name covering the most trips.
            route.Name = triptypes.Where(tt => tt.Name != "" && tt.Name != null).GroupBy(tt => tt.Name).MaxBy(x => x.Sum(y => y.Trips.Count())).Key;
        }
    }
    log(0, "Average end-stop time difference: " + timeDiffs.Average() + ", max diff: " + timeDiffs.Max());
    log(0, "Compressing stop_times done! First compression efficiency: " + set.Average(x => x.Value.Count));
    // Rough size estimates — presumably 4 bytes per stop reference and
    // 2 bytes per time entry; confirm against the db record layout.
    long firstSize = set.Sum(x => x.Key.Count * (4 + 2));
    long totalSize = StopTimeList.StopLists.Sum(x => x.Key.Count * 4) + StopTimeList.TimeLists.Sum(x => x.Key.Count * 2);
    log(0, "Second total db compression rate: " + (totalSize * 100 / firstSize) + "%");
    log(0, "Stoplist number compression rate: " + (StopTimeList.StopLists.Count * 100 / set.Count) + "%");
    int changingCount = sdb.GetTable<TripType>().Where(tt => tt.HeadsignEntries.Count > 1).Count();
    int totalTripCount = sdb.GetTable<TripType>().Count();
    log(0, string.Format("Triptype with changing name: {0}/{1} ({2}%)", changingCount, totalTripCount, changingCount * 100 / totalTripCount));
}