// search<>() is the main search function for both PV and non-PV nodes and for
// normal and SplitPoint nodes. When called just after a split point the search
// is simpler because we have already probed the hash table, done a null move
// search, and searched the first move before splitting, so we don't have to
// repeat all this work again. We also don't need to store anything to the hash
// table here: that is taken care of after we return from the split point.
private static ValueT search(NodeType NT, bool SpNode, Position pos, StackArrayWrapper ss, ValueT alpha, ValueT beta, Depth depth, bool cutNode)
{
    Utils.WriteToLog($"search(NT={(int) NT}, SpNode={(SpNode ? 1 : 0)}, pos={pos.key()}, ss, alpha={alpha}, beta={beta}, depth={(int) depth}, cutNode={(cutNode ? 1 : 0)})");

    var RootNode = NT == NodeType.Root;
    var PvNode = RootNode || NT == NodeType.PV;

    Debug.Assert(-Value.VALUE_INFINITE <= alpha && alpha < beta && beta <= Value.VALUE_INFINITE);
    Debug.Assert(PvNode || (alpha == beta - 1));
    Debug.Assert(depth > Depth.DEPTH_ZERO);

    var st = new StateInfo();
    TTEntry tte;
    SplitPoint splitPoint = null;
    ulong posKey = 0;
    MoveT ttMove, move, excludedMove, bestMove;
    ValueT bestValue, value, ttValue, eval;
    bool ttHit;
    int moveCount = 0;
    int quietCount = 0;

    var stack = ss[ss.current];
    var stackPlus1 = ss[ss.current + 1];
    var stackPlus2 = ss[ss.current + 2];
    var stackMinus1 = ss[ss.current - 1];
    var stackMinus2 = ss[ss.current - 2];

    // Step 1. Initialize node
    var thisThread = pos.this_thread();
    bool inCheck = pos.checkers() != 0;

    if (SpNode)
    {
        splitPoint = stack.splitPoint;
        bestMove = Move.Create(splitPoint.bestMove);
        bestValue = Value.Create(splitPoint.bestValue);
        tte = new TTEntry();
        ttMove = excludedMove = Move.MOVE_NONE;
        ttValue = Value.VALUE_NONE;

        Debug.Assert(splitPoint.bestValue > -Value.VALUE_INFINITE && splitPoint.moveCount > 0);

        goto moves_loop;
    }

    moveCount = quietCount = stack.moveCount = 0;
    bestValue = -Value.VALUE_INFINITE;
    stack.ply = stackMinus1.ply + 1;

    // Used to send selDepth info to GUI
    if (PvNode && thisThread.maxPly < stack.ply)
        thisThread.maxPly = stack.ply;

    if (!RootNode)
    {
        // Step 2. Check for aborted search and immediate draw
        if (Signals.stop || pos.is_draw() || stack.ply >= _.MAX_PLY)
            return stack.ply >= _.MAX_PLY && !inCheck
                ? Eval.evaluate(false, pos)
                : DrawValue[pos.side_to_move()];

        // Step 3. Mate distance pruning. Even if we mate at the next move our score
        // would be at best mate_in(ss.ply+1), but if alpha is already bigger because
        // a shorter mate was found upward in the tree, then there is no need to search
        // because we will never beat the current alpha. The same logic with reversed
        // signs applies when we are being mated instead of giving mate, in which case
        // we return a fail-high score.
        alpha = Value.Create(Math.Max(Value.mated_in(stack.ply), alpha));
        beta = Value.Create(Math.Min(Value.mate_in(stack.ply + 1), beta));
        if (alpha >= beta)
            return alpha;
    }
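    // A worked instance of the clamp above (a sketch, assuming the usual Stockfish
    // convention mate_in(ply) == VALUE_MATE - ply and mated_in(ply) == -VALUE_MATE + ply):
    // at stack.ply == 4, beta is clamped to mate_in(5) == VALUE_MATE - 5. If alpha already
    // holds mate_in(3) from a shorter mate found higher up, then alpha >= beta and we
    // return alpha immediately; nothing reachable from this node can beat that mate.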
    Debug.Assert(0 <= stack.ply && stack.ply < _.MAX_PLY);

    stack.currentMove = stack.ttMove = stackPlus1.excludedMove = bestMove = Move.MOVE_NONE;
    stackPlus1.skipEarlyPruning = false;
    stackPlus1.reduction = Depth.DEPTH_ZERO;
    stackPlus2.killers0 = stackPlus2.killers1 = Move.MOVE_NONE;

    // Step 4. Transposition table lookup
    // We don't want the score of a partial search to overwrite a previous full search
    // TT value, so we use a different position key in case of an excluded move.
    excludedMove = stack.excludedMove;
    posKey = excludedMove != 0 ? pos.exclusion_key() : pos.key();
    tte = TranspositionTable.probe(posKey, out ttHit);
    stack.ttMove = ttMove = RootNode
        ? RootMoves[(int) PVIdx].pv[0]
        : ttHit ? tte.move() : Move.MOVE_NONE;
    ttValue = ttHit ? value_from_tt(tte.value(), stack.ply) : Value.VALUE_NONE;

    // At non-PV nodes we check for a fail high/low. We don't prune at PV nodes
    if (!PvNode
        && ttHit
        && tte.depth() >= depth
        && ttValue != Value.VALUE_NONE // Only in case of TT access race
        && (ttValue >= beta
            ? (tte.bound() & Bound.BOUND_LOWER) != 0
            : (tte.bound() & Bound.BOUND_UPPER) != 0))
    {
        stack.currentMove = ttMove; // Can be Move.MOVE_NONE

        // If ttMove is quiet, update killers, history, counter move on TT hit
        if (ttValue >= beta && ttMove != 0 && !pos.capture_or_promotion(ttMove))
            update_stats(pos, ss, ttMove, depth, null, 0);

        return ttValue;
    }

    // Step 4a. Tablebase probe
    if (!RootNode && Tablebases.Cardinality != 0)
    {
        var piecesCnt = pos.count(PieceType.ALL_PIECES, Color.WHITE) + pos.count(PieceType.ALL_PIECES, Color.BLACK);

        if (piecesCnt <= Tablebases.Cardinality
            && (piecesCnt < Tablebases.Cardinality || depth >= Tablebases.ProbeDepth)
            && pos.rule50_count() == 0)
        {
            var found = 0;
            var v = Tablebases.probe_wdl(pos, ref found);

            if (found != 0)
            {
                Tablebases.Hits++;

                var drawScore = Tablebases.UseRule50 ? 1 : 0;

                value = v < -drawScore
                    ? -Value.VALUE_MATE + _.MAX_PLY + stack.ply
                    : v > drawScore
                        ? Value.VALUE_MATE - _.MAX_PLY - stack.ply
                        : Value.VALUE_DRAW + 2*v*drawScore;

                tte.save(posKey, value_to_tt(value, stack.ply), Bound.BOUND_EXACT,
                    new Depth(Math.Min(_.MAX_PLY - Depth.ONE_PLY_C, (int) depth + 6*Depth.ONE_PLY_C)),
                    Move.MOVE_NONE, Value.VALUE_NONE, TranspositionTable.generation());

                return value;
            }
        }
    }

    // Step 5. Evaluate the position statically
    if (inCheck)
    {
        stack.staticEval = Value.VALUE_NONE;
        goto moves_loop;
    }

    if (ttHit)
    {
        // Never assume anything on values stored in TT
        if ((stack.staticEval = eval = tte.eval()) == Value.VALUE_NONE)
            eval = stack.staticEval = Eval.evaluate(false, pos);

        // Can ttValue be used as a better position evaluation?
        if (ttValue != Value.VALUE_NONE)
            if ((tte.bound() & (ttValue > eval ? Bound.BOUND_LOWER : Bound.BOUND_UPPER)) != 0)
                eval = ttValue;
    }
    else
    {
        eval = stack.staticEval = stackMinus1.currentMove != Move.MOVE_NULL
            ? Eval.evaluate(false, pos)
            : -stackMinus1.staticEval + 2*Eval.Tempo;

        tte.save(posKey, Value.VALUE_NONE, Bound.BOUND_NONE, Depth.DEPTH_NONE, Move.MOVE_NONE,
            stack.staticEval, TranspositionTable.generation());
    }

    if (stack.skipEarlyPruning)
        goto moves_loop;

    // Step 6. Razoring (skipped when in check)
    if (!PvNode
        && depth < 4*Depth.ONE_PLY
        && eval + razor_margin(depth) <= alpha
        && ttMove == Move.MOVE_NONE)
    {
        if (depth <= Depth.ONE_PLY_C
            && eval + razor_margin(3*Depth.ONE_PLY) <= alpha)
            return qsearch(NodeType.NonPV, false, pos, ss, alpha, beta, Depth.DEPTH_ZERO);

        var ralpha = alpha - razor_margin(depth);
        var v = qsearch(NodeType.NonPV, false, pos, ss, ralpha, ralpha + 1, Depth.DEPTH_ZERO);
        if (v <= ralpha)
            return v;
    }

    // Step 7. Futility pruning: child node (skipped when in check)
    if (!RootNode
        && depth < 7*Depth.ONE_PLY
        && eval - futility_margin(depth) >= beta
        && eval < Value.VALUE_KNOWN_WIN // Do not return unproven wins
        && pos.non_pawn_material(pos.side_to_move()) != 0)
        return eval - futility_margin(depth);

    // Step 8. Null move search with verification search (is omitted in PV nodes)
    if (!PvNode
        && depth >= 2*Depth.ONE_PLY_C
        && eval >= beta
        && pos.non_pawn_material(pos.side_to_move()) != 0)
    {
        stack.currentMove = Move.MOVE_NULL;

        Debug.Assert(eval - beta >= 0);

        // Null move dynamic reduction based on depth and value
        var R = ((823 + 67*depth)/256 + Math.Min((eval - beta)/Value.PawnValueMg, 3))*(int) Depth.ONE_PLY;

        pos.do_null_move(st);
        stackPlus1.skipEarlyPruning = true;
        var nullValue = depth - R < Depth.ONE_PLY
            ? -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, Depth.DEPTH_ZERO)
            : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, depth - R, !cutNode);
        stackPlus1.skipEarlyPruning = false;
        pos.undo_null_move();

        if (nullValue >= beta)
        {
            // Do not return unproven mate scores
            if (nullValue >= Value.VALUE_MATE_IN_MAX_PLY)
                nullValue = beta;

            if (depth < 12*Depth.ONE_PLY && Math.Abs(beta) < Value.VALUE_KNOWN_WIN)
                return nullValue;

            // Do verification search at high depths
            stack.skipEarlyPruning = true;
            var v = depth - R < Depth.ONE_PLY
                ? qsearch(NodeType.NonPV, false, pos, ss, beta - 1, beta, Depth.DEPTH_ZERO)
                : search(NodeType.NonPV, false, pos, ss, beta - 1, beta, depth - R, false);
            stack.skipEarlyPruning = false;

            if (v >= beta)
                return nullValue;
        }
    }
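    // A rough feel for the dynamic reduction R above (a sketch, assuming Depth.ONE_PLY == 1
    // internally; the port may use a coarser granularity): with depth == 10 and eval - beta
    // worth one pawn, R = (823 + 67*10)/256 + min(1, 3) = 5 + 1 = 6, so the null-move search
    // runs at depth 10 - 6 = 4. A larger static-eval surplus over beta can add up to 3 more
    // plies of reduction.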
    // Step 9. ProbCut (skipped when in check)
    // If we have a very good capture (i.e. SEE > seeValues[captured_piece_type])
    // and a reduced search returns a value much above beta, we can (almost) safely
    // prune the previous move.
    if (!PvNode
        && depth >= 5*Depth.ONE_PLY_C
        && Math.Abs(beta) < Value.VALUE_MATE_IN_MAX_PLY)
    {
        var rbeta = Value.Create(Math.Min(beta + 200, Value.VALUE_INFINITE));
        var rdepth = depth - 4*Depth.ONE_PLY;

        Debug.Assert(rdepth >= Depth.ONE_PLY_C);
        Debug.Assert(stackMinus1.currentMove != Move.MOVE_NONE);
        Debug.Assert(stackMinus1.currentMove != Move.MOVE_NULL);

        var mp2 = new MovePicker(pos, ttMove, History, CounterMovesHistory,
            Value.PieceValue[(int) Phase.MG][pos.captured_piece_type()]);
        var ci2 = new CheckInfo(pos);

        while ((move = mp2.next_move(false)) != Move.MOVE_NONE)
            if (pos.legal(move, ci2.pinned))
            {
                stack.currentMove = move;
                pos.do_move(move, st, pos.gives_check(move, ci2));
                value = -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1),
                    -rbeta, -rbeta + 1, rdepth, !cutNode);
                pos.undo_move(move);
                if (value >= rbeta)
                    return value;
            }
    }

    // Step 10. Internal iterative deepening (skipped when in check)
    if (depth >= (PvNode ? 5*Depth.ONE_PLY_C : 8*Depth.ONE_PLY_C)
        && ttMove == 0
        && (PvNode || stack.staticEval + 256 >= beta))
    {
        var d = depth - 2*Depth.ONE_PLY - (PvNode ? Depth.DEPTH_ZERO : depth/4);
        stack.skipEarlyPruning = true;
        search(PvNode ? NodeType.PV : NodeType.NonPV, false, pos, ss, alpha, beta, d, true);
        stack.skipEarlyPruning = false;

        tte = TranspositionTable.probe(posKey, out ttHit);
        ttMove = ttHit ? tte.move() : Move.MOVE_NONE;
    }
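    // Sizing the internal iterative deepening search above (a sketch, in ONE_PLY units):
    // a non-PV node at depth 16 searches at d = 16 - 2 - 16/4 = 10, while a PV node of the
    // same depth uses d = 16 - 2 = 14. The score of that search is discarded; we only
    // re-probe the TT to pick up the move it stored as our ttMove.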
moves_loop: // When in check and at SpNode search starts from here

    var prevMoveSq = Move.to_sq(stackMinus1.currentMove);
    var countermove = Countermoves.table[pos.piece_on(prevMoveSq), prevMoveSq];

    var mp = new MovePicker(pos, ttMove, depth, History, CounterMovesHistory, countermove, ss);
    var ci = new CheckInfo(pos);
    value = bestValue; // Workaround a bogus 'uninitialized' warning under gcc
    var improving = stack.staticEval >= stackMinus2.staticEval
                    || stack.staticEval == Value.VALUE_NONE
                    || stackMinus2.staticEval == Value.VALUE_NONE;

    var singularExtensionNode = !RootNode
                                && !SpNode
                                && depth >= 8*Depth.ONE_PLY_C
                                && ttMove != Move.MOVE_NONE
                                /* && ttValue != Value.VALUE_NONE Already implicit in the next condition */
                                && Math.Abs(ttValue) < Value.VALUE_KNOWN_WIN
                                && excludedMove == 0 // Recursive singular search is not allowed
                                && ((tte.bound() & Bound.BOUND_LOWER) != 0)
                                && tte.depth() >= depth - 3*Depth.ONE_PLY_C;

    var quietsSearched = new MoveT[64];

    // Step 11. Loop through moves
    // Loop through all pseudo-legal moves until no moves remain or a beta cutoff occurs
    while ((move = mp.next_move(SpNode)) != Move.MOVE_NONE)
    {
        Utils.WriteToLog($"mp.next_move = {(int) move}");
        Debug.Assert(Move.is_ok(move));

        if (move == excludedMove)
            continue;

        // At root obey the "searchmoves" option and skip moves not listed in Root
        // Move List. As a consequence any illegal move is also skipped. In MultiPV
        // mode we also skip PV moves which have been already searched.
        if (RootNode && RootMoves.All(rootMove => rootMove.pv[0] != move))
            continue;

        if (SpNode)
        {
            // Shared counter cannot be decremented later if the move turns out to be illegal
            if (!pos.legal(move, ci.pinned))
                continue;

            stack.moveCount = moveCount = ++splitPoint.moveCount;
            ThreadHelper.lock_release(splitPoint.spinLock);
        }
        else
            stack.moveCount = ++moveCount;

        if (RootNode)
        {
            Signals.firstRootMove = (moveCount == 1);

            if (thisThread == ThreadPool.main() && TimeManagement.elapsed() > 3000)
                Output.WriteLine(
                    $"info depth {depth/Depth.ONE_PLY} currmove {UCI.move(move, pos.is_chess960())} currmovenumber {moveCount + PVIdx}");
        }

        if (PvNode)
            stackPlus1.pv = new List<MoveT>();

        var extension = Depth.DEPTH_ZERO;
        var captureOrPromotion = pos.capture_or_promotion(move);

        var givesCheck = Move.type_of(move) == MoveType.NORMAL && ci.dcCandidates == 0
            ? Bitboard.AndWithSquare(ci.checkSquares[Piece.type_of(pos.piece_on(Move.from_sq(move)))], Move.to_sq(move)) != 0
            : pos.gives_check(move, ci);

        // Step 12. Extend checks
        if (givesCheck && pos.see_sign(move) >= Value.VALUE_ZERO)
            extension = Depth.ONE_PLY;

        // Singular extension search. If all moves but one fail low on a search of
        // (alpha-s, beta-s), and just one fails high on (alpha, beta), then that move
        // is singular and should be extended. To verify this we do a reduced search
        // on all the other moves but the ttMove and if the result is lower than
        // ttValue minus a margin then we extend the ttMove.
        if (singularExtensionNode
            && move == ttMove
            && extension == 0
            && pos.legal(move, ci.pinned))
        {
            var rBeta = ttValue - 2*depth/Depth.ONE_PLY;
            stack.excludedMove = move;
            stack.skipEarlyPruning = true;
            value = search(NodeType.NonPV, false, pos, ss, rBeta - 1, rBeta, depth/2, cutNode);
            stack.skipEarlyPruning = false;
            stack.excludedMove = Move.MOVE_NONE;

            if (value < rBeta)
                extension = Depth.ONE_PLY;
        }

        // Update the current move (this must be done after singular extension search)
        var newDepth = depth - Depth.ONE_PLY + extension;
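        // A worked instance of the singular-extension test above (a sketch, in ONE_PLY units):
        // with depth == 12 and ttValue == 50, rBeta = 50 - 2*12 = 26, and the verification
        // search runs at depth 6 with window (25, 26) and the ttMove excluded. If no other
        // move reaches 26, the ttMove looks singular and gets a one-ply extension; otherwise
        // newDepth stays at depth - ONE_PLY.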
        // Step 13. Pruning at shallow depth
        if (!RootNode
            && !captureOrPromotion
            && !inCheck
            && !givesCheck
            && !pos.advanced_pawn_push(move)
            && bestValue > Value.VALUE_MATED_IN_MAX_PLY)
        {
            // Move count based pruning
            if (depth < 16*Depth.ONE_PLY
                && moveCount >= FutilityMoveCounts[improving ? 1 : 0, depth])
            {
                if (SpNode)
                    ThreadHelper.lock_grab(splitPoint.spinLock);

                continue;
            }

            var predictedDepth = newDepth - reduction(PvNode, improving, depth, moveCount);

            // Futility pruning: parent node
            if (predictedDepth < 7*Depth.ONE_PLY)
            {
                var futilityValue = stack.staticEval + futility_margin(predictedDepth) + 256;

                if (futilityValue <= alpha)
                {
                    bestValue = Value.Create(Math.Max(bestValue, futilityValue));

                    if (SpNode)
                    {
                        ThreadHelper.lock_grab(splitPoint.spinLock);
                        if (bestValue > splitPoint.bestValue)
                            splitPoint.bestValue = bestValue;
                    }

                    continue;
                }
            }

            // Prune moves with negative SEE at low depths
            if (predictedDepth < 4*Depth.ONE_PLY && pos.see_sign(move) < Value.VALUE_ZERO)
            {
                if (SpNode)
                    ThreadHelper.lock_grab(splitPoint.spinLock);

                continue;
            }
        }

        // Speculative prefetch as early as possible
        //prefetch(TT.first_entry(pos.key_after(move)));

        // Check for legality just before making the move
        if (!RootNode && !SpNode && !pos.legal(move, ci.pinned))
        {
            stack.moveCount = --moveCount;
            continue;
        }

        stack.currentMove = move;

        // Step 14. Make the move
        pos.do_move(move, st, givesCheck);

        // Step 15. Reduced depth search (LMR). If the move fails high it will be
        // re-searched at full depth.
        bool doFullDepthSearch;
        if (depth >= 3*Depth.ONE_PLY_C
            && moveCount > 1
            && !captureOrPromotion
            && move != stack.killers0
            && move != stack.killers1)
        {
            stack.reduction = reduction(PvNode, improving, depth, moveCount);

            if ((!PvNode && cutNode)
                || (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] < Value.VALUE_ZERO
                    && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[
                        pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] <= Value.VALUE_ZERO))
                stack.reduction += Depth.ONE_PLY;

            if (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO
                && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[
                    pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO)
                stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));

            // Decrease reduction for moves that escape a capture
            if (stack.reduction > 0
                && Move.type_of(move) == MoveType.NORMAL
                && Piece.type_of(pos.piece_on(Move.to_sq(move))) != PieceType.PAWN
                && pos.see(Move.make_move(Move.to_sq(move), Move.from_sq(move))) < Value.VALUE_ZERO)
                stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));

            var d = new Depth(Math.Max(newDepth - (int) stack.reduction, Depth.ONE_PLY_C));
            if (SpNode)
                alpha = Value.Create(splitPoint.alpha);

            value = -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1),
                -(alpha + 1), -alpha, d, true);

            doFullDepthSearch = (value > alpha && stack.reduction != Depth.DEPTH_ZERO);
            stack.reduction = Depth.DEPTH_ZERO;
        }
        else
            doFullDepthSearch = !PvNode || moveCount > 1;
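        // How the adjustments above play out (a sketch; the reduction() table is engine-tuned
        // and its values are not shown here): if reduction() yielded, say, 2 plies for this
        // depth and move count, a non-PV cut node or poor history bumps it to 3, while positive
        // history and counter-move history together pull it back by one ply. The reduced search
        // then runs at max(newDepth - reduction, ONE_PLY), and only a reduced-search score above
        // alpha triggers the full-depth re-search of Step 16.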
        // Step 16. Full depth search, when LMR is skipped or fails high
        if (doFullDepthSearch)
        {
            if (SpNode)
                alpha = Value.Create(splitPoint.alpha);

            value = newDepth < Depth.ONE_PLY
                ? givesCheck
                    ? -qsearch(NodeType.NonPV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO)
                    : -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO)
                : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, newDepth, !cutNode);
        }

        // For PV nodes only, do a full PV search on the first move or after a fail
        // high (in the latter case search only if value < beta), otherwise let the
        // parent node fail low with value <= alpha and try another move.
        if (PvNode && (moveCount == 1 || (value > alpha && (RootNode || value < beta))))
        {
            stackPlus1.pv = new List<MoveT>() { Move.MOVE_NONE };
            stackPlus1.pv[0] = Move.MOVE_NONE;

            value = newDepth < Depth.ONE_PLY
                ? givesCheck
                    ? -qsearch(NodeType.PV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO)
                    : -qsearch(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO)
                : -search(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, newDepth, false);
        }

        // Step 17. Undo move
        pos.undo_move(move);

        Debug.Assert(value > -Value.VALUE_INFINITE && value < Value.VALUE_INFINITE);

        // Step 18. Check for new best move
        if (SpNode)
        {
            ThreadHelper.lock_grab(splitPoint.spinLock);
            bestValue = Value.Create(splitPoint.bestValue);
            alpha = Value.Create(splitPoint.alpha);
        }

        // Finished searching the move. If a stop or a cutoff occurred, the return
        // value of the search cannot be trusted, and we return immediately without
        // updating best move, PV and TT.
        if (Signals.stop || thisThread.cutoff_occurred())
            return Value.VALUE_ZERO;

        if (RootNode)
        {
            var rm = RootMoves.Find(rootmove => rootmove.pv[0] == move);

            // PV move or new best move?
            if (moveCount == 1 || value > alpha)
            {
                rm.score = value;
                var firstEntry = rm.pv[0];
                rm.pv.Clear();
                rm.pv.Add(firstEntry);

                Debug.Assert(stackPlus1.pv != null);

                foreach (var m in stackPlus1.pv.TakeWhile(m => m != Move.MOVE_NONE))
                {
                    rm.pv.Add(m);
                }

                // We record how often the best move has been changed in each
                // iteration. This information is used for time management: When
                // the best move changes frequently, we allocate some more time.
                if (moveCount > 1)
                    ++BestMoveChanges;
            }
            else
                // All other moves but the PV are set to the lowest value: this is
                // not a problem when sorting because the sort is stable and the
                // move position in the list is preserved - just the PV is pushed up.
                rm.score = -Value.VALUE_INFINITE;
        }

        if (value > bestValue)
        {
            bestValue = Value.Create(SpNode ? splitPoint.bestValue = value : value);

            if (value > alpha)
            {
                // If there is an easy move for this position, clear it if unstable
                if (PvNode
                    && EasyMove.get(pos.key()) != 0
                    && (move != EasyMove.get(pos.key()) || moveCount > 1))
                    EasyMove.clear();

                bestMove = Move.Create(SpNode ? splitPoint.bestMove = move : move);

                if (PvNode && !RootNode) // Update pv even in fail-high case
                    update_pv(SpNode ? splitPoint.ss[ss.current].pv : stack.pv, move, stackPlus1.pv);

                if (PvNode && value < beta) // Update alpha! Always alpha < beta
                    alpha = Value.Create(SpNode ? splitPoint.alpha = value : value);
                else
                {
                    Debug.Assert(value >= beta); // Fail high

                    if (SpNode)
                        splitPoint.cutoff = true;

                    break;
                }
            }
        }

        if (!SpNode && !captureOrPromotion && move != bestMove && quietCount < 64)
            quietsSearched[quietCount++] = move;
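        // The PV re-search above in one concrete case (a sketch): at a non-root PV node with
        // (alpha, beta) == (10, 50), a zero-window result of 60 already fails high here and
        // will cause the beta cutoff above, so no full-window re-search is needed; a result
        // of 30 lies inside the window and is re-searched with the full (10, 50) window to
        // establish an exact score. At the root the re-search happens even on a fail high,
        // because exact scores are wanted for ordering the root moves.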
        // Step 19. Check for splitting the search
        if (!SpNode
            && ThreadPool.threads.Count >= 2
            && depth >= ThreadPool.minimumSplitDepth
            && (thisThread.activeSplitPoint == null
                || !thisThread.activeSplitPoint.allSlavesSearching
                || (ThreadPool.threads.Count > _.MAX_SLAVES_PER_SPLITPOINT
                    && Bitcount.popcount_Full(thisThread.activeSplitPoint.slavesMask) == _.MAX_SLAVES_PER_SPLITPOINT))
            && thisThread.splitPointsSize < _.MAX_SPLITPOINTS_PER_THREAD)
        {
            Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < beta);

            thisThread.split(pos, ss, alpha, beta, ref bestValue, ref bestMove, depth, moveCount, mp, NT, cutNode);

            if (Signals.stop || thisThread.cutoff_occurred())
                return Value.VALUE_ZERO;

            if (bestValue >= beta)
                break;
        }
    }

    if (SpNode)
        return bestValue;

    // Following condition would detect a stop or a cutoff set only after move
    // loop has been completed. But in this case bestValue is valid because we
    // have fully searched our subtree, and we can anyhow save the result in TT.
    /*
       if (Signals.stop || thisThread.cutoff_occurred())
           return VALUE_DRAW;
    */

    // Step 20. Check for mate and stalemate
    // All legal moves have been searched and if there are no legal moves, it
    // must be mate or stalemate. If we are in a singular extension search then
    // return a fail low score.
    if (moveCount == 0)
        bestValue = excludedMove != 0
            ? alpha
            : inCheck ? Value.mated_in(stack.ply) : DrawValue[pos.side_to_move()];

    // Quiet best move: update killers, history and countermoves
    else if (bestMove != 0 && !pos.capture_or_promotion(bestMove))
        update_stats(pos, ss, bestMove, depth, quietsSearched, quietCount);

    // Bonus for prior countermove that caused the fail low
    else if (bestMove == 0)
    {
        if (Move.is_ok(stackMinus2.currentMove)
            && Move.is_ok(stackMinus1.currentMove)
            && pos.captured_piece_type() == 0
            && !inCheck
            && depth >= 3*Depth.ONE_PLY_C)
        {
            var bonus = Value.Create((depth/Depth.ONE_PLY)*(depth/Depth.ONE_PLY));
            var prevSq = Move.to_sq(stackMinus1.currentMove);
            var prevPrevSq = Move.to_sq(stackMinus2.currentMove);
            var flMoveCmh = CounterMovesHistory.table[pos.piece_on(prevPrevSq), prevPrevSq];
            flMoveCmh.updateCMH(pos.piece_on(prevSq), prevSq, bonus);
        }
    }

    tte.save(posKey, value_to_tt(bestValue, stack.ply),
        bestValue >= beta
            ? Bound.BOUND_LOWER
            : PvNode && bestMove != 0 ? Bound.BOUND_EXACT : Bound.BOUND_UPPER,
        depth, bestMove, stack.staticEval, TranspositionTable.generation());

    Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < Value.VALUE_INFINITE);

    return bestValue;
}