internal static BitmapSource PlayerDepthToBitmapSource(Depth playerDepthFrame, bool p) { short[] vals = new short[playerDepthFrame.PlayerDepthFrame.PlayerDepths.Length]; int ctnr = 0; foreach (PlayerDepth pd in playerDepthFrame.PlayerDepthFrame.PlayerDepths) { if (p) { vals[ctnr] = (short)pd.Player; } else { vals[ctnr] = (short)pd.Depth; } ctnr++; } BitmapSource bmap = BitmapSource.Create( playerDepthFrame.DepthFrame.Width, playerDepthFrame.DepthFrame.Height, 96, 96, PixelFormats.Gray16, null, vals, playerDepthFrame.DepthFrame.Width*2); return bmap; }
/// Constructors of the MovePicker class. As arguments we pass information /// to help it return the (presumably) good moves first, to decide which /// moves to return (in the quiescence search, for instance, we only want to /// search captures, promotions and some checks) and how important good move /// ordering is at the current node. internal MovePicker( Position p, MoveT ttm, Depth d, HistoryStats h, CounterMovesHistoryStats cmh, MoveT cm, StackArrayWrapper s) { endBadCaptures = new ExtMoveArrayWrapper(moves, _.MAX_MOVES - 1); cur = new ExtMoveArrayWrapper(moves); endMoves = new ExtMoveArrayWrapper(moves); pos = p; history = h; counterMovesHistory = cmh; ss = s; countermove = cm; depth = d; Debug.Assert(d > Depth.DEPTH_ZERO); stage = pos.checkers() != 0 ? Stages.EVASION : Stages.MAIN_SEARCH; ttMove = ttm != 0 && pos.pseudo_legal(ttm) ? ttm : Move.MOVE_NONE; endMoves += ttMove != Move.MOVE_NONE ? 1 : 0; }
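A minimal, self-contained sketch of the ordering idea described above, using a hypothetical ScoredMove type in place of the real Position/MoveT machinery: the transposition-table move is returned first, then the remaining moves by descending score, which is essentially what the staged picker does within each stage.

using System;
using System.Collections.Generic;
using System.Linq;

record ScoredMove(string Move, int Score);

static class StagedPickerSketch
{
    // Yield the TT move first (if present), then the rest by descending score.
    public static IEnumerable<string> Pick(IReadOnlyList<ScoredMove> moves, string ttMove)
    {
        if (ttMove != null && moves.Any(m => m.Move == ttMove))
            yield return ttMove;
        foreach (var m in moves.Where(m => m.Move != ttMove)
                               .OrderByDescending(m => m.Score))
            yield return m.Move;
    }

    static void Main()
    {
        var moves = new[] { new ScoredMove("e2e4", 40), new ScoredMove("d2d4", 35), new ScoredMove("a2a3", -5) };
        foreach (var m in Pick(moves, "d2d4")) Console.WriteLine(m); // d2d4, e2e4, a2a3
    }
}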
protected override void Kinect_NewDepthFrame(Depth depthImage) { base.Kinect_NewDepthFrame(depthImage); if (NewCompressedKinectDepthFrame != null) { Compression<DepthImageFrame> comp = new Compression<DepthImageFrame>(); byte[] compressedVal = comp.GzipCompress(depthImage.DepthFrame); NewCompressedKinectDepthFrame(compressedVal, depthImage.DepthFrame); } }
public bool Subscribe(CommonLab.TradePair tp, SubscribeTypes st) { //throw new NotImplementedException(); //subscribe string tradingpairs = GetLocalTradingPairString(tp, st); if (st == SubscribeTypes.WSS) { //throw new Exception("This exchange does not support wss subscribing"); if (_subscribedtradingpairs == null) { _subscribedtradingpairs = new Dictionary <string, KFCC.ExchangeInterface.SubscribeInterface>(); } if (_subscribedtradingpairs.ContainsKey(tradingpairs)) { //this pair is already subscribed } else { string raw; Ticker t = GetTicker(GetLocalTradingPairString(tp), out raw); Depth d = GetDepth(GetLocalTradingPairString(tp), out raw); _subscribedtradingpairs.Add(tradingpairs, new RESTHelper(tp, t, d)); //fake events are raised here from a polling loop //_subscribedtradingpairs[tradingpairs].TradeInfoEvent += OkCoinExchange_TradeInfoEvent; } } else if (st == SubscribeTypes.RESTAPI) { if (_subscribedtradingpairs == null) { _subscribedtradingpairs = new Dictionary <string, KFCC.ExchangeInterface.SubscribeInterface>(); } if (_subscribedtradingpairs.ContainsKey(tradingpairs)) { //this pair is already subscribed } else { string raw; Ticker t = GetTicker(GetLocalTradingPairString(tp), out raw); Depth d = GetDepth(GetLocalTradingPairString(tp), out raw); _subscribedtradingpairs.Add(tradingpairs, new RESTHelper(tp, t, d)); //_subscribedtradingpairs[tradingpairs].TradeInfoEvent += OkCoinExchange_TradeInfoEvent; } } if (SubscribedEvent != null) { SubscribedEvent(this, st, tp); } return(true); }
public WebDav(Logger logger, WebDavDb webDavDb, Target target, Document document, string urlStr, string depthStr, ContentType contentType, bool useEtag) { _logger = logger; _webDavDb = webDavDb; _document = document; _webDavKind = target.WebDavKind; _targetKind = target.TargetKind; _contentType = contentType; _useEtag = useEtag; if (depthStr != null) { if (depthStr == "0") { _depth = Depth.Depth0; } else if (depthStr == "1") { _depth = Depth.Depth1; } else if (depthStr == "infinity") { _depth = Depth.DepthInfinity; } } _fullPath = target.FullPath; _hrefHost = urlStr + target.Uri; //split href into the "http://hostname" part and the URI part var index = _hrefHost.IndexOf("://"); if (index != -1) { _hrefUri = _hrefHost.Substring(index + 3); var pos = _hrefUri.IndexOf('/'); if (pos != -1) { _hrefUri = _hrefUri.Substring(pos); _hrefHost = _hrefHost.Substring(0, index + pos + 3); } } if (_hrefUri != "") { if (_targetKind == TargetKind.Dir && _hrefUri[_hrefUri.Length - 1] != '/') _hrefUri = _hrefUri + "/"; } //RFC 2518 (5.2): when a collection is requested without a trailing slash, //the server may append one automatically and process the request; in that case //the URL it assumed should be returned to the client in Content-Location //document.AddHeader("Content-Location",hrefHost+Util.SwapStr("%2f","/",HttpUtility.UrlEncode(hrefUri))); //document.AddHeader("Content-Location",HttpUtility.UrlPathEncode(hrefHost + hrefUri)); var href = Uri.EscapeDataString(_hrefUri); //Ver5.4.6 href = Util.SwapStr("%2F", "/", href); href = Util.SwapStr("%2f", "/", href); href = Util.SwapStr("+", "%20", href); document.AddHeader("Content-Location", _hrefHost + href); }
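A standalone demonstration of the host/URI split performed above (values hypothetical):

using System;

class HrefSplitDemo
{
    static void Main()
    {
        var hrefHost = "http://example.com/dav/folder/file.txt";
        var hrefUri = "";
        var index = hrefHost.IndexOf("://");
        if (index != -1)
        {
            hrefUri = hrefHost.Substring(index + 3);               // "example.com/dav/folder/file.txt"
            var pos = hrefUri.IndexOf('/');
            if (pos != -1)
            {
                hrefUri = hrefUri.Substring(pos);                  // "/dav/folder/file.txt"
                hrefHost = hrefHost.Substring(0, index + pos + 3); // "http://example.com"
            }
        }
        Console.WriteLine(hrefHost + " | " + hrefUri);
    }
}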
/// <summary> /// Recursively traverses all the subfiles and subfolders of a given base directory and returns a collection that contains all records /// </summary> /// <param name="uri">The base uri of the directory whose subfiles and subfolders are listed</param> /// <param name="revision">The revision that has to be traversed</param> /// <returns>A collection with all subfiles and subfolders</returns> public List <Item> GetItems(Uri uri, int revision, Depth depth) { //Remark: This method may return thousands of records. Therefore it might have to limit the amount returned per pass. if (uri == null) { throw new ArgumentNullException("uri"); } EnsureAuthenticated(); return(m_client.GetItems(uri, revision, depth)); }
public HUD(string id, Size screenSize) : base(id, Vector2.Empty, screenSize, Shape.None, BorderStyle.None, Shapes.ShadeNone, ColorArray.Transparent) { spriteList = new List <ISpriteControl>(); renderInfoList = new List <RenderInfo>(); depth = new Depth(0, 0, 0, 0); sprite = new Sprite(UI.Device); focusedControl = enteredControl = this; isInside = true; windowManager = new WindowManager(); }
public override void DumpBody(XmlWriter writer) { if (SwfDumpService.DumpDisplayListTags) { writer.WriteElementString("cid", CharId.ToString()); writer.WriteElementString("depth", Depth.ToString()); writer.WriteElementString("matrix", Matrix.GetMatrixString()); if (ColorTransform != null) { ColorTransform.Dump(writer, false); } } }
private void GenerateNewLine() { string marks = ""; int count = Depth.Split(new string[] { "." }, StringSplitOptions.RemoveEmptyEntries).Length; for (int i = 0; i < count; i++) { marks += "#"; } // Build the new line with the updated marker, depth and the title this.NewLine = String.Format("{0} {1} {2}", marks, Depth, this._title); }
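For illustration, a standalone version of the marker generation above, assuming Depth is a dotted section string such as "1.2.3":

using System;

class HeadingDemo
{
    static void Main()
    {
        var depth = "1.2.3";
        var title = "Example section";
        // One '#' per depth component: "1.2.3" has three components.
        int count = depth.Split(new[] { "." }, StringSplitOptions.RemoveEmptyEntries).Length;
        string marks = new string('#', count);
        Console.WriteLine(string.Format("{0} {1} {2}", marks, depth, title)); // "### 1.2.3 Example section"
    }
}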
private static void Exchange_DepthEvent(object sender, Depth d, EventTypes et, TradePair tp) { OrderBook = d; bidPrice = OrderBook.Bids[0].Price * 0.618 + OrderBook.Asks[0].Price * 0.382;//prices may need tuning askPrice = OrderBook.Bids[0].Price * 0.382 + OrderBook.Asks[0].Price * 0.618; //prices = prices[1.. - 1] + [( // (orderBook.bids[0].limitPrice + orderBook.asks[0].limitPrice) / 2 * 0.7 + // (orderBook.bids[1].limitPrice + orderBook.asks[1].limitPrice) / 2 * 0.2 + // (orderBook.bids[2].limitPrice + orderBook.asks[2].limitPrice) / 2 * 0.1)] prices.RemoveAt(0); prices.Add(((OrderBook.Bids[0].Price + OrderBook.Asks[0].Price) / 2 * 0.7) + ((OrderBook.Bids[1].Price + OrderBook.Asks[1].Price) / 2 * 0.2) + ((OrderBook.Bids[2].Price + OrderBook.Asks[2].Price) / 2 * 0.1)); }
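A self-contained illustration of the two weighted quotes computed above; the 0.618/0.382 weights pull each quote toward its own side of the spread (sample prices are made up):

using System;

class WeightedQuoteDemo
{
    static void Main()
    {
        double bestBid = 100.0, bestAsk = 101.0;
        double bidPrice = bestBid * 0.618 + bestAsk * 0.382; // 100.382, just above the best bid
        double askPrice = bestBid * 0.382 + bestAsk * 0.618; // 100.618, just below the best ask
        Console.WriteLine($"bid={bidPrice:F3} ask={askPrice:F3}");
    }
}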
public override void CreateShape() { base.CreateShape(); buttonDescriptor = ShapeDescriptor.ComputeShape(this, Shape.Rectangle); triangleDescriptor = Shapes.DrawEquilateralTriangle(triangleLeftVertexAbsolutePosition, DefaultTriangleSideLength, ColorOperator.Scale(Color.Black, 0.5f), false, false); buttonDescriptor.Depth = depth; triangleDescriptor.Depth = Depth.AsChildOf(depth); shapeDescriptors[0] = buttonDescriptor; shapeDescriptors[1] = triangleDescriptor; }
static TElement[] Process <TArray, TElement>(TArray input, Depth depth) where TArray : Arr where TElement : struct { var inputHeader = input.GetMat(); var output = new TElement[inputHeader.Rows * inputHeader.Cols]; using (var outputHeader = Mat.CreateMatHeader(output, inputHeader.Rows, inputHeader.Cols, depth, 1)) { CV.Convert(inputHeader, outputHeader); } return(output); }
public static Mat EnsureFormat(Mat output, Size size, Depth depth, int channels) { if (output == null || output.Size != size || output.Depth != depth || output.Channels != channels) { if (output != null) { output.Close(); } return(new Mat(size, depth, channels)); } return(output); }
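A usage sketch of the reallocate-only-when-needed pattern above; the OpenCV.Net namespace is assumed, and every member used (Size, Depth, Channels, Close, the Mat constructor) appears in the snippet itself:

using System;
using OpenCV.Net; // assumed namespace for the Mat/Size/Depth types above

class EnsureFormatDemo
{
    // Copied from the snippet above for self-containment.
    static Mat EnsureFormat(Mat output, Size size, Depth depth, int channels)
    {
        if (output == null || output.Size != size || output.Depth != depth || output.Channels != channels)
        {
            if (output != null) output.Close();
            return new Mat(size, depth, channels);
        }
        return output;
    }

    static void Main()
    {
        Mat buffer = null;
        var size = new Size(640, 480);
        buffer = EnsureFormat(buffer, size, Depth.U8, 3);   // first call allocates
        var same = EnsureFormat(buffer, size, Depth.U8, 3); // same format: buffer is reused
        Console.WriteLine(ReferenceEquals(buffer, same));   // True
    }
}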
public override string ToString() { return(string.Concat(new[] { "{X=", X.ToString(CultureInfo.CurrentCulture), ",Y=", Y.ToString(CultureInfo.CurrentCulture), ",Z=", Z.ToString(CultureInfo.CurrentCulture), ",Width=", Width.ToString(CultureInfo.CurrentCulture), ",Height=", Height.ToString(CultureInfo.CurrentCulture), ",Depth=", Depth.ToString(CultureInfo.CurrentCulture), "}" })); }
void PickRawDefaults(string path) { FileStream file = File.Open(path, FileMode.Open, FileAccess.Read); int fileSize = (int)file.Length; file.Close(); m_TerrainSize = terrainData.size; if (terrainData.heightmapWidth * terrainData.heightmapHeight == fileSize) { m_Width = terrainData.heightmapWidth; m_Height = terrainData.heightmapHeight; m_Depth = Depth.Bit8; } else if (terrainData.heightmapWidth * terrainData.heightmapHeight * 2 == fileSize) { m_Width = terrainData.heightmapWidth; m_Height = terrainData.heightmapHeight; m_Depth = Depth.Bit16; } else { m_Depth = Depth.Bit16; int pixels = fileSize / (int)m_Depth; int width = Mathf.RoundToInt(Mathf.Sqrt(pixels)); int height = Mathf.RoundToInt(Mathf.Sqrt(pixels)); if ((width * height * (int)m_Depth) == fileSize) { m_Width = width; m_Height = height; return; } m_Depth = Depth.Bit8; pixels = fileSize / (int)m_Depth; width = Mathf.RoundToInt(Mathf.Sqrt(pixels)); height = Mathf.RoundToInt(Mathf.Sqrt(pixels)); if ((width * height * (int)m_Depth) == fileSize) { m_Width = width; m_Height = height; return; } m_Depth = Depth.Bit16; } }
} // protected void InsertUniqueEdge( Edge e ) /** * If either of the GeometryLocations for the existing label is * exactly opposite to the one in the labelToMerge, * this indicates a dimensional collapse has happened. * In this case, convert the label for that Geometry to a Line label */ /* NOT NEEDED? * private void CheckDimensionalCollapse(Label labelToMerge, Label existingLabel) * { * if (existingLabel.isArea() && labelToMerge.isArea()) { * for (int i = 0; i < 2; i++) { * if (! labelToMerge.isNull(i) * && labelToMerge.getLocation(i, Position.LEFT) == existingLabel.getLocation(i, Position.RIGHT) * && labelToMerge.getLocation(i, Position.RIGHT) == existingLabel.getLocation(i, Position.LEFT) ) * { * existingLabel.toLine(i); * } * } * } * } */ /// <summary> /// Update the labels for edges according to their depths. /// For each edge, the depths are first normalized. /// Then, if the depths for the edge are equal, /// this edge must have collapsed into a line edge. /// If the depths are not equal, update the label /// with the locations corresponding to the depths /// (i.e. a depth of 0 corresponds to a Location of EXTERIOR, /// a depth of 1 corresponds to INTERIOR) /// </summary> private void ComputeLabelsFromDepths() { foreach (object obj in _edgeList) { Edge e = (Edge)obj; Label lbl = e.Label; Depth depth = e.Depth; // Only check edges for which there were duplicates, // since these are the only ones which might // be the result of dimensional collapses. if (!depth.IsNull()) { depth.Normalize(); for (int i = 0; i < 2; i++) { if (!lbl.IsNull(i) && lbl.IsArea() && !depth.IsNull(i)) { // if the depths are equal, this edge is the result of // the dimensional collapse of two or more edges. // It has the same location on both sides of the edge, // so it has collapsed to a line. if (depth.GetDelta(i) == 0) { lbl.ToLine(i); } else { // This edge may be the result of a dimensional collapse, // but it still has different locations on both sides. The // label of the edge must be updated to reflect the resultant // side locations indicated by the depth values. if (depth.IsNull(i, Position.Left)) { throw new InvalidOperationException("Depth of LEFT side has not been initialized"); } lbl.SetLocation(i, Position.Left, depth.GetLocation(i, Position.Left)); if (depth.IsNull(i, Position.Right)) { throw new InvalidOperationException("Depth of RIGHT side has not been initialized"); } lbl.SetLocation(i, Position.Right, depth.GetLocation(i, Position.Right)); } // else } // if ( !lbl.IsNull( i ) && lbl.IsArea() && !depth.IsNull( i ) ) } // for ( int i = 0; i < 2; i++ ) } // if ( !depth.IsNull() ) } // foreach ( object obj in _edgeList ) } // private void ComputeLabelsFromDepths()
internal void WriteItem(Node node, Depth itemDepth, bool child) { var done = false; //.Content files are not written to the client currently switch (_handler.WebdavType) { case WebdavType.Content: /*WriteSingleItem(node, WebdavType.Content);*/ done = true; break; case WebdavType.ContentType: WriteSingleItem(node, WebdavType.ContentType); done = true; break; case WebdavType.Page: WriteSingleItem(node, WebdavType.File); done = true; break; case WebdavType.File: if (node is IFolder) { WriteSingleItem(node, WebdavType.Folder); done = true; } if (child && (node is Page || (!(node is IFile) && (node.NodeType.Name != "Folder")))) { //WriteSingleItem(node, WebdavType.Content); done = true; } break; } //write items for all the binary properties (e.g: .aspx, .xml, .PersonalizationSettings) if (child && itemDepth == Depth.Current && (_handler.WebdavType == WebdavType.File || _handler.WebdavType == WebdavType.Folder)) { foreach (var propType in node.PropertyTypes) { if (propType.DataType != DataType.Binary || propType.Name.CompareTo("Binary") != 0) { continue; } WriteSingleItem(node, WebdavType.File, propType.Name); done = true; } } //hack if (!done && node is IFile) { WriteSingleItem(node, WebdavType.File); } }
/// <summary> /// Converts the data captured by the Kinect into a point cloud /// </summary> /// <returns></returns> bool GetPoint() { KinectSensor kinect = KinectSensor.KinectSensors[0]; ColorImageStream colorStream = kinect.ColorStream; DepthImageStream depthStream = kinect.DepthStream; // Acquire frame data from the RGB camera and the depth camera using (ColorImageFrame colorFrame = kinect.ColorStream.OpenNextFrame(100)) { using (DepthImageFrame depthFrame = kinect.DepthStream.OpenNextFrame(100)) { if (colorFrame == null || depthFrame == null) { return(false); } // Build the RGB camera data byte[] colorPixel = new byte[colorFrame.PixelDataLength]; colorFrame.CopyPixelDataTo(colorPixel); rgb = new RGB[colorFrame.Width * colorFrame.Height]; for (int i = 0; i < rgb.Length; i++) { int colorIndex = i * 4; rgb[i] = new RGB(colorPixel[colorIndex + 2], colorPixel[colorIndex + 1], colorPixel[colorIndex]); } // Get the depth camera's pixel data short[] depthPixel = new short[depthFrame.PixelDataLength]; depthFrame.CopyPixelDataTo(depthPixel); // Get the RGB camera coordinates corresponding to each depth camera coordinate (registration) ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength]; kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint); // Build the depth data depth = new Depth[depthFrame.Width * depthFrame.Height]; for (int i = 0; i < depth.Length; i++) { int x = Math.Min(colorPoint[i].X, colorStream.FrameWidth - 1); int y = Math.Min(colorPoint[i].Y, colorStream.FrameHeight - 1); int distance = depthPixel[i] >> DepthImageFrame.PlayerIndexBitmaskWidth; depth[i] = new Depth(x, y, distance); } } } return(true); }
public void UpdateTickerBuyDepth(Depth d) { Sell = d.Asks[0].Price; Buy = d.Bids[0].Price; if (High < Sell) { High = Sell; } if (Low > Buy) { Low = Buy; } ExchangeTimeStamp = d.ExchangeTimeStamp; }
public override int GetHashCode() { unchecked { int hash = 17; hash = (hash * 23) + Name.GetHashCode(); hash = (hash * 23) + Depth.GetHashCode(); hash = (hash * 23) + Size.GetHashCode(); hash = (hash * 23) + CreationTime.GetHashCode(); hash = (hash * 23) + Attributes.GetHashCode(); hash = (hash * 23) + IsLeaf.GetHashCode(); return(hash); } }
/// <summary> /// Write instruction operands into bytecode stream. /// </summary> /// <param name="writer">Bytecode writer.</param> public override void WriteOperands(WordWriter writer) { SampledType.Write(writer); Dim.Write(writer); Depth.Write(writer); Arrayed.Write(writer); MS.Write(writer); Sampled.Write(writer); ImageFormat.Write(writer); if (AccessQualifier != null) { AccessQualifier.Write(writer); } }
public override string Write() { var temp = "\nShading:Overhang,\n"; temp += Name + ", !Name\n"; temp += FenestrationSurface.Name + ", !fenestrationSurfaceName\n"; temp += Height.ToString(CultureInfo.InvariantCulture) + ", !- height above Window or Door\n"; temp += TiltAngle.ToString(CultureInfo.InvariantCulture) + ", !- Tilt Angle\n"; temp += Extension.ToString(CultureInfo.InvariantCulture) + ", !- Left Extension from Window/Door Width\n"; temp += Extension.ToString(CultureInfo.InvariantCulture) + ", !- Right Extension from Window/Door Width\n"; temp += Depth.ToString(CultureInfo.InvariantCulture) + ", !- Depth\n"; return(temp); }
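For reference, with hypothetical field values the method emits an EnergyPlus object of this shape (note the code leaves a trailing comma on the Depth field rather than the usual terminating semicolon):

Shading:Overhang,
South Overhang, !Name
South Window, !fenestrationSurfaceName
0.5, !- height above Window or Door
90, !- Tilt Angle
0.25, !- Left Extension from Window/Door Width
0.25, !- Right Extension from Window/Door Width
1, !- Depth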
public void HandleMethod() { _handler.Context.Response.StatusCode = 207; _handler.Context.Response.ContentType = "text/xml"; _handler.Context.Response.CacheControl = "no-cache"; _handler.Context.Response.AddHeader("Content-Location", _handler.RepositoryPathToUrl(_handler.Path)); RequestDepth = Common.GetDepth(_handler.Context.Request.Headers["Depth"]); switch (RequestDepth) { case Depth.Current: { ProcessCurrent(); break; } case Depth.Children: { ProcessChildren(); break; } case Depth.Infinity: { ProcessChildren(); break; //throw (new NotImplementedException("Infinity request is not implemented.")); } } if (_writer == null) { return; } _writer.Flush(); _writer.BaseStream.Seek(0, System.IO.SeekOrigin.Begin); var reader = new System.IO.StreamReader(_writer.BaseStream, System.Text.Encoding.UTF8); string ret = reader.ReadToEnd(); _writer.Close(); #region Debug System.Diagnostics.Debug.Write(string.Concat("RESPONSE: ", ret)); #endregion _handler.Context.Response.Write(ret); }
public PusherHelper(string tradingpair, Ticker t, Depth d, CommonLab.TradePair tp, string appkey = "de504dc5763aeef9ff52") { _tradingpair = tradingpair; _tradinginfo = new TradingInfo(SubscribeTypes.WSS, tradingpair, tp); _tradinginfo.t = t; _tradinginfo.d = d; Tp = tp; _appkey = appkey; _pusher = new Pusher(_appkey); _pusher.ConnectionStateChanged += _pusher_ConnectionStateChanged; _pusher.Error += _pusher_Error; _pusher.Connect(); }
public void TestIncreaseDecreaseAsk() { var depth = new Depth(); var cc = new ChangedChecker(depth); depth.AddOrder(1236, 300, false); Assert.True(cc.VerifyAskChanged(true, false, false, false, false)); cc.Reset(); depth.AddOrder(1235, 200, false); Assert.True(cc.VerifyAskChanged(true, true, false, false, false)); cc.Reset(); depth.AddOrder(1232, 100, false); Assert.True(cc.VerifyAskChanged(true, true, true, false, false)); cc.Reset(); depth.AddOrder(1235, 400, false); Assert.True(cc.VerifyAskChanged(false, true, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, 37, false); Assert.True(cc.VerifyAskChanged(false, false, true, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1235, -41, false); Assert.True(cc.VerifyAskChanged(false, true, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1232, 51, false); Assert.True(cc.VerifyAskChanged(true, false, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, -41, false); Assert.True(cc.VerifyAskChanged(false, false, true, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, 201, false); Assert.True(cc.VerifyAskChanged(false, false, true, false, false)); cc.Reset(); var ask0 = depth.Asks.First().Value; var ask1 = depth.Asks.ElementAt(1).Value; var ask2 = depth.Asks.ElementAt(2).Value; Assert.True(VerifyLevel(ask0, 1232, 1, 151)); Assert.True(VerifyLevel(ask1, 1235, 2, 559)); Assert.True(VerifyLevel(ask2, 1236, 1, 497)); Assert.Equal(3, depth.Asks.Count); }
/// <summary> /// Dynamically constructs a quantity of the given <see cref="QuantityInfo"/> with the value in the quantity's base units. /// </summary> /// <param name="quantityInfo">The <see cref="QuantityInfo"/> of the quantity to create.</param> /// <param name="value">The value to construct the quantity with.</param> /// <returns>The created quantity.</returns> public static IQuantity FromQuantityInfo(QuantityInfo quantityInfo, QuantityValue value) { switch (quantityInfo.Name) { case "Depth": return(Depth.From(value, Depth.BaseUnit)); case "Jerk": return(Jerk.From(value, Jerk.BaseUnit)); default: throw new ArgumentException($"{quantityInfo.Name} is not a supported quantity."); } }
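A usage sketch, assuming the UnitsNet-style static Info property on each quantity type:

// Dynamically construct a Depth of 5 in its base unit.
IQuantity q = FromQuantityInfo(Depth.Info, 5);
Console.WriteLine(q); // prints the value in Depth's base unit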
public void TestIncreaseDecreaseBid() { var depth = new Depth(); var cc = new ChangedChecker(depth); depth.AddOrder(1236, 300, true); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); cc.Reset(); depth.AddOrder(1235, 200, true); Assert.True(cc.VerifyBidChanged(false, true, false, false, false)); cc.Reset(); depth.AddOrder(1232, 100, true); Assert.True(cc.VerifyBidChanged(false, false, true, false, false)); cc.Reset(); depth.AddOrder(1235, 400, true); Assert.True(cc.VerifyBidChanged(false, true, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, 37, true); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1235, -41, true); Assert.True(cc.VerifyBidChanged(false, true, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1232, 60, true); Assert.True(cc.VerifyBidChanged(false, false, true, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, -41, true); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); cc.Reset(); depth.ChangeOrderQuantity(1236, 210, true); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); cc.Reset(); var bid0 = depth.Bids.ElementAt(0).Value; var bid1 = depth.Bids.ElementAt(1).Value; var bid2 = depth.Bids.ElementAt(2).Value; Assert.True(VerifyLevel(bid0, 1236, 1, 506)); Assert.True(VerifyLevel(bid1, 1235, 2, 559)); Assert.True(VerifyLevel(bid2, 1232, 1, 160)); Assert.Equal(3, depth.Bids.Count); }
public void TestReplaceBid() { var depth = new Depth(); var cc = new ChangedChecker(depth); depth.AddOrder(1236, 300, true); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); cc.Reset(); depth.AddOrder(1235, 200, true); Assert.True(cc.VerifyBidChanged(false, true, false, false, false)); cc.Reset(); depth.AddOrder(1232, 100, true); Assert.True(cc.VerifyBidChanged(false, false, true, false, false)); cc.Reset(); depth.AddOrder(1235, 400, true); Assert.True(cc.VerifyBidChanged(false, true, false, false, false)); cc.Reset(); // Verify Levels var bid0 = depth.Bids.First().Value; var bid1 = depth.Bids.ElementAt(1).Value; var bid2 = depth.Bids.ElementAt(2).Value; Assert.True(VerifyLevel(bid0, 1236, 1, 300)); Assert.True(VerifyLevel(bid1, 1235, 2, 600)); Assert.True(VerifyLevel(bid2, 1232, 1, 100)); Assert.Equal(3, depth.Bids.Count); // Replace bid depth.ReplaceOrder(1235, 1237, 200, 200, true); // Verify Levels bid0 = depth.Bids.First().Value; bid1 = depth.Bids.ElementAt(1).Value; bid2 = depth.Bids.ElementAt(2).Value; var bid3 = depth.Bids.ElementAt(3).Value; Assert.True(VerifyLevel(bid0, 1237, 1, 200)); Assert.True(VerifyLevel(bid1, 1236, 1, 300)); Assert.True(VerifyLevel(bid2, 1235, 1, 400)); Assert.True(VerifyLevel(bid3, 1232, 1, 100)); Assert.Equal(4, depth.Bids.Count); Assert.True(cc.VerifyBidChanged(true, true, true, true, false)); cc.Reset(); }
public void TestAddBids() { var depth = new Depth(); var cc = new ChangedChecker(depth); depth.AddOrder(1234, 100, true); depth.AddOrder(1234, 200, true); depth.AddOrder(1234, 300, true); var firstBid = depth.Bids.First().Value; Assert.True(VerifyLevel(firstBid, 1234, 3, 600)); Assert.True(cc.VerifyBidChanged(true, false, false, false, false)); Assert.Equal(1, depth.Bids.Count); }
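The VerifyLevel calls in these tests check a price level's aggregate state (price, order count, total quantity). A minimal self-contained sketch of such an aggregated level, independent of the real Depth class:

using System;

// Hypothetical aggregated price level: one entry per price in the book.
class Level
{
    public int Price { get; }
    public int OrderCount { get; private set; }
    public int AggregateQty { get; private set; }
    public Level(int price) { Price = price; }
    public void AddOrder(int qty) { OrderCount++; AggregateQty += qty; }
}

class LevelDemo
{
    static void Main()
    {
        var level = new Level(1234);
        level.AddOrder(100);
        level.AddOrder(200);
        level.AddOrder(300);
        // Mirrors VerifyLevel(firstBid, 1234, 3, 600) in the test above.
        Console.WriteLine($"{level.Price} {level.OrderCount} {level.AggregateQty}"); // 1234 3 600
    }
}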
protected override void Kinect_NewDepthFrame(Depth depthImage) { base.Kinect_NewDepthFrame(depthImage); if (Options.StoreDepth) { this.DepthImages.Enqueue(depthImage.DepthFrame); if (this.DepthImages.Count > this.Options.DepthMax) { DepthImageFrame dead; this.DepthImages.TryDequeue(out dead); } } }
/// <summary>Returns the soil layer index for a specified soil depth (mm)</summary> /// <param name="depth">Soil depth (mm)</param> /// <param name="dlayer">Array of soil layer depths in the profile (mm)</param> /// <returns>soil layer index</returns> /// <exception cref="System.Exception"></exception> private int LayerIndex(double depth, double[] dlayer) { double CumDepth = 0.0; for (int i = 0; i < dlayer.Length; i++) { CumDepth = CumDepth + dlayer[i]; if (CumDepth >= depth) { return(i); } } throw new Exception(Name + ": Specified soil depth of " + depth.ToString() + " mm is greater than profile depth of " + CumDepth.ToString() + " mm"); }
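A standalone check of the cumulative-depth lookup with hypothetical layer thicknesses:

using System;

class LayerIndexDemo
{
    static int LayerIndex(double depth, double[] dlayer)
    {
        double cumDepth = 0.0;
        for (int i = 0; i < dlayer.Length; i++)
        {
            cumDepth += dlayer[i];
            if (cumDepth >= depth) return i; // first layer whose bottom reaches the requested depth
        }
        throw new Exception("Specified soil depth of " + depth + " mm is greater than profile depth of " + cumDepth + " mm");
    }

    static void Main()
    {
        double[] dlayer = { 150, 150, 300, 300 }; // layer thicknesses (mm)
        Console.WriteLine(LayerIndex(100, dlayer)); // 0 (within the first 150 mm)
        Console.WriteLine(LayerIndex(400, dlayer)); // 2 (150+150 < 400 <= 600)
    }
}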
public int CompareTo(DictSchema other) { if (other == null) { return(1); } if (Id == other.Id) { return(0); } int result = Depth.CompareTo(other.Depth); if (result == 0) { DictSchema p1 = this; DictSchema p2 = other; while (p1.Parent != p2.Parent) { p1 = p1.Parent; p2 = p2.Parent; } result = p1.SortOrder.CompareTo(p2.SortOrder); } else { DictSchema s = result > 0 ? this : other; int d = Math.Abs(Depth - other.Depth); while (d-- > 0) { s = s.Parent; } if (result > 0) { if (s.Id != other.Id) { result = s.CompareTo(other); } } else { if (s.Id != Id) { result = this.CompareTo(s); } } } return(result); }
public override string ToString() { var sb = new StringBuilder(50); // [depth] name (type) numberOfChildren sb.AppendFormat("[{0}] {1} ({2})", Depth.ToString("D3"), _name, Target.GetType().Name); if (_visualChildrenCount != 0) { sb.Append(' '); sb.Append(_visualChildrenCount.ToString()); } return(sb.ToString()); }
public void TestReplaceAsk() { var depth = new Depth(); var cc = new ChangedChecker(depth); depth.AddOrder(1236, 300, false); Assert.True(cc.VerifyAskChanged(true, false, false, false, false)); cc.Reset(); depth.AddOrder(1235, 200, false); Assert.True(cc.VerifyAskChanged(true, true, false, false, false)); cc.Reset(); depth.AddOrder(1232, 100, false); Assert.True(cc.VerifyAskChanged(true, true, true, false, false)); cc.Reset(); depth.AddOrder(1235, 400, false); Assert.True(cc.VerifyAskChanged(false, true, false, false, false)); cc.Reset(); // Verify Levels var ask0 = depth.Asks.First().Value; var ask1 = depth.Asks.ElementAt(1).Value; var ask2 = depth.Asks.ElementAt(2).Value; Assert.True(VerifyLevel(ask0, 1232, 1, 100)); Assert.True(VerifyLevel(ask1, 1235, 2, 600)); Assert.True(VerifyLevel(ask2, 1236, 1, 300)); Assert.Equal(3, depth.Asks.Count); // Replace ask depth.ReplaceOrder(1235, 1237, 200, 200, false); // Verify Levels ask0 = depth.Asks.First().Value; ask1 = depth.Asks.ElementAt(1).Value; ask2 = depth.Asks.ElementAt(2).Value; var ask3 = depth.Asks.ElementAt(3).Value; Assert.True(VerifyLevel(ask0, 1232, 1, 100)); Assert.True(VerifyLevel(ask1, 1235, 1, 400)); Assert.True(VerifyLevel(ask2, 1236, 1, 300)); Assert.True(VerifyLevel(ask3, 1237, 1, 200)); Assert.Equal(4, depth.Asks.Count); Assert.True(cc.VerifyAskChanged(false, true, false, true, false)); cc.Reset(); }
/// <summary> /// Indicates whether the current object is equal to another object of the same type. /// </summary> /// <returns> /// true if the current object is equal to the <paramref name="other"/> parameter; otherwise, false. /// </returns> /// <param name="other">An object to compare with this object. /// </param> public bool Equals(SizeIndex other) { bool equals = true; equals = equals && (Depth != default(int) & other.Depth != default(int)) ? Depth.Equals(other.Depth) : Depth == default(int) & other.Depth == default(int); equals = equals && (CountsAndSizes != default(System.Collections.Generic.Dictionary <Folder, CountAndSize>) & other.CountsAndSizes != default(System.Collections.Generic.Dictionary <Folder, CountAndSize>)) ? CountsAndSizes.Equals(other.CountsAndSizes) : CountsAndSizes == default(System.Collections.Generic.Dictionary <Folder, CountAndSize>) & other.CountsAndSizes == default(System.Collections.Generic.Dictionary <Folder, CountAndSize>); return(equals); }
private void PickRawDefaults(string path) { FileStream stream = File.Open(path, FileMode.Open, FileAccess.Read); int length = (int) stream.Length; stream.Close(); this.m_TerrainSize = base.terrainData.size; if ((base.terrainData.heightmapWidth * base.terrainData.heightmapHeight) == length) { this.m_Width = base.terrainData.heightmapWidth; this.m_Height = base.terrainData.heightmapHeight; this.m_Depth = Depth.Bit8; } else if (((base.terrainData.heightmapWidth * base.terrainData.heightmapHeight) * 2) == length) { this.m_Width = base.terrainData.heightmapWidth; this.m_Height = base.terrainData.heightmapHeight; this.m_Depth = Depth.Bit16; } else { this.m_Depth = Depth.Bit16; int num2 = length / (int) this.m_Depth; int num3 = Mathf.RoundToInt(Mathf.Sqrt((float) num2)); int num4 = Mathf.RoundToInt(Mathf.Sqrt((float) num2)); if (((num3 * num4) * (int) this.m_Depth) == length) { this.m_Width = num3; this.m_Height = num4; } else { this.m_Depth = Depth.Bit8; num2 = length / (int) this.m_Depth; num3 = Mathf.RoundToInt(Mathf.Sqrt((float) num2)); num4 = Mathf.RoundToInt(Mathf.Sqrt((float) num2)); if (((num3 * num4) * (int) this.m_Depth) == length) { this.m_Width = num3; this.m_Height = num4; } else { this.m_Depth = Depth.Bit16; } } } }
/// UCI::pv() formats PV information according to the UCI protocol. UCI requires /// that all (if any) unsearched PV lines are sent using a previous search score. internal static string pv(Position pos, Depth depth, ValueT alpha, ValueT beta) { var ss = new StringBuilder(); var elapsed = TimeManagement.elapsed() + 1; var multiPV = Math.Min(int.Parse(OptionMap.Instance["MultiPV"].v), Search.RootMoves.Count); var selDepth = ThreadPool.threads.Select(th => th.maxPly).Concat(new[] {0}).Max(); for (var i = 0; i < multiPV; ++i) { var updated = (i <= Search.PVIdx); if (depth == Depth.ONE_PLY && !updated) { continue; } var d = updated ? depth : depth - Depth.ONE_PLY; var v = updated ? Search.RootMoves[i].score : Search.RootMoves[i].previousScore; var tb = Tablebases.RootInTB && Math.Abs(v) < Value.VALUE_MATE - _.MAX_PLY; v = tb? Tablebases.Score : v; ss.Append($"info depth {d/Depth.ONE_PLY} seldepth {selDepth} multipv {i + 1} score {value(v)}"); if (!tb && i == Search.PVIdx) { ss.Append(v >= beta ? " lowerbound" : v <= alpha ? " upperbound" : ""); } ss.Append($" nodes {pos.nodes_searched()} nps {pos.nodes_searched()*1000/elapsed}"); if (elapsed > 1000) // Earlier makes little sense ss.Append($" hashfull {TranspositionTable.hashfull()}"); ss.Append($" tbhits {Tablebases.Hits} time {elapsed} pv"); foreach (var m in Search.RootMoves[i].pv) { ss.Append($" {move(m, pos.is_chess960())}"); } } return ss.ToString(); }
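Assembling the format strings above, a single emitted line looks roughly like this (numbers illustrative; the hashfull field only appears after one second of search):

info depth 18 seldepth 24 multipv 1 score cp 31 nodes 12345678 nps 1523456 hashfull 412 tbhits 0 time 8104 pv e2e4 e7e5 g1f3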
internal void save(ulong k, ValueT v, Bound b, Depth d, MoveT m, ValueT ev, byte g) { // Preserve any existing move for the same position if ((m != 0) || (k >> 48) != key16) { move16 = (ushort) m; } // Don't overwrite more valuable entries if ((k >> 48) != key16 || d > depth8 - 2 /* || g != (genBound8 & 0xFC) // Matching non-zero keys are already refreshed by probe() */ || b == Bound.BOUND_EXACT) { key16 = (ushort) (k >> 48); value16 = (short) v; eval16 = (short) ev; genBound8 = (byte) (g | (int) b); depth8 = d; } }
internal MovePicker(Position p, MoveT ttm, Depth d, HistoryStats h, CounterMovesHistoryStats cmh, SquareT s) { endBadCaptures = new ExtMoveArrayWrapper(moves, _.MAX_MOVES - 1); cur = new ExtMoveArrayWrapper(moves); endMoves = new ExtMoveArrayWrapper(moves); pos = p; history = h; counterMovesHistory = cmh; Debug.Assert(d <= Depth.DEPTH_ZERO_C); if (pos.checkers() != 0) { stage = Stages.EVASION; } else if (d > Depth.DEPTH_QS_NO_CHECKS) { stage = Stages.QSEARCH_WITH_CHECKS; } else if (d > Depth.DEPTH_QS_RECAPTURES) { stage = Stages.QSEARCH_WITHOUT_CHECKS; } else { stage = Stages.RECAPTURE; recaptureSquare = s; ttm = Move.MOVE_NONE; } ttMove = ttm != 0 && pos.pseudo_legal(ttm) ? ttm : Move.MOVE_NONE; endMoves += (ttMove != Move.MOVE_NONE) ? 1 : 0; }
internal static void init() { double[][] K = {new[] {0.83, 2.25}, new[] {0.50, 3.00}}; for (var pv = 0; pv <= 1; ++pv) for (var imp = 0; imp <= 1; ++imp) for (var d = 1; d < 64; ++d) for (var mc = 1; mc < 64; ++mc) { var r = K[pv][0] + Math.Log(d)*Math.Log(mc)/K[pv][1]; if (r >= 1.5) Reductions[pv, imp, d, mc] = new Depth((int) (r*Depth.ONE_PLY_C)); // Increase reduction when eval is not improving if (pv == 0 && imp == 0 && Reductions[pv, imp, d, mc] >= 2*Depth.ONE_PLY_C) Reductions[pv, imp, d, mc] += Depth.ONE_PLY; } for (var d = 0; d < 16; ++d) { FutilityMoveCounts[0, d] = (int) (2.4 + 0.773*Math.Pow(d + 0.00, 1.8)); FutilityMoveCounts[1, d] = (int) (2.9 + 1.045*Math.Pow(d + 0.49, 1.8)); } }
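A quick standalone evaluation of the reduction formula above, r = K[pv][0] + ln(d)*ln(mc) / K[pv][1], for one sample depth/move-count pair:

using System;

class ReductionDemo
{
    static void Main()
    {
        double[][] K = { new[] { 0.83, 2.25 }, new[] { 0.50, 3.00 } };
        int d = 10, mc = 10; // remaining depth and move count (sample values)
        for (var pv = 0; pv <= 1; ++pv)
        {
            double r = K[pv][0] + Math.Log(d) * Math.Log(mc) / K[pv][1];
            // init() above only stores reductions of at least 1.5 plies.
            Console.WriteLine($"pv={pv}: r={r:F2} plies");
        }
        // pv=0: r=3.19, pv=1: r=2.27; non-PV nodes are reduced more aggressively.
    }
}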
/// <summary> /// Configures the rendering pipeline. /// </summary> /// <param name="cull">The polygon culling options.</param> /// <param name="depth">The depth processing options.</param> /// <param name="stencil">The stenciling mode.</param> /// <param name="blend">The blending mode.</param> public static void Use(Cull cull, Depth depth, Stencil stencil, Blend blend) { if (gl.HasPendingPops) { throw new InvalidOperationException("Cannot change GPU state while the attribute stack is active."); } if (currentCull != cull) { if (cull == Cull.None) { gl.Disable(GL.CULL_FACE); } else { gl.Enable(GL.CULL_FACE); switch (cull) { case Cull.Front: gl.CullFace(GL.FRONT); break; case Cull.Back: gl.CullFace(GL.BACK); break; case Cull.Front | Cull.Back: gl.CullFace(GL.FRONT_AND_BACK); break; } } currentCull = cull; } if (currentDepth != depth) { if (depth == Depth.None) { gl.Disable(GL.DEPTH_TEST); } else { gl.Enable(GL.DEPTH_TEST); if ((depth & Depth.Test) == Depth.Test) { if ((depth & Depth.TestReversed) == Depth.TestReversed) { gl.DepthFunc(GL.GREATER); } else { gl.DepthFunc(GL.LEQUAL); } } else { gl.DepthFunc(GL.ALWAYS); } if ((depth & Depth.Write) == Depth.Write) { gl.DepthMask(true); } else { gl.DepthMask(false); } if ((depth & Depth.Clamp) == Depth.Clamp) { gl.Enable(GL.DEPTH_CLAMP); } else { gl.Disable(GL.DEPTH_CLAMP); } if ((depth & Depth.Offset) == Depth.Offset) { gl.Enable(GL.POLYGON_OFFSET_FILL); gl.PolygonOffset(0.02f, 0); } else { gl.Disable(GL.POLYGON_OFFSET_FILL); } } currentDepth = depth; } if (currentStencil != stencil) { if (stencil == Stencil.None) { gl.Disable(GL.STENCIL_TEST); } else { gl.Enable(GL.STENCIL_TEST); switch (stencil) { case Stencil.Shadows: gl.StencilFunc(GL.FRONT_AND_BACK, GL.ALWAYS, 0, 0xFF); gl.StencilOp(GL.FRONT, GL.KEEP, GL.INCR_WRAP, GL.KEEP); // <- increase when going in gl.StencilOp(GL.BACK, GL.KEEP, GL.DECR_WRAP, GL.KEEP); // <- decrease when going out break; case Stencil.Light: gl.StencilFunc(GL.FRONT_AND_BACK, GL.EQUAL, 0, 0xFF); // <- if (stencil != 0) { in shadow } gl.StencilOp(GL.FRONT_AND_BACK, GL.KEEP, GL.KEEP, GL.KEEP); break; } } currentStencil = stencil; } if (currentBlend != blend) { if (blend == Blend.None) { gl.Disable(GL.BLEND); } else { gl.Enable(GL.BLEND); switch (blend) { case Blend.Additive: gl.BlendFunc(GL.ONE, GL.ONE, GL.ONE, GL.ONE); break; case Blend.Alpha: gl.BlendFunc(GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA, GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA); break; } } currentBlend = blend; } }
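Typical call sites for the state cache above, combining only flag values the method already handles (Depth behaves as a [Flags] enum here, judging by the bitwise tests):

// Opaque geometry: back-face culling, depth test and write, no stencil, no blending.
Use(Cull.Back, Depth.Test | Depth.Write, Stencil.None, Blend.None);

// Transparent pass: keep the depth test but stop writing depth, alpha blending.
Use(Cull.Back, Depth.Test, Stencil.None, Blend.Alpha);

// Stencil-shadow volume pass, matching the Stencil.Shadows case above.
Use(Cull.None, Depth.Test | Depth.Clamp, Stencil.Shadows, Blend.None);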
// ThreadPool::read_uci_options() updates internal threads parameters from the // corresponding UCI options and creates/destroys threads to match the requested // number. Thread objects are dynamically allocated to avoid creating all possible // threads in advance (which include pawns and material tables), even if only a // few are to be used. internal static void read_uci_options(WaitHandle[] initEvents) { minimumSplitDepth = int.Parse(OptionMap.Instance["Min Split Depth"].v)*Depth.ONE_PLY; var requested = int.Parse(OptionMap.Instance["Threads"].v); var current = 0; Debug.Assert(requested > 0); while (threads.Count < requested) { if (initEvents == null) { threads.Add(new Thread(null)); } else { threads.Add(new Thread(initEvents[current + 2])); current++; } } while (threads.Count > requested) { delete_thread(threads[threads.Count - 1]); threads.RemoveAt(threads.Count - 1); } }
// Thread::split() does the actual work of distributing the work at a node between // several available threads. If it does not succeed in splitting the node // (because no idle threads are available), the function immediately returns. // If splitting is possible, a SplitPoint object is initialized with all the // data that must be copied to the helper threads and then helper threads are // informed that they have been assigned work. This will cause them to instantly // leave their idle loops and call search(). When all threads have returned from // search() then split() returns. internal void split( Position pos, StackArrayWrapper ss, ValueT alpha, ValueT beta, ref ValueT bestValue, ref MoveT bestMove, Depth depth, int moveCount, MovePicker movePicker, NodeType nodeType, bool cutNode) { Debug.Assert(searching); Debug.Assert( -Value.VALUE_INFINITE < bestValue && bestValue <= alpha && alpha < beta && beta <= Value.VALUE_INFINITE); Debug.Assert(depth >= ThreadPool.minimumSplitDepth); Debug.Assert(splitPointsSize < _.MAX_SPLITPOINTS_PER_THREAD); // Pick and init the next available split point var sp = splitPoints[splitPointsSize]; ThreadHelper.lock_grab(sp.spinLock); // No contention here until we don't increment splitPointsSize sp.master = this; sp.parentSplitPoint = activeSplitPoint; sp.slavesMask = 0; sp.slavesMask = (1u << idx); sp.depth = depth; sp.bestValue = bestValue; sp.bestMove = bestMove; sp.alpha = alpha; sp.beta = beta; sp.nodeType = nodeType; sp.cutNode = cutNode; sp.movePicker = movePicker; sp.moveCount = moveCount; sp.pos = pos; sp.nodes = 0; sp.cutoff = false; sp.ss = ss; sp.allSlavesSearching = true; // Must be set under lock protection ++splitPointsSize; activeSplitPoint = sp; activePosition = null; // Try to allocate available threads Thread slave; while (Bitcount.popcount_Full(sp.slavesMask) < _.MAX_SLAVES_PER_SPLITPOINT && (slave = ThreadPool.available_slave(sp)) != null) { ThreadHelper.lock_grab(slave.spinlock); if (slave.can_join(activeSplitPoint)) { activeSplitPoint.slavesMask |= 1u << (slave.idx); slave.activeSplitPoint = activeSplitPoint; slave.searching = true; } ThreadHelper.lock_release(slave.spinlock); } // Everything is set up. The master thread enters the idle loop, from which // it will instantly launch a search, because its 'searching' flag is set. // The thread will return from the idle loop when all slaves have finished // their work at this split point. ThreadHelper.lock_release(sp.spinLock); base_idle_loop(null); // Force a call to base class idle_loop() // In the helpful master concept, a master can help only a sub-tree of its // split point and because everything is finished here, it's not possible // for the master to be booked. Debug.Assert(!searching); Debug.Assert(activePosition == null); // We have returned from the idle loop, which means that all threads are // finished. Note that decreasing splitPointsSize must be done under lock // protection to avoid a race with Thread::can_join(). ThreadHelper.lock_grab(spinlock); searching = true; --splitPointsSize; activeSplitPoint = sp.parentSplitPoint; activePosition = pos; ThreadHelper.lock_release(spinlock); // Split point data cannot be changed now, so no need to lock protect pos.set_nodes_searched(pos.nodes_searched() + sp.nodes); bestMove = Move.Create(sp.bestMove); bestValue = Value.Create(sp.bestValue); }
//DELETE public int Delete() { if (_targetKind == TargetKind.Dir || _targetKind == TargetKind.Move) { _depth = Depth.DepthInfinity;//a DELETE on a collection must act as if Depth "infinity" were used RFC2518(8.6.2) } if (_depth == Depth.Null) _depth = Depth.DepthInfinity;//when not specified, default to "infinity" RFC2518(9.2) int responseCode = 405; if (_webDavKind == WebDavKind.Write) { _webDavDb.Remove(_hrefUri);//remove from the database if (Directory.Exists(_fullPath)) { try { //delete the directory RemoveDirectory(_hrefUri, _fullPath, true); responseCode = 204;//No Content } catch (Exception ex) { _logger.Set(LogKind.Error, null, 29, ex.Message); responseCode = 500;//ERROR } } else if (File.Exists(_fullPath)) { try { responseCode = RemoveFile(_hrefUri, _fullPath) ? 204 : 500; } catch (Exception ex) { _logger.Set(LogKind.Error, null, 30, ex.Message); responseCode = 500;//ERROR } } else { responseCode = 404;//Not Found } } return responseCode; }
//MOVE.COPY public int MoveCopy(Target destTarget, bool overwrite, HttpMethod httpMethod) { int responseCode = 405; if (_targetKind == TargetKind.Dir) { _depth = Depth.DepthInfinity;//a MOVE on a collection must act as if Depth "infinity" were used RFC2518(8.9.2) } if (_depth == Depth.Null) _depth = Depth.DepthInfinity;//when not specified, default to "infinity" RFC2518(9.2) if (Directory.Exists(_fullPath)) { try { responseCode = 201; if (overwrite) { if (Directory.Exists(destTarget.FullPath)) { responseCode = 204; try { //delete the destination directory RemoveDirectory(destTarget.Uri, destTarget.FullPath, false); } catch (Exception ex) { _logger.Set(LogKind.Error, null, 32, ex.Message); } } } if (Directory.Exists(destTarget.FullPath)) { //error if the destination still exists responseCode = 403; } else { //copy the directory if (CopyDirectory(_hrefUri, _fullPath, destTarget.Uri, destTarget.FullPath)) { if (httpMethod == HttpMethod.Move) { //delete the source directory RemoveDirectory(_hrefUri, _fullPath, true); } } else { responseCode = 403; } } } catch (Exception ex) { _logger.Set(LogKind.Error, null, 34, ex.Message); responseCode = 500; } } else if (File.Exists(_fullPath)) { try { responseCode = 201; if (overwrite) { if (File.Exists(destTarget.FullPath)) { responseCode = 204; try { RemoveFile(destTarget.Uri, destTarget.FullPath);//delete the destination file } catch (Exception ex) { _logger.Set(LogKind.Error, null, 33, ex.Message); } } } if (File.Exists(destTarget.FullPath)) { //error if the destination still exists responseCode = 403; } else { //copy the file if (CopyFile(_hrefUri, _fullPath, destTarget.Uri, destTarget.FullPath)) { if (httpMethod == HttpMethod.Move) { //delete the source file RemoveFile(_hrefUri, _fullPath); } } else { responseCode = 403; } } } catch (Exception ex) { _logger.Set(LogKind.Error, null, 35, ex.Message); responseCode = 500; } } return responseCode; }
//PROPFIND public int PropFind() { if (_webDavKind == WebDavKind.Non) return 500; if (_targetKind == TargetKind.Non) return 404; if (_depth == Depth.Null) _depth = Depth.DepthInfinity;//when not specified, default to "infinity" RFC2518(8.1 PROPFIND) const int responseCode = 207; //class that accumulates the PROPFIND information and builds the response var propFindResponce = new PropFindResponce(_webDavDb); //if(target.Kind == TARGET_KIND.DIR) {//get the property values of the collection itself if (_targetKind == TargetKind.Dir) { //get the property values of the collection itself const bool isCollection = true; //collection var di = new DirectoryInfo(_fullPath); propFindResponce.Add(isCollection, di.Name, _hrefHost, _hrefUri, "", //contentType "", //etag 0, //directory size is initialized to 0 di.CreationTime, di.LastWriteTime); if (_depth != Depth.Depth0) { //get the property values of the immediate children FindAll(propFindResponce, _depth, _hrefHost, _hrefUri, _fullPath, _useEtag); } } else { //get the property values of the single resource const bool isCollection = false; //not a collection var info = new FileInfo(_fullPath); propFindResponce.Add(isCollection, info.Name, _hrefHost, _hrefUri, _contentType.Get(_fullPath), _useEtag ? WebServerUtil.Etag(info) : "", info.Length, info.CreationTime, info.LastWriteTime); } //build the response _document.CreateFromXml(propFindResponce.ToString()); return responseCode; }
private static ValueT futility_margin(Depth d) { return Value.Create(200* (int)d); }
//get the property values of resources below this level void FindAll(PropFindResponce propFindResponce, Depth depth, string hrefHost, string hrefUri, string path, bool useEtag) { if (hrefUri.Length > 1 && hrefUri[hrefUri.Length - 1] != '/') { hrefUri = hrefUri + "/"; } var di = new DirectoryInfo(path); var isCollection = true; foreach (DirectoryInfo info in di.GetDirectories("*.*")) { propFindResponce.Add(isCollection, info.Name, hrefHost, hrefUri + info.Name + "/", "", //contentType "", //etag 0, //directory size is initialized to 0 info.CreationTime, info.LastWriteTime); if (depth == Depth.DepthInfinity) { //recurse into deeper levels string newPath = path + info.Name; if (path[path.Length - 1] != '\\') newPath = path + "\\" + info.Name; FindAll(propFindResponce, depth, hrefHost, hrefUri + info.Name + "/", newPath, useEtag); } } isCollection = false; foreach (FileInfo info in di.GetFiles("*.*")) { propFindResponce.Add(isCollection, info.Name, hrefHost, hrefUri + info.Name, _contentType.Get(info.Name), useEtag ? WebServerUtil.Etag(info) : "", //Etag info.Length, info.CreationTime, info.LastWriteTime); } }
internal bool time_to_pick(Depth depth) { return depth/Depth.ONE_PLY == 1 + level; }
internal void WriteItem(Node node, Depth itemDepth) { WriteItem(node, itemDepth, false); }
private static Depth reduction(bool PvNode, bool i, Depth d, int mn) { return Reductions[PvNode ? 1 : 0, i ? 1 : 0, Math.Min(d, 63*Depth.ONE_PLY_C), Math.Min(mn, 63)]; }
/// <summary> /// 0/data/getDepth.php /// </summary> public Depth getDepth(Currency currency) { try { string url = (this.baseURL) + "0/data/getDepth.php?currency=" + currency.ToString(); string postData = ""; string responseStr = DoAuthenticatedAPIPost(url, apiKey, apiSecret, postData); Depth returnValue = new Depth();//Depth.getObjects(responseStr); return returnValue; } catch (Exception) { return null; } }
public DepthBuffer(Device device, int width, int height, Depth depth) : this(device, width, height, depth, 0) { }
protected virtual void Kinect_NewDepthFrame(Depth depthImage) { }
public DepthBuffer(Device device, int width, int height, Depth depth, int samples) : base(device, width, height, samples) { this.depth = depth; Gl.glBindRenderbufferEXT (Gl.GL_RENDERBUFFER_EXT, this.Handle); if (samples == 0) { Gl.glRenderbufferStorageEXT(Gl.GL_RENDERBUFFER_EXT, (int)this.depth, this.Width, this.Height); } else { Gl.glRenderbufferStorageMultisampleEXT(Gl.GL_RENDERBUFFER_EXT, this.Samples, (int)this.depth, this.Width, this.Height); } }
void k_NewDepthFrame(Depth depthImage) { byte[] pixels = GenerateColoredBytes(depthImage.PlayerDepthFrame.PlayerDepths, depthImage.DepthFrame.Height, depthImage.DepthFrame.Width); this.DepthImage = BitmapSource.Create(depthImage.DepthFrame.Width, depthImage.DepthFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, depthImage.DepthFrame.Width * 4); // this.PlayerImage = UIHelpers.PlayerDepthToBitmapSource(depthImage, false); }
// search<>() is the main search function for both PV and non-PV nodes and for // normal and SplitPoint nodes. When called just after a split point the search // is simpler because we have already probed the hash table, done a null move // search, and searched the first move before splitting, so we don't have to // repeat all this work again. We also don't need to store anything to the hash // table here: This is taken care of after we return from the split point. private static ValueT search(NodeType NT, bool SpNode, Position pos, StackArrayWrapper ss, ValueT alpha, ValueT beta, Depth depth, bool cutNode) { Utils.WriteToLog($"search(NT={(int) NT}, SpNode={(SpNode ? 1 : 0)}, pos={pos.key()}, ss, alpha={alpha}, beta={beta}, depth={(int) depth}, cutNode={(cutNode ? 1 : 0)})"); var RootNode = NT == NodeType.Root; var PvNode = RootNode || NT == NodeType.PV; Debug.Assert(-Value.VALUE_INFINITE <= alpha && alpha < beta && beta <= Value.VALUE_INFINITE); Debug.Assert(PvNode || (alpha == beta - 1)); Debug.Assert(depth > Depth.DEPTH_ZERO); var st = new StateInfo(); TTEntry tte; SplitPoint splitPoint = null; ulong posKey = 0; MoveT ttMove, move, excludedMove, bestMove; ValueT bestValue, value, ttValue, eval; bool ttHit; int moveCount = 0; int quietCount = 0; var stack = ss[ss.current]; var stackPlus1 = ss[ss.current + 1]; var stackPlus2 = ss[ss.current + 2]; var stackMinus1 = ss[ss.current - 1]; var stackMinus2 = ss[ss.current - 2]; // Step 1. Initialize node var thisThread = pos.this_thread(); bool inCheck = pos.checkers() != 0; if (SpNode) { splitPoint = stack.splitPoint; bestMove = Move.Create(splitPoint.bestMove); bestValue = Value.Create(splitPoint.bestValue); tte = new TTEntry(); ttMove = excludedMove = Move.MOVE_NONE; ttValue = Value.VALUE_NONE; Debug.Assert(splitPoint.bestValue > -Value.VALUE_INFINITE && splitPoint.moveCount > 0); goto moves_loop; } moveCount = quietCount = stack.moveCount = 0; bestValue = -Value.VALUE_INFINITE; stack.ply = stackMinus1.ply + 1; // Used to send selDepth info to GUI if (PvNode && thisThread.maxPly < stack.ply) thisThread.maxPly = stack.ply; if (!RootNode) { // Step 2. Check for aborted search and immediate draw if (Signals.stop || pos.is_draw() || stack.ply >= _.MAX_PLY) return stack.ply >= _.MAX_PLY && !inCheck ? Eval.evaluate(false, pos) : DrawValue[pos.side_to_move()]; // Step 3. Mate distance pruning. Even if we mate at the next move our score // would be at best mate_in(ss.ply+1), but if alpha is already bigger because // a shorter mate was found upward in the tree then there is no need to search // because we will never beat the current alpha. Same logic but with reversed // signs applies also in the opposite condition of being mated instead of giving // mate. In this case return a fail-high score. alpha = Value.Create(Math.Max(Value.mated_in(stack.ply), alpha)); beta = Value.Create(Math.Min(Value.mate_in(stack.ply + 1), beta)); if (alpha >= beta) return alpha; } Debug.Assert(0 <= stack.ply && stack.ply < _.MAX_PLY); stack.currentMove = stack.ttMove = stackPlus1.excludedMove = bestMove = Move.MOVE_NONE; stackPlus1.skipEarlyPruning = false; stackPlus1.reduction = Depth.DEPTH_ZERO; stackPlus2.killers0 = stackPlus2.killers1 = Move.MOVE_NONE; // Step 4. Transposition table lookup // We don't want the score of a partial search to overwrite a previous full search // TT value, so we use a different position key in case of an excluded move. excludedMove = stack.excludedMove; posKey = excludedMove != 0 ? 
pos.exclusion_key() : pos.key(); tte = TranspositionTable.probe(posKey, out ttHit); stack.ttMove = ttMove = RootNode ? RootMoves[(int) PVIdx].pv[0] : ttHit ? tte.move() : Move.MOVE_NONE; ttValue = ttHit ? value_from_tt(tte.value(), stack.ply) : Value.VALUE_NONE; // At non-PV nodes we check for a fail high/low. We don't prune at PV nodes if (!PvNode && ttHit && tte.depth() >= depth && ttValue != Value.VALUE_NONE // Only in case of TT access race && (ttValue >= beta ? (tte.bound() & Bound.BOUND_LOWER) != 0 : (tte.bound() & Bound.BOUND_UPPER) != 0)) { stack.currentMove = ttMove; // Can be Move.MOVE_NONE // If ttMove is quiet, update killers, history, counter move on TT hit if (ttValue >= beta && ttMove != 0 && !pos.capture_or_promotion(ttMove)) update_stats(pos, ss, ttMove, depth, null, 0); return ttValue; } // Step 4a. Tablebase probe if (!RootNode && Tablebases.Cardinality != 0) { var piecesCnt = pos.count(PieceType.ALL_PIECES, Color.WHITE) + pos.count(PieceType.ALL_PIECES, Color.BLACK); if (piecesCnt <= Tablebases.Cardinality && (piecesCnt < Tablebases.Cardinality || depth >= Tablebases.ProbeDepth) && pos.rule50_count() == 0) { var found = 0; var v = Tablebases.probe_wdl(pos, ref found); if (found != 0) { Tablebases.Hits++; var drawScore = Tablebases.UseRule50 ? 1 : 0; value = v < -drawScore ? -Value.VALUE_MATE + _.MAX_PLY + stack.ply : v > drawScore ? Value.VALUE_MATE - _.MAX_PLY - stack.ply : Value.VALUE_DRAW + 2*v*drawScore; tte.save(posKey, value_to_tt(value, stack.ply), Bound.BOUND_EXACT, new Depth(Math.Min(_.MAX_PLY - Depth.ONE_PLY_C, (int)depth + 6*Depth.ONE_PLY_C)), Move.MOVE_NONE, Value.VALUE_NONE, TranspositionTable.generation()); return value; } } } // Step 5. Evaluate the position statically if (inCheck) { stack.staticEval = Value.VALUE_NONE; goto moves_loop; } if (ttHit) { // Never assume anything on values stored in TT if ((stack.staticEval = eval = tte.eval()) == Value.VALUE_NONE) eval = stack.staticEval = Eval.evaluate(false, pos); // Can ttValue be used as a better position evaluation? if (ttValue != Value.VALUE_NONE) if ((tte.bound() & (ttValue > eval ? Bound.BOUND_LOWER : Bound.BOUND_UPPER)) != 0) eval = ttValue; } else { eval = stack.staticEval = stackMinus1.currentMove != Move.MOVE_NULL ? Eval.evaluate(false, pos) : -stackMinus1.staticEval + 2*Eval.Tempo; tte.save(posKey, Value.VALUE_NONE, Bound.BOUND_NONE, Depth.DEPTH_NONE, Move.MOVE_NONE, stack.staticEval, TranspositionTable.generation()); } if (stack.skipEarlyPruning) goto moves_loop; // Step 6. Razoring (skipped when in check) if (!PvNode && depth < 4*Depth.ONE_PLY && eval + razor_margin(depth) <= alpha && ttMove == Move.MOVE_NONE) { if (depth <= Depth.ONE_PLY_C && eval + razor_margin(3*Depth.ONE_PLY) <= alpha) return qsearch(NodeType.NonPV, false, pos, ss, alpha, beta, Depth.DEPTH_ZERO); var ralpha = alpha - razor_margin(depth); var v = qsearch(NodeType.NonPV, false, pos, ss, ralpha, ralpha + 1, Depth.DEPTH_ZERO); if (v <= ralpha) return v; } // Step 7. Futility pruning: child node (skipped when in check) if (!RootNode && depth < 7*Depth.ONE_PLY && eval - futility_margin(depth) >= beta && eval < Value.VALUE_KNOWN_WIN // Do not return unproven wins && pos.non_pawn_material(pos.side_to_move())!=0) return eval - futility_margin(depth); // Step 8. 
Null move search with verification search (is omitted in PV nodes) if (!PvNode && depth >= 2*Depth.ONE_PLY_C && eval >= beta && pos.non_pawn_material(pos.side_to_move())!=0) { stack.currentMove = Move.MOVE_NULL; Debug.Assert(eval - beta >= 0); // Null move dynamic reduction based on depth and value var R = ((823 + 67*depth)/256 + Math.Min((eval - beta)/Value.PawnValueMg, 3))*(int) Depth.ONE_PLY; pos.do_null_move(st); stackPlus1.skipEarlyPruning = true; var nullValue = depth - R < Depth.ONE_PLY ? -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, Depth.DEPTH_ZERO) : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -beta + 1, depth - R, !cutNode); stackPlus1.skipEarlyPruning = false; pos.undo_null_move(); if (nullValue >= beta) { // Do not return unproven mate scores if (nullValue >= Value.VALUE_MATE_IN_MAX_PLY) nullValue = beta; if (depth < 12*Depth.ONE_PLY && Math.Abs(beta) < Value.VALUE_KNOWN_WIN) return nullValue; // Do verification search at high depths stack.skipEarlyPruning = true; var v = depth - R < Depth.ONE_PLY ? qsearch(NodeType.NonPV, false, pos, ss, beta - 1, beta, Depth.DEPTH_ZERO) : search(NodeType.NonPV, false, pos, ss, beta - 1, beta, depth - R, false); stack.skipEarlyPruning = false; if (v >= beta) return nullValue; } } // Step 9. ProbCut (skipped when in check) // If we have a very good capture (i.e. SEE > seeValues[captured_piece_type]) // and a reduced search returns a value much above beta, we can (almost) safely // prune the previous move. if (!PvNode && depth >= 5*Depth.ONE_PLY_C && Math.Abs(beta) < Value.VALUE_MATE_IN_MAX_PLY) { var rbeta = Value.Create(Math.Min(beta + 200, Value.VALUE_INFINITE)); var rdepth = depth - 4*Depth.ONE_PLY; Debug.Assert(rdepth >= Depth.ONE_PLY_C); Debug.Assert(stackMinus1.currentMove != Move.MOVE_NONE); Debug.Assert(stackMinus1.currentMove != Move.MOVE_NULL); var mp2 = new MovePicker(pos, ttMove, History, CounterMovesHistory, Value.PieceValue[(int) Phase.MG][pos.captured_piece_type()]); var ci2 = new CheckInfo(pos); while ((move = mp2.next_move(false)) != Move.MOVE_NONE) if (pos.legal(move, ci2.pinned)) { stack.currentMove = move; pos.do_move(move, st, pos.gives_check(move, ci2)); value = -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -rbeta, -rbeta + 1, rdepth, !cutNode); pos.undo_move(move); if (value >= rbeta) return value; } } // Step 10. Internal iterative deepening (skipped when in check) if (depth >= (PvNode ? 5*Depth.ONE_PLY_C : 8*Depth.ONE_PLY_C) && ttMove == 0 && (PvNode || stack.staticEval + 256 >= beta)) { var d = depth - 2*Depth.ONE_PLY - (PvNode ? Depth.DEPTH_ZERO : depth/4); stack.skipEarlyPruning = true; search(PvNode ? NodeType.PV : NodeType.NonPV, false, pos, ss, alpha, beta, d, true); stack.skipEarlyPruning = false; tte = TranspositionTable.probe(posKey, out ttHit); ttMove = ttHit ? 
moves_loop: // When in check and at SpNode the search starts from here

var prevMoveSq = Move.to_sq(stackMinus1.currentMove);
var countermove = Countermoves.table[pos.piece_on(prevMoveSq), prevMoveSq];

var mp = new MovePicker(pos, ttMove, depth, History, CounterMovesHistory, countermove, ss);
var ci = new CheckInfo(pos);
value = bestValue; // Workaround a bogus 'uninitialized' warning under gcc
var improving = stack.staticEval >= stackMinus2.staticEval
                || stack.staticEval == Value.VALUE_NONE
                || stackMinus2.staticEval == Value.VALUE_NONE;

var singularExtensionNode = !RootNode && !SpNode
                            && depth >= 8*Depth.ONE_PLY_C
                            && ttMove != Move.MOVE_NONE
                            /* ttValue != Value.VALUE_NONE is already implicit in the next condition */
                            && Math.Abs(ttValue) < Value.VALUE_KNOWN_WIN
                            && excludedMove == 0 // Recursive singular search is not allowed
                            && ((tte.bound() & Bound.BOUND_LOWER) != 0)
                            && tte.depth() >= depth - 3*Depth.ONE_PLY_C;
var quietsSearched = new MoveT[64];

// Step 11. Loop through moves
// Loop through all pseudo-legal moves until no moves remain
// or a beta cutoff occurs.
while ((move = mp.next_move(SpNode)) != Move.MOVE_NONE)
{
    Utils.WriteToLog($"mp.next_move = {(int) move}");
    Debug.Assert(Move.is_ok(move));

    if (move == excludedMove)
        continue;

    // At root obey the "searchmoves" option and skip moves not listed in the
    // Root Move List. As a consequence any illegal move is also skipped. In
    // MultiPV mode we also skip PV moves which have been already searched.
    if (RootNode && RootMoves.All(rootMove => rootMove.pv[0] != move))
        continue;

    if (SpNode)
    {
        // Shared counter cannot be decremented later if the move turns out to be illegal
        if (!pos.legal(move, ci.pinned))
            continue;

        stack.moveCount = moveCount = ++splitPoint.moveCount;
        ThreadHelper.lock_release(splitPoint.spinLock);
    }
    else
        stack.moveCount = ++moveCount;

    if (RootNode)
    {
        Signals.firstRootMove = (moveCount == 1);

        if (thisThread == ThreadPool.main() && TimeManagement.elapsed() > 3000)
            Output.WriteLine(
                $"info depth {depth/Depth.ONE_PLY} currmove {UCI.move(move, pos.is_chess960())} currmovenumber {moveCount + PVIdx}");
    }

    if (PvNode)
        stackPlus1.pv = new List<MoveT>();

    var extension = Depth.DEPTH_ZERO;
    var captureOrPromotion = pos.capture_or_promotion(move);

    var givesCheck = Move.type_of(move) == MoveType.NORMAL && ci.dcCandidates == 0
        ? Bitboard.AndWithSquare(ci.checkSquares[Piece.type_of(pos.piece_on(Move.from_sq(move)))], Move.to_sq(move)) != 0
        : pos.gives_check(move, ci);

    // Step 12. Extend checks
    if (givesCheck && pos.see_sign(move) >= Value.VALUE_ZERO)
        extension = Depth.ONE_PLY;

    // Singular extension search. If all moves but one fail low on a search of
    // (alpha-s, beta-s), and just one fails high on (alpha, beta), then that
    // move is singular and should be extended. To verify this we do a reduced
    // search on all the other moves but the ttMove and if the result is lower
    // than ttValue minus a margin then we extend the ttMove.
    if (singularExtensionNode
        && move == ttMove
        && extension == 0
        && pos.legal(move, ci.pinned))
    {
        var rBeta = ttValue - 2*depth/Depth.ONE_PLY;
        stack.excludedMove = move;
        stack.skipEarlyPruning = true;
        value = search(NodeType.NonPV, false, pos, ss, rBeta - 1, rBeta, depth/2, cutNode);
        stack.skipEarlyPruning = false;
        stack.excludedMove = Move.MOVE_NONE;

        if (value < rBeta)
            extension = Depth.ONE_PLY;
    }

    // Update the current move (this must be done after singular extension search)
    var newDepth = depth - Depth.ONE_PLY + extension;
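    // A worked example of the singular margin above (explanatory only): with
    // ttValue = 120 and depth = 10 plies, rBeta = 120 - 2*10 = 100, so every
    // move except ttMove is searched at half depth with the zero-width window
    // (99, 100). If none of them reaches 100, ttMove is the only move holding
    // the score and earns one extra ply of search.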
    // Step 13. Pruning at shallow depth
    if (!RootNode
        && !captureOrPromotion
        && !inCheck
        && !givesCheck
        && !pos.advanced_pawn_push(move)
        && bestValue > Value.VALUE_MATED_IN_MAX_PLY)
    {
        // Move count based pruning
        if (depth < 16*Depth.ONE_PLY
            && moveCount >= FutilityMoveCounts[improving ? 1 : 0, depth])
        {
            if (SpNode)
                ThreadHelper.lock_grab(splitPoint.spinLock);
            continue;
        }

        var predictedDepth = newDepth - reduction(PvNode, improving, depth, moveCount);

        // Futility pruning: parent node
        if (predictedDepth < 7*Depth.ONE_PLY)
        {
            var futilityValue = stack.staticEval + futility_margin(predictedDepth) + 256;

            if (futilityValue <= alpha)
            {
                bestValue = Value.Create(Math.Max(bestValue, futilityValue));

                if (SpNode)
                {
                    ThreadHelper.lock_grab(splitPoint.spinLock);
                    if (bestValue > splitPoint.bestValue)
                        splitPoint.bestValue = bestValue;
                }
                continue;
            }
        }

        // Prune moves with negative SEE at low depths
        if (predictedDepth < 4*Depth.ONE_PLY && pos.see_sign(move) < Value.VALUE_ZERO)
        {
            if (SpNode)
                ThreadHelper.lock_grab(splitPoint.spinLock);
            continue;
        }
    }

    // Speculative prefetch as early as possible
    //prefetch(TT.first_entry(pos.key_after(move)));

    // Check for legality just before making the move
    if (!RootNode && !SpNode && !pos.legal(move, ci.pinned))
    {
        stack.moveCount = --moveCount;
        continue;
    }

    stack.currentMove = move;

    // Step 14. Make the move
    pos.do_move(move, st, givesCheck);

    // Step 15. Reduced depth search (LMR). If the move fails high it will be
    // re-searched at full depth.
    bool doFullDepthSearch;
    if (depth >= 3*Depth.ONE_PLY_C
        && moveCount > 1
        && !captureOrPromotion
        && move != stack.killers0
        && move != stack.killers1)
    {
        stack.reduction = reduction(PvNode, improving, depth, moveCount);

        if ((!PvNode && cutNode)
            || (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] < Value.VALUE_ZERO
                && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[
                    pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] <= Value.VALUE_ZERO))
            stack.reduction += Depth.ONE_PLY;

        if (History.table[pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO
            && CounterMovesHistory.table[pos.piece_on(prevMoveSq), prevMoveSq].table[
                pos.piece_on(Move.to_sq(move)), Move.to_sq(move)] > Value.VALUE_ZERO)
            stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));

        // Decrease reduction for moves that escape a capture
        if (stack.reduction > 0
            && Move.type_of(move) == MoveType.NORMAL
            && Piece.type_of(pos.piece_on(Move.to_sq(move))) != PieceType.PAWN
            && pos.see(Move.make_move(Move.to_sq(move), Move.from_sq(move))) < Value.VALUE_ZERO)
            stack.reduction = new Depth(Math.Max(Depth.DEPTH_ZERO_C, stack.reduction - Depth.ONE_PLY_C));

        var d = new Depth(Math.Max(newDepth - (int) stack.reduction, Depth.ONE_PLY_C));
        if (SpNode)
            alpha = Value.Create(splitPoint.alpha);

        value = -search(NodeType.NonPV, false, pos,
            new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, d, true);

        doFullDepthSearch = (value > alpha && stack.reduction != Depth.DEPTH_ZERO);
        stack.reduction = Depth.DEPTH_ZERO;
    }
    else
        doFullDepthSearch = !PvNode || moveCount > 1;
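    // A note on Step 15 (explanatory only): the reduced search runs with the
    // zero-width window (alpha, alpha + 1) at d = max(newDepth - reduction,
    // ONE_PLY). The reduction is nudged up for cut-nodes and for quiets with
    // a poor history/countermove record, and nudged down for quiets with a
    // good record or that step out of a capture. Only if the reduced search
    // unexpectedly beats alpha is doFullDepthSearch set, triggering the
    // full-depth re-search in Step 16 below.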
    // Step 16. Full depth search, when LMR is skipped or fails high
    if (doFullDepthSearch)
    {
        if (SpNode)
            alpha = Value.Create(splitPoint.alpha);

        value = newDepth < Depth.ONE_PLY
            ? givesCheck
                ? -qsearch(NodeType.NonPV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO)
                : -qsearch(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, Depth.DEPTH_ZERO)
            : -search(NodeType.NonPV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -(alpha + 1), -alpha, newDepth, !cutNode);
    }

    // For PV nodes only, do a full PV search on the first move or after a fail
    // high (in the latter case search only if value < beta), otherwise let the
    // parent node fail low with value <= alpha and try another move.
    if (PvNode && (moveCount == 1 || (value > alpha && (RootNode || value < beta))))
    {
        stackPlus1.pv = new List<MoveT> { Move.MOVE_NONE };

        value = newDepth < Depth.ONE_PLY
            ? givesCheck
                ? -qsearch(NodeType.PV, true, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO)
                : -qsearch(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, Depth.DEPTH_ZERO)
            : -search(NodeType.PV, false, pos, new StackArrayWrapper(ss.table, ss.current + 1), -beta, -alpha, newDepth, false);
    }

    // Step 17. Undo move
    pos.undo_move(move);

    Debug.Assert(value > -Value.VALUE_INFINITE && value < Value.VALUE_INFINITE);

    // Step 18. Check for new best move
    if (SpNode)
    {
        ThreadHelper.lock_grab(splitPoint.spinLock);
        bestValue = Value.Create(splitPoint.bestValue);
        alpha = Value.Create(splitPoint.alpha);
    }

    // Finished searching the move. If a stop or a cutoff occurred, the return
    // value of the search cannot be trusted, and we return immediately without
    // updating best move, PV and TT.
    if (Signals.stop || thisThread.cutoff_occurred())
        return Value.VALUE_ZERO;

    if (RootNode)
    {
        var rm = RootMoves.Find(rootmove => rootmove.pv[0] == move);

        // PV move or new best move?
        if (moveCount == 1 || value > alpha)
        {
            rm.score = value;
            var firstEntry = rm.pv[0];
            rm.pv.Clear();
            rm.pv.Add(firstEntry);

            Debug.Assert(stackPlus1.pv != null);

            foreach (var m in stackPlus1.pv.TakeWhile(m => m != Move.MOVE_NONE))
            {
                rm.pv.Add(m);
            }

            // We record how often the best move has been changed in each
            // iteration. This information is used for time management: When
            // the best move changes frequently, we allocate some more time.
            if (moveCount > 1)
                ++BestMoveChanges;
        }
        else
            // All other moves but the PV are set to the lowest value: this is
            // not a problem when sorting because the sort is stable and the
            // move position in the list is preserved - just the PV is pushed up.
            rm.score = -Value.VALUE_INFINITE;
    }

    if (value > bestValue)
    {
        bestValue = Value.Create(SpNode ? splitPoint.bestValue = value : value);

        if (value > alpha)
        {
            // If there is an easy move for this position, clear it if unstable
            if (PvNode
                && EasyMove.get(pos.key()) != 0
                && (move != EasyMove.get(pos.key()) || moveCount > 1))
                EasyMove.clear();

            bestMove = Move.Create(SpNode ? splitPoint.bestMove = move : move);

            if (PvNode && !RootNode) // Update pv even in fail-high case
                update_pv(SpNode ? splitPoint.ss[ss.current].pv : stack.pv, move, stackPlus1.pv);

            if (PvNode && value < beta) // Update alpha! Always alpha < beta
                alpha = Value.Create(SpNode ? splitPoint.alpha = value : value);
            else
            {
                Debug.Assert(value >= beta); // Fail high

                if (SpNode)
                    splitPoint.cutoff = true;

                break;
            }
        }
    }

    if (!SpNode && !captureOrPromotion && move != bestMove && quietCount < 64)
        quietsSearched[quietCount++] = move;
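    // A note on the bookkeeping above (explanatory only): quiet moves that
    // failed to become the best move are remembered in quietsSearched (capped
    // at 64) so that, on a later beta cutoff, update_stats can reward the
    // cutoff move and penalize these earlier quiets in the history and
    // countermove tables.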
    // Step 19. Check for splitting the search
    if (!SpNode
        && ThreadPool.threads.Count >= 2
        && depth >= ThreadPool.minimumSplitDepth
        && (thisThread.activeSplitPoint == null
            || !thisThread.activeSplitPoint.allSlavesSearching
            || (ThreadPool.threads.Count > _.MAX_SLAVES_PER_SPLITPOINT
                && Bitcount.popcount_Full(thisThread.activeSplitPoint.slavesMask) == _.MAX_SLAVES_PER_SPLITPOINT))
        && thisThread.splitPointsSize < _.MAX_SPLITPOINTS_PER_THREAD)
    {
        Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < beta);

        thisThread.split(pos, ss, alpha, beta, ref bestValue, ref bestMove,
            depth, moveCount, mp, NT, cutNode);

        if (Signals.stop || thisThread.cutoff_occurred())
            return Value.VALUE_ZERO;

        if (bestValue >= beta)
            break;
    }
}

if (SpNode)
    return bestValue;

// The following condition would detect a stop or a cutoff set only after the
// move loop has been completed. But in this case bestValue is valid because
// we have fully searched our subtree, and we can anyhow save the result in TT.
/*
if (Signals.stop || thisThread.cutoff_occurred())
    return VALUE_DRAW;
*/

// Step 20. Check for mate and stalemate
// All legal moves have been searched and if there are no legal moves, it
// must be a mate or a stalemate. If we are in a singular extension search
// then return a fail low score.
if (moveCount == 0)
    bestValue = excludedMove != 0
        ? alpha
        : inCheck ? Value.mated_in(stack.ply) : DrawValue[pos.side_to_move()];

// Quiet best move: update killers, history and countermoves
else if (bestMove != 0 && !pos.capture_or_promotion(bestMove))
    update_stats(pos, ss, bestMove, depth, quietsSearched, quietCount);

// Bonus for the prior countermove that caused the fail low
else if (bestMove == 0)
{
    if (Move.is_ok(stackMinus2.currentMove)
        && Move.is_ok(stackMinus1.currentMove)
        && pos.captured_piece_type() == 0
        && !inCheck
        && depth >= 3*Depth.ONE_PLY_C)
    {
        var bonus = Value.Create((depth/Depth.ONE_PLY)*(depth/Depth.ONE_PLY));
        var prevSq = Move.to_sq(stackMinus1.currentMove);
        var prevPrevSq = Move.to_sq(stackMinus2.currentMove);
        var flMoveCmh = CounterMovesHistory.table[pos.piece_on(prevPrevSq), prevPrevSq];
        flMoveCmh.updateCMH(pos.piece_on(prevSq), prevSq, bonus);
    }
}

tte.save(posKey, value_to_tt(bestValue, stack.ply),
    bestValue >= beta
        ? Bound.BOUND_LOWER
        : PvNode && bestMove != 0 ? Bound.BOUND_EXACT : Bound.BOUND_UPPER,
    depth, bestMove, stack.staticEval, TranspositionTable.generation());

Debug.Assert(bestValue > -Value.VALUE_INFINITE && bestValue < Value.VALUE_INFINITE);

return bestValue;
}
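/// The search above round-trips scores through value_to_tt()/value_from_tt()
/// when saving to and probing the transposition table. The sketch below is a
/// minimal, self-contained illustration of that convention and is NOT part of
/// the engine sources: the class name and the VALUE_MATE / MAX_PLY constants
/// are stand-ins for the engine's own. The idea is that mate scores are
/// stored relative to the current node ("mate in N plies from here") rather
/// than relative to the root, so a TT entry stays valid when the same
/// position is reached again at a different ply. For example, mate_in(3)
/// found at ply 1 (VALUE_MATE - 3) is stored as VALUE_MATE - 2, i.e. "mate
/// in 2 from this node"; probed at ply 5 it is read back as VALUE_MATE - 7,
/// which is again mate in 2 from the probing node.
internal static class TtMateScoreSketch
{
    private const int VALUE_NONE = 32002;  // assumed "no value" sentinel
    private const int VALUE_MATE = 32000;  // assumed mate score
    private const int MAX_PLY = 128;       // assumed maximum search ply
    private const int VALUE_MATE_IN_MAX_PLY = VALUE_MATE - 2*MAX_PLY;

    // Store: shift a root-relative mate score to a node-relative one.
    internal static int ValueToTt(int v, int ply)
    {
        return v >= VALUE_MATE_IN_MAX_PLY ? v + ply
            : v <= -VALUE_MATE_IN_MAX_PLY ? v - ply
            : v;
    }

    // Probe: shift a node-relative mate score back to root-relative.
    internal static int ValueFromTt(int v, int ply)
    {
        return v == VALUE_NONE ? VALUE_NONE
            : v >= VALUE_MATE_IN_MAX_PLY ? v - ply
            : v <= -VALUE_MATE_IN_MAX_PLY ? v + ply
            : v;
    }
}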