        public static Bitmap ToBitmap(VideoFrame frame)
        {
            var bytes = new byte[frame.Stride * frame.Height];

            frame.CopyTo(bytes);

            // Pick a GDI+ pixel format matching the frame's bit depth (24bpp RGB by default).
            var         bpp = frame.BitsPerPixel;
            PixelFormat pf  = PixelFormat.Format24bppRgb;

            switch (bpp)
            {
            case 16:
                pf = PixelFormat.Format16bppGrayScale;
                break;

            case 24:
                pf = PixelFormat.Format24bppRgb;
                break;

            case 32:
                pf = PixelFormat.Format32bppArgb;
                break;
            }

            var        bitmap     = new Bitmap(frame.Width, frame.Height, pf);
            var        boundsRect = new System.Drawing.Rectangle(0, 0, frame.Width, frame.Height);
            BitmapData bmpData    = bitmap.LockBits(boundsRect, ImageLockMode.WriteOnly, pf);

            // Copy the raw frame bytes into the bitmap; this assumes the bitmap's stride equals frame.Stride.
            System.Runtime.InteropServices.Marshal.Copy(bytes, 0, bmpData.Scan0, frame.Stride * frame.Height);
            bitmap.UnlockBits(bmpData);

            return bitmap;
        }
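
A minimal usage sketch for the helper above, assuming a started Intel.RealSense Pipeline (as in the later examples) and System.Drawing; the output file name is purely illustrative:

// Hedged sketch: grab one color frame and save it through ToBitmap().
using (var pipeline = new Pipeline())
{
    pipeline.Start();
    using (FrameSet frames = pipeline.WaitForFrames())
    using (VideoFrame colorFrame = frames.ColorFrame)
    using (Bitmap bitmap = ToBitmap(colorFrame))
    {
        bitmap.Save("frame.png");   // illustrative output path
    }
}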
Example #2
 /// <summary>
 /// Convert a VideoFrame into a 1-D ushort array of length [height * width].
 /// </summary>
 internal ushort[] FrameTo_1D_ushort(VideoFrame depFrame)
 {
     if (depFrame.Width == 0)
     {
         return null;
     }
     ushort[] data = new ushort[depFrame.Width * depFrame.Height];
     depFrame.CopyTo(data);
     return data;
 }
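
A short usage sketch for the helper above: the array is laid out row-major, so pixel (x, y) sits at index y * width + x, and the values are raw Z16 depth units rather than meters. Here depthFrame is assumed to be the depth stream's VideoFrame:

 // Hedged sketch: read the raw depth value at the centre pixel.
 ushort[] data = FrameTo_1D_ushort(depthFrame);
 if (data != null)
 {
     int x = depthFrame.Width / 2;
     int y = depthFrame.Height / 2;
     ushort rawDepth = data[y * depthFrame.Width + x];   // raw Z16 units, not meters
 }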
Example #3
    private void setParticals(Points points, VideoFrame colorFrame)
    {
        if (points == null)
        {
            throw new Exception("Frame in queue is not a points frame");
        }

        if (colorFrame != null)
        {
            if (lastColorImage == null)
            {
                int colorFrameSize = colorFrame.Height * colorFrame.Stride;
                lastColorImage = new byte[colorFrameSize];
            }
            colorFrame.CopyTo(lastColorImage);
        }

        vertices = vertices ?? new Points.Vertex[points.Count];
        points.CopyTo(vertices);

        // Debug.Assert(vertices.Length == particles.Length);
        int mirror = mirrored ? -1 : 1;

        particlesInRangePos = new List<Vector3>();
        //Destroy(GameObject.Find("Cube"));

        destroy = true;


        for (int index = 0; index < vertices.Length; index += skipParticles)
        {
            var v = vertices[index];
            if (v.z > 0 && v.z < 0.5)
            {
                //particles[index].position = new Vector3(v.x * mirror, v.y, v.z);
                particlesInRangePos.Add(new Vector3(v.x * mirror, v.y, v.z));

                //particles[index].startSize = v.z * pointsSize * 0.02f;
                // if (lastColorImage != null)
                //particles[index].startColor = new Color32(lastColorImage[index * 3], lastColorImage[index * 3 + 1], lastColorImage[index * 3 + 2], 255);
                // else
                //  {
                //      byte z = (byte)(v.z / 2f * 255);
                //particles[index].startColor = new Color32(z, z, z, 255);
                //  }
                //DrawGizmos();
            }
            else //Required since we reuse the array
            {
                // particles[index].position = Vector3.zero;
                //particles[index].startSize = 0;
                // particles[index].startColor = Color.black;
            }
        }
    }
    private void OnFrame(FrameSet frameset)
    {
        //Align depth frame to color frame
        using (FrameSet aligned = aligner.Process(frameset))
        {
            //DepthFrame
            depthFrame = aligned.Where(f => f.Profile.Stream == Stream.Depth).First() as DepthFrame;

            if (depthFrame == null)
            {
                Debug.Log("depth frame is null");
                return;
            }

            //ColorFrame
            vidFrame = aligned.Where(f => f.Profile.Stream == Stream.Color).First() as VideoFrame;

            if (vidFrame == null)
            {
                Debug.Log("color frame is null");
                return;
            }

            UpdateParticleParams(depthFrame.Width, depthFrame.Height, depthFrame.Profile.Format);

            //CoordinateData
            var points = pc.Calculate(depthFrame);
            vertices = vertices ?? new Points.Vertex[points.Count];
            points.CopyTo(vertices);

            //ColorData
            colorData = colorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(colorData);

            for (int index = 0; index < particleSize; index += skipParticles)
            {
                var v = vertices[index];

                if (v.z > 0)
                {
                    particles[index].position   = new Vector3(v.x, v.y, v.z);
                    particles[index].startSize  = pointsSize;
                    particles[index].startColor = new Color32(colorData[index * 3], colorData[index * 3 + 1], colorData[index * 3 + 2], 255);
                }

                else
                {
                    particles[index].position   = new Vector3(0, 0, 0);
                    particles[index].startSize  = (float)0.0;
                    particles[index].startColor = new Color32(0, 0, 0, 0);
                }
            }
        }
    }
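
The handlers in this example rely on fields (aligner, pc, particles, vertices, colorData, particleSize, skipParticles, pointsSize) that are initialized elsewhere. Below is a minimal sketch of that setup, assuming the Intel.RealSense .NET wrapper inside a Unity MonoBehaviour; the class name, the constants, and the UpdateParticleParams body are illustrative guesses, not the original implementation:

using Intel.RealSense;
using UnityEngine;

public class PointCloudParticles : MonoBehaviour    // hypothetical host class
{
    private Align aligner;                          // aligns depth pixels to the color stream
    private PointCloud pc;                          // turns depth frames into XYZ vertices
    private ParticleSystem.Particle[] particles;
    private Points.Vertex[] vertices;
    private byte[] colorData;
    private DepthFrame depthFrame;
    private VideoFrame vidFrame;
    private int particleSize;
    private const int skipParticles = 2;            // sample every Nth vertex
    private const float pointsSize = 0.01f;

    private void Start()
    {
        aligner = new Align(Stream.Color);
        pc      = new PointCloud();
    }

    private void UpdateParticleParams(int width, int height, Format format)
    {
        // Resize the particle buffer when the depth resolution changes.
        particleSize = width * height;
        if (particles == null || particles.Length != particleSize)
        {
            particles = new ParticleSystem.Particle[particleSize];
        }
    }
}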
    private void OnFrame(FrameSet frameset)
    {
        using (FrameSet aligned = aligner.Process(frameset))
        {
            //Depth
            depthFrame = aligned.Where(f => f.Profile.Stream == Stream.Depth).First() as DepthFrame;

            //Color
            vidFrame = aligned.Where(f => f.Profile.Stream == Stream.Color).First() as VideoFrame;


            if (depthFrame == null || vidFrame == null)
            {
                // Debug.Log("Frame is not a depth frame");
                return;
            }

            UpdateParticleParams(depthFrame.Width, depthFrame.Height, depthFrame.Profile.Format);

            var points = pc.Calculate(depthFrame);

            //Depth
            vertices = vertices ?? new Points.Vertex[points.Count];
            points.CopyTo(vertices);

            //Color
            byteColorData = byteColorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(byteColorData);

            for (int index = 0; index < particleSize; index += skipParticles)
            {
                var v = vertices[index];

                if (v.z > 0)
                {
                    particles[index].position  = new Vector3(v.x, v.y, v.z);
                    particles[index].startSize = pointsSize;
                    //particles[index].startColor = gradient.Evaluate(v.z);
                    particles[index].startColor = new Color32(byteColorData[index * 3], byteColorData[index * 3 + 1], byteColorData[index * 3 + 2], 255);
                }

                /*
                 * else
                 * {
                 *  particles[index].position = new Vector3(0, 0, 0);
                 *  particles[index].startSize = (float)0.0;
                 *  particles[index].startColor = new Color32(0, 0, 0, 0);
                 * }
                 */
            }
        }
    }
    private void setParticals(Points points, VideoFrame colorFrame)
    {
        if (points == null)
        {
            throw new Exception("Frame in queue is not a points frame");
        }

        if (colorFrame != null)
        {
            if (lastColorImage == null)
            {
                int colorFrameSize = colorFrame.Height * colorFrame.Stride;
                lastColorImage = new byte[colorFrameSize];
            }
            colorFrame.CopyTo(lastColorImage);
        }

        vertices = vertices ?? new Points.Vertex[points.Count];
        points.CopyTo(vertices);

        Debug.Assert(vertices.Length == particles.Length);
        int mirror = mirrored ? -1 : 1;

        for (int index = 0; index < vertices.Length; index += skipParticles)
        {
            var v = vertices[index];
            if (v.z > 0)
            {
                particles[index].position  = new Vector3(v.x * mirror, v.y, v.z);
                particles[index].startSize = v.z * pointsSize * 0.02f;
                if (lastColorImage != null)
                {
                    particles[index].startColor = new Color32(lastColorImage[index * 3], lastColorImage[index * 3 + 1], lastColorImage[index * 3 + 2], 255);
                }
                else
                {
                    byte z = (byte)(v.z / 2f * 255);
                    particles[index].startColor = new Color32(z, z, z, 255);
                }
            }
            else //Required since we reuse the array
            {
                particles[index].position   = Vector3.zero;
                particles[index].startSize  = 0;
                particles[index].startColor = Color.black;
            }
        }
    }
Example #7
    void onNewSampleSetThreading(FrameSet frameSet)
    {
        using (FrameSet aligned = aligner.Process(frameSet))
        {
            VideoFrame vidFrame   = aligned.Where(x => x.Profile.Stream == Stream.Color).First() as VideoFrame;
            Frame      depthFrame = aligned.Where(x => x.Profile.Stream == Stream.Depth).First();


            byteColorData = byteColorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(byteColorData);
            uintColorData = Array.ConvertAll(byteColorData, x => (uint)x);

            var points = pointCloud.Calculate(depthFrame);
            vertices = vertices ?? new Points.Vertex[dataLength];
            points.CopyTo(vertices);
        }
    }
Example #8
        private void WaitForFrames()
        {
            while (!_streamingEvent.WaitOne(0))
            {
                using (FrameSet set = _pipeline.WaitForFrames())
                {
                    _frameData.Timestamp = DateTime.Now;

                    using (VideoFrame colorFrame = set.ColorFrame)
                    {
                        colorFrame.CopyTo(_frameData.ColorData);
                    }
                    using (FrameSet processed = _aligner.Process(set))
                        using (DepthFrame depthFrame = processed.DepthFrame)
                        {
                            depthFrame.CopyTo(_frameData.DepthData);
                        }

                    OnFrameDataArrived?.Invoke(_frameData);
                }
            }
        }
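
The loop above assumes _pipeline, _aligner, _streamingEvent and the _frameData buffers are created elsewhere. A hedged sketch of that setup follows; the FrameData container type, the stream resolutions, and the method name are assumptions rather than part of the original snippet:

        private void StartStreaming()
        {
            _pipeline       = new Pipeline();
            _aligner        = new Align(Stream.Color);          // align depth to the color stream
            _streamingEvent = new ManualResetEvent(false);      // set this event to stop the loop

            using (var config = new Config())
            {
                config.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);
                config.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);

                PipelineProfile profile = _pipeline.Start(config);

                var color = profile.GetStream(Stream.Color) as VideoStreamProfile;
                var depth = profile.GetStream(Stream.Depth) as VideoStreamProfile;

                // Size the reusable buffers once; WaitForFrames() copies into them every iteration.
                _frameData = new FrameData
                {
                    ColorData = new byte[color.Width * color.Height * 3],   // Rgb8: 3 bytes per pixel
                    DepthData = new ushort[depth.Width * depth.Height]      // Z16: one ushort per pixel
                };
            }

            Task.Run(WaitForFrames);
        }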
Example #9
        private void StartProcessingBlock(CustomProcessingBlock processingBlock, PipelineProfile pp, Action <VideoFrame> updateColor, Pipeline pipeline)
        {
            float[,,] posMap = new float[1280, 720, 3];

            Size RS_depthSize = new Size(1280, 720);
            Mat  processMat   = new Mat(RS_depthSize, DepthType.Cv8U, 3);

            processingBlock.Start(f =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    //var color_frame = frames.ColorFrame.DisposeWith(frames);
                    //color_frame.CopyTo(processMat.DataPointer);

                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames.DepthFrame.DisposeWith(frames);

                    //float depth = depth_frame.GetDistance((int)thePoint.X,(int)thePoint.Y); //From
                    //thePos = HelperClass.DeprojectPixelToPoint(depthintr, thePoint, depth);

                    unsafe
                    {
                        Int16 *pixelPtr_byte = (Int16 *)depth_frame.Data;
                        for (int i = 0; i < 1280; i++)
                        {
                            for (int j = 0; j < 720; j++)
                            {
                                var tmpF        = HelperClass.DeprojectPixelToPoint(depthintr, new PointF(i, j), (float)pixelPtr_byte[j * 1280 + i] / 1000f);
                                posMap[i, j, 0] = tmpF[0];
                                posMap[i, j, 1] = tmpF[1];
                                posMap[i, j, 2] = tmpF[2];
                            }
                        }
                    }
                    // Dispatcher.Invoke(DispatcherPriority.Render, updateColor, color_frame); // for display
                }
            });


            //start
            var token = _tokenSource.Token;
            Action <VideoFrame> updateOriginColor = UpdateImage(Img_main);
            var t = Task.Factory.StartNew(() =>
            {
                Mat color_orig   = new Mat(RS_depthSize, DepthType.Cv8U, 3);
                Mat color_resize = new Mat(RS_depthSize, DepthType.Cv8U, 3);

                yoloWrapper = new YoloWrapper("modle\\yolov3-tiny-3obj.cfg", "modle\\yolov3-tiny-3obj_3cup.weights", "modle\\obj.names");
                string detectionSystemDetail = string.Empty;
                if (!string.IsNullOrEmpty(yoloWrapper.EnvironmentReport.GraphicDeviceName))
                {
                    detectionSystemDetail = $"({yoloWrapper.EnvironmentReport.GraphicDeviceName})";
                }
                Console.WriteLine($"Detection System:{yoloWrapper.DetectionSystem}{detectionSystemDetail}");

                while (!token.IsCancellationRequested)
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        if (showType == imgType.color)
                        {
                            VideoFrame color_frame = frames.ColorFrame.DisposeWith(frames);
                            color_frame.CopyTo(color_orig.DataPointer);

                            CvInvoke.WarpPerspective(color_orig, color_resize, matrix, new Size(1280, 720));


                            //CvInvoke.Imwrite("yolo1.png", color_resize);
                            //try { items = yoloWrapper.Detect(@"yolo1.png"); }
                            //catch { break; }
                            CvInvoke.Imwrite("yolo2.png", color_orig);
                            try { items = yoloWrapper.Detect(@"yolo2.png"); }
                            catch { break; }

                            CvInvoke.CvtColor(color_resize, color_resize, ColorConversion.Bgr2Rgb);
                            processingBlock.ProcessFrames(frames);

                            foreach (YoloItem item in items)
                            {
                                string name = item.Type;
                                //int x = item.X;
                                //int y = item.Y;
                                mapToRsImg(item.X, item.Y, out int x, out int y);
                                mapToRsImg(item.X + item.Width, item.Y + item.Height, out int x2, out int y2);

                                //int H = item.Height;
                                //int W = item.Width;
                                int H = y2 - y;
                                int W = x2 - x;

                                mapToRsImg(item.Center().X, item.Center().Y, out int Cx, out int Cy);
                                //Point center = item.Center();
                                Point center = new Point(Cx, Cy);

                                int evilLine   = 500;
                                int evilLinex1 = 580;
                                int evilLinex2 = 660;
                                if (showEvilLine)
                                {
                                    CvInvoke.Line(color_resize, new Point(0, evilLine), new Point(1280, evilLine), new MCvScalar(100, 100, 250), 2);         // anything above this line is probably at the coffee machine
                                    CvInvoke.Line(color_resize, new Point(evilLinex1, 0), new Point(evilLinex1, evilLine), new MCvScalar(100, 100, 250), 2);
                                    CvInvoke.Line(color_resize, new Point(evilLinex2, 0), new Point(evilLinex2, evilLine), new MCvScalar(100, 100, 250), 2);
                                }

                                if (y > evilLine || x < evilLinex1 || x > evilLinex2) // i.e. the detection is not near the coffee machine
                                {
                                    if (item.Confidence < 0.5)                        // skip low-confidence detections to avoid picking up other objects
                                    {
                                        continue;
                                    }
                                }


                                float[] objPos = new float[] { posMap[center.X, center.Y, 0], posMap[center.X, center.Y, 1], posMap[center.X, center.Y, 2] };

                                if (objPos[0] == 0 || objPos[1] == 0 || objPos[2] == 0)  // skip zero positions, which would otherwise skew the average
                                {
                                    continue;
                                }

                                if (name == "blue cup")//index 0
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 > clr.V2)  // R > B: YOLO got this one wrong
                                    {
                                        continue;
                                    }

                                    //  CvInvoke.PutText(color_resize, "B", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(80, 150, 220), 3);

                                    process_actionOfCups(cups[0], mat_cup, TB_Bcup_msg, TB_Bcup_state, objPos, 10, 40);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Bcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                                else if (name == "pink cup")//index 1
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 < clr.V2)  // R < B: YOLO got this one wrong
                                    {
                                        continue;
                                    }

                                    // CvInvoke.PutText(color_resize, "P", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(250, 80, 80), 3);

                                    process_actionOfCups(cups[1], mat_cup, TB_Pcup_msg, TB_Pcup_state, objPos, 60, 90);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Pcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                            }//foreach cups
                            timeTick++;
                            this.Dispatcher.Invoke((Action)(() =>
                            {
                                img_cupState.Source = BitmapSourceConvert.ToBitmapSource(mat_cup);
                            }));
                            color_frame.CopyFrom(color_resize.DataPointer);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateOriginColor, color_frame);
                        }
                        else if (showType == imgType.mix) // display the mixed image
                        {
                            processingBlock.ProcessFrames(frames);
                        }
                    }
                }
            }, token);
        }
    private void setParticals(Points points, VideoFrame colorFrame)
    {
        if (points == null)
        {
            throw new Exception("Frame in queue is not a points frame");
        }

        if (colorFrame != null)
        {
            if (lastColorImage == null)
            {
                int colorFrameSize = colorFrame.Height * colorFrame.Stride;
                lastColorImage = new byte[colorFrameSize];
            }
            colorFrame.CopyTo(lastColorImage);
        }

        vertices = vertices ?? new Points.Vertex[points.Count];
        points.CopyTo(vertices);

        Debug.Assert(vertices.Length == particles.Length);
        int mirror = mirrored ? -1 : 1;

        //print("test");
        //print(vertices.Length + "   " + w + "    " + h);


        for (int hor = 0; hor < w; hor += skipParticles)
        {
            for (int ver = 0; ver < h; ver += skipParticles)
            {
                //print("inside test");
                //var index = skipParticles * (hor + ver * (w / skipParticles + 1));
                var index = hor + w * ver;
                //print(index + "   " + hor +"   " + ver);
                var v = vertices[index];
                if (v.z > 0 && v.z < 0.5f)
                {
                    particles[index].position = new Vector3(v.x * mirror * 300, v.y * 300, v.z * 300);
                    particlesInRangePos.Add(new Vector3(v.x * mirror * 300, v.y * 300, v.z * 300));
                    particles[index].startSize = v.z * pointsSize * 0.02f;
                    if (lastColorImage != null)
                    {
                        particles[index].startColor = new Color32(lastColorImage[index * 3], lastColorImage[index * 3 + 1], lastColorImage[index * 3 + 2], 255);
                    }
                    else
                    {
                        byte z = (byte)(v.z / 2f * 255);
                        particles[index].startColor = new Color32(z, z, z, 255);
                    }
                }
                else //Required since we reuse the array
                {
                    particles[index].position   = Vector3.zero;
                    particles[index].startSize  = 0;
                    particles[index].startColor = Color.black;
                }
            }
        }



        //for (int index = 0; index < vertices.Length; index += skipParticles)
        //{
        //    var v = vertices[index];
        //    print(index);
        //    if (v.z > 0 && v.z < 1f)
        //    {
        //        particlesInRangePos.Add(new Vector3(v.x * mirror, v.y, v.z));

        //        //if (Vector2.Distance(particles[index].position, new Vector2(v.x * mirror, v.y)) > 0.2)
        //        //{

        //        //currentPos[index] = new Vector2(v.x * mirror, v.y);
        //        //if (Vector2.Distance(prePos[index], currentPos[index]) > 0.1f)
        //        // {
        //        particles[index].position = new Vector3(v.x * mirror, v.y, v.z);
        //        //particlesInRangePos.Add(particles[index].position);
        //        particles[index].startSize = v.z * pointsSize * 0.02f;

        //        //prePos[index] = new Vector2(v.x * mirror, v.y);
        //        //print(prePos[index]);

        //        if (lastColorImage != null)
        //            particles[index].startColor = new Color32(lastColorImage[index * 3], lastColorImage[index * 3 + 1], lastColorImage[index * 3 + 2], 255);
        //        //}
        //        else
        //        {
        //            byte z = (byte)(v.z / 2f * 255);
        //            particles[index].startColor = new Color32(z, z, z, 255);
        //        }

        //        //  }



        //    }
        //    else //Required since we reuse the array
        //    {
        //        particles[index].position = Vector3.zero;
        //        particles[index].startSize = 0;
        //        particles[index].startColor = Color.black;
        //    }


        //}
    }
 /* Extract and return the Color data from the Frame */
 private static byte[] ColorArray(VideoFrame colorFrame)
 {
     byte[] colorArray = new byte[CAMERA_WIDTH * CAMERA_HEIGHT * 3]; // [r,g,b]
     colorFrame.CopyTo(colorArray);
     return colorArray;
 }
 /* Extract and return the Depth data from the Frame */
 private static ushort[] DepthArray(VideoFrame depthFrame)
 {
     ushort[] depthArray = new ushort[CAMERA_WIDTH * CAMERA_HEIGHT]; // [u]
     depthFrame.CopyTo(depthArray);
     return depthArray;
 }
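
A hedged usage sketch for the two helpers above. CAMERA_WIDTH and CAMERA_HEIGHT are assumed to match the configured stream resolution, and the color stream is assumed to be Rgb8 so that each pixel occupies three bytes:

 private static void ProcessFrameSet(FrameSet frames)
 {
     using (VideoFrame colorFrame = frames.ColorFrame)
     using (DepthFrame depthFrame = frames.DepthFrame)
     {
         byte[]   color = ColorArray(colorFrame);
         ushort[] depth = DepthArray(depthFrame);

         // Row-major layout: pixel (x, y) sits at index y * CAMERA_WIDTH + x.
         int x = CAMERA_WIDTH / 2, y = CAMERA_HEIGHT / 2;
         ushort rawDepth = depth[y * CAMERA_WIDTH + x];        // raw Z16 depth units
         byte   red      = color[(y * CAMERA_WIDTH + x) * 3];  // first of the three RGB bytes
     }
 }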