Example #1
0
        /// <summary>
        /// Draws the color frame as the background and overlays the mask image
        /// centered on every head position.
        /// </summary>
        /// <param name="colorFrame">Color frame supplying the background pixels.</param>
        /// <param name="headList">Skeleton-space head positions to mask.</param>
        private void FillBitmap( ColorImageFrame colorFrame, List<SkeletonPoint> headList )
        {
            // Open a drawing context on the visual.
            using ( var drawContext = drawVisual.RenderOpen() ) {
                // Copy the raw color pixels into the shared buffer.
                colorFrame.CopyPixelDataTo( pixelBuffer );

                // Build a bitmap from the camera pixels and paint it as the background.
                var background = new WriteableBitmap( colorFrame.Width, colorFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null );
                var frameRect = new Int32Rect( 0, 0, colorFrame.Width, colorFrame.Height );
                background.WritePixels( frameRect, pixelBuffer, colorFrame.Width * 4, 0 );
                drawContext.DrawImage( background,
                    new Rect( 0, 0, colorFrame.Width, colorFrame.Height ) );

                // Draw the 128x128 mask image centered on each tracked head.
                foreach ( SkeletonPoint head in headList ) {
                    ColorImagePoint headPoint = kinect.MapSkeletonPointToColor( head, rgbFormat );
                    drawContext.DrawImage( maskImage,
                        new Rect( headPoint.X - 64, headPoint.Y - 64, 128, 128 ) );
                }
            }

            // Render the finished visual into the bitmap shown on screen.
            bmpBuffer.Render( drawVisual );
        }
Example #2
0
        /// <summary>
        /// Handles a synchronized set of Kinect frames: builds the face-tracking
        /// status text and, for faces flagged for capture, crops and saves a
        /// picture around the face (optionally queueing it for Betaface).
        /// </summary>
        /// <param name="colorImageFrame">Current color frame (source of the crops).</param>
        /// <param name="depthImageFrame">Current depth frame (unused here).</param>
        /// <param name="skeletonFrame">Current skeleton frame (unused here).</param>
        public void OnAllFramesReady(ColorImageFrame colorImageFrame, DepthImageFrame depthImageFrame, SkeletonFrame skeletonFrame)
        {
            string stat = "";
            // Face tracking / timing stuff
            if (FaceList.Count > 0) {
                // Iterate over a snapshot so the underlying list may change elsewhere.
                foreach (Face f in FaceList.toList()) {
                    stat += "    {" + f.Id + ", " + String.Format("{0:0.00}", f.Velocity) + "}";
                    if (f.TakePicture) {
                        // Grow the face rectangle by 250px, clamped to the frame bounds.
                        int[] coordsExpanded = expandBy(f.Coords, 250, colorImageFrame.Width - 1, colorImageFrame.Height - 1);
                        if (coordsExpanded[2] == 0 || coordsExpanded[3] == 0) // width or height can't be 0
                            continue;
                        string time = System.DateTime.Now.ToString("hh'-'mm'-'ss", CultureInfo.CurrentUICulture.DateTimeFormat);
                        string path = "..\\..\\Images\\CroppedPics\\pic" + f.Id + "--" + time + ".jpg";
                        f.Path = path;
                        bool success = SaveImage(path, cropImage(colorImageFrame, coordsExpanded), ImageFormat.Jpeg);
                        if (success && USE_BETAFACE) {
                            bfw.enqueue(f);
                            f.ProcessingBetaface = true;
                        }
                    }
                }
            }
            // BUG FIX: the status text was assigned and then immediately
            // overwritten with "", so it never appeared; keep only the
            // meaningful assignment.
            FaceTracking.Status = stat;
        }
        /// <summary>
        /// Sets the data of ColorVideo from the newest Kinect color frame:
        /// copies the pixels, swaps the R/B channels for XNA, forces full alpha,
        /// and uploads the result into a new Texture2D.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="colorImageFrame"></param>
        public void KinectColorFrameReady(object sender, ColorImageFrameReadyEventArgs colorImageFrame)
        {
            // Get raw image; may be null if the frame was already consumed.
            ColorVideoFrame = colorImageFrame.OpenColorImageFrame();

            if (ColorVideoFrame != null)
            {
                // BUG FIX: dispose the frame even when copying or texture
                // creation throws; previously an exception leaked the frame.
                try
                {
                    // Create array for pixel data and copy it from the image frame
                    PixelData = new Byte[ColorVideoFrame.PixelDataLength];
                    ColorVideoFrame.CopyPixelDataTo(PixelData);

                    // Swap the R and B channels; Kinect and XNA use different color orders.
                    BgraPixelData = new Byte[ColorVideoFrame.PixelDataLength];
                    for (int i = 0; i < PixelData.Length; i += 4)
                    {
                        BgraPixelData[i] = PixelData[i + 2];
                        BgraPixelData[i + 1] = PixelData[i + 1];
                        BgraPixelData[i + 2] = PixelData[i];
                        BgraPixelData[i + 3] = (Byte)255; //The video comes with 0 alpha so it is transparent
                    }

                    // Create a texture and assign the realigned pixels
                    ColorVideo = new Texture2D(Graphics.GraphicsDevice, ColorVideoFrame.Width, ColorVideoFrame.Height);
                    ColorVideo.SetData(BgraPixelData);
                }
                finally
                {
                    ColorVideoFrame.Dispose();
                }
            }
        }
Example #4
0
 /// <summary>
 /// Locates the stored template inside the given color frame and saves the
 /// resulting match position into this.position.
 /// </summary>
 /// <param name="frame">Color frame to search for the template.</param>
 public void findCoordinate(ColorImageFrame frame)
 {
     Image<Bgr, Byte> img = ImageProc.colorFrameToImage(frame);
     // Cleanup: the original pre-allocated an int[2] that matchImages
     // immediately replaced; assign the match result directly.
     this.position = ImageProc.matchImages(img, this.template);
 }
Example #5
0
        /// <summary>
        /// Captures a color frame as JPEG bytes into JpegData.
        /// Does nothing when no frame was delivered.
        /// </summary>
        /// <param name="colorFrame">Frame to encode; not disposed by this constructor.</param>
        public RecordedVideoFrame(ColorImageFrame colorFrame)
        {
            // Guard clause instead of the original if/else with a bare return.
            if (colorFrame == null)
            {
                return;
            }

            byte[] bits = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(bits);

            int bytesPerPixel = colorFrame.BytesPerPixel;
            int width = colorFrame.Width;
            int height = colorFrame.Height;

            // Wrap the raw pixels in a bitmap and JPEG-encode it.
            var bmp = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgr32, null);
            bmp.WritePixels(new System.Windows.Int32Rect(0, 0, width, height), bits, width * bytesPerPixel, 0);
            JpegBitmapEncoder jpeg = new JpegBitmapEncoder();
            jpeg.Frames.Add(BitmapFrame.Create(bmp));

            // BUG FIX: dispose the MemoryStream deterministically.
            using (var saveStream = new MemoryStream())
            {
                jpeg.Save(saveStream);
                saveStream.Flush();
                JpegData = saveStream.ToArray();
            }
        }
        /// <summary>
        /// Builds a color-stream packet: JPEG-encodes the frame's pixels into an
        /// in-memory stream, writes the body size into the packet header buffer,
        /// and assembles the packet.
        /// </summary>
        /// <param name="_imageFrame">Color frame whose pixels are encoded.</param>
        /// <param name="_idSensor">Identifier of the originating sensor.</param>
        public KServerColorStreamPaquet(ColorImageFrame _imageFrame, int _idSensor)
        {
            idSensor = _idSensor;
            imageFrame = _imageFrame;

            // Raw 32-bit pixels copied out of the frame.
            byte[] pxlData = new byte[imageFrame.PixelDataLength];
            imageFrame.CopyPixelDataTo(pxlData);

            // Wrap the pinned pixel buffer in a GDI+ bitmap (no extra copy) and
            // JPEG-encode it into the in-memory stream.
            jpgImage = new MemoryStream();
            unsafe {
               fixed (byte* ptr = pxlData) {
                  using (Bitmap image = new Bitmap(imageFrame.Width, imageFrame.Height, imageFrame.Width*4, PixelFormat.Format32bppArgb, new IntPtr(ptr))) {
                     image.Save(jpgImage, ImageFormat.Jpeg);
                  }
               }
            }

            /* Set the size of the packet body.
               NOTE(review): the +5 presumably accounts for fixed header bytes in
               the body — confirm against the paquet wire format. */
            setBodySize((uint)(jpgImage.Length+5));
            byte[] size = BitConverter.GetBytes((UInt32)(jpgImage.Length+5));

            // Write the body size big-endian at the start of the data buffer.
            Array.Reverse(size);
            Buffer.BlockCopy(size, 0, data, 0, size.Length);

            /* Build the packet. */
            build();
        }
Example #7
0
        /// <summary>
        /// Copies the newest color frame into the video bitmap (forcing full
        /// alpha), then draws a red sighting frame (crosshair + rectangle) for
        /// precise alignment.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Event args carrying the color frame.</param>
        void SensorColorFrameReady(object sender, kinect.ColorImageFrameReadyEventArgs e)
        {
            using (kinect.ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorFramePixels);

                    // BUG FIX: the loop previously stopped at Length - 4, which
                    // skipped the alpha byte of the last pixel; cover every pixel.
                    for (int i = 3; i < this.colorFramePixels.Length; i += 4)
                    {
                        this.colorFramePixels[i] = 255;  // set the alpha to max
                    }

                    // Write the pixel data into our bitmap
                    this.colorBitmapVideo.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmapVideo.PixelWidth, this.colorBitmapVideo.PixelHeight),
                        this.colorFramePixels,
                        this.colorBitmapVideo.PixelWidth * sizeof(int),
                        0);
                }
            }

            // draw a sighting frame for precise alignment:
            // Horizontally the frame spans 16.56 degrees on every side and 12.75 degrees either up or down (at 74" the size it covers is 44"W by 33.5"H, i.e. 33.12 degrees by 25.5 degrees)
            // see http://writeablebitmapex.codeplex.com/
            Color sightFrameColor = Colors.Red;

            colorBitmapVideo.DrawLine((int)colorBitmapVideo.Width / 2, 0, (int)colorBitmapVideo.Width / 2, (int)colorBitmapVideo.Height, sightFrameColor);
            colorBitmapVideo.DrawLine(0, (int)colorBitmapVideo.Height / 2, (int)colorBitmapVideo.Width, (int)colorBitmapVideo.Height / 2, sightFrameColor);
            colorBitmapVideo.DrawRectangle((int)colorBitmapVideo.Width / 4, (int)colorBitmapVideo.Height / 4,
                                           (int)colorBitmapVideo.Width * 3 / 4, (int)colorBitmapVideo.Height * 3 / 4, sightFrameColor);
        }
 /// <summary>
 /// Initializes a new instance of the <see cref="ColorImageFrameLuminanceSource"/> class.
 /// Computes luminance values from the frame's pixels, optionally flipping them.
 /// </summary>
 /// <param name="bitmap">The Kinect color frame to read pixels from.</param>
 /// <param name="flipTheImage">if set to <c>true</c>, flip the luminance values.</param>
 public ColorImageFrameLuminanceSource(ColorImageFrame bitmap, bool flipTheImage)
    : base(bitmap.Width, bitmap.Height)
 {
    var pixelData = new byte[bitmap.PixelDataLength];
    // Only the plain RGB formats have a known pixel layout here; infrared,
    // raw Bayer and YUV frames are left as Unknown (same outcome as before).
    BitmapFormat bitmapFormat;
    switch (bitmap.Format)
    {
       case ColorImageFormat.RgbResolution1280x960Fps12:
       case ColorImageFormat.RgbResolution640x480Fps30:
          bitmapFormat = BitmapFormat.BGR32;
          break;
       default:
          bitmapFormat = BitmapFormat.Unknown;
          break;
    }
    bitmap.CopyPixelDataTo(pixelData);
    CalculateLuminance(pixelData, bitmapFormat);
    if (flipTheImage)
    {
       // flip the luminance values because the kinect has it flipped before
       FlipLuminanceValues();
    }
 }
Example #9
0
        /// <summary>
        /// Draws the color frame as the background and overlays the mask image,
        /// rotated to match each head's orientation, at each head position.
        /// </summary>
        /// <param name="colorFrame">Color frame supplying the background pixels.</param>
        /// <param name="headList">Pairs of head position and head rotation matrix.</param>
        private void FillBitmap( ColorImageFrame colorFrame, List<Tuple<SkeletonPoint, Matrix4>> headList )
        {
            // Open a drawing context on the visual.
            using ( var drawContext = drawVisual.RenderOpen() ) {
                // Copy the raw color pixels into the shared buffer.
                colorFrame.CopyPixelDataTo( pixelBuffer );

                // Build a bitmap from the camera pixels and paint it as the background.
                var background = new WriteableBitmap( colorFrame.Width, colorFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null );
                background.WritePixels( new Int32Rect( 0, 0, colorFrame.Width, colorFrame.Height ),
                    pixelBuffer, colorFrame.Width * 4, 0 );
                drawContext.DrawImage( background,
                    new Rect( 0, 0, colorFrame.Width, colorFrame.Height ) );

                // Overlay the mask image, rotated to follow the head orientation.
                foreach ( var head in headList ) {
                    ColorImagePoint headPoint = kinect.MapSkeletonPointToColor( head.Item1, rgbFormat );

                    // Build a 2D transform from the head rotation matrix, translated
                    // to the head position in color-image coordinates.
                    Matrix4 hm = head.Item2;
                    var rotation = new Matrix( -hm.M11, hm.M12,
                                               -hm.M21, hm.M22,
                                               headPoint.X, headPoint.Y );
                    drawContext.PushTransform( new MatrixTransform( rotation ) );
                    drawContext.DrawImage( maskImage, new Rect( -64, -64, 128, 128 ) );
                    drawContext.Pop();
                }
            }

            // Render the finished visual into the bitmap shown on screen.
            bmpBuffer.Render( drawVisual );
        }
Example #10
0
        /// <summary>
        /// Converts a color frame into the reusable WriteableBitmap, optionally
        /// overlaying the skeleton when requested and a skeleton frame exists.
        /// </summary>
        /// <param name="NewFrame">Color frame to render; must not be null.</param>
        /// <param name="SkeletonWanted">Whether to overlay the skeleton.</param>
        /// <param name="SkelFrame">Skeleton frame for the overlay (may be null).</param>
        /// <returns>The bitmap with the frame (and optionally skeleton) drawn.</returns>
        /// <exception cref="InvalidOperationException">When NewFrame is null.</exception>
        public WriteableBitmap bmpFromColorFrame(ColorImageFrame NewFrame, bool SkeletonWanted, SkeletonFrame SkelFrame)
        {
            // Lazily create the reusable bitmap on first use.
            if (bmpColor == null)
            {
                bmpColor = new WriteableBitmap(frameWidth, frameHeight, 96, 96, PixelFormats.Bgr32, null);
            }

            if (NewFrame == null)
            {
                throw new InvalidOperationException("Null Image");
            }

            // Copy the frame pixels into the bitmap.
            NewFrame.CopyPixelDataTo(colorPixels);
            var wholeBitmap = new Int32Rect(0, 0, bmpColor.PixelWidth, bmpColor.PixelHeight);
            bmpColor.WritePixels(wholeBitmap, colorPixels, bmpColor.PixelWidth * sizeof(int), 0);

            // Overlay the skeleton when asked for and available.
            if (SkeletonWanted && SkelFrame != null)
            {
                return bmpWithSkelFromColor(bmpColor, SkelFrame);
            }

            return bmpColor;
        }
        /// <summary>
        /// Converts a Kinect color frame into a GDI+ Bitmap, honoring the 16-bit
        /// infrared format when present.
        /// </summary>
        /// <param name="colorFrame">Frame to convert; not disposed by this method.</param>
        /// <returns>A new Bitmap containing the frame's pixels.</returns>
        public static Bitmap ColorImageFrameToBitmap(ColorImageFrame colorFrame)
        {
            byte[] pixelBuffer = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixelBuffer);

            // Infrared frames are 16 bits per pixel; everything else is treated as 32-bit RGB.
            System.Drawing.Imaging.PixelFormat pixelFormat = System.Drawing.Imaging.PixelFormat.Format32bppRgb;
            if (colorFrame.Format == ColorImageFormat.InfraredResolution640x480Fps30)
            {
                pixelFormat = System.Drawing.Imaging.PixelFormat.Format16bppRgb565;
            }

            Bitmap bitmapFrame = new Bitmap(colorFrame.Width, colorFrame.Height, pixelFormat);
            System.Drawing.Rectangle rect = new System.Drawing.Rectangle(0, 0, bitmapFrame.Width, bitmapFrame.Height);
            BitmapData bitmapData = bitmapFrame.LockBits(rect, ImageLockMode.WriteOnly, bitmapFrame.PixelFormat);

            // Blit the raw pixel buffer straight into the locked bitmap memory.
            Marshal.Copy(pixelBuffer, 0, bitmapData.Scan0, colorFrame.PixelDataLength);

            bitmapFrame.UnlockBits(bitmapData);

            // Cleanup: removed the pointless "= null" assignments to locals that
            // go out of scope immediately anyway.
            return bitmapFrame;
        }
Example #12
0
 /// <summary>
 /// Crops a rectangular region out of a color frame.
 /// </summary>
 /// <param name="source">Frame to crop.</param>
 /// <param name="coords">Region as {left, top, width, height}.</param>
 /// <returns>The cropped region as an Image.</returns>
 private Image cropImage(ColorImageFrame source, int[] coords)
 {
     var cropArea = new Rectangle(coords[0], coords[1], coords[2], coords[3]);
     Bitmap fullFrame = ImageToBitmap(source);
     // Clone copies just the crop area, preserving the pixel format.
     return fullFrame.Clone(cropArea, fullFrame.PixelFormat);
 }
        /// <summary>
        /// Builds a Bgr32 BitmapSource from the RGB sensor frame; the frame is
        /// disposed on return.
        /// </summary>
        /// <param name="quadro">Color frame to convert (disposed by this method).</param>
        /// <returns>A BitmapSource with the frame's pixels.</returns>
        private BitmapSource ObterImagemSensorRGB(ColorImageFrame quadro)
        {
            using (quadro)
            {
                byte[] bytesImagem = new byte[quadro.PixelDataLength];
                quadro.CopyPixelDataTo(bytesImagem);

                // BUG FIX: the DPI was 960x960, which makes WPF render the image
                // at a tenth of its size; use the standard 96 DPI like the other
                // frame-to-bitmap helpers in this file.
                return BitmapSource.Create(quadro.Width, quadro.Height, 96, 96, PixelFormats.Bgr32, null, bytesImagem, quadro.Width * quadro.BytesPerPixel);
            }
        }
Example #14
0
        /// <summary>
        /// Packages recognized skeletons with the frame and timestamp, then feeds
        /// the package to every registered capturing function.
        /// </summary>
        /// <param name="skeletonData">Raw skeleton array from the sensor.</param>
        /// <param name="imageFrame">Color frame captured alongside the skeletons.</param>
        /// <param name="timeStamp">Capture time.</param>
        public void CaptureSkeletonData(Skeleton[] skeletonData, ColorImageFrame imageFrame, DateTime timeStamp)
        {
            var recognized = RecognizeSkeletons(skeletonData);
            var capture = new SkeletonCaptureData(recognized, imageFrame, timeStamp);

            // Fan the capture out to every registered function.
            foreach (ISkeletonCapturingFunction capturingFunction in capturingFunctions)
            {
                ExecuteCapturingFunction(capturingFunction, capture);
            }
        }
Example #15
0
        /// <summary>
        /// Records a color frame, flushing the writer afterwards.
        /// </summary>
        /// <param name="frame">Frame to record.</param>
        /// <exception cref="InvalidOperationException">
        /// When the recorder is stopped or color recording is not active.
        /// </exception>
        public void Record(ColorImageFrame frame)
        {
            // Improvement: throw the specific InvalidOperationException instead of
            // the bare Exception (backward-compatible: it derives from Exception).
            if (writer == null)
                throw new InvalidOperationException("This recorder is stopped");

            if (colorRecoder == null)
                throw new InvalidOperationException("Color recording is not actived on this KinectRecorder");

            colorRecoder.Record(frame);
            Flush();
        }
Example #16
0
        /// <summary>
        /// Snapshots a Kinect color frame: raw pixels plus frame metadata.
        /// </summary>
        /// <param name="sensorFrame">Frame to copy from; not disposed here.</param>
        public TColorFrame(ColorImageFrame sensorFrame)
        {
            // Grab the raw pixel buffer first, then mirror the frame metadata.
            ColorData = sensorFrame.GetRawPixelData();

            Width = sensorFrame.Width;
            Height = sensorFrame.Height;
            BytesPerPixel = sensorFrame.BytesPerPixel;
            PixelDataLength = sensorFrame.PixelDataLength;
            FrameNumber = sensorFrame.FrameNumber;
            Timestamp = sensorFrame.Timestamp;
        }
        /// <summary>
        /// Copies the RGB sensor frame's raw pixel bytes and disposes the frame.
        /// Returns null when no frame was delivered.
        /// </summary>
        /// <param name="quadro">Color frame to read (disposed on return).</param>
        /// <returns>Raw pixel bytes, or null.</returns>
        private byte[] ObterImagemSensorRGB(ColorImageFrame quadro)
        {
            if (quadro == null)
            {
                return null;
            }

            using (quadro)
            {
                var buffer = new byte[quadro.PixelDataLength];
                quadro.CopyPixelDataTo(buffer);
                return buffer;
            }
        }
        /// <summary>
        /// Wraps a live ColorImageFrame for replay, mirroring its metadata.
        /// </summary>
        /// <param name="frame">Frame to wrap; kept alive as internalFrame.</param>
        public ReplayColorImageFrame(ColorImageFrame frame)
        {
            // Keep a reference to the wrapped frame and snapshot its metadata.
            internalFrame = frame;

            Format = frame.Format;
            Width = frame.Width;
            Height = frame.Height;
            BytesPerPixel = frame.BytesPerPixel;
            PixelDataLength = frame.PixelDataLength;
            FrameNumber = frame.FrameNumber;
            TimeStamp = frame.Timestamp;
        }
 /// <summary>
 /// Converts the sensor frame into a Gray16 BitmapSource and disposes the
 /// frame. Returns null when no frame was delivered.
 /// </summary>
 /// <param name="quadro">Color frame to convert (disposed on return).</param>
 private ImageSource ObterImagemRGB(ColorImageFrame quadro)
 {
     if (quadro == null) return null;
     using (quadro)
     {
         byte[] bytesImagem = new byte[quadro.PixelDataLength];
         quadro.CopyPixelDataTo(bytesImagem);
         // NOTE(review): Gray16 with a stride of Width * BytesPerPixel only lines
         // up if the stream really delivers a 2-bytes-per-pixel (infrared) format;
         // for a Bgr32 frame this format/stride pairing looks wrong — confirm
         // which ColorImageFormat the stream is opened with.
         return BitmapSource.Create(quadro.Width, quadro.Height,
         96, 96, PixelFormats.Gray16, null, bytesImagem,
         quadro.Width * quadro.BytesPerPixel);
     }  
 }
        /// <summary>
        /// Synchronizes the latest color, depth and skeleton frames into the
        /// internal buffers, maps color pixels to depth space, and updates the
        /// tracked-skeleton joint positions (hands and spine).
        /// </summary>
        /// <param name="depthFrame">Current depth frame.</param>
        /// <param name="colorFrame">Current color frame.</param>
        /// <param name="skletonFrame">Current skeleton frame.</param>
        /// <param name="isPauseMode">Whether the caller is in pause mode.</param>
        public void synchronize(
            DepthImageFrame depthFrame,
            ColorImageFrame colorFrame,
            SkeletonFrame skletonFrame,
            Boolean isPauseMode
            )
        {
            IsPauseMode = isPauseMode;
            // Copy raw color pixels into the shared byte buffer.
            colorFrame.CopyPixelDataTo(_colorByte);

            depthFrame.CopyDepthImagePixelDataTo(_depthPixels);

            // Map each color pixel to its corresponding depth-space point.
            _sensor.CoordinateMapper.MapColorFrameToDepthFrame(
                ColorImageFormat.RgbResolution640x480Fps30,
                DepthImageFormat.Resolution640x480Fps30,
                _depthPixels,
                _depthPoint
                );

            for (int i = 0; i < _pixelDepthDataLength; i++)
            {
                _depthShort[i] = (short)_depthPoint[i].Depth;
                // NOTE(review): scales depth into a byte; 0.064 ≈ 255/4000, so this
                // presumably maps a ~4m range onto 0..255 — confirm the intent.
                _depthByte[i] = (byte)(_depthPoint[i].Depth*0.064-1);
            }

            // Pick the first fully tracked skeleton, if any.
            skletonFrame.CopySkeletonDataTo(totalSkeleton);
            Skeleton firstSkeleton = (from trackskeleton in totalSkeleton
                                      where trackskeleton.TrackingState == SkeletonTrackingState.
                                      Tracked
                                      select trackskeleton).FirstOrDefault();

            _isCreation = true;
            if (firstSkeleton != null)
            {
                // Only accept the skeleton when its spine joint is tracked.
                if (firstSkeleton.Joints[JointType.Spine].TrackingState == JointTrackingState.Tracked)
                {
                    IsSkeletonDetected = true;
                    // Store scaled positions of the joints the app cares about.
                    UserSkeleton[SkeletonDataType.RIGHT_HAND] =
                        ScalePosition(firstSkeleton.Joints[JointType.HandRight].Position);
                    UserSkeleton[SkeletonDataType.LEFT_HAND] =
                        ScalePosition(firstSkeleton.Joints[JointType.HandLeft].Position);
                    UserSkeleton[SkeletonDataType.SPINE] =
                        ScalePosition(firstSkeleton.Joints[JointType.Spine].Position);
                    return;
                }
            }
            // No (sufficiently) tracked skeleton this frame.
            IsSkeletonDetected = false;
            _isCreation = false;
        }
 /// <summary>
 /// Builds a Bgr32 BitmapSource from the RGB sensor frame at 96 DPI and
 /// disposes the frame.
 /// </summary>
 /// <param name="quadro">Color frame to convert (disposed on return).</param>
 /// <returns>A BitmapSource with the frame's pixels.</returns>
 private BitmapSource ObterImagemSensorRGB(ColorImageFrame quadro)
 {
     using (quadro)
     {
         var pixels = new byte[quadro.PixelDataLength];
         quadro.CopyPixelDataTo(pixels);

         // Bytes per row = pixels per row * bytes per pixel.
         int stride = quadro.Width * quadro.BytesPerPixel;
         const int dpi = 96;
         return BitmapSource.Create(quadro.Width, quadro.Height,
             dpi, dpi, PixelFormats.Bgr32, null, pixels, stride);
     }
 }
 /// <summary>
 /// Refreshes the Bitmap property with the pixels of the given color frame,
 /// creating the bitmap lazily on the first frame.
 /// </summary>
 /// <param name="frame">Color frame to display.</param>
 public void Update(ColorImageFrame frame)
 {
     var pixelData = new byte[frame.PixelDataLength];
     frame.CopyPixelDataTo(pixelData);

     // Create the reusable bitmap the first time a frame arrives.
     if (Bitmap == null)
     {
         Bitmap = new WriteableBitmap(frame.Width, frame.Height,
         96, 96, PixelFormats.Bgr32, null);
     }

     // Bytes per row = pixels per row * bytes per pixel.
     int rowStride = Bitmap.PixelWidth * Bitmap.Format.BitsPerPixel / 8;
     var wholeImage = new Int32Rect(0, 0, Bitmap.PixelWidth, Bitmap.PixelHeight);
     Bitmap.WritePixels(wholeImage, pixelData, rowStride, 0);

     // Notify bindings that the bitmap content changed.
     RaisePropertyChanged(() => Bitmap);
 }
Example #23
0
        /// <summary>
        /// Converts a Kinect color frame into a GDI+ Bitmap (for AForge) by
        /// round-tripping the pixels through a BMP-encoded memory stream.
        /// </summary>
        /// <param name="frame">Color frame to convert; not disposed here.</param>
        /// <returns>A Bitmap backed by the in-memory BMP stream.</returns>
        public static Bitmap colorFrameToAforge(ColorImageFrame frame)
        {
            var pixels = new byte[frame.PixelDataLength];
            frame.CopyPixelDataTo(pixels);

            // Bgr32 => 4 bytes per pixel.
            BitmapSource source = BitmapSource.Create(frame.Width, frame.Height, 96, 96,
                PixelFormats.Bgr32, null, pixels, frame.Width * 4);

            // NOTE: GDI+ requires the stream backing a Bitmap to remain open for
            // the bitmap's lifetime, so the stream is intentionally not disposed.
            var outStream = new MemoryStream();
            BitmapEncoder encoder = new BmpBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(source));
            encoder.Save(outStream);

            return new Bitmap(outStream);
        }
        /// <summary>
        /// Runs face tracking on the current frames and moves/shows the on-screen
        /// rectangle over the detected face, hiding it when tracking fails.
        /// </summary>
        /// <param name="colorFrame">Current color frame.</param>
        /// <param name="depthFrame">Current depth frame.</param>
        /// <param name="skeleton">Skeleton of the tracked player.</param>
        private void FaceTracking( ColorImageFrame colorFrame, DepthImageFrame depthFrame, Skeleton skeleton )
        {
            var faceFrame = faceTracker.Track( colorFrame.Format, colorFrame.ToPixelData(),
                depthFrame.Format, depthFrame.ToPixelData(), skeleton );

            if ( !faceFrame.TrackSuccessful ) {
                // No face this frame: hide the marker rectangle.
                rectFace.Visibility = System.Windows.Visibility.Hidden;
                return;
            }

            // Position and size the rectangle over the detected face, then show it.
            rectFace.Margin = new Thickness( faceFrame.FaceRect.Left, faceFrame.FaceRect.Top, 0, 0 );
            rectFace.Width = faceFrame.FaceRect.Width;
            rectFace.Height = faceFrame.FaceRect.Height;
            rectFace.Visibility = System.Windows.Visibility.Visible;
        }
Example #25
0
        /// <summary>
        /// Copies a color frame's pixels into a new 32bpp RGB Bitmap.
        /// Takes ownership of the frame and disposes it before returning.
        /// </summary>
        /// <param name="colorFrame">Frame to convert (disposed by this method).</param>
        /// <returns>A new Bitmap with the frame's pixels.</returns>
        private static Bitmap ColorImageFrameToBitmap(ColorImageFrame colorFrame)
        {
            var pixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixels);

            var bitmap = new Bitmap(colorFrame.Width, colorFrame.Height, PixelFormat.Format32bppRgb);
            var bounds = new Rectangle(0, 0, colorFrame.Width, colorFrame.Height);
            BitmapData locked = bitmap.LockBits(bounds, ImageLockMode.WriteOnly, bitmap.PixelFormat);

            // Blit the raw pixel buffer straight into the bitmap's memory.
            Marshal.Copy(pixels, 0, locked.Scan0, colorFrame.PixelDataLength);
            bitmap.UnlockBits(locked);

            colorFrame.Dispose();

            return bitmap;
        }
        /// <summary>
        /// Receives synchronized color/depth/skeleton frames, copies their data
        /// into reusable buffers, and kicks off face tracking.
        /// </summary>
        /// <param name="sensor">Sensor that produced the frames.</param>
        /// <param name="cf">Current color frame.</param>
        /// <param name="df">Current depth frame.</param>
        /// <param name="sf">Current skeleton frame.</param>
        void Context_AllFramesUpdated(KinectSensor sensor, ColorImageFrame cf, DepthImageFrame df, SkeletonFrame sf)
        {
            this.sensor = sensor;
            // Allocate the buffers once, sized from the first frames seen.
            // NOTE(review): assumes the frame sizes never change afterwards — confirm.
            if (colorImage == null)
            {
                colorImage = new byte[cf.PixelDataLength];
                depthImage = new short[df.PixelDataLength];
                skeletonData = new Skeleton[sf.SkeletonArrayLength];
            }

            cf.CopyPixelDataTo(colorImage);
            df.CopyPixelDataTo(depthImage);
            sf.CopySkeletonDataTo(skeletonData);

            TrackFace();
        }
        /// <summary>
        /// Crops the tracked player's face from a color frame: runs Haar-cascade
        /// face detection and returns the detected face rectangle that contains
        /// the skeleton's head point, resized to 200x200 grayscale.
        /// Returns null when the head is untracked or no matching face is found.
        /// </summary>
        /// <param name="sensor">Sensor used to map the head joint into color space.</param>
        /// <param name="colorFrame">Color frame to search.</param>
        /// <param name="skeleton">Skeleton whose head anchors the search.</param>
        public static Image<Gray, Byte> cropImage(KinectSensor sensor, ColorImageFrame colorFrame, Skeleton skeleton)
        {
            Image<Bgr, byte> tarImage = null;

            // Bail out when the head joint is not tracked at all.
            Joint head = skeleton.Joints[JointType.Head];
            if (head.TrackingState == JointTrackingState.NotTracked) return null;

            // Map the 3D head position into color-image coordinates.
            ColorImagePoint headPoint = sensor.CoordinateMapper.MapSkeletonPointToColorPoint(head.Position, colorFrame.Format);

            byte[] pixelData = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(pixelData);
            Bitmap bitmap = null;
            try
            {
                // Convert frame pixels -> WriteableBitmap -> GDI+ Bitmap for EmguCV.
                WriteableBitmap wBitmap = new WriteableBitmap(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                Int32Rect rect = new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height);
                int stride = colorFrame.Width * colorFrame.BytesPerPixel;
                wBitmap.WritePixels(rect, pixelData, stride, 0);
                bitmap = bitmapSourceToBitmap(wBitmap);

                long detectionTime;
                List<Rectangle> faces = new List<Rectangle>();
                List<Rectangle> eyes = new List<Rectangle>();

                // Haar-cascade face/eye detection over the whole frame.
                Image<Bgr, byte> image = new Image<Bgr, byte>(bitmap);
                DetectFace.Detect(image, "haarcascade_frontalface_alt2.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

                // Keep the first detected face whose rectangle contains the head point.
                foreach( var face in faces )
                {
                    if (face.X < headPoint.X && face.X + face.Width > headPoint.X && face.Y < headPoint.Y && face.Y + face.Height > headPoint.Y)
                    {
                        image.ROI = face;
                        tarImage = image.Copy();
                        tarImage = tarImage.Resize(200, 200, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                        break;
                    }
                }
            }
            catch (Exception e)
            {
                // NOTE(review): all failures are swallowed after logging, so the
                // caller just sees null — consider narrowing the exception type.
                Console.WriteLine(e.Message);
            }

            if (tarImage == null) return null;

            return tarImage.Convert<Gray, Byte>();
        }
        /// <summary>
        /// Shows only the player: builds an output color buffer that is white
        /// everywhere except at pixels where the depth stream detected a player,
        /// which keep the RGB camera's colors.
        /// </summary>
        /// <param name="kinect">Sensor providing the streams and coordinate mapping.</param>
        /// <param name="colorFrame">Current RGB frame.</param>
        /// <param name="depthFrame">Current depth frame (carries player indices).</param>
        /// <returns>Bgr32 pixel buffer with the background masked to white.</returns>
        private byte[] BackgroundMask( KinectSensor kinect,
      ColorImageFrame colorFrame, DepthImageFrame depthFrame )
        {
            ColorImageStream colorStream = kinect.ColorStream;
              DepthImageStream depthStream = kinect.DepthStream;

              // Get the per-pixel data from the RGB camera.
              byte[] colorPixel = new byte[colorFrame.PixelDataLength];
              colorFrame.CopyPixelDataTo( colorPixel );

              // Get the per-pixel data from the depth camera.
              short[] depthPixel = new short[depthFrame.PixelDataLength];
              depthFrame.CopyPixelDataTo( depthPixel );

              // Get the RGB-camera coordinate for each depth-camera pixel
              // (registration between the two cameras).
              ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
              kinect.MapDepthFrameToColorFrame( depthStream.Format, depthPixel,
            colorStream.Format, colorPoint );

              // Output buffer, initialized to white (255, 255, 255).
              byte[] outputColor = new byte[colorPixel.Length];
              for ( int i = 0; i < outputColor.Length; i += Bgr32BytesPerPixel ) {
            outputColor[i] = 255;
            outputColor[i + 1] = 255;
            outputColor[i + 2] = 255;
              }

              for ( int index = 0; index < depthPixel.Length; index++ ) {
            // Extract the player index bits from the depth value.
            int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;

            // The mapped coordinate can exceed the frame bounds, so clamp it.
            int x = Math.Min( colorPoint[index].X, colorStream.FrameWidth - 1 );
            int y = Math.Min( colorPoint[index].Y, colorStream.FrameHeight - 1 );
            // NOTE(review): row offset uses depthFrame.Width — correct only if the
            // depth and color frames share the same width; confirm the formats.
            int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

            // Where a player was detected, copy the RGB camera's pixel through.
            if ( player != 0 ) {
              outputColor[colorIndex] = colorPixel[colorIndex];
              outputColor[colorIndex + 1] = colorPixel[colorIndex + 1];
              outputColor[colorIndex + 2] = colorPixel[colorIndex + 2];
            }
              }

              return outputColor;
        }
Example #29
0
		/// <summary>
		/// Serializes a color frame to the recording stream: frame type marker,
		/// elapsed time since the previous frame, frame metadata, then the raw
		/// pixel payload prefixed by its length.
		/// </summary>
		/// <param name="frame">Color frame to serialize.</param>
		public void Record(ColorImageFrame frame)
		{
			writer.Write((int) FrameType.Color);

			// BUG FIX: DateTime.Now was read twice (once for the delta, once for
			// the new reference), silently dropping the time between the two
			// reads from the recorded timeline; capture it once.
			var now = DateTime.Now;
			var timeSpan = now.Subtract(referenceTime);
			referenceTime = now;
			writer.Write((long) timeSpan.TotalMilliseconds);

			writer.Write(frame.BytesPerPixel);
			writer.Write((int) frame.Format);
			writer.Write(frame.Width);
			writer.Write(frame.Height);

			writer.Write(frame.FrameNumber);

			// Pixel payload, prefixed with its length.
			writer.Write(frame.PixelDataLength);
			var bytes = new byte[frame.PixelDataLength];
			frame.CopyPixelDataTo(bytes);
			writer.Write(bytes);
		}
Example #30
0
        /// <summary>
        /// Copies a 32bpp (BGRX) color frame into CurrentValue as 24bpp BGR,
        /// dropping the X/alpha byte, then signals that new data is available.
        /// </summary>
        /// <param name="frame">Color frame to copy; not disposed here.</param>
        protected unsafe void ProcessFrame(ColorImageFrame frame)
        {
            var bytes = new byte[frame.PixelDataLength];
            frame.CopyPixelDataTo(bytes);

            BitmapData bitmapData = this.CurrentValue.LockBits(new System.Drawing.Rectangle(0, 0, this.Width, this.Height), ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            byte* pBase = (byte*)bitmapData.Scan0.ToPointer();
            int pointer = 0;

            // BUG FIX: 24bpp bitmap rows are padded to 4-byte boundaries, so the
            // destination must advance by bitmapData.Stride per row instead of
            // assuming rows are contiguous (the old code corrupted the image
            // whenever Width * 3 was not a multiple of 4).
            for (int y = 0; y < this.Height; y++) {
                byte* pDest = pBase + (long)y * bitmapData.Stride;
                for (int x = 0; x < this.Width; x++) {
                    pDest[0] = bytes[pointer];      // B
                    pDest[1] = bytes[pointer + 1];  // G
                    pDest[2] = bytes[pointer + 2];  // R
                    pDest += 3;                     // 3 bytes per destination pixel
                    pointer += 4;                   // 4 bytes per source pixel (BGRX)
                }
            }
            this.CurrentValue.UnlockBits(bitmapData);
            this.OnNewDataAvailable();
        }
Example #31
0
 /// <summary>
 /// Copies a Kinect color frame into a new 32bpp RGB Bitmap.
 /// </summary>
 /// <param name="f">Frame to convert; not disposed here.</param>
 /// <returns>A new Bitmap containing the frame's pixels.</returns>
 public static Bitmap frameToImage(ColorImageFrame f)
 {
     var pixeldata = new byte[f.PixelDataLength];
     f.CopyPixelDataTo(pixeldata);

     var bmap = new Bitmap(f.Width, f.Height, PixelFormat.Format32bppRgb);
     var bounds = new Rectangle(0, 0, f.Width, f.Height);
     BitmapData bmapdata = bmap.LockBits(bounds, ImageLockMode.WriteOnly, bmap.PixelFormat);

     // 32bpp rows are always 4-byte aligned, so a flat copy is safe here.
     Marshal.Copy(pixeldata, 0, bmapdata.Scan0, f.PixelDataLength);
     bmap.UnlockBits(bmapdata);

     return bmap;
 }