Example No. 1
 private void cb_sensor_SelectedIndexChanged(object sender, EventArgs e)
 {
     cb_videomode.Items.Clear();
     if (cb_sensor.SelectedItem != null && currentDevice != null)
     {
         if (currentSensor != null && currentSensor.isValid) currentSensor.Stop();
         switch ((string)(cb_sensor.SelectedItem))
         {
             case "Color":
                 currentSensor = currentDevice.CreateVideoStream(Device.SensorType.COLOR);
                 break;
             case "Depth":
                 currentSensor = currentDevice.CreateVideoStream(Device.SensorType.DEPTH);
                 break;
             case "IR":
                 currentSensor = currentDevice.CreateVideoStream(Device.SensorType.IR);
                 break;
             default:
                 break;
         }
         if (currentSensor == null)
             return; // guard: an unrecognized sensor name creates no stream to query
         VideoMode[] videoModes = currentSensor.SensorInfo.getSupportedVideoModes();
         for (int i = 0; i < videoModes.Length; i++)
         {
             if (videoModes[i].DataPixelFormat == VideoMode.PixelFormat.GRAY16 ||
                 videoModes[i].DataPixelFormat == VideoMode.PixelFormat.GRAY8 ||
                 videoModes[i].DataPixelFormat == VideoMode.PixelFormat.RGB888 ||
                 videoModes[i].DataPixelFormat == VideoMode.PixelFormat.DEPTH_1MM)
                 cb_videomode.Items.Add(videoModes[i]);
         }
     }
 }
 public static OpenNI.Status convertDepthToColor(VideoStream depthStream, VideoStream colorStream,
     int depthX, int depthY, UInt16 depthZ, out int pColorX, out int pColorY)
 {
     pColorX = 0;
     pColorY = 0;
     return CoordinateConverter_convertDepthToColor(depthStream.Handle, colorStream.Handle,
             depthX, depthY, depthZ, ref pColorX, ref pColorY);
 }
 public static OpenNI.Status convertDepthToWorld(VideoStream depthStream,
     float depthX, float depthY, float depthZ, out float pWorldX, out float pWorldY, out float pWorldZ)
 {
     pWorldX = 0;
     pWorldY = 0;
     pWorldZ = 0;
     return CoordinateConverter_convertDepthToWorld_Float(depthStream.Handle,
             depthX, depthY, depthZ, ref pWorldX, ref pWorldY, ref pWorldZ);
 }
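A minimal usage sketch for the converters above, assuming they live on the wrapper's CoordinateConverter class and that depthStream is a started depth VideoStream (both assumptions, not shown in the example):

 // Hypothetical values: project pixel (320, 240) with a 1200 mm depth reading
 // into world coordinates (millimeters).
 float wx, wy, wz;
 if (CoordinateConverter.convertDepthToWorld(depthStream, 320f, 240f, 1200f,
         out wx, out wy, out wz) == OpenNI.Status.OK)
     Console.WriteLine("World: ({0}, {1}, {2}) mm", wx, wy, wz);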
Example No. 4
 internal static VideoStream Private_Create(Device device, Device.SensorType sensorType)
 {
     IntPtr handle;
     OpenNI.throwIfError(VideoStream_create(out handle, device.Handle, sensorType));
     VideoStream vs = new VideoStream(handle);
     vs.ParentDevice = device;
     vs.handler_events = VideoStream_RegisterListener(handle, vs.internal_NewFrame);
     return vs;
 }
 public static OpenNI.Status convertWorldToDepth(
     VideoStream depthStream, float worldX, float worldY, float worldZ,
     out int pDepthX, out int pDepthY, out UInt16 pDepthZ)
 {
     pDepthX = 0;
     pDepthY = 0;
     pDepthZ = 0;
     return CoordinateConverter_convertWorldToDepth(depthStream.Handle,
         worldX, worldY, worldZ, ref pDepthX, ref pDepthY, ref pDepthZ);
 }
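The reverse mapping is symmetric; a sketch under the same assumptions (CoordinateConverter class name, started depthStream):

 // Hypothetical world point on the camera axis, 1.5 m away.
 int dx, dy;
 ushort dz;
 if (CoordinateConverter.convertWorldToDepth(depthStream, 0f, 0f, 1500f,
         out dx, out dy, out dz) == OpenNI.Status.OK)
     Console.WriteLine("Depth pixel: ({0}, {1}) at {2} mm", dx, dy, dz);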
Example No. 6
        static void Main(string[] args)
        {
            OpenNI.Status status;
            Console.WriteLine(OpenNI.Version.ToString());
            status = OpenNI.Initialize();
            if (!HandleError(status)) { Environment.Exit(0); }
            OpenNI.onDeviceConnected += new OpenNI.DeviceConnectionStateChanged(OpenNI_onDeviceConnected);
            OpenNI.onDeviceDisconnected += new OpenNI.DeviceConnectionStateChanged(OpenNI_onDeviceDisconnected);
            DeviceInfo[] devices = OpenNI.EnumerateDevices();
            if (devices.Length == 0)
                return;
            Device device;
            using (device = Device.Open(null, "lr")) // lean init and no reset flags
            {
                VideoStream depth;
                SensorInfo sensorInfo = device.getSensorInfo(Device.SensorType.DEPTH);
                if (sensorInfo != null)
                {
                    depth = VideoStream.Create(device, OpenNIWrapper.Device.SensorType.DEPTH);
                }

                if (device.hasSensor(Device.SensorType.DEPTH) &&
                    device.hasSensor(Device.SensorType.COLOR))
                {
                    VideoStream depthStream = device.CreateVideoStream(Device.SensorType.DEPTH);
                    VideoStream colorStream = device.CreateVideoStream(Device.SensorType.COLOR);
                    if (depthStream.isValid && colorStream.isValid)
                    {
                        if (!HandleError(depthStream.Start())) { OpenNI.Shutdown(); return; }
                        if (!HandleError(colorStream.Start())) { OpenNI.Shutdown(); return; }
                        new System.Threading.Thread(new System.Threading.ThreadStart(DisplayInfo)).Start();
                        depthStream.onNewFrame += new VideoStream.VideoStreamNewFrame(depthStream_onNewFrame);
                        colorStream.onNewFrame += new VideoStream.VideoStreamNewFrame(colorStream_onNewFrame);
                        VideoStream[] array = new VideoStream[] { depthStream, colorStream };
                        while (!Console.KeyAvailable)
                        {
                            VideoStream aS;
                            if (OpenNI.WaitForAnyStream(array, out aS) == OpenNI.Status.OK)
                            {
                                if (aS.Equals(colorStream))
                                    inlineColor++;
                                else
                                    inlineDepth++;
                                aS.readFrame().Release();
                            }
                        }
                        
                    }
                }
                Console.ReadLine();
            }
            OpenNI.Shutdown();
            Environment.Exit(0);
        }
Example No. 7
        public static void Main(string[] args)
        {
            try
            {
                OpenNI.Initialize();
                device = Device.Open(Device.AnyDevice);

                depthStream = device.CreateVideoStream(Device.SensorType.Depth);
                depthStream.VideoMode = new VideoMode
                                            {
                                                DataPixelFormat = VideoMode.PixelFormat.Depth1Mm, 
                                                Fps = 30, 
                                                Resolution = new Size(640, 480)
                                            };

                colorStream = device.CreateVideoStream(Device.SensorType.Color);
                colorStream.VideoMode = new VideoMode
                                            {
                                                DataPixelFormat = VideoMode.PixelFormat.Rgb888, 
                                                Fps = 30, 
                                                Resolution = new Size(640, 480)
                                            };
                device.DepthColorSyncEnabled = true;
                depthStream.Start();
                colorStream.Start();
                device.ImageRegistration = Device.ImageRegistrationMode.DepthToColor;
                Console.WriteLine("Image registration is active and working well.");
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            Console.WriteLine("Press enter to exit.");
            Console.ReadLine();
            if (device != null)
            {
                device.Close();
            }

            OpenNI.Shutdown();
        }
 public static OpenNI.Status ConvertDepthToColor(
     VideoStream depthStream, 
     VideoStream colorStream, 
     int depthX, 
     int depthY, 
     ushort depthZ, 
     out int colorX, 
     out int colorY)
 {
     colorX = 0;
     colorY = 0;
     return CoordinateConverter_convertDepthToColor(
         depthStream.Handle, 
         colorStream.Handle, 
         depthX, 
         depthY, 
         depthZ, 
         ref colorX, 
         ref colorY);
 }
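A usage sketch for ConvertDepthToColor, assuming the depthStream and colorStream from Main above are started and that the method is exposed on a CoordinateConverter class (an assumption); this mapping is mainly useful when ImageRegistration is left off:

 // Hypothetical pixel: find where depth pixel (100, 100) at 900 mm lands
 // in the color image.
 int cx, cy;
 if (CoordinateConverter.ConvertDepthToColor(depthStream, colorStream,
         100, 100, 900, out cx, out cy) == OpenNI.Status.Ok)
     Console.WriteLine("Color pixel: ({0}, {1})", cx, cy);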
Example No. 9
        public KinectOpenNi2()
        {
            HandleError(OpenNI.Initialize());

            DeviceInfo[] devices = OpenNI.EnumerateDevices();
            if (devices.Length == 0)
                HandleError(OpenNI.Status.NO_DEVICE);

            kinectDevice = devices[0].OpenDevice();

            colorSensor = kinectDevice.CreateVideoStream(Device.SensorType.COLOR);
            VideoMode[] videoModes = colorSensor.SensorInfo.getSupportedVideoModes();
            colorSensor.VideoMode = videoModes[1]; // second supported mode; this index is device-specific
            colorSensor.Start();
            colorSensor.onNewFrame += new VideoStream.VideoStreamNewFrame(colorSensor_onNewFrame);

            depthSensor = kinectDevice.CreateVideoStream(Device.SensorType.DEPTH);
            videoModes = depthSensor.SensorInfo.getSupportedVideoModes();
            depthSensor.VideoMode = videoModes[0];
            depthSensor.Start();
            depthSensor.onNewFrame += new VideoStream.VideoStreamNewFrame(depthSensor_onNewFrame);
        }
Example No. 10
        public OpenKinect()
        {
            HandleOpenNIError(OpenNI.Initialize());
            DeviceInfo[] devices = OpenNI.EnumerateDevices();
            if (devices.Length == 0)
                HandleOpenNIError(OpenNI.Status.NoDevice);
            kinectDevice = devices[0].OpenDevice();

            colorSensor = kinectDevice.CreateVideoStream(Device.SensorType.Color);
            colorSensor.VideoMode = colorSensor.SensorInfo.GetSupportedVideoModes()[1];

            //Start Skeleton Sensor
            HandleNiteError(NiTE.Initialize());
            try
            {
                SkeletonSensor = UserTracker.Create();
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }
Example No. 11
        private void colorStream_onNewFrame(VideoStream vStream)
        {
            if (vStream.isValid && vStream.isFrameAvailable())
            {
                using (VideoFrameRef frame = vStream.readFrame())
                {
                    if (frame.isValid)
                    {
                        if (this.colorBitmap == null)
                        {
                            this.colorBitmap = new Bitmap(1, 1);
                        }

                        lock (this.colorBitmap)
                        {
                            try
                            {
                                frame.updateBitmap(this.colorBitmap, VideoFrameRef.copyBitmapOptions.Force24BitRGB);
                            }
                            catch (Exception)
                            {
                                this.colorBitmap = frame.toBitmap(VideoFrameRef.copyBitmapOptions.Force24BitRGB);
                            }
                        }

                        this.BeginInvoke(
                            (Action)delegate
                                {
                                    lock (this.colorBitmap)
                                    {
                                        if (this.p_image.Image != null)
                                        {
                                            this.p_image.Image.Dispose();
                                        }

                                        this.p_image.Image = new Bitmap(this.colorBitmap);
                                        this.p_image.Refresh();
                                    }
                                });
                    }
                }
            }
        }
Example No. 12
 void Stop(bool isApply)
 {
     bool isSameDevice = isApply && currentDevice != null && currentDevice.isValid
         && currentDevice.DeviceInfo.URI == NiUI.Properties.Settings.Default.DeviceURI;
     bool isSameSensor = isApply && isSameDevice && currentSensor != null && currentSensor.isValid
         && currentSensor.SensorInfo.getSensorType() == (Device.SensorType)NiUI.Properties.Settings.Default.CameraType;
     if (!isSameSensor)
     {
         if (currentSensor != null && currentSensor.isValid)
         {
             currentSensor.Stop();
             currentSensor.onNewFrame -= currentSensor_onNewFrame;
         }
         currentSensor = null;
     }
     if (!isSameDevice)
     {
         //if (uTracker != null && uTracker.isValid)
         //    uTracker.Destroy();
         //if (hTracker != null && hTracker.isValid)
         //    hTracker.Destroy();
         if (currentDevice != null && currentDevice.isValid)
             currentDevice.Close();
         //hTracker = null;
         //uTracker = null;
         currentDevice = null;
     }
     isIdle = true;
     btn_stopstart.Text = "Start Streaming";
     if (!isApply)
     {
         broadcaster.ClearScreen();
         pb_image.Image = null;
         pb_image.Refresh();
     }
     if (NiUI.Properties.Settings.Default.AutoNotification)
         notify.Visible = false;
 }
Example No. 13
 public int getNumberOfFrames(VideoStream stream)
 {
     return PlaybackControl_getNumberOfFrames(this.Handle, stream.Handle);
 }
Example No. 14
 public OpenNI.Status seek(VideoStream stream, int frameIndex)
 {
     return PlaybackControl_seek(this.Handle, stream.Handle, frameIndex);
 }
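getNumberOfFrames (Example No. 13) and seek only make sense on a playback device. A sketch, assuming the wrapper opens .oni recordings by file URI and exposes the control through a PlaybackControl property (both assumed here, not shown in the examples):

 using (Device playback = Device.Open("capture.oni")) // assumed: a file URI opens a recording
 {
     VideoStream depth = playback.CreateVideoStream(Device.SensorType.DEPTH);
     PlaybackControl control = playback.PlaybackControl; // assumed property name
     int total = control.getNumberOfFrames(depth);
     control.seek(depth, total / 2); // jump to the middle of the recording
 }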
Example No. 15
        private void CbSensorSelectedIndexChanged(object sender, EventArgs e)
        {
            this.cb_videomode.Items.Clear();
            if (this.cb_sensor.SelectedItem != null && this.currentDevice != null)
            {
                if (this.currentSensor != null && this.currentSensor.IsValid)
                {
                    this.currentSensor.Stop();
                }

                switch ((string)this.cb_sensor.SelectedItem)
                {
                    case "Color":
                        this.currentSensor = this.currentDevice.CreateVideoStream(Device.SensorType.Color);
                        break;
                    case "Depth":
                        this.currentSensor = this.currentDevice.CreateVideoStream(Device.SensorType.Depth);
                        break;
                    case "IR":
                        this.currentSensor = this.currentDevice.CreateVideoStream(Device.SensorType.Ir);
                        break;
                }

                if (this.currentSensor != null)
                {
                    IEnumerable<VideoMode> videoModes = this.currentSensor.SensorInfo.GetSupportedVideoModes();
                    foreach (VideoMode mode in videoModes)
                    {
                        if (mode.DataPixelFormat == VideoMode.PixelFormat.Gray16
                            || mode.DataPixelFormat == VideoMode.PixelFormat.Gray8
                            || mode.DataPixelFormat == VideoMode.PixelFormat.Rgb888
                            || mode.DataPixelFormat == VideoMode.PixelFormat.Depth1Mm)
                        {
                            this.cb_videomode.Items.Add(mode);
                        }
                    }
                }
            }
        }
Example No. 16
 public int GetNumberOfFrames(VideoStream stream)
 {
     return PlaybackControl_getNumberOfFrames(this.Handle, stream.Handle);
 }
Example No. 17
        public static Status WaitForStream(VideoStream streams, int timeout = TimeoutForever)
        {
            VideoStream vs;
            Status returnValue = WaitForAnyStream(new[] { streams }, out vs, timeout);
            if (returnValue == Status.Ok && !vs.Equals(streams))
            {
                return Status.Error;
            }

            return returnValue;
        }
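A polling sketch built on this helper, assuming it is exposed as a static member of the OpenNI class and that depth is a started VideoStream (both assumptions):

        while (!Console.KeyAvailable)
        {
            // Wait up to 500 ms for the next frame instead of blocking forever.
            if (OpenNI.WaitForStream(depth, 500) == OpenNI.Status.Ok)
            {
                using (VideoFrameRef frame = depth.ReadFrame())
                {
                    // process the frame here
                }
            }
        }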
Example No. 18
 public OpenNI.Status Attach(VideoStream stream, bool allowLossyCompression = false)
 {
     return Recorder_attach(this.Handle, stream.Handle, allowLossyCompression);
 }
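A recording sketch around Attach, assuming a Recorder.Create(string) factory and Start/Stop methods mirroring OpenNI 2's C++ Recorder class (all assumed, not shown in the example):

 Recorder recorder = Recorder.Create("capture.oni"); // assumed factory
 recorder.Attach(depthStream);                       // lossless by default
 recorder.Start();                                   // assumed, mirrors C++ start()
 // ... frames from depthStream are written to capture.oni while it runs ...
 recorder.Stop();                                    // assumed, mirrors C++ stop()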
Example No. 19
 private static void DepthStreamOnNewFrame(VideoStream videoStream)
 {
     eventDepth++;
 }
Example No. 20
        void depthSensor_onNewFrame(VideoStream vStream)
        {
            if (!vStream.isValid || !vStream.isFrameAvailable())
                return;

            VideoFrameRef frame = vStream.readFrame();
            if (!frame.isValid)
                return;

            lock (depthMutex)
            {
                depthFrame = frame;
            }
        }
Example No. 21
        void colorSensor_onNewFrame(VideoStream vStream)
        {
            if (!vStream.isValid || !vStream.isFrameAvailable())
                return;

            VideoFrameRef frame = vStream.readFrame();
            if (!frame.isValid)
                return;

            lock (colorMutex)
            {
                colorFrame = frame;
            }
        }
 public static OpenNI.Status ConvertWorldToDepth(
     VideoStream depthStream, 
     float worldX, 
     float worldY, 
     float worldZ, 
     out float depthX, 
     out float depthY, 
     out float depthZ)
 {
     depthX = 0;
     depthY = 0;
     depthZ = 0;
     return CoordinateConverter_convertWorldToDepth_Float(
         depthStream.Handle, 
         worldX, 
         worldY, 
         worldZ, 
         ref depthX, 
         ref depthY, 
         ref depthZ);
 }
Example No. 23
        private void depthStream_onNewFrame(VideoStream vStream)
        {
            if (vStream.isValid && vStream.isFrameAvailable())
            {
                using (VideoFrameRef frame = vStream.readFrame())
                {
                    if (frame.isValid)
                    {
                        if (this.depthBitmap == null)
                        {
                            this.depthBitmap = new Bitmap(1, 1);
                        }

                        lock (this.depthBitmap)
                        {
                            try
                            {
                                frame.updateBitmap(
                                    this.depthBitmap,
                                    VideoFrameRef.copyBitmapOptions.Force24BitRGB
                                    | VideoFrameRef.copyBitmapOptions.DepthInvert
                                    | VideoFrameRef.copyBitmapOptions.DepthFillShadow
                                    | VideoFrameRef.copyBitmapOptions.DepthHistogramEqualize
                                    | VideoFrameRef.copyBitmapOptions.DepthFillRigthBlack);
                            }
                            catch (Exception)
                            {
                                this.depthBitmap =
                                    frame.toBitmap(
                                        VideoFrameRef.copyBitmapOptions.Force24BitRGB
                                        | VideoFrameRef.copyBitmapOptions.DepthInvert
                                        | VideoFrameRef.copyBitmapOptions.DepthFillShadow
                                        | VideoFrameRef.copyBitmapOptions.DepthHistogramEqualize
                                        | VideoFrameRef.copyBitmapOptions.DepthFillRigthBlack);
                            }
                        }

                        this.BeginInvoke(
                            (Action)delegate
                                {
                                    lock (this.depthBitmap)
                                    {
                                        if (this.p_depth.Image != null)
                                        {
                                            this.p_depth.Image.Dispose();
                                        }

                                        this.p_depth.Image = new Bitmap(this.depthBitmap);
                                        this.p_depth.Refresh();
                                    }
                                });
                    }
                }
            }
        }
Example No. 24
        //private Bitmap DepthFix(Bitmap depthBitmap)
        //{
        //    Bitmap x = new Bitmap(640, 480);
        //    Graphics g = Graphics.FromImage(x);
        //    g.DrawImage(depthBitmap, new Rectangle(30, 8, 600, 450));
        //    g.Flush();
        //    g.Dispose();
        //    return x;
        //}
        private void DeviceChanged()
        {
            this.but_anag.Enabled = false;
            this.but_stereo.Enabled = false;
            this.but_saveall.Enabled = false;
            if (this.selectedDevice != null)
            {
                this.selectedDevice.Close();
            }

            if (this.cb_devices.Items.Count < 1)
            {
                this.selectedDevice = null;
                return;
            }

            if (this.cb_devices.SelectedItem == null)
            {
                this.selectedDevice = null;
                return;
            }

            if (this.cb_devices.SelectedItem is string && this.cb_devices.SelectedItem.ToString() == "None")
            {
                this.selectedDevice = null;
                return;
            }

            if (!(this.cb_devices.SelectedItem is DeviceInfo))
            {
                this.selectedDevice = null;
                MessageBox.Show(
                    "Selected item is not a device.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            try
            {
                this.selectedDevice = (this.cb_devices.SelectedItem as DeviceInfo).OpenDevice();
            }
            catch (Exception)
            {
                this.selectedDevice = null;
                MessageBox.Show(
                    "Can not open selected device.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            if (!this.selectedDevice.hasSensor(Device.SensorType.COLOR))
            {
                this.selectedDevice.Close();
                this.selectedDevice = null;
                MessageBox.Show(
                    "Selected device can not offer depth stream.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            if (!this.selectedDevice.hasSensor(Device.SensorType.DEPTH))
            {
                this.selectedDevice.Close();
                this.selectedDevice = null;
                MessageBox.Show(
                    "Selected device can not offer depth stream.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            try
            {
                this.depthStream = this.selectedDevice.CreateVideoStream(Device.SensorType.DEPTH);
                this.colorStream = this.selectedDevice.CreateVideoStream(Device.SensorType.COLOR);
            }
            catch (Exception)
            {
                this.selectedDevice.Close();
                this.selectedDevice = null;
                this.depthStream = null;
                this.colorStream = null;
                MessageBox.Show(
                    "Can not create Depth and Color streams.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            this.cb_hd.Enabled = false;
            foreach (VideoMode vm in this.colorStream.SensorInfo.getSupportedVideoModes())
            {
                if (vm.Resolution.Equals(new Size(1280, 1024)) || vm.Resolution.Equals(new Size(1280, 960)))
                {
                    this.cb_hd.Enabled = true;
                    this.isUse960asHD = vm.Resolution.Height == 960;
                    break;
                }
            }

            VideoMode depthMode = new VideoMode
                                      {
                                          Resolution = new Size(640, 480),
                                          FPS = 30,
                                          DataPixelFormat = VideoMode.PixelFormat.DEPTH_1MM
                                      };
            VideoMode colorMode = new VideoMode
                                      {
                                          Resolution = new Size(640, 480),
                                          FPS = 30,
                                          DataPixelFormat = VideoMode.PixelFormat.RGB888
                                      };
            if (this.cb_hd.Enabled && this.cb_hd.Checked)
            {
                colorMode.Resolution = this.isUse960asHD ? new Size(1280, 960) : new Size(1280, 1024);
            }

            try
            {
                this.depthStream.VideoMode = depthMode;
                this.colorStream.VideoMode = colorMode;
            }
            catch (Exception)
            {
                this.selectedDevice.Close();
                this.selectedDevice = null;
                this.depthStream = null;
                this.colorStream = null;
                MessageBox.Show(
                    "Can not set Depth and Color streams video mode to 640x480@30fps. This application need at least this resolution.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            try
            {
                this.selectedDevice.ImageRegistration = Device.ImageRegistrationMode.DEPTH_TO_COLOR;
            }
            catch (Exception)
            {
                MessageBox.Show(
                    "We failed to register image over depth map.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Exclamation);
            }

            try
            {
                // this.selectedDevice.DepthColorSyncEnabled = true;
            }
            catch (Exception)
            {
            }

            if (!this.HandleError(this.depthStream.Start()) || !this.HandleError(this.colorStream.Start()))
            {
                this.selectedDevice.Close();
                this.selectedDevice = null;
                this.depthStream = null;
                this.colorStream = null;
                MessageBox.Show(
                    "Can not start depth and color streams.",
                    "Device Error",
                    MessageBoxButtons.OK,
                    MessageBoxIcon.Error);
                return;
            }

            this.depthStream.onNewFrame += this.depthStream_onNewFrame;
            this.colorStream.onNewFrame += this.colorStream_onNewFrame;
            this.but_anag.Enabled = true;
            this.but_stereo.Enabled = true;
            this.but_saveall.Enabled = true;
        }
Example No. 25
 void currentSensor_onNewFrame(VideoStream vStream)
 {
     if (vStream.isValid && vStream.isFrameAvailable())
     {
         using (VideoFrameRef frame = vStream.readFrame())
         {
             if (frame.isValid)
             {
                 VideoFrameRef.copyBitmapOptions options = VideoFrameRef.copyBitmapOptions.Force24BitRGB | VideoFrameRef.copyBitmapOptions.DepthFillShadow;
                 if (cb_invert.Checked)
                     options |= VideoFrameRef.copyBitmapOptions.DepthInvert;
                 if (cb_equal.Checked)
                     options |= VideoFrameRef.copyBitmapOptions.DepthHistogramEqualize;
                 if (cb_fill.Checked)
                     options |= (vStream.Mirroring) ? VideoFrameRef.copyBitmapOptions.DepthFillRigthBlack : VideoFrameRef.copyBitmapOptions.DepthFillLeftBlack;
                 lock (bitmap)
                 {
                     try
                     {
                         frame.updateBitmap(bitmap, options);
                     }
                     catch (Exception)
                     {
                         bitmap = frame.toBitmap(options);
                     }
                 }
                 this.BeginInvoke(new MethodInvoker(delegate()
                 {
                     lock (bitmap)
                     {
                         if (pb_image.Image != null)
                             pb_image.Image.Dispose();
                         pb_image.Image = new Bitmap(bitmap, pb_image.Size);
                         pb_image.Refresh();
                     }
                 }));
             }
         }
     }
 }
Example No. 26
 public OpenNI.Status Seek(VideoStream stream, int frameIndex)
 {
     return PlaybackControl_seek(this.Handle, stream.Handle, frameIndex);
 }
Example No. 27
 private static void ColorStreamOnNewFrame(VideoStream videoStream)
 {
     eventColor++;
 }
Example No. 28
        private void CurrentSensorOnNewFrame(VideoStream videoStream)
        {
            if (videoStream.IsValid && videoStream.IsFrameAvailable())
            {
                using (VideoFrameRef frame = videoStream.ReadFrame())
                {
                    if (frame.IsValid)
                    {
                        VideoFrameRef.CopyBitmapOptions options = VideoFrameRef.CopyBitmapOptions.Force24BitRgb
                                                                  | VideoFrameRef.CopyBitmapOptions.DepthFillShadow;
                        if (this.cb_invert.Checked)
                        {
                            options |= VideoFrameRef.CopyBitmapOptions.DepthInvert;
                        }

                        if (this.cb_equal.Checked)
                        {
                            options |= VideoFrameRef.CopyBitmapOptions.DepthHistogramEqualize;
                        }

                        if (this.cb_fill.Checked)
                        {
                            options |= videoStream.Mirroring
                                           ? VideoFrameRef.CopyBitmapOptions.DepthFillRigthBlack
                                           : VideoFrameRef.CopyBitmapOptions.DepthFillLeftBlack;
                        }

                        lock (this.bitmap)
                        {
                            /////////////////////// Instead of creating a bitmap object for each frame, you can simply
                            /////////////////////// update the one you have. Please note that you must be very careful
                            /////////////////////// in multi-threaded situations.
                            try
                            {
                                frame.UpdateBitmap(this.bitmap, options);
                            }
                            catch (Exception)
                            {
                                // Happens when our Bitmap object is not compatible with returned Frame
                                this.bitmap = frame.ToBitmap(options);
                            }

                            /////////////////////// END NOTE

                            /////////////////////// You can always use .toBitmap() if you don't want to
                            /////////////////////// clone the image later and want to stay safe in multi-threaded
                            /////////////////////// situations. This is a little slower, but easier to handle.
                            // bitmap = frame.toBitmap(options);
                            /////////////////////// END NOTE
                            if (this.cb_mirrorSoft.Checked)
                            {
                                this.bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);
                            }
                        }

                        ///////////////////// You can simply pass the newly created/updated image to a
                        ///////////////////// PictureBox right here instead of drawing it with Graphic object
                        // this.BeginInvoke(new MethodInvoker(delegate()
                        // {
                        // if (!pb_image.Visible)
                        // pb_image.Visible = true;
                        // if (bitmap == null)
                        // return;
                        // lock (bitmap) // this.BeginInvoke happens on UI Thread so it is better to always keep this lock in place
                        // {
                        // if (pb_image.Image != null)
                        // pb_image.Image.Dispose();

                        // /////////////////////// If you want to use one bitmap object for all frames, the
                        // /////////////////////// best way to prevent multi-thread access problems
                        // /////////////////////// is to clone the bitmap each time you want to send it to PictureBox
                        // pb_image.Image = new Bitmap(bitmap, bitmap.Size);
                        // /////////////////////// END NOTE

                        // /////////////////////// If you only use the toBitmap() method, you can simply skip the
                        // /////////////////////// cloning process. It is perfectly thread-safe.
                        // //pb_image.Image = bitmap;
                        // /////////////////////// END NOTE

                        // pb_image.Refresh();
                        // }
                        // }));
                        ///////////////////// END NOTE
                        if (!this.pb_image.Visible)
                        {
                            this.Invalidate();
                        }
                    }
                }
            }
        }
 public static OpenNI.Status ConvertDepthToWorld(
     VideoStream depthStream, 
     int depthX, 
     int depthY, 
     ushort depthZ, 
     out float worldX, 
     out float worldY, 
     out float worldZ)
 {
     worldX = 0;
     worldY = 0;
     worldZ = 0;
     return CoordinateConverter_convertDepthToWorld(
         depthStream.Handle, 
         depthX, 
         depthY, 
         depthZ, 
         ref worldX, 
         ref worldY, 
         ref worldZ);
 }
Example No. 30
        public static Status WaitForAnyStream(
            VideoStream[] streams, 
            out VideoStream readyStream, 
            int timeout = TimeoutForever)
        {
            readyStream = null;
            IntPtr[] streamArray = new IntPtr[streams.Length];

            int i = 0;
            foreach (VideoStream vs in streams)
            {
                streamArray[i] = vs.Handle;
                i++;
            }

            int selectedId = -1;
            Status returnValue;
            IntPtr arrayPointer = Marshal.AllocHGlobal(IntPtr.Size * streamArray.Length);
            try
            {
                Marshal.Copy(streamArray, 0, arrayPointer, streamArray.Length);
                returnValue = OpenNI_waitForAnyStream(arrayPointer, streamArray.Length, ref selectedId, timeout);
            }
            finally
            {
                Marshal.FreeHGlobal(arrayPointer);
            }

            if (returnValue == Status.Ok)
            {
                foreach (VideoStream vs in streams)
                {
                    if (vs.Equals(streamArray[selectedId]))
                    {
                        readyStream = vs;
                    }
                }
            }

            return returnValue;
        }
Example No. 31
 public static Status WaitForAnyStream(VideoStream[] pStreams, out VideoStream pReadyStream, int timeout = OpenNI.TIMEOUT_FOREVER)
 {
     pReadyStream = null;
     IntPtr[] pStreamArray = new IntPtr[pStreams.Length];
     
     int i = 0;
     foreach (VideoStream vs in pStreams)
     {
         pStreamArray[i] = vs.Handle;
         i++;
     }
     int selectedId = -1;
     Status ret = Status.ERROR;
     IntPtr arrayPointer = Marshal.AllocHGlobal(IntPtr.Size * pStreamArray.Length);
     try
     {
         Marshal.Copy(pStreamArray, 0, arrayPointer, pStreamArray.Length);
         ret = OpenNI_waitForAnyStream(arrayPointer, pStreamArray.Length, ref selectedId, timeout);
     }
     finally
     {
         Marshal.FreeHGlobal(arrayPointer);
     }
      
     if (ret == Status.OK)
         foreach (VideoStream vs in pStreams)
             if (vs.Equals(pStreamArray[selectedId]))
                 pReadyStream = vs;
     return ret;
 }
Example No. 32
 public OpenNI.Status Attach(VideoStream stream, bool allowLossyCompression = false)
 {
     return Recorder_attach(this.Handle, stream.Handle, allowLossyCompression);
 }
Example No. 33
 public static Status WaitForStream(VideoStream pStreams, int timeout = OpenNI.TIMEOUT_FOREVER)
 {
     VideoStream vs;
     Status ret = WaitForAnyStream(new VideoStream[] { pStreams }, out vs, timeout);
     if (ret == Status.OK && !vs.Equals(pStreams))
         return Status.ERROR;
     return ret;
 }