Example #1
        public override void Dispose()
        {
            lock (this)
            {
                depthFrame.CloseAllOpenStreams();
                colorFrame.CloseAllOpenStreams();
                textureFrame.CloseAllOpenStreams();

                capture.Dispose();
                session.Dispose();
                capture = null;
                session = null;
            }
        }
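A note on Example #1: locking on `this` works, but it lets any external code that holds a reference to the object take the same lock and stall the Dispose path. A minimal alternative sketch, assuming the class can add a private lock field (the field name here is an assumption, not from the original):

        // Sketch only: assumes a field such as
        //     private readonly object _disposeLock = new object();
        // The null checks also make a second Dispose call harmless.
        public override void Dispose()
        {
            lock (_disposeLock)
            {
                depthFrame.CloseAllOpenStreams();
                colorFrame.CloseAllOpenStreams();
                textureFrame.CloseAllOpenStreams();

                if (capture != null) { capture.Dispose(); capture = null; }
                if (session != null) { session.Dispose(); session = null; }
            }
        }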
Example #2
        public void AdvancedPipeline()
        {
            PXCMSession session;
            pxcmStatus  sts = PXCMSession.CreateInstance(out session);

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to create an SDK session");
                return;
            }

            /* Set Module */
            PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
            desc.friendlyName.set(form.GetCheckedModule());

            PXCMEmotion emotionDet;

            sts = session.CreateImpl<PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to create the emotionDet module");
                session.Dispose();
                return;
            }

            UtilMCapture capture = null;

            if (form.GetRecordState())
            {
                capture = new UtilMCaptureFile(session, form.GetFileName(), true);
                capture.SetFilter(form.GetCheckedDevice());
            }
            else if (form.GetPlaybackState())
            {
                capture = new UtilMCaptureFile(session, form.GetFileName(), false);
            }
            else
            {
                capture = new UtilMCapture(session);
                capture.SetFilter(form.GetCheckedDevice());
            }

            form.UpdateStatus("Pair moudle with I/O");
            for (uint i = 0; ; i++)
            {
                PXCMEmotion.ProfileInfo pinfo;
                sts = emotionDet.QueryProfile(i, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
                sts = capture.LocateStreams(ref pinfo.inputs);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    continue;
                }
                sts = emotionDet.SetProfile(ref pinfo);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
            }
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to pair the emotionDet module with I/O");
                capture.Dispose();
                emotionDet.Dispose();
                session.Dispose();
                return;
            }

            form.UpdateStatus("Streaming");
            PXCMImage[] images            = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
            PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
            while (!form.stop)
            {
                PXCMImage.Dispose(images);
                PXCMScheduler.SyncPoint.Dispose(sps);
                sts = capture.ReadStreamAsync(images, out sps[0]);
                if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
                {
                    continue;
                }
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                sts = emotionDet.ProcessImageAsync(images, out sps[1]);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                PXCMScheduler.SyncPoint.SynchronizeEx(sps);
                sts = sps[0].Synchronize();
                if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
                {
                    continue;
                }
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                /* Display Results */
                DisplayPicture(capture.QueryImage(images, PXCMImage.ImageType.IMAGE_TYPE_COLOR));
                DisplayLocation(emotionDet);
                form.UpdatePanel();
            }
            PXCMImage.Dispose(images);
            PXCMScheduler.SyncPoint.Dispose(sps);

            capture.Dispose();
            emotionDet.Dispose();
            session.Dispose();
            form.UpdateStatus("Stopped");
        }
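The pipeline in Example #2 has a fixed shape: create a session, create the emotion module, bind a capture source, pair a module profile with the capture streams, then loop reading and processing frames through two sync points. The comparison `sts < pxcmStatus.PXCM_STATUS_NO_ERROR` recurs at every step; a small helper (hypothetical, not part of the SDK) keeps those checks readable:

        // Hypothetical readability helper; the SDK itself only defines the
        // pxcmStatus enum, so this wrapper is an assumption.
        private static bool Failed(pxcmStatus sts)
        {
            return sts < pxcmStatus.PXCM_STATUS_NO_ERROR;
        }

        // Usage: if (Failed(sts)) { form.UpdateStatus("..."); return; }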
Example #4
        public void Start()
        {
            //create session
            PXCMSession session;
            pxcmStatus  status = PXCMSession.CreateInstance(out session);

            if (IsError(status))
            {
                OnError(CamEvent.FAILED_TO_CREATE_SDK_SESSION);
                return;
            }

            //create gesture-module
            PXCMBase gestureBase;

            status = session.CreateImpl(PXCMGesture.CUID, out gestureBase);
            if (IsError(status))
            {
                OnError(CamEvent.FAILED_TO_LOAD_GESTURE_RECOGNITION);
                session.Dispose();
                return;
            }

            //create gesture-profile
            PXCMGesture gesture = (PXCMGesture)gestureBase;

            PXCMGesture.ProfileInfo profileInfo;
            status = gesture.QueryProfile(0, out profileInfo);
            profileInfo.activationDistance = 70;

            //setup gesture-capture
            UtilMCapture capture = new UtilMCapture(session);

            status = capture.LocateStreams(ref profileInfo.inputs);
            if (IsError(status))
            {
                OnError(CamEvent.FAILED_TO_LOCATE_CAPTURE_MODULE);
                gesture.Dispose();
                capture.Dispose();
                session.Dispose();
                return;
            }

            status = gesture.SetProfile(ref profileInfo);
            status = gesture.SubscribeAlert(this.OnAlertHandler);
            status = gesture.SubscribeGesture(100, this.OnGesureHandler);

            //start capture of frames
            bool device_lost = false;

            PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
            PXCMScheduler.SyncPoint[] syncPoints = new PXCMScheduler.SyncPoint[2];

            while (_tracking)
            {
                status = capture.ReadStreamAsync(images, out syncPoints[0]);
                if (IsError(status))
                {
                    if (status == pxcmStatus.PXCM_STATUS_DEVICE_LOST)
                    {
                        if (!device_lost)
                        {
                            OnError(CamEvent.DEVICE_DISCONNECTED);
                        }
                        device_lost = true;
                        continue;
                    }
                    OnError(CamEvent.DEVICE_FAILED);
                    break;
                }
                if (device_lost)
                {
                    OnNotify(CamEvent.DEVICE_RECONNECTED);
                    device_lost = false;
                }

                status = gesture.ProcessImageAsync(images, out syncPoints[1]);
                if (IsError(status))
                {
                    break;
                }

                PXCMScheduler.SyncPoint.SynchronizeEx(syncPoints);
                if (syncPoints[0].Synchronize(0) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    PXCMGesture.GeoNode data;
                    status = gesture.QueryNodeData(0, PXCMGesture.GeoNode.Label.LABEL_BODY_HAND_PRIMARY, out data);
                    if (!IsError(status))
                    {
                        if (ShapeHelper.IsPointInsideRect(data.positionImage.x, data.positionImage.y, Constants.FoVWindow))
                        {
                            //adjust the point to field-of-view window
                            Point cameraPoint = new Point(data.positionImage.x - Constants.FoVWindow.X, data.positionImage.y - Constants.FoVWindow.Y);
                            //cameraPoint = ShapeHelper.RotatePoint(cameraPoint, Constants.FoVCenter, Constants.RotationAngle);
                            OnMovement(cameraPoint);

                            if (data.opennessState != _previousOpenness)
                            {
                                OnOpenClose(data.opennessState, data.openness);
                                _previousOpenness = data.opennessState;
                            }
                        }
                        else
                        {
                            OnNotify(CamEvent.HOVERING_OUTSIDE);
                        }
                    }
                }

                foreach (PXCMScheduler.SyncPoint p in syncPoints)
                {
                    if (p != null)
                    {
                        p.Dispose();
                    }
                }
                foreach (PXCMImage img in images)
                {
                    if (img != null)
                    {
                        img.Dispose();
                    }
                }
            }

            if (gesture != null)
            {
                gesture.Dispose();
            }
            if (capture != null)
            {
                capture.Dispose();
            }
            if (session != null)
            {
                session.Dispose();
            }
        }
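Example #5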
        public void RunEmotionRecognition()
        {
            PXCMSession session;
            pxcmStatus sts = PXCMSession.CreateInstance(out session);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("Failed to create an SDK session");
                return;
            }

            // Set Module //
            PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
            desc.friendlyName.set(moduleName);

            PXCMEmotion emotionDet;
            sts = session.CreateImpl<PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("Failed to create the emotionDet module");
                session.Dispose();
                return;
            }

            UtilMCapture capture = null;
            capture = new UtilMCapture(session);
            capture.SetFilter(captureDeviceName);

            Console.WriteLine("Pair moudle with I/O");
            for (uint i = 0; ; i++)
            {
                PXCMEmotion.ProfileInfo pinfo;
                sts = emotionDet.QueryProfile(i, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;
                sts = capture.LocateStreams(ref pinfo.inputs);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) continue;
                sts = emotionDet.SetProfile(ref pinfo);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR) break;
            }
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("Failed to pair the emotionDet module with I/O");
                capture.Dispose();
                emotionDet.Dispose();
                session.Dispose();
                return;
            }

            Console.WriteLine("Streaming");
            PXCMImage[] images = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
            PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
            while (!_shouldStop)
            {
                PXCMImage.Dispose(images);
                PXCMScheduler.SyncPoint.Dispose(sps);
                sts = capture.ReadStreamAsync(images, out sps[0]);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;

                sts = emotionDet.ProcessImageAsync(images, out sps[1]);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;

                PXCMScheduler.SyncPoint.SynchronizeEx(sps);
                sts = sps[0].Synchronize();
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR) break;

                // Display Results //
                GetEmoData(emotionDet);
                Thread.Sleep(500);
            }
            PXCMImage.Dispose(images);
            PXCMScheduler.SyncPoint.Dispose(sps);

            capture.Dispose();
            emotionDet.Dispose();
            session.Dispose();
            Console.WriteLine("Stopped");
        }
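Example #6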
        private void recording()
        {
            //Default values
            string output_file_name = timestamp + ".wav";

            // Buffer the raw audio data in a memory stream; the header is written once recording completes.
            using (MemoryStream writer = new MemoryStream())
            {
                pxcmStatus status = PXCMSession.CreateInstance(out this.session);
                if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    Console.Error.WriteLine("Failed to create the PXCMSession. status = " + status);
                    return;
                }

                PXCMCapture.AudioStream.DataDesc request = new PXCMCapture.AudioStream.DataDesc();
                request.info.nchannels  = 1;
                request.info.sampleRate = 44100;
                uint subchunk2_data_size = 0;

                // Use the capture utility
                using (this.session)
                    using (UtilMCapture capture = new UtilMCapture(this.session))
                    {
                        // Locate a stream that meets our request criteria
                        status = capture.LocateStreams(ref request);
                        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            Console.Error.WriteLine("Unable to locate audio stream. status = " + status);
                            return;
                        }

                        // Set the volume level
                        status = capture.device.SetProperty(PXCMCapture.Device.Property.PROPERTY_AUDIO_MIX_LEVEL, 0.2f);
                        if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            Console.Error.WriteLine("Unable to set the volume level. status = " + status);
                            return;
                        }

                        Console.WriteLine("Begin audio recording");

                        isRecording = true;
                        // Get the n frames of audio data.
                        while (isRecording)
                        {
                            PXCMScheduler.SyncPoint sp = null;
                            PXCMAudio audio            = null;

                            // We will asynchronously read the audio stream, which
                            // will create a synchronization point and a reference
                            // to an audio object.
                            status = capture.ReadStreamAsync(out audio, out sp);
                            if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                            {
                                Console.Error.WriteLine("Unable to ReadStreamAsync. status = " + status);
                                return;
                            }

                            using (sp)
                                using (audio)
                                {
                                    // For each audio frame
                                    // 1) Synchronize so that you can access to the data
                                    // 2) acquire access
                                    // 3) write data while you have access,
                                    // 4) release access to the data

                                    status = sp.Synchronize();
                                    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                                    {
                                        Console.Error.WriteLine("Unable to Synchronize. status = " + status);
                                        return;
                                    }

                                    PXCMAudio.AudioData adata;

                                    status = audio.AcquireAccess(PXCMAudio.Access.ACCESS_READ, PXCMAudio.AudioFormat.AUDIO_FORMAT_PCM, out adata);
                                    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
                                    {
                                        Console.Error.WriteLine("Unable to AcquireAccess. status = " + status);
                                        return;
                                    }

                                    byte[] data = adata.ToByteArray();
                                    int    len  = data.Length;
                                    writer.Write(data, 0, len);

                                    // keep a running total of how much audio data has been captured
                                    subchunk2_data_size += (uint)(adata.dataSize * BYTES_PER_SAMPLE);

                                    audio.ReleaseAccess(ref adata);
                                }
                        }
                        Console.WriteLine("End audio recording");
                    }

                // The header needs to know how much data there is. Now that we are done recording audio
                // we know that information and can write out the header and the audio data to a file.
                using (BinaryWriter bw = new BinaryWriter(File.Open(output_file_name, FileMode.Create, FileAccess.Write)))
                {
                    bw.Seek(0, SeekOrigin.Begin);
                    WriteAudioHeader(bw, subchunk2_data_size, (short)request.info.nchannels, request.info.sampleRate);
                    bw.Write(writer.ToArray());
                }
            }
        }
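Example #6 calls WriteAudioHeader without showing its body. Below is a minimal sketch of what it could look like, assuming the canonical 44-byte RIFF/WAV header for 16-bit PCM; the name and signature are taken from the call site above, but the original implementation is not shown:

        // Assumed implementation: standard RIFF/WAVE header for PCM data.
        private static void WriteAudioHeader(BinaryWriter bw, uint dataSize, short channels, uint sampleRate)
        {
            const short bitsPerSample = 16;                         // 16-bit PCM samples
            short blockAlign = (short)(channels * bitsPerSample / 8);
            uint byteRate = sampleRate * (uint)blockAlign;

            bw.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
            bw.Write(36 + dataSize);                                // rest of header + data
            bw.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));

            bw.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
            bw.Write(16u);                                          // PCM fmt chunk size
            bw.Write((short)1);                                     // format 1 = PCM
            bw.Write(channels);
            bw.Write(sampleRate);
            bw.Write(byteRate);
            bw.Write(blockAlign);
            bw.Write(bitsPerSample);

            bw.Write(System.Text.Encoding.ASCII.GetBytes("data"));
            bw.Write(dataSize);                                     // subchunk2_data_size from the capture loop
        }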
Example #7
        public override bool Initialize()
        {
            sts = PXCMSession.CreateInstance(out session);

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("Failed to create the SDK session");
                return false;
            }
            PXCMSession.ImplDesc desc;
            session.QueryImpl(0, out desc);
            capture = new UtilMCapture(session);
            PXCMCapture.VideoStream.DataDesc dataDesc = new PXCMCapture.VideoStream.DataDesc();

            PXCMBase tmp1;
            sts = session.CreateImpl(PXCMFaceAnalysis.CUID, out tmp1);

            PXCMCapture.VideoStream.DataDesc.StreamDesc stream;

            stream = new PXCMCapture.VideoStream.DataDesc.StreamDesc();
            stream.format = PXCMImage.ColorFormat.COLOR_FORMAT_NV12;
            dataDesc.streams[0] = stream;

            stream = new PXCMCapture.VideoStream.DataDesc.StreamDesc();
            stream.format = PXCMImage.ColorFormat.COLOR_FORMAT_VERTICES;
            stream.sizeMin.width = 320;
            stream.sizeMin.height = 240;
            stream.sizeMax.width = 320;
            stream.sizeMax.height = 240;
            dataDesc.streams[1] = stream;

            sts = capture.LocateStreams(ref dataDesc);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Console.WriteLine("Failed to locate a capture module");
                capture.Dispose();
                session.Dispose();
                return false;
            }

            return true;
        }
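Unlike the earlier examples, Example #7 pairs the capture with explicitly requested streams (an NV12 color stream plus a 320x240 vertices stream) rather than with a module profile, and the status returned by the PXCMFaceAnalysis CreateImpl call is not checked before LocateStreams runs.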