private void InitCamera()
        {
            // Create the SDK session and log its version for diagnostics.
            session = Session.CreateInstance();
            System.Diagnostics.Debug.WriteLine("Version: " + session.Version.major);

            // The SenseManager owns and drives the camera pipeline.
            senseManager = session.CreateSenseManager();

            // Reader delivers color + depth samples, strongly synchronized.
            reader = SampleReader.Activate(senseManager);
            reader.EnableStream(StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);
            reader.EnableStream(StreamType.STREAM_TYPE_DEPTH, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);

            // Configure the face module: detection + landmarks, color-plus-depth mode.
            faceModule = FaceModule.Activate(senseManager);
            var config = faceModule.CreateActiveConfiguration();
            config.Detection.isEnabled       = detectionEnabled;
            config.Detection.maxTrackedFaces = maxTrackedFaces;
            config.Landmarks.isEnabled       = landmarksEnabled;
            config.Landmarks.maxTrackedFaces = maxTrackedFaces;
            config.TrackingMode = Intel.RealSense.Face.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH;
            config.EnableAllAlerts();
            config.ApplyChanges();

            // Start the pipeline, then obtain a projection for coordinate mapping.
            senseManager.Init();
            projection = senseManager.CaptureManager.Device.CreateProjection();

            System.Diagnostics.Debug.WriteLine("IsConnected: " + senseManager.IsConnected());
        }
// ---- Example #2 ----
    // Initializes the RealSense pipeline for this component: face tracking with
    // expression data, a color stream rendered into a Unity texture, and the
    // smoothers used to stabilize the tracked values. Failures are reported via
    // ErrorLog instead of being rethrown.
    protected void Init()
    {
        try
        {
            // Initialize RealSense.
            // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_face_general_procedure.html
            // Reference: .\Intel\RSSDK\sample\core\RawStreams.unity
            SenseManager = SenseManager.CreateInstance();

            // Face module: color-only tracking mode with expression data enabled.
            FaceModule = FaceModule.Activate(SenseManager);
            FaceModule.FrameProcessed += FaceModule_FrameProcessed;
            FaceData = FaceModule.CreateOutput();

            FaceConfig = FaceModule.CreateActiveConfiguration();
            FaceConfig.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
            FaceConfig.Expressions.Properties.Enabled = true;
            FaceConfig.ApplyChanges();

            // Color stream at 640x480 @ 30fps, delivered via the SampleArrived event.
            SampleReader = SampleReader.Activate(SenseManager);
            SampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
            SampleReader.SampleArrived += SampleReader_SampleArrived;

            // Init must come after all stream/module configuration; StreamFrames(false)
            // starts streaming without blocking this thread.
            SenseManager.Init();
            SenseManager.StreamFrames(false);

            // RawStreams: native plugin renders camera frames into the material's texture.
            Texture = NativeTexturePlugin.Activate();
            Material.mainTexture      = new Texture2D(640, 480, TextureFormat.BGRA32, false);
            Material.mainTextureScale = new Vector2(-1, -1); // flip the image
            TexPtr = Material.mainTexture.GetNativeTexturePtr();

            // Query the active color stream resolution.
            StreamProfileSet profile;
            SenseManager.CaptureManager.Device.QueryStreamProfileSet(out profile);
            Resolution = profile.color.imageInfo;

            // Initialize smoothers (one weighted smoother per tracked quantity).
            // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
            Smoother = Smoother.CreateInstance(SenseManager.Session);

            SmoothBody      = Smoother.Create3DWeighted(BodyPosSmoothWeight);
            SmoothHead      = Smoother.Create3DWeighted(HeadAngSmoothWeight);
            SmoothEyes      = Smoother.Create2DWeighted(EyesPosSmoothWeight);
            SmoothEyesClose = Smoother.Create1DWeighted(EyesCloseSmoothWeight);
            SmoothBrowRai   = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothBrowLow   = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothSmile     = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothKiss      = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothMouth     = Smoother.Create1DWeighted(FaceSmoothWeight);
            SmoothTongue    = Smoother.Create1DWeighted(FaceSmoothWeight);
        }
        catch (Exception e)
        {
            // Surface initialization errors in the UI; do not rethrow.
            ErrorLog.text  = "RealSense Error\n";
            ErrorLog.text += e.Message;
        }
    }
 private void EnsureFaceMapColors(IElementNode node)
 {
     // Assign a default color to every descendant element that carries a face module.
     foreach (var child in node.GetNodeEnumerator())
     {
         var faceModule = FaceModule.GetFaceModuleForElement(child);
         if (faceModule == null)
         {
             continue;
         }

         // Use the first valid color for this element (default color if none defined).
         faceModule.DefaultColor = ColorModule.getValidColorsForElementNode(child, true).FirstOrDefault();
     }
 }
// ---- Example #4 ----
        // Enables and configures the face module on the shared SenseManager:
        // color+depth tracking of a single face with landmarks, pose and a
        // subset of expressions; throws if the configuration cannot be applied.
        private void SetFaceConfiguration()
        {
            SenseManager.EnableFace();

            FaceModule = SenseManager.QueryFace();

            FaceConfiguration = FaceModule.CreateActiveConfiguration();

            // Track with color + depth; order candidate faces nearest-first.
            FaceConfiguration.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
            FaceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST;

            // Landmarks: one face, SDK smoothing disabled (raw samples).
            FaceConfiguration.landmarks.isEnabled       = true;
            FaceConfiguration.landmarks.maxTrackedFaces = 1;
            FaceConfiguration.landmarks.smoothingLevel  = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

            // Pose: one face, SDK smoothing disabled (raw samples).
            FaceConfiguration.pose.isEnabled       = true;
            FaceConfiguration.pose.maxTrackedFaces = 1;
            FaceConfiguration.pose.smoothingLevel  = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

            // Expressions: enable only the expressions the tracker consumes.
            ExpressionsConfiguration = FaceConfiguration.QueryExpressions();
            ExpressionsConfiguration.properties.isEnabled       = true;
            ExpressionsConfiguration.properties.maxTrackedFaces = 1;
            ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS);
            ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT);
            ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE);
            ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);

            // Route every SDK alert through OnAlert.
            FaceConfiguration.EnableAllAlerts();
            FaceConfiguration.SubscribeAlert(OnAlert);

            // Negative status codes indicate failure.
            pxcmStatus status = FaceConfiguration.ApplyChanges();
            if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("FaceConfiguration.ApplyChanges() error: " + status.ToString());
            }
        }
        // Starts a polling thread that samples face data (~10 Hz when a face is
        // tracked, 4 Hz backoff otherwise) and pushes it into the user-track
        // state. Stop the loop by setting IsTracking = false.
        private void StartTrackingThread()
        {
            IsTracking     = true;
            TrackingThread = new Thread(() =>
            {
                PXCMFaceData FaceData = FaceModule.CreateOutput();

                try
                {
                    while (IsTracking)
                    {
                        // Block until a synchronized frame is available.
                        pxcmStatus acquireFrameStatus = SdkCommonHelper.SenseManager.AcquireFrame(true);
                        if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            // No frame was acquired, so there is nothing to release here.
                            ResetUserTrackData();
                            Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
                            Thread.Sleep(250);
                            continue;
                        }

                        FaceData.Update();
                        int numberOfDetectedFaces = FaceData.QueryNumberOfDetectedFaces();
                        if (numberOfDetectedFaces != 1)
                        {
                            // Only single-user tracking is supported; clear stale data.
                            ResetUserTrackData();
                            SdkCommonHelper.SenseManager.ReleaseFrame();
                            Thread.Sleep(250);
                            continue;
                        }

                        PXCMFaceData.Face faceDataFace = FaceData.QueryFaceByIndex(0);

                        TrackUserPosition(faceDataFace);
                        TrackUserExpressions(faceDataFace);

                        SdkCommonHelper.SenseManager.ReleaseFrame();

                        Thread.Sleep(100);
                    }
                }
                finally
                {
                    // BUGFIX: the native face-data handle was previously leaked when
                    // the loop exited (the sibling StartTrackingLoop disposes it);
                    // always release it, even if tracking throws.
                    FaceData.Dispose();
                }
            });
            TrackingThread.Start();
        }
        // Returns the ElementNode for the given model, creating it (with face/order
        // properties as dictated by the model) on first sight, or re-parenting the
        // existing node under parentNode on subsequent calls.
        private ElementNode FindOrCreateElementNode(ElementModel elementModel, ElementNode parentNode)
        {
            ElementNode node;

            if (!_elementModelMap.TryGetValue(elementModel.Id, out node))
            {
                // Ensure the node has a usable name before creating it.
                if (string.IsNullOrEmpty(elementModel.Name))
                {
                    elementModel.Name = @"Unnamed";
                }

                // We have not created our element yet; create it with a unique name.
                node = ElementNodeService.Instance.CreateSingle(parentNode,
                                                                NamingUtilities.Uniquify(_elementNames, TokenizeName(elementModel.Name)));
                _elementModelMap.Add(elementModel.Id, node);
                _elementNames.Add(node.Name);

                if (elementModel.FaceComponent != FaceComponent.None)
                {
                    // Reuse an existing face-module property, otherwise add one.
                    FaceModule fm;
                    if (node.Properties.Contains(FaceDescriptor.ModuleId))
                    {
                        fm = node.Properties.Get(FaceDescriptor.ModuleId) as FaceModule;
                    }
                    else
                    {
                        fm = node.Properties.Add(FaceDescriptor.ModuleId) as FaceModule;
                    }

                    // BUGFIX: the 'as FaceModule' result was dereferenced unconditionally,
                    // risking a NullReferenceException when the property lookup/add yields
                    // null or an unexpected type.
                    if (fm != null)
                    {
                        if (ElementModel.IsPhoneme(elementModel.FaceComponent))
                        {
                            fm.PhonemeList.Add(elementModel.FaceComponent.ToString(), true);
                        }
                        else
                        {
                            switch (elementModel.FaceComponent)
                            {
                            case FaceComponent.EyesOpen:
                                fm.FaceComponents.Add(Property.Face.FaceComponent.EyesOpen, true);
                                break;

                            case FaceComponent.EyesClosed:
                                fm.FaceComponents.Add(Property.Face.FaceComponent.EyesClosed, true);
                                break;

                            case FaceComponent.Outlines:
                                fm.FaceComponents.Add(Property.Face.FaceComponent.Outlines, true);
                                break;
                            }
                        }
                    }
                }

                if (elementModel.IsLightNode)
                {
                    // Light nodes carry an ordering property and feed the preview prop.
                    if (node.Properties.Add(OrderDescriptor.ModuleId) is OrderModule order)
                    {
                        order.Order = elementModel.Order;
                    }

                    _leafNodes.Add(node);

                    PreviewCustomProp.AddLightNodes(elementModel, node);
                }
            }
            else
            {
                // Our element exists, so add this one as a child of the new parent too.
                VixenSystem.Nodes.AddChildToParent(node, parentNode);
            }

            return(node);
        }
// ---- Example #7 ----
        // Polls the camera until _shouldStop is set, tracking image data, pose and
        // expressions of exactly one detected face, then tears down the pipeline.
        private void StartTrackingLoop()
        {
            PXCMFaceData FaceData = FaceModule.CreateOutput();

            try
            {
                while (!_shouldStop)
                {
                    pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
                    if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        ResetTrackData();
                        Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
                        // BUGFIX: back off instead of busy-spinning when the camera
                        // keeps failing (matches the pacing of the other loop paths).
                        Thread.Sleep(250);
                        continue;
                    }

                    PXCMCapture.Sample captureSample = SenseManager.QueryFaceSample();
                    if (captureSample == null)
                    {
                        ResetTrackData();
                        SenseManager.ReleaseFrame();
                        continue;
                    }

                    // Forward the raw image data before face analysis.
                    TrackImageData(captureSample);

                    FaceData.Update();

                    // Only single-face tracking is supported; clear stale data otherwise.
                    int numberOfDetectedFaces = FaceData.QueryNumberOfDetectedFaces();
                    if (numberOfDetectedFaces != 1)
                    {
                        ResetTrackData();
                        SenseManager.ReleaseFrame();
                        continue;
                    }

                    PXCMFaceData.Face faceDataFace = FaceData.QueryFaceByIndex(0);

                    TrackPose(faceDataFace);
                    TrackExpressions(faceDataFace);
                    // Landmark/gaze tracking and detection data (bounding rect,
                    // average depth) are available on faceDataFace if needed later.

                    SenseManager.ReleaseFrame();

                    Thread.Sleep(250);
                }
            }
            finally
            {
                // BUGFIX: cleanup previously ran only on a clean loop exit; run it
                // even if tracking throws so native resources are always released.
                if (FaceData != null)
                {
                    FaceData.Dispose();
                }

                FaceConfiguration.Dispose();
                SenseManager.Close();
                SenseManager.Dispose();
            }
        }
// ---- Example #8 ----
    // Unity initialization: builds the RealSense pipeline (color + depth streams,
    // single-face color tracking) and wires the native texture renderer.
    void Start()
    {
        /* Create SenseManager Instance */
        sm = SenseManager.CreateInstance();

        /* Create a SampleReader Instance */
        sampleReader = SampleReader.Activate(sm);

        /* Enable Color & Depth Stream */
        sampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, colorWidth, colorHeight, colorFPS);
        sampleReader.EnableStream(StreamType.STREAM_TYPE_DEPTH, depthWidth, depthHeight, depthFPS);

        /* Subscribe to sample arrived event */
        sampleReader.SampleArrived += SampleArrived;

        /* Configure face tracking */
        faceModule = FaceModule.Activate(sm);
        if (faceModule == null)
        {
            Debug.LogError("FaceModule Initialization Failed");
            // BUGFIX: execution previously continued past the log and dereferenced
            // the null module, throwing a NullReferenceException.
            return;
        }

        FaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
        if (moduleConfiguration == null)
        {
            Debug.LogError("FaceConfiguration Initialization Failed");
            // BUGFIX: bail out instead of dereferencing the null configuration.
            return;
        }

        /* Track one face in color-only mode, scanning right to left */
        moduleConfiguration.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
        moduleConfiguration.Strategy     = TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
        moduleConfiguration.Detection.maxTrackedFaces = 1;
        moduleConfiguration.Landmarks.maxTrackedFaces = 1;

        moduleConfiguration.Detection.isEnabled = true;
        moduleConfiguration.Landmarks.isEnabled = true;

        moduleConfiguration.Pose.isEnabled = false;

        moduleConfiguration.EnableAllAlerts();

        Status applyChangesStatus = moduleConfiguration.ApplyChanges();

        Debug.Log(applyChangesStatus.ToString());

        /* Initialize pipeline (after all streams/modules are configured) */
        sm.Init();

        /* Create NativeTexturePlugin to render Texture2D natively */
        texPlugin = NativeTexturePlugin.Activate();

        RGBMaterial.mainTexture      = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);   // Update material's Texture2D with enabled image size.
        RGBMaterial.mainTextureScale = new Vector2(-1, -1);                                                   // Flip the image
        colorTex2DPtr = RGBMaterial.mainTexture.GetNativeTexturePtr();                                        // Retrieve native Texture2D Pointer

        DepthMaterial.mainTexture      = new Texture2D(depthWidth, depthHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
        DepthMaterial.mainTextureScale = new Vector2(-1, -1);                                                 // Flip the image
        depthTex2DPtr = DepthMaterial.mainTexture.GetNativeTexturePtr();                                      // Retrieve native Texture2D Pointer

        /* Start Streaming (non-blocking) */
        sm.StreamFrames(false);
    }