        public void Configure(RealSenseCamera camera)
        {
            _camera = camera;
            using (var faceModule = _camera.Manager.QueryFace()) {
                using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
                    moduleConfiguration.detection.maxTrackedFaces = 1;
                    var expressionConfig = moduleConfiguration.QueryExpressions();
                    expressionConfig.Enable();
                    expressionConfig.EnableAllExpressions();

                    var desc = new PXCMFaceConfiguration.RecognitionConfiguration.RecognitionStorageDesc();
                    desc.maxUsers     = 10;
                    desc.isPersistent = true;
                    var recognitionConfiguration = moduleConfiguration.QueryRecognition();
                    // Note: desc is an out parameter here, so CreateStorage replaces the
                    // values assigned above with the description of the created storage.
                    recognitionConfiguration.CreateStorage(StorageName, out desc);
                    recognitionConfiguration.UseStorage(StorageName);
                    recognitionConfiguration.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);

                    if (File.Exists(StorageFileName))
                    {
                        var bytes = File.ReadAllBytes(StorageFileName);
                        recognitionConfiguration.SetDatabaseBuffer(bytes);
                    }
                    recognitionConfiguration.Enable();
                    moduleConfiguration.ApplyChanges();
                }
            }
        }
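The example above restores the recognition database from StorageFileName but never writes it back. A minimal sketch of the saving side, assuming the RSSDK PXCMFaceData recognition API; SaveDatabase is an illustrative name, and the PXCMFaceData instance would come from the face module as in other examples in this listing.

 private void SaveDatabase(PXCMFaceData faceData)
 {
     // Hypothetical counterpart to SetDatabaseBuffer above: serialize the
     // recognition database and persist it for the next session.
     var recognitionData = faceData.QueryRecognitionModule();
     var buffer = new byte[recognitionData.QueryDatabaseSize()];
     recognitionData.QueryDatabaseBuffer(buffer); // copies the database into the buffer
     File.WriteAllBytes(StorageFileName, buffer);
 }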
 public void Configure(RealSenseCamera camera) {
     _camera = camera;
     _camera.Manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 
                                 _camera.ResolutionWidth, 
                                 _camera.ResolutionHeight, 
                                 _camera.FramesPerSecond);
 }
Example #4
 public void Configure(RealSenseCamera camera)
 {
     _camera = camera;
     _camera.Manager.EnableFace();
     _faceModule = camera.Manager.QueryFace();
     _faceData   = _faceModule.CreateOutput();
 }
 public void Configure(RealSenseCamera camera) {
     _camera = camera;
     _camera.Manager.EnableHand();
     _camera.Session.CreateImpl(out _smootherFactory);
     _smoothers.Add(_camera.LeftHand, _smootherFactory.Create3DQuadratic(0.8f));
     _smoothers.Add(_camera.RightHand, _smootherFactory.Create3DQuadratic(0.8f));
 }
Example #9
 /// <summary>
 /// Loop to query data from the RealSense camera with RealSenseCamera.PollForFrames.
 /// </summary>
 public static void Loop()
 {
     // Custom-made wrapper function that polls for frames (the point cloud) from the RealSense camera.
     if (Program.RealSenseEnabled && RealSenseCamera.PollForFrames(null, OnPointCloud))
     {
         // Set Ready to true once the first point-cloud frame is received;
         // other parts of the code cannot work until there is some data from the camera.
         if (!Ready)
         {
             Ready = true;
         }
     }
 }
 public void Configure(RealSenseCamera camera) {
     _camera = camera;
     using (var faceModule = _camera.Manager.QueryFace()) {
         using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
             moduleConfiguration.detection.maxTrackedFaces = 1;
              var expressionConfig = moduleConfiguration.QueryExpressions();
              expressionConfig.Enable();
              expressionConfig.EnableAllExpressions();
             moduleConfiguration.ApplyChanges();
         }
     }
 }
Example #12
        public static ICamera Create(params Capability[] capabilities)
        {
            RealSenseAssembliesLoader.Load();
            var camera = new RealSenseCamera();

            if (capabilities != null)
            {
                foreach (var capability in capabilities)
                {
                    camera.AddCapability(capability);
                }
            }
            return(camera);
        }
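A minimal usage sketch for the factory above, assuming ICamera exposes Start and Dispose as RealSenseCamera does in the Main examples below; the Capability member names are illustrative, since the enum's values are not part of this listing.

 // Hypothetical usage of the Create factory above.
 var camera = Create(Capability.FaceTracking, Capability.HandTracking); // assumed member names
 camera.Start();   // starting and disposing mirror the Main examples below
 // ... consume camera data ...
 camera.Dispose();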
Example #13
        public override void Load()
        {
            AddButton("Exit", () => Engine.CreateYesNoPrompt("Exit Program?", () => Environment.Exit(0)).Center((Engine.Window.WindowSize / 2)));
            AddButton("Stop", RealSenseCamera.Stop);
            AddButton("Start", RealSenseCamera.Start);

            Engine.Camera3D.SetPerspective(Engine.Window.WindowSize, (90.0f).ToRad(), NearClip, 100);

            LegShader = new ShaderProgram(new ShaderStage(ShaderType.VertexShader, "content/shaders/leg.vert"),
                                          new ShaderStage(ShaderType.FragmentShader, "content/shaders/leg.frag"));

            VertsMesh = new Mesh3D();
            VertsMesh.PrimitiveType = PrimitiveType.Points;
            VertsMesh.SetVertices(new Vertex3());

            // Variables
            Sparse        = ConVar.Register(nameof(Sparse).ToLower(), 0);         // 3
            LegLength     = ConVar.Register(nameof(LegLength).ToLower(), 100.0f); // 1.0f
            LegWidth      = ConVar.Register(nameof(LegWidth).ToLower(), 100.0f);  // 0.5f
            PickDistance  = ConVar.Register(nameof(PickDistance).ToLower(), 1.0f);
            PickSize      = ConVar.Register(nameof(PickSize).ToLower(), 0.01f);   // 0.025f
            PickSampleNum = ConVar.Register(nameof(PickSampleNum).ToLower(), 10); // 10

            ConCmd.Register("list", (Argv) => {
                GConsole.WriteLine(Sparse);
                GConsole.WriteLine(LegLength);
                GConsole.WriteLine(LegWidth);
                GConsole.WriteLine(PickDistance);
                GConsole.WriteLine(PickSize);
                GConsole.WriteLine(PickSampleNum);
            });

            Thread PollingThread = new Thread(() => {
                while (true)
                {
                    RealSenseCamera.PollForFrames(OnPointCloud: OnPointCloud);
                    Thread.Sleep(0);
                }
            });

            PollingThread.IsBackground = true;
            PollingThread.Start();

            Engine.Camera3D.Position = new Vector3(0, 0, -1);
            Engine.Camera3D.LookAt(Vector3.Zero);
            Engine.Camera3D.Position = new Vector3(0, 0, -NearClip);
        }
 public void Configure(RealSenseCamera camera)
 {
     _camera = camera;
     using (var faceModule = _camera.Manager.QueryFace()) {
         using (var moduleConfiguration = faceModule.CreateActiveConfiguration()) {
             var recognitionConfiguration = moduleConfiguration.QueryRecognition();
             recognitionConfiguration.SetRegistrationMode(PXCMFaceConfiguration.RecognitionConfiguration.RecognitionRegistrationMode.REGISTRATION_MODE_CONTINUOUS);
             if (File.Exists(StorageFileName))
             {
                 var bytes = File.ReadAllBytes(StorageFileName);
                 recognitionConfiguration.SetDatabaseBuffer(bytes);
             }
             recognitionConfiguration.Enable();
             moduleConfiguration.ApplyChanges();
         }
     }
 }
Example #15
        private static void Main(string[] args) {
            Item.DefaultNoiseThreshold = 0;

            RealSenseAssembliesLoader.Load();
            _cam = new RealSenseCamera();
            
            //TestHands();
            //TestFace();
            //TestFaceRecognition();
            //TestFacialExpressions();
            //TestEmotions();
            //TestSpeech();
            //TestGestures();
            TestImageStreaming();
            _cam.Start();

            ReadLine();
            _cam.Dispose();
        }
Example #16
        private static void Main(string[] args)
        {
            Item.DefaultNoiseThreshold = 0;

            RealSenseAssembliesLoader.Load();
            _cam = new RealSenseCamera();

            //TestHands();
            //TestFingers();
            TestFace();
            //TestFaceRecognition();
            //TestFacialExpressions();
            //TestSpeech();
            //TestGestures();
            //TestImageStreaming();
            _cam.Start();

            ReadLine();
            _cam.Dispose();
        }
 public void Configure(RealSenseCamera camera) {
     _camera = camera;
     camera.Manager.EnableHand();
     using (var handModule = camera.Manager.QueryHand()) {
         using (var handConfig = handModule.CreateActiveConfiguration()) {
             //handConfig.EnableAllAlerts();
             int numGestures = handConfig.QueryGesturesTotalNumber();
             for (int i = 0; i < numGestures; i++) {
                 string name;
                 handConfig.QueryGestureNameByIndex(i, out name);
                 Debug.WriteLine("Gestures: " + name);
             }
             handConfig.EnableAllGestures();
             handConfig.SubscribeGesture(OnGesture);
             handConfig.EnableTrackedJoints(true);
             handConfig.ApplyChanges();
         }
     }
     _sensor = (GestureSensor) camera.Gestures;
     GestureSlide.Configure(camera, (GestureSensor) camera.Gestures);
 }
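The SubscribeGesture call above expects a gesture callback that the listing never shows. A minimal sketch of such a handler, assuming the RSSDK delegate signature that receives a PXCMHandData.GestureData; the body is illustrative.

 private void OnGesture(PXCMHandData.GestureData gestureData)
 {
     // Fired once per recognized gesture; gestureData.name holds the
     // gesture identifier, matching the names queried in the loop above.
     Debug.WriteLine("Gesture fired: " + gestureData.name);
 }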
Example #18
 public void Configure(RealSenseCamera camera)
 {
     _camera = camera;
     _camera.Manager.Enable3DSeg();
 }
Example #21
 public void Configure(RealSenseCamera camera)
 {
     _camera = camera;
     _camera.Manager.EnableEmotion();
     Debug.WriteLine("EmotionCapability enabled");
 }
Example #22
        /// <summary>
        /// Queries the available camera modes, configures and starts the RealSense camera,
        /// or generates a fake point cloud when FakePosition is set.
        /// </summary>
        static void InitInternal()
        {
            Console.WriteLine(ConsoleColor.DarkCyan, "Starting RealSense");

            if (!FakePosition)
            {
                // Resolutions contains every available combination of sensor, resolution, and
                // format the camera can provide (depth, RGB, etc.), sorted by framerate.
                IEnumerable <FrameData> Resolutions      = RealSenseCamera.QueryResolutions().OrderBy((Data) => - Data.Framerate);
                IEnumerable <FrameData> DepthResolutions = Resolutions.Where((Data) => Data.Type == FrameType.Depth); // only the depth modes

                int ReqW = 640;
                int ReqH = 480;

                //int ReqW = 848;
                //int ReqH = 480;

                //int ReqW = 1280;
                //int ReqH = 720;

                // From the available modes, pick exactly the depth and color formats we want.
                FrameData DepthRes = DepthResolutions.Where((Data) => Data.Width == ReqW && Data.Height == ReqH && Data.Format == FrameFormat.Z16).First();
                FrameData ColorRes = Resolutions.Where((Data) => Data.Width == ReqW && Data.Height == ReqH && Data.Format == FrameFormat.Rgb8).First();

                W = ColorRes.Width;
                H = ColorRes.Height;

                Console.WriteLine(ConsoleColor.DarkCyan, "RealSense running at {0}x{1}", W, H);

                // These options were copied from the Intel RealSense Viewer, the demo program used to connect to the camera.
                RealSenseCamera.SetOption(DepthRes, RealSenseOption.VisualPreset, 1);                 //4
                RealSenseCamera.SetOption(DepthRes, RealSenseOption.EmitterEnabled, 0);               // Enables/disables the camera's IR pattern projector. Currently turned off because it interferes with the OptiTrack cameras.
                RealSenseCamera.SetOption(DepthRes, RealSenseOption.EnableAutoExposure, 1);

                //RealSenseCamera.SetOption(DepthRes, RealSenseOption.LaserPower, 30); // Sets the power level of the IR laser emitter

                RealSenseCamera.DisableAllStreams();              // Make sure any open data streams from the camera are terminated.
                RealSenseCamera.EnableStream(DepthRes, ColorRes); // Tells the camera which streams to send back: here, the depth image in the chosen format plus the color image.
                RealSenseCamera.Start();                          // Starts the camera with the selected streams.

                Console.WriteLine(ConsoleColor.DarkCyan, "RealSense ready");

                if (Program.UseThreading)
                {
                    while (true)
                    {
                        Loop();
                    }
                }
            }
            else                 // Generate a fake point cloud instead of reading from the camera
            {
                Ready = true;

                float     Scale     = 1.0f / 500.0f;
                int       PlaneSize = 100;
                Vertex3[] Verts     = OnPointCloud(PlaneSize * PlaneSize, null, null);

                for (int y = 0; y < PlaneSize; y++)
                {
                    for (int x = 0; x < PlaneSize; x++)
                    {
                        Verts[y * PlaneSize + x] = new Vertex3(x * Scale - ((PlaneSize / 2) * Scale), y * Scale - ((PlaneSize / 2) * Scale), 0.5f);
                    }
                }

                while (true)
                {
                    OnPointCloud(Verts.Length, Verts, null);
                }
            }
        }
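InitInternal hands an OnPointCloud callback to the camera wrapper but never defines it. A minimal sketch consistent with how it is called here: with a vertex count and an optional buffer, returning a buffer of at least that size. The third parameter's type is not shown in this listing and is treated as opaque.

 static Vertex3[] OnPointCloud(int Count, Vertex3[] Verts, object FrameInfo)
 {
     // (Re)allocate the buffer on first use, as the fake-position branch
     // above relies on receiving a usable array back.
     if (Verts == null || Verts.Length < Count)
         Verts = new Vertex3[Count];

     // Hand the vertices to the renderer here, e.g. via VertsMesh.SetVertices
     // as in the Load example above.
     return Verts;
 }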
Example #23
 public void Configure(RealSenseCamera camera)
 {
     _camera = camera;
 }
Example #26
 /// <summary>
 /// Initializes this instance.
 /// </summary>
 public static void Initialize()
 {
     Camera = new RealSenseCamera();
     Log    = new Log();
 }