internal static void CopyToFrameToDrawingContext(this HighDefinitionFaceFrame highDefinitionFaceFrame, DrawingContext context, bool useDepthSpace = true, byte bodyIndex = 1, double pointRadius = 2F)
        {
            var faceAlignment = new FaceAlignment();
            var coordinateMapper = highDefinitionFaceFrame.HighDefinitionFaceFrameSource.KinectSensor.CoordinateMapper;
            var brush = BodyIndexColor.GetBrushFromBodyIndex(bodyIndex);

            highDefinitionFaceFrame.GetAndRefreshFaceAlignmentResult(faceAlignment);

            // FaceModel wraps native resources, so dispose it once the vertices have been drawn.
            using (var faceModel = new FaceModel())
            {
                var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);

                for (int index = 0; index < vertices.Count; index++)
                {
                    CameraSpacePoint vertex = vertices[index];
                    DepthSpacePoint point = coordinateMapper.MapCameraPointToDepthSpace(vertex);

                    // Skip vertices that cannot be mapped for this frame instead of aborting the whole draw.
                    if (float.IsInfinity(point.X) || float.IsInfinity(point.Y))
                        continue;

                    context.DrawEllipse(brush, null, point.GetPoint(), pointRadius, pointRadius);
                }
            }
        }
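A minimal usage sketch for the extension above; the handler and the way the visual is presented are illustrative assumptions, not part of the original snippet.

        // Hypothetical caller: render the HD face points whenever a high-definition face frame arrives.
        private void OnHighDefinitionFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || !frame.IsFaceTracked)
                    return;

                var visual = new DrawingVisual();
                using (DrawingContext context = visual.RenderOpen())
                {
                    // Extension defined above: draws each face vertex mapped into depth space.
                    frame.CopyToFrameToDrawingContext(context, useDepthSpace: true, bodyIndex: 1, pointRadius: 2);
                }
                // Present 'visual' in the UI, e.g. by hosting it in a visual container
                // or rendering it into a RenderTargetBitmap.
            }
        }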
Example #2
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
                // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!        
                _sensor.Open();
            }
        }
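The two HD face sources above only start delivering frames after they are bound to body tracking ids. A sketch of the BodyReader_FrameArrived handler under that assumption (the routing logic is illustrative and assumes a using System.Linq directive, not the original project's code):

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                var bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                // Route the first two tracked bodies to the primary and secondary HD face sources.
                var tracked = bodies.Where(b => b.IsTracked).ToList();

                if (tracked.Count > 0 && !_faceSource.IsTrackingIdValid)
                    _faceSource.TrackingId = tracked[0].TrackingId;

                if (tracked.Count > 1 && !_faceSourceSub.IsTrackingIdValid)
                    _faceSourceSub.TrackingId = tracked[1].TrackingId;
            }
        }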
        public void TestValid()
        {
            FaceModel model = new FaceModel();
            FaceAlignment align = new FaceAlignment();
            HdFaceFrameResultEventArgs args = new HdFaceFrameResultEventArgs(1, model, align);

            Assert.AreEqual(model, args.FaceModel);
            Assert.AreEqual(align, args.FaceAlignment);
        }
Example #4
        private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            if (_faceModel != null)
            {
                _faceModel.Dispose();
                _faceModel = null;
            }

            GC.SuppressFinalize(this);
        }
Example #5
        private void Page_Unloaded(object sender, RoutedEventArgs e)
        {
            if (_faceModel != null)
            {
                _faceModel.Dispose();
                _faceModel = null;
            }

            GC.SuppressFinalize(this);
        }
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="trackingId">Tracking Id</param>
        /// <param name="faceModel">Face Model</param>
        /// <param name="faceAlignment">Face Alignment</param>
        public HdFaceFrameResultEventArgs(ulong trackingId, FaceModel faceModel, FaceAlignment faceAlignment)
        {
            if (faceModel == null)
                throw new ArgumentNullException("faceModel");
            if (faceAlignment == null)
                throw new ArgumentNullException("faceAlignment");

            this.trackingId = trackingId;
            this.faceAlignment = faceAlignment;
            this.faceModel = faceModel;
        }
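For reference, a sketch of the fields and read-only properties this constructor populates; the member names are inferred from the unit tests above and may differ from the actual class.

        private readonly ulong trackingId;
        private readonly FaceModel faceModel;
        private readonly FaceAlignment faceAlignment;

        /// <summary>Tracking id of the face this result belongs to</summary>
        public ulong TrackingId { get { return this.trackingId; } }

        /// <summary>Face model associated with this frame result</summary>
        public FaceModel FaceModel { get { return this.faceModel; } }

        /// <summary>Face alignment associated with this frame result</summary>
        public FaceAlignment FaceAlignment { get { return this.faceAlignment; } }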
        /// <summary>
        /// Initializes a new instance of the KinectFaceTrackingResult class from a set of Kinect face points
        /// </summary>
        public KinectFaceTrackingResult(FaceModel faceModel, FaceModel constructedFaceModel, FaceModelBuilderCollectionStatus builderStatus, FaceAlignment faceAlignment, CoordinateMapper mapper)
        {
            this.FaceModel = faceModel;
            this.ConstructedFaceModel = constructedFaceModel;
            this.BuilderStatus = builderStatus;
            this.FaceAlignment = faceAlignment;

            var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);
            this.ColorSpaceFacePoints = this.FaceBoundaryPoints(vertices, mapper);

            // Calculate the face rectangle manually from the face points
            var rectX = this.ColorSpaceFacePoints.Min(x => x.X);
            var rectWidth = this.ColorSpaceFacePoints.Max(x => x.X) - rectX;
            var rectY = this.ColorSpaceFacePoints.Min(x => x.Y);
            var rectHeight = this.ColorSpaceFacePoints.Max(x => x.Y) - rectY;

            this.FaceRect = new System.Drawing.Rectangle(rectX, rectY, rectWidth, rectHeight);
        }
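FaceBoundaryPoints is not shown here. A plausible sketch under the assumption that it maps every HD face vertex into color space and returns integer points (the real helper may select only boundary vertices); it assumes the usual System.Collections.Generic using directive.

        private List<System.Drawing.Point> FaceBoundaryPoints(IReadOnlyList<CameraSpacePoint> vertices, CoordinateMapper mapper)
        {
            var points = new List<System.Drawing.Point>();

            foreach (CameraSpacePoint vertex in vertices)
            {
                ColorSpacePoint colorPoint = mapper.MapCameraPointToColorSpace(vertex);

                // Skip vertices that cannot be mapped for this frame.
                if (float.IsInfinity(colorPoint.X) || float.IsInfinity(colorPoint.Y))
                    continue;

                points.Add(new System.Drawing.Point((int)colorPoint.X, (int)colorPoint.Y));
            }

            return points;
        }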
Example #8
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            #region Interface

            ColorRButton.IsChecked = true;
            RecordButton.IsEnabled = false;
            BodyCheckBox.IsEnabled = false;
            ColorCheckBox.IsEnabled = false;
            DepthCheckBox.IsEnabled = false;
            InfraredCheckBox.IsEnabled = false;
            FaceCheckBox.IsEnabled = false;
            AudioCheckBox.IsEnabled = false;

            #endregion

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();
                _bodies = new List<CustomBody>();

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += _bodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += _colorReader_FrameArrived;

                _depthReader = _sensor.DepthFrameSource.OpenReader();
                _depthReader.FrameArrived += _depthReader_FrameArrived;

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += _faceReader_FrameArrived;

                _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                _infraredReader.FrameArrived += _infraredReader_FrameArrived;


            }
        }
Example #9
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Example #10
        public MainPage()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Example #11
 private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
 {
     if ( faceModelBuilder != null ) {
         faceModelBuilder.Dispose();
         faceModelBuilder = null;
     }
     if ( hdFaceFrameReader != null ) {
         hdFaceFrameReader.Dispose();
         hdFaceFrameReader = null;
     }
     if ( bodyFrameReader != null ) {
         bodyFrameReader.Dispose();
         bodyFrameReader = null;
     }
     if ( faceModel!=null ) {
         faceModel.Dispose();
         faceModel = null;
     }
     if ( kinect != null ) {
         kinect.Close();
         kinect = null;
     }
 }
Example #12
        private void InitializeHDFace()
        {
            hdFaceFrameSource = new HighDefinitionFaceFrameSource( kinect );
            if ( hdFaceFrameSource==null ) {
                throw new Exception( "Cannot create HD Face Frame Source" );
            }
            hdFaceFrameReader = hdFaceFrameSource.OpenReader();
            hdFaceFrameReader.FrameArrived += hdFaceFrameReader_FrameArrived;
            faceModel = new FaceModel();
            faceAlignment = new FaceAlignment();

            FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;
            faceModelBuilder = hdFaceFrameSource.OpenModelBuilder( attributes );
            if ( faceModelBuilder==null ) {
                throw new Exception( "Cannot open Face Model Builder" );
            }
            faceModelBuilder.BeginFaceDataCollection();
            faceModelBuilder.CollectionCompleted += faceModelBuilder_CollectionCompleted;
        }
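hdFaceFrameReader_FrameArrived is referenced above but not shown. A minimal sketch that simply refreshes the alignment while a face is being tracked, assuming the field names used in the snippet:

        void hdFaceFrameReader_FrameArrived( object sender, HighDefinitionFaceFrameArrivedEventArgs e )
        {
            using ( var frame = e.FrameReference.AcquireFrame() ) {
                if ( frame == null || !frame.IsFaceTracked ) {
                    return;
                }

                // Refresh the alignment so CalculateVerticesForAlignment reflects the latest pose.
                frame.GetAndRefreshFaceAlignmentResult( faceAlignment );
            }
        }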
        private void InitializeHDFace()                 // Initializes Kinect object
        {
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.sensor.Open();
        }
 public void TestNullAlignment()
 {
     // The constructor is expected to throw ArgumentNullException here because faceAlignment is null
     // (see the guard clauses in the HdFaceFrameResultEventArgs constructor above).
     FaceModel model = new FaceModel();
     HdFaceFrameResultEventArgs args = new HdFaceFrameResultEventArgs(0, model, null);
 }
        /// <summary>
        /// Initializes the Kinect sensor and the variables used for data acquisition
        /// </summary>
        private void Initialize()
        {
            // Get the Kinect sensor
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null) return;

            // Create a FrameReader that delivers Body (skeleton) data from the Kinect sensor;
            // color is read through its own reader below
            reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
            reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

            // Declare the source and FrameReader used for high-definition face tracking
            // 1st person
            this.hdFaceFrameSource = new HighDefinitionFaceFrameSource(this.kinect);
            this.hdFaceFrameSource.TrackingIdLost += this.OnTrackingIdLost;

            this.hdFaceFrameReader = this.hdFaceFrameSource.OpenReader();
            this.hdFaceFrameReader.FrameArrived += this.OnFaceFrameArrived;

            this.faceModel = new FaceModel();
            this.faceAlignment = new FaceAlignment();

            this._colorReader = this.kinect.ColorFrameSource.OpenReader();
            this._colorReader.FrameArrived += ColorReader_FrameArrived;

            // Update the views
            InitializeMesh();
            UpdateMesh();

            // Start the sensor
            kinect.Open();
        }
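A sketch of the OnMultiSourceFrameArrived handler referenced above, assuming its only job here is to hand the first tracked body's id to the HD face source (illustrative, assumes a using System.Linq directive, not the original project's code):

        private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var multiFrame = e.FrameReference.AcquireFrame();
            if (multiFrame == null) return;

            using (var bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null) return;

                var bodies = new Body[bodyFrame.BodyCount];
                bodyFrame.GetAndRefreshBodyData(bodies);

                // Bind the HD face source to the first tracked body so face frames start arriving.
                var body = bodies.FirstOrDefault(b => b.IsTracked);
                if (body != null && !hdFaceFrameSource.IsTrackingIdValid)
                {
                    hdFaceFrameSource.TrackingId = body.TrackingId;
                }
            }
        }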
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
            Console.Write("\n\n******************************************************\n" + "Command, Issued_TS, Checked_TS, Done_TS, Errors\n");
        }
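InitializeMesh and UpdateMesh are not included in this listing. A minimal UpdateMesh sketch, assuming a WPF MeshGeometry3D field named theGeometry whose Positions collection was pre-sized by InitializeMesh (the field name is an assumption):

        private void UpdateMesh()
        {
            // Recompute the deformed vertex positions for the current alignment and push them
            // into the mesh. Z is negated so the face looks toward the camera in the 3D view.
            var vertices = this.currentFaceModel.CalculateVerticesForAlignment(this.currentFaceAlignment);

            for (int i = 0; i < vertices.Count; i++)
            {
                CameraSpacePoint vertex = vertices[i];
                this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);
            }
        }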
        /// <summary>
        /// This event fires when the face capture operation is completed
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceBuilder_CollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            var modelData = e.ModelData;

            this.currentFaceModel = modelData.ProduceFaceModel();

            this.faceModelBuilder.Dispose();
            this.faceModelBuilder = null;

            this.CurrentBuilderStatus = "Capture Complete";
        }
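The builder disposed above must have been opened earlier. A start-capture sketch under the same field names, similar to the OpenModelBuilder pattern in Example #12; the method itself is an assumption, not code from the original project.

        private void StartCapture()
        {
            this.CurrentBuilderStatus = "Capturing...";

            // Open a model builder on the HD face source and begin collecting frames;
            // HdFaceBuilder_CollectionCompleted (above) fires once enough views are captured.
            this.faceModelBuilder = this.highDefinitionFaceFrameSource.OpenModelBuilder(FaceModelBuilderAttributes.None);
            this.faceModelBuilder.CollectionCompleted += this.HdFaceBuilder_CollectionCompleted;
            this.faceModelBuilder.BeginFaceDataCollection();
        }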
 /// <summary>
 /// Dispose based on whether or not managed or native resources should be freed
 /// </summary>
 /// <param name="disposing">Set to true to free both native and managed resources, false otherwise</param>
 protected virtual void Dispose(bool disposing)
 {
     if (disposing)
     {
         if (this.currentFaceModel != null)
         {
             this.currentFaceModel.Dispose();
             this.currentFaceModel = null;
         }
     }
 }
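A sketch of the public Dispose method that usually accompanies this pattern (assumed, not shown in the snippet):

 public void Dispose()
 {
     // Free managed resources and suppress finalization, per the standard dispose pattern.
     this.Dispose(true);
     GC.SuppressFinalize(this);
 }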
Example #19
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect hd face projected to rgb");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile<VertexShader>(device, "ProjectedHdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "ProjectedHdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicRgbSpaceFaceStructuredBuffer faceRgbBuffer = new DynamicRgbSpaceFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            bool uploadColor = false;
            ColorRGBAFrameData currentData = null;
            DynamicColorRGBATexture colorTexture = new DynamicColorRGBATexture(device);
            KinectSensorColorRGBAFrameProvider colorProvider = new KinectSensorColorRGBAFrameProvider(sensor);
            colorProvider.FrameReceived += (sender, args) => { currentData = args.FrameData; uploadColor = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    var vertRgb = new ColorSpacePoint[vertices.Length];
                    sensor.CoordinateMapper.MapCameraPointsToColorSpace(vertices, vertRgb);

                    faceRgbBuffer.Copy(context, vertRgb);
                    doUpload = false;
                }

                if (uploadColor)
                {
                    colorTexture.Copy(context, currentData);
                    uploadColor = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);
                context.RenderTargetStack.Push(swapChain);

                context.Context.Rasterizer.State = device.RasterizerStates.BackCullSolid;
                context.Context.OutputMerger.BlendState = device.BlendStates.Disabled;
                device.Primitives.ApplyFullTri(context, colorTexture.ShaderView);
                device.Primitives.FullScreenTriangle.Draw(context);

                if (hdFaceProcessor.IsValid)
                {
                    context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;
                    context.Context.OutputMerger.BlendState = device.BlendStates.AlphaBlend;
                    context.Context.VertexShader.SetShaderResource(0, faceRgbBuffer.ShaderView);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    // Attach the index buffer; no vertex buffer or topology is bound since the
                    // vertex shader fetches vertex data from the structured buffer
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);

                }

                context.RenderTargetStack.Pop();

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            colorProvider.Dispose();
            colorTexture.Dispose();

            faceIndexBuffer.Dispose();
            faceRgbBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            sensor.Close();
        }
Example #20
        /// <summary>
        /// This event fires when the face capture operation is completed
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void HdFaceBuilder_CollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            Log.LogMessage("FaceTracking complete");
            if (OnAskIfCaptured != null)
                OnAskIfCaptured(true, true, true, true, true);

            var modelData = e.ModelData;

            this.currentFaceModel = modelData.ProduceFaceModel();

            this.faceModelBuilder.Dispose();
            this.faceModelBuilder = null;

            // The Kinect is done preparing here.
        }
Example #21
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        public void InitializeHDFace()
        {
            this.sensor = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.highDefinitionFaceFrameSource.TrackingIdLost += (x, y) => Log.LogMessage("Lost tracking id " + y.TrackingId);

            this.currentFaceModel = new FaceModel();

            this.currentFaceAlignment = new FaceAlignment();

            this.sensor.Open();
        }
        /// <summary>
        /// Handles face frame updates
        /// </summary>
        private void FaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            ulong? newTrackingId = null;

            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    if (frame.IsTrackingIdValid && frame.IsFaceTracked)
                    {
                        frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);
                        this.faceModel = frame.FaceModel;
                        newTrackingId = frame.TrackingId;
                    }
                }
            }

            if (this.Processors.Any(x => x.RequiresFaceModelBuilder) && newTrackingId.HasValue && this.currentTrackingId != newTrackingId)
            {
                lock (this.processFaceModelMutex)
                {
                    this.currentTrackingId = newTrackingId;
                    this.faceModel = null;
                    this.constructedFaceModel = null;
                    this.DisposeFaceModelBuilder();
                    this.fmb = this.faceSource.OpenModelBuilder(FaceModelBuilderAttributes.HairColor | FaceModelBuilderAttributes.SkinColor);
                    this.fmb.BeginFaceDataCollection();
                    this.fmb.CollectionCompleted += this.FaceModelBuilderCollectionCompleted;
                }
            }

            lock (this)
            {
                this.faceReady = true;
                this.StartWorkerIfReady();
            }
        }
        /// <summary>
        /// Called when face model collection is successful
        /// </summary>
        private void FaceModelBuilderCollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            // Unfortunately, running this in a background thread seems to cause frequent crashes from
            // the kinect face library.  I wish there was a better way...
            lock (this.processFaceModelMutex)
            {
                if (this.fmb == null)
                    return;

                this.constructionInProcess = true;

                this.fmb.CollectionCompleted -= this.FaceModelBuilderCollectionCompleted;
                this.constructedFaceModel = e.ModelData.ProduceFaceModel();
                this.DisposeFaceModelBuilder();

                this.constructionInProcess = false;
            }
        }
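DisposeFaceModelBuilder is referenced by both handlers above but not shown. A sketch consistent with the fields used here (assumed implementation):

        private void DisposeFaceModelBuilder()
        {
            if (this.fmb == null)
                return;

            // Detach the handler before disposing so a late CollectionCompleted cannot fire on a disposed builder.
            this.fmb.CollectionCompleted -= this.FaceModelBuilderCollectionCompleted;
            this.fmb.Dispose();
            this.fmb = null;
        }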
Example #24
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            RenderForm form = new RenderForm("Kinect Simple hd face sample");

            RenderDevice device = new RenderDevice(SharpDX.Direct3D11.DeviceCreationFlags.BgraSupport | DeviceCreationFlags.Debug);
            RenderContext context = new RenderContext(device);
            DX11SwapChain swapChain = DX11SwapChain.FromHandle(device, form.Handle);

            VertexShader vertexShader = ShaderCompiler.CompileFromFile <VertexShader>(device, "HdFaceView.fx", "VS");
            PixelShader pixelShader = ShaderCompiler.CompileFromFile<PixelShader>(device, "HdFaceView.fx", "PS");

            HdFaceIndexBuffer faceIndexBuffer = new HdFaceIndexBuffer(device, 1);
            DynamicHdFaceStructuredBuffer faceVertexBuffer = new DynamicHdFaceStructuredBuffer(device, 1);

            KinectSensor sensor = KinectSensor.GetDefault();
            sensor.Open();

            cbCamera camera = new cbCamera();
            camera.Projection = Matrix.PerspectiveFovLH(1.57f*0.5f, 1.3f, 0.01f, 100.0f);
            camera.View = Matrix.Translation(0.0f, 0.0f, 0.5f);

            camera.Projection.Transpose();
            camera.View.Transpose();

            ConstantBuffer<cbCamera> cameraBuffer = new ConstantBuffer<cbCamera>(device);
            cameraBuffer.Update(context, ref camera);

            bool doQuit = false;
            bool doUpload = false;

            KinectBody[] bodyFrame = null;
            KinectSensorBodyFrameProvider provider = new KinectSensorBodyFrameProvider(sensor);

            form.KeyDown += (sender, args) => { if (args.KeyCode == Keys.Escape) { doQuit = true; } };

            FaceModel currentFaceModel = new FaceModel();
            FaceAlignment currentFaceAlignment = new FaceAlignment();

            SingleHdFaceProcessor hdFaceProcessor = new SingleHdFaceProcessor(sensor);
            hdFaceProcessor.HdFrameReceived += (sender, args) => { currentFaceModel = args.FaceModel; currentFaceAlignment = args.FaceAlignment; doUpload = true; };

            provider.FrameReceived += (sender, args) =>
            {
                bodyFrame = args.FrameData;
                var body = bodyFrame.TrackedOnly().ClosestBodies().FirstOrDefault();
                if (body != null)
                {
                    hdFaceProcessor.AssignBody(body);
                }
                else
                {
                    hdFaceProcessor.Suspend();
                }
            };

            context.Context.Rasterizer.State = device.RasterizerStates.WireFrame;

            RenderLoop.Run(form, () =>
            {
                if (doQuit)
                {
                    form.Dispose();
                    return;
                }

                if (doUpload)
                {
                    var vertices = currentFaceModel.CalculateVerticesForAlignment(currentFaceAlignment).ToArray();
                    faceVertexBuffer.Copy(context, vertices);
                    doUpload = false;
                }

                context.Context.ClearRenderTargetView(swapChain.RenderView, SharpDX.Color.Black);

                if (hdFaceProcessor.IsValid)
                {
                    context.RenderTargetStack.Push(swapChain);
                    context.Context.VertexShader.SetShaderResource(0, faceVertexBuffer.ShaderView);
                    context.Context.VertexShader.SetConstantBuffer(0, cameraBuffer.Buffer);

                    //Draw lines
                    context.Context.PixelShader.Set(pixelShader);
                    context.Context.VertexShader.Set(vertexShader);

                    // Attach the index buffer; no vertex buffer or topology is bound since the
                    // vertex shader fetches vertex data from the structured buffer
                    faceIndexBuffer.AttachWithLayout(context);
                    faceIndexBuffer.Draw(context, 1);
                    context.RenderTargetStack.Pop();
                }

                swapChain.Present(0, SharpDX.DXGI.PresentFlags.None);
            });

            swapChain.Dispose();
            context.Dispose();
            device.Dispose();

            cameraBuffer.Dispose();
            faceIndexBuffer.Dispose();
            faceVertexBuffer.Dispose();

            provider.Dispose();
            pixelShader.Dispose();
            vertexShader.Dispose();

            hdFaceProcessor.Dispose();
            sensor.Close();
        }
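The cbCamera type used above is defined elsewhere in the sample. A plausible sketch, assuming it is a sequential-layout constant-buffer struct holding the projection and view matrices that Main transposes before uploading (requires System.Runtime.InteropServices):

        [StructLayout(LayoutKind.Sequential)]
        public struct cbCamera
        {
            // SharpDX matrices are row-major; Main transposes them for HLSL consumption.
            public SharpDX.Matrix Projection;
            public SharpDX.Matrix View;
        }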
        /// <summary>
        /// Handles the event raised when the FaceModelBuilder has finished constructing the model
        /// </summary>
        private void OnModelBuilderCollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            var modelData = e.ModelData;
            this.faceModel = modelData.ProduceFaceModel();

            // Update the mesh
            UpdateMesh();

            // Also refresh the SkinColor and HairColor values
            this.SkinColor = UIntToColor(this.faceModel.SkinColor);
            this.HairColor = UIntToColor(this.faceModel.HairColor);

            // Refresh the builder status strings
            this.FaceModelBuilderStatus = GetCollectionStatus(((FaceModelBuilder)sender).CollectionStatus);
            this.FaceModelCaptureStatus = ((FaceModelBuilder)sender).CaptureStatus.ToString();

            this.faceModelBuilder.Dispose();
            this.faceModelBuilder = null;
        }
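UIntToColor and GetCollectionStatus are project helpers not shown in this listing. A sketch of UIntToColor under the assumption that FaceModel.SkinColor and HairColor are packed as 0xAARRGGBB and that WPF's System.Windows.Media.Color is the target type:

        private Color UIntToColor(uint color)
        {
            // Unpack the four channels of a packed ARGB value into a WPF Color.
            byte a = (byte)(color >> 24);
            byte r = (byte)(color >> 16);
            byte g = (byte)(color >> 8);
            byte b = (byte)color;

            return Color.FromArgb(a, r, g, b);
        }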
Example #26
        void faceModelBuilder_CollectionCompleted( object sender, FaceModelBuilderCollectionCompletedEventArgs e )
        {
            var modelData = e.ModelData;
            faceModel = modelData.ProduceFaceModel();
            produced = true;

            faceModelBuilder.Dispose();
            faceModelBuilder = null;
        }