Example #1
        public MainWindow()
        {
            InitializeComponent();


            // Configure the network endpoint.
            ep = new IPEndPoint(IP, 9999);

            // Initialize the Kinect sensor.
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Listen for multi-source (color) data.
                _multiReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _multiReader.MultiSourceFrameArrived += MultiReader_MultiSourceFrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Start tracking!
                _sensor.Open();
            }
        }
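Neither handler subscribed above is part of this snippet. A minimal sketch of the usual hand-off, assuming the fields shown in the constructor (the body-to-face ID hand-off is the standard HD-face idiom, not code recovered from this example):

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                Body[] bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                // Hand the first tracked body's ID to the HD face source.
                foreach (Body body in bodies)
                {
                    if (body.IsTracked && !_faceSource.IsTrackingIdValid)
                    {
                        _faceSource.TrackingId = body.TrackingId;
                        break;
                    }
                }
            }
        }

        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || !frame.IsFaceTracked) return;

                // Refresh _faceAlignment in place with the latest head pose and animation units.
                frame.GetAndRefreshFaceAlignmentResult(_faceAlignment);
            }
        }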
Example #2
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data.
                _faceSource    = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
                // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader    = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived    += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel        = new FaceModel();
                _faceAlignment    = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!
                _sensor.Open();
            }
        }
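The sub source and reader mirror the main pair so two faces can be tracked at once. A sketch of how the body handler might assign the two tracking IDs (assumes a System.Linq using; not code from this example):

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                Body[] bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                // First tracked body drives _faceSource, second drives _faceSourceSub.
                var tracked = bodies.Where(b => b.IsTracked).Take(2).ToList();
                if (tracked.Count > 0 && !_faceSource.IsTrackingIdValid)
                {
                    _faceSource.TrackingId = tracked[0].TrackingId;
                }
                if (tracked.Count > 1 && !_faceSourceSub.IsTrackingIdValid)
                {
                    _faceSourceSub.TrackingId = tracked[1].TrackingId;
                }
            }
        }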
Example #3
        /// <summary>
        /// Initializes the Kinect sensor and the variables used for data acquisition
        /// </summary>
        private void Initialize()
        {
            // Get the Kinect sensor
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null)
            {
                return;
            }

            // Create a multi-source FrameReader for Body (skeleton) data from the Kinect sensor
            reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
            reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

            // Declare the source and FrameReader for detailed (HD) face tracking
            this.hdFaceFrameSource = new HighDefinitionFaceFrameSource(this.kinect);
            this.hdFaceFrameSource.TrackingIdLost += this.OnTrackingIdLost;

            this.hdFaceFrameReader = this.hdFaceFrameSource.OpenReader();
            this.hdFaceFrameReader.FrameArrived += this.OnFaceFrameArrived;

            this.faceModel     = new FaceModel();
            this.faceAlignment = new FaceAlignment();

            // Update the views
            InitializeMesh();
            UpdateMesh();

            // Start the sensor
            kinect.Open();
        }
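OnTrackingIdLost is subscribed above but not shown here; the usual reset is a one-liner (a sketch):

        private void OnTrackingIdLost(object sender, TrackingIdLostEventArgs e)
        {
            // The tracked body left the frame; clear the ID so the next tracked body is picked up.
            this.hdFaceFrameSource.TrackingId = 0;
        }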
Example #4
        static void Main(string[] args)
        {
            _sensor = KinectSensor.GetDefault();

            _worker.getSubjectID();

            if (_sensor != null)
            {
                _sensor.Open();
                Console.WriteLine("sensorOpened");
                if (_sensor.IsOpen)
                {
                    _coordinateMapper = _sensor.CoordinateMapper;
                    _bodyFrameReader  = _sensor.BodyFrameSource.OpenReader();

                    _bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;

                    _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                    _faceReader = _faceSource.OpenReader();
                    _faceReader.FrameArrived += FaceReader_FrameArrived;

                    _faceModel     = new FaceModel();
                    _faceAlignment = new FaceAlignment();
                }
            }
            // Block until the user presses Enter, then shut down.
            Console.ReadLine();

            if (_sensor != null)
            {
                _sensor.Close();
            }
        }
Example #5
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();


                _sensor.Open();

                //Added by Aditya
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Example #6
        private void InitKinect()
        {
            Size displaySize = new Size(0, 0);

            this.kinectSensor = KinectSensor.GetDefault();

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Open();

                var frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
                displaySize.Width  = frameDescription.Width;
                displaySize.Height = frameDescription.Height;

                this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

                this.faceFrameSource = new HighDefinitionFaceFrameSource(kinectSensor);
                this.faceFrameReader = this.faceFrameSource.OpenReader();

                this.UptimeText       = Properties.Resources.InitializingStatusTextFormat;
                this.currentAlignment = new FaceAlignment();
            }
            else
            {
                this.UptimeText = Properties.Resources.NoSensorFoundText;
            }

            this.kinectCanvas = new KinectCanvas(this.kinectSensor, displaySize);
        }
Example #7
        private void InitializeKinect()
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);// | FrameSourceTypes.LongExposureInfrared);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
                ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceSource.TrackingQuality = FaceAlignmentQuality.Low;
                _faceReader.FrameArrived   += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _sensor.Open();
            }
        }
Example #8
        public void update(FaceAlignment face)
        {
            currentOrientation = face.FaceOrientation.toEngineQuat().inverse();

            if (JawTracking)
            {
                float openInterpolate = Math.Min(face.AnimationUnits[FaceShapeAnimations.JawOpen] / openMaxValue, 1.0f);
                float protrusion      = neutralProtrusion.interpolate(fullyOpenProtrusion, openInterpolate);

                float leftAdditionalSlide  = 0f;
                float rightAdditionalSlide = 0f;

                //Uncomment to try left/right sliding
                //float slide = face.AnimationUnits[FaceShapeAnimations.JawSlideRight];
                //if(slide > 0)
                //{
                //    rightAdditionalSlide = 0f.interpolate(additionalOneSideProtrusion, Math.Min(slide / protrusionMaxValue, 1.0f));
                //}
                //else
                //{
                //    leftAdditionalSlide = 0f.interpolate(additionalOneSideProtrusion, Math.Min(-slide / protrusionMaxValue, 1.0f));
                //}

                leftCP.setLocation(Math.Min(protrusion + leftAdditionalSlide, 1.0f));
                rightCP.setLocation(Math.Min(protrusion + rightAdditionalSlide, 1.0f));
                movingMuscleTarget.Offset = new Vector3(0, 0f.interpolate(fullyOpenHinge, openInterpolate), 0.0f);
                movingMuscle.changeForce(70);

                //Logging.Log.Debug("Jaw pos {0} slide {1}", face.AnimationUnits[FaceShapeAnimations.JawOpen], face.AnimationUnits[FaceShapeAnimations.JawSlideRight]);
            }
        }
Example #9
        private void Page_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor                = KinectSensor.GetDefault();
            _collectedMeasurements = new List<double>();

            if (_sensor != null)
            {
                // Access the coordinate mapper only when a sensor is present.
                _coordinateMapper = _sensor.CoordinateMapper;
                _infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
                _infraredBitmap           = new WriteableBitmap(_infraredFrameDescription.Width, _infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
                camera.Source             = _infraredBitmap;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyCount  = _sensor.BodyFrameSource.BodyCount;
                _bodies     = new Body[_bodyCount];
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                _faceFrameSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceFrameReader = _faceFrameSource.OpenReader();
                _faceFrameReader.FrameArrived += FaceReader_FrameArrived;

                _irReader = _sensor.InfraredFrameSource.OpenReader();
                _irReader.FrameArrived += InfraredReader_FrameArrived;

                _sensor.Open();
            }

            _settingsVM = DevPortalVM.LoadContext(SETTINGS_FILENAME);
            DevPortalGrid.DataContext = _settingsVM;
            _devicePortalClient       = new DevPortalHelper(_settingsVM);
        }
Example #10
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor     = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;

            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
            this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel     = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.coordinateMapper = this.sensor.CoordinateMapper;

            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;


            // create the face frame source with the required face frame features and an initial tracking Id of 0
            this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

            // open the corresponding reader
            this.faceFrameReader = this.faceFrameSource.OpenReader();


            this.faceFrameResult = null;

            // wire handler for face frame arrival
            if (this.faceFrameReader != null)
            {
                this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
            }
            }

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
        }
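HdFaceReader_FrameArrived, subscribed above, typically refreshes the alignment and then redraws; a minimal sketch consistent with the fields in this example:

        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || !frame.IsFaceTracked) return;

                // Update currentFaceAlignment in place, then rebuild the mesh from it.
                frame.GetAndRefreshFaceAlignmentResult(this.currentFaceAlignment);
                this.UpdateMesh();
            }
        }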
Example #11
    void Start()
    {
        sensor     = KinectSensor.GetDefault();
        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();
        bodyReader.FrameArrived += BodyReader_FrameArrived;
        FaceFrameFeatures faceFrameFeatures =
            FaceFrameFeatures.BoundingBoxInColorSpace
            | FaceFrameFeatures.PointsInColorSpace
            | FaceFrameFeatures.BoundingBoxInInfraredSpace
            | FaceFrameFeatures.PointsInInfraredSpace
            | FaceFrameFeatures.RotationOrientation
            | FaceFrameFeatures.FaceEngagement
            | FaceFrameFeatures.Glasses
            | FaceFrameFeatures.Happy
            | FaceFrameFeatures.LeftEyeClosed
            | FaceFrameFeatures.RightEyeClosed
            | FaceFrameFeatures.LookingAway
            | FaceFrameFeatures.MouthMoved
            | FaceFrameFeatures.MouthOpen;

        FaceFrameSource = FaceFrameSource.Create(sensor, currentTrackingId, faceFrameFeatures);

        FaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        FaceFrameReader = FaceFrameSource.OpenReader();
        FaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        //CurrentFaceModel = FaceModel.Create();
        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();
    }
Example #12
        private void DrawFaceFeatures(HighDefinitionFaceFrame frame, FaceAlignment alignment, Color color)
        {
            frame.GetAndRefreshFaceAlignmentResult(alignment);
            var vertices = frame.FaceModel.CalculateVerticesForAlignment(alignment);

            if (vertices.Count > 0)
            {
                for (int index = 0; index < vertices.Count; index++)
                {
                    Ellipse ellipse = new Ellipse
                    {
                        Width  = 2.0,
                        Height = 2.0,
                        Fill   = new SolidColorBrush(color)
                    };

                    CameraSpacePoint vertex = vertices[index];
                    ColorSpacePoint  point  = sensor.CoordinateMapper.MapCameraPointToColorSpace(vertex);

                    // Skip vertices that cannot be mapped rather than aborting the whole loop.
                    if (float.IsInfinity(point.X) || float.IsInfinity(point.Y))
                    {
                        continue;
                    }

                    Canvas.SetLeft(ellipse, point.X - ellipse.Width / 2);
                    Canvas.SetTop(ellipse, point.Y - ellipse.Height / 2);

                    canvas.Children.Add(ellipse);
                }
            }
        }
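DrawFaceFeatures would be driven from an HD-face frame handler along these lines (a sketch; the handler name, the _faceAlignment field, and the canvas clearing are assumptions):

        private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || !frame.IsFaceTracked) return;

                // Redraw the feature points for the current frame.
                canvas.Children.Clear();
                DrawFaceFeatures(frame, _faceAlignment, Colors.LightGreen);
            }
        }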
Example #13
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            /*ColorFrameReader cfr = _sensor.ColorFrameSource.OpenReader();
             * fd = _sensor.ColorFrameSource.FrameDescription;
             * colordata=new byte[fd.LengthInPixels*4];
             * bitmap = new WriteableBitmap(fd.Width, fd.Height, 96, 96, PixelFormats.Bgr32, null);
             *
             * this.image.Source = bitmap;*/
            if (_sensor != null)
            {
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                //_bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                //_faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                //cfr.FrameArrived += cfr_FrameArrived;
                //_sensor.Open();
            }
        }
Example #14
    void initialize()
    {
        IsFaceModelCollectCompleted = false;
        FaceCaptureStatus           = "";
        FaceVertices = new List<CameraSpacePoint>();

        sensor = KinectSensor.GetDefault();
        if (sensor == null)
        {
            return;
        }
        sensor.Open();

        bodySource = sensor.BodyFrameSource;
        bodyReader = bodySource.OpenReader();

        hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
        hdFaceFrameReader = hdFaceFrameSource.OpenReader();

        faceModel     = FaceModel.Create();
        faceAlignment = FaceAlignment.Create();
        FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;

        faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
        faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
    }
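The two callbacks handed to CollectFaceDataAsync are not shown; a minimal sketch, assuming the Unity plugin's callback shapes (a FaceModelData parameter on success, an int error code on failure):

    void collectFaceModelCompleted(FaceModelData modelData)
    {
        // Produce the fitted face model and flag completion.
        faceModel = modelData.ProduceFaceModel();
        IsFaceModelCollectCompleted = true;
        FaceCaptureStatus           = "Face model collection completed";
    }

    void collectFaceModelFailed(int errorCode)
    {
        FaceCaptureStatus = "Face model collection failed: " + errorCode;
    }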
Example #15
    void Start()
    {  // like InitializeHDFace() above
        theGeometry = new Mesh();

        //SetViewCollectionStatus();

        sensor = KinectSensor.GetDefault();

        bodySource = sensor.BodyFrameSource;

        bodyReader = bodySource.OpenReader();

        bodyReader.FrameArrived += BodyReader_FrameArrived;

        highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);

        highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

        highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();

        highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

        CurrentFaceModel = FaceModel.Create();

        currentFaceAlignment = FaceAlignment.Create();

        sensor.Open();

        tempAus = new Dictionary<string, float>();
        actorBlendshapeNames = getBlendShapeNames(actorMesh);
    }
Example #16
        internal static void CopyToFrameToDrawingContext(this HighDefinitionFaceFrame highDefinitionFaceFrame, DrawingContext context, bool useDepthSpace = true, byte bodyIndex = 1, double pointRadius = 2F)
        {
            var faceAlignment    = new FaceAlignment();
            var coordinateMapper = highDefinitionFaceFrame.HighDefinitionFaceFrameSource.KinectSensor.CoordinateMapper;
            var brush            = BodyIndexColor.GetBrushFromBodyIndex(bodyIndex);

            highDefinitionFaceFrame.GetAndRefreshFaceAlignmentResult(faceAlignment);

            var faceModel = new FaceModel();
            var vertices  = faceModel.CalculateVerticesForAlignment(faceAlignment);

            if (vertices.Count > 0)
            {
                for (int index = 0; index < vertices.Count; index++)
                {
                    CameraSpacePoint vertex = vertices[index];
                    DepthSpacePoint  point  = coordinateMapper.MapCameraPointToDepthSpace(vertex);

                    // Skip vertices that cannot be mapped rather than aborting the whole loop.
                    if (float.IsInfinity(point.X) || float.IsInfinity(point.Y))
                    {
                        continue;
                    }

                    context.DrawEllipse(brush, null, point.GetPoint(), pointRadius, pointRadius);
                }
            }
        }
Example #17
 public void updateFace(FaceAlignment faceAlignment)
 {
     if (allowMovement && face != null)
     {
         face.update(faceAlignment);
     }
 }
Example #18
 private void Send(ulong trackingid, FaceAlignment alignment)
 {
     foreach (var au in alignment.AnimationUnits)
     {
         message = messageBuilder.BuildAUMessage(trackingid, au.Key.ToString(), au.Value);
         this.Broadcast(message);
     }
 }
Example #19
    public Server()
    {
        Form = new CustomPerPixelAlphaForm();
        FormSetProperties();
        FormDock();
        Form.Show();

        var clientBuildDirectory = Environment.CurrentDirectory + "\\..\\..\\..\\..\\..\\Reflecta.Client\\bin";
        var clientStartInfo      = new ProcessStartInfo
        {
            FileName         = clientBuildDirectory + "\\Client.exe",
            WorkingDirectory = clientBuildDirectory,
            WindowStyle      = ProcessWindowStyle.Minimized
        };

        Client = Process.Start(clientStartInfo);

        OpenPipes();

        SpeechSynthesizer = new SpeechSynthesizer();
        SpeechSynthesizer.SelectVoiceByHints(VoiceGender.Female);
        SpeechSynthesizer.SpeakStarted   += SpeechSynthesizer_SpeakStarted;
        SpeechSynthesizer.VisemeReached  += SpeechSynthesizer_VisemeReached;
        SpeechSynthesizer.SpeakCompleted += SpeechSynthesizer_SpeakCompleted;

        SpeechRecognitionEngine = new SpeechRecognitionEngine();
        SpeechRecognitionEngine.UnloadAllGrammars();
        SpeechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(KnownCommands)));
        SpeechRecognitionEngine.SpeechRecognized += SpeechRecognitionEngine_SpeechRecognized;
        SpeechRecognitionEngine.SetInputToDefaultAudioDevice();
        SpeechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

        KinectSensor = KinectSensor.GetDefault();
        KinectSensor.Open();

        BodyFrameSource = KinectSensor.BodyFrameSource;
        BodyFrameReader = BodyFrameSource.OpenReader();
        BodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
        Bodies   = null;
        BodyDESP = new DESPQuaternion[(int)MoCapKinectBone.Count];
        for (var i = 0; i < (int)MoCapKinectBone.Count; i++)
        {
            BodyDESP[i] = new DESPQuaternion();
        }

        HighDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(KinectSensor);
        HighDefinitionFaceFrameSource.TrackingQuality = FaceAlignmentQuality.High;
        HighDefinitionFaceFrameReader = HighDefinitionFaceFrameSource.OpenReader();
        HighDefinitionFaceFrameReader.FrameArrived += HighDefinitionFaceFrameReader_FrameArrived;
        FaceAlignment = new FaceAlignment();

        FaceDESP           = new DESPQuaternion();
        FaceExpressionDESP = new DESPFloat[(int)MoCapKinectFacialExpression.Count];
        for (var i = 0; i < (int)MoCapKinectFacialExpression.Count; i++)
        {
            FaceExpressionDESP[i] = new DESPFloat();
        }
    }
Example #20
        public void TestValid()
        {
            FaceModel     model             = new FaceModel();
            FaceAlignment align             = new FaceAlignment();
            HdFaceFrameResultEventArgs args = new HdFaceFrameResultEventArgs(1, model, align);

            Assert.AreEqual(model, args.FaceModel);
            Assert.AreEqual(align, args.FaceAlignment);
        }
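Since the HdFaceFrameResultEventArgs constructor (Example #27 below) rejects null arguments, a companion negative test might look like this (a sketch, assuming an NUnit-style Assert.Throws is available):

        public void TestNullModel()
        {
            FaceAlignment align = new FaceAlignment();

            // The constructor throws ArgumentNullException when faceModel is null.
            Assert.Throws<ArgumentNullException>(
                () => new HdFaceFrameResultEventArgs(1, null, align));
        }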
Example #21
 private static void StartFace()
 {
     FaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
     if (FaceFrameSource != null)
     {
         faceReader    = FaceFrameSource.OpenReader();
         faceModel     = FaceModel.Create();
         faceAlignment = FaceAlignment.Create();
         faceGeometry  = new Vector[FaceModel.VertexCount];
     }
 }
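A frame handler to fill faceGeometry from the reader opened above might look like the following (a sketch; the handler is assumed to be subscribed elsewhere, and the Vector constructor stands in for whatever math type faceGeometry uses):

 private static void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
 {
     using (var frame = e.FrameReference.AcquireFrame())
     {
         if (frame == null || !frame.IsFaceTracked) return;

         frame.GetAndRefreshFaceAlignmentResult(faceAlignment);
         var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);
         for (int i = 0; i < vertices.Count; i++)
         {
             // Copy each CameraSpacePoint into the app's vertex buffer.
             faceGeometry[i] = new Vector(vertices[i].X, vertices[i].Y, vertices[i].Z);
         }
     }
 }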
Example #22
        List<int[]> list_arr_index = new List<int[]>(); // list of arrays

        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReaderFrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // multi frame reader
                this.multiFrameReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth |
                                                                           FrameSourceTypes.Infrared);

                // Frame description used to read the IR frames
                infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;

                // Depth Frame description
                depthFrameDescription = _sensor.DepthFrameSource.FrameDescription;

                infraredRect = new Int32Rect(0, 0, infraredFrameDescription.Width, infraredFrameDescription.Height);
                depthRect    = new Int32Rect(0, 0, depthFrameDescription.Width, depthFrameDescription.Height);

                // Attach the multi-source frame event handler
                multiFrameReader.MultiSourceFrameArrived += ReaderMultiFrameArrived;

                // -----------------------------------------
                // Display settings for the IR frames
                // -----------------------------------------
                // Create WriteableBitmaps for display
                infraredBitmap = new WriteableBitmap(this.infraredFrameDescription.Width,
                                                     this.infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);


                depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width,
                                                  this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
                // Bind the WriteableBitmap to the Source of the WPF Image control
                //ColorImage.Source = this.infraredBitmap; // does not work at this point

                // start tracking
                _sensor.Open();
            }
        }
Example #23
        /// <summary>
        /// Constructor. Runs once at startup.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            //Acquire a reference to the Kinect sensor.
            this.kinect = KinectSensor.GetDefault();

            //Configure the format and reader for the color images.
            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription
                = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

            // For face rotation detection
            //this.faceFrameSource = new FaceFrameSource(this.kinect, 0, );


            if (this.kinect != null)
            {
                _bodySource = kinect.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _faceSource = new HighDefinitionFaceFrameSource(kinect);

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();
            }

            //Start the Kinect.
            //aviWriter.FrameRate = 30;
            //aviWriter.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080);
            //writer.Open(@"c:\users\abelab\desktop\log\test.avi", 1920, 1080, 30, VideoCodec.MPEG4);

            /*
             * for (int i=0; i<1347; i++)
             * {
             *      sw.Write(i + ",,,,,,");
             * }
             * sw.WriteLine();
             * for(int i=0; i<1347; i++)
             * {
             *      sw.Write("X(m),Y(m),Z(m),X(pixel),Y(pixel),,");
             * }
             * sw.WriteLine();
             */
            this.kinect.Open();
        }
Example #24
        public void StartTracking()
        {
            if (!sensor.IsAvailable)
            {
                this.AwaitSensor();
            }


            faceAlignment = new FaceAlignment();

            Console.WriteLine("Started Face Tracking.");
            this.Run();
        }
Example #25
        private void onFaceFrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            // Return if there are no face clients.
            if (!this.faceConnector.HasClients)
            {
                return;
            }

            // Retrieve face data for current frame.
            var frame = e.FrameReference.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                // Ignore untracked faces.
                if (!frame.IsTrackingIdValid)
                {
                    return;
                }
                if (!frame.IsFaceTracked)
                {
                    return;
                }

                // Record the current Unix epoch timestamp and convert it to a byte array for serialization.
                long timestamp = DateTimeOffset.Now.ToUnixTimeMilliseconds();

                // Retrieve face alignment data.
                var faceAlignment = new FaceAlignment();
                frame.GetAndRefreshFaceAlignmentResult(faceAlignment);

                // Combine the face alignment with a timestamp and tracking ID.
                Dictionary<string, object> faceJson = new Dictionary<string, object> {
                    { "Time", timestamp },
                    { "TrackingId", frame.HighDefinitionFaceFrameSource.TrackingId },
                    { "Alignment", faceAlignment },
                };

                // Send face data to clients.
                string json = JsonConvert.SerializeObject(faceJson,
                                                          new JsonSerializerSettings {
                    ContractResolver = new FaceContractResolver()
                }) + "\n";
                byte[] bytes = System.Text.Encoding.ASCII.GetBytes(json);
                this.faceConnector.Broadcast(bytes);
            }
        }
Example #26
        /// <summary>
        /// Converts the face orientation quaternion to pitch, yaw, and roll in radians
        /// </summary>
        public static void ExtractFaceRotationInRadians(this FaceAlignment faceAlignment, out float pitch, out float yaw, out float roll)
        {
            var rotQuaternion = faceAlignment.FaceOrientation;

            double x = rotQuaternion.X;
            double y = rotQuaternion.Y;
            double z = rotQuaternion.Z;
            double w = rotQuaternion.W;

            // convert face rotation quaternion to Euler angles in radians
            pitch = (float)Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z));
            yaw   = (float)Math.Asin(2 * ((w * y) - (x * z))) * -1;
            roll  = (float)Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z));
        }
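A quick usage sketch, converting the returned radians to degrees for display:

        float pitch, yaw, roll;
        faceAlignment.ExtractFaceRotationInRadians(out pitch, out yaw, out roll);

        // Radians to degrees for readable logging.
        double pitchDeg = pitch * 180.0 / Math.PI;
        double yawDeg   = yaw * 180.0 / Math.PI;
        double rollDeg  = roll * 180.0 / Math.PI;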
Example #27
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="trackingId">Tracking Id</param>
        /// <param name="faceModel">Face Model</param>
        /// <param name="faceAlignment">Face Alignment</param>
        public HdFaceFrameResultEventArgs(ulong trackingId, FaceModel faceModel, FaceAlignment faceAlignment)
        {
            if (faceModel == null)
            {
                throw new ArgumentNullException("faceModel");
            }
            if (faceAlignment == null)
            {
                throw new ArgumentNullException("faceAlignment");
            }

            this.trackingId    = trackingId;
            this.faceAlignment = faceAlignment;
            this.faceModel     = faceModel;
        }
Example #28
        /// <summary>
        /// Initialize Kinect object
        /// </summary>
        private void InitializeHDFace()
        {
            this.CurrentBuilderStatus = "Ready To Start Capture";

            this.sensor     = KinectSensor.GetDefault();
            this.bodySource = this.sensor.BodyFrameSource;
            this.bodyReader = this.bodySource.OpenReader();
            this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);
            this.faceFrameReader = faceFrameSource.OpenReader();
            this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;

            this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
            this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

            this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
            this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

            this.currentFaceModel     = new FaceModel();
            this.currentFaceAlignment = new FaceAlignment();

            this.InitializeMesh();
            this.UpdateMesh();

            this.sensor.Open();
        }
Example #29
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                /// <summary>
                /// Task to be completed by the student
                /// Initialization phase
                /// </summary>
                // Get the body frame source, body reader, and a handler for body frame events
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Get the face source, face reader, and a handler for face frame events.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;

                //Add the facial-gesture reader and its handler.
                _faceFrameSource = new FaceFrameSource(this._sensor, 0,
                                                       FaceFrameFeatures.BoundingBoxInColorSpace |
                                                       FaceFrameFeatures.FaceEngagement |
                                                       FaceFrameFeatures.Glasses |
                                                       FaceFrameFeatures.Happy |
                                                       FaceFrameFeatures.LeftEyeClosed |
                                                       FaceFrameFeatures.MouthOpen |
                                                       FaceFrameFeatures.PointsInColorSpace |
                                                       FaceFrameFeatures.RightEyeClosed
                                                       );
                _faceFrameReader = this._faceFrameSource.OpenReader();
                _faceFrameReader.FrameArrived += FaceFrameReader_FrameArrived;


                // Create the FaceModel and FaceAlignment
                _faceModel     = new FaceModel();
                _faceAlignment = new FaceAlignment();

                // Open the sensor.
                _sensor.Open();
                // Assign the multi-source reader
                multiSourceReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                multiSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            }
        }
Example #30
        public override bool StartSensor()
        {
            _bodySource = _kinect.BodyFrameSource;
            _bodyReader = _bodySource.OpenReader();
            _bodyReader.FrameArrived += BodyReader_FrameArrived;

            _hdFaceFrameSource =
                new HighDefinitionFaceFrameSource(_kinect);
            _hdFaceFrameSource.TrackingIdLost +=
                HdFaceSource_TrackingIdLost;

            _hdFaceFrameReader =
                _hdFaceFrameSource.OpenReader();
            _hdFaceFrameReader.FrameArrived +=
                HdFaceReader_FrameArrived;

            _currentFaceModel     = new FaceModel();
            _currentFaceAlignment = new FaceAlignment();

            InitializeMesh();
            UpdateMesh();

            // Text style for our jig

            _style      = new TextStyle();
            _style.Font =
                new FontDescriptor("standard.shx", false, false, 0, 0);
            _style.TextSize = 10;

            var res = base.StartSensor();

            if (res)
            {
                if (_faceModelBuilder != null)
                {
                    _faceModelBuilder.Dispose();
                }
                _faceModelBuilder =
                    _hdFaceFrameSource.OpenModelBuilder(
                        FaceModelBuilderAttributes.None
                        );
                _faceModelBuilder.BeginFaceDataCollection();
                _faceModelBuilder.CollectionCompleted +=
                    HdFaceBuilder_CollectionCompleted;
            }
            return res;
        }
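HdFaceBuilder_CollectionCompleted, subscribed above, usually produces the final model and releases the builder; a minimal sketch:

        private void HdFaceBuilder_CollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
        {
            // Produce the fitted FaceModel from the collected data, then release the builder.
            var modelData = e.ModelData;
            _currentFaceModel = modelData.ProduceFaceModel();

            _faceModelBuilder.Dispose();
            _faceModelBuilder = null;
        }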