protected virtual void OnFrameSampleAcquired(VideoCaptureSample sample)
        {
            lock (latestImageBytesLockObject)
            {
                //When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
                //You can reuse this byte[] until you need to resize it (for whatever reason).
                if (_latestImageBytes == null || _latestImageBytes.Length < sample.dataLength)
                {
                    _latestImageBytes = new byte[sample.dataLength];
                }
                sample.CopyRawImageDataIntoBuffer(_latestImageBytes);
            }

            float[] cameraToWorldMatrixAsFloat;
            if (sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat) == false)
            {
                sample.Dispose();
                return;
            }

            float[] projectionMatrixAsFloat;
            if (sample.TryGetProjectionMatrix(out projectionMatrixAsFloat) == false)
            {
                sample.Dispose();
                return;
            }

            CameraIntrinsics camIntrinsics = sample.GetCameraIntrinsics();

            // Right now we pass these across the pipe as float arrays and then convert them back into UnityEngine.Matrix4x4 values using a utility method.
            projectionMatrix    = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
            cameraToWorldMatrix = LocatableCameraUtils.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);

            sample.Dispose();

            didUpdateThisFrame = true;
            didUpdateImageBufferInCurrentFrame = true;

            if (hasInitEventCompleted && frameMatAcquired != null)
            {
                Mat mat = new Mat(cameraParams.cameraResolutionHeight, cameraParams.cameraResolutionWidth, CvType.CV_8UC4);
                Utils.copyToMat <byte> (latestImageBytes, mat);

                if (_rotate90Degree)
                {
                    Mat rotatedFrameMat = new Mat(cameraParams.cameraResolutionWidth, cameraParams.cameraResolutionHeight, CvType.CV_8UC4);
                    Core.rotate(mat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
                    mat.Dispose();

                    FlipMat(rotatedFrameMat, _flipVertical, _flipHorizontal);

                    frameMatAcquired.Invoke(rotatedFrameMat, projectionMatrix, cameraToWorldMatrix, camIntrinsics);
                }
                else
                {
                    FlipMat(mat, _flipVertical, _flipHorizontal);

                    frameMatAcquired.Invoke(mat, projectionMatrix, cameraToWorldMatrix, camIntrinsics);
                }
            }
        }
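The two matrix conversions above go through LocatableCameraUtils.ConvertFloatArrayToMatrix4x4. As a rough sketch only (assuming the 16 floats arrive in column-major order; the real utility may also apply coordinate-system fix-ups), such a conversion could look like this:

        // Hypothetical sketch, not the actual LocatableCameraUtils code:
        // build a UnityEngine.Matrix4x4 from a 16-element, column-major float array.
        private static Matrix4x4 FloatArrayToMatrix4x4(float[] m)
        {
            Matrix4x4 mat = new Matrix4x4();
            for (int col = 0; col < 4; col++)
            {
                for (int row = 0; row < 4; row++)
                {
                    mat[row, col] = m[col * 4 + row];
                }
            }
            return mat;
        }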
Example No. 2
        /// <summary>
        /// Returns the squared ratio of the distance from the specified point to the principal point over (focal length / 2).
        /// </summary>
        /// <param name="cameraInfo"></param>
        /// <param name="x"></param>
        /// <param name="y"></param>
        /// <returns></returns>
        public static double GetRatioSqOfPrincipalToFocal(CameraIntrinsics cameraInfo, double x, double y)
        {
            double dx = (x + 0.5 - cameraInfo.PrincipalPointX) / (cameraInfo.FocalLengthX / 2);
            double dy = (y + 0.5 - cameraInfo.PrincipalPointY) / (cameraInfo.FocalLengthY / 2);

            return(dx * dx + dy * dy);
        }
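As used later in GetSkeletonReliability (Example No. 6), the returned squared ratio is typically pushed through a soft cut-off so that joints projected far from the principal point get a lower weight. A minimal usage sketch with hypothetical inputs:

        // Hypothetical usage: x and y are the joint coordinates passed to the helper,
        // cameraInfo is the Kinect CameraIntrinsics of the corresponding camera.
        double v          = Utility.GetRatioSqOfPrincipalToFocal(cameraInfo, x, y);
        double edgeWeight = 1.0 / (1.0 + Math.Pow(v, 4)); // ~1 near the principal point, approaches 0 toward the edges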
Example No. 3
        private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            _depthCameraIntrinsics = e.CameraIntrinsics;

            var bitmap = e.GetDisplayableBitmap();

            bitmap = Interlocked.Exchange(ref _depthBackBuffer, bitmap);
            bitmap?.Dispose();

#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed

            /*DepthOutput.Dispatcher.RunAsync(
             *  Windows.UI.Core.CoreDispatcherPriority.Normal,
             *  async () =>
             *  {
             *      if (Interlocked.CompareExchange(ref _isRenderingDepth, 1, 0) == 0)
             *      {
             *          try
             *          {
             *              SoftwareBitmap availableFrame = null;
             *              while ((availableFrame = Interlocked.Exchange(ref _depthBackBuffer, null)) != null)
             *              {
             *                  await ((SoftwareBitmapSource)DepthOutput.Source).SetBitmapAsync(availableFrame);
             *                  availableFrame.Dispose();
             *              }
             *          }
             *          finally
             *          {
             *              Interlocked.Exchange(ref _isRenderingDepth, 0);
             *          }
             *      }
             *  });*/
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
        }
Example No. 4
    void Start()
    {
        int width = 0, height = 0;

        _Sensor = KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            _Mapper           = _Sensor.CoordinateMapper;
            _CameraIntrinsics = _Mapper.GetDepthCameraIntrinsics();

            var frameDesc = _Sensor.DepthFrameSource.FrameDescription;
            width  = frameDesc.Width;
            height = frameDesc.Height;
            // Downsample to lower resolution
            _TrianglesTemplate = CreateMesh(frameDesc.Width / _DownsampleSize, frameDesc.Height / _DownsampleSize);

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }
        // Must be greater than 0, less than or equal to 2048, and a multiple of 4.
        _Buffer = new ComputeBuffer(_TrianglesTemplate.Length / 3, 60);

        _Shader.SetInt("width", width);
        _Shader.SetInt("height", height);
        _Shader.SetInt("downSampleSize", _DownsampleSize);
        _Shader.SetFloat("cx", _CameraIntrinsics.PrincipalPointX);
        _Shader.SetFloat("cy", _CameraIntrinsics.PrincipalPointY);
        _Shader.SetFloat("fx", _CameraIntrinsics.FocalLengthX);
        _Shader.SetFloat("fy", _CameraIntrinsics.FocalLengthY);
    }
Example No. 5
        private void OnCameraIntrinsicsGot(CameraIntrinsics cameraIntrinsics, VideoEncodingProperties property)
        {
            if (cameraIntrinsics == null)
            {
                Debug.LogError("Getting the CameraIntrinsics object failed.");
                return;
            }


            double calculatedFrameRate = (double)property.FrameRate.Numerator / (double)property.FrameRate.Denominator;

            String result = "\n" + "=============================================";

            result += "\n" + "==== Size: " + property.Width + "x" + property.Height + " FrameRate: " + (int)Math.Round(calculatedFrameRate) + "====";
            result += "\n" + "FocalLength: " + cameraIntrinsics.FocalLength;
            result += "\n" + "ImageHeight: " + cameraIntrinsics.ImageHeight;
            result += "\n" + "ImageWidth: " + cameraIntrinsics.ImageWidth;
            result += "\n" + "PrincipalPoint: " + cameraIntrinsics.PrincipalPoint;
            result += "\n" + "RadialDistortion: " + cameraIntrinsics.RadialDistortion;
            result += "\n" + "TangentialDistortion: " + cameraIntrinsics.TangentialDistortion;
            result += "\n" + "=============================================";

            Debug.Log(result);

            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
            {
                ResultText.text += result;
            }, false);
        }
Example No. 6
        /// <summary>
        /// Computes the reliability value (excluding variance) of the skeleton coordinates.
        /// </summary>
        /// <param name="prevFrame"></param>
        /// <param name="nextFrame"></param>
        /// <param name="prevJoints"></param>
        /// <param name="nextJoints"></param>
        /// <param name="time"></param>
        /// <param name="cameraInfo"></param>
        /// <returns></returns>
        public double GetSkeletonReliability(MotionData prevFrame, MotionData nextFrame, Dictionary <JointType, Joint> prevJoints, Dictionary <JointType, Joint> nextJoints, DateTime time,
                                             CameraIntrinsics cameraInfo)
        {
            double periodAfter  = (time - prevFrame.TimeStamp).TotalSeconds;
            double periodBefore = (nextFrame.TimeStamp - time).TotalSeconds;
            double weightPeriod = Math.Exp(-periodAfter / 0.2) + Math.Exp(-periodBefore / 0.2);

            if (prevJoints == null || prevJoints.Count == 0)
            {
                return(0);
            }
            if (nextJoints == null || nextJoints.Count == 0)
            {
                return(0);
            }
            double prevEdge = 1;
            double nextEdge = 1;

            if (prevJoints.Count > 0)
            {
                prevEdge = prevJoints.Values.Select(p => Utility.GetRatioSqOfPrincipalToFocal(cameraInfo, p.Position.X, p.Position.Y)).Select(v => 1.0 / (1.0 + Math.Pow(v, 4))).Average();
            }
            if (nextJoints.Count > 0)
            {
                nextEdge = nextJoints.Values.Select(p => Utility.GetRatioSqOfPrincipalToFocal(cameraInfo, p.Position.X, p.Position.Y)).Select(v => 1.0 / (1.0 + Math.Pow(v, 4))).Average();
            }
            return(weightPeriod * Math.Sqrt(prevEdge * nextEdge));
        }
Example No. 7
        private void OnCameraIntrinsicsGot(CameraIntrinsics cameraIntrinsics, VideoEncodingProperties property)
        {
            if (cameraIntrinsics == null)
            {
                Debug.LogError("Getting the CameraIntrinsics object failed.");
                return;
            }

            // When building the application for HoloLens, uncomment the following code block in Visual Studio.

            /*
             * double calculatedFrameRate = (double)property.FrameRate.Numerator / (double)property.FrameRate.Denominator;
             *
             * String result = "\n" + "=============================================";
             * result += "\n" + "==== Size: " + property.Width + "x" + property.Height + " FrameRate: " + (int)Math.Round(calculatedFrameRate) + "====";
             * result += "\n" + "FocalLength: " + cameraIntrinsics.FocalLength;
             * result += "\n" + "ImageHeight: " + cameraIntrinsics.ImageHeight;
             * result += "\n" + "ImageWidth: " + cameraIntrinsics.ImageWidth;
             * result += "\n" + "PrincipalPoint: " + cameraIntrinsics.PrincipalPoint;
             * result += "\n" + "RadialDistortion: " + cameraIntrinsics.RadialDistortion;
             * result += "\n" + "TangentialDistortion: " + cameraIntrinsics.TangentialDistortion;
             * result += "\n" + "=============================================";
             *
             * Debug.Log(result);
             *
             * UnityEngine.WSA.Application.InvokeOnAppThread(() =>
             * {
             *  ResultText.text += result;
             * }, false);
             */
        }
Example No. 8
        private void ColorReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            _colorCameraIntrinsics = e.CameraIntrinsics;
            var bitmap = e.GetDisplayableBitmap(BitmapPixelFormat.Bgra8);

            bitmap = Interlocked.Exchange(ref _colorBackBuffer, bitmap);
            bitmap?.Dispose();

#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
            ColorOutput.Dispatcher.RunAsync(
                Windows.UI.Core.CoreDispatcherPriority.Normal,
                async () =>
            {
                if (Interlocked.CompareExchange(ref _isRenderingColor, 1, 0) == 0)
                {
                    try
                    {
                        SoftwareBitmap availableFrame = null;
                        while ((availableFrame = Interlocked.Exchange(ref _colorBackBuffer, null)) != null)
                        {
                            await ((SoftwareBitmapSource)ColorOutput.Source).SetBitmapAsync(availableFrame);
                            availableFrame.Dispose();
                        }
                    }
                    finally
                    {
                        Interlocked.Exchange(ref _isRenderingColor, 0);
                    }
                }
            });
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
        }
Example No. 9
    void Start()
    {
        _Sensor = KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            _Mapper           = _Sensor.CoordinateMapper;
            _CameraIntrinsics = _Mapper.GetDepthCameraIntrinsics();

            var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

            // Downsample to lower resolution
            CreateMesh(frameDesc.Width / _DownsampleSize, frameDesc.Height / _DownsampleSize);

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }

        // Initialize parameters
        colmap = new float[512];
        rowmap = new float[424];

        for (int i = 0; i < 512; i++)
        {
            colmap[i] = (i - _CameraIntrinsics.PrincipalPointX + 0.5f) / _CameraIntrinsics.FocalLengthX;
        }
        for (int i = 0; i < 424; i++)
        {
            rowmap[i] = (i - _CameraIntrinsics.PrincipalPointY + 0.5f) / _CameraIntrinsics.FocalLengthY;
        }
    }
Example No. 10
        public void CalibrateCV(ChessBoard cb, out CameraIntrinsics intr)
        {
            Matrix mat, dist;

            CalibrateCV(cb, out mat, out dist);
            intr        = new CameraIntrinsics(mat);
            intr.CVDIST = dist;
        }
Example No. 11
 public void Write(CameraIntrinsics c)
 {
     Write(c.FocalLength);
     Write(c.PrincipalPoint);
     Write(c.Skew);
     Write(c.K1); Write(c.K2); Write(c.K3);
     Write(c.P1); Write(c.P2);
 }
Example No. 12
        /// <summary>
        /// For DEBUG purposes only. Implementation / Output may change in the future.
        /// </summary>
        public void FillBodies(IEnumerable <Body> bodies, CameraIntrinsics cameraIntrinsics, Func <Vector3, Vector3> coordinateTransformation)
        {
            Children.Clear();

            if (bodies == null)
            {
                return;
            }

            foreach (var body in bodies.Where(b => b.IsTracked))
            {
                var brush  = new SolidColorBrush(Colors.Green);
                var xRatio = ActualWidth / cameraIntrinsics.FrameWidth;
                var yRatio = ActualHeight / cameraIntrinsics.FrameHeight;

                //create skeleton
                foreach (var bone in body.CreateSkeleton().Where(bone => bone.TrackingState == TrackingState.Tracked))
                {
                    var colorSpace = coordinateTransformation(bone.Joint1.Position);

                    /*
                     * var origColorFramePoint = cameraIntrinsics.OriginalIntrinsics.ProjectOntoFrame(colorSpace);
                     * var distortedOrig = cameraIntrinsics.OriginalIntrinsics.DistortPoint(origColorFramePoint);
                     */
                    var colorFramePoint  = cameraIntrinsics.ProjectOntoFrame(colorSpace);
                    var unprojectedPoint = cameraIntrinsics.UnprojectFromFrame(colorFramePoint, colorSpace.Z);

                    var line = new Line();
                    line.StrokeThickness = 4;
                    line.Stroke          = brush;

                    line.X1 = colorFramePoint.X * xRatio;
                    line.Y1 = colorFramePoint.Y * yRatio;

                    colorSpace      = coordinateTransformation(bone.Joint2.Position);
                    colorFramePoint = cameraIntrinsics.ProjectOntoFrame(colorSpace);

                    line.X2 = colorFramePoint.X * xRatio;
                    line.Y2 = colorFramePoint.Y * yRatio;

                    Children.Add(line);
                }

                // track hands
                TrackHand(body.Joints[JointType.HandRight], body.HandStateRight, cameraIntrinsics, coordinateTransformation, xRatio, yRatio);
                TrackHand(body.Joints[JointType.HandLeft], body.HandStateLeft, cameraIntrinsics, coordinateTransformation, xRatio, yRatio);

                // clipped edges

                /*
                 * DrawClipEdge(body.ClippedEdges & FrameEdges.Top);
                 * DrawClipEdge(body.ClippedEdges & FrameEdges.Bottom);
                 * DrawClipEdge(body.ClippedEdges & FrameEdges.Left);
                 * DrawClipEdge(body.ClippedEdges & FrameEdges.Right);
                 */
            }
        }
Example No. 13
        public void Apply(CameraIntrinsics intr)
        {
            intr.fx *= .95;
            intr.fy *= .95;

            intr.cx = intr.PictureSize.Width / 2;
            intr.cy = intr.PictureSize.Height / 2;

            intr.SetDistortionsZero();
        }
Example No. 14
        public void CodeCameraIntrinsics(ref CameraIntrinsics v)
        {
            var    focalLength = v.FocalLength; var principalPoint = v.PrincipalPoint;
            double skew = v.Skew, k1 = v.K1, k2 = v.K2, k3 = v.K3, p1 = v.P1, p2 = v.P2;

            CodeV2d(ref focalLength); CodeV2d(ref principalPoint);
            CodeDouble(ref skew);
            CodeDouble(ref k1); CodeDouble(ref k2); CodeDouble(ref k3);
            CodeDouble(ref p1); CodeDouble(ref p2);
        }
Example No. 15
    private void FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        var mediaframereference = sender.TryAcquireLatestFrame();

        if (mediaframereference != null)
        {
            test = 2;
            var videomediaframe     = mediaframereference?.VideoMediaFrame;
            var softwarebitmap      = videomediaframe?.SoftwareBitmap;
            CameraIntrinsics camerI = videomediaframe?.CameraIntrinsics;
            if (camerI != null)
            {
                var imageheight = videomediaframe.CameraIntrinsics.ImageHeight;
                var imagewidth  = videomediaframe.CameraIntrinsics.ImageWidth;

                Globals.maxdepth = imageheight;
                Globals.mindepth = imagewidth;
            }
            Globals.mindepth = 9.8f;
            if (softwarebitmap != null)
            {
                softwarebitmap = SDKTemplate.FrameRenderer.ConvertToDisplayableImage(videomediaframe);

                //softwarebitmap = SoftwareBitmap.Convert(softwarebitmap, BitmapPixelFormat.Rgba8, BitmapAlphaMode.Premultiplied);
                int w = softwarebitmap.PixelWidth;
                int h = softwarebitmap.PixelHeight;

                if (bytes == null)
                {
                    bytes = new byte[w * h * 4];
                }
                softwarebitmap.CopyToBuffer(bytes.AsBuffer());
                softwarebitmap.Dispose();
                UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                    if (tex == null)
                    {
                        tex = new Texture2D(w, h, TextureFormat.RGBA32, false);
                        GetComponent <Renderer>().material.mainTexture = tex;
                    }
                    for (int i = 0; i < bytes.Length / 4; ++i)
                    {
                        // Swap BGRA -> RGBA: keep the original blue byte before it is overwritten.
                        byte b           = bytes[i * 4];
                        bytes[i * 4 + 0] = bytes[i * 4 + 2]; // R
                        bytes[i * 4 + 2] = b;                // B
                        bytes[i * 4 + 3] = 255;              // opaque alpha
                    }

                    tex.LoadRawTextureData(bytes);
                    tex.Apply();
                }, true);
            }
            mediaframereference.Dispose();
        }
    }
Example No. 16
        public void CodeCameraIntrinsics(ref CameraIntrinsics v)
        {
            var    focalLength = default(V2d); var principalPoint = default(V2d);
            double skew = 0.0, k1 = 0.0, k2 = 0.0, k3 = 0.0, p1 = 0.0, p2 = 0.0;

            CodeV2d(ref focalLength); CodeV2d(ref principalPoint);
            CodeDouble(ref skew);
            CodeDouble(ref k1); CodeDouble(ref k2); CodeDouble(ref k3);
            CodeDouble(ref p1); CodeDouble(ref p2);
            v = new CameraIntrinsics(focalLength, principalPoint, skew, k1, k2, k3, p1, p2);
        }
Example No. 17
        public CameraIntrinsics GetImageIntrinsics(IntPtr cameraHandle)
        {
            IntPtr cameraIntrinsicsHandle = IntPtr.Zero;

            ExternApi.ArCameraIntrinsics_create(m_NativeSession.SessionHandle, ref cameraIntrinsicsHandle);
            ExternApi.ArCamera_getImageIntrinsics(m_NativeSession.SessionHandle, cameraHandle, cameraIntrinsicsHandle);
            CameraIntrinsics textureIntrinsics = _GetCameraIntrinsicsFromHandle(cameraIntrinsicsHandle);

            ExternApi.ArCameraIntrinsics_destroy(cameraIntrinsicsHandle);
            return(textureIntrinsics);
        }
Example No. 18
        /// <summary>
        /// Generate string to print the value in CameraIntrinsics.
        /// </summary>
        /// <param name="intrinsics">The CameraIntrinsics to generate the string from.</param>
        /// <param name="intrinsicsType">The string that describe the type of the intrinsics.</param>
        /// <returns>The generated string.</returns>
        private string _CameraIntrinsicsToString(CameraIntrinsics intrinsics, string intrinsicsType)
        {
            float fovX = 2.0f * Mathf.Atan2(intrinsics.ImageDimensions.x, 2 * intrinsics.FocalLength.x) * Mathf.Rad2Deg;
            float fovY = 2.0f * Mathf.Atan2(intrinsics.ImageDimensions.y, 2 * intrinsics.FocalLength.y) * Mathf.Rad2Deg;

            return(string.Format("Unrotated Camera {4} Intrinsics: {0}  Focal Length: {1}{0}  " +
                                 "Principal Point:{2}{0}  Image Dimensions: {3}{0}  Unrotated Field of View: ({5}º, {6}º)",
                                 Environment.NewLine, intrinsics.FocalLength.ToString(),
                                 intrinsics.PrincipalPoint.ToString(), intrinsics.ImageDimensions.ToString(),
                                 intrinsicsType, fovX, fovY));
        }
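The two field-of-view values above follow the pinhole relation fov = 2 · atan(imageDimension / (2 · focalLength)). A quick check with hypothetical numbers:

        // Hypothetical numbers: image 640 px wide, focal length fx = 500 px.
        float fovX = 2.0f * Mathf.Atan2(640f, 2f * 500f) * Mathf.Rad2Deg; // ≈ 65.2°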
Example No. 19
    private void InitializeMesh()
    {
        // Get the camera parameters to create the required number of vertices.
        m_Intrinsics = Frame.CameraImage.TextureIntrinsics;

        // Scale camera intrinsics to the depth map size.
        m_IntrinsicsScale.x = m_DepthTexture.width / (float)m_Intrinsics.ImageDimensions.x;
        m_IntrinsicsScale.y = m_DepthTexture.height / (float)m_Intrinsics.ImageDimensions.y;

        // Create template vertices.
        List <Vector3> vertices = new List <Vector3>();
        List <Vector3> normals  = new List <Vector3>();

        // Create template vertices for the mesh object.
        for (int y = 0; y < m_DepthTexture.height; y++)
        {
            for (int x = 0; x < m_DepthTexture.width; x++)
            {
                Vector3 v = new Vector3(x * 0.01f, -y * 0.01f, 0) + k_DefaultMeshOffset;
                vertices.Add(v);
                normals.Add(Vector3.back);
            }
        }

        // Create template triangle list.
        int[] triangles = GenerateTriangles(m_DepthTexture.width, m_DepthTexture.height);

        // Create the mesh object and set all template data.
        m_Mesh             = new Mesh();
        m_Mesh.indexFormat = UnityEngine.Rendering.IndexFormat.UInt32;
        m_Mesh.SetVertices(vertices);
        m_Mesh.SetNormals(normals);
        m_Mesh.SetTriangles(triangles, 0);
        m_Mesh.bounds = new Bounds(Vector3.zero, new Vector3(1000, 1000, 1000));
        m_Mesh.UploadMeshData(true);

        MeshFilter meshFilter = GetComponent <MeshFilter>();

        meshFilter.sharedMesh = m_Mesh;

        float principalPointX = m_Intrinsics.PrincipalPoint.x * m_IntrinsicsScale.x;
        float principalPointY = m_Intrinsics.PrincipalPoint.y * m_IntrinsicsScale.y;

        // Set camera intrinsics for depth reprojection.
        m_Material.SetFloat("_FocalLengthX", m_Intrinsics.FocalLength.x * m_IntrinsicsScale.x);
        m_Material.SetFloat("_FocalLengthY", m_Intrinsics.FocalLength.y * m_IntrinsicsScale.y);
        m_Material.SetFloat("_PrincipalPointX", principalPointX);
        m_Material.SetFloat("_PrincipalPointY", principalPointY);
        m_Material.SetInt("_ImageDimensionsX", m_DepthTexture.width);
        m_Material.SetInt("_ImageDimensionsY", m_DepthTexture.height);

        m_Initialized = true;
    }
Example No. 20
        /// <summary>
        /// Dumps the LocalCoordinateMapper.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void DumpCalibButton_Click(object sender, RoutedEventArgs e)
        {
            LocalCoordinateMapper lcm = new LocalCoordinateMapper(this.coordinateMapper, this.depthWidth, this.depthHeight);
            string path = Path.Combine(Path.GetDirectoryName(this.recordPath), "coordmap.dump");

            lcm.dump(path);

            CameraIntrinsics cameraIntrinsics = this.coordinateMapper.GetDepthCameraIntrinsics();
            string           path2            = Path.Combine(Path.GetDirectoryName(this.recordPath), "CameraInfo.dump");

            Utility.SaveToBinary(cameraIntrinsics, path2);
        }
Example No. 21
    public void CloseSensor(KinectInterop.SensorData sensorData)
    {
        UnityEngine.WSA.Application.InvokeOnUIThread(() =>
        {
            if (_kinectSensor != null)
            {
                _kinectSensor?.CloseAsync();
                Debug.Log("UWP-K2 sensor closed");
            }
        }, true);

        if (_depthPlaneCoordsBuf != null)
        {
            _depthPlaneCoordsBuf.Release();
            _depthPlaneCoordsBuf = null;
        }

        if (_depthDepthValuesBuf != null)
        {
            _depthDepthValuesBuf.Release();
            _depthDepthValuesBuf = null;
        }

        if (_colorPlaneCoordsBuf != null)
        {
            _colorPlaneCoordsBuf.Release();
            _colorPlaneCoordsBuf = null;
        }

        if (_colorSpaceCoordsBuf != null)
        {
            _colorSpaceCoordsBuf.Release();
            _colorSpaceCoordsBuf = null;
        }

        if (_colorDepthCoordsBuf != null)
        {
            _colorDepthCoordsBuf.Release();
            _colorDepthCoordsBuf = null;
        }

        _colorCameraIntrinsics = null;
        _depthCameraIntrinsics = null;
        _coordinateMapper      = null;
        _coordinateMapper2     = null;

        _coordMapperShader = null;
        _lastDepthDataBuf  = null;

        _clearLatestFrames = true;
        FreeMultiSourceFrame(sensorData);
    }
Example No. 22
        /// <summary>
        /// Menu Camera parameter click
        /// </summary>
        /// <param name="sender">Obj.</param>
        /// <param name="e">Arg.</param>
        private void mnuDepthCamera_Click(object sender, RoutedEventArgs e)
        {
            CameraIntrinsics depthInt = this.depthSensor.Mapper.GetDepthCameraIntrinsics();

            string msg = "Focal Lenght:\n" +
                         "\tfx : " + depthInt.FocalLengthX.ToString() + "\n" +
                         "\tfy : " + depthInt.FocalLengthY.ToString() + "\n" +
                         "Principal Point:\n" +
                         "\tcx : " + depthInt.PrincipalPointX.ToString() + "\n" +
                         "\tcy : " + depthInt.PrincipalPointY.ToString() + "\n";

            MessageBox.Show(msg, "Depth Camera Intrinsic Parameter", MessageBoxButton.OK, MessageBoxImage.Information);
        }
Example No. 23
    void PrintParameters()
    {
        var nativeSession       = LifecycleManager.Instance.NativeSession;
        var cameraHandle        = nativeSession.FrameApi.AcquireCamera();
        CameraIntrinsics result =
            nativeSession.CameraApi.GetImageIntrinsics(cameraHandle);

        txt_camera.text = result.FocalLength + " " + result.PrincipalPoint;
        //Debug.Log(result.FocalLength);
        //Debug.Log(result.PrincipalPoint);
        //Debug.Log(Frame.CameraImage.Texture.texelSize);
        //Debug.Log(Frame.CameraImage.Texture.height + " " + Frame.CameraImage.Texture.width);
    }
Example No. 24
    void Start()
    {
        int width = 0, height = 0;

        _Sensor = KinectSensor.GetDefault();
        if (_Sensor != null)
        {
            _Mapper           = _Sensor.CoordinateMapper;
            _CameraIntrinsics = _Mapper.GetDepthCameraIntrinsics();

            var frameDesc = _Sensor.DepthFrameSource.FrameDescription;
            width  = frameDesc.Width;
            height = frameDesc.Height;
            // Downsample to lower resolution
            _TrianglesTemplate = Init(frameDesc.Width / _DownsampleSize, frameDesc.Height / _DownsampleSize);

            if (!_Sensor.IsOpen)
            {
                _Sensor.Open();
            }
        }
        // Must be greater than 0, less than or equal to 2048, and a multiple of 4.
        _Buffer = new ComputeBuffer(_TrianglesTemplate.Length / 3, 36);

        // Initialize parameters
        colmap = new float[512];
        rowmap = new float[424];

        for (int i = 0; i < 512; i++)
        {
            colmap[i] = (i - _CameraIntrinsics.PrincipalPointX + 0.5f) / _CameraIntrinsics.FocalLengthX;
        }
        for (int i = 0; i < 424; i++)
        {
            rowmap[i] = (i - _CameraIntrinsics.PrincipalPointY + 0.5f) / _CameraIntrinsics.FocalLengthY;
        }

        _Shader.SetInt("width", width);
        _Shader.SetInt("height", height);
        _Shader.SetInt("downSampleSize", _DownsampleSize);
        _Shader.SetFloat("cx", _CameraIntrinsics.PrincipalPointX);
        _Shader.SetFloat("cy", _CameraIntrinsics.PrincipalPointY);
        _Shader.SetFloat("fx", _CameraIntrinsics.FocalLengthX);
        _Shader.SetFloat("fy", _CameraIntrinsics.FocalLengthY);
        _Shader.SetFloat("maxDepthLimit", maxDepthLimit);
        _Shader.SetFloat("minDepthLimit", minDepthLimit);
        _Shader.SetFloat("distanceThreshold", distanceThreshold);
        //_Shader.SetFloats("colmap", colmap);
        //_Shader.SetFloats("rowmap", rowmap);
        _NumThread = _TrianglesTemplate.Length / 3 / 8;
    }
Example No. 25
        /// <summary>
        /// Writes the intrinsics as 36 bytes (nine 4-byte fields).
        /// </summary>
        protected void WriteIntrinsics(WriteBuffer writer, CameraIntrinsics intrinsics)
        {
            writer.Write(intrinsics.FocalLengthX);
            writer.Write(intrinsics.FocalLengthY);
            writer.Write(intrinsics.FrameHeight);
            writer.Write(intrinsics.FrameWidth);

            writer.Write(intrinsics.PrincipalPointX);
            writer.Write(intrinsics.PrincipalPointY);

            writer.Write(intrinsics.RadialDistortionSecondOrder);
            writer.Write(intrinsics.RadialDistortionFourthOrder);
            writer.Write(intrinsics.RadialDistortionSixthOrder);
        }
Example No. 26
        /// <summary>
        /// Read out the factory intrinsics for the IR/Depth channel. The principal point in x-direction depends on the property <see cref="FlipX"/>.
        /// </summary>
        /// <returns>Factory IR intrinsics.</returns>
        public IProjectiveTransformation GetFactoryIRIntrinsics()
        {
            CameraIntrinsics intrinsics = Coordinates.GetDepthCameraIntrinsics();

            for (int i = 0; i < 100 && intrinsics.FocalLengthX == 0; i++)
            {
                intrinsics = Coordinates.GetDepthCameraIntrinsics();
                System.Threading.Thread.Sleep(100);
            }

            float principalPointX = depthWidthMinusOne - intrinsics.PrincipalPointX; //Principal point in x-direction needs to be mirrored, since native Kinect images are flipped.

            return(new ProjectiveTransformationZhang(depthWidth, depthHeight, intrinsics.FocalLengthX, intrinsics.FocalLengthY, principalPointX, intrinsics.PrincipalPointY, intrinsics.RadialDistortionSecondOrder, intrinsics.RadialDistortionFourthOrder, intrinsics.RadialDistortionSixthOrder, 0, 0));
        }
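The mirroring in the method above is simply cxMirrored = (depthWidth - 1) - cx. With hypothetical values for the 512×424 Kinect v2 depth image:

        // Hypothetical values: depth image 512 px wide, factory principal point cx = 260.0 px.
        float depthWidthMinusOne      = 512 - 1;                      // 511
        float mirroredPrincipalPointX = depthWidthMinusOne - 260.0f;  // 251.0 px after the horizontal flip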
Example No. 27
        /// <summary>
        /// Helper function that undistorts chessboard images based on the provided camera intrinsics.
        /// </summary>
        /// <param name="image">byte image data</param>
        /// <param name="imageWidth">image width in pixels</param>
        /// <param name="imageHeight">image height in pixels</param>
        /// <param name="intrinsics">camera intrinsics</param>
        /// <returns>Returns true if undistorting the chessboard image succeeded, otherwise false</returns>
        public bool UndistortChessboardImage(
            byte[] image,
            int imageWidth,
            int imageHeight,
            CameraIntrinsics intrinsics)
        {
            var intrinsicsArray = CreateIntrinsicsArray(intrinsics);

            return(UndistortChessboardImageNative(
                       image,
                       imageWidth,
                       imageHeight,
                       intrinsicsArray,
                       intrinsicsArray.Length));
        }
Example No. 28
        /// <summary>
        /// Calculates one camera extrinsic value based on all provided ArUco data sets
        /// </summary>
        /// <param name="intrinsics">Camera intrinsics</param>
        /// <returns>Camera extrinsics</returns>
        public CalculatedCameraExtrinsics CalculateGlobalArUcoExtrinsics(CameraIntrinsics intrinsics)
        {
            float[] inputIntrinsics = CreateIntrinsicsArray(intrinsics);

            float[] extrinsics = new float[sizeExtrinsics];
            if (!ProcessGlobalArUcoExtrinsicsNative(inputIntrinsics, extrinsics, sizeExtrinsics))
            {
                PrintLastError();
                return(null);
            }

            var calcExtrinsics = CreateExtrinsicsFromArray(extrinsics);

            return(calcExtrinsics);
        }
Example No. 29
 private float[] CreateIntrinsicsArray(CameraIntrinsics intrinsics)
 {
     float[] intrinsicsArr = new float[sizeIntrinsics];
     intrinsicsArr[0]  = intrinsics.FocalLength.x;
     intrinsicsArr[1]  = intrinsics.FocalLength.y;
     intrinsicsArr[2]  = intrinsics.PrincipalPoint.x;
     intrinsicsArr[3]  = intrinsics.PrincipalPoint.y;
     intrinsicsArr[4]  = intrinsics.RadialDistortion.x;
     intrinsicsArr[5]  = intrinsics.RadialDistortion.y;
     intrinsicsArr[6]  = intrinsics.RadialDistortion.z;
     intrinsicsArr[7]  = intrinsics.TangentialDistortion.x;
     intrinsicsArr[8]  = intrinsics.TangentialDistortion.y;
     intrinsicsArr[9]  = intrinsics.ImageWidth;
     intrinsicsArr[10] = intrinsics.ImageHeight;
     intrinsicsArr[11] = 0.0f; // reprojection error, unused
     return(intrinsicsArr);
 }
Example No. 30
    private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
    {
        _depthCameraIntrinsics = e.CameraIntrinsics;

        if (_depthDataBuf == null || sensorData.depthImageWidth != e.Bitmap.PixelWidth || sensorData.depthImageHeight != e.Bitmap.PixelHeight)
        {
            sensorData.depthImageWidth  = e.Bitmap.PixelWidth;
            sensorData.depthImageHeight = e.Bitmap.PixelHeight;

            int imageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight * sizeof(ushort);

            lock (_depthDataLock)
            {
                //_depthDataBuf = new byte[imageLen];
                //sensorData.depthImage = new ushort[e.Bitmap.PixelWidth * e.Bitmap.PixelHeight];
                Array.Resize <byte>(ref _depthDataBuf, imageLen);
                Array.Resize <ushort>(ref sensorData.depthImage, e.Bitmap.PixelWidth * e.Bitmap.PixelHeight);
            }

            int biImageLen = e.Bitmap.PixelWidth * e.Bitmap.PixelHeight;

            lock (_bodyIndexDataLock)
            {
                //_bodyIndexDataBuf = new byte[biImageLen];
                //sensorData.bodyIndexImage = new byte[biImageLen];
                Array.Resize <byte>(ref _bodyIndexDataBuf, biImageLen);
                Array.Resize <byte>(ref sensorData.bodyIndexImage, biImageLen);
            }
        }

        if (_depthDataBuf != null)
        {
            lock (_depthDataLock)
            {
                e.Bitmap.CopyToBuffer(_depthDataBuf.AsBuffer());

                if (_saveLatestFrames)
                {
                    _latestDepthFrame = e.Frame;
                }

                _depthDataTime  = DateTime.Now.Ticks; // depthFrame.RelativeTime.Ticks;
                _depthDataReady = true;
            }
        }
    }
 /// <summary>
 /// Computes the reliability value (excluding variance) of the skeleton coordinates.
 /// </summary>
 /// <param name="prevFrame"></param>
 /// <param name="nextFrame"></param>
 /// <param name="prevJoints"></param>
 /// <param name="nextJoints"></param>
 /// <param name="time"></param>
 /// <param name="cameraInfo"></param>
 /// <returns></returns>
 public double GetSkeletonReliability(MotionData prevFrame, MotionData nextFrame, Dictionary<JointType, Joint> prevJoints, Dictionary<JointType, Joint> nextJoints, DateTime time, 
     CameraIntrinsics cameraInfo)
 {
     double periodAfter = (time - prevFrame.TimeStamp).TotalSeconds;
     double periodBefore = (nextFrame.TimeStamp - time).TotalSeconds;
     double weightPeriod = Math.Exp(-periodAfter / 0.2) + Math.Exp(-periodBefore / 0.2);
     if (prevJoints == null || prevJoints.Count == 0)
         return 0;
     if (nextJoints == null || nextJoints.Count == 0)
         return 0;
     double prevEdge = 1;
     double nextEdge = 1;
     if (prevJoints.Count > 0)
     {
         prevEdge = prevJoints.Values.Select(p => Utility.GetRatioSqOfPrincipalToFocal(cameraInfo, p.Position.X, p.Position.Y)).Select(v => 1.0 / (1.0 + Math.Pow(v, 4))).Average();
     }
     if (nextJoints.Count > 0)
     {
         nextEdge = nextJoints.Values.Select(p => Utility.GetRatioSqOfPrincipalToFocal(cameraInfo, p.Position.X, p.Position.Y)).Select(v => 1.0 / (1.0 + Math.Pow(v, 4))).Average();
     }
     return weightPeriod * Math.Sqrt(prevEdge * nextEdge);
 }