Code example #1
0
    // TODO: Not the best solution, because it loses the absolute frame ID, but not sure what I'd use that for?
    public void StartRecording()
    {
        // Pre-allocate one slot per frame for the maximum recording length.
        int frameCapacity = maxRecordingSeconds * KinectUtilities.FPStoInt(kinectSettings.fps);
        base64Frames = new string[frameCapacity];

        // Continue numbering from frames already saved; enable per-frame saving.
        frameID = savedFrameCount;
        saveToFile = true;

        print("Recording started. Max recording length is " + maxRecordingSeconds + " seconds");
    }
Code example #2
0
    /// <summary>
    /// Main capture loop: pulls frames from the Kinect device, uploads the color
    /// and depth images into textures, dispatches the volume compute shader, and
    /// binds the resulting volume buffer to the display material.
    /// Runs until <c>running</c> is cleared.
    /// </summary>
    /// <param name="device">The opened Kinect device to read captures from.</param>
    private async Task CameraLoop(Device device)
    {
        Material matt = mesh.material;

        while (running)
        {
            if (collectCameraData)
            {
                // GetCapture() blocks, so run it off the main thread. ConfigureAwait(true)
                // resumes on the captured context — texture/compute calls below must run
                // on the Unity main thread.
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    // Align color and depth into a common camera space as configured.
                    switch (kinectSettings.transformationMode)
                    {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;

                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;

                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                    }

                    // Lazily allocate the GPU volume buffer and its CPU-side mirrors once
                    // the image dimensions are known (first frame only).
                    if (volumeBuffer == null)
                    {
                        matrixSize   = new Vector3Int((int)(finalColor.WidthPixels * kinectSettings.volumeScale.x), (int)(finalColor.HeightPixels * kinectSettings.volumeScale.y), (int)((KinectUtilities.depthRanges[(int)device.CurrentDepthMode].y - KinectUtilities.depthRanges[(int)device.CurrentDepthMode].x) / 11 * kinectSettings.volumeScale.z));
                        volumeBuffer = new ComputeBuffer(matrixSize.x * matrixSize.y * matrixSize.z, 4 * sizeof(float), ComputeBufferType.Default);
                        //print("Made Volume Buffer || Matrix Size: " + matrixSize);
                        extractedVolumeBuffer = new float[matrixSize.x * matrixSize.y * matrixSize.z * 4];
                        extractedVolumeBytes  = new byte[matrixSize.x * matrixSize.y * matrixSize.z * 4 * 4];
                    }

                    if (colorTexture == null)
                    {
                        colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                        colorData    = new byte[finalColor.Memory.Length];
                        //print("Made Color Texture");
                    }

                    if (depthTexture == null)
                    {
                        depthTexture    = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData       = new byte[finalDepth.Memory.Length];
                        //print("Made Depth Texture");
                    }

                    // Copy the raw frame bytes into the textures and push them to the GPU.
                    colorData = finalColor.Memory.ToArray();
                    colorTexture.LoadRawTextureData(colorData);
                    colorTexture.Apply();

                    depthData = finalDepth.Memory.ToArray();
                    depthTexture.LoadRawTextureData(depthData);
                    depthTexture.Apply();

                    configureComputeShader();

                    // NOTE(review): /16 assumes matrixSize.x and .y are multiples of the
                    // shader's thread-group size (16); otherwise edge voxels are skipped
                    // — confirm against the compute shader's numthreads.
                    kinectProcessingShader.Dispatch(computeShaderKernelIndex, matrixSize.x / 16, matrixSize.y / 16, 1);

                    // Get the volume buffer data as a byte array
                    volumeBuffer.GetData(extractedVolumeBytes);

                    // TODO: Test which is faster, or if a dedicated thread would be best
                    // NOTE(review): extractedVolumeBytes is reused every frame, so the pool
                    // worker can observe the next frame's data mid-read; copy the array if
                    // Postprocess needs a stable snapshot — confirm.
                    //Option 1: Use the UserWorkItem Threadpool to manage thread for me
                    ThreadPool.QueueUserWorkItem((state) => Postprocess((Byte[])state), extractedVolumeBytes);

                    //Option 2: Spawn a thread for each frame
                    //new Thread(() => Postprocess(extractedVolumeBytes)).Start();

                    // One-time estimate of how many seconds fit in the file-size budget,
                    // based on the compressed size of the first frame.
                    if (compressedBytes == 0)
                    {
                        byte[] compressedArray = CompressData(extractedVolumeBytes);
                        compressedBytes     = compressedArray.Length;
                        maxRecordingSeconds = (maxFileSizeMb * 1000 * 1000) / (compressedBytes * KinectUtilities.FPStoInt(kinectSettings.fps));
                    }

                    // Bind the volume and its dimensions for the rendering material.
                    matt.SetBuffer("colors", volumeBuffer);
                    matt.SetInt("_MatrixX", matrixSize.x);
                    matt.SetInt("_MatrixY", matrixSize.y);
                    matt.SetInt("_MatrixZ", matrixSize.z);

                    // Keep a copy of this frame's depth for next-frame comparison.
                    Graphics.CopyTexture(depthTexture, oldDepthTexture);
                }
            }
            else
            {
                // FIX: was `await Task.Run(() => { })`, which re-enters the loop
                // immediately and spins a CPU core while idle. Sleep briefly instead;
                // capture resumes within ~10 ms of collectCameraData being set.
                await Task.Delay(10);
            }
        }
    }
Code example #3
0
File: KinectTester.cs  Project: rjabaker/Skynet
 // Callback invoked for each tracked joint sample; reacts only to the right elbow
 // and throttles bend-angle updates to once every `updateFrequency` frames.
 public void JointTrackingCaptured(KinectUtilities.JointTracking.MovingJoint joint, DateTime timeStamp)
 {
     // Ignore every joint except the right elbow.
     if (joint.JointType != KinectUtilities.JointTracking.JointType.ElbowRight)
     {
         return;
     }

     // Not yet due for an update: just advance the frame counter.
     if (bendAngleFrameCount % updateFrequency != 0)
     {
         bendAngleFrameCount++;
         return;
     }

     // Due: restart the counter and recompute the bend angle.
     bendAngleFrameCount = 1;
     UpdateJointBendAngle(joint, timeStamp);
 }