void Initialize()
    {
        initialized = true;

        // Build the per-user colour tables. The "occluded" variant of each user
        // colour keeps the hue/saturation but drops the brightness to a quarter.
        int userCount = userCols.Length;
        occludedUserCols = new Color[userCount];
        userCurrentCols  = new Color[userCount];

        for (int i = 0; i < userCount; i++)
        {
            Color baseCol = userCols[i];
            userCurrentCols[i] = baseCol;

            float h, s, v;
            Color.RGBToHSV(baseCol, out h, out s, out v);
            Color dimmed = Color.HSVToRGB(h, s, v * 0.25f);
            dimmed.a = baseCol.a;           // HSVToRGB yields a=1; keep the source alpha
            occludedUserCols[i] = dimmed;
        }

        issuesProcessor = IssuesProcessor.Instance;
        nuitrackModules = FindObjectOfType <NuitrackModules>();
        nuitrack.OutputMode mode = nuitrackModules.DepthSensor.GetOutputMode();

        // Horizontal downsampling step; clamp to 1 so it stays positive, then
        // recompute hRes as the exact number of sampled columns.
        frameStep = mode.XRes / hRes;
        if (frameStep <= 0)
        {
            frameStep = 1;
        }
        hRes = mode.XRes / frameStep;

        // World scale per depth unit, derived from the horizontal FOV (radians).
        depthToScale = meshScaling * 2f * Mathf.Tan(0.5f * mode.HFOV) / hRes;

        // Round the mesh dimensions up when the resolution is not an exact
        // multiple of the step, so the last partial column/row is still covered.
        int meshWidth  = (mode.XRes / frameStep) + (mode.XRes % frameStep == 0 ? 0 : 1);
        int meshHeight = (mode.YRes / frameStep) + (mode.YRes % frameStep == 0 ? 0 : 1);
        InitMeshes(meshWidth, meshHeight, mode.HFOV);
    }
Exemplo n.º 2
0
    void Start()
    {
        // Subscribe to Nuitrack colour and user-segmentation updates.
        NuitrackManager.onColorUpdate       += DrawColorFrame;
        NuitrackManager.onUserTrackerUpdate += DrawUserSegment;

        nuitrack.OutputMode colorMode = NuitrackManager.ColorSensor.GetOutputMode();
        Debug.Log("Color frame: " + colorMode.XRes + "x" + colorMode.YRes);

        NuitrackManager.DepthSensor.SetMirror(false);
        nuitrack.OutputMode depthMode = NuitrackManager.DepthSensor.GetOutputMode();
        Debug.Log("Depth frame: " + depthMode.XRes + "x" + depthMode.YRes);

        // The output texture matches the colour sensor's resolution.
        frameWidth   = colorMode.XRes;
        frameHeight  = colorMode.YRes;
        frameTexture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);

        // NOTE(review): 4 bytes per pixel here vs. the 3-byte RGB24 texture above —
        // presumably the mask buffer carries an alpha channel; confirm against the
        // DrawColorFrame/DrawUserSegment handlers.
        maskedFrame = new byte[frameWidth * frameHeight * 4];

        material.mainTexture = frameTexture;
    }
        private void Initialize()
        {
            // Size the meshes from the depth sensor's current resolution and horizontal FOV.
            nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();
            InitMeshes(mode.XRes, mode.YRes, mode.HFOV);
        }
Exemplo n.º 4
0
    void Start()
    {
        // Redraw the output whenever Nuitrack delivers a new colour frame.
        NuitrackManager.onColorUpdate += DrawColor;

        // Cache the colour sensor's resolution, then (re)build the textures to match.
        nuitrack.OutputMode colorMode = NuitrackManager.ColorSensor.GetOutputMode();
        rows = colorMode.YRes;
        cols = colorMode.XRes;

        RecreateTextures();
    }
    private void Initialize()
    {
        // Locate the scene's Nuitrack wrapper and size the meshes from its
        // depth sensor's resolution and horizontal FOV.
        nuitrackModules = FindObjectOfType <NuitrackModules>();
        nuitrack.OutputMode depthMode = nuitrackModules.DepthSensor.GetOutputMode();

        InitMeshes(depthMode.XRes, depthMode.YRes, depthMode.HFOV);
    }
Exemplo n.º 6
0
    void Start()
    {
        // Find the scene's Nuitrack wrapper and keep a reference to its depth sensor.
        nuitrackModules = FindObjectOfType <NuitrackModules>();
        sensor = nuitrackModules.DepthSensor;

        // Build the meshes to match the depth sensor's resolution and horizontal FOV.
        // (The previous version also computed an unused `numVerts = xRes * yRes`
        // local; it was dead code and has been removed.)
        nuitrack.OutputMode mode = sensor.GetOutputMode();
        InitMeshes(mode.XRes, mode.YRes, mode.HFOV);
    }
Exemplo n.º 7
0
 void Start()
 {
     // Orient the two FOV indicator lines to match the sensor's field of view.
     nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();

     if (needVFov)
     {
         // Nuitrack only reports the horizontal FOV; derive the vertical FOV
         // from the aspect ratio: vFOV = 2 * atan(tan(hFOV / 2) * yRes / xRes).
         float vFov = 2 * Mathf.Atan(Mathf.Tan(mode.HFOV / 2) * (float)mode.YRes / (float)mode.XRes);
         float halfAngleDeg = vFov * Mathf.Rad2Deg / 2;
         firstLine.localEulerAngles  = new Vector3(0, 0, halfAngleDeg);
         secondLine.localEulerAngles = new Vector3(0, 0, -halfAngleDeg);
     }
     else
     {
         // Horizontal FOV: same half-angle fan, rotated 90 degrees.
         float halfAngleDeg = mode.HFOV * Mathf.Rad2Deg / 2;
         firstLine.localEulerAngles  = new Vector3(0, 0, halfAngleDeg - 90);
         secondLine.localEulerAngles = new Vector3(0, 0, -halfAngleDeg - 90);
     }
 }
Exemplo n.º 8
0
    //List<float> minZArray;
    //List<float> maxZArray;

    void Start()
    {
        // Recolour the point cloud each time user-segmentation data arrives.
        NuitrackManager.onUserTrackerUpdate += ColorizeUser;

        // Mirror the depth image so the display behaves like a mirror for the user.
        NuitrackManager.DepthSensor.SetMirror(true);

        nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();
        cols = mode.XRes;
        rows = mode.YRes;

        // One 4-byte element per depth pixel for the user-segment mask on the GPU.
        int pixelCount = cols * rows;
        segmentBuffer = new ComputeBuffer(pixelCount, 4);
        outSegment    = new int[pixelCount];
        PointCloudGPU.Instance.matPointCloud.SetBuffer("segmentBuffer", segmentBuffer);
    }
Exemplo n.º 9
0
    void Initialize()
    {
        initialized = true;

        // Resolution, FPS and FOV as reported by the depth sensor.
        nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();

        // Horizontal downsampling step; clamp to 1 so it stays positive, then
        // recompute hRes as the exact number of sampled columns.
        frameStep = mode.XRes / hRes;
        if (frameStep <= 0)
        {
            frameStep = 1;
        }
        hRes = mode.XRes / frameStep;

        InitMeshes(mode.XRes / frameStep,   // width
                   mode.YRes / frameStep,   // height
                   mode.HFOV);
    }
Exemplo n.º 10
0
        void NuitrackManager_onColorUpdate(nuitrack.ColorFrame frame)
        {
            nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();

            // The frame fills the screen along whichever axis reaches the screen
            // edges first, so the FOV must be matched along that axis: when the
            // screen is wider than the frame, align using the frame's inverse
            // aspect ratio; otherwise use the screen's.
            float frameAspectRatio = (float)frame.Cols / frame.Rows;
            float targetAspectRatio;
            if (ViewWidth / ViewHeight > frameAspectRatio)
            {
                targetAspectRatio = (float)frame.Rows / frame.Cols;
            }
            else
            {
                targetAspectRatio = ViewHeight / ViewWidth;
            }

            // Nuitrack does not yet expose a vFOV, so convert the hFOV to the
            // vertical FOV the Unity camera expects.
            float vFOV = 2 * Mathf.Atan(Mathf.Tan(mode.HFOV * 0.5f) * targetAspectRatio);
            Camera.fieldOfView = vFOV * Mathf.Rad2Deg;
        }
Exemplo n.º 11
0
    void Start()
    {
        // Nuitrack initialization and creation of the depth and user-tracker modules.
        // Everything is wrapped in try/catch because Init()/Create()/Run() throw when
        // no sensor or runtime is available; failures are only logged, not rethrown.
        try
        {
            nuitrack.Nuitrack.Init();
            depthSensor = nuitrack.DepthSensor.Create();
            depthSensor.SetMirror(false);
            nuitrack.OutputMode mode = depthSensor.GetOutputMode();

            // Horizontal downsampling step; clamp to 1 so it stays positive.
            frameStep = mode.XRes / hRes;
            if (frameStep <= 0)
            {
                frameStep = 1;                             // frameStep must be greater than 0
            }
            // World scale per depth unit, derived from the horizontal FOV (radians).
            depthToScale = 0.9f * 2f * Mathf.Tan(0.5f * mode.HFOV) / hRes;

            // Total vertex count: downsampled width * downsampled height, each
            // rounded up when the resolution is not an exact multiple of frameStep.
            InitMeshes(
                ((mode.XRes / frameStep) + (mode.XRes % frameStep == 0 ? 0 : 1)) *
                ((mode.YRes / frameStep) + (mode.YRes % frameStep == 0 ? 0 : 1))
                );
            userTracker = nuitrack.UserTracker.Create();

            // Event handlers are registered before Run() starts the pipeline.
            depthSensor.OnUpdateEvent += DepthUpdate;
            userTracker.OnUpdateEvent += UserUpdate;

            // Dump the computed parameters for on-device debugging.
            exceptionsLogger.AddEntry("hRes: " + hRes);
            exceptionsLogger.AddEntry("mode.HFOV: " + mode.HFOV);
            exceptionsLogger.AddEntry("mode.XRes: " + mode.XRes);
            exceptionsLogger.AddEntry("mode.YRes: " + mode.YRes);
            exceptionsLogger.AddEntry("frameStep: " + frameStep);
            exceptionsLogger.AddEntry("depthtoScale: " + depthToScale);

            nuitrack.Nuitrack.Run();
        }
        catch (Exception ex)
        {
            exceptionsLogger.AddEntry(ex.ToString());
        }
    }
    void Start()
    {
        // Repaint the segment overlay whenever user-tracker data arrives.
        NuitrackManager.onUserTrackerUpdate += ColorizeUser;

        // Mirror the depth image so the display behaves like a mirror for the user.
        NuitrackManager.DepthSensor.SetMirror(true);

        nuitrack.OutputMode depthMode = NuitrackManager.DepthSensor.GetOutputMode();
        cols = depthMode.XRes;
        rows = depthMode.YRes;

        imageRect      = new Rect(0, 0, cols, rows);
        segmentTexture = new Texture2D(cols, rows, TextureFormat.ARGB32, false);
        outSegment     = new byte[cols * rows * 4];   // 4 bytes per ARGB32 pixel

        // Keep the UI image unstretched and preserve its aspect ratio.
        segmentOut.preserveAspect = true;
        segmentOut.type           = Image.Type.Simple;

        gameColliders.CreateColliders(cols, rows);
        objectSpawner.StartSpawn(cols);
    }
Exemplo n.º 13
0
 // Start is called before the first frame update
 void Start()
 {
     // Update the frame whenever user-tracker data arrives.
     NuitrackManager.onUserTrackerUpdate += ColorizeUser;
     // Mirror the image received from the sensor.
     NuitrackManager.DepthSensor.SetMirror(true);
     // Request the output image parameters from the depth sensor.
     nuitrack.OutputMode mode = NuitrackManager.DepthSensor.GetOutputMode();
     cols = mode.XRes;
     rows = mode.YRes;
     // Rectangle defining the texture boundaries.
     imageRect = new Rect(0, 0, cols, rows);
     // Segment texture sized to the depth frame; ARGB32 = one byte per channel,
     // including an alpha channel.
     segmentTexture = new Texture2D(cols, rows, TextureFormat.ARGB32, false);
     // Output segment buffer: image size times 4 because ARGB32 has 4 bytes per pixel.
     dataSegment = new byte[cols * rows * 4];
     // Simple image type (no stretching) and preserved aspect ratio.
     segmentOut.type           = Image.Type.Simple;
     segmentOut.preserveAspect = true;
     // Build the colliders from the frame dimensions.
     collider.CreateCollider(cols, rows);
     // Start the random object spawner across the frame width.
     objectSpawner.StartSpawning(cols);
 }