/* ==================================
     * FlightGoggles Low Level Functions
     * ==================================
     */

    // Old version with managed memory (kept for reference, unused).
    // Cuts the image block for an individual camera out of the larger provided image.
    //public byte[] get_raw_image(Camera_t cam, byte[] raw, int numCams)
    //{
    //    int num_bytes_to_copy = cam.channels * state.camWidth * state.camHeight;
    //    byte[] output = new byte[num_bytes_to_copy];

    //    // Reverse camera indexing since the camera output is globally flipped on the Y axis.
    //    int outputIndex = numCams - 1 - cam.outputIndex;

    //    // Figure out where this camera's rows start in the stacked image.
    //    int y_start = outputIndex * state.camHeight;

    //    // Calculate the starting byte (3 bytes per pixel).
    //    int byte_start = (y_start * state.screenWidth) * 3;

    //    // Copy the camera's block, skipping bytes according to the channel stride.
    //    int px_stride = 4 - cam.channels;
    //    for (int i = 0; i < num_bytes_to_copy; i++)
    //    {
    //        output[i] = raw[byte_start + i * px_stride];
    //    }
    //    return output;
    //}

    // Parallel job that copies every px_stride-th byte out of an RGBA buffer
    // (kept for reference, unused).
    //public struct ConvertRGBAToRGB : IJobParallelFor
    //{
    //    [ReadOnly]
    //    public NativeSlice<byte> input;
    //    [WriteOnly]
    //    public NativeArray<byte> output;
    //    // Params
    //    [ReadOnly]
    //    public int px_stride;

    //    // i is the parallel loop index; here, the index of the output byte.
    //    public void Execute(int i)
    //    {
    //        output[i] = input[i * px_stride];
    //    }
    //}
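
    // For reference, a job like the one above would be scheduled roughly as
    // follows (a sketch; assumes the Unity Jobs/Collections packages, an RGBA
    // source slice `rgbaSlice`, and a pre-allocated output `rgbOutput`):
    //
    //   var job = new ConvertRGBAToRGB
    //   {
    //       input = rgbaSlice,
    //       output = rgbOutput,          // NativeArray<byte>, one entry per output byte
    //       px_stride = 4 - cam.channels
    //   };
    //   JobHandle handle = job.Schedule(rgbOutput.Length, 64); // 64-element batches
    //   handle.Complete();               // block until the conversion finishes
    //
    // Schedule() fans Execute() out across worker threads, which is the main
    // reason to prefer it over the managed per-byte loop above.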


    // Newer version of the same cut, operating on a NativeArray view (also unused).
    // Cuts the image block for an individual camera out of the larger provided image.
    //public byte[] get_raw_image(Camera_t cam, NativeArray<byte> raw, int numCams)
    //{
    //    int num_bytes_to_copy = cam.channels * state.camWidth * state.camHeight;
    //    byte[] output = new byte[num_bytes_to_copy];

    //    // Reverse camera indexing since the camera output is globally flipped on the Y axis.
    //    int outputIndex = numCams - 1 - cam.outputIndex;

    //    // Figure out where this camera's rows start in the stacked image.
    //    int y_start = outputIndex * state.camHeight;

    //    // Calculate the starting byte (3 bytes per pixel).
    //    int byte_start = (y_start * state.screenWidth) * 3;

    //    // Copy the camera's block, skipping bytes according to the channel stride.
    //    int px_stride = 4 - cam.channels;
    //    for (int i = 0; i < num_bytes_to_copy; i++)
    //    {
    //        output[i] = raw[byte_start + i * px_stride];
    //    }
    //    return output;
    //}



    // Reads a scene frame from the GPU backbuffer and sends it via ZMQ.
    // Frame 0 of the multipart message is the JSON-serialized render metadata;
    // each subsequent frame is one camera's raw RGB image.
    void sendFrameOnWire()
    {
        // Read pixels from the screen backbuffer (expensive: stalls until the GPU finishes rendering).
        rendered_frame.ReadPixels(new Rect(0, 0, state.screenWidth, state.screenHeight), 0, 0);
        rendered_frame.Apply(); // Likely unnecessary: Apply() uploads CPU-side pixel data to the GPU, while GetRawTextureData() reads the CPU copy.

        byte[] raw = rendered_frame.GetRawTextureData();
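
        // A non-blocking alternative to the ReadPixels round trip (a sketch only,
        // not wired in; assumes Unity 2018.1+, a GPU-readable texture, and a
        // hypothetical OnFrameReadback handler):
        //
        //   using UnityEngine.Rendering;
        //   AsyncGPUReadback.Request(rendered_frame, 0, TextureFormat.RGB24,
        //       request =>
        //       {
        //           if (!request.HasError)
        //               OnFrameReadback(request.GetData<byte>());
        //       });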

        // Get metadata
        RenderMetadata_t metadata = new RenderMetadata_t(state);

        // Create the outgoing multipart message; the serialized metadata is its first frame.
        var msg = new NetMQMessage();
        msg.Append(JsonConvert.SerializeObject(metadata));

        // Append each camera's image to the message. Compression and channel
        // striding are disabled for now, so each camera contributes one
        // contiguous RGB block (3 bytes per pixel).

        state.cameras.ForEach(cam => {
            // Length of RGB slice
            int rgbImageLength = 3 * state.camWidth * state.camHeight;

            // Reverse camera indexing since the camera output is globally flipped on the Y axis.
            int outputIndex = state.numCameras - 1 - cam.outputIndex;

            // Figure out which row of the stacked backbuffer this camera's data starts on
            int yStart = outputIndex * state.camHeight;

            // Calculate the starting byte (3 bytes per pixel)
            int byteStart = (yStart * state.screenWidth) * 3;

            // Get a view of the RGB camera image (NativeSlice path, currently disabled)
            //NativeSlice<byte> rgbImage = raw.Slice<byte>(byteStart, rgbImageLength);

            // @TODO: convert the RGB image to grayscale when the camera publishes a single channel.
            //if (cam.channels == 1)
            //{
            //    // Get single-channel views
            //    NativeSlice<byte> redArray = rgbImage.SliceWithStride<byte>(0);
            //    NativeSlice<byte> greenArray = rgbImage.SliceWithStride<byte>(1);
            //    NativeSlice<byte> blueArray = rgbImage.SliceWithStride<byte>(2);
            //    // Allocate the output array
            //    NativeArray<byte> grayscaleArray = new NativeArray<byte>(rgbImageLength / 3, Allocator.TempJob);
            //    // Spawn the conversion job @TODO
            //    convertToGrayscale(redArray, greenArray, blueArray, grayscaleArray, ref grayscaleConversionBurstJobHandle);
            //}
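
            // A minimal sketch of the missing conversion job (hypothetical name
            // ConvertRGBToGrayscale; assumes the Unity Jobs/Collections packages
            // and the standard BT.601 luma weights):
            //
            //   public struct ConvertRGBToGrayscale : IJobParallelFor
            //   {
            //       [ReadOnly] public NativeSlice<byte> r, g, b;
            //       [WriteOnly] public NativeArray<byte> gray;
            //       public void Execute(int i)
            //       {
            //           // Y = 0.299 R + 0.587 G + 0.114 B
            //           gray[i] = (byte)(0.299f * r[i] + 0.587f * g[i] + 0.114f * b[i]);
            //       }
            //   }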

            // Copy this camera's RGB block out of the raw frame
            byte[] imageData = new byte[rgbImageLength];
            Array.Copy(raw, byteStart, imageData, 0, rgbImageLength);
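
            // If compression were re-enabled, each block could be PNG-encoded
            // before being appended (a sketch; assumes Unity 2019.1+, where
            // ImageConversion.EncodeArrayToPNG is available):
            //
            //   using UnityEngine.Experimental.Rendering;
            //   byte[] png = ImageConversion.EncodeArrayToPNG(
            //       imageData, GraphicsFormat.R8G8B8_UNorm,
            //       (uint)state.camWidth, (uint)state.camHeight);
            //   msg.Append(png);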


            // A zero-copy send via unsafe pointers was tried here and abandoned;
            // NetMQFrame wraps whole byte[] buffers and appears to take no offset,
            // so the copy above is still required.
            //fixed (byte* pSource = &raw[0])
            //{
            //    var frame = new NetMQFrame(*(byte*)pSource, rgbImageLength);
            //}

            msg.Append(imageData);
        });

        // Send only if a peer is ready to receive; otherwise drop the frame
        // rather than block the render loop.
        if (push_socket.HasOut)
        {
            push_socket.SendMultipartMessage(msg);
        }
    }