Example #1
0
    /// <summary>
    /// Swizzles the incoming pixel buffer from RGBA to ARGB byte order in
    /// place, then publishes it as a frame on the local NDN-RTC video stream.
    /// </summary>
    /// <param name="format">Pixel format reported by the texture reader. The
    /// swizzle assumes 4 bytes per pixel in RGBA order — TODO confirm against
    /// the actual formats TextureReaderApi can deliver.</param>
    /// <param name="width">Frame width in pixels.</param>
    /// <param name="height">Frame height in pixels.</param>
    /// <param name="pixelBuffer">Pointer to the raw pixel data; rewritten in place.</param>
    /// <param name="bufferSize">Total size of <paramref name="pixelBuffer"/> in bytes.</param>
    /// <returns>The value returned by NDN-RTC for the published frame
    /// (negative when the frame was not published), or -1 when the buffer is
    /// too small to hold a frame of the stated dimensions.</returns>
    public int processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        Debug.Log("[ndnrtc::videostream] incoming image format " + format + " size " + width + "x" + height);

        // Refuse to touch a buffer smaller than the advertised dimensions
        // imply (4 bytes per pixel; rows are assumed tightly packed — no
        // stride is accounted for). Without this guard the swizzle loop
        // below would read and write past the end of the buffer.
        if (bufferSize < width * height * 4)
        {
            Debug.Log("[ndnrtc::videostream] pixel buffer too small for " + width + "x" + height + " frame, dropping");
            return -1;
        }

        unsafe {
            byte *ptr = (byte *)pixelBuffer.ToPointer();
            int pixelCount = width * height;

            // In-place RGBA -> ARGB: rotate each pixel's four bytes right by
            // one position. (The original went through a pointless
            // byte -> float -> byte round trip per channel.)
            for (int i = 0; i < pixelCount; i++, ptr += 4)
            {
                byte r = ptr[0];
                byte g = ptr[1];
                byte b = ptr[2];
                byte a = ptr[3];
                ptr[0] = a;
                ptr[1] = r;
                ptr[2] = g;
                ptr[3] = b;
            }
        }

        // Publish the converted frame through the native NDN-RTC library.
        int frameNo = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        return frameNo;
    }
Example #2
0
    /// <summary>
    /// Flips/converts the incoming frame in place, publishes it on the local
    /// NDN-RTC video stream, and returns metadata for the published frame.
    /// </summary>
    /// <param name="format">Pixel format reported by the texture reader (not
    /// inspected here — presumably already the layout flipFrame expects; TODO confirm).</param>
    /// <param name="width">Frame width in pixels.</param>
    /// <param name="height">Frame height in pixels.</param>
    /// <param name="pixelBuffer">Pointer to the raw pixel data; modified in place by flipFrame.</param>
    /// <param name="bufferSize">Size of the pixel buffer in bytes.</param>
    /// <returns>Info for the latest published frame; its playbackNo_ is set
    /// to -1 when this frame was skipped by the encoder/library.</returns>
    public FrameInfo processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        // Transform the buffer in place before handing it to NDN-RTC.
        flipFrame(width, height, pixelBuffer, true, true, true);

        // Publish the frame. A negative result means the frame was skipped
        // (encoder decision, or the library was busy publishing); a
        // non-negative result is the playback number of the published frame.
        int publishResult = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        // Query metadata for the most recently published frame.
        FrameInfo lastPublished = NdnRtcWrapper.ndnrtc_LocalVideoStream_getLastPublishedInfo(ndnrtcHandle_);

        Debug.LogFormat(this, "res: {0} frameNo: {1} timestamp {2} ndn name {3}", publishResult, lastPublished.playbackNo_, lastPublished.timestamp_, lastPublished.ndnName_);

        // Mark skipped frames with a playback number of -1 so callers can
        // distinguish them from successfully published frames.
        if (publishResult < 0)
        {
            lastPublished.playbackNo_ = -1;
        }
        return(lastPublished);
    }