Example #1
    public static void Release()
    {
        // stop the frame-fetching worker: clear the run flag, wake the thread
        // through the semaphore, and wait for it to exit
        runFrameFetching_ = false;
        queueSem_.Release();
        frameFetchingThread_.Join();

        // tear down the native NDN-RTC library
        NdnRtcWrapper.ndnrtc_deinit();
    }
Example #2
    public void fetch(string frameName, LocalVideoStream stream,
                      OnFrameFetched onFrameFetched, OnFrameFetchFailure onFrameFetchFailure)
    {
        onFrameFetched_      = onFrameFetched;
        onFrameFetchFailure_ = onFrameFetchFailure;

        // keep references to the marshaled delegates so they are not
        // garbage-collected while the native library still holds them
        bufferAllocDelegate  = new FrameFetcherBufferAlloc(bufferAllocate);
        frameFetchedDelegate = new FrameFetcherFrameFetched(frameFetched);

        // hand the request off to the native NDN-RTC frame fetcher; results are
        // delivered through the bufferAllocate/frameFetched callbacks
        NdnRtcWrapper.ndnrtc_FrameFetcher_fetch(stream.getHandle(),
                                                frameName,
                                                bufferAllocDelegate,
                                                frameFetchedDelegate);
    }
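A usage sketch for fetch() above. The FrameFetcher class name, its constructor, and the frame name below are assumptions, and the OnFrameFetched/OnFrameFetchFailure delegate signatures are not shown in these examples, so onFetched/onFailed stand in for handlers matching those delegate types:

    // hypothetical call site; localStream is an existing LocalVideoStream instance
    FrameFetcher fetcher = new FrameFetcher();
    fetcher.fetch("/producer/instance0/back_camera/vp9/%FE%05", localStream,
                  onFetched, onFailed);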
Example #3
    public LocalVideoStream(LocalStreamParams p)
    {
        // create the log sink delegate once and keep a reference so it is not garbage-collected
        if (sinkCallbackDelegate == null)
        {
            sinkCallbackDelegate = new NdnRtcLibLogHandler(loggerSinkHandler);
        }

        // create the native stream and keep its opaque handle
        ndnrtcHandle_ = NdnRtcWrapper.ndnrtc_createLocalStream(p, sinkCallbackDelegate);

        // cache the stream's NDN prefixes and name as managed strings
        basePrefix = Marshal.PtrToStringAnsi(NdnRtcWrapper.ndnrtc_LocalStream_getBasePrefix(ndnrtcHandle_));
        fullPrefix = Marshal.PtrToStringAnsi(NdnRtcWrapper.ndnrtc_LocalStream_getPrefix(ndnrtcHandle_));
        streamName = Marshal.PtrToStringAnsi(NdnRtcWrapper.ndnrtc_LocalStream_getStreamName(ndnrtcHandle_));

        Debug.Log("Initialized ndnrtc stream " + streamName + " (full prefix " + fullPrefix + ")");
    }
Example #4
    public int processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        Debug.Log("[ndnrtc::videostream] incoming image format " + format + " size " + width + "x" + height);

        // swizzle the pixel data in place from RGBA to ARGB byte order
        unsafe {
            byte *ptr    = (byte *)pixelBuffer.ToPointer();
            int   offset = 0;

            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    byte r = ptr[offset + 0];
                    byte g = ptr[offset + 1];
                    byte b = ptr[offset + 2];
                    byte a = ptr[offset + 3];

                    ptr[offset + 0] = a;
                    ptr[offset + 1] = r;
                    ptr[offset + 2] = g;
                    ptr[offset + 3] = b;

                    offset += 4;
                }
            }
        }

//		uint offset = imageData.stride;
//		uint yPlaneSize = imageData.stride * imageData.height;
//		uint vPlaneSize = (imageData.stride / 2) * (imageData.height / 2);
//		uint uvPLaneSize = yPlaneSize / 2;

        //GCHandle pinnedBuffer = GCHandle.Alloc (switchAB, GCHandleType.Pinned);

//		IntPtr yPlane = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 () + offset);
//		offset += yPlaneSize;
//		IntPtr uvPlane = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 () + offset);

        //IntPtr buffer = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 ());

//		public static extern int ndnrtc_LocalVideoStream_incomingARGBFrame (IntPtr stream,
//			uint width, uint height, IntPtr argbFrameData, uint frameSize);
        int frameNo = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        //Debug.Log ("frameNo = " + frameNo);
        //pinnedBuffer.Free ();

        return(frameNo);
    }
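A wiring sketch for processIncomingFrame() above, assuming a texture reader that raises a per-frame callback with the same (format, width, height, pixelBuffer, bufferSize) arguments; the textureReader object and its event name are assumptions, and videoStream is the LocalVideoStream instance from these examples:

    // hypothetical hookup between a texture reader callback and the publisher
    textureReader.OnImageAvailableCallback += (format, width, height, pixelBuffer, bufferSize) =>
    {
        int frameNo = videoStream.processIncomingFrame(format, width, height, pixelBuffer, bufferSize);
        if (frameNo < 0)
            Debug.Log("frame was skipped (encoder decision or publisher busy)");
    };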
Example #5
    public static void Initialize(string signingIdentity, string instanceId)
    {
        if (libraryCallbackDelegate == null)
        {
            libraryCallbackDelegate = new NdnRtcLibLogHandler(ndnrtcLogHandler);
        }

        bool res;

        try {
            // initialize the native NDN-RTC library
            res = NdnRtcWrapper.ndnrtc_init("localhost", Application.persistentDataPath, signingIdentity,
                                            instanceId, libraryCallbackDelegate);

            if (res)
            {
                // configure and create the local video stream used for publishing
                LocalStreamParams p = new LocalStreamParams();

                p.basePrefix               = signingIdentity + "/" + instanceId;
                p.signingOn                = 1;
                p.dropFrames               = 1;
                p.fecOn                    = 1;
                p.frameHeight              = 180;
                p.frameWidth               = 320;
                p.gop                      = 30;
                p.startBitrate             = 300;
                p.maxBitrate               = 7000;
                p.ndnDataFreshnessPeriodMs = 2000;
                p.ndnSegmentSize           = 8000;
                p.typeIsVideo              = 1;
                p.streamName               = "back_camera";
                p.threadName               = "vp9";

                videoStream = new LocalVideoStream(p);
            }
        } catch (System.Exception e) {
            Debug.LogError("Error initializing NDN-RTC: " + e.Message);
        }
    }
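A minimal lifecycle sketch, assuming the Initialize()/Release() pair shown in these examples lives on a static class (called NdnRtc here purely for illustration) and is driven from a Unity MonoBehaviour; the identity and instance values are placeholders:

    using UnityEngine;

    // hypothetical driver component
    public class NdnRtcLifecycle : MonoBehaviour
    {
        void Start()
        {
            // placeholder signing identity and instance id
            NdnRtc.Initialize("/my/signing/identity", "instance0");
        }

        void OnDestroy()
        {
            // matches the Release() shown in Example #1 / Example #7
            NdnRtc.Release();
        }
    }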
Example #6
    public FrameInfo processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        // Debug.Log ("[ndnrtc::videostream] incoming image format " + format + " size " + width + "x" + height);

        flipFrame(width, height, pixelBuffer, true, true, true);

        // publish frame using NDN-RTC
        // return: res < 0 -- frame was skipped due to encoder decision (or library was busy publishing frame)
        //         res >= 0 -- playback number of published frame
        int res = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        // query additional latest published frame information
        FrameInfo finfo = NdnRtcWrapper.ndnrtc_LocalVideoStream_getLastPublishedInfo(ndnrtcHandle_);

        Debug.LogFormat(this, "res: {0} frameNo: {1} timestamp {2} ndn name {3}", res, finfo.playbackNo_, finfo.timestamp_, finfo.ndnName_);

        if (res < 0)
        {
            finfo.playbackNo_ = -1;
        }
        // return res > 0 ? finfo.playbackNo_ : res;
        return(finfo);
    }
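A brief sketch of consuming the returned FrameInfo (field names taken from the example above; the call site itself is hypothetical):

    // hypothetical call site
    FrameInfo info = videoStream.processIncomingFrame(format, width, height, pixelBuffer, bufferSize);
    if (info.playbackNo_ >= 0)
        Debug.Log("published " + info.ndnName_ + " as playback #" + info.playbackNo_);
    else
        Debug.Log("frame was skipped by the encoder or the library was busy");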
Example #7
    public static void Release()
    {
        // tear down the native NDN-RTC library
        NdnRtcWrapper.ndnrtc_deinit();
    }
Example #8
    public static void Initialize(string signingIdentity, string instanceId)
    {
        if (libraryCallbackDelegate == null)
        {
            libraryCallbackDelegate = new NdnRtcLibLogHandler(ndnrtcLogHandler);
        }

        bool res;

        try {
            string version = Marshal.PtrToStringAnsi(NdnRtcWrapper.ndnrtc_getVersion());
            Debug.Log("NDN-RTC version " + version);

            res = NdnRtcWrapper.ndnrtc_init("localhost", Application.persistentDataPath, signingIdentity,
                                            instanceId, libraryCallbackDelegate);

            if (res)
            {
                LocalStreamParams p = new LocalStreamParams();

                p.basePrefix     = signingIdentity + "/" + instanceId;
                p.signingOn      = 1;
                p.dropFrames     = 1;
                p.fecOn          = 1;
                p.frameHeight    = 180;
                p.frameWidth     = 320;
                p.gop            = 30;
                p.startBitrate   = 300;
                p.maxBitrate     = 7000;
                p.ndnSegmentSize = 8000;
                p.typeIsVideo    = 1;
                p.streamName     = "back_camera";
                p.threadName     = "vp9";
                p.storagePath    = Application.persistentDataPath + "/ndnrtc_storage";

                videoStream = new LocalVideoStream(p);

                runFrameFetching_       = true;
                queueSem_               = new Semaphore(0, 30); // allow up to 30 queued fetch requests
                activeTasks_            = new HashSet <FrameFetchingTask>();
                frameFetchingTaskQueue_ = new System.Collections.Generic.Queue <FrameFetchingTask>();
                frameFetchingThread_    = new Thread(new ThreadStart(delegate() {
                    while (runFrameFetching_)
                    {
                        Debug.Log("[ff-task-worker]: waiting for new tasks...");
                        // lock on semaphore / event
                        queueSem_.WaitOne();

                        // dequeue the next frame-fetching task
                        FrameFetchingTask ffTask = frameFetchingTaskQueue_.Dequeue();

                        Debug.Log("[ff-task-worker]: running task for " + ffTask.frameName_);
                        activeTasks_.Add(ffTask);
                        ffTask.run(delegate(FrameFetchingTask fft){
                            Debug.Log("[ff-task-worker]: task completed: " + fft.frameName_);
                            // cleanup when we are done
                            activeTasks_.Remove(fft);
                        });
                    } // while
                }));
                frameFetchingThread_.Start();
            }
        } catch (System.Exception e) {
            Debug.LogError("Error initializing NDN-RTC: " + e.Message);
        }
    }
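The worker thread above blocks on queueSem_ and dequeues from frameFetchingTaskQueue_; the enqueue side is not shown in these examples. A minimal sketch of what that side presumably looks like (the method name is hypothetical):

    // hypothetical enqueue path matching the worker loop above; note that Queue<T>
    // is not thread-safe, so production code would need a lock (or a ConcurrentQueue)
    // around both this Enqueue and the worker's Dequeue
    static void enqueueFrameFetchingTask(FrameFetchingTask task)
    {
        frameFetchingTaskQueue_.Enqueue(task);
        queueSem_.Release(); // wake the worker thread
    }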