/**
 * Will call callbacks on current thread
 */
public FrameFetchingTask(string frameName, LocalVideoStream stream,
                         OnFrameFetched onFrameFetched, OnFrameFetchFailure onFrameFetchFailure)
{
    frameName_ = frameName;
    stream_ = stream;
    onFrameFetched_ = onFrameFetched;
    onFrameFetchFailure_ = onFrameFetchFailure;
}
public static void fetch(string frameName, LocalVideoStream stream,
                         OnFrameFetched onFrameFetched, OnFrameFetchFailure onFrameFetchFailure)
{
    frameFetchingTaskQueue_.Enqueue(new FrameFetchingTask(frameName, stream, onFrameFetched, onFrameFetchFailure));
    Debug.Log("[ff-task]: enqueued task for " + frameName + ". queue size " + frameFetchingTaskQueue_.Count);
    queueSem_.Release();
}
public void Hangup()
{
    _call.RemoteParticipantsUpdated -= _call_RemoteParticipantsUpdated;
    Com.Laerdal.Azurecommunicationhelper.CallClientHelper.HangUp(_call, new HangUpOptions());
    _localRenderer?.Dispose();
    _localRenderer = null;
    _localVideoStream?.Dispose();
    _localVideoStream = null;
    _remoteVideoStreams.Clear();
    _call.Dispose();
    _call = null;
}
public void fetch(string frameName, LocalVideoStream stream,
                  OnFrameFetched onFrameFetched, OnFrameFetchFailure onFrameFetchFailure)
{
    onFrameFetched_ = onFrameFetched;
    onFrameFetchFailure_ = onFrameFetchFailure;
    // Store the marshalled callbacks in fields, then hand them to the native frame fetcher.
    bufferAllocDelegate = new FrameFetcherBufferAlloc(bufferAllocate);
    frameFetchedDelegate = new FrameFetcherFrameFetched(frameFetched);
    NdnRtcWrapper.ndnrtc_FrameFetcher_fetch(stream.getHandle(), frameName,
                                            bufferAllocDelegate, frameFetchedDelegate);
}
public void Hangup()
{
    _call.RemoteParticipantsUpdated -= _call_RemoteParticipantsUpdated;
    var hangupOptions = new HangupOptions();
    _call.Hangup(hangupOptions).Get();
    _localRenderer?.Dispose();
    _localRenderer = null;
    _localVideoStream?.Dispose();
    _localVideoStream = null;
    _remoteVideoStreams.Clear();
    _call.Dispose();
    _call = null;
}
public void CallEchoService()
{
    var callOptions = new StartCallOptions();
    var camera = _deviceManager.CameraList.First(c => c.CameraFacing == CameraFacing.Front);
    callOptions.AudioOptions = new AudioOptions();
    //callOptions.AudioOptions.Muted = true;
    var localVideoStream = new LocalVideoStream(camera, MainActivity.Instance);
    callOptions.VideoOptions = new VideoOptions(localVideoStream);
    var receivers = new CommunicationIdentifier[] { new CommunicationUserIdentifier("8:echo123") };
    _call = _callAgent.Call(MainActivity.Instance, receivers, callOptions);
}
public void CallEchoService()
{
    var callOptions = new StartCallOptions();
    var camera = _deviceManager.Cameras.First(c => c.CameraFacing == CameraFacing.Front);
    callOptions.SetAudioOptions(new AudioOptions());
    //callOptions.AudioOptions.Muted = true;
    var localVideoStream = new LocalVideoStream(camera, MainActivity.Instance);
    callOptions.SetVideoOptions(new VideoOptions(new LocalVideoStream[] { localVideoStream }));
    var receivers = new CommunicationIdentifier[] { new CommunicationUserIdentifier("8:echo123") };
    // TODO:
    //_callAgent.StartCall();
    //var locator = new GroupCallLocator()
    //_call = _callAgent.Join(MainActivity.Instance, receivers, callOptions);
}
public Task JoinGroup(Guid groupID)
{
    // Capture the front camera and start rendering the local preview.
    var camera = _deviceManager.Cameras.First(c => c.CameraFacing == CameraFacing.Front);
    _localVideoStream = new LocalVideoStream(camera, MainActivity.Instance);
    _localRenderer = new VideoStreamRenderer(_localVideoStream, MainActivity.Instance);
    var renderingOptions = new CreateViewOptions(ScalingMode.Crop);
    var nativeView = _localRenderer.CreateView(renderingOptions);
    var formsView = nativeView.ToView();
    LocalVideoAdded?.Invoke(this, formsView);

    // Join the group call with the local video stream attached.
    var groupCallLocator = new GroupCallLocator(UUID.FromString(groupID.ToString()));
    var videoOptions = new VideoOptions(new LocalVideoStream[] { _localVideoStream });
    var joinCallOptions = new JoinCallOptions();
    joinCallOptions.SetVideoOptions(videoOptions);
    _call = _callAgent.Join(Application.Context, groupCallLocator, joinCallOptions);
    _call.RemoteParticipantsUpdated += _call_RemoteParticipantsUpdated;

    return Task.CompletedTask;
}
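// Hedged usage sketch (not part of the source): driving JoinGroup/Hangup from UI code.
// "callService" and "videoHost" are hypothetical names; only JoinGroup, Hangup and the
// LocalVideoAdded event come from the methods shown above.
async Task StartGroupCallAsync(Guid groupId)
{
    // Show the local camera preview as soon as the service raises it.
    callService.LocalVideoAdded += (sender, view) => videoHost.Content = view;
    await callService.JoinGroup(groupId);
}

void EndCall()
{
    // Tears down renderers, streams and the call object (see Hangup above).
    callService.Hangup();
}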
public static void Initialize(string signingIdentity, string instanceId)
{
    if (libraryCallbackDelegate == null)
    {
        libraryCallbackDelegate = new NdnRtcLibLogHandler(ndnrtcLogHandler);
    }

    bool res;
    try
    {
        res = NdnRtcWrapper.ndnrtc_init("localhost", Application.persistentDataPath,
                                        signingIdentity, instanceId, libraryCallbackDelegate);
        if (res)
        {
            LocalStreamParams p = new LocalStreamParams();
            p.basePrefix = signingIdentity + "/" + instanceId;
            p.signingOn = 1;
            p.dropFrames = 1;
            p.fecOn = 1;
            p.frameHeight = 180;
            p.frameWidth = 320;
            p.gop = 30;
            p.startBitrate = 300;
            p.maxBitrate = 7000;
            p.ndnDataFreshnessPeriodMs = 2000;
            p.ndnSegmentSize = 8000;
            p.typeIsVideo = 1;
            p.streamName = "back_camera";
            p.threadName = "vp9";
            videoStream = new LocalVideoStream(p);
        }
    }
    catch (System.Exception e)
    {
        Debug.LogError("Error initializing NDN-RTC: " + e.Message);
    }
}
/// <summary>
/// Streams the requested media file as an HTTP response.
/// </summary>
/// <param name="filename">Name of the media file, without extension.</param>
/// <param name="ext">File extension, used to build the "video/*" content type.</param>
/// <returns>An HTTP response whose body is pushed asynchronously from the video stream.</returns>
public HttpResponseMessage Get(string filename, string ext)
{
    IVideoStream video = default(IVideoStream);

    // *************************************************************************************************************************
    // If BLOB storage is not used, keep the code below and stream from the local "Media" folder.
    // *************************************************************************************************************************
    string startupPath = Path.Combine(Environment.CurrentDirectory, "Media");
    video = new LocalVideoStream(startupPath, filename, ext);

    // *************************************************************************************************************************
    // If you want to use BLOB storage, comment out the code above and uncomment the code below.
    // NOTE: you may have to change the URL of the file - depending on your setup in the BLOB project
    // *************************************************************************************************************************
    //string fileUri = "http://127.0.0.1:10000/devstoreaccount1/democontainerblockblob/polina.webm";
    //video = new RemoteVideoStream(new Uri(fileUri));

    HttpResponseMessage response = Request.CreateResponse();
    response.Content = new PushStreamContent((x, y, z) => video.WriteToStream(x),
                                             new MediaTypeHeaderValue("video/" + ext));
    return response;
}
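// Hedged sketch (not part of the source): a minimal IVideoStream contract consistent with the
// PushStreamContent callback above ((x, y, z) => video.WriteToStream(x)). The actual interface
// and the LocalVideoStream/RemoteVideoStream implementations in the original project may declare
// additional members. Stream and Task come from System.IO and System.Threading.Tasks.
public interface IVideoStream
{
    // Writes the media bytes to the outgoing HTTP response stream.
    Task WriteToStream(Stream outputStream);
}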
public static void Initialize(string signingIdentity, string instanceId)
{
    if (libraryCallbackDelegate == null)
    {
        libraryCallbackDelegate = new NdnRtcLibLogHandler(ndnrtcLogHandler);
    }

    bool res;
    try
    {
        string version = Marshal.PtrToStringAnsi(NdnRtcWrapper.ndnrtc_getVersion());
        Debug.Log("NDN-RTC version " + version);

        res = NdnRtcWrapper.ndnrtc_init("localhost", Application.persistentDataPath,
                                        signingIdentity, instanceId, libraryCallbackDelegate);
        if (res)
        {
            LocalStreamParams p = new LocalStreamParams();
            p.basePrefix = signingIdentity + "/" + instanceId;
            p.signingOn = 1;
            p.dropFrames = 1;
            p.fecOn = 1;
            p.frameHeight = 180;
            p.frameWidth = 320;
            p.gop = 30;
            p.startBitrate = 300;
            p.maxBitrate = 7000;
            p.ndnSegmentSize = 8000;
            p.typeIsVideo = 1;
            p.streamName = "back_camera";
            p.threadName = "vp9";
            p.storagePath = Application.persistentDataPath + "/ndnrtc_storage";
            videoStream = new LocalVideoStream(p);

            // Start the frame-fetching worker: tasks are enqueued by fetch() and consumed here.
            runFrameFetching_ = true;
            queueSem_ = new Semaphore(0, 30); // up to 30 requests. why not?...
            activeTasks_ = new HashSet<FrameFetchingTask>();
            frameFetchingTaskQueue_ = new System.Collections.Generic.Queue<FrameFetchingTask>();
            frameFetchingThread_ = new Thread(new ThreadStart(delegate ()
            {
                while (runFrameFetching_)
                {
                    Debug.Log("[ff-task-worker]: waiting for new tasks...");
                    // lock on semaphore / event
                    queueSem_.WaitOne();
                    // dequeue
                    FrameFetchingTask ffTask = frameFetchingTaskQueue_.Dequeue();
                    Debug.Log("[ff-task-worker]: running task for " + ffTask.frameName_);

                    activeTasks_.Add(ffTask);
                    ffTask.run(delegate (FrameFetchingTask fft)
                    {
                        Debug.Log("[ff-task-worker]: task completed: " + fft.frameName_);
                        // cleanup when we are done
                        activeTasks_.Remove(fft);
                    });
                } // while
            }));
            frameFetchingThread_.Start();
        }
    }
    catch (System.Exception e)
    {
        Debug.LogError("Error initializing NDN-RTC: " + e.Message);
    }
}
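// Hedged usage sketch (not part of the source): bootstrapping the library from a Unity script.
// "NdnRtc" is assumed to be the static class that hosts Initialize()/fetch() above; the signing
// identity and instance name are placeholders, not values from the original project.
using UnityEngine;

public class NdnRtcBootstrap : MonoBehaviour
{
    void Start()
    {
        // Creates the local publisher stream and starts the frame-fetching worker thread.
        NdnRtc.Initialize("/ndn/example/user", "clientA");
    }
}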