// Verifies that every receiver on the answering peer reports exactly one
// associated MediaStream after an audio track has been negotiated.
public IEnumerator ReceiverGetStreams()
{
    var audioTrack = new AudioStreamTrack("audio");
    var stream = new MediaStream(WebRTC.Context.CreateMediaStream("audiostream"));
    stream.AddTrack(audioTrack);
    // Let the native side finish initializing before signaling starts.
    yield return(0);
    var test = new MonoBehaviourTest <SignalingPeers>();
    test.component.SetStream(stream);
    yield return(test);
    foreach (var receiver in test.component.GetReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }
    // Tear down native resources first, then Unity objects.
    test.component.Dispose();
    foreach (var track in stream.GetTracks())
    {
        track.Dispose();
    }
    stream.Dispose();
    // Fix: destroy the test harness GameObject; previously it leaked into
    // subsequent tests (the other ReceiverGetStreams variants clean it up).
    Object.DestroyImmediate(test.gameObject);
}
// Verifies that every receiver on the answering peer reports exactly one
// associated MediaStream after an audio track has been negotiated.
public IEnumerator ReceiverGetStreams()
{
    var track = new AudioStreamTrack();
    var sendStream = new MediaStream();
    sendStream.AddTrack(track);

    // Let the native side finish initializing before signaling starts.
    yield return(0);

    var test = new MonoBehaviourTest <SignalingPeers>();
    test.component.AddStream(0, sendStream);
    yield return(test);

    foreach (var receiver in test.component.GetPeerReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }

    // Tear down native resources first, then Unity objects.
    test.component.Dispose();
    foreach (var t in sendStream.GetTracks())
    {
        t.Dispose();
    }
    sendStream.Dispose();
    Object.DestroyImmediate(test.gameObject);
}
// Exercises MediaStream.AddTrack / RemoveTrack with a RenderTexture-backed
// video track, checking the reported track counts at each step.
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var renderTexture = new UnityEngine.RenderTexture(256, 256, 0, format);
    renderTexture.Create();

    var mediaStream = new MediaStream();
    var videoTrack = new VideoStreamTrack("video", renderTexture);

    // wait for the end of the initialization for encoder on the render thread.
    yield return(0);

    Assert.That(videoTrack.Kind, Is.EqualTo(TrackKind.Video));
    Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(0));

    Assert.That(mediaStream.AddTrack(videoTrack), Is.True);
    Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(mediaStream.GetVideoTracks(), Has.All.Not.Null);

    Assert.That(mediaStream.RemoveTrack(videoTrack), Is.True);
    Assert.That(mediaStream.GetVideoTracks(), Has.Count.EqualTo(0));

    videoTrack.Dispose();
    // wait for disposing video track.
    yield return(0);

    mediaStream.Dispose();
    Object.DestroyImmediate(renderTexture);
}
// Verifies that an AudioSource-backed audio track, once negotiated, yields
// exactly one stream on each receiver of the answering peer.
public IEnumerator ReceiverGetStreams()
{
    var audioObject = new GameObject("audio");
    var audioSource = audioObject.AddComponent <AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);

    var track = new AudioStreamTrack(audioSource);
    var sendStream = new MediaStream();
    sendStream.AddTrack(track);

    // Let the native side finish initializing before signaling starts.
    yield return(0);

    var test = new MonoBehaviourTest <SignalingPeers>();
    test.component.AddStream(0, sendStream);
    yield return(test);

    foreach (var receiver in test.component.GetPeerReceivers(1))
    {
        Assert.That(receiver.Streams, Has.Count.EqualTo(1));
    }

    // Tear down native resources first, then Unity objects.
    test.component.Dispose();
    foreach (var t in sendStream.GetTracks())
    {
        t.Dispose();
    }
    sendStream.Dispose();
    Object.DestroyImmediate(test.gameObject);
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
// Adds and removes a RenderTexture-backed video track on a MediaStream,
// checking the track counts with the classic assert API.
public IEnumerator MediaStreamAddTrack()
{
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var renderTexture = new UnityEngine.RenderTexture(256, 256, 0, format);
    renderTexture.Create();

    var mediaStream = new MediaStream();
    var videoTrack = new VideoStreamTrack("video", renderTexture);

    // Give the encoder time to initialize on the render thread.
    yield return(new WaitForSeconds(0.1f));

    Assert.AreEqual(TrackKind.Video, videoTrack.Kind);
    Assert.AreEqual(0, mediaStream.GetVideoTracks().Count());

    Assert.True(mediaStream.AddTrack(videoTrack));
    Assert.AreEqual(1, mediaStream.GetVideoTracks().Count());
    Assert.NotNull(mediaStream.GetVideoTracks().First());

    Assert.True(mediaStream.RemoveTrack(videoTrack));
    Assert.AreEqual(0, mediaStream.GetVideoTracks().Count());

    videoTrack.Dispose();
    // Give the track time to be released on the render thread.
    yield return(new WaitForSeconds(0.1f));

    mediaStream.Dispose();
    Object.DestroyImmediate(renderTexture);
}
// Ensures MediaStream.OnRemoveTrack fires on the remote peer after the
// sending peer removes its track and renegotiation completes.
public IEnumerator RemoteOnRemoveTrack()
{
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
    var peer1 = new RTCPeerConnection(ref config);
    var peer2 = new RTCPeerConnection(ref config);
    // Trickle ICE: forward each candidate straight to the other peer.
    peer1.OnIceCandidate = candidate => { peer2.AddIceCandidate(candidate); };
    peer2.OnIceCandidate = candidate => { peer1.AddIceCandidate(candidate); };

    var stream = new MediaStream();
    MediaStream receiveStream = null;
    var track = new AudioStreamTrack();
    stream.AddTrack(track);
    // Keep the sender so the track can be removed from peer1 later.
    RTCRtpSender sender = peer1.AddTrack(track, stream);

    bool isInvokeNegotiationNeeded1 = false;
    peer1.OnNegotiationNeeded = () => isInvokeNegotiationNeeded1 = true;

    bool isInvokeOnRemoveTrack = false;
    peer2.OnTrack = e =>
    {
        // The remote track must arrive associated with exactly one stream;
        // hook OnRemoveTrack on that stream to observe the removal.
        Assert.That(e.Streams, Has.Count.EqualTo(1));
        receiveStream = e.Streams.First();
        receiveStream.OnRemoveTrack = ev => isInvokeOnRemoveTrack = true;
    };

    // Initial offer/answer exchange so the track reaches peer2.
    yield return(SignalingOffer(peer1, peer2));

    peer1.RemoveTrack(sender);
    // Removing the track must trigger renegotiation on the sending side.
    var op9 = new WaitUntilWithTimeout(() => isInvokeNegotiationNeeded1, 5000);
    yield return(op9);
    Assert.That(op9.IsCompleted, Is.True);

    // Renegotiate so the removal propagates to peer2.
    yield return(SignalingOffer(peer1, peer2));

    var op10 = new WaitUntilWithTimeout(() => isInvokeOnRemoveTrack, 5000);
    yield return(op10);
    Assert.That(op10.IsCompleted, Is.True);

    stream.Dispose();
    // NOTE(review): receiveStream is assigned only inside OnTrack; if that
    // callback never fired, the op10 assertion above fails first, so this
    // dispose only runs when receiveStream is non-null.
    receiveStream.Dispose();
    track.Dispose();
    peer1.Dispose();
    peer2.Dispose();
}
// Adds a track to the receive stream, but only when the incoming
// connectionId matches the one this instance is bound to (an empty or null
// bound id means "accept tracks from any connection").
public void AddTrack(string connectionId, MediaStreamTrack track)
{
    bool boundToConnection = !string.IsNullOrEmpty(this.connectionId);
    if (boundToConnection && connectionId != this.connectionId)
    {
        return;
    }
    m_receiveStream.AddTrack(track);
}
// End-to-end video test: captures a camera track on one peer, negotiates it
// to the other, and pumps one frame through the receive-texture path.
public IEnumerator VideoReceive()
{
    var config = new RTCConfiguration
    {
        iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } }
    };
    var offerPeer = new RTCPeerConnection(ref config);
    var answerPeer = new RTCPeerConnection(ref config);
    var sendStream = new MediaStream();
    var receiveStream = new MediaStream();

    VideoStreamTrack receivedTrack = null;
    RenderTexture receivedTexture = null;
    receiveStream.OnAddTrack = e =>
    {
        if (e.Track is VideoStreamTrack videoTrack)
        {
            receivedTrack = videoTrack;
            receivedTexture = receivedTrack.InitializeReceiver(640, 320);
        }
    };
    answerPeer.OnTrack = e => receiveStream.AddTrack(e.Track);

    var camObj = new GameObject("Camera");
    var cam = camObj.AddComponent <Camera>();
    cam.backgroundColor = Color.red;
    var sendVideoTrack = cam.CaptureStreamTrack(1280, 720, 1000000);
    yield return(new WaitForSeconds(0.1f));

    offerPeer.AddTrack(sendVideoTrack, sendStream);
    answerPeer.AddTransceiver(TrackKind.Video);

    yield return(SignalingPeers(offerPeer, answerPeer));
    // Wait until the remote track exists and its decoder is ready.
    yield return(new WaitUntil(() => receivedTrack != null && receivedTrack.IsDecoderInitialized));
    Assert.NotNull(receivedTexture);

    // Push one frame through the encode side, then pull it on the receive side.
    sendVideoTrack.Update();
    yield return(new WaitForSeconds(0.1f));
    receivedTrack.UpdateReceiveTexture();
    yield return(new WaitForSeconds(0.1f));

    receivedTrack.Dispose();
    receiveStream.Dispose();
    sendVideoTrack.Dispose();
    sendStream.Dispose();
    answerPeer.Dispose();
    offerPeer.Dispose();
    Object.DestroyImmediate(receivedTexture);
}
// Builds the outgoing stream — a camera video track plus a looping stereo
// audio clip — and flips the UI into the "ready to call" state.
private void Setup()
{
    Debug.Log("Set up source/receive streams");

    sourceStream = new MediaStream();
    sourceStream.AddTrack(cam.CaptureStreamTrack(WebRTCSettings.StreamSize.x, WebRTCSettings.StreamSize.y));
    sourceImage.texture = cam.targetTexture;

    // Start local audio playback and capture it as a track.
    sourceAudio.clip = audioclipStereoSample;
    sourceAudio.loop = true;
    sourceAudio.Play();
    sourceStream.AddTrack(new AudioStreamTrack(sourceAudio));

    startButton.interactable = false;
    callButton.interactable = true;
}
// Verifies that OnAddTrack / OnRemoveTrack fire when a video track is added
// to and removed from a MediaStream.
public IEnumerator VideoStreamAddTrackAndRemoveTrack()
{
    var width = 256;
    var height = 256;
    var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
    var rt = new UnityEngine.RenderTexture(width, height, 0, format);
    rt.Create();
    var stream = new MediaStream();
    var track = new VideoStreamTrack(rt);

    bool isCalledOnAddTrack = false;
    bool isCalledOnRemoveTrack = false;
    stream.OnAddTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnAddTrack = true;
    };
    stream.OnRemoveTrack = e =>
    {
        Assert.That(e.Track, Is.EqualTo(track));
        isCalledOnRemoveTrack = true;
    };

    // wait for the end of the initialization for encoder on the render thread.
    yield return(0);

    Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
    Assert.That(stream.AddTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
    Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
    Assert.That(stream.RemoveTrack(track), Is.True);
    Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

    var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);
    yield return(op1);
    // Fix: assert the waits actually completed. Previously a timeout was
    // silently ignored, so the test passed even if the callbacks never fired.
    Assert.That(op1.IsCompleted, Is.True);
    var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);
    yield return(op2);
    Assert.That(op2.IsCompleted, Is.True);

    track.Dispose();
    stream.Dispose();
    Object.DestroyImmediate(rt);
}
// Checks AddTrack/RemoveTrack bookkeeping for an audio track created
// directly from a label string, using the classic assert API.
public void AddAndRemoveAudioStreamTrack()
{
    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack("audio");

    Assert.AreEqual(TrackKind.Audio, audioTrack.Kind);
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    Assert.True(mediaStream.AddTrack(audioTrack));
    Assert.AreEqual(1, mediaStream.GetAudioTracks().Count());
    Assert.NotNull(mediaStream.GetAudioTracks().First());

    Assert.True(mediaStream.RemoveTrack(audioTrack));
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    audioTrack.Dispose();
    mediaStream.Dispose();
}
// Checks AddTrack/RemoveTrack bookkeeping for an audio track created
// directly from a label string, using the constraint assert model.
public void AddAndRemoveAudioStreamTrack()
{
    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack("audio");

    Assert.That(TrackKind.Audio, Is.EqualTo(audioTrack.Kind));
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(0));

    Assert.That(mediaStream.AddTrack(audioTrack), Is.True);
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(1));
    Assert.That(mediaStream.GetAudioTracks(), Has.All.Not.Null);

    Assert.That(mediaStream.RemoveTrack(audioTrack), Is.True);
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(0));

    audioTrack.Dispose();
    mediaStream.Dispose();
}
// Callback invoked once the ICE server list is known: creates the
// PeerConnectionFactory and PeerConnection, schedules a periodic stats
// logger, and wires up the local audio/video stream.
public void onIceServers(IList <PeerConnection.IceServer> iceServers)
{
    factory = new PeerConnectionFactory();
    pc = factory.CreatePeerConnection(iceServers, appRtcClient.pcConstraints(), pcObserver);

    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //   "logcat:",
    //   EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //   Logging.Severity.LS_SENSITIVE);

    {
        PeerConnection finalPC = pc;
        //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
        //ORIGINAL LINE: final Runnable repeatedStatsLogger = new Runnable()
        IRunnable repeatedStatsLogger = new RunnableAnonymousInnerClassHelper(this, finalPC);
        // Log connection stats every 10 seconds via the video view's handler.
        vsv.PostDelayed(repeatedStatsLogger, 10000);
    }

    {
        logAndToast("Creating local video source...");
        MediaStream lMS = factory.CreateLocalMediaStream("ARDAMS");
        if (appRtcClient.videoConstraints() != null)
        {
            // Video is optional: only capture when the session requests it.
            VideoCapturer capturer = VideoCapturer;
            videoSource = factory.CreateVideoSource(capturer, appRtcClient.videoConstraints());
            VideoTrack videoTrack = factory.CreateVideoTrack("ARDAMSv0", videoSource);
            // Render the local preview while also sending the track.
            videoTrack.AddRenderer(new VideoRenderer(new VideoCallbacks(this, vsv, VideoStreamsView.Endpoint.LOCAL)));
            lMS.AddTrack(videoTrack);
        }
        // The audio track is always added, regardless of video constraints.
        lMS.AddTrack(factory.CreateAudioTrack("ARDAMSa0"));
        pc.AddStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
}
// Checks AddTrack/RemoveTrack bookkeeping for an audio track backed by an
// AudioSource component, using the classic assert API.
public void AddAndRemoveAudioStreamTrack()
{
    GameObject audioObject = new GameObject("audio");
    AudioSource audioSource = audioObject.AddComponent <AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);

    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack(audioSource);

    Assert.AreEqual(TrackKind.Audio, audioTrack.Kind);
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    Assert.True(mediaStream.AddTrack(audioTrack));
    Assert.AreEqual(1, mediaStream.GetAudioTracks().Count());
    Assert.NotNull(mediaStream.GetAudioTracks().First());

    Assert.True(mediaStream.RemoveTrack(audioTrack));
    Assert.AreEqual(0, mediaStream.GetAudioTracks().Count());

    audioTrack.Dispose();
    mediaStream.Dispose();
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
// Checks AddTrack/RemoveTrack bookkeeping for an audio track backed by an
// AudioSource component, using the constraint assert model.
public void AddAndRemoveAudioTrack()
{
    var audioObject = new GameObject("audio");
    var audioSource = audioObject.AddComponent <AudioSource>();
    audioSource.clip = AudioClip.Create("test", 480, 2, 48000, false);

    var mediaStream = new MediaStream();
    var audioTrack = new AudioStreamTrack(audioSource);

    Assert.That(TrackKind.Audio, Is.EqualTo(audioTrack.Kind));
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(0));

    Assert.That(mediaStream.AddTrack(audioTrack), Is.True);
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(1));
    Assert.That(mediaStream.GetAudioTracks(), Has.All.Not.Null);

    Assert.That(mediaStream.RemoveTrack(audioTrack), Is.True);
    Assert.That(mediaStream.GetAudioTracks(), Has.Count.EqualTo(0));

    audioTrack.Dispose();
    mediaStream.Dispose();
    Object.DestroyImmediate(audioSource.clip);
    Object.DestroyImmediate(audioObject);
}
// UI handler for the Start button: locks the resolution/framerate dropdowns
// and lazily creates the send and receive streams.
private void OnStart()
{
    startButton.interactable = false;
    callButton.interactable = true;
    dropDownResolution.interactable = false;
    dropDownFramerate.interactable = false;

    if (sendStream == null)
    {
        Vector2Int resolution = listResolution[dropDownResolution.value];
        var format = WebRTC.GetSupportedGraphicsFormat(SystemInfo.graphicsDeviceType);

        // Source texture shown locally; destTexture feeds the outgoing track.
        var sourceTexture = new RenderTexture(resolution.x, resolution.y, 0, format);
        sourceTexture.Create();
        destTexture = new RenderTexture(resolution.x, resolution.y, 0, format);
        destTexture.Create();

        sourceImage.texture = sourceTexture;
        sourceImage.color = Color.white;

        sendStream = new MediaStream();
        sendStream.AddTrack(new VideoStreamTrack(destTexture));
    }

    if (receiveStream == null)
    {
        receiveStream = new MediaStream();
        receiveStream.OnAddTrack = e =>
        {
            if (e.Track is VideoStreamTrack videoTrack)
            {
                // Show incoming frames once the first texture arrives.
                videoTrack.OnVideoReceived += texture =>
                {
                    receiveImage.texture = texture;
                    receiveImage.color = Color.white;
                    videoUpdateStarted = true;
                };
            }
        };
    }
}
// Moves a negotiated receiver track into a second MediaStream, waits for
// OnAddTrack to fire, then removes and disposes every track.
public IEnumerator AddAndRemoveTrack()
{
    var cameraObject = new GameObject("Camera");
    var captureCamera = cameraObject.AddComponent <Camera>();
    var sourceStream = captureCamera.CaptureStream(1280, 720);
    var destStream = new MediaStream();

    var test = new MonoBehaviourTest <SignalingPeers>();
    test.component.AddStream(0, sourceStream);
    yield return(test);
    test.component.CoroutineUpdate();
    yield return(new WaitUntil(() => test.component.NegotiationCompleted()));

    bool calledOnAddTrack = false;
    destStream.OnAddTrack = e =>
    {
        Assert.That(e.Track, Is.Not.Null);
        calledOnAddTrack = true;
    };

    destStream.AddTrack(test.component.GetPeerReceivers(1).First().Track);
    yield return(new WaitUntil(() => calledOnAddTrack));

    // Snapshot the track list so removal doesn't mutate what we iterate.
    foreach (var track in destStream.GetTracks().ToArray())
    {
        destStream.RemoveTrack(track);
        track.Dispose();
    }

    Object.DestroyImmediate(cameraObject);
    Object.DestroyImmediate(test.gameObject);
    yield return(new WaitForSeconds(0.1f));
}
// Adds the given audio track's native representation to the wrapped stream.
public void AddTrack(IAudioTrack audioTrack) =>
    _mediaStream.AddTrack(audioTrack.ToNative <AudioTrack>());
// Smoke test for peer connection stats: negotiates an audio+video stream
// between two peers and sanity-checks every entry in the resulting report.
public IEnumerator GetStatsReturnsReport()
{
    // Known platform issue; skip rather than hang the whole run.
    if (SystemInfo.processorType == "Apple M1")
    {
        Assert.Ignore("todo:: This test will hang up on Apple M1");
    }

    var stream = new MediaStream();
    var go = new GameObject("Test");
    var cam = go.AddComponent <Camera>();
    stream.AddTrack(cam.CaptureStreamTrack(1280, 720, 0));
    var source = go.AddComponent <AudioSource>();
    source.clip = AudioClip.Create("test", 480, 2, 48000, false);
    stream.AddTrack(new AudioStreamTrack(source));
    // Give the capture/encoder a moment to initialize on the render thread.
    yield return(new WaitForSeconds(0.1f));

    var test = new MonoBehaviourTest <SignalingPeers>();
    test.component.AddStream(0, stream);
    yield return(test);
    test.component.CoroutineUpdate();
    yield return(new WaitForSeconds(0.1f));

    // Fetch the stats report from peer 0 and verify it is populated.
    var op = test.component.GetPeerStats(0);
    yield return(op);
    Assert.That(op.IsDone, Is.True);
    Assert.That(op.Value.Stats, Is.Not.Empty);
    Assert.That(op.Value.Stats.Keys, Is.Not.Empty);
    Assert.That(op.Value.Stats.Values, Is.Not.Empty);
    Assert.That(op.Value.Stats.Count, Is.GreaterThan(0));

    // Every entry must carry an id, a positive timestamp, and non-empty
    // member keys; StatsCheck.Test applies per-type validation.
    foreach (RTCStats stats in op.Value.Stats.Values)
    {
        Assert.That(stats, Is.Not.Null);
        Assert.That(stats.Timestamp, Is.GreaterThan(0));
        Assert.That(stats.Id, Is.Not.Empty);
        foreach (var pair in stats.Dict)
        {
            Assert.That(pair.Key, Is.Not.Empty);
        }
        StatsCheck.Test(stats);
    }
    op.Value.Dispose();

    // Tear down native resources first, then Unity objects.
    test.component.Dispose();
    foreach (var track in stream.GetTracks())
    {
        track.Dispose();
    }
    stream.Dispose();
    Object.DestroyImmediate(go);
    Object.DestroyImmediate(test.gameObject);
}