Example #1
        protected virtual void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

            // Create the external source
            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }
            if (Source == null)
            {
                throw new Exception("Failed to create external video track source.");
            }

            VideoStreamStarted.Invoke(Source);
        }
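
Example #1 passes an OnFrameRequested method to CreateFromI420ACallback but does not show its body. The sketch below is a minimal, hypothetical callback that completes each request with a solid grey I420 frame. The FrameRequest, I420AVideoFrame and CompleteRequest names follow the Microsoft.MixedReality.WebRTC API; the sketch assumes a using System; directive and that unsafe code is enabled for the fixed blocks, so verify the field names against the package version in use.

        // Hypothetical sketch - not part of the original example.
        // Requires unsafe code (the fixed blocks pin the managed plane buffers).
        protected unsafe void OnFrameRequested(in FrameRequest request)
        {
            const int frameWidth = 640;
            const int frameHeight = 480;

            // Solid mid-grey I420 frame: Y = 128, U = V = 128, no alpha plane.
            var yPlane = new byte[frameWidth * frameHeight];
            var uPlane = new byte[(frameWidth / 2) * (frameHeight / 2)];
            var vPlane = new byte[(frameWidth / 2) * (frameHeight / 2)];
            for (int i = 0; i < yPlane.Length; ++i) { yPlane[i] = 128; }
            for (int i = 0; i < uPlane.Length; ++i) { uPlane[i] = 128; vPlane[i] = 128; }

            fixed (byte* py = yPlane)
            fixed (byte* pu = uPlane)
            fixed (byte* pv = vPlane)
            {
                var frame = new I420AVideoFrame
                {
                    width = frameWidth,
                    height = frameHeight,
                    dataY = (IntPtr)py,
                    dataU = (IntPtr)pu,
                    dataV = (IntPtr)pv,
                    dataA = IntPtr.Zero, // no alpha plane
                    strideY = frameWidth,
                    strideU = frameWidth / 2,
                    strideV = frameWidth / 2,
                    strideA = 0
                };

                // Deliver the frame for this request; the frame data is assumed to be
                // consumed before CompleteRequest returns, so the fixed scope suffices.
                request.CompleteRequest(frame);
            }
        }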
Example #2
        protected override Task CreateLocalVideoTrackAsync()
        {
            // Ensure the track has a valid name
            string trackName = TrackName;

            if (string.IsNullOrEmpty(trackName))
            {
                // Generate a unique name (GUID)
                trackName = Guid.NewGuid().ToString();
                TrackName = trackName;
            }
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the external source
            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }
            if (Source == null)
            {
                throw new Exception("Failed to create external video track source.");
            }

            // Create the local video track
            Track = LocalVideoTrack.CreateFromExternalSource(trackName, Source);
            if (Track == null)
            {
                throw new Exception("Failed ot create webcam video track.");
            }

            // Synchronize the track status with the Unity component status
            Track.Enabled = enabled;

            // This implementation is fast, so it executes synchronously.
            return Task.CompletedTask;
        }
Example #3
        protected virtual void OnEnable()
        {
            Debug.Assert(Source == null);

            // Create the external source
            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                AttachSource(ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested));
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                AttachSource(ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested));
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }
        }
Example #4
        /// <summary>
        /// Add a new track to the peer connection and start the video track playback.
        /// </summary>
        public void StartTrack()
        {
            // Ensure the track has a valid name
            string trackName = TrackName;

            if (string.IsNullOrEmpty(trackName))
            {
                // Generate a unique name (GUID)
                trackName = Guid.NewGuid().ToString();
                TrackName = trackName;
            }
            //SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            var nativePeer = PeerConnection;

            // Create the external source
            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }

            // Create the local video track
            if (Source != null)
            {
                Track = nativePeer.AddCustomLocalVideoTrack(trackName, Source);
                if (Track != null)
                {
                    NotifyVideoStreamStarted();
                }
            }
        }
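
A matching teardown is not part of the original example. The sketch below is a hypothetical StopTrack counterpart for this 1.0-style API; it assumes PeerConnection.RemoveLocalVideoTrack is available in the version being used (later versions replace per-track add/remove with transceivers, as in Example #6).

        // Hypothetical counterpart to StartTrack() above - not part of the original example.
        public void StopTrack()
        {
            var nativePeer = PeerConnection;

            if (Track != null)
            {
                // Assumes the 1.0-era RemoveLocalVideoTrack API paired with
                // AddCustomLocalVideoTrack; adapt to transceivers on later versions.
                nativePeer.RemoveLocalVideoTrack(Track);
                Track.Dispose();
                Track = null;
            }

            if (Source != null)
            {
                Source.Dispose();
                Source = null;
            }
        }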
Example #5
        public async Task Init(IceServerModel[] iceServers)
        {
            Logger.Debug("Starting WebRTC connection.");

            IceServers = iceServers;

            PeerSession = new PeerConnection();

            var iceList = IceServers.Select(x => new IceServer()
            {
                Urls         = { x.Url },
                TurnPassword = x.TurnPassword ?? string.Empty,
                TurnUserName = x.TurnUsername ?? string.Empty
            }).ToList();

            var config = new PeerConnectionConfiguration()
            {
                IceServers = iceList
            };

            await PeerSession.InitializeAsync(config);

            PeerSession.LocalSdpReadytoSend     += PeerSession_LocalSdpReadytoSend;
            PeerSession.Connected               += PeerConnection_Connected;
            PeerSession.IceStateChanged         += PeerConnection_IceStateChanged;
            PeerSession.IceCandidateReadytoSend += PeerSession_IceCandidateReadytoSend;

            CaptureChannel = await PeerSession.AddDataChannelAsync("ScreenCapture", true, true);

            CaptureChannel.BufferingChanged += DataChannel_BufferingChanged;
            CaptureChannel.MessageReceived  += CaptureChannel_MessageReceived;
            CaptureChannel.StateChanged     += CaptureChannel_StateChanged;

            VideoSource = ExternalVideoTrackSource.CreateFromArgb32Callback(GetCaptureFrame);
            Transceiver = PeerSession.AddTransceiver(MediaKind.Video);

            PeerSession.CreateOffer();
        }
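
Example #5 registers a GetCaptureFrame callback that is not shown. The sketch below is a hypothetical implementation that completes each request with a blank ARGB32 buffer; the FrameRequest and Argb32VideoFrame names follow the Microsoft.MixedReality.WebRTC API, and a using System.Runtime.InteropServices; directive is assumed for GCHandle, so verify the field names against the package version in use. Also note that the example creates the source and a video transceiver but does not attach a track yet; that is typically done later by creating a LocalVideoTrack from VideoSource and assigning it to Transceiver.LocalVideoTrack, as Example #6 demonstrates.

        // Hypothetical sketch - not part of the original example. A real implementation
        // would copy the most recently captured screen frame instead of a blank buffer.
        private void GetCaptureFrame(in FrameRequest request)
        {
            const int frameWidth = 1920;
            const int frameHeight = 1080;

            // ARGB32: 4 bytes per pixel, tightly packed rows.
            var pixels = new byte[frameWidth * frameHeight * 4];

            // Pin the managed buffer so its address can be passed to the native layer.
            var handle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
            try
            {
                var frame = new Argb32VideoFrame
                {
                    width = frameWidth,
                    height = frameHeight,
                    stride = frameWidth * 4,
                    data = handle.AddrOfPinnedObject()
                };
                request.CompleteRequest(frame);
            }
            finally
            {
                handle.Free();
            }
        }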
Example #6
        public void SimpleExternalArgb32()
        {
            // Connect
            StartOfferWith(pc1_);
            WaitForTransportsWritable();
            Assert.True(pc1_.IsConnected);
            Assert.True(pc2_.IsConnected);
            WaitForSdpExchangeCompleted();

            // No track yet
            Assert.AreEqual(0, pc1_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc1_.RemoteVideoTracks.Count());
            Assert.AreEqual(0, pc2_.LocalVideoTracks.Count());
            Assert.AreEqual(0, pc2_.RemoteVideoTracks.Count());

            // Create external ARGB32 source
            var source1 = ExternalVideoTrackSource.CreateFromArgb32Callback(
                VideoTrackSourceTests.CustomArgb32FrameCallback);

            Assert.NotNull(source1);
            Assert.AreEqual(0, source1.Tracks.Count());

            // Add video transceiver #1
            renegotiationEvent1_.Reset();
            remoteDescAppliedEvent1_.Reset();
            remoteDescAppliedEvent2_.Reset();
            Assert.IsFalse(videoTrackAddedEvent2_.IsSet);
            var transceiver_settings = new TransceiverInitSettings
            {
                Name = "transceiver1",
            };
            var transceiver1 = pc1_.AddTransceiver(MediaKind.Video, transceiver_settings);

            Assert.NotNull(transceiver1);
            Assert.IsNull(transceiver1.LocalTrack);
            Assert.IsNull(transceiver1.RemoteTrack);
            Assert.AreEqual(pc1_, transceiver1.PeerConnection);

            // Wait for renegotiation
            Assert.True(renegotiationEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(videoTrackAddedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent1_.Wait(TimeSpan.FromSeconds(60.0)));
            Assert.True(remoteDescAppliedEvent2_.Wait(TimeSpan.FromSeconds(60.0)));
            WaitForSdpExchangeCompleted();

            // Create external ARGB32 track
            var track_config1 = new LocalVideoTrackInitConfig
            {
                trackName = "custom_argb32"
            };
            var track1 = LocalVideoTrack.CreateFromSource(source1, track_config1);

            Assert.NotNull(track1);
            Assert.AreEqual(source1, track1.Source);
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.IsFalse(pc1_.LocalVideoTracks.Contains(track1));

            // Set track on transceiver
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = track1;
            Assert.AreEqual(pc1_, track1.PeerConnection);
            Assert.IsTrue(pc1_.LocalVideoTracks.Contains(track1));
            Assert.IsFalse(renegotiationEvent1_.IsSet); // renegotiation not needed

            // Remove the track from #1
            renegotiationEvent1_.Reset();
            transceiver1.LocalVideoTrack = null;
            Assert.IsNull(track1.PeerConnection);
            Assert.IsNull(track1.Transceiver);
            Assert.IsFalse(pc1_.LocalVideoTracks.Contains(track1));

            // Dispose of the track and its source
            track1.Dispose();
            track1 = null;
            source1.Dispose();
            source1 = null;

            // On peer #1 the track was replaced on the transceiver, but the transceiver stays
            // on the peer connection, so no renegotiation is needed.
            Assert.IsFalse(renegotiationEvent1_.IsSet);
        }