Example 1
        public async Task AddAudioTrackFromDeviceAsync(string trackName)
        {
            const string DefaultAudioDeviceName = "Default audio device";

            await RequestMediaAccessAsync(StreamingCaptureMode.Audio);

            // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
            var initConfig = new LocalAudioDeviceInitConfig();
            var source     = await AudioTrackSource.CreateFromDeviceAsync(initConfig);

            var settings = new LocalAudioTrackInitConfig
            {
                trackName = trackName
            };
            var track = LocalAudioTrack.CreateFromSource(source, settings);

            SessionModel.Current.AudioTracks.Add(new AudioTrackViewModel
            {
                Source     = source,
                Track      = track,
                TrackImpl  = track,
                IsRemote   = false,
                DeviceName = DefaultAudioDeviceName
            });
            SessionModel.Current.LocalTracks.Add(new TrackViewModel(Symbol.Volume)
            {
                DisplayName = DefaultAudioDeviceName
            });
        }
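A note on the FIXME above: both LocalAudioTrack and AudioTrackSource implement IDisposable in MixedReality-WebRTC, so a removal path could release them when the track is taken out of the session. A minimal sketch, assuming the AudioTrackViewModel members used above and a hypothetical removal method that is not part of the original sample:

        public void RemoveAudioTrack(AudioTrackViewModel audioTrackViewModel)
        {
            SessionModel.Current.AudioTracks.Remove(audioTrackViewModel);

            // Dispose the track first, then the source it was created from.
            // The casts keep the sketch valid regardless of how the view model types these members.
            (audioTrackViewModel.Track as IDisposable)?.Dispose();
            (audioTrackViewModel.Source as IDisposable)?.Dispose();
        }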
Example 2
        public async Task Initialization(string user)
        {
            try
            {
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await Connection.InitializeAsync(config);

                microphoneSource = await DeviceAudioTrackSource.CreateAsync();

                var audioTrackConfig = new LocalAudioTrackInitConfig {
                    trackName = "microphone_track"
                };
                localAudioTrack  = LocalAudioTrack.CreateFromSource(microphoneSource, audioTrackConfig);
                audioTransceiver = Connection.AddTransceiver(MediaKind.Audio);
                audioTransceiver.LocalAudioTrack  = localAudioTrack;
                audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;

                Console.WriteLine("Peer connection initialized.");
            }
            catch (Exception e)
            {
                await Log.WriteAsync(e.Message);

                Console.WriteLine(e);
                throw;
            }
        }
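This method relies on members declared elsewhere on the enclosing class. A minimal sketch of that assumed state, using the names from the snippet (the Log helper is whatever logging abstraction the project uses and is omitted here):

        private readonly PeerConnection Connection = new PeerConnection();
        private AudioTrackSource microphoneSource;
        private LocalAudioTrack  localAudioTrack;
        private Transceiver      audioTransceiver;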
Example 3
 public async Task CreateFromDevice()
 {
     using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(string.Empty, source.Name);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }
Example 4
        public async Task Name()
        {
            using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                const string kTestName = "test_audio_track_source_name";
                source.Name = kTestName;
                Assert.AreEqual(kTestName, source.Name);
            }
        }
Example 5
        public void EqualIdWithAudioTrack()
        {
            var guid   = Guid.NewGuid().ToString();
            var source = new AudioTrackSource();
            var track  = new AudioStreamTrack(WebRTC.Context.CreateAudioTrack(guid, source.self));

            Assert.That(track, Is.Not.Null);
            Assert.That(track.Id, Is.EqualTo(guid));
            track.Dispose();
            source.Dispose();
        }
Example 6
        /// <summary>
        /// Accesses the local audio track as specified
        /// by the operating system.
        /// MUST NOT BE CALLED FROM THE UI THREAD.
        /// </summary>
        /// <param name="factory"></param>
        /// <returns></returns>
        private IMediaStreamTrack getLocalAudio(IWebRtcFactory factory)
        {
            var audioOptions = new AudioOptions()
            {
                Factory = factory
            };

            // this will throw a very unhelpful exception if called from the UI thread
            var audioTrackSource = AudioTrackSource.Create(audioOptions);

            return(MediaStreamTrack.CreateAudioTrack("LocalAudio", audioTrackSource));
        }
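Given the warning in the XML documentation comment, a hypothetical call site (not part of the original sample) would dispatch the helper to a worker thread instead of invoking it on the UI thread:

            // Assumption: 'factory' is the IWebRtcFactory instance owned by the caller.
            IMediaStreamTrack localAudio = await Task.Run(() => getLocalAudio(factory));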
Example 7
        public async Task CreateFromSource()
        {
            using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                var settings = new LocalAudioTrackInitConfig {
                    trackName = "track_name"
                };
                using (LocalAudioTrack track = LocalAudioTrack.CreateFromSource(source, settings))
                {
                    Assert.IsNotNull(track);
                }
            }
        }
Example 8
        void SetupAudioTrack()
        {
            Logger.Debug("PeerChannel", "SetupAudioTrack");

            var opts = new AudioOptions
            {
                Factory = factory
            };

            Logger.Debug("PeerChannel", "create audio source");

            var audioSource = AudioTrackSource.Create(opts);

            Logger.Debug("PeerChannel", "create audio track");

            var track = MediaStreamTrack.CreateAudioTrack(factory,
                                                          mediaOption.AudioTrackId, audioSource);

            Logger.Debug("PeerChannel", "add audio track");

            Conn.AddTrack(track);

            OnAddLocalAudioTrack?.Invoke(track);
        }
Example 9
        public MediaPlayerViewModel()
        {
            _videoPlayer.CurrentStateChanged += OnMediaStateChanged;
            _videoPlayer.MediaOpened         += OnMediaOpened;
            _videoPlayer.MediaFailed         += OnMediaFailed;
            _videoPlayer.MediaEnded          += OnMediaEnded;
            _videoPlayer.RealTimePlayback     = true;
            _videoPlayer.AutoPlay             = false;

            AudioTrackTypeList.Add(new AudioTrackTypeViewModel
            {
                DisplayName = "Local microphone (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await AudioTrackSource.CreateFromDeviceAsync();
                    var settings = new LocalAudioTrackInitConfig();
                    return(LocalAudioTrack.CreateFromSource(source, settings));
                }
            });

            VideoTrackTypeList.Add(new VideoTrackTypeViewModel
            {
                DisplayName = "Local webcam (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceVideoTrackSource.CreateAsync();
                    var settings = new LocalVideoTrackInitConfig();
                    return(LocalVideoTrack.CreateFromSource(source, settings));
                }
            });

            _videoStatsTimer.Interval = TimeSpan.FromMilliseconds(300);
            _videoStatsTimer.Tick    += (_1, _2) => UpdateVideoStats();
        }
Example 10
        private void GetUserMedia()
        {
            Debug.WriteLine("Getting user media.");

            MediaDevice _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[0];

            for (int i = 0; i < Devices.Instance.VideoMediaDevicesList.Count; i++)
            {
                if (Devices.Instance.VideoMediaDevicesList[i].DisplayName == (string)_localSettings.Values["SelectedCameraName"])
                {
                    _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[i];
                }
            }

            List <int> widths     = new List <int>();
            List <int> heights    = new List <int>();
            List <int> frameRates = new List <int>();

            foreach (var videoFormat in _selectedVideoDevice.VideoFormats)
            {
                widths.Add(videoFormat.Dimension.Width);
                heights.Add(videoFormat.Dimension.Height);

                foreach (var frameRate in videoFormat.FrameRates)
                {
                    frameRates.Add(frameRate);
                }
            }

            // Maximum and minimum values for the selected camera
            IReadOnlyList <IConstraint> mandatoryConstraints = new List <IConstraint>()
            {
                new Constraint("maxWidth", widths.Max().ToString()),
                new Constraint("minWidth", widths.Min().ToString()),
                new Constraint("maxHeight", heights.Max().ToString()),
                new Constraint("minHeight", heights.Min().ToString()),
                new Constraint("maxFrameRate", frameRates.Max().ToString()),
                new Constraint("minFrameRate", frameRates.Min().ToString())
            };

            // Add optional constraints
            IReadOnlyList <IConstraint> optionalConstraints = new List <IConstraint>();

            IMediaConstraints mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

            var videoCapturer = VideoCapturer.Create(_selectedVideoDevice.DisplayName, _selectedVideoDevice.Id, false);

            var videoOptions = new VideoOptions();

            videoOptions.Factory     = _factory;
            videoOptions.Capturer    = videoCapturer;
            videoOptions.Constraints = mediaConstraints;

            var videoTrackSource = VideoTrackSource.Create(videoOptions);

            _selfVideoTrack = MediaStreamTrack.CreateVideoTrack("SELF_VIDEO", videoTrackSource);

            var audioOptions = new AudioOptions();

            audioOptions.Factory = _factory;

            var audioTrackSource = AudioTrackSource.Create(audioOptions);

            _selfAudioTrack = MediaStreamTrack.CreateAudioTrack("SELF_AUDIO", audioTrackSource);
        }
Example 11
        static async Task Main(string[] args)
        {
            Transceiver      audioTransceiver = null;
            Transceiver      videoTransceiver = null;
            AudioTrackSource audioTrackSource = null;
            VideoTrackSource videoTrackSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            LocalVideoTrack  localVideoTrack  = null;

            try
            {
                bool needVideo = Array.Exists(args, arg => (arg == "-v") || (arg == "--video"));
                bool needAudio = Array.Exists(args, arg => (arg == "-a") || (arg == "--audio"));

                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await PeerConnection.GetVideoCaptureDevicesAsync();

                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }

                // Create a new peer connection automatically disposed at the end of the program
                using var pc = new PeerConnection();

                // Initialize the connection with a STUN server to allow remote access
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await pc.InitializeAsync(config);

                Console.WriteLine("Peer connection initialized.");

                // Record video from local webcam, and send to remote peer
                if (needVideo)
                {
                    Console.WriteLine("Opening local webcam...");
                    videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

                    Console.WriteLine("Create video transceiver and add webcam track...");
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;
                }

                // Record audio from local microphone, and send to remote peer
                if (needAudio)
                {
                    Console.WriteLine("Opening local microphone...");
                    audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

                    Console.WriteLine("Create local audio track...");
                    var trackSettings = new LocalAudioTrackInitConfig {
                        trackName = "mic_track"
                    };
                    localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

                    Console.WriteLine("Create audio transceiver and add mic track...");
                    audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
                    audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    audioTransceiver.LocalAudioTrack  = localAudioTrack;
                }

                // Setup signaling
                Console.WriteLine("Starting signaling...");
                var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
                signaler.SdpMessageReceived += async(SdpMessage message) =>
                {
                    await pc.SetRemoteDescriptionAsync(message);

                    if (message.Type == SdpMessageType.Offer)
                    {
                        pc.CreateAnswer();
                    }
                };
                signaler.IceCandidateReceived += (IceCandidate candidate) =>
                {
                    pc.AddIceCandidate(candidate);
                };
                await signaler.StartAsync();

                // Start peer connection
                pc.Connected       += () => { Console.WriteLine("PeerConnection: connected."); };
                pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
                int numFrames = 0;
                pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
                    {
                        ++numFrames;
                        if (numFrames % 60 == 0)
                        {
                            Console.WriteLine($"Received video frames: {numFrames}");
                        }
                    };
                };
                if (signaler.IsClient)
                {
                    Console.WriteLine("Connecting to remote peer...");
                    pc.CreateOffer();
                }
                else
                {
                    Console.WriteLine("Waiting for offer from remote peer...");
                }

                Console.WriteLine("Press a key to stop recording...");
                Console.ReadKey(true);

                signaler.Stop();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            localAudioTrack?.Dispose();
            localVideoTrack?.Dispose();
            audioTrackSource?.Dispose();
            videoTrackSource?.Dispose();

            Console.WriteLine("Program terminated.");
        }
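The NamedPipeSignaler pairs two local processes over the "testpipe" pipe, so this console sample is normally exercised by launching the program twice on the same machine; whichever instance reports signaler.IsClient sends the offer. Assuming a standard dotnet project layout, the two invocations might look like:

        # terminal 1
        dotnet run -- --audio
        # terminal 2
        dotnet run -- --audio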
Example 12
    async void OnClientConnected()
    {
        var pc = signaler.PeerConnection;

        // Record video from local webcam, and send to remote peer
        if (NeedVideo)
        {
            // Configure the local video capture device (resolution, framerate, optional video profile)

            var deviceSettings = new LocalVideoDeviceInitConfig
            {
                width  = VideoWidth,
                height = VideoHeight,
            };
            if (VideoFps > 0)
            {
                deviceSettings.framerate = VideoFps;
            }
            if (VideoProfileId.Length > 0)
            {
                deviceSettings.videoProfileId = VideoProfileId;
            }

            Debug.Log($"Attempt to grab Camera - {deviceSettings.videoProfileId}: {deviceSettings.width}x{deviceSettings.height}@{deviceSettings.framerate}fps");
            videoTrackSource = await DeviceVideoTrackSource.CreateAsync(deviceSettings);

            Debug.Log($"Create local video track... {videoTrackSource}");
            var trackSettings = new LocalVideoTrackInitConfig
            {
                trackName = "webcam_track"
            };
            localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

            Debug.Log("Create video transceiver and add webcam track...");
            videoTransceiver = pc.AddTransceiver(MediaKind.Video);
            videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
        }

        // Record audio from local microphone, and send to remote peer
        if (NeedAudio)
        {
            Debug.Log("Opening local microphone...");
            audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

            Debug.Log("Create local audio track...");
            var trackSettings = new LocalAudioTrackInitConfig {
                trackName = "mic_track"
            };
            localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

            Debug.Log("Create audio transceiver and add mic track...");
            audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
        }

        // Start peer connection
        int numFrames = 0;

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            Debug.Log($"Attach Frame Listener...");
            track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
            {
                ++numFrames;
                if (numFrames % 60 == 0)
                {
                    Debug.Log($"Received video frames: {numFrames}");
                }
            };
        };
        // we need a short delay here for the video stream to settle...
        // I assume my Logitech webcam is sending some garbage frames in the beginning.
        await Task.Delay(200);

        pc.CreateOffer();
        Debug.Log("Send offer to remote peer");
    }
Example 13
        public async Task <string> InitiateCallRTC()
        {
            var list = new List <string>();

            list.Add(this.Configuration.GetSection("Key")["iceServer"]);
            AudioTrackSource microphoneSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            Transceiver      audioTransceiver = null;

            var iceServer = new IceServer
            {
                Urls         = list,
                TurnPassword = this.Configuration.GetSection("Key")["turnPwd"],
                TurnUserName = this.Configuration.GetSection("Key")["turnUser"]
            };

            var serverList = new List <IceServer>();

            serverList.Add(iceServer);
            var connectionConfig = new PeerConnectionConfiguration {
                IceServers       = serverList,
                IceTransportType = IceTransportType.All,
                BundlePolicy     = BundlePolicy.Balanced,
                SdpSemantic      = SdpSemantic.UnifiedPlan
            };
            var connection = new PeerConnection();
            await connection.InitializeAsync(connectionConfig);

            microphoneSource = await DeviceAudioTrackSource.CreateAsync();

            var audioTrackConfig = new LocalAudioTrackInitConfig
            {
                trackName = "microphone_track"
            };

            localAudioTrack = LocalAudioTrack.CreateFromSource(microphoneSource, audioTrackConfig);

            audioTransceiver = connection.AddTransceiver(MediaKind.Audio);
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;

            var signaler = new NamedPipeSignaler.NamedPipeSignaler(connection, "testpipe");

            connection.Connected += () => {
                Console.WriteLine("PeerConnection: connected.");
            };

            signaler.SdpMessageReceived += async(SdpMessage message) =>
            {
                // Note: we use 'await' to ensure the remote description is applied
                // before calling CreateAnswer(). Failing to do so will prevent the
                // answer from being generated, and the connection from establishing.
                await connection.SetRemoteDescriptionAsync(message);

                if (message.Type == SdpMessageType.Offer)
                {
                    connection.CreateAnswer();
                }
            };

            await signaler.StartAsync();

            signaler.IceCandidateReceived += (IceCandidate candidate) => {
                connection.AddIceCandidate(candidate);
            };

            connection.IceStateChanged += (IceConnectionState newState) => {
                Console.WriteLine($"ICE state: {newState}");
            };

            if (signaler.IsClient)
            {
                Console.WriteLine("Connecting to remote peer...");
                connection.CreateOffer();
            }
            else
            {
                Console.WriteLine("Waiting for offer from remote peer...");
            }

            return(connection.IsConnected + "-" + connection.Name + "-");
        }
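Note that InitiateCallRTC returns as soon as signaling has started, and none of the WebRTC objects it creates are stored anywhere, so there is no way to stop the call or dispose the connection, track and source later. A hedged variant (hypothetical field names, not part of the original sample) would promote them to members of the enclosing class and dispose them when the call ends:

            // Hypothetical fields keeping the WebRTC objects alive for the duration of the call.
            private PeerConnection _connection;
            private NamedPipeSignaler.NamedPipeSignaler _signaler;
            private AudioTrackSource _microphoneSource;
            private LocalAudioTrack _localAudioTrack;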