Example #1
        public async Task Initialization(string user)
        {
            try
            {
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await Connection.InitializeAsync(config);

                microphoneSource = await DeviceAudioTrackSource.CreateAsync();

                var audioTrackConfig = new LocalAudioTrackInitConfig {
                    trackName = "microphone_track"
                };
                localAudioTrack  = LocalAudioTrack.CreateFromSource(microphoneSource, audioTrackConfig);
                audioTransceiver = Connection.AddTransceiver(MediaKind.Audio);
                audioTransceiver.LocalAudioTrack  = localAudioTrack;
                audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;

                Console.WriteLine("Peer connection initialized.");
            }
            catch (Exception e)
            {
                await Log.WriteAsync(e.Message);

                Console.WriteLine(e);
                throw;
            }
        }
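        // A possible teardown counterpart to Initialization() above (a sketch, not part of
        // the original sample), assuming the same Connection, audioTransceiver,
        // localAudioTrack and microphoneSource members: the track is detached and disposed
        // before its source, and the peer connection is closed last.
        public void Shutdown()
        {
            if (audioTransceiver != null)
            {
                // Detach the local track from the transceiver before disposing it.
                audioTransceiver.LocalAudioTrack = null;
            }
            localAudioTrack?.Dispose();
            localAudioTrack = null;

            // Dispose the source only after all tracks created from it are disposed.
            microphoneSource?.Dispose();
            microphoneSource = null;

            // Close the peer connection last; dispose it as well if this class owns it.
            Connection.Close();
        }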
        public async Task AddAudioTrackFromDeviceAsync(string trackName)
        {
            const string DefaultAudioDeviceName = "Default audio device";

            await RequestMediaAccessAsync(StreamingCaptureMode.Audio);

            // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
            var initConfig = new LocalAudioDeviceInitConfig();
            var source     = await DeviceAudioTrackSource.CreateAsync(initConfig);

            var settings = new LocalAudioTrackInitConfig
            {
                trackName = trackName
            };
            var track = LocalAudioTrack.CreateFromSource(source, settings);

            SessionModel.Current.AudioTracks.Add(new AudioTrackViewModel
            {
                Source     = source,
                Track      = track,
                TrackImpl  = track,
                IsRemote   = false,
                DeviceName = DefaultAudioDeviceName
            });
            SessionModel.Current.LocalTracks.Add(new TrackViewModel(Symbol.Volume)
            {
                DisplayName = DefaultAudioDeviceName
            });
        }
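        // One way to address the FIXME above (a sketch only; RemoveAudioTrack is a
        // hypothetical helper, not part of the original app): dispose the track first,
        // then the source it was created from, when the view model is removed.
        public void RemoveAudioTrack(AudioTrackViewModel trackViewModel)
        {
            SessionModel.Current.AudioTracks.Remove(trackViewModel);

            // The casts avoid assuming the exact declared types of Track and Source;
            // both LocalAudioTrack and DeviceAudioTrackSource implement IDisposable.
            (trackViewModel.Track as IDisposable)?.Dispose();
            (trackViewModel.Source as IDisposable)?.Dispose();
        }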
        // This method might be run outside the app thread and should not access the Unity API.
        private static async Task <WebRTC.AudioTrackSource> CreateSourceAsync(LocalAudioDeviceInitConfig deviceConfig)
        {
            var createTask = DeviceAudioTrackSource.CreateAsync(deviceConfig);

            // Continue the task outside the Unity app context, in order to avoid deadlock
            // if OnDisable waits on this task.
            return await createTask.ConfigureAwait(continueOnCapturedContext: false);
        }
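A hedged sketch of how such a helper might be consumed from a Unity component (not the library's actual code), assuming a _createSourceTask field: because the continuation does not capture the Unity synchronization context, OnDisable can safely wait for a pending creation before disposing the source.
        private Task<WebRTC.AudioTrackSource> _createSourceTask;

        protected void OnEnable()
        {
            // Start creating the source; keep the task so OnDisable can wait for it.
            _createSourceTask = CreateSourceAsync(new LocalAudioDeviceInitConfig());
        }

        protected void OnDisable()
        {
            if (_createSourceTask != null)
            {
                // Blocking here does not deadlock because CreateSourceAsync() used
                // ConfigureAwait(false), so its continuation runs off the Unity thread.
                var source = _createSourceTask.GetAwaiter().GetResult();
                source?.Dispose();
                _createSourceTask = null;
            }
        }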
Example #4
 public async Task CreateFromDevice()
 {
     using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(string.Empty, source.Name);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }
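A possible companion test, sketched here rather than taken from the original suite, exercising the CreateAsync overload that takes a LocalAudioDeviceInitConfig; AutoGainControl is the only capture option used elsewhere in these examples, so it is the only field set.
 public async Task CreateFromDeviceWithConfig()
 {
     var initConfig = new LocalAudioDeviceInitConfig
     {
         AutoGainControl = false,
     };
     using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync(initConfig))
     {
         Assert.IsNotNull(source);
         Assert.AreEqual(0, source.Tracks.Count);
     }
 }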
Example #5
        public async Task Name()
        {
            using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                const string kTestName = "test_audio_track_source_name";
                source.Name = kTestName;
                Assert.AreEqual(kTestName, source.Name);
            }
        }
Example #6
        public async Task CreateFromSource()
        {
            using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
            {
                Assert.IsNotNull(source);

                var settings = new LocalAudioTrackInitConfig {
                    trackName = "track_name"
                };
                using (LocalAudioTrack track = LocalAudioTrack.CreateFromSource(source, settings))
                {
                    Assert.IsNotNull(track);
                }
            }
        }
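A further sketch in the same vein (not part of the original tests): a single source can back several local tracks, and the source's Tracks collection is expected to list the tracks currently attached to it.
        public async Task CreateTwoTracksFromOneSource()
        {
            using (AudioTrackSource source = await DeviceAudioTrackSource.CreateAsync())
            {
                var config1 = new LocalAudioTrackInitConfig { trackName = "track_1" };
                var config2 = new LocalAudioTrackInitConfig { trackName = "track_2" };
                using (LocalAudioTrack track1 = LocalAudioTrack.CreateFromSource(source, config1))
                using (LocalAudioTrack track2 = LocalAudioTrack.CreateFromSource(source, config2))
                {
                    Assert.IsNotNull(track1);
                    Assert.IsNotNull(track2);
                    Assert.AreEqual(2, source.Tracks.Count);
                }
            }
        }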
Example #7
        public static async Task AddAudioTrackFromDeviceAsync(string trackName)
        {
            const string DefaultAudioDeviceName = "Default audio device";

            await Utils.RequestMediaAccessAsync(StreamingCaptureMode.Audio);

            // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
            var initConfig = new LocalAudioDeviceInitConfig();
            var source     = await DeviceAudioTrackSource.CreateAsync(initConfig);

            var settings = new LocalAudioTrackInitConfig
            {
                trackName = trackName
            };
            var track = LocalAudioTrack.CreateFromSource(source, settings);

            SessionModel.Current.AddAudioTrack(track, DefaultAudioDeviceName);
        }
Example #8
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if UNITY_WSA && !UNITY_EDITOR
            // Request access to audio capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user approves it.
            try
            {
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Audio access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            var initConfig = new LocalAudioDeviceInitConfig
            {
                AutoGainControl = _autoGainControl,
            };
            Source = await DeviceAudioTrackSource.CreateAsync(initConfig);

            if (Source == null)
            {
                throw new Exception("Failed to create microphone audio source.");
            }

            IsStreaming = true;
            AudioSourceStarted.Invoke(this);
        }
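A hedged sketch of the matching teardown for the component above, assuming the same Source and IsStreaming members and an AudioSourceStopped event symmetric to AudioSourceStarted (the event name is assumed, not shown in this example).
        protected void OnDisable()
        {
            if (Source != null)
            {
                AudioSourceStopped.Invoke(this);
                IsStreaming = false;
                Source.Dispose();
                Source = null;
            }
        }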
Example #9
        public MediaPlayerViewModel()
        {
            _videoPlayer.CurrentStateChanged += OnMediaStateChanged;
            _videoPlayer.MediaOpened         += OnMediaOpened;
            _videoPlayer.MediaFailed         += OnMediaFailed;
            _videoPlayer.MediaEnded          += OnMediaEnded;
            _videoPlayer.RealTimePlayback     = true;
            _videoPlayer.AutoPlay             = false;

            AudioTrackTypeList.Add(new AudioTrackTypeViewModel
            {
                DisplayName = "Local microphone (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceAudioTrackSource.CreateAsync();
                    var settings = new LocalAudioTrackInitConfig();
                    return(LocalAudioTrack.CreateFromSource(source, settings));
                }
            });

            VideoTrackTypeList.Add(new VideoTrackTypeViewModel
            {
                DisplayName = "Local webcam (default device)",
                Factory     = async() =>
                {
                    // FIXME - this leaks 'source', never disposed (and is the track itself disposed??)
                    var source   = await DeviceVideoTrackSource.CreateAsync();
                    var settings = new LocalVideoTrackInitConfig();
                    return(LocalVideoTrack.CreateFromSource(source, settings));
                }
            });

            _videoStatsTimer.Interval = TimeSpan.FromMilliseconds(300);
            _videoStatsTimer.Tick    += (_1, _2) => UpdateVideoStats();
        }
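One possible way to address the FIXME comments in the factories above, sketched with a hypothetical _ownedSources list on the view model: each factory would add its source to the list right after CreateAsync(), and the view model would dispose them all when the media player is torn down.
        private readonly List<IDisposable> _ownedSources = new List<IDisposable>();

        // Inside each Factory lambda, after CreateAsync():
        //     _ownedSources.Add(source);

        public void DisposeOwnedSources()
        {
            foreach (var source in _ownedSources)
            {
                source.Dispose();
            }
            _ownedSources.Clear();
        }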
Example #10
        protected async void OnEnable()
        {
            if (Source != null)
            {
                return;
            }

#if PLATFORM_ANDROID
            // Ensure Android binding is initialized before accessing the native implementation
            Android.Initialize();

            // Check for permission to access the camera
            if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
            {
                if (!_androidRecordAudioRequestPending)
                {
                    // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
                    // and check for permission again each time (see below why).
                    _androidRecordAudioRequestPending        = true;
                    _androidRecordAudioRequestRetryUntilTime = Time.time + 300;

                    // Display a dialog requesting user permission. This returns immediately,
                    // and unfortunately there is no good way to tell when it completes. As a
                    // rule of thumb, the application should lose focus while the dialog is
                    // shown, so checking again when focus resumes should be sufficient
                    // without having to poll every frame.
                    Permission.RequestUserPermission(Permission.Microphone);
                }
                return;
            }
#endif

#if UNITY_WSA && !UNITY_EDITOR
            // Request access to audio capture. The OS may show some popup dialog to the
            // user to request permission. This will succeed only if the user approves it.
            try
            {
                if (UnityEngine.WSA.Application.RunningOnUIThread())
                {
                    await RequestAccessAsync();
                }
                else
                {
                    UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
                }
            }
            catch (Exception ex)
            {
                // Log an error and prevent activation
                Debug.LogError($"Audio access failure: {ex.Message}.");
                this.enabled = false;
                return;
            }
#endif

            var initConfig = new LocalAudioDeviceInitConfig
            {
                AutoGainControl = _autoGainControl,
            };
            try
            {
                Source = await DeviceAudioTrackSource.CreateAsync(initConfig);

                if (Source == null)
                {
                    throw new Exception("DeviceAudioTrackSource.CreateAsync() returned a NULL source.");
                }
            }
            catch (Exception ex)
            {
                Debug.LogError($"Failed to create device track source for {nameof(MicrophoneSource)} component '{name}'.");
                Debug.LogException(ex, this);
                return;
            }

            IsStreaming = true;
            AudioSourceStarted.Invoke(this);
        }
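A hedged sketch of the focus handler that the PLATFORM_ANDROID comments above refer to; the real component may implement this differently. When focus returns after the permission dialog, the microphone permission is checked again within the retry window and activation is retried.
#if PLATFORM_ANDROID
        protected void OnApplicationFocus(bool focus)
        {
            if (!focus || !_androidRecordAudioRequestPending)
            {
                return;
            }

            if (Permission.HasUserAuthorizedPermission(Permission.Microphone))
            {
                // Permission granted; stop monitoring and retry activation.
                _androidRecordAudioRequestPending = false;
                OnEnable();
            }
            else if (Time.time > _androidRecordAudioRequestRetryUntilTime)
            {
                // Give up once the retry window has elapsed.
                _androidRecordAudioRequestPending = false;
            }
        }
#endif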
Example #11
        static async Task Main(string[] args)
        {
            Transceiver      audioTransceiver = null;
            Transceiver      videoTransceiver = null;
            AudioTrackSource audioTrackSource = null;
            VideoTrackSource videoTrackSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            LocalVideoTrack  localVideoTrack  = null;

            try
            {
                bool needVideo = Array.Exists(args, arg => (arg == "-v") || (arg == "--video"));
                bool needAudio = Array.Exists(args, arg => (arg == "-a") || (arg == "--audio"));

                // Asynchronously retrieve a list of available video capture devices (webcams).
                var deviceList = await PeerConnection.GetVideoCaptureDevicesAsync();

                // For example, print them to the standard output
                foreach (var device in deviceList)
                {
                    Console.WriteLine($"Found webcam {device.name} (id: {device.id})");
                }

                // Create a new peer connection automatically disposed at the end of the program
                using var pc = new PeerConnection();

                // Initialize the connection with a STUN server to allow remote access
                var config = new PeerConnectionConfiguration
                {
                    IceServers = new List <IceServer> {
                        new IceServer {
                            Urls = { "stun:stun.l.google.com:19302" }
                        }
                    }
                };
                await pc.InitializeAsync(config);

                Console.WriteLine("Peer connection initialized.");

                // Record video from local webcam, and send to remote peer
                if (needVideo)
                {
                    Console.WriteLine("Opening local webcam...");
                    videoTrackSource = await DeviceVideoTrackSource.CreateAsync();

                    Console.WriteLine("Create local video track...");
                    var trackSettings = new LocalVideoTrackInitConfig {
                        trackName = "webcam_track"
                    };
                    localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

                    Console.WriteLine("Create video transceiver and add webcam track...");
                    videoTransceiver = pc.AddTransceiver(MediaKind.Video);
                    videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    videoTransceiver.LocalVideoTrack  = localVideoTrack;
                }

                // Record audio from local microphone, and send to remote peer
                if (needAudio)
                {
                    Console.WriteLine("Opening local microphone...");
                    audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

                    Console.WriteLine("Create local audio track...");
                    var trackSettings = new LocalAudioTrackInitConfig {
                        trackName = "mic_track"
                    };
                    localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

                    Console.WriteLine("Create audio transceiver and add mic track...");
                    audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
                    audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
                    audioTransceiver.LocalAudioTrack  = localAudioTrack;
                }

                // Setup signaling
                Console.WriteLine("Starting signaling...");
                var signaler = new NamedPipeSignaler.NamedPipeSignaler(pc, "testpipe");
                signaler.SdpMessageReceived += async(SdpMessage message) =>
                {
                    await pc.SetRemoteDescriptionAsync(message);

                    if (message.Type == SdpMessageType.Offer)
                    {
                        pc.CreateAnswer();
                    }
                };
                signaler.IceCandidateReceived += (IceCandidate candidate) =>
                {
                    pc.AddIceCandidate(candidate);
                };
                await signaler.StartAsync();

                // Start peer connection
                pc.Connected       += () => { Console.WriteLine("PeerConnection: connected."); };
                pc.IceStateChanged += (IceConnectionState newState) => { Console.WriteLine($"ICE state: {newState}"); };
                int numFrames = 0;
                pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
                    {
                        ++numFrames;
                        if (numFrames % 60 == 0)
                        {
                            Console.WriteLine($"Received video frames: {numFrames}");
                        }
                    };
                };
                if (signaler.IsClient)
                {
                    Console.WriteLine("Connecting to remote peer...");
                    pc.CreateOffer();
                }
                else
                {
                    Console.WriteLine("Waiting for offer from remote peer...");
                }

                Console.WriteLine("Press a key to stop recording...");
                Console.ReadKey(true);

                signaler.Stop();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            localAudioTrack?.Dispose();
            localVideoTrack?.Dispose();
            audioTrackSource?.Dispose();
            videoTrackSource?.Dispose();

            Console.WriteLine("Program terminated.");
        }
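A small companion sketch, not in the original program: the same frame-counting pattern applies to remote audio, assuming the AudioTrackAdded and AudioFrameReady events mirror the video ones used above. It would sit next to the VideoTrackAdded handler inside the try block.
                pc.AudioTrackAdded += (RemoteAudioTrack track) =>
                {
                    int numAudioFrames = 0;
                    track.AudioFrameReady += (AudioFrame frame) =>
                    {
                        ++numAudioFrames;
                        if (numAudioFrames % 100 == 0)
                        {
                            Console.WriteLine($"Received audio frames: {numAudioFrames}");
                        }
                    };
                };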
Example #12
    async void OnClientConnected()
    {
        var pc = signaler.PeerConnection;

        // Record video from local webcam, and send to remote peer
        if (NeedVideo)
        {
            // Configure the local video capture device (resolution, framerate, profile)

            var deviceSettings = new LocalVideoDeviceInitConfig
            {
                width  = VideoWidth,
                height = VideoHeight,
            };
            if (VideoFps > 0)
            {
                deviceSettings.framerate = VideoFps;
            }
            if (VideoProfileId.Length > 0)
            {
                deviceSettings.videoProfileId = VideoProfileId;
            }

            Debug.Log($"Attempt to grab Camera - {deviceSettings.videoProfileId}: {deviceSettings.width}x{deviceSettings.height}@{deviceSettings.framerate}fps");
            videoTrackSource = await DeviceVideoTrackSource.CreateAsync(deviceSettings);

            Debug.Log($"Create local video track... {videoTrackSource}");
            var trackSettings = new LocalVideoTrackInitConfig
            {
                trackName = "webcam_track"
            };
            localVideoTrack = LocalVideoTrack.CreateFromSource(videoTrackSource, trackSettings);

            Debug.Log("Create video transceiver and add webcam track...");
            videoTransceiver = pc.AddTransceiver(MediaKind.Video);
            videoTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            videoTransceiver.LocalVideoTrack  = localVideoTrack;
        }

        // Record audio from local microphone, and send to remote peer
        if (NeedAudio)
        {
            Debug.Log("Opening local microphone...");
            audioTrackSource = await DeviceAudioTrackSource.CreateAsync();

            Debug.Log("Create local audio track...");
            var trackSettings = new LocalAudioTrackInitConfig {
                trackName = "mic_track"
            };
            localAudioTrack = LocalAudioTrack.CreateFromSource(audioTrackSource, trackSettings);

            Debug.Log("Create audio transceiver and add mic track...");
            audioTransceiver = pc.AddTransceiver(MediaKind.Audio);
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
        }

        // Start peer connection
        int numFrames = 0;

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            Debug.Log($"Attach Frame Listener...");
            track.I420AVideoFrameReady += (I420AVideoFrame frame) =>
            {
                ++numFrames;
                if (numFrames % 60 == 0)
                {
                    Debug.Log($"Received video frames: {numFrames}");
                }
            };
        };
        // we need a short delay here for the video stream to settle...
        // I assume my Logitech webcam is sending some garbage frames in the beginning.
        await Task.Delay(200);

        pc.CreateOffer();
        Debug.Log("Send offer to remote peer");
    }
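    // A hedged sketch (not from the original sample) of the corresponding cleanup when
    // the remote client disconnects, assuming the same fields used above; tracks are
    // disposed before the sources they were created from.
    void OnClientDisconnected()
    {
        localAudioTrack?.Dispose();
        localAudioTrack = null;
        audioTrackSource?.Dispose();
        audioTrackSource = null;

        localVideoTrack?.Dispose();
        localVideoTrack = null;
        videoTrackSource?.Dispose();
        videoTrackSource = null;
    }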
        public async Task <string> InitiateCallRTC()
        {
            var list = new List <string>();

            list.Add(this.Configuration.GetSection("Key")["iceServer"]);
            AudioTrackSource microphoneSource = null;
            LocalAudioTrack  localAudioTrack  = null;
            Transceiver      audioTransceiver = null;

            var iceServer = new IceServer
            {
                Urls         = list,
                TurnPassword = this.Configuration.GetSection("Key")["turnPwd"],
                TurnUserName = this.Configuration.GetSection("Key")["turnUser"]
            };

            var serverList = new List <IceServer>();

            serverList.Add(iceServer);
            var connectionConfig = new PeerConnectionConfiguration {
                IceServers       = serverList,
                IceTransportType = IceTransportType.All,
                BundlePolicy     = BundlePolicy.Balanced,
                SdpSemantic      = SdpSemantic.UnifiedPlan
            };
            var connection = new PeerConnection();
            await connection.InitializeAsync(connectionConfig);

            microphoneSource = await DeviceAudioTrackSource.CreateAsync();

            var audioTrackConfig = new LocalAudioTrackInitConfig
            {
                trackName = "microphone_track"
            };

            localAudioTrack = LocalAudioTrack.CreateFromSource(microphoneSource, audioTrackConfig);

            audioTransceiver = connection.AddTransceiver(MediaKind.Audio);
            audioTransceiver.LocalAudioTrack  = localAudioTrack;
            audioTransceiver.DesiredDirection = Transceiver.Direction.SendReceive;

            var signaler = new NamedPipeSignaler.NamedPipeSignaler(connection, "testpipe");

            connection.Connected += () => {
                Console.WriteLine("PeerConnection: connected.");
            };

            signaler.SdpMessageReceived += async(SdpMessage message) =>
            {
                // Note: 'await' ensures the remote description is applied before
                // CreateAnswer() is called. Failing to do so would prevent the answer
                // from being generated and the connection from being established.
                await connection.SetRemoteDescriptionAsync(message);

                if (message.Type == SdpMessageType.Offer)
                {
                    connection.CreateAnswer();
                }
            };

            await signaler.StartAsync();

            signaler.IceCandidateReceived += (IceCandidate candidate) => {
                connection.AddIceCandidate(candidate);
            };

            connection.IceStateChanged += (IceConnectionState newState) => {
                Console.WriteLine($"ICE state: {newState}");
            };

            if (signaler.IsClient)
            {
                Console.WriteLine("Connecting to remote peer...");
                connection.CreateOffer();
            }
            else
            {
                Console.WriteLine("Waiting for offer from remote peer...");
            }

            return(connection.IsConnected + "-" + connection.Name + "-");
        }
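Note that InitiateCallRTC() returns immediately after CreateOffer(), so IsConnected is normally still false at that point. If the caller needs to wait for the connection, one option, sketched here with an assumed 30-second timeout, is to complete a TaskCompletionSource from the Connected event before building the return value (subscribing before CreateOffer() so the event cannot be missed):
            var connectedTcs = new TaskCompletionSource<bool>();
            connection.Connected += () => connectedTcs.TrySetResult(true);

            // ... CreateOffer() as above, then:
            var completed = await Task.WhenAny(connectedTcs.Task, Task.Delay(TimeSpan.FromSeconds(30)));
            bool isConnected = (completed == connectedTcs.Task);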