Example #1
        /// <summary>
        /// Accesses the local video track, specified by
        /// this.selectedDevice and this.selectedProfile.
        /// MUST NOT BE CALLED FROM THE UI THREAD.
        /// </summary>
        /// <param name="factory">The WebRTC factory used to create the video track source.</param>
        /// <returns>The local video track.</returns>
        private IMediaStreamTrack getLocalVideo(IWebRtcFactory factory)
        {
            IReadOnlyList <IConstraint> mandatoryConstraints = new List <IConstraint>()
            {
                new Constraint("maxWidth", this.selectedProfile.Width.ToString()),
                new Constraint("minWidth", this.selectedProfile.Width.ToString()),
                new Constraint("maxHeight", this.selectedProfile.Height.ToString()),
                new Constraint("minHeight", this.selectedProfile.Height.ToString()),
                new Constraint("maxFrameRate", this.selectedProfile.FrameRate.ToString()),
                new Constraint("minFrameRate", this.selectedProfile.FrameRate.ToString())
            };
            IReadOnlyList <IConstraint> optionalConstraints = new List <IConstraint>();
            var mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

            // this will throw a very unhelpful exception if called from the UI thread
            var videoCapturer = VideoCapturer.Create(this.selectedDevice.Name, this.selectedDevice.Id, false);

            var options = new VideoOptions()
            {
                Factory     = factory,
                Capturer    = videoCapturer,
                Constraints = mediaConstraints
            };
            var videoTrackSource = VideoTrackSource.Create(options);

            return(MediaStreamTrack.CreateVideoTrack("LocalVideo", videoTrackSource));
        }
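The "must not be called from the UI thread" warning above is the interesting part of this example. As a minimal, hypothetical call-site sketch (assuming the same class, an already-created IWebRtcFactory, and using System.Threading.Tasks), the work can simply be pushed onto the thread pool:

        // Hypothetical call site for getLocalVideo: hop off the UI thread before
        // VideoCapturer.Create runs, then use the resulting track as needed.
        private async Task StartLocalVideoAsync(IWebRtcFactory factory)
        {
            IMediaStreamTrack localVideo = await Task.Run(() => this.getLocalVideo(factory));
            // ... hand localVideo to the peer connection or renderer from here
        }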
Example #2
        public Form1()
        {
            InitializeComponent();

            // Attach OpenTok video renderers to the publisher and subscriber host controls.
            wpfPublisherHost.Child  = new OpenTok.VideoRenderer();
            wpfSubscriberHost.Child = new OpenTok.VideoRenderer();

            // Use the first available camera for the publisher.
            var cams        = VideoCapturer.EnumerateDevices();
            var selectedcam = cams[0];

            capturer  = selectedcam.CreateVideoCapturer(VideoCapturer.Resolution.High);
            publisher = new Publisher.Builder(Context.Instance)
            {
                Renderer = (IVideoRenderer)wpfPublisherHost.Child,
                Capturer = capturer
            }.Build();

            var mics = AudioDevice.EnumerateInputAudioDevices();

            AudioDevice.SetInputAudioDevice(mics[0]); // Go with first microphone in the list

            session = new Session.Builder(Context.Instance, API_KEY, SESSION_ID).Build();

            session.Connected         += Session_Connected;
            session.Disconnected      += Session_Disconnected;
            session.Error             += Session_Error;
            session.ConnectionCreated += Session_ConnectionCreated;
            session.StreamReceived    += Session_StreamReceived;
            session.StreamDropped     += Session_StreamDropped;

            Closing += MainWindow_Closing;
        }
Example #3
 private void InitCamera()
 {
     if (!VideoCapturer.Start(device, this))
     {
         throw new RecoverableException("Couldn't initialize host camera - see logs for details.");
     }
 }
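InitCamera converts a false return from VideoCapturer.Start into a RecoverableException, so callers are expected to handle the failure rather than crash. A hypothetical call-site sketch, assuming nothing beyond what the example itself shows:

 // Hypothetical caller: treat a camera that fails to start as a recoverable condition.
 try
 {
     InitCamera();
 }
 catch (RecoverableException e)
 {
     Console.WriteLine("Host camera unavailable: " + e.Message);
 }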
Example #4
        void SetupVideoTrack()
        {
            Logger.Debug("PeerChannel", "SetupVideoTrack");

            var capability = mediaOption.VideoCaptureCapability;

            Logger.Debug("PeerChannel", "create video capturer");

            var capturerParams = capability.CreationParameters(factory);
            var capturer       = VideoCapturer.Create(capturerParams);

            if (capturer == null)
            {
                Logger.Debug("PeerChannel",
                             "failed to create video capturer");
                return;
            }

            Logger.Debug("PeerChannel", "create video track");

            var track = MediaStreamTrack.CreateVideoTrack(factory,
                                                          mediaOption.VideoTrackId, capturer);

            (capturer as IDisposable).Dispose();

            Logger.Debug("PeerChannel", "add video track");

            Conn.AddTrack(track);

            Logger.Debug("PeerChannel", "add track");

            OnAddLocalVideoTrack?.Invoke(capturer, track);
        }
Example #5
        private void ReleaseMedia()
        {
            try
            {
                if (VideoCapturer != null)
                {
                    VideoCapturer.StopCapture();
                    VideoCapturer = null;
                }

                if (CurrentVideoTrack != null)
                {
                    var videoTrack = CurrentVideoTrack;
                    CurrentVideoTrack = null;
                    CurrentRoom?.LocalParticipant.UnpublishTrack(videoTrack);
                    DropRenderings(videoTrack);
                    videoTrack.Release();
                }

                if (CurrentAudioTrack != null)
                {
                    var audioTrack = CurrentAudioTrack;
                    CurrentAudioTrack = null;
                    CurrentRoom?.LocalParticipant.UnpublishTrack(audioTrack);
                    audioTrack.Enable(false);
                    audioTrack.Release();
                }
            }
            catch (Exception e)
            {
                Methods.DisplayReportResultTrack(e);
            }
        }
Example #6
        public void ReleaseMedia()
        {
            try
            {
                if (VideoCapturer != null)
                {
                    VideoCapturer.StopCapture();
                    VideoCapturer = null;
                }

                if (CurrentVideoTrack != null)
                {
                    var videoTrack = CurrentVideoTrack;
                    CurrentVideoTrack = null;
                    CurrentRoom?.LocalParticipant.RemoveVideoTrack(videoTrack);
                    DropRenderers(videoTrack);
                    videoTrack.Release();
                }

                if (CurrentAudioTrack != null)
                {
                    var audioTrack = CurrentAudioTrack;
                    CurrentAudioTrack = null;
                    CurrentRoom?.LocalParticipant.RemoveAudioTrack(audioTrack);
                    audioTrack.Enable(false);
                    audioTrack.Release();
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #7
 /// <summary>
 /// Clean up any resources being used.
 /// </summary>
 /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
 protected override void Dispose(bool disposing)
 {
     if (disposing && (components != null))
     {
         components.Dispose();
     }
     base.Dispose(disposing);
     VideoCapturer.Dispose();
 }
Example #8
 public void FlipCamera()
 {
     try
     {
         VideoCapturer?.SwitchCamera();
     }
     catch (Exception e)
     {
         Console.WriteLine(e);
     }
 }
Example #9
 public void FlipCamera()
 {
     try
     {
         VideoCapturer?.SwitchCamera();
     }
     catch (Exception e)
     {
         Methods.DisplayReportResultTrack(e);
     }
 }
Example #10
        public async Task <IList <VideoDevice> > GetVideoDevices()
        {
            var devices = await VideoCapturer.GetDevices();

            // Project the capturer's device info onto the app's own VideoDevice type.
            return(devices
                   .Select(dev => new VideoDevice()
            {
                Id = dev.Info.Id,
                Name = dev.Info.Name
            })
                   .ToList());
        }
Example #11
        public byte[] GrabFrame()
        {
            var frame = VideoCapturer.GrabSingleFrame();

            if (ForcedScaleDownFactor != 1 || Quality != -1 || cropToSize != null)
            {
                var decompressed = DecompressJpgToRaw(frame);
                frame = CompressRawToJpeg(decompressed.Data, decompressed.Width, decompressed.Height, ForcedScaleDownFactor, Quality, cropToSize);
            }

            lastFrame = frame;
            return(lastFrame);
        }
Example #12
        public override bool Connect()
        {
            IList <VideoCapturer.VideoDevice> devices = VideoCapturer.EnumerateDevices();

            // This connector specifically requires a third camera and uses it (index 2).
            if (devices.Count > 2)
            {
                _capturer = devices[2].CreateVideoCapturer();
                Status    = ConnectionStatus.Ready;
                return(true);
            }
            Status = ConnectionStatus.Disconnected;
            return(false);
        }
Example #13
 public void FlipCamera()
 {
     try
     {
         if (VideoCapturer != null)
         {
             var cameraId = VideoCapturer.CameraId.Equals(GetFrontCameraId()) ? GetBackCameraId() : GetFrontCameraId();
             VideoCapturer.SwitchCamera(cameraId);
         }
     }
     catch (Exception e)
     {
         Methods.DisplayReportResultTrack(e);
     }
 }
Example #14
        public MainWindow()
        {
            InitializeComponent();

            // This shows how to enumerate the available capture devices on the system so the user of the app
            // can select the desired camera. If a capturer is not provided in the publisher constructor, the first
            // available camera will be used.
            var devices = VideoCapturer.EnumerateDevices();

            if (devices.Count > 0)
            {
                var selectedDevice = devices[0];
                Trace.WriteLine("Using camera: " + devices[0].Name);
                Capturer = selectedDevice.CreateVideoCapturer(VideoCapturer.Resolution.High);
            }
            else
            {
                Trace.WriteLine("Warning: no cameras available, the publisher will be audio only.");
            }

            // We create the publisher here to show the preview when the application starts.
            // Please note that the PublisherVideo component is added in the XAML file.
            Publisher = new Publisher.Builder(Context.Instance)
            {
                Renderer = PublisherVideo,
                Capturer = Capturer
            }.Build();

            if (API_KEY == "" || SESSION_ID == "" || TOKEN == "")
            {
                MessageBox.Show("Please fill out the API_KEY, SESSION_ID and TOKEN variables in the source code " +
                                "in order to connect to the session", "Error", MessageBoxButton.OK, MessageBoxImage.Error);
                ConnectDisconnectButton.IsEnabled = false;
            }
            else
            {
                Session = new Session.Builder(Context.Instance, API_KEY, SESSION_ID).Build();

                Session.Connected      += Session_Connected;
                Session.Disconnected   += Session_Disconnected;
                Session.Error          += Session_Error;
                Session.StreamReceived += Session_StreamReceived;
                Session.StreamDropped  += Session_StreamDropped;
            }

            Closing += MainWindow_Closing;
        }
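The comment in the constructor above notes that devices[0] is an arbitrary choice. A hypothetical variant that picks a camera by display name (preferredCameraName is an assumed app setting, not part of the original) while keeping the same empty-list guard:

            // Hypothetical device picker built on the same EnumerateDevices() call:
            // prefer a camera whose Name matches a saved setting, else use the first one.
            var devices = VideoCapturer.EnumerateDevices();
            if (devices.Count > 0)
            {
                var chosen = devices[0];
                foreach (var d in devices)
                {
                    if (d.Name == preferredCameraName)
                    {
                        chosen = d;
                        break;
                    }
                }
                Capturer = chosen.CreateVideoCapturer(VideoCapturer.Resolution.High);
            }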
Example #15
        public void SetImageSize(int width, int height)
        {
            var result = VideoCapturer.SetImageSize(width, height);

            if (result == null)
            {
                throw new RecoverableException("There was an error when setting image size. See log for details");
            }

            if (result.Item1 != width || result.Item2 != height)
            {
                // image returned from the video capturer will not match the expected size precisely,
                // so we'll need to recompress and crop it manually
                cropToSize = Tuple.Create(width, height);
            }
            else
            {
                // image returned from the video capturer will be of the expected size,
                // so there is no need for manual cropping
                cropToSize = null;
            }
        }
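Examples #11 and #15 appear to come from the same host-camera wrapper: SetImageSize records a cropToSize when the capturer cannot match the request exactly, and GrabFrame then recompresses and crops the JPEG accordingly. A hypothetical usage sketch under that assumption, where camera stands for an instance of that wrapper:

        // Hypothetical usage: request 640x480, then grab a JPEG frame; GrabFrame
        // recompresses/crops internally if the capturer could not match the size.
        camera.SetImageSize(640, 480);
        byte[] jpeg = camera.GrabFrame();
        System.IO.File.WriteAllBytes("frame.jpg", jpeg);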
Example #16
 public void ReleaseMedia()
 {
     if (VideoCapturer != null)
     {
         VideoCapturer.StopCapture();
         VideoCapturer = null;
     }
     if (CurrentVideoTrack != null)
     {
         var videoTrack = CurrentVideoTrack;
         CurrentVideoTrack = null;
         CurrentRoom?.LocalParticipant.UnpublishTrack(videoTrack);
         DropRenderers(videoTrack);
         videoTrack.Release();
     }
     if (CurrentAudioTrack != null)
     {
         var audioTrack = CurrentAudioTrack;
         CurrentAudioTrack = null;
         CurrentRoom?.LocalParticipant.UnpublishTrack(audioTrack);
         audioTrack.Enable(false);
         audioTrack.Release();
     }
 }
Example #17
        public void onIceServers(IList <PeerConnection.IceServer> iceServers)
        {
            factory = new PeerConnectionFactory();
            pc      = factory.CreatePeerConnection(iceServers, appRtcClient.pcConstraints(), pcObserver);

            // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
            // NOTE: this _must_ happen while |factory| is alive!
            // Logging.enableTracing(
            //     "logcat:",
            //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
            //     Logging.Severity.LS_SENSITIVE);

            {
                PeerConnection finalPC = pc;
                IRunnable repeatedStatsLogger = new RunnableAnonymousInnerClassHelper(this, finalPC);
                vsv.PostDelayed(repeatedStatsLogger, 10000);
            }

            {
                logAndToast("Creating local video source...");
                MediaStream lMS = factory.CreateLocalMediaStream("ARDAMS");
                if (appRtcClient.videoConstraints() != null)
                {
                    VideoCapturer capturer = VideoCapturer;
                    videoSource = factory.CreateVideoSource(capturer, appRtcClient.videoConstraints());
                    VideoTrack videoTrack = factory.CreateVideoTrack("ARDAMSv0", videoSource);
                    videoTrack.AddRenderer(new VideoRenderer(new VideoCallbacks(this, vsv, VideoStreamsView.Endpoint.LOCAL)));
                    lMS.AddTrack(videoTrack);
                }
                lMS.AddTrack(factory.CreateAudioTrack("ARDAMSa0"));
                pc.AddStream(lMS, new MediaConstraints());
            }
            logAndToast("Waiting for ICE candidates...");
        }
Example #18
        public MainWindow()
        {
            InitializeComponent();

            var devices        = VideoCapturer.EnumerateDevices();
            var selectedDevice = devices[0];

            capturer  = selectedDevice.CreateVideoCapturer(VideoCapturer.Resolution.High);
            publisher = new Publisher(Context.Instance, renderer: publisherVideo, capturer: capturer);

            //var screenSharing = new ScreenSharingCapturer();
            //publisher = new Publisher(Context.Instance, renderer: publisherVideo, capturer: screenSharing);

            session = new Session(Context.Instance, API_KEY, SESSION_ID);

            session.Connected         += Session_Connected;
            session.Disconnected      += Session_Disconnected;
            session.Error             += Session_Error;
            session.ConnectionCreated += Session_ConnectionCreated;
            session.StreamReceived    += Session_StreamReceived;
            session.StreamDropped     += Session_StreamDropped;

            Closing += MainWindow_Closing;
        }
Example #19
 public void FlipCamera()
 {
     VideoCapturer?.SwitchCamera();
 }
Example #20
        private void GetUserMedia()
        {
            Debug.WriteLine("Getting user media.");

            MediaDevice _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[0];

            for (int i = 0; i < Devices.Instance.VideoMediaDevicesList.Count; i++)
            {
                if (Devices.Instance.VideoMediaDevicesList[i].DisplayName == (string)_localSettings.Values["SelectedCameraName"])
                {
                    _selectedVideoDevice = (MediaDevice)Devices.Instance.VideoMediaDevicesList[i];
                }
            }

            List <int> widths     = new List <int>();
            List <int> heights    = new List <int>();
            List <int> frameRates = new List <int>();

            foreach (var videoFormat in _selectedVideoDevice.VideoFormats)
            {
                widths.Add(videoFormat.Dimension.Width);
                heights.Add(videoFormat.Dimension.Height);

                foreach (var frameRate in videoFormat.FrameRates)
                {
                    frameRates.Add(frameRate);
                }
            }

            // Maximum and minimum values for the selected camera
            IReadOnlyList <IConstraint> mandatoryConstraints = new List <IConstraint>()
            {
                new Constraint("maxWidth", widths.Max().ToString()),
                new Constraint("minWidth", widths.Min().ToString()),
                new Constraint("maxHeight", heights.Max().ToString()),
                new Constraint("minHeight", heights.Min().ToString()),
                new Constraint("maxFrameRate", frameRates.Max().ToString()),
                new Constraint("minFrameRate", frameRates.Min().ToString())
            };

            // Add optional constraints
            IReadOnlyList <IConstraint> optionalConstraints = new List <IConstraint>();

            IMediaConstraints mediaConstraints = new MediaConstraints(mandatoryConstraints, optionalConstraints);

            var videoCapturer = VideoCapturer.Create(_selectedVideoDevice.DisplayName, _selectedVideoDevice.Id, false);

            var videoOptions = new VideoOptions();

            videoOptions.Factory     = _factory;
            videoOptions.Capturer    = videoCapturer;
            videoOptions.Constraints = mediaConstraints;

            var videoTrackSource = VideoTrackSource.Create(videoOptions);

            _selfVideoTrack = MediaStreamTrack.CreateVideoTrack("SELF_VIDEO", videoTrackSource);

            var audioOptions = new AudioOptions();

            audioOptions.Factory = _factory;

            var audioTrackSource = AudioTrackSource.Create(audioOptions);

            _selfAudioTrack = MediaStreamTrack.CreateAudioTrack("SELF_AUDIO", audioTrackSource);
        }
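Note that the mandatory constraints above span the minimum and maximum of every format the camera reports, which requests an envelope rather than one specific mode. A hypothetical alternative, using only the members already shown in this example, is to pin the constraints to a single reported format:

            // Hypothetical variant: constrain to the first format the device reports
            // instead of the min/max across all formats.
            IReadOnlyList <IConstraint> exactConstraints = null;
            foreach (var fmt in _selectedVideoDevice.VideoFormats)
            {
                var rate = 30;                                  // assumed fallback frame rate
                foreach (var r in fmt.FrameRates) { rate = r; break; }
                exactConstraints = new List <IConstraint>()
                {
                    new Constraint("maxWidth", fmt.Dimension.Width.ToString()),
                    new Constraint("minWidth", fmt.Dimension.Width.ToString()),
                    new Constraint("maxHeight", fmt.Dimension.Height.ToString()),
                    new Constraint("minHeight", fmt.Dimension.Height.ToString()),
                    new Constraint("maxFrameRate", rate.ToString()),
                    new Constraint("minFrameRate", rate.ToString())
                };
                break;                                          // first format only
            }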
Example #21
        public Task <IList <IMediaDevice> > GetMediaDevicesAsync(MediaKind kind)
        {
            if (kind == MediaKind.AudioInputDevice)
            {
                return(Task.Run(async() =>
                {
                    IList <IMediaDevice> audioMediaDevicesCapturersList = new List <IMediaDevice>();

                    DeviceInformationCollection audioCapturers = await DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioCaptureSelector());

                    foreach (var microphone in audioCapturers)
                    {
                        var mediaDevice = new MediaDevice();
                        mediaDevice.GetMediaKind(MediaKind.AudioInputDevice.ToString());
                        mediaDevice.GetId(microphone.Id);
                        mediaDevice.GetDisplayName(microphone.Name);

                        audioMediaDevicesCapturersList.Add(mediaDevice);
                    }
                    return audioMediaDevicesCapturersList;
                }));
            }

            if (kind == MediaKind.AudioOutputDevice)
            {
                return(Task.Run(async() =>
                {
                    IList <IMediaDevice> audioMediaDevicesRendersList = new List <IMediaDevice>();

                    DeviceInformationCollection audioRenders = await DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());

                    foreach (var speaker in audioRenders)
                    {
                        var mediaDevice = new MediaDevice();
                        mediaDevice.GetMediaKind(MediaKind.AudioOutputDevice.ToString());
                        mediaDevice.GetId(speaker.Id);
                        mediaDevice.GetDisplayName(speaker.Name);

                        audioMediaDevicesRendersList.Add(mediaDevice);
                    }
                    return audioMediaDevicesRendersList;
                }));
            }

            if (kind == MediaKind.VideoDevice)
            {
                return(Task.Run(async() =>
                {
                    IList <IMediaDevice> videoMediaDevicesList = new List <IMediaDevice>();

                    IReadOnlyList <IVideoDeviceInfo> videoDevices = await VideoCapturer.GetDevices();

                    foreach (IVideoDeviceInfo videoDevice in videoDevices)
                    {
                        var mediaDevice = new MediaDevice();
                        mediaDevice.GetMediaKind(MediaKind.VideoDevice.ToString());
                        mediaDevice.GetId(videoDevice.Info.Id);
                        mediaDevice.GetDisplayName(videoDevice.Info.Name);

                        IList <MediaVideoFormat> videoFormatsList = await GetMediaVideoFormatList(videoDevice.Info.Id);

                        mediaDevice.GetVideoFormats(videoFormatsList);

                        videoMediaDevicesList.Add(mediaDevice);
                    }
                    return videoMediaDevicesList;
                }));
            }
            else
            {
                return(null);
            }
        }
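Each branch wraps the device enumeration in Task.Run and returns the resulting Task, so callers simply await it. A minimal, hypothetical call-site sketch:

            // Hypothetical caller: list the cameras reported by the helper above.
            var cameras = await GetMediaDevicesAsync(MediaKind.VideoDevice);
            Console.WriteLine($"Found {cameras?.Count ?? 0} video device(s).");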