Example #1
        protected override async Task CreateLocalAudioTrackAsyncImpl()
        {
            if (Track == null)
            {
                // Ensure the track has a valid name
                string trackName = TrackName;
                if (trackName.Length == 0)
                {
                    trackName = Guid.NewGuid().ToString();
                    // Re-assign the generated track name for consistency
                    TrackName = trackName;
                }
                SdpTokenAttribute.Validate(trackName, allowEmpty: false);

                // Create the local track
                var trackSettings = new LocalAudioTrackSettings
                {
                    trackName = trackName
                };
                Track = await LocalAudioTrack.CreateFromDeviceAsync(trackSettings);

                // Synchronize the track status with the Unity component status
                Track.Enabled = enabled;
            }
        }
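The empty-name fallback above (empty name, then GUID, then SDP validation) recurs in every example on this page; a minimal sketch of it as a standalone helper, using only Guid.NewGuid() and the SdpTokenAttribute.Validate() call already shown in the example (the helper name is hypothetical):

        // Hypothetical helper sketching the track-name fallback used in these examples.
        // Relies only on System.Guid and SdpTokenAttribute.Validate() from the example above.
        private static string EnsureValidTrackName(string trackName)
        {
            if (string.IsNullOrEmpty(trackName))
            {
                // Generate a unique, SDP-compatible name (GUID)
                trackName = Guid.NewGuid().ToString();
            }
            // Throws if the name is not a valid SDP token
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);
            return trackName;
        }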
Example #2
        protected override Task CreateLocalVideoTrackAsync()
        {
            // Ensure the track has a valid name
            string trackName = TrackName;

            if (string.IsNullOrEmpty(trackName))
            {
                // Generate a unique name (GUID)
                trackName = Guid.NewGuid().ToString();
                TrackName = trackName;
            }
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the external source
            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }
            if (Source == null)
            {
                throw new Exception("Failed to create external video track source.");
            }

            // Create the local video track
            Track = LocalVideoTrack.CreateFromExternalSource(trackName, Source);
            if (Track == null)
            {
                throw new Exception("Failed ot create webcam video track.");
            }

            // Synchronize the track status with the Unity component status
            Track.Enabled = enabled;

            // This implementation is fast, so it executes synchronously.
            return Task.CompletedTask;
        }
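The `//< TODO - Better abstraction` note refers to the typeof(T) dispatch above; a hedged sketch of factoring that dispatch into a helper, using only the two ExternalVideoTrackSource factory calls and the storage types shown in the example (the helper name is hypothetical):

        // Hypothetical helper: the same typeof(T) dispatch as above, factored out.
        // The StartTrack() example below repeats the identical pattern and could reuse it.
        private ExternalVideoTrackSource CreateExternalSource()
        {
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                return ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                return ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
        }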
Example #3
        /// <summary>
        /// Add a new track to the peer connection and start the video track playback.
        /// </summary>
        public void StartTrack()
        {
            // Ensure the track has a valid name
            string trackName = TrackName;

            if (trackName.Length == 0)
            {
                // Generate a unique name (GUID)
                trackName = Guid.NewGuid().ToString();
                TrackName = trackName;
            }
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the external source
            var nativePeer = PeerConnection.Peer;

            //< TODO - Better abstraction
            if (typeof(T) == typeof(I420AVideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
            }
            else if (typeof(T) == typeof(Argb32VideoFrameStorage))
            {
                Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
            }
            else
            {
                throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
            }

            // Create the local video track
            if (Source != null)
            {
                Track = nativePeer.AddCustomLocalVideoTrack(trackName, Source);
                if (Track != null)
                {
                    VideoStreamStarted.Invoke();
                }
            }
        }
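StartTrack() dereferences PeerConnection.Peer, so it only works after the peer connection has been initialized; a hedged guard sketch around it, using only PeerConnection.Peer and StartTrack() from the example (the guard method name is hypothetical):

        // Hypothetical guard (not part of the original component): only start the custom
        // track once the native peer exists, since StartTrack() dereferences PeerConnection.Peer.
        public void TryStartTrack()
        {
            if ((PeerConnection == null) || (PeerConnection.Peer == null))
            {
                Debug.LogWarning("Cannot start the custom video track: the peer connection is not initialized yet.");
                return;
            }
            StartTrack();
        }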
Example #4
        private async Task AddLocalVideoTrackImplAsync(WebRTC.PeerConnection nativePeer)
        {
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;

#if ENABLE_WINMD_SUPPORT
            if (Mode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (Windows.ApplicationModel.Package.Current.Id.Architecture == Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                                   // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoConferencing;
                            width            = 1280; // Target 1280 x 720
                        }
                    }
                }
            }
#endif
            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            nativePeer.PreferredVideoCodec = PreferredVideoCodec;

            // Ensure the track has a valid name
            string trackName = TrackName;
            if (trackName.Length == 0)
            {
                trackName = Guid.NewGuid().ToString();
                TrackName = trackName;
            }
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            _frameQueue.Clear();

            var trackSettings = new WebRTC.PeerConnection.LocalVideoTrackSettings
            {
                trackName        = trackName,
                videoDevice      = default,
                videoProfileId   = videoProfileId,
                videoProfileKind = videoProfileKind,
                width            = (width > 0 ? (uint?)width : null),
                height           = (height > 0 ? (uint?)height : null),
                framerate        = (framerate > 0 ? (double?)framerate : null)
            };

            // Create the local video track. The source listing is truncated from this point on;
            // the AddLocalVideoTrackAsync() call below is an assumption based on the
            // PeerConnection-centric API this example uses.
            await nativePeer.AddLocalVideoTrackAsync(trackSettings);
        }
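Both this example and the next one repeat the same HoloLens 1 / HoloLens 2 detection; a hedged sketch of that check factored into a UWP-only helper, using only the Windows.Graphics.Holographic and Package.Current.Id.Architecture calls taken from the examples (the helper and enum names are hypothetical):

#if ENABLE_WINMD_SUPPORT
        // Hypothetical helper mirroring the device detection used in these examples.
        private enum HoloLensGeneration { None, HoloLens1, HoloLens2 }

        private static HoloLensGeneration DetectHoloLens()
        {
            // No holographic display, or an opaque (VR) display: not a HoloLens.
            if (!Windows.Graphics.Holographic.HolographicSpace.IsAvailable ||
                Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
            {
                return HoloLensGeneration.None;
            }
            // Transparent display on x86 is assumed to be HoloLens 1, otherwise HoloLens 2.
            bool isX86 = (Windows.ApplicationModel.Package.Current.Id.Architecture
                == Windows.System.ProcessorArchitecture.X86);
            return isX86 ? HoloLensGeneration.HoloLens1 : HoloLensGeneration.HoloLens2;
        }
#endif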
Example #5
        protected override async Task CreateLocalVideoTrackAsync()
        {
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;

#if ENABLE_WINMD_SUPPORT
            if (FormatMode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                    // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                            width            = 960; // Target 960 x 540
                        }
                    }
                }
            }
#endif
            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            //< FIXME
            //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

            // Check H.264 requests on Desktop (not supported)
#if !ENABLE_WINMD_SUPPORT
            if (PreferredVideoCodec == "H264")
            {
                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
                PreferredVideoCodec = "VP8";
            }
#endif

            // Ensure the track has a valid name
            string trackName = TrackName;
            if (trackName.Length == 0)
            {
                trackName = Guid.NewGuid().ToString();
                // Re-assign the generated track name for consistency
                TrackName = trackName;
            }
            SdpTokenAttribute.Validate(trackName, allowEmpty: false);

            // Create the track
            var trackSettings = new LocalVideoTrackSettings
            {
                trackName                   = trackName,
                videoDevice                 = WebcamDevice,
                videoProfileId              = videoProfileId,
                videoProfileKind            = videoProfileKind,
                width                       = (width > 0 ? (uint?)width : null),
                height                      = (height > 0 ? (uint?)height : null),
                framerate                   = (framerate > 0 ? (double?)framerate : null),
                enableMrc                   = EnableMixedRealityCapture,
                enableMrcRecordingIndicator = EnableMRCRecordingIndicator
            };
            Track = await LocalVideoTrack.CreateFromDeviceAsync(trackSettings);

            if (Track == null)
            {
                throw new Exception("Failed ot create webcam video track.");
            }

            // Synchronize the track status with the Unity component status
            Track.Enabled = enabled;
        }
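The `(width > 0 ? (uint?)width : null)` expressions convert the component's "0 means auto" constraint values into the nullable fields of LocalVideoTrackSettings; a minimal sketch of the same conversion pulled into helpers (the helper names are hypothetical):

        // Hypothetical helpers for the "0 means auto / unconstrained" convention used above.
        private static uint? ToOptionalConstraint(int value) => (value > 0) ? (uint?)value : null;
        private static double? ToOptionalConstraint(double value) => (value > 0) ? (double?)value : null;

        // Usage, equivalent to the inline conversions in the settings initializer:
        //   width     = ToOptionalConstraint(width),
        //   height    = ToOptionalConstraint(height),
        //   framerate = ToOptionalConstraint(framerate),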