/// <summary>
/// Unity callback invoked when the component is enabled. Creates the external
/// video track source backed by the frame callback matching the frame storage
/// type <c>T</c>, then raises <see cref="VideoStreamStarted"/> with the new source.
/// Does nothing if a source already exists.
/// </summary>
/// <exception cref="NotSupportedException">
/// The frame storage type <c>T</c> is neither <c>I420AVideoFrameStorage</c> nor
/// <c>Argb32VideoFrameStorage</c>.
/// </exception>
/// <exception cref="InvalidOperationException">
/// The native external video track source could not be created.
/// </exception>
protected virtual void OnEnable()
{
    if (Source != null)
    {
        return;
    }

    // Create the external source
    //< TODO - Better abstraction
    if (typeof(T) == typeof(I420AVideoFrameStorage))
    {
        Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
    }
    else if (typeof(T) == typeof(Argb32VideoFrameStorage))
    {
        Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
    }
    else
    {
        throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
    }
    if (Source == null)
    {
        // FIX: was throwing the base Exception type; a specific exception type is
        // more informative and remains backward compatible for callers catching Exception.
        throw new InvalidOperationException("Failed to create external video track source.");
    }
    VideoStreamStarted.Invoke(Source);
}
/// <summary>
/// Internal free-threaded helper callback on track added, which enqueues the
/// <see cref="VideoSource.VideoStreamStarted"/> event to be fired from the main
/// Unity thread.
/// </summary>
private void TrackAdded(WebRTC.PeerConnection.TrackKind trackKind)
{
    // Only video tracks are of interest here; ignore everything else.
    if (trackKind != WebRTC.PeerConnection.TrackKind.Video)
    {
        return;
    }
    // Defer the event invocation to the main Unity thread so that listeners
    // can directly access Unity objects from their handler function.
    _mainThreadWorkQueue.Enqueue(() => VideoStreamStarted.Invoke());
}
/// <summary>
/// Add a new track to the peer connection and start the video track playback.
/// Generates a unique track name (GUID) if none was assigned, validates it as an
/// SDP token, creates the external video source matching the frame storage type
/// <c>T</c>, and attaches it to the native peer connection as a local video track.
/// Raises <see cref="VideoStreamStarted"/> once the track is created.
/// </summary>
/// <exception cref="NotSupportedException">
/// The frame storage type <c>T</c> is neither <c>I420AVideoFrameStorage</c> nor
/// <c>Argb32VideoFrameStorage</c>.
/// </exception>
public void StartTrack()
{
    // Ensure the track has a valid name
    string trackName = TrackName;
    if (trackName.Length == 0)
    {
        // Generate a unique name (GUID)
        trackName = Guid.NewGuid().ToString();
        TrackName = trackName;
    }
    SdpTokenAttribute.Validate(trackName, allowEmpty: false);

    // Create the external source
    var nativePeer = PeerConnection.Peer;
    //< TODO - Better abstraction
    if (typeof(T) == typeof(I420AVideoFrameStorage))
    {
        Source = ExternalVideoTrackSource.CreateFromI420ACallback(OnFrameRequested);
    }
    else if (typeof(T) == typeof(Argb32VideoFrameStorage))
    {
        Source = ExternalVideoTrackSource.CreateFromArgb32Callback(OnFrameRequested);
    }
    else
    {
        // FIX: the message was empty, giving callers no actionable diagnostic;
        // use the same wording as the equivalent check in OnEnable().
        throw new NotSupportedException("This frame storage is not supported. Use I420AVideoFrameStorage or Argb32VideoFrameStorage.");
    }

    // Create the local video track
    if (Source != null)
    {
        Track = nativePeer.AddCustomLocalVideoTrack(trackName, Source);
        if (Track != null)
        {
            VideoStreamStarted.Invoke();
        }
    }
}
/// <summary>
/// Unity callback invoked when the component is enabled. On UWP, requests access
/// to the video capture device (the OS may prompt the user); then resolves the
/// capture format constraints (automatic mode picks HoloLens-specific profiles
/// when running on a transparent holographic display), creates the webcam device
/// source, and raises <see cref="VideoStreamStarted"/>.
/// Does nothing if a source already exists.
/// NOTE: async void is imposed by the Unity message signature; exceptions thrown
/// after an await are not observable by the caller.
/// </summary>
/// <exception cref="InvalidOperationException">
/// The webcam device source could not be created.
/// </exception>
protected async void OnEnable()
{
    if (Source != null)
    {
        return;
    }

#if UNITY_WSA && !UNITY_EDITOR
    // Request UWP access to video capture. The OS may show some popup dialog to the
    // user to request permission. This will succeed only if the user grants permission.
    try
    {
        // Note that the UWP UI thread and the main Unity app thread are always different.
        // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
        // We leave the code below as an example of generic handling in case this would be used in
        // some other place, and in case a future version of Unity decided to change that assumption,
        // but currently OnEnable() is always invoked from the main Unity app thread so here the first
        // branch is never taken.
        if (UnityEngine.WSA.Application.RunningOnUIThread())
        {
            await RequestAccessAsync();
        }
        else
        {
            UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
        }
    }
    catch (Exception ex)
    {
        // Log an error and prevent activation
        Debug.LogError($"Video access failure: {ex.Message}.");
        this.enabled = false;
        return;
    }
#endif

    string videoProfileId = VideoProfileId;
    var videoProfileKind = VideoProfileKind;
    int width = Constraints.width;
    int height = Constraints.height;
    double framerate = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
    if (FormatMode == LocalVideoSourceFormatMode.Automatic)
    {
        // Do not constrain resolution by default, unless the device calls for it (see below).
        width = 0; // auto
        height = 0; // auto

        // Avoid constraining the framerate; this is generally not necessary (formats are listed
        // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
        // others report 29.97 FPS.
        framerate = 0; // auto

        // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
        if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
        {
            if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
            {
                if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                {
                    // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                    videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                    width = 896; // Target 896 x 504
                }
                else
                {
                    // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                    videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                    width = 960; // Target 960 x 540
                }
            }
        }
    }
#endif

    // TODO - Fix codec selection (was as below before change)

    // Force again PreferredVideoCodec right before starting the local capture,
    // so that modifications to the property done after OnPeerInitialized() are
    // accounted for.
    //< FIXME
    //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

    // Check H.264 requests on Desktop (not supported)
    //#if !ENABLE_WINMD_SUPPORT
    //            if (PreferredVideoCodec == "H264")
    //            {
    //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
    //                PreferredVideoCodec = "VP8";
    //            }
    //#endif

    //// Ensure the track has a valid name
    //string trackName = TrackName;
    //if (trackName.Length == 0)
    //{
    //    trackName = Guid.NewGuid().ToString();
    //    // Re-assign the generated track name for consistency
    //    TrackName = trackName;
    //}
    //SdpTokenAttribute.Validate(trackName, allowEmpty: false);

    // Create the track
    var deviceConfig = new LocalVideoDeviceInitConfig
    {
        videoDevice = WebcamDevice,
        videoProfileId = videoProfileId,
        videoProfileKind = videoProfileKind,
        width = (width > 0 ? (uint?)width : null),
        height = (height > 0 ? (uint?)height : null),
        framerate = (framerate > 0 ? (double?)framerate : null),
        enableMrc = EnableMixedRealityCapture,
        enableMrcRecordingIndicator = EnableMRCRecordingIndicator
    };
    Source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);
    if (Source == null)
    {
        // FIX: corrected typo "Failed ot create" in the error message, and use a
        // specific exception type instead of the base Exception (backward compatible).
        throw new InvalidOperationException("Failed to create webcam video source.");
    }

    IsStreaming = true;
    VideoStreamStarted.Invoke(this);
}
/// <summary>
/// Unity callback invoked when the component is enabled. Handles per-platform
/// camera permission (Android runtime permission with deferred retry, UWP access
/// request), resolves automatic capture format constraints (HoloLens profiles on
/// WinMD; closest format to 720x480 on Android), then creates the webcam device
/// source and raises <see cref="VideoStreamStarted"/>. Does nothing if a source
/// already exists. On device-source creation failure, logs and returns instead
/// of throwing. NOTE: async void is imposed by the Unity message signature.
/// </summary>
protected async void OnEnable()
{
    if (Source != null)
    {
        return;
    }

#if PLATFORM_ANDROID
    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Check for permission to access the camera
    if (!Permission.HasUserAuthorizedPermission(Permission.Camera))
    {
        if (!_androidCameraRequestPending)
        {
            // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
            // and check for permission again each time (see below why).
            _androidCameraRequestPending = true;
            _androidCameraRequestRetryUntilTime = Time.time + 300;

            // Display dialog requesting user permission. This will return immediately,
            // and unfortunately there's no good way to tell when this completes. As a rule
            // of thumb, application should lose focus, so check when focus resumes should
            // be sufficient without having to poll every frame.
            Permission.RequestUserPermission(Permission.Camera);
        }
        // Bail out; OnApplicationFocus() will re-trigger the permission check later.
        return;
    }
#elif UNITY_WSA && !UNITY_EDITOR
    // Request UWP access to video capture. The OS may show some popup dialog to the
    // user to request permission. This will succeed only if the user grants permission.
    try
    {
        // Note that the UWP UI thread and the main Unity app thread are always different.
        // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
        // We leave the code below as an example of generic handling in case this would be used in
        // some other place, and in case a future version of Unity decided to change that assumption,
        // but currently OnEnable() is always invoked from the main Unity app thread so here the first
        // branch is never taken.
        if (UnityEngine.WSA.Application.RunningOnUIThread())
        {
            await RequestAccessAsync();
        }
        else
        {
            UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
        }
    }
    catch (Exception ex)
    {
        // Log an error and prevent activation
        Debug.LogError($"Video access failure: {ex.Message}.");
        this.enabled = false;
        return;
    }
#endif

    // Handle automatic capture format constraints
    string videoProfileId = VideoProfileId;
    var videoProfileKind = VideoProfileKind;
    int width = Constraints.width;
    int height = Constraints.height;
    double framerate = Constraints.framerate;
#if ENABLE_WINMD_SUPPORT
    if (FormatMode == LocalVideoSourceFormatMode.Automatic)
    {
        // Do not constrain resolution by default, unless the device calls for it (see below).
        width = 0; // auto
        height = 0; // auto

        // Avoid constraining the framerate; this is generally not necessary (formats are listed
        // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
        // others report 29.97 FPS.
        framerate = 0; // auto

        // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
        if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
        {
            if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
            {
                if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                {
                    // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                    videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                    width = 896; // Target 896 x 504
                }
                else
                {
                    // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                    videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                    width = 960; // Target 960 x 540
                }
            }
        }
    }
#elif PLATFORM_ANDROID
    if (FormatMode == LocalVideoSourceFormatMode.Automatic)
    {
        // Avoid constraining the framerate; this is generally not necessary (formats are listed
        // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
        // others report 29.97 FPS.
        framerate = 0; // auto

        // Fall back to the first enumerated capture device when none was configured.
        string deviceId = WebcamDevice.id;
        if (string.IsNullOrEmpty(deviceId))
        {
            List<VideoCaptureDevice> listedDevices = await PeerConnection.GetVideoCaptureDevicesAsync();
            if (listedDevices.Count > 0)
            {
                deviceId = listedDevices[0].id;
            }
        }
        if (!string.IsNullOrEmpty(deviceId))
        {
            // Find the closest format to 720x480, independent of framerate
            List<VideoCaptureFormat> formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId);
            double smallestDiff = double.MaxValue;
            bool hasFormat = false;
            foreach (var fmt in formats)
            {
                // L1-style distance in pixels between this format and the 720x480 target.
                double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                if ((diff < smallestDiff) || !hasFormat)
                {
                    hasFormat = true;
                    smallestDiff = diff;
                    width = (int)fmt.width;
                    height = (int)fmt.height;
                }
            }
            if (hasFormat)
            {
                Debug.Log($"WebcamSource automated mode selected resolution {width}x{height} for Android video capture device #{deviceId}.");
            }
        }
    }
#endif

    // TODO - Fix codec selection (was as below before change)

    // Force again PreferredVideoCodec right before starting the local capture,
    // so that modifications to the property done after OnPeerInitialized() are
    // accounted for.
    //< FIXME
    //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

    // Check H.264 requests on Desktop (not supported)
    //#if !ENABLE_WINMD_SUPPORT
    //            if (PreferredVideoCodec == "H264")
    //            {
    //                Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
    //                PreferredVideoCodec = "VP8";
    //            }
    //#endif

    // Create the track
    var deviceConfig = new LocalVideoDeviceInitConfig
    {
        videoDevice = WebcamDevice,
        videoProfileId = videoProfileId,
        videoProfileKind = videoProfileKind,
        width = (width > 0 ? (uint?)width : null),
        height = (height > 0 ? (uint?)height : null),
        framerate = (framerate > 0 ? (double?)framerate : null),
        enableMrc = EnableMixedRealityCapture,
        enableMrcRecordingIndicator = EnableMRCRecordingIndicator
    };
    try
    {
        Source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);
        if (Source == null)
        {
            throw new Exception("DeviceVideoTrackSource.CreateAsync() returned a NULL source.");
        }
    }
    catch (Exception ex)
    {
        // Creation failure is logged rather than propagated: this is an async void
        // Unity callback, so a thrown exception would be unobservable by callers.
        Debug.LogError($"Failed to create device track source for {nameof(WebcamSource)} component '{name}'.");
        Debug.LogException(ex, this);
        return;
    }

    IsStreaming = true;
    VideoStreamStarted.Invoke(this);
}
/// <summary>
/// Internal free-threaded helper callback on track added, which enqueues the
/// <see cref="VideoSource.VideoStreamStarted"/> event to be fired from the main
/// Unity thread.
/// </summary>
private void TrackAdded()
{
    // Listeners may access Unity objects, which is only legal from the main Unity
    // thread, so defer the event invocation through the main-thread work queue.
    Action raiseStreamStarted = () => VideoStreamStarted.Invoke();
    _mainThreadWorkQueue.Enqueue(raiseStreamStarted);
}
/// <summary>
/// Raise the <see cref="VideoStreamStarted"/> event, if any handler is attached.
/// </summary>
protected void NotifyVideoStreamStarted()
{
    // Null-conditional invocation: a no-op when no listener is registered.
    VideoStreamStarted?.Invoke();
}