Example #1
        public void RefreshVideoProfiles(VideoCaptureDeviceInfo item, VideoProfileKind kind)
        {
            var videoProfiles = new CollectionViewModel<MediaCaptureVideoProfile>();

            if (item != null)
            {
                IReadOnlyList<MediaCaptureVideoProfile> profiles;
                if (kind == VideoProfileKind.Unspecified)
                {
                    profiles = MediaCapture.FindAllVideoProfiles(item.Id);
                }
                else
                {
                    int index = (int)kind;
                    profiles = MediaCapture.FindKnownVideoProfiles(item.Id, (KnownVideoProfile)index);
                }
                foreach (var profile in profiles)
                {
                    videoProfiles.Add(profile);
                }
            }
            VideoProfiles = videoProfiles;

            // Select first item for convenience
            VideoProfiles.SelectFirstItemIfAny();
        }
Example #2

        public void RefreshVideoProfiles(VideoCaptureDeviceInfo item, VideoProfileKind kind)
        {
            var videoProfiles = new CollectionViewModel<MediaCaptureVideoProfile>();

            if (item != null)
            {
                IReadOnlyList<MediaCaptureVideoProfile> profiles;
                if (kind == VideoProfileKind.Unspecified)
                {
                    profiles = MediaCapture.FindAllVideoProfiles(item.Id);
                }
                else
                {
                    // VideoProfileKind and KnownVideoProfile are identical except for the extra
                    // `Unspecified` entry, which takes value 0 in VideoProfileKind.
                    var profile = (KnownVideoProfile)((int)kind - 1);
                    profiles = MediaCapture.FindKnownVideoProfiles(item.Id, profile);
                }
                foreach (var profile in profiles)
                {
                    videoProfiles.Add(profile);
                }
            }
            VideoProfiles = videoProfiles;

            // Select first item for convenience
            VideoProfiles.SelectFirstItemIfAny();
        }
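
The -1 offset in Example #2 can be factored into a small helper. The sketch below is not part of the original sample; it assumes, as the comment above states, that VideoProfileKind mirrors KnownVideoProfile except for the extra `Unspecified = 0` entry at the start.

        // Minimal sketch (not from the original sample): converts the local
        // VideoProfileKind enum to the WinRT KnownVideoProfile enum, assuming
        // the two only differ by the leading `Unspecified = 0` entry.
        private static KnownVideoProfile ToKnownVideoProfile(VideoProfileKind kind)
        {
            if (kind == VideoProfileKind.Unspecified)
            {
                throw new ArgumentException("Unspecified does not map to a KnownVideoProfile.", nameof(kind));
            }
            return (KnownVideoProfile)((int)kind - 1);
        }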
Example #3
        /// <summary>
        /// Initializes <see cref="MediaCapture"/> to use the world-facing locatable color camera.
        /// </summary>
        private async Task<bool> InitializeMediaCapture()
        {
            if (_mediaCapture != null)
            {
                _logger.LogWarning("Media capture already initialized");
                return false;
            }
            }
            if (_device != Device.HoloLens2)
            {
                throw new InvalidOperationException("Device not supported.");
            }

            string deviceId = await GetDeviceId();

            IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);
            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings
            {
                VideoDeviceId = deviceId,
                VideoProfile  = profiles.First(),
                // Exclusive control is necessary to control frame-rate and resolution.
                // Note: The resolution and frame-rate of the built-in MRC camera UI might be reduced from its normal values when another app is using the photo/video camera.
                // See <see href="https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/mixed-reality-capture-for-developers"/>
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // TODO: For RGB conversion, the MediaCaptureMemoryPreference.Auto could provide a Direct3DSurface instead of a SoftwareBitmap.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };

            _mediaCapture = new MediaCapture();
            await _mediaCapture.InitializeAsync(settings);

            _logger.Log("Media capture successfully initialized.");
            return true;
        }
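
GetDeviceId() is referenced above but not shown. The following is a minimal sketch of such a helper, under the assumption that the world-facing color camera can be found by enumerating frame source groups and picking the first one that exposes a color video-record source; real code may need device-specific filtering.

        // Hypothetical helper (not part of the original example): returns the Id of
        // the first source group exposing a color video-record stream. Requires
        // Windows.Media.Capture.Frames and System.Linq.
        private async Task<string> GetDeviceId()
        {
            IReadOnlyList<MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();
            MediaFrameSourceGroup colorGroup = groups.FirstOrDefault(g => g.SourceInfos.Any(
                info => info.SourceKind == MediaFrameSourceKind.Color &&
                        info.MediaStreamType == MediaStreamType.VideoRecord));
            if (colorGroup == null)
            {
                throw new InvalidOperationException("No color camera source group found.");
            }
            return colorGroup.Id;
        }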
Example #4
        /// <summary>
        /// Finds the video profiles for the back camera. Then queries for a profile that supports
        /// HDR video recording. If an HDR-capable video profile is located, the media settings are configured
        /// to the matching profile. Finally, the media capture HDR recording mode is set to auto.
        /// </summary>
        /// <param name="sender">Contains information regarding the button that fired the event</param>
        /// <param name="e">Contains state information and event data associated with the event</param>
        private async void CheckHdrSupportBtn_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e)
        {
            string       videoDeviceId     = string.Empty;
            bool         HdrVideoSupported = false;
            MediaCapture mediaCapture      = new MediaCapture();
            MediaCaptureInitializationSettings mediaCaptureInitSettings = new MediaCaptureInitializationSettings();

            // Select the first video capture device found on the back of the device
            await LogStatusToOutputBox("Querying for video capture device on back of the device that supports Video Profile");

            videoDeviceId = await GetVideoProfileSupportedDeviceIdAsync(Windows.Devices.Enumeration.Panel.Back);

            if (string.IsNullOrEmpty(videoDeviceId))
            {
                await LogStatus("ERROR: No Video Device Id found, verify your device supports profiles", NotifyType.ErrorMessage);

                return;
            }

            await LogStatusToOutputBox(string.Format(CultureInfo.InvariantCulture,
                                                     "Found device that supports Video Profile, Device Id:\r\n {0}", videoDeviceId));

            IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindKnownVideoProfiles(videoDeviceId, KnownVideoProfile.VideoRecording);

            // Walk through the available profiles and look for the first one with an HDR-capable record description
            foreach (MediaCaptureVideoProfile profile in profiles)
            {
                IReadOnlyList<MediaCaptureVideoProfileMediaDescription> recordMediaDescription = profile.SupportedRecordMediaDescription;
                foreach (MediaCaptureVideoProfileMediaDescription videoProfileMediaDescription in recordMediaDescription)
                {
                    if (videoProfileMediaDescription.IsHdrVideoSupported)
                    {
                        // We've located the profile and description for HDR Video, set profile and flag
                        mediaCaptureInitSettings.VideoProfile           = profile;
                        mediaCaptureInitSettings.RecordMediaDescription = videoProfileMediaDescription;
                        HdrVideoSupported = true;
                        await LogStatus("HDR supported video profile found, set video profile to current HDR supported profile", NotifyType.StatusMessage);

                        break;
                    }
                }

                if (HdrVideoSupported)
                {
                    break;
                }
            }

            await LogStatusToOutputBox("Initializing Media settings to HDR Supported video profile");

            await mediaCapture.InitializeAsync(mediaCaptureInitSettings);

            if (HdrVideoSupported)
            {
                await LogStatusToOutputBox("Initializing HDR Video Mode to Auto");

                mediaCapture.VideoDeviceController.HdrVideoControl.Mode = Windows.Media.Devices.HdrVideoMode.Auto;
            }
        }
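
GetVideoProfileSupportedDeviceIdAsync() is used above but not shown. A minimal sketch follows, under the assumption that the device is selected by panel and by MediaCapture.IsVideoProfileSupported(); it is an illustration, not the original helper.

        // Hypothetical helper (not shown in this example): enumerates video capture
        // devices and returns the Id of the first one on the requested panel that
        // supports video profiles, or an empty string if none is found.
        private async Task<string> GetVideoProfileSupportedDeviceIdAsync(Windows.Devices.Enumeration.Panel panel)
        {
            DeviceInformationCollection devices =
                await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
            foreach (DeviceInformation device in devices)
            {
                if (MediaCapture.IsVideoProfileSupported(device.Id) &&
                    device.EnclosureLocation?.Panel == panel)
                {
                    return device.Id;
                }
            }
            return string.Empty;
        }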
Example #5
        public async Task<MediaCaptureInitializationSettings> FindHdrWithWcgPhotoProfile()
        {
            IReadOnlyList<MediaFrameSourceGroup> sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            MediaCaptureInitializationSettings settings = null;

            foreach (MediaFrameSourceGroup sourceGroup in sourceGroups)
            {
                // Find a device that supports AdvancedColorPhoto
                IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(
                    sourceGroup.Id,
                    KnownVideoProfile.HdrWithWcgPhoto);

                if (profileList.Count > 0)
                {
                    settings = new MediaCaptureInitializationSettings();
                    settings.VideoProfile  = profileList[0];
                    settings.VideoDeviceId = sourceGroup.Id;
                    break;
                }
            }
            return settings;
        }
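
A short usage sketch for the method above (not part of the original snippet): the returned settings are null when no device exposes the HdrWithWcgPhoto profile, so a hypothetical caller should check before initializing.

        // Hypothetical caller: initialize MediaCapture only if a matching profile was found.
        MediaCaptureInitializationSettings settings = await FindHdrWithWcgPhotoProfile();
        if (settings != null)
        {
            var mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(settings);
        }
        else
        {
            // No device with an HdrWithWcgPhoto profile is available.
        }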
Example #6

    async Task Start()
    {
        // Socket listener
        audioSocketListener = new StreamSocketListener();
        audioSocketListener.ConnectionReceived += OnConnectionAudio;
        await audioSocketListener.BindServiceNameAsync(audioServiceName);

        videoSocketListener = new StreamSocketListener();
        videoSocketListener.ConnectionReceived += OnConnectionVideo;
        await videoSocketListener.BindServiceNameAsync(videoServiceName);

        // Find a media source group which gives us webcam and microphone input streams
        var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        MediaFrameSourceGroup    selectedSourceGroup  = null;
        MediaCaptureVideoProfile selectedVideoProfile = null;
        MediaCaptureVideoProfileMediaDescription selectedDescription = null;

        foreach (MediaFrameSourceGroup sourceGroup in sourceGroups)
        {
            var videoProfiles = MediaCapture.FindKnownVideoProfiles(sourceGroup.Id, KnownVideoProfile.VideoConferencing);
            foreach (MediaCaptureVideoProfile videoProfile in videoProfiles)
            {
                foreach (var desc in videoProfile.SupportedRecordMediaDescription)
                {
                    if (desc.Width == videoWidth && desc.Height == videoHeight && desc.FrameRate == frameRate)
                    {
                        selectedSourceGroup  = sourceGroup;
                        selectedVideoProfile = videoProfile;
                        selectedDescription  = desc;
                    }
                }
            }
        }

        if (selectedSourceGroup == null)
        {
            Debug.Log("No source group was found.");
            return;
        }

        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings()
        {
            AudioProcessing        = AudioProcessing.Raw,
            MemoryPreference       = MediaCaptureMemoryPreference.Cpu,
            RecordMediaDescription = selectedDescription,
            SharingMode            = MediaCaptureSharingMode.ExclusiveControl,
            SourceGroup            = selectedSourceGroup,
            StreamingCaptureMode   = StreamingCaptureMode.AudioAndVideo,
            VideoProfile           = selectedVideoProfile,
        };

        try
        {
            await mediaCapture.InitializeAsync(settings);
        }
        catch (Exception ex)
        {
            Debug.Log("MediaCapture initialization failed: " + ex.Message);
            return;
        }

        var audioFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.MediaStreamType == MediaStreamType.Audio);

        if (audioFrameSources.Count() == 0)
        {
            Debug.Log("No audio source was found.");
            return;
        }
        MediaFrameSource audioFrameSource = audioFrameSources.FirstOrDefault().Value;
        var videoFrameSources             = mediaCapture.FrameSources.Where(src => src.Value.Info.SourceKind == MediaFrameSourceKind.Color);

        if (videoFrameSources.Count() == 0)
        {
            Debug.Log("No video source was found.");
            return;
        }
        // MediaFrameSource videoFrameSource = videoFrameSources.FirstOrDefault().Value;
        MediaFrameSource videoFrameSource = null;
        MediaFrameFormat selectedFormat   = null;

        foreach (var kv in videoFrameSources)
        {
            MediaFrameSource source = kv.Value;
            foreach (MediaFrameFormat format in source.SupportedFormats)
            {
                if (format.VideoFormat.Width == videoWidth && format.VideoFormat.Height == videoHeight &&
                    format.FrameRate.Numerator == frameRate && format.FrameRate.Denominator == 1)
                {
                    videoFrameSource = source;
                    selectedFormat   = format;
                    break;
                }
            }
            if (videoFrameSource != null)
            {
                break;
            }
        }
        if (selectedFormat != null)
        {
            await videoFrameSource.SetFormatAsync(selectedFormat);
        }
        else
        {
            Debug.Log("Cannot find a proper MediaFrameFormat.");
            return;
        }

        // Start streaming
        audioFrameReader = await mediaCapture.CreateFrameReaderAsync(audioFrameSource);

        audioFrameReader.FrameArrived += AudioFrameArrived;
        videoFrameReader = await mediaCapture.CreateFrameReaderAsync(videoFrameSource);

        videoFrameReader.FrameArrived += VideoFrameArrived;

        var audioStartStatus = audioFrameReader.StartAsync();
        var videoStartStatus = videoFrameReader.StartAsync();

        if (await audioStartStatus != MediaFrameReaderStartStatus.Success)
        {
            Debug.Log("The audioFrameReader couldn't start.");
        }
        if (await videoStartStatus != MediaFrameReaderStartStatus.Success)
        {
            Debug.Log("The videoFrameReader couldn't start.");
        }
    }
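
A matching teardown for Start() is not shown in the original snippet. The following is a minimal sketch assuming the same field names: unhook the handlers, stop and dispose the frame readers, then release MediaCapture and the socket listeners.

    // Hypothetical counterpart to Start() (not part of the original snippet).
    async Task Stop()
    {
        if (audioFrameReader != null)
        {
            audioFrameReader.FrameArrived -= AudioFrameArrived;
            await audioFrameReader.StopAsync();
            audioFrameReader.Dispose();
            audioFrameReader = null;
        }
        if (videoFrameReader != null)
        {
            videoFrameReader.FrameArrived -= VideoFrameArrived;
            await videoFrameReader.StopAsync();
            videoFrameReader.Dispose();
            videoFrameReader = null;
        }
        mediaCapture?.Dispose();
        mediaCapture = null;
        audioSocketListener?.Dispose();
        audioSocketListener = null;
        videoSocketListener?.Dispose();
        videoSocketListener = null;
    }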
Example #7
    /// <summary>
    /// The Task to asynchronously initialize MediaCapture in UWP. The HoloLens camera will be
    /// configured to preview video at the selected resolution and frame rate, with NV12 pixel
    /// format. A MediaFrameReader will be initialized and the OnFrameArrived callback registered
    /// for each video frame. Note that this task does not start the video preview; it only
    /// configures the running behavior. This task should be executed when the ARUWPController
    /// status is ARUWP_STATUS_CLEAN, and will change it to ARUWP_STATUS_VIDEO_INITIALIZED if no
    /// error occurred. [internal use]
    /// </summary>
    /// <returns>Whether the video pipeline is successfully initialized</returns>
    public async Task<bool> InitializeMediaCaptureAsyncTask()
    {
        if (controller.status != ARUWP.ARUWP_STATUS_CLEAN)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() unsupported status");
            return false;
        }
        }

        if (mediaCapture != null)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because mediaCapture is not null");
            return false;
        }

        int   targetVideoWidth, targetVideoHeight;
        float targetVideoFrameRate;

        switch (videoParameter)
        {
        case VideoParameter.HL1Param1280x720x15:
            HL = 1;
            targetVideoWidth     = 1280;
            targetVideoHeight    = 720;
            targetVideoFrameRate = 15.0f;
            break;

        case VideoParameter.HL1Param1280x720x30:
            HL = 1;
            targetVideoWidth     = 1280;
            targetVideoHeight    = 720;
            targetVideoFrameRate = 30.0f;
            break;

        case VideoParameter.HL1Param1344x756x15:
            HL = 1;
            targetVideoWidth     = 1344;
            targetVideoHeight    = 756;
            targetVideoFrameRate = 15.0f;
            break;

        case VideoParameter.HL1Param1344x756x30:
            HL = 1;
            targetVideoWidth     = 1344;
            targetVideoHeight    = 756;
            targetVideoFrameRate = 30.0f;
            break;

        case VideoParameter.HL1Param896x504x15:
            HL = 1;
            targetVideoWidth     = 896;
            targetVideoHeight    = 504;
            targetVideoFrameRate = 15.0f;
            break;

        case VideoParameter.HL1Param896x504x30:
            HL = 1;
            targetVideoWidth     = 896;
            targetVideoHeight    = 504;
            targetVideoFrameRate = 30.0f;
            break;

        case VideoParameter.HL2Param1504x846x60:
            HL = 2;
            targetVideoWidth     = 1504;
            targetVideoHeight    = 846;
            targetVideoFrameRate = 60.0f;
            break;

        case VideoParameter.HL2Param1504x846x30:
            HL = 2;
            targetVideoWidth     = 1504;
            targetVideoHeight    = 846;
            targetVideoFrameRate = 30.0f;
            break;

        default:
            return false;
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        int selectedGroupIndex = -1;

        for (int i = 0; i < allGroups.Count; i++)
        {
            var group = allGroups[i];
            Debug.Log(group.DisplayName + ", " + group.Id);
            // for HoloLens 1
            if (HL == 1)
            {
                if (group.DisplayName == "MN34150")
                {
                    selectedGroupIndex = i;
                    Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
                    break;
                }
            }
            // for HoloLens 2
            else if (HL == 2)
            {
                if (group.DisplayName == "QC Back Camera")
                {
                    selectedGroupIndex = i;
                    Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
                    break;
                }
            }
        }

        if (selectedGroupIndex == -1)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
            return false;
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;

        if (HL == 1)
        {
            var settings = new MediaCaptureInitializationSettings {
                SourceGroup = allGroups[selectedGroupIndex],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
            mediaStreamType = MediaStreamType.VideoPreview;
        }
        else if (HL == 2)
        {
            string deviceId = allGroups[selectedGroupIndex].Id;
            // Look up all video profiles
            //IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindAllVideoProfiles(deviceId);
            //MediaCaptureVideoProfile selectedProfile;
            IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

            // Initialize mediacapture with the source group.
            var settings = new MediaCaptureInitializationSettings {
                //SourceGroup = allGroups[selectedGroupIndex],
                VideoDeviceId = deviceId,
                VideoProfile  = profileList[0],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.ExclusiveControl,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
            mediaStreamType = MediaStreamType.VideoRecord;
        }


        try {
            var mediaFrameSourceVideo         = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
            MediaFrameFormat targetResFormat  = null;
            float            framerateDiffMin = 60f;
            foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
            {
                if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
                {
                    // Cast to float so non-integer frame rates (e.g. 30000/1001) are not truncated by integer division.
                    float framerateDiff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                    if (targetResFormat == null || framerateDiff < framerateDiffMin)
                    {
                        targetResFormat  = f;
                        framerateDiffMin = framerateDiff;
                    }
                }
            }
            if (targetResFormat == null)
            {
                Debug.Log(TAG + ": Unable to choose the selected format, fall back");
                targetResFormat = mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            }

            await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            controller.frameWidth     = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            controller.frameHeight    = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            // If controller.frameWidth is not a multiple of 64, pad it up to the next multiple of 64.
            // On HoloLens 2 this is required.
            if (controller.frameWidth % 64 != 0)
            {
                int paddedFrameWidth = ((controller.frameWidth >> 6) + 1) << 6;
                Debug.Log(TAG + ": the width is padded to " + paddedFrameWidth);
                controller.frameWidth = paddedFrameWidth;
            }
            // Since v0.3, the grayscale feature is forced
            frameData = new byte[controller.frameWidth * controller.frameHeight];
            Debug.Log(TAG + ": FrameReader is successfully initialized, " + controller.frameWidth + "x" + controller.frameHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
        }
        catch (Exception e) {
            Debug.Log(TAG + ": FrameReader is not initialized");
            Debug.Log(TAG + ": Exception: " + e);
            return false;
        }

        controller.status = ARUWP.ARUWP_STATUS_VIDEO_INITIALIZED;
        signalInitDone    = true;
        Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() is successful");
        return true;
    }
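
OnFrameArrived is registered above but not shown. A minimal sketch of such a handler follows; the copy into frameData is only indicated, since the real ARUWP handler extracts the NV12 luminance plane through a native buffer interface, and ProcessFrame is a hypothetical placeholder.

    // Hypothetical handler (the real one is not shown in this example): acquire
    // the latest frame and hand its SoftwareBitmap to the processing code.
    private void OnFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
    {
        using (MediaFrameReference frame = sender.TryAcquireLatestFrame())
        {
            SoftwareBitmap bitmap = frame?.VideoMediaFrame?.SoftwareBitmap;
            if (bitmap == null)
            {
                return;
            }
            // In the real implementation the NV12 luminance (Y) plane is copied
            // into frameData here; ProcessFrame stands in for that step.
            ProcessFrame(bitmap);
        }
    }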
Example #8
    private async Task<bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because of incorrect status");
            return false;
        }

        if (mediaCapture != null)
        {
            return false;
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        int selectedGroupIndex = -1;

        for (int i = 0; i < allGroups.Count; i++)
        {
            var group = allGroups[i];
            Debug.Log(group.DisplayName + ", " + group.Id);
            // for HoloLens 1
            if (group.DisplayName == "MN34150")
            {
                selectedGroupIndex = i;
                HL = 1;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
                break;
            }
            // for HoloLens 2
            else if (group.DisplayName == "QC Back Camera")
            {
                selectedGroupIndex = i;
                HL = 2;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
                break;
            }
        }

        if (selectedGroupIndex == -1)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;

        if (HL == 1)
        {
            var settings = new MediaCaptureInitializationSettings {
                SourceGroup = allGroups[selectedGroupIndex],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
            mediaStreamType = MediaStreamType.VideoPreview;
        }
        else if (HL == 2)
        {
            string deviceId = allGroups[selectedGroupIndex].Id;
            // Look up all video profiles
            //IReadOnlyList<MediaCaptureVideoProfile> profiles = MediaCapture.FindAllVideoProfiles(deviceId);
            //MediaCaptureVideoProfile selectedProfile;
            IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

            // Initialize mediacapture with the source group.
            var settings = new MediaCaptureInitializationSettings {
                //SourceGroup = allGroups[selectedGroupIndex],
                VideoDeviceId = deviceId,
                VideoProfile  = profileList[0],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.ExclusiveControl,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
            mediaStreamType = MediaStreamType.VideoRecord;
        }



        try {
            var mediaFrameSourceVideo         = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
            MediaFrameFormat targetResFormat  = null;
            float            framerateDiffMin = 60f;
            foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
            {
                if (f.VideoFormat.Width == targetVideoWidth && f.VideoFormat.Height == targetVideoHeight)
                {
                    // Cast to float so non-integer frame rates (e.g. 30000/1001) are not truncated by integer division.
                    float framerateDiff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - targetVideoFrameRate);
                    if (targetResFormat == null || framerateDiff < framerateDiffMin)
                    {
                        targetResFormat  = f;
                        framerateDiffMin = framerateDiff;
                    }
                }
            }
            if (targetResFormat == null)
            {
                targetResFormat = mediaFrameSourceVideo.SupportedFormats[0];
                Debug.Log(TAG + ": Unable to choose the selected format, fall back");
            }
            // choose the smallest resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            // choose the specific resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
            await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);

            Debug.Log(TAG + ": mediaFrameSourceVideo.SetFormatAsync()");
            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);

            Debug.Log(TAG + ": mediaCapture.CreateFrameReaderAsync()");
            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(TAG + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
        }
        catch (Exception e) {
            Debug.Log(TAG + ": FrameReader is not initialized");
            Debug.Log(TAG + ": Exception: " + e);
            return false;
        }

        captureStatus = CaptureStatus.Initialized;
        return true;
    }
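
The example above only initializes the frame reader; starting it is a separate step. A minimal start sketch follows (not part of the original example), assuming the CaptureStatus enum also has a Running member alongside Clean and Initialized.

    // Hypothetical start helper: transitions from Initialized to Running.
    private async Task<bool> StartFrameReaderAsync()
    {
        if (captureStatus != CaptureStatus.Initialized)
        {
            return false;
        }
        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status == MediaFrameReaderStartStatus.Success)
        {
            captureStatus = CaptureStatus.Running;
            return true;
        }
        Debug.Log(TAG + ": frameReader failed to start: " + status);
        return false;
    }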