Example #1
    // Used an explicit tuple here as I'm in C# 6.0
    async Task<Tuple<MediaCapture, MediaFrameSource>> GetMediaCaptureForDescriptionAsync(
        MediaFrameSourceKind sourceKind,
        int width,
        int height,
        int frameRate,
        string[] bitmapFormats = null)
    {
        MediaCapture     mediaCapture = null;
        MediaFrameSource frameSource  = null;

        var allSources = await MediaFrameSourceGroup.FindAllAsync();

        // Ignore the frame rate on the description here: both depth streams report 30fps,
        // which (going by the docs) I don't think is accurate, so I leave the rate to be
        // queried later on.
        // NB: LastOrDefault here is a NASTY, NASTY hack - just my way of getting hold of the
        // *LAST* depth stream rather than the *FIRST*, on the assumption that the last one
        // is the long-throw stream rather than the short-throw stream.
        // I should fix this and find a better way of choosing the right depth stream than
        // relying on an ordering that isn't guaranteed to hold!
        var sourceInfo =
            allSources.SelectMany(group => group.SourceInfos)
            .LastOrDefault(
                si =>
                (si.MediaStreamType == MediaStreamType.VideoRecord) &&
                (si.SourceKind == sourceKind) &&
                (si.VideoProfileMediaDescription.Any(
                     desc =>
                     desc.Width == width &&
                     desc.Height == height &&
                     desc.FrameRate == frameRate)));

        if (sourceInfo != null)
        {
            var sourceGroup = sourceInfo.SourceGroup;

            mediaCapture = new MediaCapture();

            await mediaCapture.InitializeAsync(
                new MediaCaptureInitializationSettings()
            {
                // I want software bitmaps
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                SourceGroup          = sourceGroup,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
                );

            frameSource = mediaCapture.FrameSources[sourceInfo.Id];

            var selectedFormat = frameSource.SupportedFormats.First(
                format =>
                format.VideoFormat.Width == width && format.VideoFormat.Height == height &&
                format.FrameRate.Numerator / format.FrameRate.Denominator == frameRate &&
                ((bitmapFormats == null) || (bitmapFormats.Contains(format.Subtype.ToLower()))));

            await frameSource.SetFormatAsync(selectedFormat);
        }
        return(Tuple.Create(mediaCapture, frameSource));
    }
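A minimal call-site sketch for the method above (not part of the original example): the depth source kind and the 448x450@15 values are purely illustrative and must match a mode the device actually reports, and the Tuple is unpacked via Item1/Item2 since this is C# 6.0.

    // Hypothetical call site (a sketch, not from the original source).
    async Task StartDepthReaderAsync()
    {
        // Illustrative values - substitute a mode the device actually reports.
        var result = await GetMediaCaptureForDescriptionAsync(
            MediaFrameSourceKind.Depth, 448, 450, 15);

        if (result.Item2 != null)
        {
            var reader = await result.Item1.CreateFrameReaderAsync(result.Item2);
            reader.FrameArrived += (sender, args) =>
            {
                using (var frame = sender.TryAcquireLatestFrame())
                {
                    // Process frame?.VideoMediaFrame?.SoftwareBitmap here.
                }
            };
            await reader.StartAsync();
        }
    }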
Example #2
        private async Task InitMediaSourceAsync()
        {
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            if (allGroups.Count == 0)
            {
                Debug.LogError("cannot found MediaFrameSourceGroup. アプリケーションマニュフェストを確認してください。");
                return;
            }

            MediaFrameSourceGroup sourceGroup =
                allGroups.FirstOrDefault(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));

            if (sourceGroup == null)
            {
                Debug.LogError("深度カメラが見つからないようです。");
                return;
            }

            try
            {
                await InitializeMediaCaptureAsync(sourceGroup);
            }
            catch (Exception exception)
            {
                Debug.LogError("InitializeMediaCaptureAsyncに失敗しました" + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            MediaFrameSource source = _mediaCapture.FrameSources.Values
                                      .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

            if (source == null)
            {
                Debug.LogError("No depth frame source was found.");
                return;
            }

            MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(f =>
                String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));

            if (format == null)
            {
                return;
            }

            await source.SetFormatAsync(format);

            _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);

            MediaFrameReaderStartStatus status = await _depthReader.StartAsync();

            if (status != MediaFrameReaderStartStatus.Success)
            {
                Debug.LogError("_depthReader.StartAsyncに失敗しました");
            }
        }
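The snippet above starts _depthReader without showing a FrameArrived handler; one would normally be subscribed (e.g. _depthReader.FrameArrived += OnDepthFrameArrived;) before calling StartAsync. A minimal sketch of such a handler, assuming a using directive for Windows.Graphics.Imaging (this is not part of the original code):

        private void OnDepthFrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // Acquire the most recent D16 frame; the reference must be disposed when done.
            using (MediaFrameReference frameReference = sender.TryAcquireLatestFrame())
            {
                SoftwareBitmap depthBitmap = frameReference?.VideoMediaFrame?.SoftwareBitmap;
                if (depthBitmap == null)
                {
                    return;
                }

                // Process the 16-bit depth bitmap here.
            }
        }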
        /// <summary>
        /// Initializes MediaCapture's frame source with a compatible format, if possible.
        /// Throws Exception if no compatible stream(s) available
        /// </summary>
        /// <returns></returns>
        private async Task InitializeMediaFrameSourceAsync()
        {
            if (m_mediaCapture == null)
            {
                return;
            }

            // Get preview or record stream as source
            Func<KeyValuePair<string, MediaFrameSource>, MediaStreamType, bool> filterFrameSources = (source, type) =>
            {
                return(source.Value.Info.MediaStreamType == type && source.Value.Info.SourceKind == MediaFrameSourceKind.Color);
            };

            m_frameSource = m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoPreview)).Value
                            ?? m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoRecord)).Value;

            // If no preview or record stream is available, bail
            if (m_frameSource == null)
            {
                throw new Exception("No preview or record stream available");
            }

            // If we can, let's attempt to change the format set on the source to our preferences
            if (m_mediaCaptureInitializationSettings.SharingMode == MediaCaptureSharingMode.ExclusiveControl)
            {
                // Filter MediaType given resolution and framerate preference, and filter out non-compatible subtypes
                // Prefer a BGRA8 format and defer to other supported subtypes if none is supported on the source
                var selectedFormat = m_frameSource.SupportedFormats.Where(format =>
                        format.FrameRate.Numerator / format.FrameRate.Denominator > 15 &&
                        string.Compare(format.Subtype, MediaEncodingSubtypes.Bgra8, true) == 0)
                    ?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (1920 * 1080)))
                    .FirstOrDefault();

                if (selectedFormat == null)
                {
                    selectedFormat = m_frameSource.SupportedFormats.Where(format =>
                            format.FrameRate.Numerator / format.FrameRate.Denominator > 15 &&
                            (string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0 ||
                             string.Compare(format.Subtype, MediaEncodingSubtypes.Yuy2, true) == 0 ||
                             string.Compare(format.Subtype, MediaEncodingSubtypes.Rgb32, true) == 0))
                        ?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (1920 * 1080)))
                        .FirstOrDefault();
                }
                if (selectedFormat == null)
                {
                    throw new Exception("No compatible formats available");
                }

                await m_frameSource.SetFormatAsync(selectedFormat);
            }
            FrameWidth  = m_frameSource.CurrentFormat.VideoFormat.Width;
            FrameHeight = m_frameSource.CurrentFormat.VideoFormat.Height;
        }
Example #4
        public async void InitializeManager()
        {
            _helper = new OpenCVHelper();

            // Find the sources
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            var sourceGroups = allGroups.Select(g => new
            {
                Group      = g,
                SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
            }).Where(g => g.SourceInfo != null).ToList();

            if (sourceGroups.Count == 0)
            {
                // No camera sources found
                return;
            }
            var selectedSource = sourceGroups.FirstOrDefault();

            // Initialize MediaCapture
            try
            {
                await InitializeMediaCaptureAsync(selectedSource.Group);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            // Create the frame reader
            MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
            var format = frameSource.SupportedFormats.OrderByDescending(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            await frameSource.SetFormatAsync(format);

            BitmapSize size = new BitmapSize() // Output size for the frame reader; choosing a size smaller than the source format would make the image processing cheaper
            {
                Height = format.VideoFormat.Height,
                Width  = format.VideoFormat.Width
            };

            _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);

            _reader.FrameArrived += HandleFrameArrive;
            await _reader.StartAsync();
        }
        private async Task SetCameraFormat()
        {
            colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
            var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
            {
                return(format.VideoFormat.Width >= 1080 && format.Subtype == MediaEncodingSubtypes.Argb32);
            }).FirstOrDefault();

            if (preferredFormat == null)
            {
                return;
            }

            await colorFrameSource.SetFormatAsync(preferredFormat);
        }
        /// <summary>
        /// Sets the frame format of the current frame source.
        /// </summary>
        private async Task ChangeMediaFormatAsync(FrameFormatModel format)
        {
            if (_source == null)
            {
                _logger.Log("Unable to set format when source is not set.");
                return;
            }

            if (format != null && !format.HasSameFormat(_source.CurrentFormat))
            {
                await _source.SetFormatAsync(format.Format);

                _logger.Log($"Format set to {format.DisplayName}");
            }
        }
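FrameFormatModel is a helper type of the original project and is not shown here. A plausible sketch of its HasSameFormat member (an assumption, not the original implementation) would treat two formats as equal when subtype, resolution and frame rate all match, using the same Format property accessed above:

        public bool HasSameFormat(MediaFrameFormat other)
        {
            if (other == null)
            {
                return Format == null;
            }

            // Compare subtype, resolution and frame rate of the wrapped MediaFrameFormat.
            return string.Equals(Format.Subtype, other.Subtype, StringComparison.OrdinalIgnoreCase) &&
                   Format.VideoFormat.Width == other.VideoFormat.Width &&
                   Format.VideoFormat.Height == other.VideoFormat.Height &&
                   Format.FrameRate.Numerator == other.FrameRate.Numerator &&
                   Format.FrameRate.Denominator == other.FrameRate.Denominator;
        }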
        private Task SetFrameType(MediaFrameSource frameSource, int width, int height, int framerate)
        {
            var preferredFormat = frameSource.SupportedFormats.Where(format =>
            {
                return(format.VideoFormat.Width == width &&
                       format.VideoFormat.Height == height &&
                       (int)Math.Round(((double)format.FrameRate.Numerator / format.FrameRate.Denominator)) == framerate);
            });

            if (preferredFormat.Count() == 0)
            {
                throw new ArgumentException(String.Format("No frame type exists for {0}x{1}@{2}", width, height, framerate));
            }

            return(frameSource.SetFormatAsync(preferredFormat.First()).AsTask());
        }
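A hypothetical call site for SetFrameType (not in the original): the 1280x720@30 values are illustrative, and the ArgumentException is surfaced if the camera cannot provide that mode.

        private async Task TrySetFrameTypeAsync(MediaFrameSource frameSource)
        {
            try
            {
                await SetFrameType(frameSource, 1280, 720, 30);
            }
            catch (ArgumentException ex)
            {
                // The requested mode is not supported by this source.
                Debug.WriteLine(ex.Message);
            }
        }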
Example #8
        private async Task ChangeMediaFormatAsync2(FrameFormatModel format)
        {
            if (_source2 == null)
            {
                _logger2.Log("Unable to set format when source is not set.");
                return;
            }

            // Set the source format if the selected one is different from the current one.
            if (format != null && !format.HasSameFormat(_source2.CurrentFormat))
            {
                await _source2.SetFormatAsync(format.Format);

                _logger2.Log($"Format set to {format.DisplayName}");
            }
        }
Example #9
        /// <summary>
        /// Initializes MediaCapture's frame source with a compatible format, if possible.
        /// Throws Exception if no compatible stream(s) available
        /// </summary>
        /// <returns></returns>
        private async Task InitializeMediaFrameSourceAsync()
        {
            if (m_mediaCapture == null)
            {
                return;
            }

            // Get preview or record stream as source
            Func<KeyValuePair<string, MediaFrameSource>, MediaStreamType, bool> filterFrameSources = (source, type) =>
            {
                return(source.Value.Info.MediaStreamType == type && source.Value.Info.SourceKind == MediaFrameSourceKind.Color);
            };

            m_frameSource = m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoPreview)).Value
                            ?? m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoRecord)).Value;

            // If no preview or record stream is available, bail
            if (m_frameSource == null)
            {
                throw new Exception("No preview or record streams available");
            }

            // Filter MediaType given resolution and framerate preference, and filter out non-compatible subtypes
            var formats = m_frameSource.SupportedFormats.Where(format =>
                    format.FrameRate.Numerator / format.FrameRate.Denominator > 15 &&
                    (string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0 ||
                     string.Compare(format.Subtype, MediaEncodingSubtypes.Bgra8, true) == 0 ||
                     string.Compare(format.Subtype, MediaEncodingSubtypes.Yuy2, true) == 0 ||
                     string.Compare(format.Subtype, MediaEncodingSubtypes.Rgb32, true) == 0))
                ?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (1920 * 1080)));
            var selectedFormat = formats.FirstOrDefault();

            if (selectedFormat != null)
            {
                await m_frameSource.SetFormatAsync(selectedFormat);

                FrameWidth  = m_frameSource.CurrentFormat.VideoFormat.Width;
                FrameHeight = m_frameSource.CurrentFormat.VideoFormat.Height;
            }
            else
            {
                throw new Exception("No compatible formats available");
            }
        }
        private async Task<bool> InitialiserSourceImages(uint largeur, uint hauteur, string sousTypeVideo)
        {
            if (_mediaCapture.FrameSources.Count > 0)
            {
                MediaFrameSource sourceImages = _mediaCapture.FrameSources.First().Value;
                ReglerControleurVideo(sourceImages);

                MediaFrameFormat formatVideo = sourceImages.SupportedFormats.FirstOrDefault(f => f.VideoFormat.Width == largeur && f.VideoFormat.Height == hauteur && f.Subtype == sousTypeVideo);
                if (formatVideo != null)
                {
                    await sourceImages.SetFormatAsync(formatVideo);

                    _lecteurMedia = await _mediaCapture.CreateFrameReaderAsync(sourceImages);

                    _lecteurMedia.FrameArrived += _lecteurMedia_FrameArrived;
                    await _lecteurMedia.StartAsync();

                    return(true);
                }
                else
                {
                    Debug.WriteLine("Le format demandé " + largeur.ToString() + "x" + hauteur.ToString() + " en " + sousTypeVideo + " n'est pas supporté par cette caméra !");
                    Debug.WriteLine("Voici les formats supportés :");
                    foreach (MediaFrameFormat formatImages in sourceImages.SupportedFormats)
                    {
                        Debug.WriteLine(formatImages.MajorType + " " + formatImages.VideoFormat.Width.ToString() + "x" + formatImages.VideoFormat.Height.ToString() + " " + formatImages.Subtype);
                    }
                    return(false);
                }
            }
            else
            {
                Debug.WriteLine("Aucun périphérique de capture détecté !");
                return(false);
            }
        }
Example #11
    public async void StartCapture()
    {
        System.Diagnostics.Debug.WriteLine("Starting capture.");

        var videoDevice = await GetBestVideoDevice();

        if (videoDevice == null)
        {
            System.Diagnostics.Debug.WriteLine("Failed to find video device.");
            return;
        }

        MediaCaptureVideoProfile profile;
        MediaCaptureVideoProfileMediaDescription description;

        if (!GetBestProfileAndDescription(videoDevice, out profile, out description))
        {
            System.Diagnostics.Debug.WriteLine("Failed to find profile and description.");
            return;
        }

        var settings = new MediaCaptureInitializationSettings
        {
            MemoryPreference       = MediaCaptureMemoryPreference.Cpu,
            VideoDeviceId          = videoDevice.Id,
            VideoProfile           = profile,
            RecordMediaDescription = description,
        };

        var mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(settings);

        MediaFrameSource source = null;
        MediaFrameFormat format = null;

        if (!GetBestSourceAndFormat(mediaCapture, out source, out format))
        {
            System.Diagnostics.Debug.WriteLine("Failed to find source and format.");
            return;
        }

        System.Diagnostics.Debug.WriteLine(string.Format("Selected Video Format: Width: {0}, Height: {1}, Major Type: {2}, Subtype: {3}, Frame Rate: {4}/{5}",
                                                         format.VideoFormat.Width,
                                                         format.VideoFormat.Height,
                                                         format.MajorType,
                                                         format.Subtype,
                                                         format.FrameRate == null ? "null" : format.FrameRate.Numerator.ToString(),
                                                         format.FrameRate == null ? "null" : format.FrameRate.Denominator.ToString()));

        await source.SetFormatAsync(format);

        mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(source, MediaEncodingSubtypes.Bgra8);

        if (await mediaFrameReader.StartAsync() != MediaFrameReaderStartStatus.Success)
        {
            System.Diagnostics.Debug.WriteLine("Failed to start media frame reader.");
            return;
        }

        mediaFrameReader.FrameArrived += MediaFrameReader_FrameArrived;
        System.Diagnostics.Debug.WriteLine("Capture started.");
    }
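GetBestVideoDevice, GetBestProfileAndDescription and GetBestSourceAndFormat are helpers of the original project that are not shown here. As a rough sketch only (an assumption, not the original implementation), GetBestSourceAndFormat could pick the first color frame source and its highest-resolution format:

    private bool GetBestSourceAndFormat(MediaCapture mediaCapture, out MediaFrameSource source, out MediaFrameFormat format)
    {
        // Pick the first color frame source and its largest supported format.
        source = mediaCapture.FrameSources.Values.FirstOrDefault(
            s => s.Info.SourceKind == MediaFrameSourceKind.Color);
        format = source?.SupportedFormats
                 .OrderByDescending(f => f.VideoFormat.Width * f.VideoFormat.Height)
                 .FirstOrDefault();
        return source != null && format != null;
    }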
 //#############################################################################################
 /// <summary>
 /// Set the frame format for capture. Needed to allow the camera to start.
 /// </summary>
 private async Task SetFrameFormat()
 {
     _frameSource = _mediaCapture.FrameSources[_selectedSourceInfo.Id]; // get the source found in FindSource method
     var format = _frameSource.SupportedFormats.FirstOrDefault();
     await _frameSource.SetFormatAsync(format);
 }
    async Task Start()
    {
        // Socket listener
        audioSocketListener = new StreamSocketListener();
        audioSocketListener.ConnectionReceived += OnConnectionAudio;
        await audioSocketListener.BindServiceNameAsync(audioServiceName);

        videoSocketListener = new StreamSocketListener();
        videoSocketListener.ConnectionReceived += OnConnectionVideo;
        await videoSocketListener.BindServiceNameAsync(videoServiceName);

        // Find a media source group which gives us webcam and microphone input streams
        var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        MediaFrameSourceGroup    selectedSourceGroup  = null;
        MediaCaptureVideoProfile selectedVideoProfile = null;
        MediaCaptureVideoProfileMediaDescription selectedDescription = null;

        foreach (MediaFrameSourceGroup sourceGroup in sourceGroups)
        {
            var videoProfiles = MediaCapture.FindKnownVideoProfiles(sourceGroup.Id, KnownVideoProfile.VideoConferencing);
            foreach (MediaCaptureVideoProfile videoProfile in videoProfiles)
            {
                foreach (var desc in videoProfile.SupportedRecordMediaDescription)
                {
                    if (desc.Width == videoWidth && desc.Height == videoHeight && desc.FrameRate == frameRate)
                    {
                        selectedSourceGroup  = sourceGroup;
                        selectedVideoProfile = videoProfile;
                        selectedDescription  = desc;
                    }
                }
            }
        }

        if (selectedSourceGroup == null)
        {
            Debug.Log("No source group was found.");
            return;
        }

        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings()
        {
            AudioProcessing        = AudioProcessing.Raw,
            MemoryPreference       = MediaCaptureMemoryPreference.Cpu,
            RecordMediaDescription = selectedDescription,
            SharingMode            = MediaCaptureSharingMode.ExclusiveControl,
            SourceGroup            = selectedSourceGroup,
            StreamingCaptureMode   = StreamingCaptureMode.AudioAndVideo,
            VideoProfile           = selectedVideoProfile,
        };

        try
        {
            await mediaCapture.InitializeAsync(settings);
        }
        catch (Exception ex)
        {
            Debug.Log("MediaCapture initialization failed: " + ex.Message);
            return;
        }

        var audioFrameSources = mediaCapture.FrameSources.Where(src => src.Value.Info.MediaStreamType == MediaStreamType.Audio);

        if (audioFrameSources.Count() == 0)
        {
            Debug.Log("No audio source was found.");
            return;
        }
        MediaFrameSource audioFrameSource = audioFrameSources.FirstOrDefault().Value;
        var videoFrameSources             = mediaCapture.FrameSources.Where(src => src.Value.Info.SourceKind == MediaFrameSourceKind.Color);

        if (videoFrameSources.Count() == 0)
        {
            Debug.Log("No video source was found.");
            return;
        }
        // MediaFrameSource videoFrameSource = videoFrameSources.FirstOrDefault().Value;
        MediaFrameSource videoFrameSource = null;
        MediaFrameFormat selectedFormat   = null;

        foreach (var kv in videoFrameSources)
        {
            MediaFrameSource source = kv.Value;
            foreach (MediaFrameFormat format in source.SupportedFormats)
            {
                if (format.VideoFormat.Width == videoWidth && format.VideoFormat.Height == videoHeight &&
                    format.FrameRate.Numerator == frameRate && format.FrameRate.Denominator == 1)
                {
                    videoFrameSource = source;
                    selectedFormat   = format;
                    break;
                }
            }
            if (videoFrameSource != null)
            {
                break;
            }
        }
        if (selectedFormat != null)
        {
            await videoFrameSource.SetFormatAsync(selectedFormat);
        }
        else
        {
            Debug.Log("Cannot find a proper MediaFrameFormat.");
            return;
        }

        // Start streaming
        audioFrameReader = await mediaCapture.CreateFrameReaderAsync(audioFrameSource);

        audioFrameReader.FrameArrived += AudioFrameArrived;
        videoFrameReader = await mediaCapture.CreateFrameReaderAsync(videoFrameSource);

        videoFrameReader.FrameArrived += VideoFrameArrived;

        var audioStartStatus = audioFrameReader.StartAsync();
        var videoStartStatus = videoFrameReader.StartAsync();

        if (await audioStartStatus != MediaFrameReaderStartStatus.Success)
        {
            Debug.Log("The audioFrameReader couldn't start.");
        }
        if (await videoStartStatus != MediaFrameReaderStartStatus.Success)
        {
            Debug.Log("The videoFrameReader couldn't start.");
        }
    }
    private async Task<bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(": InitializeMediaCaptureAsync() fails because of incorrect status");
            return(false);
        }

        if (mediaCapture != null)
        {
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        foreach (var group in allGroups)
        {
            Debug.Log(group.DisplayName + ", " + group.Id);
        }

        if (allGroups.Count <= 0)
        {
            Debug.Log(": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings {
            SourceGroup = allGroups[group_id],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        Debug.Log(": MediaCapture is successfully initialized in shared mode.");

        MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
        MediaFrameFormat targetResFormat   = targetFrameSource.SupportedFormats[0];

        try {
            await targetFrameSource.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
                      ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
        }
        catch (Exception e) {
            Debug.Log(": FrameReader is not initialized");
            Debug.Log(": Exception: " + e);
            return(false);
        }

        captureStatus = CaptureStatus.Initialized;
        return(true);
    }
        /// <summary>
        /// Start the video stream. This just prepares the stream for capture, and doesn't start collecting frames
        /// </summary>
        /// <param name="streamDesc">The description of the stream to start.</param>
        public async void Start(StreamDescription streamDesc)
        {
#if CAN_USE_UWP_TYPES
            lock (stateLock)
            {
                if (State != CameraState.Initialized)
                {
                    throw new InvalidOperationException("Start cannot be called until the camera is in the Initialized state");
                }

                State = CameraState.Starting;
            }

            Resolution = streamDesc.Resolution;
            CameraType = streamDesc.CameraType;

            StreamDescriptionInternal desc = streamDesc as StreamDescriptionInternal;

            MediaCaptureInitializationSettings initSettings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = desc.FrameSourceGroup,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };

            // initialize the media device
            mediaCapture = new MediaCapture();

            try
            {
                await mediaCapture.InitializeAsync(initSettings);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine($"MediaCapture initialization failed: {ex.Message}");
                mediaCapture.Dispose();
                mediaCapture = null;
            }

            if (mediaCapture != null)
            {
                // get access to the video device controller for property settings
                videoDeviceController = mediaCapture.VideoDeviceController;

                // choose media source
                MediaFrameSource frameSource     = mediaCapture.FrameSources[desc.FrameSourceInfo.Id];
                MediaFrameFormat preferredFormat = null;

                foreach (MediaFrameFormat format in frameSource.SupportedFormats)
                {
                    if (format.VideoFormat.Width == desc.Resolution.Width && format.VideoFormat.Height == desc.Resolution.Height && Math.Abs((double)format.FrameRate.Numerator / (double)format.FrameRate.Denominator - desc.Resolution.Framerate) < epsilon)
                    {
                        preferredFormat = format;
                        break;
                    }
                }

                if (preferredFormat != null && preferredFormat != frameSource.CurrentFormat)
                {
                    await frameSource.SetFormatAsync(preferredFormat);
                }
                else
                {
                    System.Diagnostics.Debug.WriteLine($"failed to set desired frame format");
                }

                // set up the frame reader to capture frame data
                frameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);

                frameReader.FrameArrived += OnMediaFrameArrived;
                await frameReader.StartAsync();

                lock (stateLock)
                {
                    State = CameraState.Ready;
                    OnCameraStarted?.Invoke(this, true);
                }
            }
            else
            {
                lock (stateLock)
                {
                    // drop back to initialized when the camera doesn't initialize
                    State = CameraState.Initialized;
                    OnCameraStarted?.Invoke(this, false);
                }
            }
#else
            await Task.CompletedTask;
#endif
        }
Example #16
    private async void init()
    {
        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        LogError("checkpoint 1.1");
        var targetGroups = frameSourceGroups.Select(g => new
        {
            Group       = g,
            SourceInfos = new MediaFrameSourceInfo[]
            {
                g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color),
                g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Depth),
            }
        }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

        LogError("checkpoint 1.2");

        if (targetGroups.Count == 0)
        {
            LogError("No source groups found.");
            return;
        }

        MediaFrameSourceGroup mediaSourceGroup = targetGroups[0].Group;

        LogError("checkpoint 1.3");

        mediaCapture = new MediaCapture();

        LogError("checkpoint 1.4");
        var settings = new MediaCaptureInitializationSettings()
        {
            SourceGroup          = mediaSourceGroup,
            SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
            StreamingCaptureMode = StreamingCaptureMode.Video
        };

        LogError("checkpoint 1.5");

        await mediaCapture.InitializeAsync(settings);

        LogError("checkpoint 1.6");

        MediaFrameSource colorSource =
            mediaCapture.FrameSources.Values.FirstOrDefault(
                s => s.Info.SourceKind == MediaFrameSourceKind.Color);

        MediaFrameSource depthSource =
            mediaCapture.FrameSources.Values.FirstOrDefault(
                s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

        LogError("checkpoint 1.7");

        if (colorSource == null || depthSource == null)
        {
            LogError("Cannot find color or depth stream.");
            return;
        }

        MediaFrameFormat colorFormat = colorSource.SupportedFormats.Where(format =>
        {
            return(format.VideoFormat.Width >= 640 &&
                   format.Subtype == MediaEncodingSubtypes.Rgb24);
        }).FirstOrDefault();

        MediaFrameFormat depthFormat = depthSource.SupportedFormats.Where(format =>
        {
            return(format.VideoFormat.Width >= 640 &&
                   format.Subtype == MediaEncodingSubtypes.D16);
        }).FirstOrDefault();

        if (colorFormat == null || depthFormat == null)
        {
            LogError("Cannot find a suitable color or depth format.");
            return;
        }

        await colorSource.SetFormatAsync(colorFormat);

        await depthSource.SetFormatAsync(depthFormat);

        _colorSourceId = colorSource.Info.Id;
        _depthSourceId = depthSource.Info.Id;

        _frameReader = await mediaCapture.CreateMultiSourceFrameReaderAsync(
            new[] { colorSource, depthSource });

        _frameReader.FrameArrived += FrameReader_FrameArrived;

        MultiSourceMediaFrameReaderStartStatus startStatus = await _frameReader.StartAsync();

        if (startStatus != MultiSourceMediaFrameReaderStartStatus.Success)
        {
            throw new InvalidOperationException("Unable to start reader: " + startStatus);
        }

        this.CorrelationFailed += MainPage_CorrelationFailed;
        Task.Run(() => NotifyAboutCorrelationFailure(_tokenSource.Token));
    }
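FrameReader_FrameArrived is not shown in this example. A possible shape for it (an assumption, not the original code) pulls the latest correlated frame pair back out using the source ids saved above:

    private void FrameReader_FrameArrived(MultiSourceMediaFrameReader sender, MultiSourceMediaFrameArrivedEventArgs args)
    {
        // Acquire the latest correlated set of frames from both sources.
        using (MultiSourceMediaFrameReference muxedFrame = sender.TryAcquireLatestFrame())
        {
            if (muxedFrame == null)
            {
                return;
            }

            MediaFrameReference colorFrame = muxedFrame.TryGetFrameReferenceBySourceId(_colorSourceId);
            MediaFrameReference depthFrame = muxedFrame.TryGetFrameReferenceBySourceId(_depthSourceId);

            // Correlate and process the color/depth pair here.
        }
    }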
    /// <summary>
    /// Initializes MediaCapture with the selected source group in shared read-only mode
    /// and starts a FrameReader on the chosen frame source.
    /// </summary>
    /// <returns>True if initialization succeeded; false otherwise.</returns>
    private async Task<bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because of incorrect status");
            return(false);
        }

        if (mediaCapture != null)
        {
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        foreach (var group in allGroups)
        {
            Debug.Log(group.DisplayName + ", " + group.Id);
        }

        if (allGroups.Count <= 0)
        {
            Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings {
            SourceGroup = allGroups[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        Debug.Log(TAG + " " + id + ": MediaCapture is successfully initialized in shared mode.");

        // logging all frame source information
        string logString = "";

        foreach (var frameSource in mediaCapture.FrameSources)
        {
            var info = frameSource.Value.Info;
            logString += info.Id + ", " + info.MediaStreamType + ", " + info.SourceKind + "\n";
            logString += "Total number of SupportedFormats is " + frameSource.Value.SupportedFormats.Count + "\n";
            foreach (var format in frameSource.Value.SupportedFormats)
            {
                logString += format.VideoFormat.Width + " x " + format.VideoFormat.Height + ", Major type: " + format.MajorType + ", Subtype: " + format.Subtype +
                             ", Framerate: " + format.FrameRate.Numerator + "/" + format.FrameRate.Denominator + "\n";
            }
        }
        Debug.Log(logString);
        MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
        MediaFrameFormat targetResFormat   = targetFrameSource.SupportedFormats[0];

        try {
            // choose the smallest resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            // choose the specific resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
            await targetFrameSource.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(TAG + " " + id + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
                      ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
        }
        catch (Exception e) {
            Debug.Log(TAG + " " + id + ": FrameReader is not initialized");
            Debug.Log(TAG + " " + id + ": Exception: " + e);
            return(false);
        }

        captureStatus = CaptureStatus.Initialized;
        return(true);
    }
Example #18
        /// <summary>
        /// Initializes MediaCapture's frame source with a compatible format, if possible.
        /// Throws Exception if no compatible stream(s) available
        /// </summary>
        /// <returns></returns>
        private async Task InitializeMediaFrameSourceAsync()
        {
            if (m_mediaCapture == null)
            {
                return;
            }

            // Get preview or record stream as source
            Func<KeyValuePair<string, MediaFrameSource>, MediaStreamType, bool> filterFrameSources = (source, type) =>
            {
                return(source.Value.Info.MediaStreamType == type && source.Value.Info.SourceKind == MediaFrameSourceKind.Color);
            };

            m_frameSource = m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoPreview)).Value
                            ?? m_mediaCapture.FrameSources.FirstOrDefault(source => filterFrameSources(source, MediaStreamType.VideoRecord)).Value;

            // If no preview or record stream is available, bail
            if (m_frameSource == null)
            {
                throw new Exception("No preview or record stream available");
            }

            // Get the preferred camera frame format described in an ISkillFeatureImageDescriptor, if specified
            int preferredFrameWidth = 1920;

            if (m_desiredImageDescriptor != null && m_desiredImageDescriptor.Width != -1)
            {
                preferredFrameWidth = Math.Abs(m_desiredImageDescriptor.Width);
            }
            int preferredFrameHeight = 1080;

            if (m_desiredImageDescriptor != null && m_desiredImageDescriptor.Height != -1)
            {
                preferredFrameHeight = Math.Abs(m_desiredImageDescriptor.Height);
            }
            string preferredMediaEncodingSubtype = MediaEncodingSubtypes.Bgra8;

            if (m_desiredImageDescriptor != null)
            {
                preferredMediaEncodingSubtype = BitmapPixelFormatToMediaEncodingSubtype(m_desiredImageDescriptor.SupportedBitmapPixelFormat);
            }

            // If we can, let's attempt to change the format set on the source to our preferences
            if (m_mediaCaptureInitializationSettings.SharingMode == MediaCaptureSharingMode.ExclusiveControl)
            {
                // Filter camera MediaType given frame format preference, and filter out non-compatible subtypes
                var selectedFormat = m_frameSource.SupportedFormats.Where(format =>
                        format.FrameRate.Numerator / format.FrameRate.Denominator > 15 &&
                        string.Compare(format.Subtype, preferredMediaEncodingSubtype, true) == 0)
                    ?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (preferredFrameWidth * preferredFrameHeight)))
                    .FirstOrDefault();

                // Defer to other supported subtypes if the one prescribed is not supported on the source
                if (selectedFormat == null)
                {
                    selectedFormat = m_frameSource.SupportedFormats.Where(format =>
                            format.FrameRate.Numerator / format.FrameRate.Denominator > 15 &&
                            (string.Compare(format.Subtype, MediaEncodingSubtypes.Bgra8, true) == 0 ||
                             string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0 ||
                             string.Compare(format.Subtype, MediaEncodingSubtypes.Yuy2, true) == 0 ||
                             string.Compare(format.Subtype, MediaEncodingSubtypes.Rgb32, true) == 0))
                        ?.OrderBy(format => Math.Abs((int)(format.VideoFormat.Width * format.VideoFormat.Height) - (preferredFrameWidth * preferredFrameHeight)))
                        .FirstOrDefault();
                }
                if (selectedFormat == null)
                {
                    throw new Exception("No compatible formats available");
                }

                await m_frameSource.SetFormatAsync(selectedFormat);
            }
            FrameWidth  = m_frameSource.CurrentFormat.VideoFormat.Width;
            FrameHeight = m_frameSource.CurrentFormat.VideoFormat.Height;
        }