Beispiel #1
0
 public IAsyncOperation <ColorFrameReader> OpenColorFrameReaderAsync(ReaderConfig config = ReaderConfig.Default)
 {
     // Lazily creates the shared color reader, then opens it. Work runs on the
     // thread pool and is surfaced to WinRT callers as an IAsyncOperation.
     // Returns null when this source group has no usable color stream.
     return Task.Run(async () =>
     {
         if (_colorReader == null)
         {
             // Locate the color stream in this source group; bail out when absent.
             var info = _sourceGroup.SourceInfos.FirstOrDefault(si => si.SourceKind == MediaFrameSourceKind.Color);
             if (info == null)
             {
                 return null;
             }
             if (!_mediaCapture.FrameSources.TryGetValue(info.Id, out var source))
             {
                 return null;
             }
             var mediaReader = await _mediaCapture.CreateFrameReaderAsync(source);
             _colorReader = new ColorFrameReader(this, mediaReader, config);
         }
         await _colorReader.OpenAsync();
         return _colorReader;
     }).AsAsyncOperation();
 }
Beispiel #2
0
 public IAsyncOperation <ColorFrameReader> OpenColorFrameReaderAsync(ReaderConfig config = ReaderConfig.Default)
 {
     // Lazily creates the color reader (network-backed or local camera) and opens it.
     // Returns null when no color source could be resolved.
     return(Task.Run(async() =>
     {
         if (ColorReader == null)
         {
             if (Type == SensorType.NetworkClient)
             {
                 // Remote sensor: frames arrive over the network client.
                 ColorReader = new ColorFrameReader(this, _networkClient, config);
             }
             else
             {
                 var colorSourceInfo = _sourceGroup.SourceInfos.FirstOrDefault(si => si.SourceKind == MediaFrameSourceKind.Color);
                 if (colorSourceInfo != null)
                 {
                     if (_mediaCapture.FrameSources.TryGetValue(colorSourceInfo.Id, out var colorSource))
                     {
                         var colorMediaReader = await _mediaCapture.CreateFrameReaderAsync(colorSource);
                         ColorReader = new ColorFrameReader(this, colorMediaReader, config);
                     }
                 }
             }
         }
         // BUGFIX: the original `await ColorReader?.OpenAsync();` throws a
         // NullReferenceException when ColorReader is still null, because the
         // null-conditional yields a null Task and awaiting null throws.
         if (ColorReader != null)
         {
             await ColorReader.OpenAsync();
         }
         return ColorReader;
     }).AsAsyncOperation());
 }
Beispiel #3
0
        /// <summary>
        /// Configures encoder settings, initializes the camera in exclusive
        /// CPU/video-only mode, selects the requested resolution/subtype at the
        /// highest frame rate, and starts the frame reader. Runs on the main
        /// view's dispatcher.
        /// </summary>
        /// <param name="videoSetting">Thread count, quality, resolution and subtype configuration.</param>
        public async Task Initialize(VideoSetting videoSetting)
        {
            await CoreApplication.MainView.CoreWindow.Dispatcher.RunAndAwaitAsync(CoreDispatcherPriority.Normal, async() =>
            {
                _threadsCount   = videoSetting.UsedThreads;
                _stoppedThreads = videoSetting.UsedThreads;

                _lastFrameAdded.Start();

                // Bitmap encoder property set: stores videoSetting.VideoQuality
                // under the "ImageQuality" key as a Single.
                _imageQuality         = new BitmapPropertySet();
                var imageQualityValue = new BitmapTypedValue(videoSetting.VideoQuality, Windows.Foundation.PropertyType.Single);
                _imageQuality.Add("ImageQuality", imageQualityValue);

                _mediaCapture = new MediaCapture();

                var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

                var settings = new MediaCaptureInitializationSettings()
                {
                    // Exclusive control is required below to change the stream format.
                    SharingMode = MediaCaptureSharingMode.ExclusiveControl,

                    //With CPU the results contain always SoftwareBitmaps, otherwise with GPU
                    //they preferring D3DSurface
                    MemoryPreference = MediaCaptureMemoryPreference.Cpu,

                    //Capture only video, no audio
                    StreamingCaptureMode = StreamingCaptureMode.Video
                };

                await _mediaCapture.InitializeAsync(settings);

                // NOTE(review): assumes at least one frame source exists;
                // First() throws InvalidOperationException otherwise.
                var mediaFrameSource      = _mediaCapture.FrameSources.First().Value;
                var videoDeviceController = mediaFrameSource.Controller.VideoDeviceController;

                videoDeviceController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
                videoDeviceController.PrimaryUse          = Windows.Media.Devices.CaptureUse.Video;

                //Set exposure (auto light adjustment)
                if (_mediaCapture.VideoDeviceController.Exposure.Capabilities.Supported &&
                    _mediaCapture.VideoDeviceController.Exposure.Capabilities.AutoModeSupported)
                {
                    _mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
                }

                var videoResolutionWidthHeight = VideoResolutionWidthHeight.Get(videoSetting.VideoResolution);
                var videoSubType = VideoSubtypeHelper.Get(videoSetting.VideoSubtype);

                //Set resolution, frame rate and video subtyp
                // NOTE(review): First() throws when the camera does not offer the
                // requested resolution/subtype combination — TODO confirm intended.
                var videoFormat = mediaFrameSource.SupportedFormats.Where(sf => sf.VideoFormat.Width == videoResolutionWidthHeight.Width &&
                                                                          sf.VideoFormat.Height == videoResolutionWidthHeight.Height &&
                                                                          sf.Subtype == videoSubType)
                                  .OrderByDescending(m => m.FrameRate.Numerator / m.FrameRate.Denominator)
                                  .First();

                await mediaFrameSource.SetFormatAsync(videoFormat);

                _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource);
                await _mediaFrameReader.StartAsync();
            });
        }
Beispiel #4
0
    // MediaFrameReader
    // Creates a frame reader for the single source matching cfg.Selector.
    public static async Task <MediaFrameReader> CreateMediaFrameReader(SensorConfig cfg, IReadOnlyList <MediaFrameSourceGroup> sourceGroups)
    {
        // Step 1. Find the single sourceInfo matching the config's selector.
        // Materialize with ToList(): the original enumerated the deferred query
        // twice (Count() then First()), re-running the selector over every source.
        var sourceInfos = sourceGroups
                          .SelectMany(group => group.SourceInfos)
                          .Where(cfg.Selector)
                          .ToList();

        Debug.Assert(sourceInfos.Count == 1);
        var sourceInfo = sourceInfos.First();
        // Step 2. Create MediaCapture bound to that source's group (shared
        // read-only, video-only, CPU memory).
        var mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
        {
            SourceGroup          = sourceInfo.SourceGroup,
            SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
            StreamingCaptureMode = StreamingCaptureMode.Video,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu
        });

        // Step 3. Create MediaFrameReader using the configured encoding subtype
        var mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(
            mediaCapture.FrameSources[sourceInfo.Id], cfg.MediaEncodingSubtype);

        // Step 4. Return MediaFrameReader, add callbacks then call StartAsync
        Debug.Log($"{cfg.Name} acquired");
        return(mediaFrameReader);
    }
Beispiel #5
0
        /// <summary>
        /// (Re)initializes the capture device and creates a frame reader pinned to
        /// a 640x480 MJPG format. The surrounding API is synchronous, so WinRT
        /// async operations are polled to completion rather than awaited.
        /// </summary>
        public override void Load(string FriendlyName, int FrameRate, int Height, int Width, string Encoding)
        {
            if (NativeDevice != null)
            {
                NativeDevice.Dispose();
            }
            NativeDevice = new MediaCapture();
            var initOp = NativeDevice.InitializeAsync(new MediaCaptureInitializationSettings()
            {
                VideoDeviceId = FriendlyName, MemoryPreference = MediaCaptureMemoryPreference.Auto, StreamingCaptureMode = StreamingCaptureMode.Video
            });

            while (initOp.Status == AsyncStatus.Started)
            {
                Thread.Sleep(50);
            }
            if (initOp.Status == AsyncStatus.Error)
            {
                throw new System.Exception("Access Denied");
            }
            if (Reader != null)
            {
                // Detach the old handler before disposing so no stale callbacks fire.
                Reader.FrameArrived -= NewFrameArrived;
                Reader.Dispose();
            }
            IReadOnlyDictionary <string, MediaFrameSource> sources = NativeDevice.FrameSources;
            MediaFrameSource selectedSource = null;

            // Pick the first preview/record video source offering 640x480 MJPG
            // and apply that format synchronously.
            foreach (MediaFrameSource source in sources.Values)
            {
                if (source.CurrentFormat.MajorType == "Video")
                {
                    if (source.Info.MediaStreamType == MediaStreamType.VideoPreview || source.Info.MediaStreamType == MediaStreamType.VideoRecord)
                    {
                        foreach (MediaFrameFormat format in source.SupportedFormats)
                        {
                            if (format.VideoFormat.Height == 480 && format.VideoFormat.Width == 640 && format.VideoFormat.MediaFrameFormat.Subtype == "MJPG")
                            {
                                if (selectedSource == null)
                                {
                                    selectedSource = source;
                                    var setFormatOp = selectedSource.SetFormatAsync(format);
                                    while (setFormatOp.Status == AsyncStatus.Started)
                                    {
                                        Thread.Sleep(50);
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // BUGFIX: the original passed a possibly-null selectedSource to
            // CreateFrameReaderAsync, failing with an opaque WinRT error when no
            // 640x480 MJPG source exists. (It also named the operation `Task`,
            // shadowing System.Threading.Tasks.Task.)
            if (selectedSource == null)
            {
                throw new System.Exception("No 640x480 MJPG video source found");
            }
            var readerOp = NativeDevice.CreateFrameReaderAsync(selectedSource);

            while (readerOp.Status == AsyncStatus.Started)
            {
                Thread.Sleep(50);
            }
            Reader = readerOp.GetResults();
        }
        /// <summary>
        /// For each frame source that offers a decodable format, pins that format,
        /// creates a reader and registers the shared FrameArrived handler.
        /// </summary>
        async Task initializeFrameReaders()
        {
            foreach (var source in mediaCapture.FrameSources.Values)
            {
                // BUGFIX: requestedSubtype was declared OUTSIDE this loop, so a
                // source with an empty SupportedFormats list inherited the previous
                // source's subtype and got a reader created with the wrong format.
                // Reset it per source.
                string requestedSubtype = null;

                var kind = source.Info.SourceKind;
                foreach (var format in source.SupportedFormats)
                {
                    requestedSubtype = MediaFrameUtil.GetSubtypeForFrameReader(kind, format);
                    if (requestedSubtype != null)
                    {
                        await source.SetFormatAsync(format);

                        break;
                    }
                }
                if (requestedSubtype == null)
                {
                    // No usable format on this source.
                    continue;
                }

                var frameReader = await mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);

                frameReader.FrameArrived += FrameReader_FrameArrived;
                frameReaders.Add(frameReader);
            }
        }
Beispiel #7
0
        /// <summary>
        /// Builds a frame reader over the configured source. Realtime acquisition
        /// drops late frames instead of queueing them.
        /// </summary>
        public async Task SetupReaderAsync()
        {
            var frameSource = MediaCapture_.FrameSources[SourceInfo_.Id];

            FrameReader_ = await MediaCapture_.CreateFrameReaderAsync(frameSource);
            FrameReader_.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
            FrameReader_.FrameArrived   += FrameArrivedCallback;
        }
Beispiel #8
0
        /// <summary>
        /// Creates the frame reader using the target format and registers the <see cref="OnFrameArrived"/> event. The width is padded to be divisibly by 64.
        /// </summary>
        /// <returns>True when the reader was created and wired up; false on any failure (logged).</returns>
        private async Task <bool> CreateFrameReader()
        {
            const MediaStreamType mediaStreamType = MediaStreamType.VideoRecord;
            var parameters = new CameraParameters(_cameraProfile);

            try
            {
                // Exactly one source is expected to provide the record stream.
                MediaFrameSource recordSource = _mediaCapture.FrameSources.Values.Single(fs => fs.Info.MediaStreamType == mediaStreamType);
                MediaFrameFormat targetFormat = GetTargetFormat(recordSource, parameters);
                await recordSource.SetFormatAsync(targetFormat);

                _frameReader = await _mediaCapture.CreateFrameReaderAsync(recordSource, targetFormat.Subtype);
                _frameReader.FrameArrived += OnFrameArrived;

                FrameHeight = Convert.ToInt32(targetFormat.VideoFormat.Height);
                FrameWidth  = PadTo64(Convert.ToInt32(targetFormat.VideoFormat.Width));

                _logger.Log($"FrameReader initialized using {FrameWidth} x {FrameHeight}, frame rate: {targetFormat.FrameRate.Numerator} / {targetFormat.FrameRate.Denominator}, color format: {_format}");
            }
            catch (Exception exception)
            {
                _logger.LogError("Frame Reader could not be initialized");
                _logger.LogException(exception);
                return false;
            }

            return true;
        }
Beispiel #9
0
    //********************************************************
    //   LEFTSIDE, LEFTFRONT, RIGHTSIDE, RIGHTFRONT CAPTURE
    //********************************************************
    /// <summary>
    /// Initializes the left-side camera and starts streaming ARGB32 frames into
    /// leftSideFrameReader_FrameArrived. Returns null on both success and
    /// initialization failure (callers observe the reader field instead).
    /// </summary>
    public async Task <string> StartFourCamerasCapture()
    {
        leftSideBytes   = new byte[480 * 160 * 4];
        leftSideCapture = new MediaCapture();
        try
        {
            await leftSideCapture.InitializeAsync(
                new MediaCaptureInitializationSettings()
            {
                SourceGroup          = depthGroup,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
                );
        }
        catch (Exception)
        {
            return(null);
        }

        // BUGFIX: the original read the frame source from depthNearCapture (a
        // copy-paste from StartDepthNearCapture). This session initialized
        // leftSideCapture, and the reader must be created on that same session.
        var colorFrameSource = leftSideCapture.FrameSources[leftSideInfo.Id];
        var preferredFormat  = colorFrameSource.SupportedFormats.FirstOrDefault();
        await colorFrameSource.SetFormatAsync(preferredFormat);

        leftSideFrameReader = await leftSideCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);

        leftSideFrameReader.FrameArrived += leftSideFrameReader_FrameArrived;
        await leftSideFrameReader.StartAsync();

        return(null);
    }
Beispiel #10
0
    //********************************************************
    //                  DEPTH NEAR CAPTURE
    //********************************************************
    /// <summary>
    /// Initializes the near-depth camera and starts streaming D16 frames into
    /// DepthNearFrameReader_FrameArrived. Returns null on both success and
    /// failure (callers observe the reader field instead).
    /// </summary>
    public async Task <string> StartDepthNearCapture()
    {
        depthNearBytes   = new byte[450 * 448 * 2];
        depthNearCapture = new MediaCapture();
        try
        {
            await depthNearCapture.InitializeAsync(
                new MediaCaptureInitializationSettings()
            {
                SourceGroup          = depthGroup,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
                );
        }
        catch (Exception)
        {
            return(null);
        }

        var depthFrameSource = depthNearCapture.FrameSources[depthNearInfo.Id];
        var preferredFormat  = depthFrameSource.SupportedFormats.FirstOrDefault();
        // BUGFIX: FirstOrDefault may yield null when the source reports no
        // formats; the original then passed null to SetFormatAsync and threw.
        // Abort with the method's usual failure value instead.
        if (preferredFormat == null)
        {
            return(null);
        }
        await depthFrameSource.SetFormatAsync(preferredFormat);

        depthNearFrameReader = await depthNearCapture.CreateFrameReaderAsync(depthFrameSource, MediaEncodingSubtypes.D16);

        depthNearFrameReader.FrameArrived += DepthNearFrameReader_FrameArrived;
        await depthNearFrameReader.StartAsync();

        return(null);
    }
        /// <summary>
        /// Creates an ARGB32 reader over the color source, attaches the frame
        /// handler, then begins streaming.
        /// </summary>
        private async Task CreateFrameReader()
        {
            var reader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);

            reader.FrameArrived += ColorFrameReader_FrameArrived;
            mediaFrameReader     = reader;
            await mediaFrameReader.StartAsync();
        }
        //--------------------------------------------------------Set-, Get- Methods:---------------------------------------------------------\\
        #region --Set-, Get- Methods--


        #endregion
        //--------------------------------------------------------Misc Methods:---------------------------------------------------------------\\
        #region --Misc Methods (Public)--


        #endregion

        #region --Misc Methods (Private)--
        /// <summary>
        /// Starts the camera preview once and attaches the QR-code frame pipeline.
        /// Errors are logged and swallowed (best-effort UI control).
        /// </summary>
        private async Task startCameraAsync()
        {
            if (mediaCapture != null)
            {
                return; // already started
            }

            var settings = new MediaCaptureInitializationSettings()
            {
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };

            mediaCapture = new MediaCapture();
            try
            {
                await mediaCapture.InitializeAsync(settings);

                DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;
            }
            catch (UnauthorizedAccessException e)
            {
                Logger.Error("[QRCodeReaderControl] Failed to start camera: Access denied!", e);
                return;
            }

            try
            {
                cameraPreview_ce.Source = mediaCapture;
                await mediaCapture.StartPreviewAsync();

                // BUGFIX: the original condition was `FrameSources.Count <= 0`, so
                // the frame reader was only set up when there were NO sources —
                // First() then threw and startup always took the error path.
                if (mediaCapture.FrameSources.Count > 0)
                {
                    MediaFrameSource frameSource = mediaCapture.FrameSources.First().Value;
                    if (frameSource != null)
                    {
                        frameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);

                        // Unsubscribe first so handlers are never attached twice.
                        frameReader.FrameArrived -= FrameReader_FrameArrived;
                        frameReader.FrameArrived += FrameReader_FrameArrived;

                        QR_CODE_READER.ResultFound -= QR_CODE_READER_ResultFound;
                        QR_CODE_READER.ResultFound += QR_CODE_READER_ResultFound;
                    }
                    else
                    {
                        Logger.Error("[QRCodeReaderControl] Failed to start camera! No sources.");
                    }
                }
                else
                {
                    Logger.Error("[QRCodeReaderControl] Failed to start camera! No sources.");
                }
            }
            catch (Exception e)
            {
                Logger.Error("[QRCodeReaderControl] Failed to start camera!", e);
                return;
            }
        }
        /// <summary>
        /// Finds the first color VideoPreview source across all source groups,
        /// pins its ARGB32 format when available, and starts the frame reader.
        /// </summary>
        private async Task InitializeCameraFrameReader()
        {
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            MediaFrameSourceInfo colorSourceInfo = null;

            foreach (var sourceGroup in frameSourceGroups)
            {
                foreach (var sourceInfo in sourceGroup.SourceInfos)
                {
                    if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                        sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                    {
                        colorSourceInfo = sourceInfo;
                        break;
                    }
                }

                if (colorSourceInfo != null)
                {
                    break;
                }
            }

            // BUGFIX: the original dereferenced colorSourceInfo.Id without a null
            // check and crashed with a NullReferenceException when no color
            // preview source exists. (It also tracked an unused selectedGroup.)
            if (colorSourceInfo == null)
            {
                throw new System.InvalidOperationException("No color VideoPreview frame source was found.");
            }

            var colorFrameSource = CameraCapture.FrameSources[colorSourceInfo.Id];
            var preferredFormat  = colorFrameSource.SupportedFormats
                                   .FirstOrDefault(format => format.Subtype == MediaEncodingSubtypes.Argb32);

            // BUGFIX: preferredFormat was computed but never applied; request it
            // when the source actually offers ARGB32.
            if (preferredFormat != null)
            {
                await colorFrameSource.SetFormatAsync(preferredFormat);
            }

            CameraFrameReader = await CameraCapture.CreateFrameReaderAsync(colorFrameSource);

            await CameraFrameReader.StartAsync();
        }
Beispiel #14
0
        /// <summary>
        /// Initializes and starts Media Capture and frame reader.
        /// </summary>
        /// <returns>A task that completes once the frame reader has started.</returns>
        private static async Task StartMediaCaptureAsync()
        {
            // Video-only and shared read-only so multiple instances can access the camera concurrently.
            m_mediaCapture = new MediaCapture();
            var initSettings = new MediaCaptureInitializationSettings()
            {
                StreamingCaptureMode = StreamingCaptureMode.Video,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly
            };

            await m_mediaCapture.InitializeAsync(initSettings);

            // Helper: first color source with the given stream type, or null.
            MediaFrameSource PickColorSource(MediaStreamType streamType) =>
                m_mediaCapture.FrameSources.FirstOrDefault(source => source.Value.Info.MediaStreamType == streamType &&
                                                           source.Value.Info.SourceKind == MediaFrameSourceKind.Color).Value;

            // Prefer a color preview stream; fall back to a color record stream.
            var selectedFrameSource = PickColorSource(MediaStreamType.VideoPreview)
                                      ?? PickColorSource(MediaStreamType.VideoRecord);

            if (selectedFrameSource == null)
            {
                throw(new Exception("No valid video frame sources were found with source type color."));
            }

            Console.WriteLine($"{selectedFrameSource.Info.DeviceInformation?.Name} | MediaStreamType: {selectedFrameSource.Info.MediaStreamType} MediaFrameSourceKind: {selectedFrameSource.Info.SourceKind}");

            m_frameReader = await m_mediaCapture.CreateFrameReaderAsync(selectedFrameSource);

            m_frameReader.FrameArrived   += FrameArrivedHandler;
            m_frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
            await m_frameReader.StartAsync();
        }
Beispiel #15
0
        /// <summary>
        /// Locates the HoloLens sensor by hardware name, sets up capture and
        /// creates a buffered frame reader. Returns false when anything is missing.
        /// </summary>
        public async Task <bool> InitAsync()
        {
            var sourceGroup = await GetMediaSourceGroup("MN34150"); // name of the HoloLens camera

            if (sourceGroup == null)
            {
                System.Diagnostics.Debug.WriteLine("HoloLens camera not found");
                return false;
            }

            await InitializeMediaCapture(sourceGroup);

            var recordSource = mediaCapture.FrameSources.Values
                               .FirstOrDefault(fs => fs.Info.MediaStreamType == MediaStreamType.VideoRecord);

            if (recordSource == null)
            {
                System.Diagnostics.Debug.WriteLine("No valid media source found");
                return false;
            }

            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(recordSource);

            // Buffered mode queues frames instead of dropping late ones.
            mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
            mediaFrameReader.FrameArrived   += MediaFrameReader_FrameArrived;

            return true;
        }
        /// <summary>
        /// Creates and starts a frame reader on the first available camera frame
        /// source, restarting the listener when one is already running. Errors
        /// are routed to OnErrorAsync.
        /// </summary>
        private async Task StartFrameListenerAsync()
        {
            if (frameListenerRunning)
            {
                Logger.Info("Frame listener already running. Restarting it...");
                await StopFrameListenerAsync();
            }

            try
            {
                if (cameraCapture.FrameSources.Count > 0)
                {
                    // Use whichever source the capture session lists first.
                    MediaFrameSource frameSource = cameraCapture.FrameSources.First().Value;
                    // NOTE(review): `count` is never read — candidate for removal.
                    int count = cameraCapture.FrameSources.Count;
                    if (!(frameSource is null))
                    {
                        frameReader = await cameraCapture.CreateFrameReaderAsync(frameSource);

                        frameReader.FrameArrived += FrameReader_FrameArrived;
                        await frameReader.StartAsync();

                        frameListenerRunning = true;
                    }
                    else
                    {
                        Logger.Info("MediaFrameSource is null.");
                        await OnErrorAsync(PreviewError.MEDIA_FRAME_IS_NULL);
                    }
                }
                else
                {
                    Logger.Info("MediaFrameReader creation failed with: No camera available.");
                    await OnErrorAsync(PreviewError.MEDIA_FRAME_NO_CAMERA);
                }
            }
Beispiel #17
0
        /// <summary>
        /// Asynchronously starts video mode.
        ///
        /// Activates the web camera with the various settings specified in CameraParameters.
        /// Only one VideoCapture instance can start the video mode at any given time.
        /// After starting the video mode, you listen for new video frame samples via the VideoCapture.FrameSampleAcquired event,
        /// or by calling VideoCapture.RequestNextFrameSample() when will return the next available sample.
        /// While in video mode, more power will be consumed so make sure that you call VideoCapture.StopVideoModeAsync when you can afford the start/stop video mode overhead.
        /// </summary>
        /// <param name="setupParams">Parameters that change how video mode is used.</param>
        /// <param name="onVideoModeStartedCallback">This callback will be invoked once video mode has been activated.</param>
        public async void StartVideoModeAsync(CameraParameters setupParams, OnVideoModeStartedCallback onVideoModeStartedCallback)
        {
            var mediaFrameSource = _mediaCapture.FrameSources[_frameSourceInfo.Id]; //Returns a MediaFrameSource

            if (mediaFrameSource == null)
            {
                onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
                return;
            }

            try
            {
                var pixelFormat = ConvertCapturePixelFormatToMediaEncodingSubtype(setupParams.pixelFormat);

                _frameReader = await _mediaCapture.CreateFrameReaderAsync(mediaFrameSource, pixelFormat);

                _frameReader.FrameArrived += HandleFrameArrived;
                await _frameReader.StartAsync();

                VideoEncodingProperties properties = GetVideoEncodingPropertiesForCameraParams(setupParams);

                // Historical context: https://github.com/VulcanTechnologies/HoloLensCameraStream/issues/6
                if (setupParams.rotateImage180Degrees)
                {
                    properties.Properties.Add(ROTATION_KEY, 180);
                }

                //	gr: taken from here https://forums.hololens.com/discussion/2009/mixedrealitycapture
                IVideoEffectDefinition ved = new VideoMRCSettings(setupParams.enableHolograms, setupParams.enableVideoStabilization, setupParams.videoStabilizationBufferSize, setupParams.hologramOpacity);
                await _mediaCapture.AddVideoEffectAsync(ved, MediaStreamType.VideoPreview);

                await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(STREAM_TYPE, properties);

                onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(0, ResultType.Success, true));
            }
            catch (System.Exception)
            {
                // BUGFIX: this is async void — an unhandled exception here crashes
                // the process instead of reaching any caller. Report failure through
                // the callback, mirroring the null-source path above.
                onVideoModeStartedCallback?.Invoke(new VideoCaptureResult(1, ResultType.UnknownError, false));
            }
        }
Beispiel #18
0
    /// <summary>
    /// Locates the first color VideoPreview source across all source groups,
    /// records its current dimensions, and starts streaming frames.
    /// </summary>
    private async Task InitializeCameraFrameReader()
    {
        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        MediaFrameSourceInfo colorSourceInfo = null;

        foreach (var sourceGroup in frameSourceGroups)
        {
            foreach (var sourceInfo in sourceGroup.SourceInfos)
            {
                if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                    sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                {
                    colorSourceInfo = sourceInfo;
                    break;
                }
            }
            if (colorSourceInfo != null)
            {
                break;
            }
        }

        // BUGFIX: the original dereferenced colorSourceInfo.Id unconditionally and
        // crashed with a NullReferenceException when no color preview source
        // exists. (It also tracked a selectedGroup that was never read; removed.)
        if (colorSourceInfo == null)
        {
            throw new System.InvalidOperationException("No color VideoPreview frame source was found.");
        }

        var colorFrameSource = CameraCapture.FrameSources[colorSourceInfo.Id];

        CaptureWidth      = (int)colorFrameSource.CurrentFormat.VideoFormat.Width;
        CaptureHeight     = (int)colorFrameSource.CurrentFormat.VideoFormat.Height;
        CameraFrameReader = await CameraCapture.CreateFrameReaderAsync(colorFrameSource);

        await CameraFrameReader.StartAsync();
    }
Beispiel #19
0
    /// <summary>
    /// Opens the sensor at [group][sensor] in shared CPU mode and forwards every
    /// frame to FrameArrived tagged with the caller-supplied id. Failures are
    /// logged on the Unity app thread.
    /// </summary>
    private async void InitSensor(int group, int sensor, int id)
    {
        var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        var sourceGroup = sourceGroups[group];
        var sourceInfo  = sourceGroup.SourceInfos[sensor];
        var capture     = new MediaCapture();
        var settings    = new MediaCaptureInitializationSettings()
        {
            SourceGroup          = sourceGroup,
            SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
            StreamingCaptureMode = StreamingCaptureMode.Video,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
            //PhotoCaptureSource = PhotoCaptureSource.Photo,
        };

        try
        {
            await capture.InitializeAsync(settings);

            var frameSource = capture.FrameSources[sourceInfo.Id];
            var frameReader = await capture.CreateFrameReaderAsync(frameSource, frameSource.CurrentFormat.Subtype);

            frameReader.FrameArrived += (sender, e) => FrameArrived(sender, e, id);
            await frameReader.StartAsync();
        }
        catch (Exception e)
        {
            UnityEngine.WSA.Application.InvokeOnAppThread(() => { Debug.Log(e); }, true);
        }
    }
Beispiel #20
0
        /// <summary>
        /// Returns a reader for the first source of the requested kind that
        /// offers a decodable format, or null when none qualifies.
        /// </summary>
        /// <param name="mediaCapture">An initialized capture session.</param>
        /// <param name="kind">The sensor kind (Color, Depth, Infrared, ...).</param>
        public static async Task <MediaFrameReader> GetFrameReaderAsync(MediaCapture mediaCapture, MediaFrameSourceKind kind)
        {
            var sources = mediaCapture.FrameSources.Values.Where(mfs => mfs.Info.SourceKind == kind);

            foreach (var source in sources)
            {
                string requestedSubtype = null;

                foreach (var format in source.SupportedFormats)
                {
                    requestedSubtype = GetSubtypeForFrameReader(kind, format);

                    if (requestedSubtype != null)
                    {
                        await source.SetFormatAsync(format);

                        break;
                    }
                }

                if (requestedSubtype == null)
                {
                    continue;
                }

                // BUGFIX: the original kept looping after creating a reader,
                // overwriting — and leaking, since MediaFrameReader is IDisposable —
                // any previously created reader. Return the first usable one.
                return await mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
            }

            return null;
        }
Beispiel #21
0
        /// <summary>
        /// Initializes shared, CPU-backed video capture on the first source group
        /// and starts streaming frames into HandleFrames.
        /// </summary>
        public async Task InitMedia()
        {
            var groups = await MediaFrameSourceGroup.FindAllAsync();

            capture = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = groups[0],
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu
            };
            await capture.InitializeAsync(settings);

            // Prefer a color source, otherwise fall back to the first available
            // one. (The original expressed this fallback as try { First(pred) }
            // catch { First() } — exceptions used for control flow.)
            var frameSource = capture.FrameSources.Values.FirstOrDefault(x => x.Info.SourceKind == MediaFrameSourceKind.Color)
                              ?? capture.FrameSources.Values.First();

            reader = await capture.CreateFrameReaderAsync(frameSource);

            reader.FrameArrived += HandleFrames;
            await reader.StartAsync();
        }
    /// <summary>
    /// Opens the first source of the first source group in shared CPU mode and
    /// streams its frames to FrameArrived using the source's current subtype.
    /// Failures are logged on the Unity app thread.
    /// </summary>
    private async void InitSensor()
    {
        var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        var sourceGroup = sourceGroups[0];
        var sourceInfo  = sourceGroup.SourceInfos[0];
        var capture     = new MediaCapture();
        var settings    = new MediaCaptureInitializationSettings()
        {
            SourceGroup          = sourceGroup,
            SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
            StreamingCaptureMode = StreamingCaptureMode.Video,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
        };

        try
        {
            await capture.InitializeAsync(settings);

            var frameSource = capture.FrameSources[sourceInfo.Id];
            // Intrinsics are queried but currently unused (kept for the
            // commented-out depth-range experiments below).
            var cameraIntrisics = frameSource.TryGetCameraIntrinsics(frameSource.CurrentFormat);
            // Globals.maxdepth = cameraIntrisics.FocalLength.X;
            // Globals.maxdepth = cameraIntrisics.FocalLength.Y;
            var frameReader = await capture.CreateFrameReaderAsync(frameSource, frameSource.CurrentFormat.Subtype);

            frameReader.FrameArrived += FrameArrived;
            await frameReader.StartAsync();
        }
        catch (Exception e)
        {
            UnityEngine.WSA.Application.InvokeOnAppThread(() => { UnityEngine.Debug.Log(e); }, true);
        }
    }
        // Create and initialize the MediaCapture object.

        /// <summary>
        /// Finds the "Surface Camera Front" source group, enables auto exposure
        /// and brightness, selects a format at least 1080 px wide, and streams
        /// frames until <c>autoStopCamera</c> fires (500 ms).
        /// Returns early (no-op) when the camera or a usable format is missing.
        /// </summary>
        public async Task CaptureImageAsync()
        {
            autoStopCamera          = new System.Timers.Timer(500);
            autoStopCamera.Elapsed += AutoStopCamera;
            frameQueue              = new List <SoftwareBitmap>( );
            var cameraName       = "Surface Camera Front";
            var frameSourceGroup = await MediaFrameSourceGroup.FindAllAsync( );

            Debug.WriteLine($"frameSourceGroup = {frameSourceGroup}");
            var cameraGroup = frameSourceGroup.FirstOrDefault(fg => fg.DisplayName == cameraName);

            Debug.WriteLine($"cameraGroup = {cameraGroup}");

            // Bail out instead of initializing MediaCapture with a null
            // SourceGroup and later crashing on ContainsKey(null).
            if (cameraGroup == null)
            {
                return;
            }

            var mediaCapture = new MediaCapture( );
            var settings     = new MediaCaptureInitializationSettings( )
            {
                SourceGroup          = cameraGroup,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video,
            };

            await mediaCapture.InitializeAsync(settings);

            var exposureSuccess       = mediaCapture.VideoDeviceController.Exposure.TrySetAuto(true);
            var brightnessSuccess     = mediaCapture.VideoDeviceController.Brightness.TrySetAuto(true);
            var currExposureSuccess   = mediaCapture.VideoDeviceController.Exposure.TryGetValue(out double expValue);
            var currBrightnessSuccess = mediaCapture.VideoDeviceController.Brightness.TryGetValue(out double brightValue);

            Debug.WriteLine($"exposureSuccess: {exposureSuccess}");
            Debug.WriteLine($"brightnessSuccess: {brightnessSuccess}");
            Debug.WriteLine($"expValue: {expValue}");
            Debug.WriteLine($"brightValue: {brightValue}");

            var sourceInfoId = cameraGroup.SourceInfos?.FirstOrDefault( )?.Id;

            // TryGetValue avoids the ContainsKey + indexer double lookup and the
            // ArgumentNullException that ContainsKey(null) would have thrown.
            MediaFrameSource mediaFrameSource = null;
            if (sourceInfoId != null)
            {
                mediaCapture.FrameSources.TryGetValue(sourceInfoId, out mediaFrameSource);
            }

            // Original dereferenced a possibly-null mediaFrameSource here (NRE).
            if (mediaFrameSource == null)
            {
                return;
            }

            var preferredFormat = mediaFrameSource.SupportedFormats.Where(format =>
            {
                return(format.VideoFormat.Width >= 1080);
            }).FirstOrDefault( );

            if (preferredFormat == null)
            {
                // Our desired format is not supported
                return;
            }

            await mediaFrameSource.SetFormatAsync(preferredFormat);

            frameReader?.Dispose( );
            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource);

            frameReader.FrameArrived += FrameReaderOnFrameArrived;

            await frameReader.StartAsync( );

            autoStopCamera.Start( );
        }
        /// <summary>
        /// Tears down any existing frame reader, then opens the camera at
        /// <paramref name="cameraNum"/> in the source group at
        /// <paramref name="deviceNum"/> and starts a new reader
        /// (ARGB32 for color sources, the source's current subtype otherwise).
        /// </summary>
        /// <param name="deviceNum">Index into the list returned by FindAllAsync.</param>
        /// <param name="cameraNum">Index into the group's SourceInfos.</param>
        private async void SettingSensorData(int deviceNum, int cameraNum)
        {
            // Stop and unhook the previous reader before creating a new one,
            // so frames do not arrive on a disposed handler.
            if (mediaFrameReader != null)
            {
                await mediaFrameReader.StopAsync();

                mediaFrameReader.FrameArrived -= FrameArrived;
                mediaFrameReader.Dispose();
                mediaFrameReader = null;
            }

            var mediaFrameSourceGroupList = await MediaFrameSourceGroup.FindAllAsync();

            var mediaFrameSourceGroup = mediaFrameSourceGroupList[deviceNum];
            var mediaFrameSourceInfo  = mediaFrameSourceGroup.SourceInfos[cameraNum];
            MediaFrameSourceKind kind = mediaFrameSourceInfo.SourceKind;
            var mediaCapture          = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = mediaFrameSourceGroup,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
            };

            // Removed the original `catch (Exception) { throw; }` wrapper: a
            // bare rethrow with no logging or cleanup is a no-op that only
            // obscures the code; exceptions propagate identically without it.
            await mediaCapture.InitializeAsync(settings);

            var mediaFrameSource = mediaCapture.FrameSources[mediaFrameSourceInfo.Id];
            if (kind == MediaFrameSourceKind.Color)
            {
                // Color frames are converted to ARGB32 for downstream processing.
                mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Argb32);
            }
            else
            {
                mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
            }
            mediaFrameReader.FrameArrived += FrameArrived;
            await mediaFrameReader.StartAsync();
        }
Beispiel #25
0
        /// <summary>
        /// Locates a depth-capable frame source group, initializes media
        /// capture, switches the depth source to the D16 format, and starts
        /// <c>_depthReader</c>. Logs an error and returns on any failure.
        /// </summary>
        private async Task InitMediaSourceAsync()
        {
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            if (allGroups.Count == 0)
            {
                // Usually a missing webcam capability in the app manifest.
                Debug.LogError("cannot found MediaFrameSourceGroup. アプリケーションマニュフェストを確認してください。");
                return;
            }

            MediaFrameSourceGroup sourceGroup =
                allGroups.FirstOrDefault(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Depth));

            if (sourceGroup == null)
            {
                Debug.LogError("深度カメラが見つからないようです。");
                return;
            }

            try
            {
                await InitializeMediaCaptureAsync(sourceGroup);
            }
            catch (Exception exception)
            {
                Debug.LogError("InitializeMediaCaptureAsyncに失敗しました" + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            MediaFrameSource source = _mediaCapture.FrameSources.Values
                                      .FirstOrDefault(s => s.Info.SourceKind == MediaFrameSourceKind.Depth);

            if (source == null)
            {
                Debug.LogError("sourceが見つかりません。");
                // BUG FIX: the original fell through and dereferenced the null
                // source below, throwing NullReferenceException right after
                // logging. Return instead.
                return;
            }

            // D16: 16-bit depth. Subtype comparison is case-insensitive per convention.
            MediaFrameFormat format = source.SupportedFormats.FirstOrDefault(f =>
                                                                             String.Equals(f.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase));

            if (format == null)
            {
                return;
            }

            await source.SetFormatAsync(format);

            _depthReader = await _mediaCapture.CreateFrameReaderAsync(source, format.Subtype);

            MediaFrameReaderStartStatus status = await _depthReader.StartAsync();

            if (status != MediaFrameReaderStartStatus.Success)
            {
                Debug.LogError("_depthReader.StartAsyncに失敗しました");
            }
        }
Beispiel #26
0
        /// <summary>
        /// Page-load handler: opens the camera, selects an NV12 format at the
        /// target width, starts a frame reader feeding <c>ProcessPreview</c>,
        /// and shows the live preview. Silently returns when camera access is
        /// denied or no suitable source/format exists.
        /// </summary>
        private async void MainPage_OnLoaded(object sender, RoutedEventArgs e)
        {
            // Keep the display active while the camera preview is running.
            displayRequest.RequestActive();

            try
            {
                var(colorSourceInfo, selectedGroup) = await MediaFrameSourceInfo();

                // Guard both tuple members: indexing FrameSources with a null
                // id would throw ArgumentNullException.
                if (selectedGroup == null || colorSourceInfo == null)
                {
                    return;
                }

                mediaCapture = new MediaCapture();
                await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
                {
                    SourceGroup          = selectedGroup,
                    SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                    MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                    StreamingCaptureMode = StreamingCaptureMode.Video
                });

                var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
                var preferredFormat  = colorFrameSource.SupportedFormats.FirstOrDefault(format =>
                                                                                        format.VideoFormat.Width == targetWidth &&
                                                                                        string.Compare(format.Subtype, MediaEncodingSubtypes.Nv12, true) == 0);

                if (preferredFormat == null)
                {
                    // Our desired format is not supported
                    return;
                }

                await colorFrameSource.SetFormatAsync(preferredFormat);

                mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Nv12);

                await mediaFrameReader.StartAsync();

                // Fire-and-forget by design: ProcessPreview runs for the
                // lifetime of the reader. The discard makes the unobserved
                // Task explicit (the original silently ignored it, CS4014).
                _ = Task.Run(() => ProcessPreview(mediaFrameReader));
            }
            catch (UnauthorizedAccessException)
            {
                // This will be thrown if the user denied access to the camera in privacy settings
                return;
            }

            try
            {
                CameraPreview.Source = mediaCapture;

                await mediaCapture.StartPreviewAsync();
            }
            catch (System.IO.FileLoadException)
            {
                // Another app has exclusive control of the capture device.
            }
        }
Beispiel #27
0
    /// <summary>
    /// Method to start capturing camera frames at desired resolution.
    /// Prefers the back camera when one is present; frames are converted to
    /// BGRA8 and scaled to <paramref name="width"/> x <paramref name="height"/>.
    /// No-op when a capture session is already streaming.
    /// </summary>
    /// <param name="width">Output frame width in pixels.</param>
    /// <param name="height">Output frame height in pixels.</param>
    /// <returns>A task that completes once the frame reader has started.</returns>
    public async Task InitializeMediaFrameReaderAsync(uint width = 224, uint height = 224)
    {
        // Check state of media capture object
        if (_mediaCapture == null || _mediaCapture.CameraStreamState == CameraStreamState.Shutdown || _mediaCapture.CameraStreamState == CameraStreamState.NotStreaming)
        {
            if (_mediaCapture != null)
            {
                _mediaCapture.Dispose();
            }

            // Find right camera settings and prefer back camera
            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
            var allCameras = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            Debug.Log($"InitializeMediaFrameReaderAsync: allCameras: {allCameras}");

            var selectedCamera = allCameras.FirstOrDefault(c => c.EnclosureLocation?.Panel == Panel.Back) ?? allCameras.FirstOrDefault();
            Debug.Log($"InitializeMediaFrameReaderAsync: selectedCamera: {selectedCamera}");


            if (selectedCamera != null)
            {
                settings.VideoDeviceId = selectedCamera.Id;
                Debug.Log($"InitializeMediaFrameReaderAsync: settings.VideoDeviceId: {settings.VideoDeviceId}");
            }

            // Init capturer and Frame reader
            _mediaCapture = new MediaCapture();
            Debug.Log("InitializeMediaFrameReaderAsync: Successfully created media capture object.");

            await _mediaCapture.InitializeAsync(settings);

            Debug.Log("InitializeMediaFrameReaderAsync: Successfully initialized media capture object.");

            // Idiom: First(predicate) instead of Where(...).First().
            var frameSource = _mediaCapture.FrameSources.First(source => source.Value.Info.SourceKind == MediaFrameSourceKind.Color);
            Debug.Log($"InitializeMediaFrameReaderAsync: frameSource: {frameSource}.");

            // Convert the pixel formats
            var subtype = MediaEncodingSubtypes.Bgra8;

            // The overloads of CreateFrameReaderAsync with the format arguments will actually make a copy in FrameArrived
            BitmapSize outputSize = new BitmapSize {
                Width = width, Height = height
            };
            _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(frameSource.Value, subtype, outputSize);

            Debug.Log("InitializeMediaFrameReaderAsync: Successfully created media frame reader.");

            // Realtime mode drops frames rather than buffering them, keeping latency low.
            _mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;

            await _mediaFrameReader.StartAsync();

            Debug.Log("InitializeMediaFrameReaderAsync: Successfully started media frame reader.");

            IsCapturing = true;
        }
    }
        /// <summary>
        /// Navigation handler: wires up the operation combo box, picks the
        /// first source group exposing a color source, initializes capture,
        /// and starts a BGRA8 frame reader at a reduced resolution.
        /// </summary>
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            App.dispatcher = this.Dispatcher;
            Cv2.InitContainer((object)App.container);
            //_helper.SetContainer(App.container);
            rootPage = MainPage.Current;

            // Populate the operation selector and default to Blur.
            OperationComboBox.ItemsSource   = Enum.GetValues(typeof(OperationType));
            OperationComboBox.SelectedIndex = 0;
            currentOperation = OperationType.Blur;

            // Enumerate capture hardware and keep only groups with a color source.
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var eligibleGroups = frameSourceGroups
                                 .Select(g => new
            {
                Group      = g,
                SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
            })
                                 .Where(g => g.SourceInfo != null)
                                 .ToList();

            if (eligibleGroups.Count == 0)
            {
                // No camera sources found
                return;
            }

            var chosen = eligibleGroups.FirstOrDefault();

            // Initialize MediaCapture; bail out (with cleanup) on failure.
            try
            {
                await InitializeMediaCaptureAsync(chosen.Group);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            // Create the frame reader at a lower resolution so per-frame
            // image processing stays responsive.
            MediaFrameSource colorSource = _mediaCapture.FrameSources[chosen.SourceInfo.Id];
            var readerSize = new BitmapSize()
            {
                Height = IMAGE_ROWS,
                Width  = IMAGE_COLS
            };

            _reader = await _mediaCapture.CreateFrameReaderAsync(colorSource, MediaEncodingSubtypes.Bgra8, readerSize);

            _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
            await _reader.StartAsync();

            _FPSTimer.Start();
        }
Beispiel #29
0
 /// <summary>
 /// Creates frame readers for the first color (NV12) and audio sources found
 /// on the already-initialized <c>mediaCapture</c>, recording each source's
 /// device information and hooking the corresponding FrameArrived handlers.
 /// </summary>
 /// <remarks>
 /// NOTE(review): CreateFrameReaderAsync is blocked on synchronously via
 /// AsTask().GetAwaiter().GetResult() — sync-over-async. On a thread with a
 /// synchronization context (e.g. UI thread) this risks deadlock; presumably
 /// this is only called from a worker thread — TODO confirm callers.
 /// </remarks>
 private void InitReader()
 {
     foreach (MediaFrameSource source in mediaCapture.FrameSources.Values)
     {
         if (source.Info.SourceKind == MediaFrameSourceKind.Color)
         {
             // Video: request NV12 so frames arrive in a uniform pixel format.
             VideoDevice      = source.Info.DeviceInformation;
             videoFrameReader = mediaCapture.CreateFrameReaderAsync(source, MediaEncodingSubtypes.Nv12).AsTask().GetAwaiter().GetResult();
             videoFrameReader.FrameArrived += VideoFrameArrivedEvent;
         }
         else if (source.Info.SourceKind == MediaFrameSourceKind.Audio)
         {
             // Audio: keep the source's current format and remember it for consumers.
             AudioDevice      = source.Info.DeviceInformation;
             AudioFormat      = source.CurrentFormat;
             audioFrameReader = mediaCapture.CreateFrameReaderAsync(source).AsTask().GetAwaiter().GetResult();
             audioFrameReader.FrameArrived += AudioFrameArrivedEvent;
         }
     }
 }
Beispiel #30
0
        /// <summary>
        /// Factory: finds the first color VideoPreview source across all frame
        /// source groups, initializes CPU-memory video capture on it, and
        /// returns a <see cref="VideoFrameProcessor"/> wrapping a started
        /// reader. Returns null when no camera exists (e.g. emulator) or the
        /// reader fails to start.
        /// </summary>
        public static async Task <VideoFrameProcessor> CreateAsync()
        {
            IReadOnlyList <MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();

            // Pick the first group containing a color VideoPreview source,
            // together with that source (replaces the original nested loops).
            var match = groups
                        .Select(g => new
            {
                Group = g,
                Info  = g.SourceInfos.FirstOrDefault(i =>
                                                     i.MediaStreamType == MediaStreamType.VideoPreview &&
                                                     i.SourceKind == MediaFrameSourceKind.Color)
            })
                        .FirstOrDefault(x => x.Info != null);

            // No valid camera was found. This will happen on the emulator.
            if (match == null)
            {
                return(null);
            }

            var settings = new MediaCaptureInitializationSettings
            {
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu, // Need SoftwareBitmaps for FaceAnalysis
                StreamingCaptureMode = StreamingCaptureMode.Video,       // Only need to stream video
                SourceGroup          = match.Group,
            };

            var capture = new MediaCapture();
            await capture.InitializeAsync(settings);

            MediaFrameSource frameSource = capture.FrameSources[match.Info.Id];
            MediaFrameReader frameReader = await capture.CreateFrameReaderAsync(frameSource);

            MediaFrameReaderStartStatus startStatus = await frameReader.StartAsync();

            // Only create a VideoFrameProcessor if the reader successfully started
            return startStatus == MediaFrameReaderStartStatus.Success
                ? new VideoFrameProcessor(capture, frameReader, frameSource)
                : null;
        }