public FrameSourceGroupModel(MediaFrameSourceGroup group)
 {
     SourceGroup = group;
     this.Id = group.Id;
     this.DisplayName = group.DisplayName;
     this.SourceInfos = group.SourceInfos.Select(
         sourceInfo => new FrameSourceInfoModel(sourceInfo)).ToArray();
 }
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            rootPage = MainPage.Current;

             // Set up the combo box and the default operation
            OperationComboBox.ItemsSource   = Enum.GetValues(typeof(OperationType));
            OperationComboBox.SelectedIndex = 0;
            currentOperation = OperationType.Blur;

            // Find the sources
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            var sourceGroups = allGroups.Select(g => new
            {
                Group      = g,
                SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
            }).Where(g => g.SourceInfo != null).ToList();

            if (sourceGroups.Count == 0)
            {
                // No camera sources found
                return;
            }
            var selectedSource = sourceGroups.FirstOrDefault();

            // Initialize MediaCapture
            try
            {
                await InitializeMediaCaptureAsync(selectedSource.Group);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            // Create the frame reader
            MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
            BitmapSize       size        = new BitmapSize() // Choose a lower resolution to make the image processing more performant
            {
                Height = IMAGE_ROWS,
                Width  = IMAGE_COLS
            };

            _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);

            _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
            await _reader.StartAsync();

            _FPSTimer.Start();
        }
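CleanupMediaCaptureAsync is referenced above but not shown; a minimal sketch of what it could look like, assuming the _mediaCapture, _reader and _FPSTimer fields used in OnNavigatedTo:

        private async Task CleanupMediaCaptureAsync()
        {
            _FPSTimer.Stop();

            if (_reader != null)
            {
                _reader.FrameArrived -= ColorFrameReader_FrameArrivedAsync;
                await _reader.StopAsync();
                _reader.Dispose();
                _reader = null;
            }

            if (_mediaCapture != null)
            {
                _mediaCapture.Dispose();
                _mediaCapture = null;
            }
        }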
        /// <summary>
        /// Adds a SourceGroup with given Id to the collection.
        /// </summary>
        private async Task AddDeviceAsync(string id)
        {
            var group = await MediaFrameSourceGroup.FromIdAsync(id);

            if (group != null)
            {
                await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    _sourceCollection.Add(new FrameSourceGroupModel(group));
                });
            }
        }
Example #4
        /// <summary>
        /// Asynchronously create a Hand Detector with the first depth camera found
        /// </summary>
        /// <param name="id">The ID of the device to look for, if any. If null, the first device with depth capabilities will be chosen.</param>
        /// <returns>The asynchronous task</returns>
        public static async Task <HandDetector> CreateAsync(String id = null)
        {
            Debug.WriteLine("Initialize the hand detector");

            //Search for the correct media frame source
            MediaFrameSourceGroup selectedFrameSourceGroup = null;
            MediaFrameSourceInfo  selectedFrameSourceInfo  = null;

            IReadOnlyList <MediaFrameSourceGroup> allFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            Debug.WriteLine($"Found {allFrameSourceGroups.Count} frame sources...");

            foreach (MediaFrameSourceGroup group in allFrameSourceGroups)
            {
                Debug.WriteLine($"Group: {group.DisplayName}");
                Debug.WriteLine($"Found {group.SourceInfos.Count} source infos...");
                foreach (MediaFrameSourceInfo info in group.SourceInfos)
                {
                    //Debug.WriteLine($"{info.SourceKind} : {info.MediaStreamType} -> {info.DeviceInformation.EnclosureLocation.Panel}");
                    //If no ID was given, or this source's device matches the requested ID
                    if ((id == null || info.DeviceInformation.Id == id) && (info.MediaStreamType == MediaStreamType.VideoPreview || info.MediaStreamType == MediaStreamType.VideoRecord))
                    {
                        //Check the depth capabilities
                        if (info.SourceKind == MediaFrameSourceKind.Depth)
                        {
                            selectedFrameSourceGroup = group;
                            selectedFrameSourceInfo  = info;

                            Debug.WriteLine($"Found Device : {info.DeviceInformation.Name}:{info.DeviceInformation.Id}");
                        }
                    }

                    if (selectedFrameSourceGroup != null)
                    {
                        break;
                    }
                }
                if (selectedFrameSourceGroup != null)
                {
                    break;
                }
            }

            if (selectedFrameSourceGroup == null)
            {
                Debug.WriteLine("No frame source available found");
                return(null);
            }

            HandDetector handDetector = new HandDetector(selectedFrameSourceGroup, selectedFrameSourceInfo);

            return(handDetector);
        }
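A hypothetical call site for the factory above; RunHandDetectionAsync is an illustrative name, and what happens after creation depends on the rest of the HandDetector class, which is not shown here:

        private static async Task RunHandDetectionAsync()
        {
            // Create a detector with the first depth camera found (pass a device ID to target a specific camera).
            HandDetector detector = await HandDetector.CreateAsync();

            if (detector == null)
            {
                Debug.WriteLine("No depth camera available");
                return;
            }
            // ... start detection with the returned instance ...
        }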
        async static Task <IEnumerable <MediaFrameSourceGroup> > GetGroupsSupportingSourceKindsAsync(
            params MediaFrameSourceKind[] kinds)
        {
            var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var groups =
                sourceGroups.Where(
                    group => kinds.All(
                        kind => group.SourceInfos.Any(sourceInfo => sourceInfo.SourceKind == kind)));

            return(groups);
        }
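For illustration, the helper above could be used like this to find the groups that expose both a color and a depth source (hypothetical call site):

        static async Task ListColorAndDepthGroupsAsync()
        {
            var groups = await GetGroupsSupportingSourceKindsAsync(
                MediaFrameSourceKind.Color, MediaFrameSourceKind.Depth);

            foreach (var group in groups)
            {
                Debug.WriteLine($"Color+Depth group: {group.DisplayName}");
            }
        }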
Example #6
        private async Task PlayLiveVideo()
        {
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            var eligibleGroups = allGroups.Select(g => new
            {
                Group = g,

                // For each source kind, find the source which offers that kind of media frame,
                // or null if there is no such source.
                SourceInfos = new MediaFrameSourceInfo[]
                {
                    g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front &&
                                                 info.SourceKind == MediaFrameSourceKind.Color),
                    g.SourceInfos.FirstOrDefault(info => info.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back &&
                                                 info.SourceKind == MediaFrameSourceKind.Color)
                }
            }).Where(g => g.SourceInfos.Any(info => info != null)).ToList();

            if (eligibleGroups.Count == 0)
            {
                System.Diagnostics.Debug.WriteLine("No source group with front and back-facing camera found.");
                return;
            }

            var selectedGroupIndex = 0; // Select the first eligible group
            MediaFrameSourceGroup selectedGroup   = eligibleGroups[selectedGroupIndex].Group;

            // Prefer the front-facing color source; fall back to the back-facing one if no front source was found.
            MediaFrameSourceInfo  frontSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0]
                                                    ?? eligibleGroups[selectedGroupIndex].SourceInfos[1];

            MediaCapture mediaCapture = new MediaCapture();
            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = selectedGroup,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video,
            };

            try
            {
                await mediaCapture.InitializeAsync(settings);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
                return;
            }
            var frameMediaSource1 = MediaSource.CreateFromMediaFrameSource(mediaCapture.FrameSources[frontSourceInfo.Id]);

            VideoStreamer.SetPlaybackSource(frameMediaSource1);
            VideoStreamer.Play();
        }
        private bool InitCaptureAsync()
        {
            var mediaCapture = new MediaCapture();
            var sourceGroup  = MediaFrameSourceGroup.FindAllAsync().AsTask().GetAwaiter().GetResult();

            if (sourceGroup.Count == 0)
            {
                return(false);
            }
            m_capturer = new GeneralMediaCapturer(sourceGroup[0], StreamingCaptureMode.Video);
            m_capturer.OnVideoFrameArrived += VideoFrameArrivedEvent;
            return(true);
        }
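Despite its Async suffix, InitCaptureAsync above blocks the calling thread on FindAllAsync. A genuinely asynchronous variant, assuming the same GeneralMediaCapturer wrapper and fields, might look like this:

        private async Task<bool> InitCaptureTrulyAsync()
        {
            var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            if (sourceGroups.Count == 0)
            {
                return false;
            }
            m_capturer = new GeneralMediaCapturer(sourceGroups[0], StreamingCaptureMode.Video);
            m_capturer.OnVideoFrameArrived += VideoFrameArrivedEvent;
            return true;
        }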
Example #8
        private async Task <MediaFrameSourceGroup> GetMediaSourceGroup(string name)
        {
            var groups = await MediaFrameSourceGroup.FindAllAsync();

            foreach (var group in groups)
            {
                if (group.DisplayName == name)
                {
                    return(group);
                }
            }
            return(null);
        }
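A hypothetical call site for the lookup above; "Integrated Camera" is only a placeholder display name:

        private async Task SelectCameraByNameAsync()
        {
            // Returns null when no group matches the given display name.
            var group = await GetMediaSourceGroup("Integrated Camera");

            if (group == null)
            {
                System.Diagnostics.Debug.WriteLine("No source group with that display name was found.");
            }
        }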
Example #9
        public SourceGroupCollection(CoreDispatcher uiDispatcher)
        {
            _dispatcher       = uiDispatcher;
            _sourceCollection = new ObservableCollection <FrameSourceGroupModel>();

            var deviceSelector = MediaFrameSourceGroup.GetDeviceSelector();

            _watcher          = DeviceInformation.CreateWatcher(deviceSelector);
            _watcher.Added   += Watcher_Added;
            _watcher.Removed += Watcher_Removed;
            _watcher.Updated += Watcher_Updated;
            _watcher.Start();
        }
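The watcher handlers are not shown here; a minimal sketch, assuming the AddDeviceAsync helper shown earlier and removal by matching Id on the UI thread:

        private async void Watcher_Added(DeviceWatcher sender, DeviceInformation args)
        {
            await AddDeviceAsync(args.Id);
        }

        private async void Watcher_Removed(DeviceWatcher sender, DeviceInformationUpdate args)
        {
            await _dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                var model = _sourceCollection.FirstOrDefault(m => m.Id == args.Id);
                if (model != null)
                {
                    _sourceCollection.Remove(model);
                }
            });
        }

        private void Watcher_Updated(DeviceWatcher sender, DeviceInformationUpdate args)
        {
            // Nothing to update for this simple model.
        }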
Example #10
        private async Task InitializeMediaCaptureAsync(MediaFrameSourceGroup sourceGroup)
        {
            if (_mediaCapture != null)
            {
                return;
            }

            var allVideoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

            if (allVideoDevices.Count > 0)
            {
                var cameraDevice = allVideoDevices[0];

                // Initialize mediacapture with the source group.
                _mediaCapture = new MediaCapture();
                var settings = new MediaCaptureInitializationSettings
                {
                    VideoDeviceId = cameraDevice.Id,

                    SourceGroup = sourceGroup,

                    // This media capture can share streaming with other apps.
                    SharingMode = MediaCaptureSharingMode.ExclusiveControl,

                    // Only stream video and don't initialize audio capture devices.
                    StreamingCaptureMode = StreamingCaptureMode.Video,

                    // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                    // instead of preferring GPU D3DSurface images.
                    MemoryPreference = MediaCaptureMemoryPreference.Cpu
                };

                await _mediaCapture.InitializeAsync(settings);

                var cameraProperties = _mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(MediaStreamType.VideoPreview).Select(x => x as VideoEncodingProperties).ToList();

                foreach (var mediaEncodingProperty in cameraProperties)
                {
                    if (mediaEncodingProperty.Width == 960 &&
                        mediaEncodingProperty.Height == 544 &&
                        mediaEncodingProperty.FrameRate.Numerator == 15 &&
                        string.Compare(mediaEncodingProperty.Subtype, "YUY2") == 0)
                    {
                        Debug.WriteLine("Chosen: " + mediaEncodingProperty.Width + "x" + mediaEncodingProperty.Height + " FPS: " + mediaEncodingProperty.FrameRate.Numerator + "Type:" + mediaEncodingProperty.Type + "   SubType:" + mediaEncodingProperty.Subtype);
                        await _mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, mediaEncodingProperty);

                        break;
                    }
                }
            }
        }
Example #11
        /// <summary>
        /// Video Capture: Initialize Camera Capture.
        /// Implementation is from the UWP official tutorial.
        /// https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader
        /// </summary>
        public async void InitializeCamera()
        {
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var selectedGroupObjects = frameSourceGroups.Select(group =>
                                                                new
            {
                sourceGroup     = group,
                colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
                {
                    // On Xbox/Kinect, omit the MediaStreamType and EnclosureLocation tests
                    return(sourceInfo.SourceKind == MediaFrameSourceKind.Color);
                })
            }).Where(t => t.colorSourceInfo != null)
                                       .FirstOrDefault();

            MediaFrameSourceGroup selectedGroup   = selectedGroupObjects?.sourceGroup;
            MediaFrameSourceInfo  colorSourceInfo = selectedGroupObjects?.colorSourceInfo;

            if (selectedGroup == null)
            {
                return;
            }

            mediaCapture = new MediaCapture();

            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = selectedGroup,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };

            try
            {
                await mediaCapture.InitializeAsync(settings);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("MediaCapture initialization failed: " + ex.Message);
                return;
            }

            var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];

            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Argb32);

            mediaFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
            await mediaFrameReader.StartAsync();
        }
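ColorFrameReader_FrameArrived is not shown above; following the linked tutorial, a minimal handler that pulls the latest frame as a SoftwareBitmap might look like this (the processing step is left as a placeholder):

        private void ColorFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            // TryAcquireLatestFrame returns null if no new frame is available yet.
            using (var mediaFrameReference = sender.TryAcquireLatestFrame())
            {
                var softwareBitmap = mediaFrameReference?.VideoMediaFrame?.SoftwareBitmap;
                if (softwareBitmap != null)
                {
                    // ... process or display the bitmap here ...
                    softwareBitmap.Dispose();
                }
            }
        }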
Example #12
    private async Task StartCapturer()
    {
        MediaFrameSource mediaFrameSource;
        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        if (allGroups.Count <= 0)
        {
            // No frame source groups were found; return instead of indexing into the empty list below.
            //textmesh.text = "Orca";
            Debug.Log("Orca");
            return;
        }
        var mediaCapture = new MediaCapture();
        var settings     = new MediaCaptureInitializationSettings
        {
            SourceGroup          = allGroups[0],
            SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
            StreamingCaptureMode = StreamingCaptureMode.Video,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        //render.material.color = new Color(0, 0, 0.5f);

        mediaFrameSource = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == MediaStreamType.VideoRecord);
        try {
            MediaFrameFormat targetResFormat = null;
            foreach (var f in mediaFrameSource.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
            {
                //textmesh.text = string.Format("{0}x{1} {2}/{3}", f.VideoFormat.Width, f.VideoFormat.Height, f.FrameRate.Numerator, f.FrameRate.Denominator);
                if (f.VideoFormat.Width == 896 && f.VideoFormat.Height == 504 && f.FrameRate.Numerator == 24)
                {
                    targetResFormat = f;
                }
            }
            if (targetResFormat != null) // Guard: only set the format if the 896x504@24 target was actually found
            {
                await mediaFrameSource.SetFormatAsync(targetResFormat);
            }
        }
        catch {
            //textmesh.text = "Orca2";
        }

        try {
            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Bgra8);

            frameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
            frameReader.FrameArrived   += OnFrameArrived;
        }
        catch {
            //textmesh.text = "Orca3";
        }
    }
Example #13
        public async void InitializeManager()
        {
            _helper = new OpenCVHelper();

            // Find the sources
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            var sourceGroups = allGroups.Select(g => new
            {
                Group      = g,
                SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
            }).Where(g => g.SourceInfo != null).ToList();

            if (sourceGroups.Count == 0)
            {
                // No camera sources found
                return;
            }
            var selectedSource = sourceGroups.FirstOrDefault();

            // Initialize MediaCapture
            try
            {
                await InitializeMediaCaptureAsync(selectedSource.Group);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
                await CleanupMediaCaptureAsync();

                return;
            }

            // Create the frame reader
            MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
            var format = frameSource.SupportedFormats.OrderByDescending(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            await frameSource.SetFormatAsync(format);

            BitmapSize size = new BitmapSize() // Match the frame reader output size to the selected format
            {
                Height = format.VideoFormat.Height,
                Width  = format.VideoFormat.Width
            };

            _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);

            _reader.FrameArrived += HandleFrameArrive;
            await _reader.StartAsync();
        }
Example #14
        public async void Initialize()
        {
            var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var desiredGroupInfo = sourceGroups.Select(sourceGroup => new
            {
                Group = sourceGroup,
                Info  = sourceGroup.SourceInfos.FirstOrDefault(info => info.MediaStreamType == MediaStreamType.VideoPreview && info.SourceKind == MediaFrameSourceKind.Color)
            }).FirstOrDefault(groupInfo => groupInfo.Info != null);

            if (desiredGroupInfo == null)
            {
                return;
            }

            // The mediaCapture field is not constructed anywhere in this snippet; create it before initializing.
            mediaCapture = new MediaCapture();

            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = desiredGroupInfo.Group,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                MemoryPreference     = MediaCaptureMemoryPreference.Auto,
                StreamingCaptureMode = StreamingCaptureMode.Video
            };
            await mediaCapture.InitializeAsync(settings);

            var frameSource     = mediaCapture.FrameSources[desiredGroupInfo.Info.Id];
            var preferredFormat = frameSource.SupportedFormats
                                  .OrderByDescending(format => format.VideoFormat.Width)
                                  .ThenByDescending(format => (float)format.FrameRate.Numerator / format.FrameRate.Denominator)
                                  .FirstOrDefault();

            if (preferredFormat == null)
            {
                return;
            }
            await frameSource.SetFormatAsync(preferredFormat);

            var cameraController = frameSource.Controller.VideoDeviceController;

            cameraController.WhiteBalance.TrySetAuto(false);
            cameraController.WhiteBalance.TrySetValue(2600);
            cameraController.Exposure.TrySetAuto(false);
            cameraController.Exposure.TrySetValue(5.0);
            cameraController.BacklightCompensation.TrySetAuto(false);
            cameraController.DesiredOptimization = Windows.Media.Devices.MediaCaptureOptimization.Quality;
            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);

            mediaFrameReader.FrameArrived += OnFrameArrived;
            await mediaFrameReader.StartAsync();
        }
 async Task initializeMediaCapterAsync(MediaFrameSourceGroup sourceGroup)
 {
     if (mediaCapture != null)
     {
         return;
     }
     mediaCapture = new MediaCapture();
     var settings = new MediaCaptureInitializationSettings {
         SourceGroup          = sourceGroup,
         SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
         StreamingCaptureMode = StreamingCaptureMode.Video,
         MemoryPreference     = MediaCaptureMemoryPreference.Cpu
     };
     await mediaCapture.InitializeAsync(settings);
 }
Example #16
        /// <summary>
        /// Retrieves the <see cref="MediaFrameSourceGroup">source group</see> using the display name of the camera.
        /// Defaults to the world-facing color camera of the HoloLens 2.
        /// </summary>
        private async Task <MediaFrameSourceGroup> SelectGroup(string displayName = locatableCameraDisplayName)
        {
            IReadOnlyList <MediaFrameSourceGroup> groups = await MediaFrameSourceGroup.FindAllAsync();

            foreach (MediaFrameSourceGroup group in groups)
            {
                if (group.DisplayName != displayName)
                {
                    continue;
                }
                _logger.Log($"Selected group {group} on {_device}");
                return(group);
            }
            throw new ArgumentException($"No source group for display name {displayName} found.");
        }
        async Task StartHoloLensMediaFrameSourceGroups()
        {
#if ENABLE_WINMD_SUPPORT
            // Plugin doesn't work in the Unity editor
            myText.text = "Initalizing MediaFrameSourceGroups...";

            // PV
            _sensorFrameStreamerPv = new SensorFrameStreamer();
            _sensorType            = (SensorType)sensorTypePv;
            _sensorFrameStreamerPv.Enable(_sensorType);

            // Research streams
            _sensorFrameStreamerResearch = new SensorFrameStreamer();
            _sensorTypeResearch          = (SensorType)sensorTypeShortDepth;
            _sensorFrameStreamerResearch.Enable(_sensorTypeResearch);

            // Spatial perception
            _spatialPerception = new SpatialPerception();

            // Enable media frame source groups
            // PV
            _pvMediaFrameSourceGroup = new MediaFrameSourceGroup(
                MediaFrameSourceGroupType.PhotoVideoCamera,
                _spatialPerception,
                _sensorFrameStreamerPv);
            _pvMediaFrameSourceGroup.Enable(_sensorType);

            // ToF Depth
            _shortDepthMediaFrameSourceGroup = new MediaFrameSourceGroup(
                MediaFrameSourceGroupType.HoloLensResearchModeSensors,
                _spatialPerception,
                _sensorFrameStreamerResearch);
            _shortDepthMediaFrameSourceGroup.Enable(_sensorTypeResearch);

            // Start media frame source groups
            myText.text = "Starting MediaFrameSourceGroups...";

            // Photo video
            await _pvMediaFrameSourceGroup.StartAsync();

            // ToF Depth
            await _shortDepthMediaFrameSourceGroup.StartAsync();

            _mediaFrameSourceGroupsStarted = true;

            myText.text = "MediaFrameSourceGroups started...";
#endif
        }
        /// <summary>
        /// Gets a list of <see cref="MediaFrameSourceGroup"/> available for video preview or video record.
        /// </summary>
        /// <returns>A <see cref="MediaFrameSourceGroup"/> list.</returns>
        public static async Task <IReadOnlyList <MediaFrameSourceGroup> > GetFrameSourceGroupsAsync()
        {
            if (_frameSourceGroups == null)
            {
                var videoDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);

                var groups = await MediaFrameSourceGroup.FindAllAsync();

                // Keep only groups that expose a color video preview or video record source and whose devices are all known video capture devices.
                _frameSourceGroups = groups.Where(g => g.SourceInfos.Any(s => s.SourceKind == MediaFrameSourceKind.Color &&
                                                                         (s.MediaStreamType == MediaStreamType.VideoPreview || s.MediaStreamType == MediaStreamType.VideoRecord)) &&
                                                  g.SourceInfos.All(sourceInfo => videoDevices.Any(vd => vd.Id == sourceInfo.DeviceInformation.Id))).ToList();
            }

            return(_frameSourceGroups);
        }
Example #19
        public static IAsyncOperation <Sensor> GetDefaultAsync()
        {
            return(Task.Run(async() =>
            {
                // todo: remove custom from required streams - Xbox doesn't expose it yet
                var cameraSensorGroups = await MediaFrameSourceGroup.FindAllAsync();
                var sourceGroup = cameraSensorGroups.FirstOrDefault(
                    group =>
                    group.SourceInfos.Any(si => si.SourceKind == MediaFrameSourceKind.Color) &&
                    group.SourceInfos.Any(si => si.SourceKind == MediaFrameSourceKind.Depth) &&
                    group.SourceInfos.Any(si => si.SourceKind == MediaFrameSourceKind.Infrared) &&
                    group.SourceInfos.Any(si => si.SourceKind == MediaFrameSourceKind.Custom));

                return sourceGroup == null ? null : new Sensor(sourceGroup);
            }).AsAsyncOperation());
        }
Example #20
        public async Task <bool> PopulateAsync(
            Func <MediaFrameSourceInfo, bool> sourceInfoFilter,
            Func <IEnumerable <MediaFrameSourceGroup>, MediaFrameSourceGroup> sourceGroupSelector)
        {
            var mediaFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var candidates = mediaFrameSourceGroups.Where(
                group => group.SourceInfos.Any(sourceInfoFilter));

            this.FrameSourceGroup = sourceGroupSelector(candidates);

            this.FrameSourceInfo = this.FrameSourceGroup?.SourceInfos.FirstOrDefault(
                sourceInfoFilter);

            return((this.FrameSourceGroup != null) && (this.FrameSourceInfo != null));
        }
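As an illustration, the populate helper above could be called with a filter for color preview/record sources and a selector that simply takes the first candidate group (hypothetical call site):

        public async Task<bool> PopulateWithFirstColorGroupAsync()
        {
            return await this.PopulateAsync(
                sourceInfo => sourceInfo.SourceKind == MediaFrameSourceKind.Color &&
                              (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview ||
                               sourceInfo.MediaStreamType == MediaStreamType.VideoRecord),
                candidates => candidates.FirstOrDefault());
        }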
Example #21
        private async void FrameReaderThread()
        {
            var list = await MediaFrameSourceGroup.FindAllAsync();

            MediaFrameSourceInfo  sourceInfo  = null;
            MediaFrameSourceGroup sourceGroup = null;

            foreach (var group in list)
            {
                if (group.SourceInfos.Count == 2)
                {
                    var tempSourceInfo = group.SourceInfos.FirstOrDefault(s =>
                                                                          s.SourceKind == MediaFrameSourceKind.Infrared &&
                                                                          (s.MediaStreamType == MediaStreamType.VideoPreview ||
                                                                           s.MediaStreamType == MediaStreamType.VideoRecord));
                    if (tempSourceInfo != null)
                    {
                        sourceInfo  = tempSourceInfo;
                        sourceGroup = group;
                        break;
                    }
                }
            }

            if (sourceGroup == null || sourceInfo == null)
            {
                return;
            }

            var settings = new MediaCaptureInitializationSettings();

            settings.SourceGroup      = sourceGroup;
            settings.SharingMode      = MediaCaptureSharingMode.SharedReadOnly;
            settings.MemoryPreference = MediaCaptureMemoryPreference.Cpu;
            await _mc.InitializeAsync(settings);

            var irSource = _mc.FrameSources[sourceInfo.Id];

            imageFrameSize.Width  = (int)irSource.CurrentFormat.VideoFormat.Width;
            imageFrameSize.Height = (int)irSource.CurrentFormat.VideoFormat.Height;

            _iRFrameReader = await _mc.CreateFrameReaderAsync(irSource);

            _iRFrameReader.FrameArrived += IrReader_FrameArrived;

            await _iRFrameReader.StartAsync();
        }
        /// <summary>
        /// Initializes the MediaCapture object with the given source group.
        /// </summary>
        /// <param name="sourceGroup">SourceGroup with which to initialize.</param>
        private async Task InitializeMediaCaptureAsync(MediaFrameSourceGroup sourceGroup)
        {
            if (mediaCapture != null)
            {
                return;
            }

            mediaCapture = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = sourceGroup,
                SharingMode          = MediaCaptureSharingMode.ExclusiveControl,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);
        }
        private async void SettingSensorData(int deviceNum, int cameraNum)
        {
            if (mediaFrameReader != null)
            {
                await mediaFrameReader.StopAsync();

                mediaFrameReader.FrameArrived -= FrameArrived;
                mediaFrameReader.Dispose();
                mediaFrameReader = null;
            }

            var mediaFrameSourceGroupList = await MediaFrameSourceGroup.FindAllAsync();

            var mediaFrameSourceGroup = mediaFrameSourceGroupList[deviceNum];
            var mediaFrameSourceInfo  = mediaFrameSourceGroup.SourceInfos[cameraNum];
            MediaFrameSourceKind kind = mediaFrameSourceInfo.SourceKind;
            var mediaCapture          = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup          = mediaFrameSourceGroup,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
            };

            try
            {
                await mediaCapture.InitializeAsync(settings);

                var mediaFrameSource = mediaCapture.FrameSources[mediaFrameSourceInfo.Id];
                if (kind == MediaFrameSourceKind.Color)
                {
                    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Argb32);
                }
                else
                {
                    mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
                }
                mediaFrameReader.FrameArrived += FrameArrived;
                await mediaFrameReader.StartAsync();
            }
            catch (Exception)
            {
                throw;
            }
        }
Example #24
        /// <summary>
        /// List the available media frame source information in the debug output
        /// </summary>
        public static async void ListMediaFrameSourceInfo()
        {
            IReadOnlyList <MediaFrameSourceGroup> allFrameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            //Print all devices
            foreach (MediaFrameSourceGroup group in allFrameSourceGroups)
            {
                Debug.WriteLine("-------------------------------------------------------------");
                Debug.WriteLine($"Group: {group.DisplayName}");
                foreach (MediaFrameSourceInfo info in group.SourceInfos)
                {
                    Debug.WriteLine($"SourceKind : {info.SourceKind}");
                    Debug.WriteLine($"Device : {info.DeviceInformation.Name}:{info.DeviceInformation.Id}");
                    Debug.WriteLine("");
                }
                Debug.WriteLine("-------------------------------------------------------------\n");
            }
        }
Example #25
    public async void StartDataStream()
    {
#if UNITY_EDITOR
        //DebugToServer.Log.Send("Camera test is only for Hololens.");
#endif

#if !UNITY_EDITOR
        var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

        GetStreamingGroupsAndInfos(frameSourceGroups);
        await StartWebcamCapture();

        //await StartDepthFarCapture();
        //await StartDepthNearCapture();
        //await StartFourCamerasCapture();
        isReadyToSend = true;
#endif
    }
Example #26
        public async Task Initialize()
        {
            // Find the sources
            var allGroups = await MediaFrameSourceGroup.FindAllAsync();

            var sourceGroups = allGroups.Select(g => new
            {
                Group      = g,
                SourceInfo = g.SourceInfos.FirstOrDefault(i => i.SourceKind == MediaFrameSourceKind.Color)
            }).Where(g => g.SourceInfo != null).ToList();

            if (sourceGroups.Count == 0)
            {
                // No camera sources found
                return;
            }
            var selectedSource = sourceGroups.FirstOrDefault();

            // Initialize MediaCapture
            try
            {
                await InitializeMediaCaptureAsync(selectedSource.Group);
            }
            catch (Exception exception)
            {
                Debug.WriteLine("MediaCapture initialization error: " + exception.Message);
                await Cleanup();

                return;
            }

            // Create the frame reader
            MediaFrameSource frameSource = _mediaCapture.FrameSources[selectedSource.SourceInfo.Id];
            BitmapSize       size        = new BitmapSize() // Choose a lower resolution to make the image processing more performant
            {
                Height = IMAGE_ROWS,
                Width  = IMAGE_COLS
            };

            _reader = await _mediaCapture.CreateFrameReaderAsync(frameSource, MediaEncodingSubtypes.Bgra8, size);

            _reader.FrameArrived += ColorFrameReader_FrameArrivedAsync;
            await _reader.StartAsync();
        }
Example #27
        private async void GetRGB32PreferredFormat()
        {
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var selectedGroupObjects = frameSourceGroups.Select(group =>
                                                                new
            {
                sourceGroup     = group,
                colorSourceInfo = group.SourceInfos.FirstOrDefault((sourceInfo) =>
                {
                    return(sourceInfo.MediaStreamType == MediaStreamType.VideoPreview &&
                           sourceInfo.SourceKind == MediaFrameSourceKind.Color &&
                           sourceInfo.DeviceInformation?.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front);
                })
            }).Where(t => t.colorSourceInfo != null)
                                       .FirstOrDefault();

            MediaFrameSourceGroup selectedGroup   = selectedGroupObjects?.sourceGroup;
            MediaFrameSourceInfo  colorSourceInfo = selectedGroupObjects?.colorSourceInfo;

            if (selectedGroup == null)
            {
                return;
            }

            // <SnippetGetPreferredFormat>
            var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
            var preferredFormat  = colorFrameSource.SupportedFormats.Where(format =>
            {
                return(format.VideoFormat.Width >= 1080 &&
                       format.Subtype == MediaEncodingSubtypes.Argb32);
            }).FirstOrDefault();

            if (preferredFormat == null)
            {
                // Our desired format is not supported
                return;
            }

            await colorFrameSource.SetFormatAsync(preferredFormat);

            // </SnippetGetPreferredFormat>
        }
Example #28
        private async Task _AddAcceptableSourceGroupAsync(string deviceId)
        {
            // Accept any source group that exposes both an infrared and a color camera
            var sourceGroup = await MediaFrameSourceGroup.FromIdAsync(deviceId);

            if (sourceGroup != null && (sourceGroup.SourceInfos.Count > 1) &&
                sourceGroup.SourceInfos.Any(source =>
                                            source.SourceKind == MediaFrameSourceKind.Infrared))
            {
                if (sourceGroup.SourceInfos.Any(source =>
                                                source.SourceKind == MediaFrameSourceKind.Color))
                {
                    _sourceGroups[deviceId] = sourceGroup;
                    if (!_isOpened)
                    {
                        _displayAutoEvent.Set();
                        _isOpened = true;
                    }
                }
            }
        }
Example #29
        async Task InitializeMediaCaptureAsync()
        {
            if (MediaCapture != null)
            {
                return;
            }

            var sourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            var settings = new MediaCaptureInitializationSettings {
                SourceGroup          = sourceGroups[0],
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,          // This media capture can share streaming with other apps.
                StreamingCaptureMode = StreamingCaptureMode.Video,                      // Only stream video and don't initialize audio capture devices.
                MemoryPreference     = MediaCaptureMemoryPreference.Cpu                 // Set to CPU to ensure frames always contain CPU SoftwareBitmap images instead of preferring GPU D3DSurface images.
            };

            MediaCapture = new MediaCapture();
            await MediaCapture.InitializeAsync(settings);

            Logger.Log($"Successfully initialized MediaCapture in shared mode using MediaFrameSourceGroup {sourceGroups[0].DisplayName}.");
        }
        private async Task GetMediaFrameGroup()
        {
            var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();

            foreach (MediaFrameSourceGroup sourceGroup in frameSourceGroups)
            {
                foreach (MediaFrameSourceInfo sourceInfo in sourceGroup.SourceInfos)
                {
                    if (sourceInfo.MediaStreamType == MediaStreamType.VideoPreview && sourceInfo.SourceKind == MediaFrameSourceKind.Color)
                    {
                        colorSourceInfo = sourceInfo;
                        break;
                    }
                }
                if (colorSourceInfo != null)
                {
                    selectedGroup = sourceGroup;
                    break;
                }
            }
        }
Example #31
    private async void InitSensor()
    {
        var mediaFrameSourceGroupList = await MediaFrameSourceGroup.FindAllAsync();

        var mediaFrameSourceGroup = mediaFrameSourceGroupList[0];
        var mediaFrameSourceInfo  = mediaFrameSourceGroup.SourceInfos[0];
        MediaFrameSourceKind kind = mediaFrameSourceInfo.SourceKind;
        var mediaCapture          = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings()
        {
            SourceGroup          = mediaFrameSourceGroup,
            SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
            StreamingCaptureMode = StreamingCaptureMode.Video,
            MemoryPreference     = MediaCaptureMemoryPreference.Cpu,
        };

        try
        {
            await mediaCapture.InitializeAsync(settings);

            var mediaFrameSource = mediaCapture.FrameSources[mediaFrameSourceInfo.Id];
            MediaFrameReader mediaframereader;
            if (kind == MediaFrameSourceKind.Color)
            {
                mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Argb32);
            }
            else
            {
                mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
            }
            //var mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
            mediaframereader.FrameArrived += FrameArrived;
            await mediaframereader.StartAsync();
        }
        catch (Exception e)
        {
            UnityEngine.WSA.Application.InvokeOnAppThread(() => { Debug.Log(e); }, true);
        }
    }
        /// <summary>
        /// Initializes the MediaCapture object with the given source group.
        /// </summary>
        /// <param name="sourceGroup">SourceGroup with which to initialize.</param>
        private async Task InitializeMediaCaptureAsync(MediaFrameSourceGroup sourceGroup)
        {
            if (_mediaCapture != null)
            {
                return;
            }

            // Initialize mediacapture with the source group.
            _mediaCapture = new MediaCapture();
            var settings = new MediaCaptureInitializationSettings
            {
                SourceGroup = sourceGroup,

                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,

                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,

                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };

            await _mediaCapture.InitializeAsync(settings);
            _logger.Log("MediaCapture is successfully initialized in shared mode.");
        }