private CameraType GetCameraType(MediaFrameSourceKind sourceKind)
{
    CameraType cameraType = CameraType.Infrared;

    switch (sourceKind)
    {
        case MediaFrameSourceKind.Image:
        case MediaFrameSourceKind.Color:
            cameraType = CameraType.Color;
            break;
        case MediaFrameSourceKind.Depth:
            cameraType = CameraType.Depth;
            break;
        case MediaFrameSourceKind.Infrared:
            cameraType = CameraType.Infrared;
            break;
    }
    return cameraType;
}
public static uint GetBytesPerPixel(MediaFrameSourceKind kind)
{
    uint bytesPerPixel = 0;

    switch (kind)
    {
        case MediaFrameSourceKind.Depth:
            bytesPerPixel = 2;
            break;
        case MediaFrameSourceKind.Infrared:
            bytesPerPixel = 1;
            break;
        case MediaFrameSourceKind.Color:
            if (Config.convertColorToGrayscale)
            {
                bytesPerPixel = 1;
            }
            else
            {
                bytesPerPixel = 4;
            }
            break;
        default:
            Debug.Assert(false);
            break;
    }
    return bytesPerPixel;
}
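A helper like this is typically used to size the raw buffer before copying pixel data out of a frame. A minimal sketch, assuming the width and height come from the source's current format (the variable names here are illustrative, not part of the snippet above):

// Sketch only: width/height are assumed to come from something like
// frameSource.CurrentFormat.VideoFormat.Width / .Height.
uint width = 640, height = 480;
var kind = MediaFrameSourceKind.Depth;

// Depth is 2 bytes per pixel here, so this allocates width * height * 2 bytes.
var frameBuffer = new byte[width * height * GetBytesPerPixel(kind)];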
/// <summary>
/// Returns the latest IR or color frame in base64 format.
/// </summary>
/// <param name="type">Frame type. Either Color or Infrared.</param>
/// <param name="promise"></param>
/// <returns></returns>
public async Task TakePictureAsync(MediaFrameSourceKind type, IReactPromise<JSValueObject> promise)
{
    var base64Frame = "";

    if (type == MediaFrameSourceKind.Color && colorFrame != null)
    {
        base64Frame = await ConvertToBase64(colorFrame);
    }
    if (type == MediaFrameSourceKind.Infrared && irFrame != null)
    {
        base64Frame = await ConvertToBase64(irFrame);
    }

    if (string.IsNullOrEmpty(base64Frame))
    {
        var err = new ReactError();
        err.Message = "Error taking picture.";
        promise.Reject(err);
        return; // Without this, the promise would also be resolved after being rejected.
    }

    var obj = new JSValueObject();
    obj.Add("base64", base64Frame);
    promise.Resolve(obj);
}
public static void SetHeaderValues(
    byte[] buffer,
    Int32 totalSize,
    MediaFrameSourceKind sourceKind,
    Int32 width,
    Int32 height)
{
    int offset = 0;

    // Size
    CopyIntToBuffer(buffer, 0, totalSize);
    offset += Marshal.SizeOf<Int32>();

    // Message Type
    CopyIntToBuffer(buffer, offset, MessageConstants.FrameMessage);
    offset += Marshal.SizeOf<Int32>();

    // Source Kind
    buffer[offset] = (byte)sourceKind;
    offset += Marshal.SizeOf<Byte>();

    // Width
    BufferHelper.CopyIntToBuffer(buffer, offset, width);
    offset += Marshal.SizeOf<Int32>();

    // Height
    BufferHelper.CopyIntToBuffer(buffer, offset, height);
}
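On the receiving side the same layout (total size, message type, source kind, width, height) has to be read back in the same order. A hedged sketch of a parser, assuming little-endian integers on the wire and a hypothetical FrameHeader type that is not part of the snippet above:

// Sketch only: FrameHeader is a hypothetical type; the wire format is assumed to be
// little-endian, matching BitConverter on typical Windows targets.
struct FrameHeader
{
    public int TotalSize;
    public int MessageType;
    public MediaFrameSourceKind SourceKind;
    public int Width;
    public int Height;
}

static FrameHeader ParseHeader(byte[] buffer)
{
    int offset = 0;
    var header = new FrameHeader();

    header.TotalSize = BitConverter.ToInt32(buffer, offset);
    offset += sizeof(int);

    header.MessageType = BitConverter.ToInt32(buffer, offset);
    offset += sizeof(int);

    header.SourceKind = (MediaFrameSourceKind)buffer[offset];
    offset += sizeof(byte);

    header.Width = BitConverter.ToInt32(buffer, offset);
    offset += sizeof(int);

    header.Height = BitConverter.ToInt32(buffer, offset);
    return header;
}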
public static string GetCameraSourceEncoding(MediaFrameSourceKind kind)
{
    string format = "bgra8";

    switch (kind)
    {
        case MediaFrameSourceKind.Depth:
            format = "mono16";
            break;
        case MediaFrameSourceKind.Infrared:
            format = "mono8";
            break;
        case MediaFrameSourceKind.Color:
            if (Config.convertColorToGrayscale)
            {
                format = "mono8";
            }
            else
            {
                format = "bgra8";
            }
            break;
        default:
            Debug.Assert(false);
            break;
    }
    return format;
}
public static int GetBytesPerPixelForSourceKind(MediaFrameSourceKind sourceKind)
{
    var bytesPerPixel = 0;

    // I make some assumptions here around the size of the pixel data on
    // a per-SourceKind basis. These could easily break/be wrong over
    // time if Color, for instance, suddenly came in as a 2-byte array or
    // Depth jumped to 4 bytes.
    switch (sourceKind)
    {
        case MediaFrameSourceKind.Color:
            bytesPerPixel = 4;
            break;
        case MediaFrameSourceKind.Infrared:
            bytesPerPixel = 1;
            break;
        case MediaFrameSourceKind.Depth:
            bytesPerPixel = 2;
            break;
        default:
            Debug.Assert(false);
            break;
    }
    return bytesPerPixel;
}
//#############################################################################################
//###################################    private    ##########################################
//#############################################################################################

/// <summary>
/// Find a source corresponding to the parameters of <see cref="Init(MediaStreamType, MediaFrameSourceKind)"/>.
/// </summary>
/// <param name="streamType"> MediaStreamType object property </param>
/// <param name="sourceKind"> MediaFrameSourceKind object property </param>
private async Task FindSource(MediaStreamType streamType, MediaFrameSourceKind sourceKind)
{
    var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync(); // list available sources

    // indicate that no source is set yet
    _selectedGroup = null;
    _selectedSourceInfo = null;

    foreach (var sourceGroup in frameSourceGroups)
    {
        foreach (var sourceInfo in sourceGroup.SourceInfos)
        {
            // if a source matches the arguments
            if (sourceInfo.MediaStreamType == streamType && sourceInfo.SourceKind == sourceKind)
            {
                _selectedSourceInfo = sourceInfo;
                break;
            }
        }
        if (_selectedSourceInfo != null)
        {
            _selectedGroup = sourceGroup;
            break;
        }
    }

    // in case no source was found
    if (_selectedSourceInfo == null)
    {
        System.Diagnostics.Debug.WriteLine("Source not found");
    }
}
/// <summary>
/// Determines the subtype to request from the MediaFrameReader that will result in
/// a frame that can be rendered by ConvertToDisplayableImage.
/// </summary>
/// <returns>Subtype string to request, or null if the subtype is not renderable.</returns>
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
    // Note that media encoding subtypes may differ in case.
    // https://docs.microsoft.com/en-us/uwp/api/Windows.Media.MediaProperties.MediaEncodingSubtypes
    string subtype = format.Subtype;

    switch (kind)
    {
        // For color sources, we accept anything and request that it be converted to Bgra8.
        case MediaFrameSourceKind.Color:
            return MediaEncodingSubtypes.Bgra8;

        // The only depth format we can render is D16.
        case MediaFrameSourceKind.Depth:
            return String.Equals(subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase)
                ? subtype : null;

        // The only infrared formats we can render are L8 and L16.
        case MediaFrameSourceKind.Infrared:
            return (String.Equals(subtype, MediaEncodingSubtypes.L8, StringComparison.OrdinalIgnoreCase) ||
                    String.Equals(subtype, MediaEncodingSubtypes.L16, StringComparison.OrdinalIgnoreCase))
                ? subtype : null;

        // No other source kinds are supported by this class.
        default:
            return null;
    }
}
// Used an explicit tuple here as I'm in C# 6.0
async Task<Tuple<MediaCapture, MediaFrameSource>> GetMediaCaptureForDescriptionAsync(
    MediaFrameSourceKind sourceKind,
    int width,
    int height,
    int frameRate,
    string[] bitmapFormats = null)
{
    MediaCapture mediaCapture = null;
    MediaFrameSource frameSource = null;

    var allSources = await MediaFrameSourceGroup.FindAllAsync();

    // Ignore frame rate here on the description as both depth streams seem to tell me they are
    // 30fps whereas I don't think they are (from the docs), so I leave that to query later on.
    // NB: LastOrDefault here is a NASTY, NASTY hack - just my way of getting hold of the
    // *LAST* depth stream rather than the *FIRST* because I'm assuming that the *LAST*
    // one is the longer-distance stream rather than the short-distance stream.
    // I should fix this and find a better way of choosing the right depth stream rather
    // than relying on some ordering that's not likely to always work!
    var sourceInfo =
        allSources.SelectMany(group => group.SourceInfos)
            .LastOrDefault(
                si =>
                    (si.MediaStreamType == MediaStreamType.VideoRecord) &&
                    (si.SourceKind == sourceKind) &&
                    (si.VideoProfileMediaDescription.Any(
                        desc =>
                            desc.Width == width &&
                            desc.Height == height &&
                            desc.FrameRate == frameRate)));

    if (sourceInfo != null)
    {
        var sourceGroup = sourceInfo.SourceGroup;

        mediaCapture = new MediaCapture();

        await mediaCapture.InitializeAsync(
            new MediaCaptureInitializationSettings()
            {
                // I want software bitmaps
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                SourceGroup = sourceGroup,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
        );
        frameSource = mediaCapture.FrameSources[sourceInfo.Id];

        var selectedFormat = frameSource.SupportedFormats.First(
            format =>
                format.VideoFormat.Width == width &&
                format.VideoFormat.Height == height &&
                format.FrameRate.Numerator / format.FrameRate.Denominator == frameRate &&
                ((bitmapFormats == null) || (bitmapFormats.Contains(format.Subtype.ToLower()))));

        await frameSource.SetFormatAsync(selectedFormat);
    }
    return Tuple.Create(mediaCapture, frameSource);
}
async Task CreateMediaCaptureAndReadersAsync()
{
    var frameSourceKinds = new MediaFrameSourceKind[]
    {
        MediaFrameSourceKind.Depth,
        MediaFrameSourceKind.Infrared,
        MediaFrameSourceKind.Color
    };

    // Get me the first source group that does Depth+Infrared.
    var firstSourceGroupWithSourceKinds =
        await MediaSourceFinder.FindGroupsWithAllSourceKindsAsync(frameSourceKinds);

    if (firstSourceGroupWithSourceKinds != null)
    {
        this.mediaCapture = new MediaCapture();

        // Note: This will blow up unless I have the restricted capability named
        // 'perceptionSensorsExperimental' in my .appx manifest, and I think that
        // being a 'restricted' capability means that any app using it could not
        // go into the Store.
        // Note 2: I've gone with Cpu here rather than Gpu because I ultimately
        // want a byte[] that I can send down a socket. If I go with Gpu then
        // I get an IDirect3DSurface but (AFAIK) there's not much of a way
        // to get to a byte[] from that other than to copy it into a
        // SoftwareBitmap and then to copy that SoftwareBitmap into a byte[],
        // which I don't really want to do. Hence the Cpu choice here.
        await this.mediaCapture.InitializeAsync(
            new MediaCaptureInitializationSettings()
            {
                SourceGroup = firstSourceGroupWithSourceKinds,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                StreamingCaptureMode = StreamingCaptureMode.Video
            }
        );

        var sources = this.mediaCapture.FrameSources
            .Where(fs => frameSourceKinds.Contains(fs.Value.Info.SourceKind))
            .Select(fs => fs.Value);

        // Build a description of what we have for our client to receive.
        this.BuildFrameSourceDescriptionMessageBuffer(sources);

        // Note: I originally wanted to open a multi-source frame reader with all frame
        // sources specified, but that blew up on me and so, for the moment, I am making
        // multiple readers.
        foreach (var source in sources)
        {
            var reader = new MediaFrameReaderHelper(source.Info, this.mediaCapture);
            this.readers.Add(reader);
            await reader.StartAsync();
        }
        this.currentReaderIndex = 0;
    }
}
public mtMediaSourceReader(
    MediaCapture capture,
    MediaFrameSourceKind mediaSourceKind,
    Action<MediaFrameReader> onFrameArrived,
    Func<MediaFrameSource, bool> additionalSourceCriteria = null)
{
    this.mediaCapture = capture;
    this.mediaSourceKind = mediaSourceKind;
    this.additionalSourceCriteria = additionalSourceCriteria;
    this.onFrameArrived = onFrameArrived;
}
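Only the constructor is shown here, so the following is a hedged sketch of how such a reader might be wired up: an already-initialized MediaCapture, a frame-arrived callback, and an optional predicate narrowing the source. The class presumably exposes its own initialize/start method, which is not shown in this snippet.

// Sketch only: assumes 'mediaCapture' has already been initialized elsewhere; the callback
// and criteria only use the public UWP types from the constructor signature above.
var depthReader = new mtMediaSourceReader(
    mediaCapture,
    MediaFrameSourceKind.Depth,
    onFrameArrived: reader =>
    {
        // TryAcquireLatestFrame can return null if no new frame is available yet.
        using (var frame = reader.TryAcquireLatestFrame())
        {
            if (frame != null)
            {
                // process frame.VideoMediaFrame here
            }
        }
    },
    additionalSourceCriteria: source => source.Info.MediaStreamType == MediaStreamType.VideoRecord);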
/// <summary>
/// Takes a picture from viewTag's corresponding camera view.
/// </summary>
/// <param name="viewTag"></param>
/// <param name="type"></param>
/// <param name="promise"></param>
/// <returns></returns>
public static async Task TakePicture(int viewTag, MediaFrameSourceKind type, IReactPromise<JSValueObject> promise)
{
    if (Views.ContainsKey(viewTag))
    {
        await Views[viewTag].TakePictureAsync(type, promise);
    }
    else
    {
        ReactError err = new ReactError();
        err.Message = "Camera view not found.";
        promise.Reject(err);
    }
}
public static string GetSubtypeForFrameReader(MediaFrameSourceKind kind, MediaFrameFormat format)
{
    switch (kind)
    {
        case MediaFrameSourceKind.Color:
            return MediaEncodingSubtypes.Bgra8;
        case MediaFrameSourceKind.Depth:
            return String.Equals(format.Subtype, MediaEncodingSubtypes.D16, StringComparison.OrdinalIgnoreCase)
                ? format.Subtype : null;
        default:
            return null;
    }
}
private async void SettingSensorData(int deviceNum, int cameraNum)
{
    if (mediaFrameReader != null)
    {
        await mediaFrameReader.StopAsync();
        mediaFrameReader.FrameArrived -= FrameArrived;
        mediaFrameReader.Dispose();
        mediaFrameReader = null;
    }

    var mediaFrameSourceGroupList = await MediaFrameSourceGroup.FindAllAsync();
    var mediaFrameSourceGroup = mediaFrameSourceGroupList[deviceNum];
    var mediaFrameSourceInfo = mediaFrameSourceGroup.SourceInfos[cameraNum];

    MediaFrameSourceKind kind = mediaFrameSourceInfo.SourceKind;

    var mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = mediaFrameSourceGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    };

    try
    {
        await mediaCapture.InitializeAsync(settings);

        var mediaFrameSource = mediaCapture.FrameSources[mediaFrameSourceInfo.Id];

        if (kind == MediaFrameSourceKind.Color)
        {
            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Argb32);
        }
        else
        {
            mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
        }

        mediaFrameReader.FrameArrived += FrameArrived;
        await mediaFrameReader.StartAsync();
    }
    catch (Exception)
    {
        throw;
    }
}
private async void InitSensor()
{
    var mediaFrameSourceGroupList = await MediaFrameSourceGroup.FindAllAsync();
    var mediaFrameSourceGroup = mediaFrameSourceGroupList[0];
    var mediaFrameSourceInfo = mediaFrameSourceGroup.SourceInfos[0];

    MediaFrameSourceKind kind = mediaFrameSourceInfo.SourceKind;

    var mediaCapture = new MediaCapture();
    var settings = new MediaCaptureInitializationSettings()
    {
        SourceGroup = mediaFrameSourceGroup,
        SharingMode = MediaCaptureSharingMode.SharedReadOnly,
        StreamingCaptureMode = StreamingCaptureMode.Video,
        MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    };

    try
    {
        await mediaCapture.InitializeAsync(settings);

        var mediaFrameSource = mediaCapture.FrameSources[mediaFrameSourceInfo.Id];

        MediaFrameReader mediaframereader;
        if (kind == MediaFrameSourceKind.Color)
        {
            mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, MediaEncodingSubtypes.Argb32);
        }
        else
        {
            mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);
        }
        //var mediaframereader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSource, mediaFrameSource.CurrentFormat.Subtype);

        mediaframereader.FrameArrived += FrameArrived;
        await mediaframereader.StartAsync();
    }
    catch (Exception e)
    {
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            Debug.Log(e);
        }, true);
    }
}
public async Task GetDataSources()
{
    // If pose is supported and selected, add the pose source
    //var ver = Windows.System.Profile.AnalyticsInfo.VersionInfo.DeviceFamily;
    //if (ver == "Windows.Holographic")
    //{
    //    bool isSelected;
    //    Config.SourceSelectionDictionary.TryGetValue(Config.Pose, out isSelected);
    //    if (isSelected)
    //    {
    //        DataSources.Add(new PoseSource(ref RosConnector, ref SharedTimer)
    //        {
    //            SourceName = Config.Pose,
    //            PublishPeriod = 1 / Config.HololensPoseFPS
    //        });
    //    }
    //}

    // Check for any available cameras
    var possibleSourceKinds = new MediaFrameSourceKind[]
    {
        MediaFrameSourceKind.Depth,
        MediaFrameSourceKind.Infrared,
        MediaFrameSourceKind.Color
    };

    var groups = await MediaFrameSourceGroup.FindAllAsync();

    // Find the group that exposes all of the sensors for streaming
    foreach (var g in groups)
    {
        if (g.DisplayName == "Sensor Streaming")
        {
            Debug.WriteLine("Found Sensor Streaming Source Group");

            var mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(
                new MediaCaptureInitializationSettings()
                {
                    SourceGroup = g,
                    MemoryPreference = MediaCaptureMemoryPreference.Cpu,
                    StreamingCaptureMode = StreamingCaptureMode.Video
                }
            );

            var sources = mediaCapture.FrameSources
                .Where(fs => possibleSourceKinds.Contains(fs.Value.Info.SourceKind))
                .Select(fs => fs.Value);

            foreach (var source in sources)
            {
                string originalSourceName = source.Info.Id.Substring(source.Info.Id.IndexOf("Source#"), 8);
                string assignedSourceName;
                Config.DataSourceDictionary.TryGetValue(originalSourceName, out assignedSourceName);

                bool isSelected;
                Config.SourceSelectionDictionary.TryGetValue(assignedSourceName, out isSelected);
                if (isSelected)
                {
                    double assignedFrameRate;
                    Config.FrameRateDictionary.TryGetValue(assignedSourceName, out assignedFrameRate);
                    double assignedPublishPeriod = 1.0 / (double)assignedFrameRate;
                    int originalFPS = (int)source.Info.VideoProfileMediaDescription[0].FrameRate;

                    CameraHandler handler = new CameraHandler(source.Info, mediaCapture, assignedPublishPeriod);
                    await handler.SetupReaderAsync();

                    DataSources.Add(new CameraSource(ref RosConnector, handler, assignedSourceName, assignedPublishPeriod)
                    {
                        Resolution = $"{source.Info.VideoProfileMediaDescription[0].Width} x {source.Info.VideoProfileMediaDescription[0].Height}",
                        OriginalFPS = originalFPS,
                        SourceName = assignedSourceName
                    });
                }
            }
            break;
        }
    }
}
internal ExampleMediaFrameArrivedEventArgs(MediaFrameSourceKind sourceKind)
{
    SourceKind = sourceKind;
}
//#############################################################################################

/// <summary>
/// Initialize the camera. Check the available cameras before building the FrameReader object and define the output format.
/// The main camera can be initialized using MediaStreamType.VideoRecord and MediaFrameSourceKind.Color as arguments.
/// </summary>
/// <param name="streamType">
/// Type of media that the media reader will be used for.
/// Values: VideoRecord (recommended for accessing frames), VideoPreview, Audio, Photo.
/// </param>
/// <param name="sourceKind">
/// For cameras, specifies the type of image produced.
/// Values: Color (recommended), Infrared, Depth.
/// </param>
public async Task Init(MediaStreamType streamType, MediaFrameSourceKind sourceKind)
{
    await FindSource(streamType, sourceKind);
    await InitMediaCapture();
    await SetFrameFormat();
}
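The doc comment above suggests the typical call for the main color camera. A hedged sketch follows, where CameraReader stands in for whatever class hosts Init (the real class name is not shown in this snippet):

// Sketch only: 'CameraReader' is a placeholder for the class that exposes Init above.
var camera = new CameraReader();

// Initialize the main color camera for frame access, as recommended in the doc comment.
await camera.Init(MediaStreamType.VideoRecord, MediaFrameSourceKind.Color);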
public MediaFrameReference TryGetFrameReferenceBySourceKind(MediaFrameSourceKind sourceKind)
{
    return MediaFrameReferences[sourceKind];
}
public static async Task<MediaFrameReader> GetFrameReaderAsync(MediaCapture mediaCapture, MediaFrameSourceKind kind)
{
    var sources = mediaCapture.FrameSources.Values.Where(mfs => mfs.Info.SourceKind == kind);

    MediaFrameReader frameReader = null;

    foreach (var source in sources)
    {
        string requestedSubtype = null;
        foreach (var format in source.SupportedFormats)
        {
            requestedSubtype = GetSubtypeForFrameReader(kind, format);
            if (requestedSubtype != null)
            {
                await source.SetFormatAsync(format);
                break;
            }
        }
        if (requestedSubtype == null)
        {
            continue;
        }

        frameReader = await mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
    }
    return frameReader;
}
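A hedged end-to-end sketch of how a helper like this is typically driven: initialize MediaCapture with CPU memory preference, request an infrared reader, then subscribe to FrameArrived and start. The MediaCapture/MediaFrameReader calls are standard UWP APIs; the surrounding variable names and the choice of FirstOrDefault are illustrative only.

// Sketch only: assumes the first source group actually exposes an Infrared source.
var group = (await MediaFrameSourceGroup.FindAllAsync()).FirstOrDefault();

var mediaCapture = new MediaCapture();
await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
{
    SourceGroup = group,
    MemoryPreference = MediaCaptureMemoryPreference.Cpu,
    StreamingCaptureMode = StreamingCaptureMode.Video
});

var irReader = await GetFrameReaderAsync(mediaCapture, MediaFrameSourceKind.Infrared);
if (irReader != null)
{
    irReader.FrameArrived += (sender, args) =>
    {
        using (var frame = sender.TryAcquireLatestFrame())
        {
            // frame can be null if it was already consumed; otherwise process frame.VideoMediaFrame.
        }
    };
    await irReader.StartAsync();
}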
/// <summary>
/// Switches to the next camera source and starts reading frames.
/// </summary>
private async Task PickNextMediaSourceWorkerAsync()
{
    await CleanupMediaCaptureAsync();

    var allGroups = await MediaFrameSourceGroup.FindAllAsync();
    if (allGroups.Count == 0)
    {
        _logger.Log("No source groups found.");
        return;
    }

    // Pick the next group in the array each time the Next button is clicked.
    _groupSelectionIndex = (_groupSelectionIndex + 1) % allGroups.Count;
    var selectedGroup = allGroups[_groupSelectionIndex];
    // _logger.Log($"Found {allGroups.Count} groups and selecting index [{_groupSelectionIndex}]: {selectedGroup.DisplayName}");

    try
    {
        // Initialize MediaCapture with the selected group.
        // This can raise an exception if the source no longer exists,
        // or if the source could not be initialized.
        await InitializeMediaCaptureAsync(selectedGroup);
    }
    catch (Exception exception)
    {
        _logger.Log($"MediaCapture initialization error: {exception.Message}");
        await CleanupMediaCaptureAsync();
        return;
    }

    // Set up frame readers, register event handlers and start streaming.
    var startedKinds = new HashSet<MediaFrameSourceKind>();
    foreach (MediaFrameSource source in _mediaCapture.FrameSources.Values)
    {
        MediaFrameSourceKind kind = source.Info.SourceKind;

        // Ignore this source if we already have a source of this kind.
        if (startedKinds.Contains(kind))
        {
            continue;
        }

        // Look for a format which the FrameRenderer can render.
        string requestedSubtype = null;
        foreach (MediaFrameFormat format in source.SupportedFormats)
        {
            requestedSubtype = FrameRenderer.GetSubtypeForFrameReader(kind, format);
            if (requestedSubtype != null)
            {
                // Tell the source to use the format we can render.
                await source.SetFormatAsync(format);
                break;
            }
        }
        if (requestedSubtype == null)
        {
            // No acceptable format was found. Ignore this source.
            continue;
        }

        MediaFrameReader frameReader = await _mediaCapture.CreateFrameReaderAsync(source, requestedSubtype);
        frameReader.FrameArrived += FrameReader_FrameArrived;
        _sourceReaders.Add(frameReader);

        MediaFrameReaderStartStatus status = await frameReader.StartAsync();
        if (status == MediaFrameReaderStartStatus.Success)
        {
            // _logger.Log($"Started {kind} reader.");
            startedKinds.Add(kind);
        }
        else
        {
            _logger.Log($"Unable to start {kind} reader. Error: {status}");
        }
    }

    if (startedKinds.Count == 0)
    {
        _logger.Log($"No eligible sources in {selectedGroup.DisplayName}.");
    }
}