// Enumerates all audio capture (input) and render (output) devices and writes
// their ids and friendly names to the log. Both enumerations run concurrently.
public static async Task ListDevicesAsync()
{
    DeviceInformationCollection inputs = null;
    DeviceInformationCollection outputs = null;
    // BUG FIX: the original used Task.WaitAll, which synchronously blocks a
    // thread-pool thread inside an async method (deadlock/starvation risk).
    // Await both enumerations instead.
    await Task.WhenAll(
        Task.Run(async () => inputs = await AsyncHelper.AsAsync(DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector()))),
        Task.Run(async () => outputs = await AsyncHelper.AsAsync(DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector())))
    );
    Log.WriteLine("Audio Input Devices:");
    foreach (var i in inputs)
    {
        Log.WriteLine($"\t{i.Id}: {i.Name}");
    }
    Log.WriteLine("Audio Output Devices:");
    foreach (var o in outputs)
    {
        Log.WriteLine($"\t{o.Id}: {o.Name}");
    }
}
// Loads an ONNX model from the given file path and builds an evaluation
// session on the requested device (DirectX high-performance when gpu is
// true, otherwise CPU). Progress is traced to the log at each step.
public static async Task<Model> CreateModelAsync(string filename, bool gpu)
{
    Log.WriteLine("creating model");
    var file = await AsyncHelper.AsAsync(StorageFile.GetFileFromPathAsync(filename));
    Log.WriteLine("have file");
    var learningModel = await AsyncHelper.AsAsync(LearningModel.LoadFromStorageFileAsync(file));
    Log.WriteLine("loaded model");
    var model = new Model();
    model._model = learningModel;
    LearningModelDeviceKind kind;
    if (gpu)
    {
        Log.WriteLine("using GPU");
        kind = LearningModelDeviceKind.DirectXHighPerformance;
    }
    else
    {
        Log.WriteLine("using CPU");
        kind = LearningModelDeviceKind.Cpu;
    }
    model._device = new LearningModelDevice(kind);
    model._session = new LearningModelSession(model._model, model._device);
    Log.WriteLine("returning model now");
    return model;
}
// Creates the default device output (render) node on the audio graph and
// stores it in Device. Throws ApplicationException when node creation fails.
public async Task InitializeAsync()
{
    var result = await AsyncHelper.AsAsync(Graph.CreateDeviceOutputNodeAsync());
    if (result.Status == AudioDeviceNodeCreationStatus.Success)
    {
        Device = result.DeviceOutputNode;
        return;
    }
    throw new ApplicationException($"audio output device create failed. status = {result.Status} ex = {result.ExtendedError.ToString()}");
}
// Encodes a SoftwareBitmap as BMP into a new in-memory random-access stream,
// rewound to position 0 so callers can read it from the start.
public static async Task<InMemoryRandomAccessStream> ConvertToIRandomAccessStream(SoftwareBitmap sb)
{
    var target = new InMemoryRandomAccessStream();
    var encoder = await AsyncHelper.AsAsync(BitmapEncoder.CreateAsync(BitmapEncoder.BmpEncoderId, target));
    encoder.SetSoftwareBitmap(sb);
    await AsyncHelper.AsAsync(encoder.FlushAsync());
    target.Seek(0L);
    return target;
}
// Enumerates all media frame source groups, logging every group and source
// found, and returns the FIRST group and FIRST source info encountered
// (either tuple element may be null when nothing is found).
// When no groups exist at all, it additionally logs what the generic device
// selectors can see, purely as a diagnostic aid for the operator.
private static async Task<Tuple<MediaFrameSourceGroup, MediaFrameSourceInfo>> EnumFrameSourcesAsync()
{
    MediaFrameSourceInfo result_info = null;
    MediaFrameSourceGroup result_group = null;
    var sourcegroups = await AsyncHelper.AsAsync(MediaFrameSourceGroup.FindAllAsync());
    Log.WriteLine("found {0} Source Groups", sourcegroups.Count);
    if (sourcegroups.Count == 0)
    {
        // Nothing found: fall back to broader selectors so the log shows
        // whether any camera hardware is visible to the system at all.
        var dinfos = await AsyncHelper.AsAsync(DeviceInformation.FindAllAsync(MediaFrameSourceGroup.GetDeviceSelector()));
        Log.WriteLine("found {0} devices from MediaFrameSourceGroup selector", dinfos.Count);
        foreach (var info in dinfos)
        {
            Log.WriteLine(info.Name);
        }
        if (dinfos.Count == 0)
        {
            // Still nothing: widest net -- any video capture class device.
            dinfos = await AsyncHelper.AsAsync(DeviceInformation.FindAllAsync(DeviceClass.VideoCapture));
            Log.WriteLine("found {0} devices from Video Capture DeviceClass", dinfos.Count);
            foreach (var info in dinfos)
            {
                Log.WriteLine(info.Name);
            }
        }
    }
    foreach (var g in sourcegroups)
    {
        var sourceinfos = g.SourceInfos;
        Log.WriteLine("Source Group {0}", g.Id);
        Log.WriteLine(" {0}", g.DisplayName);
        Log.WriteLine(" with {0} Sources:", sourceinfos.Count);
        foreach (var s in sourceinfos)
        {
            var d = s.DeviceInformation;
            Log.WriteLine("\t{0}", s.Id);
            Log.WriteLine("\t\tKind {0}", s.SourceKind);
            Log.WriteLine("\t\tDevice {0}", d.Id);
            Log.WriteLine("\t\t {0}", d.Name);
            Log.WriteLine("\t\t Kind {0}", d.Kind);
            if (result_info == null)
            {
                result_info = s; // for now just pick the first thing we find
            }
        }
        Log.EndLine();
        if (result_group == null)
        {
            result_group = g; // for now just pick the first thing we find
        }
    }
    return (new Tuple<MediaFrameSourceGroup, MediaFrameSourceInfo>(result_group, result_info));
}
// Creates a speech-category device input node on the audio graph for the
// given capture device and encoding settings, storing it in Device.
// Throws ApplicationException when node creation fails.
public async Task InitializeAsync(DeviceInformation di, AudioEncodingProperties settings)
{
    Log.WriteLine($"attempting to create input device {di.Id}");
    var result = await AsyncHelper.AsAsync(Graph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech, settings, di));
    if (result.Status == AudioDeviceNodeCreationStatus.Success)
    {
        Device = result.DeviceInputNode;
        return;
    }
    throw new ApplicationException($"audio input device {di.Id}:{di.Name} create failed. status = {result.Status} ex = {result.ExtendedError.ToString()}");
}
// Loads an image file from disk and decodes it to a BGRA8 SoftwareBitmap.
// Best-effort: returns null on any failure (missing file, bad image, etc.).
public static async Task<SoftwareBitmap> LoadSoftwareBitmap(string path)
{
    try
    {
        var bytes = File.ReadAllBytes(path);
        var stream = await ConvertBytesToInputStream(bytes);
        var decoder = await AsyncHelper.AsAsync(BitmapDecoder.CreateAsync(stream));
        return await AsyncHelper.AsAsync(decoder.GetSoftwareBitmapAsync(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore));
    }
    catch (Exception)
    {
        // deliberate best-effort swallow: caller handles null
    }
    return null;
}
// Reads the full contents of a random-access stream (from position 0) into a
// byte array. Best-effort: returns an empty array on any failure.
public static async Task<byte[]> ConvertInputStreamToBytes(IRandomAccessStream stream)
{
    try
    {
        // FIX: the original leaked the DataReader (IDisposable). Disposing it
        // also closes the input-stream view created here; the caller's
        // IRandomAccessStream itself is untouched.
        using (var reader = new DataReader(stream.GetInputStreamAt(0)))
        {
            var bytes = new byte[stream.Size];
            await AsyncHelper.AsAsync(reader.LoadAsync((uint)stream.Size));
            reader.ReadBytes(bytes);
            return bytes;
        }
    }
    catch (Exception)
    {
        // deliberate best-effort swallow: caller handles empty result
    }
    return new byte[] { };
}
// Encodes a SoftwareBitmap to a JPEG byte array at its native size, no
// rotation and no thumbnail. Best-effort: returns an empty array on failure.
public async static Task<byte[]> EncodeSoftwareBitmapToJpeg(SoftwareBitmap softwareBitmap)
{
    try
    {
        using (var stream = new InMemoryRandomAccessStream())
        {
            // Create an encoder with the desired format
            BitmapEncoder encoder = await AsyncHelper.AsAsync(BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream));
            // Set the software bitmap
            encoder.SetSoftwareBitmap(softwareBitmap);
            // Pass-through transform: keep source dimensions and orientation.
            encoder.BitmapTransform.ScaledWidth = (uint)softwareBitmap.PixelWidth;
            encoder.BitmapTransform.ScaledHeight = (uint)softwareBitmap.PixelHeight;
            encoder.BitmapTransform.Rotation = Windows.Graphics.Imaging.BitmapRotation.None;
            encoder.BitmapTransform.InterpolationMode = BitmapInterpolationMode.Fant;
            encoder.IsThumbnailGenerated = false;
            // BUG FIX: the original flushed inside a swallowed try, then
            // checked "IsThumbnailGenerated == false" -- which was set false
            // just above and is therefore ALWAYS true -- and flushed a second
            // time. Flushing an already-flushed encoder fails. Since no
            // thumbnail is requested, flush exactly once and let any failure
            // fall through to the catch below.
            await AsyncHelper.AsAsync(encoder.FlushAsync());
            using (var reader = new DataReader(stream.GetInputStreamAt(0)))
            {
                var bytes = new byte[stream.Size];
                await AsyncHelper.AsAsync(reader.LoadAsync((uint)stream.Size));
                reader.ReadBytes(bytes);
                return bytes;
            }
        }
    }
    catch (Exception)
    {
        // deliberate best-effort swallow: caller handles empty result
    }
    return new byte[] { };
}
// Runs one model evaluation on a captured video frame, tagged with
// correlationId so asynchronous completion can be matched up later.
// The ModelResult is registered in _results BEFORE evaluation starts so a
// lookup by correlationId succeeds while the evaluation is in flight.
public async Task<ModelResult> EvaluateAsync(MediaFrameReference input, string correlationId)
{
    var r = new ModelResult(_session, correlationId);
    lock (_results)
    {
        _results.Add(correlationId, r);
        ++depth; // in-flight evaluation counter -- NOTE(review): decrement happens elsewhere; confirm it is balanced
    }
    var v = ImageFeatureValue.CreateFromVideoFrame(input.VideoMediaFrame.GetVideoFrame());
    // NOTE: following bind strings are specific to azure custom vision coreml output.
    r._binding.Bind("data", v);
    r._binding.Bind("classLabel", r._output.classLabelTensor);
    r._binding.Bind("loss", r._output.loss);
    r._result = await AsyncHelper.AsAsync(_session.EvaluateAsync(r._binding, correlationId));
    return (r);
}
// Selects an audio render device by (partial) friendly name. When name is
// null or no device matches, falls back to the system default render device.
public static async Task<DeviceInformation> SelectAsync(string name)
{
    var s = MediaDevice.GetAudioRenderSelector();
    var di = default(DeviceInformation);
    if (name != null)
    {
        // BUG FIX: the original awaited SelectAsync(s, name) but DISCARDED
        // the result, so a requested device name was never honored and the
        // default device was always used.
        di = await SelectAsync(s, name);
    }
    if (di == default(DeviceInformation))
    {
        // 0 == AudioDeviceRole.Default
        s = MediaDevice.GetDefaultAudioRenderId(0);
        di = await AsyncHelper.AsAsync(Windows.Devices.Enumeration.DeviceInformation.CreateFromIdAsync(s));
    }
    return (di);
}
// Copies a byte array into a new in-memory random-access stream, rewound to
// position 0. Best-effort: returns null on any failure.
public static async Task<InMemoryRandomAccessStream> ConvertBytesToInputStream(byte[] data)
{
    try
    {
        InMemoryRandomAccessStream strm = new InMemoryRandomAccessStream();
        var writer = new DataWriter(strm);
        writer.WriteBytes(data);
        // BUG FIX: StoreAsync is what commits the writer's buffered bytes to
        // the stream; the original called FlushAsync BEFORE StoreAsync, i.e.
        // flushed an empty stream. Store first, then flush.
        await AsyncHelper.AsAsync(writer.StoreAsync());
        await AsyncHelper.AsAsync(writer.FlushAsync());
        // Detach before disposing so the writer does not close the stream we
        // are about to hand to the caller (the original never disposed it).
        writer.DetachStream();
        writer.Dispose();
        // Rewind so callers can read from the start.
        strm.Seek(0);
        return strm;
    }
    catch (Exception)
    {
        // deliberate best-effort swallow: caller handles null
    }
    return null;
}
// Async entry point for the audio module: parses options, concurrently
// creates the Azure connection and the audio graph (input -> output
// passthrough), then spins keeping the process alive until killed.
// Returns 0; initialization failures call Environment.Exit(2).
static async Task<int> MainAsync(string[] args)
{
    Log.WriteLine("Starting async...");
    var Options = new AppOptions();
    Options.Parse(args);
    Log.Enabled = !Options.Quiet;
    Log.Verbose = Options.Verbose;
    Log.WriteLine("arg parse complete...");
    var inDevice = default(AudioInputDevice);
    var outDevice = default(AudioOutputDevice);
    var connection = default(AzureConnection);
    var module = default(AzureModule);
    try
    {
        if (Options.List)
        {
            await AudioInputDevice.ListDevicesAsync();
        }
        await Task.WhenAll(
            Task.Run(async () =>
            {
                try
                {
                    if (!Options.Test)
                    {
                        Log.WriteLine("starting connection creation");
                        connection = await AzureConnection.CreateAzureConnectionAsync();
                    }
                    else
                    {
                        Log.WriteLine("test mode. skipping connection creation");
                    }
                }
                catch (Exception e)
                {
                    Log.WriteLine("Audio Main CreateAzureConnectionAsync exception {0}", e.ToString());
                }
            }),
            Task.Run(async () =>
            {
                try
                {
                    // Build the audio graph: selected render device, PCM
                    // 16 kHz mono 16-bit capture, input wired to output.
                    var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
                    settings.PrimaryRenderDevice = await AudioOutputDevice.SelectAsync(Options.OutputDeviceName);
                    Log.WriteLine($"found Primary Render Device {settings.PrimaryRenderDevice.Id}");
                    var graph = await AsyncHelper.AsAsync(AudioGraph.CreateAsync(settings));
                    if (graph.Status != AudioGraphCreationStatus.Success)
                    {
                        throw new ApplicationException($"Audio Graph Creation failed status = {graph.Status} err = {graph.ExtendedError}");
                    }
                    Log.WriteLine("Graph created");
                    inDevice = new AudioInputDevice(graph.Graph);
                    Log.WriteLine("input device created");
                    var encSettings = AudioEncodingProperties.CreatePcm(16000, 1, 16);
                    await inDevice.InitializeAsync(await AudioInputDevice.SelectAsync(Options.InputDeviceName), encSettings);
                    Log.WriteLine("input device Initialized");
                    outDevice = new AudioOutputDevice(graph.Graph);
                    Log.WriteLine("output device created");
                    await outDevice.InitializeAsync();
                    Log.WriteLine("output device Initialized");
                    inDevice.Connect(outDevice);
                    graph.Graph.Start();
                    Log.WriteLine("graph started");
                }
                catch (Exception e)
                {
                    Log.WriteLine("Audio Initialization exception {0}", e.ToString());
                    Environment.Exit(2);
                }
            })
        );
        EventHandler<ConfigurationType> ConfigurationChangedHandler = async (object sender, ConfigurationType newConfiguration) =>
        {
            var m = (AzureModule)sender;
            Log.WriteLine("updating Audio with {0}", newConfiguration.ToString());
            await Task.CompletedTask;
        };
        try
        {
            if (!Options.Test)
            {
                module = (AzureModule)connection.Module;
                module.ConfigurationChanged += ConfigurationChangedHandler;
                await connection.NotifyModuleLoadAsync();
            }
            Log.WriteLine("Initialization Complete. have connection and devices");
            // Keep the process alive while the graph runs. FIX: await the
            // spin task instead of Task.WaitAll, which blocked a thread-pool
            // thread inside this async method.
            await Task.Run(() =>
            {
                try
                {
                    // TODO: cancellation token
                    for (; ;)
                    {
                        Log.WriteLine("{0} wait spin", Environment.TickCount);
                        Thread.Sleep(TimeSpan.FromSeconds(30));
                    }
                }
                catch (Exception e)
                {
                    Log.WriteLine("Audio wait spin exception {0}", e.ToString());
                }
            });
        }
        finally
        {
            if (!Options.Test)
            {
                // BUG FIX: the original used += here, RE-subscribing the
                // handler on teardown; this must unsubscribe it.
                module.ConfigurationChanged -= ConfigurationChangedHandler;
            }
        }
    }
    finally
    {
        if (connection != default(AzureConnection))
        {
            connection.Dispose();
        }
        if (module != default(AzureModule))
        {
            module.Dispose();
        }
        if (inDevice != default(AudioInputDevice))
        {
            inDevice.Dispose();
        }
        if (outDevice != default(AudioOutputDevice))
        {
            outDevice.Dispose();
        }
    }
    return (0);
}
// Finds the first device matching the selector whose friendly name contains
// the given substring; returns null when nothing matches.
public static async Task<DeviceInformation> SelectAsync(string selector, string name)
{
    var all = await AsyncHelper.AsAsync(Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(selector));
    return all.FirstOrDefault(d => d.Name.Contains(name));
}
// Initializes MediaCapture on the first enumerated frame source group,
// selects the largest MJPG video format, and returns a realtime frame reader
// plus an event that is signaled whenever a frame arrives.
// Throws ApplicationException when no capture device or format is usable.
static async Task<Tuple<MediaFrameReader, EventWaitHandle>> GetFrameReaderAsync()
{
    MediaCapture capture = new MediaCapture();
    MediaCaptureInitializationSettings init = new MediaCaptureInitializationSettings();
    Log.WriteLine("Enumerating Frame Source Info");
    var (frame_group, frame_source_info) = await EnumFrameSourcesAsync();
    if (frame_group == null || frame_source_info == null)
    {
        throw new ApplicationException("no capture devices found");
    }
    Log.WriteLine("Selecting Source");
    init.SourceGroup = frame_group;
    init.SharingMode = MediaCaptureSharingMode.ExclusiveControl;
    //init.SharingMode(MediaCaptureSharingMode::SharedReadOnly);
    //init.MemoryPreference(a.opt.fgpu_only ? MediaCaptureMemoryPreference::Auto : MediaCaptureMemoryPreference::Cpu);
    init.MemoryPreference = MediaCaptureMemoryPreference.Cpu;
    init.StreamingCaptureMode = StreamingCaptureMode.Video;
    // FIX: await Task.Delay instead of burning a thread-pool thread on
    // Thread.Sleep wrapped in Task.Run. NOTE(review): the settle delays
    // around InitializeAsync look like a device-quirk workaround -- confirm
    // they are still needed.
    await Task.Delay(1000);
    await AsyncHelper.AsAsync(capture.InitializeAsync(init));
    await Task.Delay(1000);
    Log.WriteLine("capture initialized. capture is {0}", capture == null ? "null" : "not null");
    var sources = capture.FrameSources;
    Log.WriteLine("have frame sources. FrameSources is {0}", sources == null ? "null" : "not null");
    Log.WriteLine("selected source group {0}. looking for source {1}", frame_group.DisplayName, frame_source_info.Id);
    MediaFrameSource source;
    var found = sources.TryGetValue(frame_source_info.Id, out source);
    if (!found)
    {
        Log.WriteLine("source {0} not found", frame_source_info.Id);
        // BUG FIX: the original formatted `source`, which is null on this
        // path; report the id we actually failed to find.
        throw new ApplicationException(string.Format("can't find source {0}", frame_source_info.Id));
    }
    Log.WriteLine("have frame source that matches chosen source info id");
    // MediaCaptureVideoProfile doesn't have frame reader variant only photo, preview, and record.
    // so we will enumerate and select instead of just declaring what we want and having the system
    // give us the closest match
    var formats = source.SupportedFormats;
    Log.WriteLine("have formats");
    MediaFrameFormat format = null;
    Log.WriteLine("hunting for format");
    foreach (var f in formats)
    {
        Log.Write(string.Format("major {0} sub {1} ", f.MajorType, f.Subtype));
        if (f.MajorType == "Video" && f.Subtype == "MJPG")
        {
            Log.Write(string.Format("w {0} h {1} ", f.VideoFormat.Width, f.VideoFormat.Height));
            if (format == null)
            {
                format = f;
                Log.Write(" *** Updating Selection *** ");
            }
            else
            {
                // Prefer the larger resolution seen so far.
                var vf = format.VideoFormat;
                var new_vf = f.VideoFormat;
                if (new_vf.Width > vf.Width || new_vf.Height > vf.Height)
                {
                    // this will select first of the dupes which hopefully is ok
                    format = f;
                    Log.Write(" *** Updating Selection *** ");
                }
            }
        }
        Log.Write("\n");
    }
    if (format == null)
    {
        throw new ApplicationException("Can't find a Video Format");
    }
    Log.WriteLine(string.Format("selected videoformat -- major {0} sub {1} w {2} h {3}", format.MajorType, format.Subtype, format.VideoFormat.Width, format.VideoFormat.Height));
    await AsyncHelper.AsAsync(source.SetFormatAsync(format));
    Log.WriteLine("set format complete");
    var reader = await AsyncHelper.AsAsync(capture.CreateFrameReaderAsync(source));
    Log.WriteLine("frame reader retrieved\r\n");
    reader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
    var evtframe = new EventWaitHandle(false, EventResetMode.ManualReset);
    reader.FrameArrived += (MediaFrameReader sender, MediaFrameArrivedEventArgs args) => evtframe.Set();
    return (new Tuple<MediaFrameReader, EventWaitHandle>(reader, evtframe));
}
// Async entry point for the camera/inference module: loads the ONNX model and
// creates the Azure connection concurrently with starting the frame reader,
// then runs the camera processing loop. Returns 0; startup failures call
// Environment.Exit (3 for --list, 2 for errors).
static async Task<int> MainAsync(AppOptions options)
{
    //Log.WriteLine("pause...");
    //var x = Console.ReadLine();
    Log.WriteLine("Starting async...");
    Model model = null;
    AzureConnection connection = null;
    MediaFrameReader reader = null;
    EventWaitHandle evtFrame = null;
    if (options.List)
    {
        await EnumFrameSourcesAsync();
        Environment.Exit(3);
    }
    await Task.WhenAll(
        Task.Run(async () =>
        {
            try
            {
                model = await Model.CreateModelAsync(
                    Directory.GetCurrentDirectory() + "\\resources\\office_fruit.onnx", options.Gpu);
                if (options.Test)
                {
                    await Task.CompletedTask;
                }
                else
                {
                    connection = await AzureConnection.CreateAzureConnectionAsync();
                }
            }
            catch (Exception e)
            {
                Log.WriteLineError("failed to create model {0}", e.ToString());
                Environment.Exit(2);
            }
        }),
        Task.Run(async () =>
        {
            try
            {
                (reader, evtFrame) = await GetFrameReaderAsync();
                await AsyncHelper.AsAsync(reader.StartAsync());
            }
            catch (Exception e)
            {
                Log.WriteLineError("failed to start frame reader {0}", e.ToString());
                Environment.Exit(2);
            }
        }));
    try
    {
        AzureModule m = null;
        EventHandler<string> ModuleLoadedHandler = async (Object sender, string moduleName) =>
        {
            try
            {
                Log.WriteLine("module loaded. resending state");
                await connection.NotifyNewModuleOfCurrentStateAsync();
            }
            catch (Exception e)
            {
                Log.WriteLineError("failed to notify state {0}", e.ToString());
                Environment.Exit(2);
            }
        };
        if (connection != null)
        {
            m = (AzureModule)connection.Module;
            m.ModuleLoaded += ModuleLoadedHandler;
        }
        try
        {
            Log.WriteLine("Model loaded, Azure Connection created, and FrameReader Started\n\n\n\n");
            await CameraProcessingAsync(model, reader, evtFrame, connection);
        }
        finally
        {
            if (connection != null)
            {
                m.ModuleLoaded -= ModuleLoadedHandler;
            }
        }
    }
    finally
    {
        if (connection != null)
        {
            connection.Dispose();
        }
        // FIX: null-conditional dispose -- the original dereferenced these
        // unconditionally and would NRE if startup left them null.
        reader?.Dispose();
        model?.Dispose();
    }
    return (0);
}