Example #1
        private async Task CreateDeviceInputNode()
        {
            // Create a device input node
            CreateAudioDeviceInputNodeResult result = await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);

            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                ShowErrorMessage(result.Status.ToString());
                return;
            }

            deviceInputNode = result.DeviceInputNode;
        }
Example #2
        public async void CaptureAudio()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _audioGraph = result.Graph;

            var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            _audioFile = await Windows.Storage.ApplicationData.Current.TemporaryFolder
                         .CreateFileAsync("speech", CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(_audioFile, mediaEncodingProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }
            var fileOutputNode = fileOutputNodeResult.FileOutputNode;

            deviceInputNode.AddOutgoingConnection(fileOutputNode);

            _audioGraph.Start();
        }
Example #3
        private async Task <bool> CreateMicrophoneInputNode()
        {
            // Create a device input node
            var result = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Communications);

            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                switch (result.Status)
                {
                case AudioDeviceNodeCreationStatus.DeviceNotAvailable:
                    RaiseMicrophoneIsNotAvailable("You do not have the required microphone present on your system.");
                    return(false);

                case AudioDeviceNodeCreationStatus.AccessDenied:
                    RaiseMicrophoneIsNotAvailable(
                        "OneTox doesn't have permission to use your microphone. To change this, please go to the Settings app's Privacy section. " +
                        "Then click or tap the mute button to start using the microphone again.");
                    return(false);

                default:
                    throw new Exception(result.Status.ToString());
                }
            }

            _microphoneInputNode = result.DeviceInputNode;
            return(true);
        }
        async void CreateAudioGraphAsync()
        {
            var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));

            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
            }
            _graph = graphResult.Graph;
            var inputNodeResult = await _graph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);

            if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException($"Input node creation failed {inputNodeResult.Status}");
            }

            _inputNode = inputNodeResult.DeviceInputNode;

            _source                     = AudioVisualizer.PlaybackSource.CreateFromAudioNode(_inputNode);
            _converter                  = new SourceConverter();
            _converter.Source           = _source.Source;
            _converter.MinFrequency     = 110.0f;     // Note A2
            _converter.MaxFrequency     = 3520.0f;    // Note A7
            _converter.FrequencyCount   = 12 * 5 * 5; // 5 octaves, 5 bars per note
            _converter.FrequencyScale   = ScaleType.Logarithmic;
            _converter.SpectrumRiseTime = TimeSpan.FromMilliseconds(20);
            _converter.SpectrumFallTime = TimeSpan.FromMilliseconds(200);
            _converter.RmsRiseTime      = TimeSpan.FromMilliseconds(20); // Use RMS to gate noise, fast rise slow fall
            _converter.RmsFallTime      = TimeSpan.FromMilliseconds(500);
            _converter.ChannelCount     = 1;
            notesSpectrum.Source        = _converter;

            _graph.Start();
        }
        public async Task CreateAudioGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            settings.PrimaryRenderDevice      = outputDevices[0];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating Audio Graph", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            graph = result.Graph;
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating InputDeviceNode", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
        }
Example #6
        private async Task InitializeAudioAsync()
        {
            // Create an AudioGraph with explicit PCM encoding (22.05 kHz, mono, 16-bit)
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16);

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            _graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
            _frameInputNode.AddOutgoingConnection(_deviceOutputNode);


            _frameOutputNode = _graph.CreateFrameOutputNode(nodeEncodingProperties);
            _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

            // Initialize the Frame Input Node in the stopped state
            _frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            _frameInputNode.QuantumStarted += node_QuantumStarted;

            _graph.QuantumProcessed += GraphOnQuantumProcessed;

            // Start the graph since we will only start/stop the frame input node
            _graph.Start();
        }
Example #7
        private async Task CreateAudioGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            settings.PrimaryRenderDevice      = outputDevices[outputDevicesListBox.SelectedIndex - 1];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;
            rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
            outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
            rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
            inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Since graph is successfully created, enable the button to select a file output
            fileButton.IsEnabled = true;

            // Disable the graph button to prevent accidental click
            createGraphButton.IsEnabled = false;

            // Because we are using lowest latency setting, we need to handle device disconnection errors
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Example #8
        public static async Task AudioDevices()
        {
            if (beep == null)
            {
                beep = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/beep.wav"));
            }

            DeviceInformationCollection devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());

            DeviceInformation selectedDevice = null;

            // Sometimes USB mics are detected as speakers, so add a check to make "Speakers (Raspberry Pi 2 audio)" the default <-- confirmed working on RPi 3
            //for (int i = 0; i <= devices.Count; i++)
            //{
            //    if (i != devices.Count)
            //    {
            //        if (DeviceTypeInformation.IsRaspberryPi)
            //        {
            //            if (devices[i].Name == "Speakers (Raspberry Pi 2 audio)")
            //            {
            //                selectedDevice = devices[i];
            //                break;
            //            }
            //        }
            //        else
            //        {
            //            selectedDevice = devices[i];
            //            break;
            //        }
            //    }
            //}

            settings = new AudioGraphSettings(AudioRenderCategory.Media);
            settings.PrimaryRenderDevice = selectedDevice;
            CreateAudioGraphResult resultg = await AudioGraph.CreateAsync(settings);

            audioflow = resultg.Graph;

            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await audioflow.CreateDeviceOutputNodeAsync();

            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioflow.CreateDeviceInputNodeAsync(MediaCategory.Media);

            deviceOuput = deviceOutputNodeResult.DeviceOutputNode;

            try
            {
                _mediaCapture = new MediaCapture();
                await _mediaCapture.InitializeAsync();

                _mediaCapture.Failed += _mediaCapture_Failed;
                _mediaCapture.AudioDeviceController.VolumePercent = 0.75f;
            }
            catch
            {
                Debug.WriteLine("Failed to set up microphone - is one connected?");
            }
        }
Example #9
        async Task Init()
        {
            try
            {
                await Stop();

                var pcmEncoding = AudioEncodingProperties.CreatePcm((uint)SampleRate, (uint)ChannelCount, (uint)BitsPerSample);
                // apparently this is not _really_ used/supported here, as the audio data seems to come through as floats (so basically MediaEncodingSubtypes.Float?)
                pcmEncoding.Subtype = MediaEncodingSubtypes.Pcm;

                var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
                {
                    EncodingProperties = pcmEncoding,
                    DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw
                    // These do not seem to take effect on certain hardware, and MSFT recommends SystemDefault when recording to a file anyway.
                    // We'll buffer audio data ourselves to improve RMS calculation across larger samples.
                    //QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
                    //DesiredSamplesPerQuantum = 4096
                };

                // create our audio graph... this will be a device input node feeding audio data into a frame output node
                var graphResult = await AudioGraph.CreateAsync(graphSettings);

                if (graphResult.Status == AudioGraphCreationStatus.Success)
                {
                    audioGraph = graphResult.Graph;

                    // take input from whatever the default communications device is set to on Windows
                    var inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Communications, pcmEncoding);

                    if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                    {
                        // create the output node
                        outputNode = audioGraph.CreateFrameOutputNode(pcmEncoding);

                        // wire the input to the output
                        inputResult.DeviceInputNode.AddOutgoingConnection(outputNode);

                        // Attach to QuantumStarted event in order to receive synchronous updates from audio graph (to capture incoming audio)
                        audioGraph.QuantumStarted             += Graph_QuantumStarted;
                        audioGraph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
                    }
                    else
                    {
                        throw new Exception($"audioGraph.CreateDeviceInputNodeAsync() returned non-Success status: {inputResult.Status}");
                    }
                }
                else
                {
                    throw new Exception($"AudioGraph.CreateAsync() returned non-Success status: {graphResult.Status}");
                }
            }
            catch
            {
                throw;
            }
        }
Example #10
        private async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                graph.Dispose();
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device input node using the default audio input device (requires the Microphone capability in the app manifest)
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the file that the recording will be written to

            StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
            StorageFile   file          = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            path = file.Path;

            MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                return;
            }

            fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to the file output node
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
Example #11
        async Task <AudioDeviceInputNode> CreateDeviceInputNode()
        {
            var result = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media);

            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Device input node creation failed
                return(null);
            }

            return(result.DeviceInputNode);
        }
Example #12
        public async Task InitializeAsync()
        {
            DebugUtil.CheckAppThread();

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            // settings.DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw;
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            DebugUtil.Assert(result.Status == AudioGraphCreationStatus.Success, "Failed to create audio graph");

            _audioGraph = result.Graph;

            int latencyInSamples = _audioGraph.LatencyInSamples;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();

            DebugUtil.Assert(deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
                             $"Audio Device Output unavailable because {deviceOutputNodeResult.Status}");

            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create a frame output node to capture the input audio
            _inputCaptureNode = _audioGraph.CreateFrameOutputNode();

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            DebugUtil.Assert(deviceInputNodeResult.Status == AudioDeviceNodeCreationStatus.Success,
                             $"Audio Device Input unavailable because {deviceInputNodeResult.Status}");

            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            _deviceInputNode.AddOutgoingConnection(_inputCaptureNode);
            _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

            /*
             * echoEffect = new EchoEffectDefinition(_graph);
             * echoEffect.WetDryMix = 0.7f;
             * echoEffect.Feedback = 0.5f;
             * echoEffect.Delay = 500.0f;
             * submixNode.EffectDefinitions.Add(echoEffect);
             *
             * // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
             * submixNode.DisableEffectsByDefinition(echoEffect);
             */

            // All nodes can have an OutgoingGain property
            // Setting the gain on the Submix node attenuates the output of the node
            //_submixNode.OutgoingGain = 0.5;
        }
Example #13
        private async Task InitInputNode()
        {
            var audioDeviceInputNodeResult = await ag.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other);

            if (audioDeviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                rootPage.ShowLogs("Input device failed: " + audioDeviceInputNodeResult.Status);
                return;
            }
            audioI = audioDeviceInputNodeResult.DeviceInputNode;
            rootPage.ShowLogs("Input Node initialized successfully: " + audioI?.Device?.Name
                              + " (channels: " + audioI?.EncodingProperties.ChannelCount + ")");
        }
Example #14
        private async Task <AudioDeviceInputNode> AttachDeviceInputNode()
        {
            var deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Speech, encoding.Audio, selectedInputDevice);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                error.Text = String.Format("DeviceInputNode creation failed because {0}", deviceInputNodeResult.Status.ToString());
                return(null);
            }

            var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            return(deviceInputNode);
        }
Example #15
        public static async Task CreateDeviceInputNode()
        {
            Console.WriteLine("Creating AudioGraphs");
            // Create an AudioGraph with explicit float encoding (48 kHz, stereo, 32-bit)
            AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.GameChat);

            graphsettings.EncodingProperties               = new AudioEncodingProperties();
            graphsettings.EncodingProperties.Subtype       = "Float";
            graphsettings.EncodingProperties.SampleRate    = 48000;
            graphsettings.EncodingProperties.ChannelCount  = 2;
            graphsettings.EncodingProperties.BitsPerSample = 32;
            graphsettings.EncodingProperties.Bitrate       = 3072000;
            //settings.DesiredSamplesPerQuantum = 960;
            //settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);

            if (graphresult.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            ingraph = graphresult.Graph;


            AudioGraphSettings nodesettings = new AudioGraphSettings(AudioRenderCategory.GameChat);

            nodesettings.EncodingProperties       = AudioEncodingProperties.CreatePcm(48000, 2, 16);
            nodesettings.DesiredSamplesPerQuantum = 960;
            nodesettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            // outgraph is a companion output AudioGraph created elsewhere in this class
            frameOutputNode         = ingraph.CreateFrameOutputNode(outgraph.EncodingProperties);
            quantum                 = 0;
            ingraph.QuantumStarted += Graph_QuantumStarted;

            Windows.Devices.Enumeration.DeviceInformation selectedDevice =
                await Windows.Devices.Enumeration.DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default));

            CreateAudioDeviceInputNodeResult result =
                await ingraph.CreateDeviceInputNodeAsync(MediaCategory.Media, nodesettings.EncodingProperties, selectedDevice);

            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = result.DeviceInputNode;
            deviceInputNode.AddOutgoingConnection(frameOutputNode);
            frameOutputNode.Start();
            ingraph.Start();
        }
Example #16
        private async Task CreateAudioGraph()
        {
            var outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
                PrimaryRenderDevice      = outputDevices[0]
            };

            var result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                await new MessageDialog("AudioGraph Creation Error: " + result.Status).ShowAsync();
                return;
            }

            _audioGraph = result.Graph;

            // Create a device output node
            var deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                await new MessageDialog("Audio Device Output unavailable: " + deviceOutputNodeResult.Status).ShowAsync();
                return;
            }

            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create a device input node using the default audio input device
            var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                await new MessageDialog("Audio Device Input unavailable: " + deviceInputNodeResult.Status).ShowAsync();

                _audioGraph.Dispose();
                _audioGraph = null;

                return;
            }

            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Because we are using lowest latency setting,
            // in general, we need to handle device disconnection errors
            // graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Example #17
        // Creating and immediately disposing an input node forces the microphone consent prompt to appear
        private async Task openMicrophonePopup()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            AudioGraph audioGraph = result.Graph;

            CreateAudioDeviceInputNodeResult resultNode = await audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);

            AudioDeviceInputNode deviceInputNode = resultNode.DeviceInputNode;

            deviceInputNode.Dispose();
            audioGraph.Dispose();
        }
        async private void ToggleRecord2(object sender, RoutedEventArgs e)
        {
            var btn_record_audio = sender as ToggleButton;

            if (btn_record_audio.IsChecked == false)
            {
                _graph_record.Stop();
                _graph_record.Dispose();
                await PlayAudio(_target_file);

                //using the media element to play the sound
                //var raf_stream = await _target_file.OpenReadAsync();
                //media.SetSource(raf_stream, "");
                //media.Play();
            }
            else
            {
                //initialize the audio graph for recording and then start recording
                AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
                settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

                CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    _graph_record = result.Graph;

                    //setup the input
                    var input_node = (await _graph_record.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other)).DeviceInputNode;

                    //setup the output (place where audio will be recorded to)
                    var feedback_folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("AudioFeedback", CreationCollisionOption.OpenIfExists);

                    _target_file = await feedback_folder.CreateFileAsync("audio message.mp3", CreationCollisionOption.GenerateUniqueName);

                    var profile          = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                    var file_output_node = (await _graph_record.CreateFileOutputNodeAsync(_target_file, profile)).FileOutputNode;

                    //direct the input to the output
                    input_node.AddOutgoingConnection(file_output_node);
                    media.Stop();  //stop playback since we are recording
                    _graph_record.Start();
                }
                else
                {
                    await new MessageDialog("Could not initialize recorder").ShowAsync();
                }
            }
        }
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                _rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            _graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                _rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
            _rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Media);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                _rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            var frameOutputNode = _graph.CreateFrameOutputNode();

            _graph.QuantumProcessed += AudioGraph_QuantumProcessed;
            _deviceInputNode.AddOutgoingConnection(frameOutputNode);
        }
Example #20
        public async Task <bool> ResetAudioInput(DeviceInformation microphoneDevice = null)
        {
            if (microphoneDevice == null)
            {
                var inputDevices = await GetAllMicrophoneDevices();

                if (inputDevices.Count == 0)
                {
                    InputDeviceState = InputDeviceState.MicrophoneNotDetected;
                    return(false);
                }

                microphoneDevice = inputDevices[0];
            }

            var inputAudioEncodingProperties = AudioEncodingProperties.CreatePcm(
                OpusConvertConstants.SamplingRate,
                1,
                16
                );

            var deviceInputNodeCreateResult = await _AudioGraph.CreateDeviceInputNodeAsync(
                Windows.Media.Capture.MediaCategory.GameChat,
                inputAudioEncodingProperties,
                microphoneDevice
                );

            if (deviceInputNodeCreateResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                if (deviceInputNodeCreateResult.Status == AudioDeviceNodeCreationStatus.AccessDenied)
                {
                    InputDeviceState = InputDeviceState.AccessDenied;
                }
                else
                {
                    InputDeviceState = InputDeviceState.UnknowunError;
                }

                return(false);
            }

            _InputNode       = deviceInputNodeCreateResult.DeviceInputNode;
            _FrameOutputNode = _AudioGraph.CreateFrameOutputNode(inputAudioEncodingProperties);
            _InputNode.AddOutgoingConnection(_FrameOutputNode);

            InputDeviceState = InputDeviceState.Avairable;

            return(true);
        }
Example #21
        public async Task Init()
        {
            Recording = false;
            // Select the devices for recording and playback
            var devices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var devicesIn = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector());

            outputDevice = devices[0];
            // Recording settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
                PrimaryRenderDevice = outputDevice,
            };


            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            graph = result.Graph;


            deviceOutputNode = (await graph //Create the output node (playback on the headset)
                                .CreateDeviceOutputNodeAsync())
                               .DeviceOutputNode;

            deviceInputNode = (await graph //Create the input node (microphone) - real-time communication
                               .CreateDeviceInputNodeAsync(MediaCategory.Communications, graph.EncodingProperties, devicesIn[0]))
                              .DeviceInputNode;

            // Create the file that stores the raw PCM recorded straight from the microphone
            StorageFile pcmfile = await KnownFolders
                                  .MusicLibrary
                                  .CreateFileAsync("PCM_original.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            // 16-bit PCM at 44.1 kHz, 96 kbps
            MediaEncodingProfile profile = MediaEncodingProfile.CreateWav(Windows.Media.MediaProperties.AudioEncodingQuality.Medium);


            pcmFileNode = (await graph // Create the file output node
                           .CreateFileOutputNodeAsync(pcmfile, profile))
                          .FileOutputNode;

            // Connect the playback node and the PCM file node to the microphone node,
            // i.e., feed the signal to the headset and to the file at the same time
            deviceInputNode.AddOutgoingConnection(pcmFileNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
        }
Example #22
        private async Task InitialiseAudioFeed()
        {
            var defaultAudioCaptureId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default);
            var microphone            = await DeviceInformation.CreateFromIdAsync(defaultAudioCaptureId);

            var inputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            var inputResult  =
                await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, inputProfile.Audio, microphone);

            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException("AudioDeviceNode creation error!");
            }

            inputResult.DeviceInputNode.AddOutgoingConnection(_audioFileOutputNode);
        }
Example #23
        public async Task Start()
        {
            try
            {
                m_mutex.WaitOne();

                // Construct the audio graph
                var result = await AudioGraph.CreateAsync(
                    new AudioGraphSettings(AudioRenderCategory.Speech)
                {
                    DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw,
                    AudioRenderCategory = AudioRenderCategory.Speech
                });

                if (result.Status != AudioGraphCreationStatus.Success)
                {
                    throw new Exception("AudioGraph creation error: " + result.Status);
                }

                m_audioGraph = result.Graph;

                var pcmEncoding = AudioEncodingProperties.CreatePcm(16000, 1, 32);
                m_frameOutputNode = m_audioGraph.CreateFrameOutputNode(pcmEncoding);

                var inputResult = await m_audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Speech, pcmEncoding);

                if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
                {
                    throw new Exception("AudioGraph CreateDeviceInputNodeAsync error: " + inputResult.Status);
                }

                m_deviceInputNode = inputResult.DeviceInputNode;
                m_deviceInputNode.AddOutgoingConnection(m_frameOutputNode);
                m_audioGraph.QuantumStarted += node_QuantumStarted;
                m_audioGraph.Start();
            }
            catch (Exception ex)
            {
                Utils.Toasts.ShowToast("", "AudioInput Start Exception: " + ex.Message);
            }

            m_mutex.ReleaseMutex();
        }
Example #24
        private async Task StartAudioAsync()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

            try
            {
                CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

                if (result.Status != AudioGraphCreationStatus.Success)
                {
                    return;
                }

                _audioGraph = result.Graph;

                // Create a device input node
                CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, _audioGraph.EncodingProperties, _audioInputSelected);

                if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
                {
                    return;
                }

                // Create a device output node
                CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();

                if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
                {
                    return;
                }

                _deviceInputNode  = deviceInputNodeResult.DeviceInputNode;
                _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

                _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);
                _audioGraph.Start();
            }
            catch (Exception ex)
            {
                Debug.WriteLine($"AudioGraph initialization failed. {ex?.Message}");
            }
        }
Example #25
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            DeviceInformation  SelectedDevice = DevicesBox.SelectedItem as DeviceInformation;
            AudioGraphSettings settings       = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);


            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            AudioDeviceOutputNode deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, SelectedDevice);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));

                return;
            }

            AudioDeviceInputNode deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            frameOutputNode = graph.CreateFrameOutputNode();
            deviceInputNode.AddOutgoingConnection(frameOutputNode);

            AudioFrameInputNode frameInputNode = graph.CreateFrameInputNode();

            frameInputNode.AddOutgoingConnection(deviceOutputNode);

            // Attach to QuantumStarted event in order to receive synchronous updates from audio graph (to capture incoming audio).
            graph.QuantumStarted += GraphOnQuantumProcessed;

            graph.Start();
        }
Example #26
        private async void CreateAudioGraphAsync()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //settings.DesiredSamplesPerQuantum = fftLength;
                DesiredRenderDeviceAudioProcessing = AudioProcessing.Default,
                QuantumSizeSelectionMode           = QuantumSizeSelectionMode.ClosestToDesired
            };

            CreateAudioGraphResult graphResult = await AudioGraph.CreateAsync(settings);

            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
            }

            _graph = graphResult.Graph;

            //CreateAudioDeviceInputNodeResult inputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Media);
            CreateAudioDeviceInputNodeResult inputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (inputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
            {
                _inputNode = inputNodeResult.DeviceInputNode;


                _frameOutputNode = _graph.CreateFrameOutputNode();
                _inputNode.AddOutgoingConnection(_frameOutputNode);
                _frameOutputNode.Start();
                _graph.QuantumProcessed += AudioGraph_QuantumProcessed;

                // Handle device disconnection errors in case the capture device goes away
                // (the quantum mode here is ClosestToDesired, not lowest latency)
                _graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;

                _graph.Start();
            }
            else
            {
                MessageDialog md = new MessageDialog("Cannot access microphone");
                await md.ShowAsync();
            }
        }
Example #27
        public async Task StartRecordAsync()
        {
            _filePath = Path.GetTempFileName();
            var file = await StorageFile.GetFileFromPathAsync(_filePath);

            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Speech));

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception("Couldn't open recorder!");
            }
            _graph = result.Graph;

            var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

            var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            var outputResult = await _graph.CreateFileOutputNodeAsync(file, outProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create output!");
            }

            _outputNode = outputResult.FileOutputNode;
            var inProfile   = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            var inputResult = await _graph.CreateDeviceInputNodeAsync(
                MediaCategory.Speech,
                inProfile.Audio,
                microphone);

            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create device node!");
            }

            inputResult.DeviceInputNode.AddOutgoingConnection(_outputNode);
            _graph.Start();
        }
        async Task StartRecordingAsync()
        {
            try
            {
                recordingFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(Constants.AudioFilename, CreationCollisionOption.ReplaceExisting);

                Debug.WriteLine(recordingFile.Path);

                var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    audioGraph = result.Graph;

                    var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

                    var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low); // 1 channel, 16 bits per sample, 16K sample rate
                    outputProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                    var inputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    var outputResult = await audioGraph.CreateFileOutputNodeAsync(recordingFile, outputProfile);

                    if (outputResult.Status == AudioFileNodeCreationStatus.Success)
                    {
                        audioFileOutputNode = outputResult.FileOutputNode;

                        var inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, inputProfile.Audio, microphone);

                        if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                        {
                            inputResult.DeviceInputNode.AddOutgoingConnection(audioFileOutputNode);
                            audioGraph.Start();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }
Example #29
        /// <summary>
        /// Tries to create the frame output node and set the outgoing connection to it. Also calculates audioFrameUpdateMinimum.
        /// </summary>
        /// <returns>Whether or not the attempt was successful</returns>
        private static async Task <bool> CreateNodes()
        {
            try
            {
                CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

                deviceInputNode = deviceInputNodeResult.DeviceInputNode;

                frameOutputNode       = graph.CreateFrameOutputNode(graph.EncodingProperties);
                graph.QuantumStarted += Graph_QuantumStarted;

                audioFrameUpdateMinimum = Convert.ToInt32(samplesPerQuantumLimit / graph.SamplesPerQuantum);
                deviceInputNode.AddOutgoingConnection(frameOutputNode);

                return(true);
            }
            catch (Exception)
            {
                return(false);
            }
        }
Example #30
        public async Task InitializeAsync()
        {
            audGraphResult = await AudioGraph.CreateAsync(audGraphSettings);
            audGraph = audGraphResult.Graph;

            deviceOutputNodeResult = await audGraph.CreateDeviceOutputNodeAsync();
            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
            //deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties);
            deviceInputNodeResult = await audGraph.CreateDeviceInputNodeAsync(MediaCategory.Other, audGraph.EncodingProperties, inputDevice);
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            audioDeviceOutputSubmixNode = audGraph.CreateSubmixNode();

            // Route the microphone through the submix node (which hosts the effects) to the device output
            deviceInputNode.AddOutgoingConnection(audioDeviceOutputSubmixNode);
            audioDeviceOutputSubmixNode.AddOutgoingConnection(deviceOutputNode);

            CreateEchoEffect();
            CreateReverbEffect();
            CreateLimiterEffect();
            CreateEqEffect();
        }
Example #31
		/// <summary>
		/// Required before starting recording
		/// </summary>
		/// <returns></returns>
		private async Task<string> CreateAudioGraphAsync()
		{
			// var inputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector()); // LOLLO TEST

			_outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
			if (_outputDevices == null || _outputDevices.Count < 1)
			{
				return "AudioGraph Creation Error: no output devices found";
			}

			AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
			{ QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency, PrimaryRenderDevice = _outputDevices[0] };

			CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
			if (result.Status != AudioGraphCreationStatus.Success)
			{
				// Cannot create graph
				return string.Format("AudioGraph Creation Error because {0}", result.Status.ToString());
			}
			_audioGraph = result.Graph;
			// Because we are using lowest latency setting, we need to handle device disconnection errors
			_audioGraph.UnrecoverableErrorOccurred += OnGraph_UnrecoverableErrorOccurred;

			//// Create a device output node // away, so we get no echo
			//CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
			//if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			//{
			//	// Cannot create device output node
			//	return string.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString());
			//}
			//_deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

			// Create a device input node using the default audio input device
			CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
			if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			{
				// Cannot create device input node
				return string.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString());
			}
			_deviceInputNode = deviceInputNodeResult.DeviceInputNode;

			//// LOLLO: set the volume; rather useless because it works like a mixer and the default value is 1.
			//if (_deviceOutputNode.OutgoingGain < 1.0) _deviceOutputNode.OutgoingGain = 1.0;
			//if (_deviceInputNode.OutgoingGain < 1.0) _deviceInputNode.OutgoingGain = 1.0;

			return string.Empty;
		}
Example #33
        // Create the AudioGraph
        private async Task CreateAudioGraph()
        {
            // Create a new AudioGraph settings object to store the options; here you can play with latency/output device etc.
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            settings.DesiredSamplesPerQuantum = desiredSamples;
            settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Default;
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                ShowErrorMessage(string.Format("AudioGraph Creation Error because {0}", result.Status.ToString()));
                return;
            }
            graph = result.Graph;


            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                ShowErrorMessage(string.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
                return;
            }
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            
            // Create the output node to send to data processing and add the event handler for when a quantum is processed
            frameOutputNode = graph.CreateFrameOutputNode();
            graph.QuantumProcessed += AudioGraph_QuantumProcessed;


            // Link the nodes together
            deviceInputNode.AddOutgoingConnection(frameOutputNode);


            // Handle device disconnection errors so the graph can be recreated if the device goes away
            // (the quantum mode here is ClosestToDesired, not lowest latency)
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }