Esempio n. 1
0
        // Fire-and-forget playback of a short UI sound effect through a transient
        // low-latency AudioGraph.
        // NOTE(review): async void means failures here are unobservable by callers;
        // kept for source compatibility with existing call sites.
        public static async void Play(SoundEffect effect)
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

            var result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            // NOTE(review): the 'effect' parameter is unused; the node always plays
            // sent.mp3 — presumably a placeholder. TODO confirm the intended mapping.
            var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/sent.mp3"));

            var fileInputNodeResult = await result.Graph.CreateFileInputNodeAsync(file);

            if (fileInputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // Fix: dispose the graph instead of leaking it on the failure path.
                result.Graph.Dispose();
                return;
            }

            var deviceOutputNodeResult = await result.Graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Fix: same leak on the device-output failure path.
                result.Graph.Dispose();
                return;
            }

            fileInputNodeResult.FileInputNode
            .AddOutgoingConnection(deviceOutputNodeResult.DeviceOutputNode);

            result.Graph.Start();
        }
Esempio n. 2
0
        // Prepares the audio graph, its device output node, and all countdown/done
        // sound files; silently does nothing if graph or output creation fails.
        public async Task InitializeSounds()
        {
            InputNodes = new ObservableCollection <AudioFileInputNode>();
            FileInputNodesDictionary = new Dictionary <string, AudioFileInputNode>();

            var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

            // Nothing else can be set up without a graph.
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            graph = graphResult.Graph;

            var outputResult = await graph.CreateDeviceOutputNodeAsync();

            // make sure the audio output is available
            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            outputNode = outputResult.DeviceOutputNode;
            graph.ResetAllNodes();

            // Two variants ("-1" / "-2") are loaded for each countdown sound.
            for (int i = 0; i < countdownSoundsNum; i++)
            {
                await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "1" + audioFileExt);
                await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "2" + audioFileExt);
            }

            // "Done" sounds are zero-padded only up to 09.
            for (int j = 0; j < doneSoundsNum; j++)
            {
                await CreateInputNodeFromFile(baseUri + doneBaseFileName + ((j >= 9) ? "" : "0") + (j + 1).ToString() + audioFileExt);
            }

            graph.Start();
        }
Esempio n. 3
0
        // Builds the playback pipeline: packaged audio.mp3 file -> device output.
        private async void initGraph()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            // Fix: the creation status was never checked; on failure result.Graph is
            // null and every call below would throw NullReferenceException.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

            // Fix: removed a FileOpenPicker that was configured but never shown —
            // the input file has always come from GetPackagedFile.
            StorageFile file = await GetPackagedFile(null, "audio.mp3");

            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);

            if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }

            fileInput = fileInputResult.FileInputNode;
            fileInput.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }
Esempio n. 4
0
        // Builds the AudioGraph and attaches a default device output node, reporting
        // each step through NotifyUser and the speaker indicator colour.
        private async Task CreateAudioGraph()
        {
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation      = await AudioGraph.CreateAsync(graphSettings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                // Graph creation failed; surface the status and stop.
                NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = creation.Graph;

            // NOTE(review): this writes to the properties object returned by the
            // graph; confirm it actually reconfigures the created graph's sample rate.
            graph.EncodingProperties.SampleRate = 44100;

            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // No usable output device: flag the speaker indicator red.
                NotifyUser(String.Format("Audio Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = outputCreation.DeviceOutputNode;
            NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
 // Initializes AudioGraph and stores it in the audioGraph field.
 public async Task InitAudioGraph()
 {
     AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
     CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

     // Fix: the result status was never checked; on failure result.Graph is null,
     // so every later use of audioGraph would throw NullReferenceException.
     if (result.Status != AudioGraphCreationStatus.Success)
     {
         return;
     }

     audioGraph = result.Graph;
 }
Esempio n. 6
0
        // Sets up the AudioGraph, a device output node, and the frame input node
        // that feeds it; logs and aborts on any creation failure.
        internal async Task Initialize()
        {
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation      = await AudioGraph.CreateAsync(graphSettings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                Debug.WriteLine(string.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()));
                return;
            }

            _audioGraph = creation.Graph;

            // Create a device output node
            var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                Debug.WriteLine(string.Format("Audio Device Output unavailable because {0}",
                                              outputCreation.Status.ToString()));
                return;
            }

            Debug.WriteLine("Device Output Node successfully created");

            // Wire the frame input node straight into the speaker output.
            CreateFrameInputNode();
            _frameInputNode.AddOutgoingConnection(outputCreation.DeviceOutputNode);
        }
Esempio n. 7
0
        // Creates the audio graph and its device output node.
        // Fix: converted to async Task. The original called GetResults() on the
        // still-running IAsyncOperation, which throws unless the operation has
        // already completed; awaiting is the correct completion wait. Existing
        // statement-style callers (`CreateAudioGraph();`) still compile.
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
        }
Esempio n. 8
0
 // Captures the requested devices and prepares the graph settings; the graph
 // itself is built later (see the commented-out InitializeAsync call removed
 // from the original).
 public AudioEngine(DeviceInformation audioRenderDevice, DeviceInformation audioCaptureDevice)
 {
     // Remember the capture device for when the input node is created.
     inputDevice = audioCaptureDevice;

     // Low-latency, raw (unprocessed) render path on the requested output device.
     audGraphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
     {
         DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw,
         QuantumSizeSelectionMode           = QuantumSizeSelectionMode.LowestLatency,
         PrimaryRenderDevice                = audioRenderDevice,
     };
 }
Esempio n. 9
0
        // Creates the AudioGraph and its device output, reporting each step (or
        // failure reason) to the user via ShowMessage.
        private async Task CreateAudioGraph()
        {
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation      = await AudioGraph.CreateAsync(graphSettings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                await ShowMessage(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()));
                return;
            }

            graph = creation.Graph;

            // Create a device output node
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                await ShowMessage(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()));
                return;
            }

            deviceOutput = outputCreation.DeviceOutputNode;
            await ShowMessage("Device Output Node successfully created");
        }
Esempio n. 10
0
        // Creates the AudioGraph and its device output node, reporting status via
        // rootPage.NotifyUser and colouring the speaker indicator red/green.
        private async Task CreateAudioGraph()
        {
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation      = await AudioGraph.CreateAsync(graphSettings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = creation.Graph;

            // Create a device output node
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutput = outputCreation.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
        // Builds a low-latency graph rendered to the first enumerated output device,
        // plus a microphone (device input) node for capture.
        public async Task CreateAudioGraph()
        {
            // Fix: guard against a missing/empty device list before indexing
            // outputDevices[0], which previously threw on machines with no
            // enumerated render devices.
            if (outputDevices == null || outputDevices.Count == 0)
            {
                MessageDialog md = new MessageDialog("No audio output devices available", "OOPS!!");
                await md.ShowAsync();

                return;
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            settings.PrimaryRenderDevice      = outputDevices[0];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating Audio Graph", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            graph = result.Graph;
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating InputDeviceNode", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
        }
Esempio n. 12
0
        // Records speech-category microphone input into a temporary "speech" file
        // encoded as high-quality MP3, then starts the graph.
        // NOTE(review): async void — failures are silent and unobservable by callers.
        public async void CaptureAudio()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _audioGraph = result.Graph;

            var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Fix: dispose the graph instead of leaking it when node creation fails.
                _audioGraph.Dispose();
                _audioGraph = null;
                return;
            }
            var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            _audioFile = await Windows.Storage.ApplicationData.Current.TemporaryFolder
                         .CreateFileAsync("speech", CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(_audioFile, mediaEncodingProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // Fix: same leak on the file-output failure path.
                _audioGraph.Dispose();
                _audioGraph = null;
                return;
            }
            var fileOutputNode = fileOutputNodeResult.FileOutputNode;

            deviceInputNode.AddOutgoingConnection(fileOutputNode);

            _audioGraph.Start();
        }
Esempio n. 13
0
        // Plays a packaged m4a through an AudioGraph while also binding it to the
        // MediaPlayerElement for transport/metadata events.
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);
            var mediaSource = MediaSource.CreateFromUri(new Uri("ms-appx:///Test/GirlishLover.m4a"));
            await mediaSource.OpenAsync();

            this.mpe.Source = mediaSource;
            this.mpe.MediaPlayer.MediaOpened += this.MediaPlayer_MediaOpened;

            var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Other)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };
            var result = await AudioGraph.CreateAsync(settings);

            // Fix: result.Graph is null when creation fails; bail out instead of
            // throwing NullReferenceException inside this async void handler.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            this.audioGraph = result.Graph;

            this.outNode = this.audioGraph.CreateFrameOutputNode();

            this.fileNode           = (await this.audioGraph.CreateFileInputNodeAsync(await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Test/GirlishLover.m4a")))).FileInputNode;
            this.fileNode.LoopCount = 0;
            this.fileNode.AddOutgoingConnection(this.outNode);
            this.fileNode.FileCompleted    += this.FileNode_FileCompleted;
            this.audioGraph.QuantumStarted += this.AudioGraph_QuantumStarted;

            this.audioGraph.Start();
        }
        // Pipeline setup: graph -> device output -> float frame input node driven
        // by FrameInputNode_QuantumStarted, then the graph is started.
        public async Task Init()
        {
            var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));

            if (creation == null || creation.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            audioGraph = creation.Graph;

            var outputCreation = await audioGraph.CreateDeviceOutputNodeAsync();

            if (outputCreation == null || outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            deviceOutputNode = outputCreation.DeviceOutputNode;

            // 32-bit float, stereo, 44.1 kHz.
            var encoding = new AudioEncodingProperties
            {
                BitsPerSample = 32,
                ChannelCount  = 2,
                SampleRate    = 44100,
                Subtype       = MediaEncodingSubtypes.Float,
            };

            audioFrameInputNode = audioGraph.CreateFrameInputNode(encoding);
            audioFrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;
            audioFrameInputNode.AddOutgoingConnection(deviceOutputNode);

            audioGraph.Start();
        }
Esempio n. 15
0
        /// <summary>
        /// Creates the audio graph output.<br/>
        /// IMPORTANT: Only 32-bit IEEEFloat format is supported!
        /// </summary>
        /// <param name="ChannelCount">The number of channels. Default: 2(Stereo)</param>
        /// <param name="SampleRate">The sample rate. Default: 192000Hz</param>
        /// <returns>An <see cref="AudioGraphOutput"/> wrapping the created graph and its device output node.</returns>
        /// <exception cref="InvalidOperationException">Graph or device output node creation failed.</exception>
        public static async Task <AudioGraphOutput> CreateAudioGraphOutput(uint ChannelCount = 2, uint SampleRate = 192000)
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
                EncodingProperties       = new AudioEncodingProperties()
                {
                    BitsPerSample = 32,
                    ChannelCount  = ChannelCount,
                    SampleRate    = SampleRate,
                    Subtype       = "Float"
                }
            };

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Fix: throw a specific exception type rather than the base Exception
                // (CA2201); catch(Exception) call sites still work.
                throw new InvalidOperationException("AudioGraph creation error: " + result.Status.ToString(), result.ExtendedError);
            }
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await result.Graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Fix: the message previously said "AudioGraph creation error" for a
                // device-output failure; report the step that actually failed.
                throw new InvalidOperationException("Device output node creation error: " + deviceOutputNodeResult.Status.ToString(), deviceOutputNodeResult.ExtendedError);
            }

            return(new AudioGraphOutput(result.Graph, deviceOutputNodeResult.DeviceOutputNode));
        }
        /// <summary>
        ///     Creates an instance of AudioGraph and sets io_progress
        /// </summary>
        public async Task <CreateAudioGraphResult> Init(
            Progress <double> progress)
        {
            // Remember the progress sink used to report input/output work.
            _ioProgress = progress;

            var settings = new AudioGraphSettings(AudioRenderCategory.Media);

            // Tear down any graph left over from an earlier Init call.
            if (_audioGraph != null)
            {
                _audioGraph.Dispose();
                _audioGraph = null;
            }

            var result = await AudioGraph.CreateAsync(settings);

            // Keep the graph only on success; _audioGraph stays null otherwise.
            if (result.Status == AudioGraphCreationStatus.Success)
            {
                _audioGraph = result.Graph;
            }

            // Hand the raw result back so the caller can inspect failures.
            return(result);
        }
Esempio n. 17
0
        // Builds the shared AudioGraph: a submix node feeding the device output.
        // Throws InvalidOperationException when either creation step fails.
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Fix: specific exception type with a diagnosable message instead of
                // the original new Exception("error").
                throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
            }

            _graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException($"Device output node creation failed: {deviceOutputResult.Status}");
            }

            // Fix: create the submix only once the output node is known to exist,
            // instead of before the status check.
            _subMixNode = _graph.CreateSubmixNode();

            _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
            _subMixNode.AddOutgoingConnection(_deviceOutputNode);
        }
Esempio n. 18
0
        // create the audio graph and output
        private async void InitAudioGraph()
        {
            // Request the lowest latency the devices in the graph can provide.
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            // create the audio graph
            var creation = await AudioGraph.CreateAsync(graphSettings);

            _graph = creation.Graph;
            if (_graph == null)
            {
                // failed to create audio graph
                await new MessageDialog("Failed to create audio graph").ShowAsync();
                return;
            }

            // create the output. You could also create file output here to stream to a temp file or similar
            var outputCreation = await _graph.CreateDeviceOutputNodeAsync();

            _deviceOutput = outputCreation.DeviceOutputNode;
            if (_deviceOutput == null)
            {
                // failed to create audio output
                await new MessageDialog("Failed to create device output").ShowAsync();
                return;
            }

            // load all of the samples into graph nodes
            BuildFileNodes();

            // start playback
            _graph.Start();
        }
Esempio n. 19
0
        // Creates the graph and its speaker output, logging every failure through
        // the Logging singleton.
        private async Task CreateAudioGraph()
        {
            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation      = await AudioGraph.CreateAsync(graphSettings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                Logging.SingleInstance.LogMessage("AudioGraph Creation Error because " + creation.Status);
                return;
            }

            graph = creation.Graph;

            // Create a device output node
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                Logging.SingleInstance.LogMessage(String.Format("Device Output unavailable because {0}", outputCreation.Status.ToString()));
                return;
            }

            deviceOutput = outputCreation.DeviceOutputNode;
            Logging.SingleInstance.LogMessage("Device Output Node successfully created");
        }
Esempio n. 20
0
        // Builds the full audio pipeline: enumerates render devices, creates the
        // graph, hooks its quantum events, then creates the input/output devices and
        // the recording tap. Raises Ready once everything is wired up.
        // NOTE(review): async void — an exception thrown here (including the
        // ApplicationException below) cannot be observed by any caller; consider
        // returning Task. TODO confirm no call site depends on the void signature.
        private async void Create()
        {
            OutputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            var audioGraphResult = await AudioGraph.CreateAsync(settings);

            if (audioGraphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new ApplicationException($"Audio graph error: {audioGraphResult.Status}");
            }

            Graph = audioGraphResult.Graph;
            // Forward the graph's quantum callbacks to instance-level handlers.
            Graph.QuantumProcessed += (audioGraph, e) => AudioGraphQuantumProcessed();
            Graph.QuantumStarted   += (audioGraph, e) => AudioGraphQuantumStarted();

            // ConfigureAwait(true) keeps continuations on the captured context.
            InputDevice = await CreateInputDevice().ConfigureAwait(true);

            OutputDevice = await CreateOutputDevice().ConfigureAwait(true);

            RecordingOutputNode = CreateFrameOutputNode();

            Ready?.Invoke(this, EventArgs.Empty);
        }
Esempio n. 21
0
        // Builds the graph, its output node, and the effect chain. Returns false
        // (with LastStatus describing the failing step) when any creation fails.
        public async Task <bool> InitializeAudioGraphAsync()
        {
            var settings    = new AudioGraphSettings(AudioRenderCategory.Media);
            var graphResult = await AudioGraph.CreateAsync(settings);

            LastStatus = graphResult.Status.ToString();
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                return(false);
            }

            _audioGraph = graphResult.Graph;
            _audioGraph.UnrecoverableErrorOccurred += (sender, args) => OnErrorOccurred(args);

            var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();

            LastStatus = outputCreation.Status.ToString();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // A graph without an output node is useless; release it.
                _audioGraph.Dispose();
                return(false);
            }

            _outputNode = outputCreation.DeviceOutputNode;

            // Pre-build the audio effects so they can be toggled later.
            CreateEchoEffect();
            CreateLimiterEffect();
            CreateReverbEffect();
            CreateEqualizerEffect();

            return(true);
        }
Esempio n. 22
0
        // Builds the graph and output node, loads every mapped sound file, and
        // starts the graph. Returns true only when the whole pipeline is usable.
        public async Task <bool> InitializeSound()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return(false);
            }

            graph = result.Graph;
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            // Fix: the original reported success (isInitialized = true) even when the
            // device output node could not be created and no sounds were loaded.
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return(false);
            }

            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (var soundMapping in soundMappings)
            {
                await AddFileToSoundDictionary("ms-appx:///Features/Game/Assets/" + soundMapping.Value);
            }

            graph.Start();

            isInitialized = true;
            return(isInitialized);
        }
Esempio n. 23
0
        // Lazily creates an AudioFrameReader over the Kinect ("xbox nui sensor")
        // microphone array, then opens it. Returns null when no Kinect mic is found
        // or any graph/input-node creation step fails.
        public IAsyncOperation <AudioFrameReader> OpenAudioFrameReaderAsync()
        {
            return(Task.Run(async() =>
            {
                if (AudioReader == null)
                {
                    // Enumerate all capture devices and find the Kinect mic array by name.
                    var microphones = await DeviceInformation.FindAllAsync(DeviceInformation.GetAqsFilterFromDeviceClass(DeviceClass.AudioCapture));
                    var kinectMicArray = microphones.FirstOrDefault(mic => mic.Name.ToLowerInvariant().Contains("xbox nui sensor"));

                    if (kinectMicArray != null)
                    {
                        //TODO: review parameters
                        // 16 kHz, 4-channel, 32-bit float PCM; low latency, raw (unprocessed) path.
                        var settings = new AudioGraphSettings(AudioRenderCategory.Speech);
                        settings.EncodingProperties = AudioEncodingProperties.CreatePcm(16000, 4, 32);
                        settings.EncodingProperties.Subtype = MediaEncodingSubtypes.Float;
                        settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
                        settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw;

                        var audioGraphResult = await AudioGraph.CreateAsync(settings);
                        if (audioGraphResult.Status == AudioGraphCreationStatus.Success)
                        {
                            var inputNodeResult = await audioGraphResult.Graph.CreateDeviceInputNodeAsync(MediaCategory.Speech, audioGraphResult.Graph.EncodingProperties, kinectMicArray);

                            if (inputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
                            {
                                // The frame output node is what the reader pulls raw audio frames from.
                                var output = audioGraphResult.Graph.CreateFrameOutputNode(audioGraphResult.Graph.EncodingProperties);
                                AudioReader = new AudioFrameReader(audioGraphResult.Graph, output);
                            }
                        }
                    }
                }
                // No-op when creation failed above (AudioReader still null).
                AudioReader?.Open();
                return AudioReader;
            }).AsAsyncOperation());
        }
Esempio n. 24
0
        // Init the AudioGraph
        //  despite the Aync methods - this will exec synchronously to get the InitPhase  only get done when all is available
        // Synchronously (re)builds _audioGraph and _deviceOutputNode by blocking on
        // the WinRT async operations; sets _canPlay = false on any failure.
        // NOTE(review): Task.Wait() here can deadlock if this runs on a thread whose
        // synchronization context the continuations need — confirm callers invoke
        // this off the UI thread.
        private void InitAudioGraph( )
        {
            LOG.Log("InitAudioGraph: Begin");
            if (!_canPlay)
            {
                LOG.Log("InitAudioGraph: Canceled with _canPlay = false");
                return; // cannot even try..
            }

            // MUST WAIT UNTIL all items are created, else one may call Play too early...
            // cleanup existing items
            if (_deviceOutputNode != null)
            {
                _deviceOutputNode.Dispose( ); _deviceOutputNode = null;
            }
            if (_audioGraph != null)
            {
                _audioGraph.Dispose( ); _audioGraph = null;
            }

            // Create an AudioGraph
            AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
            {
                PrimaryRenderDevice    = null, // If PrimaryRenderDevice is null, the default playback device will be used.
                MaxPlaybackSpeedFactor = 2,    // should preserve some memory
            };
            // We await here the execution without providing an async method ...
            var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));

            resultAG.Wait( );
            if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
            {
                // Graph creation failed: log both the WinRT status and the task status.
                LOG.LogError($"InitAudioGraph: Failed to create AudioGraph with RenderCategory: {_renderCat}");
                LOG.LogError($"InitAudioGraph: AudioGraph creation: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
                             + $"\nExtError: {resultAG.Result.ExtendedError}");
                _canPlay = false;
                return;
            }
            _audioGraph = resultAG.Result.Graph;
            LOG.Log($"InitAudioGraph: AudioGraph: [{_audioGraph.EncodingProperties}]");

            // Create a device output node
            // The output node uses the PrimaryRenderDevice of the audio graph.
            // We await here the execution without providing an async method ...
            var resultDO = WindowsRuntimeSystemExtensions.AsTask(_audioGraph.CreateDeviceOutputNodeAsync());

            resultDO.Wait( );
            if (resultDO.Result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                LOG.LogError($"InitAudioGraph: DeviceOutputNode creation: {resultDO.Result.Status}, TaskStatus: {resultDO.Status}"
                             + $"\nExtError: {resultDO.Result.ExtendedError}");
                _canPlay = false;
                return;
            }
            _deviceOutputNode = resultDO.Result.DeviceOutputNode;
            LOG.Log($"InitAudioGraph: DeviceOutputNode: [{_deviceOutputNode.Device}]");
            LOG.Log($"InitAudioGraph: InitAudioGraph-END");
        }
Esempio n. 25
0
        /// <summary>
        /// Builds the AudioGraph, its device output node and all file input nodes for
        /// the four sound banks, then initializes effects and starts the graph.
        /// Returns silently (leaving the graph unstarted) if graph or device-output
        /// creation fails.
        /// </summary>
        public async Task InitializeSounds()
        {
            soundBankInitializer = new SoundBanksInitializer();

            POneInputNodes = new ObservableCollection <AudioFileInputNode>();
            PTwoInputNodes = new ObservableCollection <AudioFileInputNode>();
            WOneInputNodes = new ObservableCollection <AudioFileInputNode>();
            WTwoInputNodes = new ObservableCollection <AudioFileInputNode>();

            InputNodesList = new ObservableCollection <ObservableCollection <AudioFileInputNode> >
            {
                POneInputNodes,
                PTwoInputNodes,
                WOneInputNodes,
                WTwoInputNodes,
            };

            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            graph = result.Graph;

            // create the output device
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            // make sure the audio output is available
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (SoundBank soundBank in soundBankInitializer.SoundBanks)
            {
                // Fix: IndexOf is an O(n) scan; the original re-ran it for every file.
                // Hoist it so the target collection is resolved once per sound bank.
                var bankNodes = InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)];

                // FileNames[0]: samples played with the default loop count.
                foreach (string fileName in soundBank.FileNames[0])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    bankNodes.Add(FileInputNodesDictionary[fileName]);
                }

                // FileNames[1]: samples set to loop indefinitely (LoopCount = null).
                foreach (string fileName in soundBank.FileNames[1])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    FileInputNodesDictionary[fileName].LoopCount = null;
                    bankNodes.Add(FileInputNodesDictionary[fileName]);
                }
            }

            InitializeEffects();
            graph.Start();
        }
Esempio n. 26
0
        /// <summary>
        /// Creates a lowest-latency AudioGraph on the user-selected render device,
        /// then a device output node and a default device input node, reporting each
        /// step through the sample UI. Enables the file-output button on success.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
                // ListBox item 0 is a placeholder entry, hence the -1 offset.
                PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1],
            };

            var creation = await AudioGraph.CreateAsync(settings);
            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = creation.Graph;
            rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

            // Output node on the graph's PrimaryRenderDevice.
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();
            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
                outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = outputCreation.DeviceOutputNode;
            rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
            outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Input node on the default audio capture device.
            var inputCreation = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
            if (inputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", inputCreation.Status.ToString()), NotifyType.ErrorMessage);
                inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceInputNode = inputCreation.DeviceInputNode;
            rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
            inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Graph is ready: allow picking a file output, prevent re-creating the graph.
            fileButton.IsEnabled = true;
            createGraphButton.IsEnabled = false;

            // Lowest-latency graphs must handle device disconnection errors.
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Esempio n. 27
0
        // Find a valid AudioGraph RenderCategory
        // this should leave _renderCat with a valid one or _renderNone
        //
        // Strategy: walk a fixed preference list of categories, creating (and
        // immediately disposing) a throwaway AudioGraph for each; the first category
        // that succeeds is kept in _renderCat. The sentinel _renderNone terminates
        // the search; on total failure _canSpeak is cleared.
        private void FindRenderCategory( )
        {
            // A list of tryouts for the output rendering
            Queue <AudioRenderCategory> renderSequence = new Queue <AudioRenderCategory>(new [] {
                AudioRenderCategory.Speech,
                AudioRenderCategory.GameChat,
                AudioRenderCategory.GameEffects,
                AudioRenderCategory.SoundEffects,
                AudioRenderCategory.Media,
                AudioRenderCategory.Other,
                // Finally the Not Available Cat
                _renderNone,
            });

            _renderCat = renderSequence.Dequeue( );

            // Try a cat that works
            do
            {
                // Create an AudioGraph
                AudioGraphSettings settings = new AudioGraphSettings(_renderCat)
                {
                    PrimaryRenderDevice = null, // If PrimaryRenderDevice is null, the default playback device will be used.
                };
                LOG.Log($"FindRenderCategory: About to test AudioGraph with RenderCategory: {_renderCat}");
                // We await here the execution without providing an async method ...
                // NOTE(review): blocks on the WinRT async op via Wait()/Result — safe only
                // off the UI/dispatcher thread; confirm callers never run this on it.
                var resultAG = WindowsRuntimeSystemExtensions.AsTask(AudioGraph.CreateAsync(settings));
                resultAG.Wait( );
                if (resultAG.Result.Status != AudioGraphCreationStatus.Success)
                {
                    LOG.LogError($"FindRenderCategory: AudioGraph test error: {resultAG.Result.Status}, TaskStatus: {resultAG.Status}"
                                 + $"\nExtError: {resultAG.Result.ExtendedError}");

                    // try next category if there is one left
                    if (renderSequence.Count > 0)
                    {
                        _renderCat = renderSequence.Dequeue( );
                    }
                    else
                    {
                        // sanity - should never happen
                        // (the queue always ends with _renderNone, which exits the loop below)
                        LOG.LogError($"FindRenderCategory: Program error - Queue overrun");
                        _renderCat = _renderNone;
                        return;
                    }
                }
                else
                {
                    resultAG.Result.Graph?.Dispose( ); // not used after tryout
                    LOG.Log($"FindRenderCategory: Success with RenderCategory: {_renderCat}");
                    return;                            // _renderCat contains a successful one
                }
            } while (_renderCat != _renderNone); // dequeuing the sentinel ends the loop without testing it

            LOG.LogError($"FindRenderCategory: Failed to find a working RenderCategory - cannot speak");
            _canSpeak = false;
            return; // could not resolve - left with _renderNone
        }
Esempio n. 28
0
        /// <summary>
        /// Builds a duplex audio pipeline on a 22.05 kHz mono 16-bit PCM graph:
        /// a frame input node feeds the device output, and the device input feeds a
        /// frame output node. The frame input node starts in the stopped state while
        /// the graph itself is started. Returns silently if any creation step fails.
        /// </summary>
        private async Task InitializeAudioAsync()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16)
            };

            var graphResult = await AudioGraph.CreateAsync(settings);
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            _graph = graphResult.Graph;

            // Speaker side: default render device.
            var outputResult = await _graph.CreateDeviceOutputNodeAsync();
            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceOutputNode = outputResult.DeviceOutputNode;

            // Microphone side: default capture device.
            var inputResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceInputNode = inputResult.DeviceInputNode;

            // Frame nodes use the graph's own format, but explicitly mono.
            var monoProperties = _graph.EncodingProperties;
            monoProperties.ChannelCount = 1;

            _frameInputNode = _graph.CreateFrameInputNode(monoProperties);
            _frameInputNode.AddOutgoingConnection(_deviceOutputNode);

            _frameOutputNode = _graph.CreateFrameOutputNode(monoProperties);
            _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

            // Keep the frame input node stopped until samples are needed;
            // node_QuantumStarted produces the data when the node runs.
            _frameInputNode.Stop();
            _frameInputNode.QuantumStarted += node_QuantumStarted;

            _graph.QuantumProcessed += GraphOnQuantumProcessed;

            // The graph runs continuously; only the frame input node is started/stopped later.
            _graph.Start();
        }
Esempio n. 29
0
        /// <summary>
        /// (Re)initializes the capture pipeline: stops any previous session, then
        /// builds an AudioGraph that wires the default communications input device
        /// into a frame output node so raw audio frames can be pulled each quantum.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown when graph or device-input-node creation reports a non-success status.
        /// </exception>
        async Task Init()
        {
            // Fix: the original wrapped this whole body in try { ... } catch { throw; },
            // a no-op rethrow — removed with no behavioral change.
            await Stop();

            var pcmEncoding = AudioEncodingProperties.CreatePcm((uint)SampleRate, (uint)ChannelCount, (uint)BitsPerSample);
            // apparently this is not _really_ used/supported here, as the audio data seems to come thru as floats (so basically MediaEncodingSubtypes.Float?)
            pcmEncoding.Subtype = MediaEncodingSubtypes.Pcm;

            var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                EncodingProperties = pcmEncoding,
                DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw
                                                     // these do not seem to take effect on certain hardware and MSFT recommends SystemDefault when recording to a file anyway
                                                     //	We'll buffer audio data ourselves to improve RMS calculation across larger samples
                                                     //QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
                                                     //DesiredSamplesPerQuantum = 4096
            };

            // create our audio graph... this will be a device input node feeding audio data into a frame output node
            var graphResult = await AudioGraph.CreateAsync(graphSettings);
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception($"AudioGraph.CreateAsync() returned non-Success status: {graphResult.Status}");
            }

            audioGraph = graphResult.Graph;

            // take input from whatever the default communications device is set to me on windows
            var inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Communications, pcmEncoding);
            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception($"audioGraph.CreateDeviceInputNodeAsync() returned non-Success status: {inputResult.Status}");
            }

            // create the output node
            outputNode = audioGraph.CreateFrameOutputNode(pcmEncoding);

            // wire the input to the output
            inputResult.DeviceInputNode.AddOutgoingConnection(outputNode);

            // Attach to QuantumStarted event in order to receive synchronous updates from audio graph (to capture incoming audio)
            audioGraph.QuantumStarted             += Graph_QuantumStarted;
            audioGraph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Esempio n. 30
0
        /// <summary>
        /// Initializes the shared audio graph, its device output node and the
        /// MediaCapture microphone. Loads the beep sample on first call.
        /// </summary>
        public static async Task AudioDevices()
        {
            if (beep == null)
            {
                beep = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/Audio/beep.wav"));
            }

            DeviceInformationCollection devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(Windows.Media.Devices.MediaDevice.GetAudioRenderSelector());

            // null => AudioGraph uses the default render device.
            DeviceInformation selectedDevice = null;

            // Sometimes usb mics are confused as speakers.. so add check to make Speakers (Raspberry Pi 2 audio) the default <-- Confirmed works on rpi3
            //for (int i = 0; i <= devices.Count; i++)
            //{
            //    if (i != devices.Count)
            //    {
            //        if (DeviceTypeInformation.IsRaspberryPi)
            //        {
            //            if (devices[i].Name == "Speakers (Raspberry Pi 2 audio)")
            //            {
            //                selectedDevice = devices[i];
            //                break;
            //            }
            //        }
            //        else
            //        {
            //            selectedDevice = devices[i];
            //            break;
            //        }
            //    }
            //}

            settings = new AudioGraphSettings(AudioRenderCategory.Media);
            settings.PrimaryRenderDevice = selectedDevice;
            CreateAudioGraphResult resultg = await AudioGraph.CreateAsync(settings);

            // Fix: the original used resultg.Graph without checking Status, which
            // dereferences null when graph creation fails.
            if (resultg.Status != AudioGraphCreationStatus.Success)
            {
                Debug.WriteLine("AudioGraph creation failed: " + resultg.Status);
                return;
            }

            audioflow = resultg.Graph;

            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await audioflow.CreateDeviceOutputNodeAsync();

            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await audioflow.CreateDeviceInputNodeAsync(MediaCategory.Media);

            // Fix: only take the output node when its creation succeeded.
            if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
            {
                deviceOuput = deviceOutputNodeResult.DeviceOutputNode;
            }
            else
            {
                Debug.WriteLine("Device output node creation failed: " + deviceOutputNodeResult.Status);
            }

            try
            {
                _mediaCapture = new MediaCapture();
                await _mediaCapture.InitializeAsync();

                _mediaCapture.Failed += _mediaCapture_Failed;
                _mediaCapture.AudioDeviceController.VolumePercent = 0.75f;
            }
            catch
            {
                Debug.WriteLine("Failed to setup microphone is one connected?");
            }
        }
Esempio n. 31
0
        /// <summary>
        /// Creates the media AudioGraph and its default device output node.
        /// Returns silently (leaving the fields null) if either creation fails.
        /// </summary>
        public async Task InitializeAudioGraph()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            // Fix: the original dereferenced result.Graph without checking Status,
            // turning a recoverable creation failure into a NullReferenceException.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            this._audioGraph = result.Graph;

            CreateAudioDeviceOutputNodeResult outputDeviceNodeResult = await this._audioGraph.CreateDeviceOutputNodeAsync();

            // Fix: likewise guard the device-output result before taking the node.
            if (outputDeviceNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceOutputNode = outputDeviceNodeResult.DeviceOutputNode;
        }
Esempio n. 32
0
		/// <summary>
		/// Creates a default media AudioGraph with a device output node and starts it.
		/// Returns silently if graph or output-node creation fails.
		/// </summary>
		private async Task CreateAudioGraph()
		{
			var settings = new AudioGraphSettings(AudioRenderCategory.Media);
			var result = await AudioGraph.CreateAsync(settings);
			// Fix: the original dereferenced result.Graph without checking Status,
			// turning a recoverable failure into a NullReferenceException.
			if (result.Status != AudioGraphCreationStatus.Success)
			{
				return;
			}
			_graph = result.Graph;
			var deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
			// Fix: likewise guard the output-node result before using it.
			if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			{
				return;
			}
			_deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
			_graph.ResetAllNodes();
			_graph.Start();
		}
Esempio n. 33
0
        /// <summary>
        /// Creates a SoundEffects-category AudioGraph, storing it in
        /// <c>audioGraph</c> only when creation succeeds.
        /// </summary>
        async Task createAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects);
            var creation = await AudioGraph.CreateAsync(settings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                return; // leave audioGraph untouched on failure
            }

            audioGraph = creation.Graph;
        }
Esempio n. 34
0
        /// <summary>
        /// Creates a media-category AudioGraph for file playback.
        /// </summary>
        /// <returns>The successfully created graph.</returns>
        /// <exception cref="InvalidOperationException">When graph creation fails.</exception>
        private static async Task<AudioGraph> CreateAudioGraph()
        {
            // we will only play files that's why we are using AudioRenderCategory.Media
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            var result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Fix: throw the specific InvalidOperationException instead of the bare
                // base Exception type; existing catch (Exception) handlers still match.
                throw new InvalidOperationException("Audio graph unavailable.");
            }

            return result.Graph;
        }
Esempio n. 35
0
        /// <summary>
        /// Builds a GameEffects AudioGraph with the default device output and starts it.
        /// </summary>
        private async Task InitializeAsync()
        {
            // NOTE(review): neither creation result's Status is checked; a failure
            // would surface as a NullReferenceException below — confirm intended.
            var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.GameEffects);

            var graphResult = await AudioGraph.CreateAsync(settings);
            _graph = graphResult.Graph;

            var outputResult = await _graph.CreateDeviceOutputNodeAsync();
            _deviceOutput = outputResult.DeviceOutputNode;

            _graph.ResetAllNodes();
            _graph.Start();
        }
Esempio n. 36
0
        // Sets up the synthesizer's audio pipeline (AudioGraph -> FrameInputNode ->
        // device output) on background continuations; signals the outcome through
        // EmitReady / EmitFailed rather than by returning a value.
        void ISynthesizer.SetUp()
        {
            // Creates and immediately disposes a WavePlayer — presumably to warm up /
            // probe the audio stack before building the graph; TODO confirm intent.
            using (WavePlayer player = WavePlayer.CreateWavePlayer())
            {
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            // NOTE(review): the continuations read .Result directly; if CreateAsync
            // faults, that throws inside the continuation — confirm this is acceptable.
            AudioGraph.CreateAsync(settings).AsTask().ContinueWith(graphTask =>
            {
                CreateAudioGraphResult graphResult = graphTask.Result;

                if(graphResult.Status != AudioGraphCreationStatus.Success)
                {
                    this.EmitFailed();
                }
                else
                {
                    graphResult.Graph.CreateDeviceOutputNodeAsync().AsTask().ContinueWith(nodeTask =>
                    {
                        CreateAudioDeviceOutputNodeResult nodeResult = nodeTask.Result;

                        if(nodeResult.Status != AudioDeviceNodeCreationStatus.Success)
                        {
                            this.EmitFailed();
                        }
                        else
                        {
                            // Wire the sample-producing frame input node to the speakers.
                            _audioGraph = graphResult.Graph;
                            _frameInputNode = _audioGraph.CreateFrameInputNode();
                            _frameInputNode.AddOutgoingConnection(nodeResult.DeviceOutputNode);
                            // OnQuantumStarted supplies audio frames on demand.
                            _frameInputNode.QuantumStarted += this.OnQuantumStarted;
                            _channelsNumber = _audioGraph.EncodingProperties.ChannelCount;
                            _waveSource = new WaveSource(_audioGraph.EncodingProperties.SampleRate, _channelsNumber);
                            this.EmitReady();
                        }
                    });
                }
            });
        }
        // Page bootstrap: opens a MIDI input port, builds a low-latency audio graph
        // with a mono frame input node, and precomputes the MIDI note->frequency table.
        // (async void is acceptable here: top-level event handler.)
        private async void Page_Loaded(object sender, RoutedEventArgs e)
        {
            // midi

            var s = MidiInPort.GetDeviceSelector();
            var information = await DeviceInformation.FindAllAsync(s);

            var list = information.ToList();
            // NOTE(review): hard-coded index — assumes the desired MIDI device is the
            // third one enumerated; confirm, and guard against fewer devices.
            port = await MidiInPort.FromIdAsync(list.ElementAt(2).Id);
            port.MessageReceived += Port_MessageReceived;

            // audio
            var settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            var creation = await AudioGraph.CreateAsync(settings);

            // NOTE(review): creation/output statuses are not checked; a failure here
            // surfaces later as a NullReferenceException.
            graph = creation.Graph;
            output = await graph.CreateDeviceOutputNodeAsync();

            // Mono frame input node at the graph's own sample rate, kept stopped
            // until notes are played; samples are produced in Input_QuantumStarted.
            var encoding = graph.EncodingProperties;
            encoding.ChannelCount = 1;
            input = graph.CreateFrameInputNode(encoding);
            input.AddOutgoingConnection(output.DeviceOutputNode);
            input.Stop();

            input.QuantumStarted += Input_QuantumStarted;

            graph.Start();

            // midi notes (pitch to note)

            // Equal temperament: f(x) = (440/32) * 2^((x-9)/12).
            // NOTE(review): loop fills indices 0..126 only; notes[127] stays at its
            // default — confirm whether note 127 should be included.
            float a = 440; // a is 440 hz...
            for (int x = 0; x < 127; ++x)
            {
                notes[x] = (a / 32f) * (float)Math.Pow(2f, ((x - 9f) / 12f));
            }
        }
        /// <summary>
        /// Creates a default media AudioGraph plus its device output node,
        /// reporting progress and failures through the sample UI.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation = await AudioGraph.CreateAsync(settings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = creation.Graph;

            // Output node on the default render device.
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = outputCreation.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
Esempio n. 39
0
        // Workaround: the C++ AudioGraph cannot be started from the UI thread because
        // of its blocking calls, so the graph is created here and handed to the plugin.
        private static async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                return; // already created for this process
            }

            // Default-settings media graph, bound to this process.
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return; // graph creation failed
            }

            graph = result.Graph;
            // Hand the bound graph to the mic plugin so the current process can hear audio.
            CheckForErrorOnCall(MicInitializeDefaultWithGraph((int)streamType, graph));
        }
        /// <summary>
        /// Builds a media AudioGraph whose device output is fed through a submix node
        /// carrying an (initially disabled) echo effect, then enables the file pickers.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            var creation = await AudioGraph.CreateAsync(settings);

            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = creation.Graph;

            // Output node on the default render device.
            var outputCreation = await graph.CreateDeviceOutputNodeAsync();

            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", outputCreation.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = outputCreation.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Route all inputs through a single submix so one effect chain covers them.
            submixNode = graph.CreateSubmixNode();
            subMixNode.Background = new SolidColorBrush(Colors.Green);
            submixNode.AddOutgoingConnection(deviceOutputNode);

            echoEffect = new EchoEffectDefinition(graph)
            {
                WetDryMix = 0.7f,
                Feedback = 0.5f,
                Delay = 500.0f,
            };
            submixNode.EffectDefinitions.Add(echoEffect);

            // Start with the echo disabled; the UI toggle switch enables it.
            submixNode.DisableEffectsByDefinition(echoEffect);

            // OutgoingGain on the submix attenuates everything routed through it.
            submixNode.OutgoingGain = 0.5;

            // Graph ready — allow the user to load files.
            fileButton1.IsEnabled = true;
            fileButton2.IsEnabled = true;
        }
Esempio n. 41
0
        /// <summary>
        /// Creates the capture AudioGraph: default audio input device wired into a
        /// frame output node whose data is consumed in AudioGraph_QuantumProcessed.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            // Quantum sizing / processing options live in the settings object.
            var settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired,
                DesiredSamplesPerQuantum = desiredSamples,
                DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Default,
            };

            var creation = await AudioGraph.CreateAsync(settings);
            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                ShowErrorMessage(string.Format("AudioGraph Creation Error because {0}", creation.Status.ToString()));
                return;
            }
            graph = creation.Graph;

            // Default audio input device feeds the graph.
            var inputCreation = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
            if (inputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                ShowErrorMessage(string.Format("Audio Device Input unavailable because {0}", inputCreation.Status.ToString()));
                return;
            }
            deviceInputNode = inputCreation.DeviceInputNode;

            // Frame output node hands captured samples to the processing callback.
            frameOutputNode = graph.CreateFrameOutputNode();
            graph.QuantumProcessed += AudioGraph_QuantumProcessed;

            deviceInputNode.AddOutgoingConnection(frameOutputNode);

            // Because we are using lowest latency setting, we need to handle device disconnection errors
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Esempio n. 42
0
        /// <summary>
        /// Creates the communications-category AudioGraph with a fixed mono 16-bit
        /// PCM encoding at the configured sampling rate.
        /// </summary>
        /// <exception cref="Exception">When graph creation reports a non-success status.</exception>
        private async Task InitAudioGraph()
        {
            // Don't modify DesiredSamplesPerQuantum! If you do, change KQuantumSize accordingly!
            var settings = new AudioGraphSettings(AudioRenderCategory.Communications)
            {
                EncodingProperties = AudioEncodingProperties.CreatePcm((uint) _samplingRate, 1, 16)
            };

            var creation = await AudioGraph.CreateAsync(settings);
            if (creation.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception(creation.Status.ToString());
            }

            _audioGraph = creation.Graph;
        }
Esempio n. 43
0
		/// <summary>
		/// Required before starting recording: builds the lowest-latency AudioGraph on
		/// the first render device and creates the default device input node.
		/// </summary>
		/// <returns>An empty string on success, otherwise an error description.</returns>
		private async Task<string> CreateAudioGraphAsync()
		{
			// var inputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector()); // LOLLO TEST

			_outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
			if (_outputDevices == null || _outputDevices.Count < 1)
			{
				return "AudioGraph Creation Error: no output devices found";
			}

			var graphSettings = new AudioGraphSettings(AudioRenderCategory.Media)
			{
				QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
				PrimaryRenderDevice = _outputDevices[0]
			};

			var graphCreation = await AudioGraph.CreateAsync(graphSettings);
			if (graphCreation.Status != AudioGraphCreationStatus.Success)
			{
				return string.Format("AudioGraph Creation Error because {0}", graphCreation.Status.ToString());
			}
			_audioGraph = graphCreation.Graph;
			// Because we are using lowest latency setting, we need to handle device disconnection errors
			_audioGraph.UnrecoverableErrorOccurred += OnGraph_UnrecoverableErrorOccurred;

			//// Create a device output node // away, so we get no echo
			//CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
			//if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			//{
			//	// Cannot create device output node
			//	return string.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString());
			//}
			//_deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

			// Input node on the default audio capture device.
			var inputCreation = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
			if (inputCreation.Status != AudioDeviceNodeCreationStatus.Success)
			{
				return string.Format("Audio Device Input unavailable because {0}", inputCreation.Status.ToString());
			}
			_deviceInputNode = inputCreation.DeviceInputNode;

			//// LOLLO set the volume, rather useless coz it is like a mixer and the default value is 1.
			//if (_deviceOutputNode.OutgoingGain < 1.0) _deviceOutputNode.OutgoingGain = 1.0;
			//if (_deviceInputNode.OutgoingGain < 1.0) _deviceInputNode.OutgoingGain = 1.0;

			return string.Empty;
		}
        /// <summary>
        /// Creates an AudioGraph rendering to the output device selected in
        /// <c>outputDevicesListBox</c>, wiring up a device output node and a
        /// default device input node, and enables the file-output UI on success.
        /// </summary>
        /// <remarks>
        /// Uses <see cref="QuantumSizeSelectionMode.LowestLatency"/>, so device
        /// disconnection must be handled via <c>UnrecoverableErrorOccurred</c>.
        /// </remarks>
        private async Task CreateAudioGraph()
        {
            // Index 0 of the list box is assumed to be a placeholder entry
            // ("select a device"), hence SelectedIndex - 1 below. Guard against
            // no selection / placeholder selection to avoid ArgumentOutOfRangeException.
            if (outputDevicesListBox.SelectedIndex < 1)
            {
                rootPage.NotifyUser("Please select an output device first", NotifyType.ErrorMessage);
                return;
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;
            rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
            outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
            rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
            inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Since graph is successfully created, enable the button to select a file output
            fileButton.IsEnabled = true;

            // Disable the graph button to prevent accidental click
            createGraphButton.IsEnabled = false;

            // Because we are using lowest latency setting, we need to handle device disconnection errors
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
Esempio n. 45
0
        /// <summary>
        /// Creates an AudioGraph with default media-category settings and a
        /// device output node, storing both in <c>graph</c> and <c>deviceOutputNode</c>.
        /// </summary>
        /// <remarks>
        /// Fixed: the original called <c>GetResults()</c> directly on the
        /// IAsyncOperation returned by <c>CreateAsync</c>/<c>CreateDeviceOutputNodeAsync</c>.
        /// GetResults throws when the operation has not completed yet, so the
        /// operations must be awaited. The method now returns a Task so callers
        /// can observe completion and exceptions (backward compatible: existing
        /// fire-and-forget call sites still compile).
        /// </remarks>
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
        }
        /// <summary>
        /// Creates an AudioGraph with a device output node and a mono
        /// AudioFrameInputNode whose samples are supplied on demand via the
        /// <c>QuantumStarted</c> event, then starts the graph.
        /// </summary>
        /// <remarks>
        /// Fixed: the original fell through after a device-output-node creation
        /// failure and dereferenced <c>deviceOutputNodeResult.DeviceOutputNode</c>
        /// (null on failure); a <c>return</c> was added to that branch.
        /// </remarks>
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node; bail out instead of
                // dereferencing the (null) DeviceOutputNode below.
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;

            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
Esempio n. 47
0
        /// <summary>
        /// Builds an AudioGraph that plays the packaged "audio.mp3" file through
        /// the default output device: graph -> file input node -> device output node.
        /// </summary>
        /// <remarks>
        /// Fixed: every Create*Async result is now checked for Success before its
        /// node is dereferenced (the original would throw NullReferenceException
        /// on any creation failure). An unused FileOpenPicker that was configured
        /// but never shown was removed as dead code. NOTE(review): kept
        /// <c>async void</c> to preserve the signature; presumably called from an
        /// event handler — confirm, otherwise prefer <c>async Task</c>.
        /// </remarks>
        private async void initGraph() {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }
            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

            // Load the packaged audio file and feed it into the graph
            StorageFile file = await GetPackagedFile(null, "audio.mp3");
            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
            if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // Cannot read the input file
                return;
            }
            fileInput = fileInputResult.FileInputNode;
            fileInput.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }