Example #1
        public static async Task<AudioOutputManager> CreateAsync(AudioGraph audioGraph)
        {
            var outputAudioManager = new AudioOutputManager(audioGraph);
            await outputAudioManager.InitializeAudioOutput();

            return outputAudioManager;
        }
Example #2
        public async Task InitializeSounds()
        {
            InputNodes = new ObservableCollection<AudioFileInputNode>();
            FileInputNodesDictionary = new Dictionary<string, AudioFileInputNode>();

            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status == AudioGraphCreationStatus.Success)
            {
                graph = result.Graph;
                CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();


                // make sure the audio output is available
                if (deviceOutputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
                {
                    outputNode = deviceOutputNodeResult.DeviceOutputNode;
                    graph.ResetAllNodes();

                    for (int i = 0; i < countdownSoundsNum; i++)
                    {
                        await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "1" + audioFileExt);
                        await CreateInputNodeFromFile(baseUri + countdownBaseFileName + "0" + (i + 1).ToString() + "-" + "2" + audioFileExt);
                    }

                    for (int j = 0; j < doneSoundsNum; j++)
                    {
                        await CreateInputNodeFromFile(baseUri + doneBaseFileName + ((j >= 9) ? "" : "0") + (j + 1).ToString() + audioFileExt);
                    }

                    graph.Start();
                }
            }
        }
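
`CreateInputNodeFromFile` is referenced above but not shown. A minimal sketch of what such a helper could look like, assuming the URI resolves to a packaged file and using the `graph`, `outputNode`, `InputNodes`, and `FileInputNodesDictionary` members from the example:

        // Hypothetical helper matching the calls above: loads a packaged sound
        // file, connects it to the device output, and caches it by file name.
        private async Task CreateInputNodeFromFile(string uri)
        {
            StorageFile file = await StorageFile.GetFileFromApplicationUriAsync(new Uri(uri));
            CreateAudioFileInputNodeResult result = await graph.CreateFileInputNodeAsync(file);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }

            AudioFileInputNode node = result.FileInputNode;
            node.Stop();                                  // loaded sounds shouldn't play immediately
            node.AddOutgoingConnection(outputNode);
            InputNodes.Add(node);
            FileInputNodesDictionary.Add(file.Name, node);
        }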
Example #3
        private async Task CreateAudioGraph()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            graph.EncodingProperties.SampleRate = 44100;

            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
            NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
Example #4
        public async Task<bool> InitializeAudioGraphAsync()
        {
            var audioGraphSettings = new AudioGraphSettings(AudioRenderCategory.Media);
            var result             = await AudioGraph.CreateAsync(audioGraphSettings);

            LastStatus = result.Status.ToString();
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return false;
            }

            _audioGraph = result.Graph;
            _audioGraph.UnrecoverableErrorOccurred += (sender, args) => OnErrorOccurred(args);

            var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();

            LastStatus = outputResult.Status.ToString();

            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                _audioGraph.Dispose();
                return false;
            }

            _outputNode = outputResult.DeviceOutputNode;

            CreateEchoEffect();
            CreateLimiterEffect();
            CreateReverbEffect();
            CreateEqualizerEffect();

            return true;
        }
        public async Task Init()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result == null || result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            audioGraph = result.Graph;

            var createAudioDeviceOutputResult = await audioGraph.CreateDeviceOutputNodeAsync();

            if (createAudioDeviceOutputResult == null || createAudioDeviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            deviceOutputNode = createAudioDeviceOutputResult.DeviceOutputNode;

            AudioEncodingProperties audioEncodingProperties = new AudioEncodingProperties();

            audioEncodingProperties.BitsPerSample = 32;
            audioEncodingProperties.ChannelCount  = 2;
            audioEncodingProperties.SampleRate    = 44100;
            audioEncodingProperties.Subtype       = MediaEncodingSubtypes.Float;

            audioFrameInputNode = audioGraph.CreateFrameInputNode(audioEncodingProperties);
            audioFrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            audioFrameInputNode.AddOutgoingConnection(deviceOutputNode);
            audioGraph.Start();
        }
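
The `FrameInputNode_QuantumStarted` handler wired up above is not shown. A minimal sketch, assuming `theta` and `frequency` fields, that fills each requested quantum with a sine tone; it needs the `IMemoryBufferByteAccess` COM interface (declared below, from `System.Runtime.InteropServices`) and unsafe code enabled in the project:

        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        private double theta;                 // assumed field: running phase
        private double frequency = 440.0;     // assumed field: tone to generate

        private unsafe void FrameInputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            uint samples = (uint)args.RequiredSamples;
            if (samples == 0) return;

            uint bufferSize = samples * sizeof(float) * 2;   // 2 channels, 32-bit float
            AudioFrame frame = new AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* dataInBytes, out uint capacity);
                float* dataInFloat = (float*)dataInBytes;

                double increment = 2 * Math.PI * frequency / audioGraph.EncodingProperties.SampleRate;
                for (uint i = 0; i < samples * 2; i += 2)
                {
                    float sample = (float)Math.Sin(theta);
                    dataInFloat[i]     = sample;   // left
                    dataInFloat[i + 1] = sample;   // right
                    theta += increment;
                }
            }

            sender.AddFrame(frame);
        }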
Example #6
        public async Task<bool> InitializeAsync()
        {
            if (this.IsInitialized)
            {
                return true;
            }

            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return false;
            }

            _audioGraph = result.Graph;
            var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();

            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return false;
            }

            _outputNode = outputResult.DeviceOutputNode;

            if (!this.IsMute)
            {
                _audioGraph.Start();
            }

            this.IsInitialized = true;

            return true;
        }
Example #7
        public IAsyncOperation<AudioFrameReader> OpenAudioFrameReaderAsync()
        {
            return Task.Run(async () =>
            {
                if (AudioReader == null)
                {
                    var microphones = await DeviceInformation.FindAllAsync(DeviceInformation.GetAqsFilterFromDeviceClass(DeviceClass.AudioCapture));
                    var kinectMicArray = microphones.FirstOrDefault(mic => mic.Name.ToLowerInvariant().Contains("xbox nui sensor"));

                    if (kinectMicArray != null)
                    {
                        //TODO: review parameters
                        var settings = new AudioGraphSettings(AudioRenderCategory.Speech);
                        settings.EncodingProperties = AudioEncodingProperties.CreatePcm(16000, 4, 32);
                        settings.EncodingProperties.Subtype = MediaEncodingSubtypes.Float;
                        settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
                        settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Raw;

                        var audioGraphResult = await AudioGraph.CreateAsync(settings);
                        if (audioGraphResult.Status == AudioGraphCreationStatus.Success)
                        {
                            var inputNodeResult = await audioGraphResult.Graph.CreateDeviceInputNodeAsync(MediaCategory.Speech, audioGraphResult.Graph.EncodingProperties, kinectMicArray);

                            if (inputNodeResult.Status == AudioDeviceNodeCreationStatus.Success)
                            {
                                var output = audioGraphResult.Graph.CreateFrameOutputNode(audioGraphResult.Graph.EncodingProperties);
                                AudioReader = new AudioFrameReader(audioGraphResult.Graph, output);
                            }
                        }
                    }
                }
                AudioReader?.Open();
                return AudioReader;
            }).AsAsyncOperation();
        }
Example #8
        /// <summary>
        /// When audioFrameUpdateCount reaches audioFrameUpdateMinimum, this method gets the current audio frame, extracts its sample data,
        /// and calculates the raw audio level on a scale from -100 to 0.
        /// </summary>
        private static unsafe void Graph_QuantumStarted(AudioGraph sender, object args)
        {
            audioFrameUpdateCount++;
            if (audioFrameUpdateCount >= audioFrameUpdateMinimum)
            {
                AudioFrame audioFrame = frameOutputNode.GetFrame();
                float[]    floatData;
                using (AudioBuffer audioBuffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))   // read access is enough; the buffer is only copied out
                    using (IMemoryBufferReference reference = audioBuffer.CreateReference())
                    {
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);

                        float *unsafeFloatData = (float *)dataInBytes;
                        floatData = new float[capacity / sizeof(float)];

                        for (int i = 0; i < capacity / sizeof(float); i++)
                        {
                            floatData[i] = unsafeFloatData[i];
                        }
                    }

                double soundLevel = 0f;
                foreach (float sample in floatData)
                {
                    soundLevel += Math.Abs(sample);
                }
                soundLevel = Math.Log10(soundLevel / floatData.Length) * 20;

                NewRawSoundLevel(soundLevel);

                audioFrameUpdateCount = 0;
            }
        }
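
Note: the cast to `IMemoryBufferByteAccess` in this example requires the usual COM interop declaration (`[ComImport]`, `[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]`, `[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]`), the same one shown in the sine-wave sketch above, plus unsafe code enabled in the project.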
Example #9
        private static async Task<AudioFileInputNode[,]> LoadAudioFileInputNodesAsync(int rowsCount, int columnsCount, AudioGraph audioGraph)
        {
            var audioDeviceOutputNode = await CreateAudioDeviceOutputNodeAsync(audioGraph);
            var storageFiles = await LoadStorageFiles(rowsCount, columnsCount);
            var result = new AudioFileInputNode[rowsCount, columnsCount];

            // initialize an input node for each cell
            for (var y = 0; y < rowsCount; y++)
            {
                for (var x = 0; x < columnsCount; x++)
                {
                    var inputResult = await audioGraph.CreateFileInputNodeAsync(storageFiles[y, x]);
                    if (inputResult.Status != AudioFileNodeCreationStatus.Success) continue;

                    var audioFileInputNode = inputResult.FileInputNode;
                    // it shouldn't start when we add it to audioGraph
                    audioFileInputNode.Stop();
                    // link it to the output node
                    audioFileInputNode.AddOutgoingConnection(audioDeviceOutputNode);
                    // add to the array for easier access to playback
                    result[y, x] = audioFileInputNode;
                }
            }

            return result;
        }
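
A cell of the returned matrix can then be replayed on demand; a small usage sketch (hypothetical helper, not part of the original class):

        // Hypothetical playback helper: rewind the cell's input node and start it.
        private static void PlayCell(AudioFileInputNode[,] nodes, int row, int column)
        {
            var node = nodes[row, column];
            node.Seek(TimeSpan.Zero);   // rewind to the beginning
            node.Start();               // the graph itself keeps running
        }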
Example #10
        private async void Create()
        {
            OutputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            var audioGraphResult = await AudioGraph.CreateAsync(settings);

            if (audioGraphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new ApplicationException($"Audio graph error: {audioGraphResult.Status}");
            }

            Graph = audioGraphResult.Graph;
            Graph.QuantumProcessed += (audioGraph, e) => AudioGraphQuantumProcessed();
            Graph.QuantumStarted   += (audioGraph, e) => AudioGraphQuantumStarted();

            InputDevice = await CreateInputDevice().ConfigureAwait(true);

            OutputDevice = await CreateOutputDevice().ConfigureAwait(true);

            RecordingOutputNode = CreateFrameOutputNode();

            Ready?.Invoke(this, EventArgs.Empty);
        }
        /// <summary>
        ///     Runs every time an audio frame is read from the file
        /// </summary>
        private void FileInput_QuantumStarted(AudioGraph sender, object args)
        {
            // to not report too many times
            if (sender.CompletedQuantumCount % 100 == 0)
            {
                var numOfSamples =
                    0.0000001
                    * _fileInputNode.Duration.Ticks
                    * _fileInputNode.EncodingProperties.SampleRate;
                var dProgress =
                    100 *
                    (int)sender.CompletedQuantumCount
                    * sender.SamplesPerQuantum /
                    numOfSamples;
                _ioProgress?.Report(dProgress);
            }

            if (_audioDataCurrentPosition == 0)
            {
                _frameOutputNode.Start();
            }

            var frame = _frameOutputNode.GetFrame();

            ProcessInputFrame(frame);

            if (_finished)
            {
                _frameOutputNode?.Stop();
                _audioGraph?.Stop();
            }
        }
Example #12
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
        /// <summary>
        ///     Creates an instance of AudioGraph and sets io_progress
        /// </summary>
        public async Task<CreateAudioGraphResult> Init(Progress<double> progress)
        {
            // set io_progress var to show progress of input-output
            _ioProgress = progress;

            // initialize settings for AudioGraph
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);

            // if audioGraph was previously created
            if (_audioGraph != null)
            {
                _audioGraph.Dispose();
                _audioGraph = null;
            }

            var result = await AudioGraph.CreateAsync(settings);

            if (result.Status == AudioGraphCreationStatus.Success)
            {
                _audioGraph = result.Graph;
            }

            return result;
        }
        public async Task CreateAudioGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            settings.PrimaryRenderDevice      = outputDevices[0];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating Audio Graph", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            graph = result.Graph;
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in creating InputDeviceNode", "OOPS!!");
                await md.ShowAsync();

                return;
            }
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
        }
 private async void AudioGraph_QuantumStarted(AudioGraph sender, object args)
 {
     var dispatcher = CoreApplication.MainView.CoreWindow.Dispatcher;
     await dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal,
                               () =>
     {
         try
         {
             var rotate = new RotateTransform
             {
                 Angle = ((RotateTransform)albumArtThumb.RenderTransform).Angle + 0.2
             };
             albumArtThumb.RenderTransform = rotate;
             timeProgress.Text             = graphGlobal.fileInputNode.Position.Duration().ToString(@"mm\:ss");
             progressBarControl.Value      = graphGlobal.fileInputNode.Position.TotalSeconds / graphGlobal.fileInputNode.Duration.TotalSeconds * 100;
             var remainder_ms = graphGlobal.fileInputNode.Duration.TotalMilliseconds - graphGlobal.fileInputNode.Position.TotalMilliseconds;
             if (remainder_ms <= scanLinePixelRangeTotalMilliseconds)
             {
                 controlScanLine.Value = 100 * (1 - remainder_ms / scanLinePixelRangeTotalMilliseconds);
             }
         }
          catch
          {
              // swallow transient UI exceptions while the page is being torn down
          }
     });
 }
Example #16
        public ControlPanelViewModel(IWaveFormRenderer waveFormRenderer)
        {
            this.waveFormRenderer = waveFormRenderer;
            this.audioGraph = new AudioGraph();
            audioGraph.MaximumCalculated += audioGraph_MaximumCalculated;
            audioGraph.GotSongList += audioGraph_GotSongList;
            audioGraph.PlaybackStateChanged += audioGraph_PlaybackStateChanged;
            this.captureSeconds = 10;
            this.NotificationsPerSecond = 100;
            this.ConnectionUri = "localhost:6170?udp=9500";
            this.EnableCompression = false;

            PlayStreamCommand = new RelayCommand(
                        () => this.PlayStream(),
                        () => true);
            CaptureCommand = new RelayCommand(
                        () => this.Capture(),
                        () => true);
            PlayCapturedAudioCommand = new RelayCommand(
                        () => this.PlayCapturedAudio(),
                        () => this.HasCapturedAudio());
            SaveCapturedAudioCommand = new RelayCommand(
                        () => this.SaveCapturedAudio(),
                        () => this.HasCapturedAudio());
            StopCommand = new RelayCommand(
                        () => this.Stop(),
                        () => true);
        }
        public async Task<bool> InitializeSound()
        {
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return false;
            }

            graph = result.Graph;
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // don't report success when the output node could not be created
                return false;
            }

            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (var soundMapping in soundMappings)
            {
                await AddFileToSoundDictionary("ms-appx:///Features/Game/Assets/" + soundMapping.Value);
            }

            graph.Start();

            isInitialized = true;
            return isInitialized;
        }
Example #18
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                throw new Exception("error");
            }

            _graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await _graph.CreateDeviceOutputNodeAsync();

            _subMixNode = _graph.CreateSubmixNode();


            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                throw new Exception("error");
            }

            _deviceOutputNode = deviceOutputResult.DeviceOutputNode;
            _subMixNode.AddOutgoingConnection(_deviceOutputNode);
        }
Example #19
        public async Task Init()
        {
            var graphResult = await Windows.Media.Audio.AudioGraph.CreateAsync(
                new Windows.Media.Audio.AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));

            if (graphResult.Status != Windows.Media.Audio.AudioGraphCreationStatus.Success)
            {
                throw new Exception("Failed to create AudioGraph");
            }

            graph = graphResult.Graph;

            var outputResult = await graph.CreateDeviceOutputNodeAsync();

            if (outputResult.Status != Windows.Media.Audio.AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Failed to create device output node");
            }

            outputNode = outputResult.DeviceOutputNode;

            graph.QuantumProcessed += MainGraph_QuantumProcessed;
        }
Example #20
        /// <summary>
        /// Update system sample rate asynchronously.
        /// </summary>
        /// <returns>Task represents the asynchronous operation.</returns>
        public async Task<int> UpdateSampleRate()
        {
            try
            {
                var result = await AudioGraph.CreateAsync(
                    new AudioGraphSettings(AudioRenderCategory.Media));

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    var rate = (int)result.Graph.EncodingProperties.SampleRate;
                    SetSystemSampleRate(rate);
                    result.Graph.Dispose();
                    return rate;
                }
                else
                {
                    SetSystemSampleRate(0);
                }
            }
            catch
            {
                // Ignore
            }

            return 0;
        }
Example #21
        public void Stop()
        {
            m_audioDataMutex.WaitOne();
            m_isRunning  = false;
            m_isFlushing = false;

            if (m_audioGraph != null)
            {
                m_audioGraph.Stop();
            }

            if (m_deviceOutputNode != null)
            {
                m_deviceOutputNode.Dispose();
                m_deviceOutputNode = null;
            }

            if (m_frameInputNode != null)
            {
                m_frameInputNode.Dispose();
                m_frameInputNode = null;
            }

            if (m_audioGraph != null)
            {
                m_audioGraph.Dispose();
                m_audioGraph = null;
            }
            m_audioData = null;
            m_audioDataMutex.ReleaseMutex();
        }
Example #22
        private void Recover()
        {
            App.Log("Recover MediaPlayer");
            graph.Stop();
            try
            {
                mainInputNode.Dispose();
            }
            catch (Exception) { }
            try
            {
                subInputNode.Dispose();
            }
            catch (Exception) { }
            try
            {
                outputNode.Dispose();
            }
            catch (Exception) { }
            mainInputNode = null;
            subInputNode  = null;
            outputNode    = null;
            mainSong      = null;
            subSong       = null;

            try
            {
                graph.Dispose();
            }
            catch (Exception) { }

            graph = null;
            Init();
        }
        // create the audio graph and output
        private async void InitAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency; // pick lowest latency available to devices in the graph


            // create the audio graph
            _graph = (await AudioGraph.CreateAsync(settings)).Graph;
            if (_graph == null)
            {
                // failed to create audio graph
                MessageDialog dlg = new MessageDialog("Failed to create audio graph");
                await dlg.ShowAsync();
                return;
            }


            // create the output. You could also create file output here to stream to a temp file or similar
            _deviceOutput = (await _graph.CreateDeviceOutputNodeAsync()).DeviceOutputNode;
            if (_deviceOutput == null)
            {
                // failed to create audio output
                MessageDialog dlg = new MessageDialog("Failed to create device output");
                await dlg.ShowAsync();
                return;
            }


            // load all of the samples into graph nodes
            BuildFileNodes();

            // start playback
            _graph.Start();
        }
Example #24
        /// <summary>
        /// Initializes the player service
        /// </summary>
        private async Task InitializeAsync()
        {
            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _audioGraph = result.Graph;
            var outputResult = await _audioGraph.CreateDeviceOutputNodeAsync();

            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            _outputNode = outputResult.DeviceOutputNode;
            _audioGraph.Start();

            foreach (var kvp in _filenames)
            {
                var storageFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///Assets/{kvp.Value}"));

                _loaded.Add(kvp.Key, storageFile);
            }
        }
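
No playback method is shown for this service; a minimal sketch, assuming a string key into the `_loaded` dictionary populated above:

        // Hypothetical playback helper built on the fields initialized above.
        private async Task PlayAsync(string key)
        {
            if (!_loaded.TryGetValue(key, out var storageFile))
            {
                return;
            }

            var inputResult = await _audioGraph.CreateFileInputNodeAsync(storageFile);
            if (inputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }

            inputResult.FileInputNode.AddOutgoingConnection(_outputNode);
            inputResult.FileInputNode.Start();
        }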
Example #25
        public async Task Initialize()
        {
            using (var release = await InitializeLock.LockAsync())
            {
                var pcmEncoding = AudioEncodingProperties.CreatePcm(48000, 1, 16);

                var result = await AudioGraph.CreateAsync(
                    new AudioGraphSettings(AudioRenderCategory.GameChat)
                    {
                        // the render category is already set via the constructor
                        DesiredRenderDeviceAudioProcessing = AudioProcessing.Raw,
                        EncodingProperties = pcmEncoding
                    });

                if (result.Status != AudioGraphCreationStatus.Success)
                {
                    throw new Exception("AudioGraph creation failed: " + result.Status);
                }

                AudioGraph = result.Graph;

                // Initialize the microphone input
                Input = await AudioInputManager.CreateAsync(AudioGraph);

                Input.MinGain = _MicVolume;
                Input.InputDeviceStateChanged += Input_InputDeviceStateChanged;
                this.InputDeviceState          = Input.InputDeviceState;

                // Initialize the speaker output
                Output = await AudioOutputManager.CreateAsync(AudioGraph);

                Output.SpeakerGain = _SpeakerVolume;
            }
        }
Example #26
        internal static async Task<AudioInputManager> CreateAsync(AudioGraph audioGraph, DeviceInformation microphoneDevice = null)
        {
            var audioInputManager = new AudioInputManager(audioGraph);
            await audioInputManager.ResetAudioInput();

            return audioInputManager;
        }
Example #27
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings.
            // CreateAsync must be awaited; calling GetResults() on the still-pending
            // operation throws, so this method cannot be synchronous.
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
        }
Example #28
 public override void InitFromRawDecompressed(byte[] decompressedBytes)
 {
     using (var reader = new BinaryReader(new MemoryStream(decompressedBytes)))
     {
         this.Resource = Read_AudioGraphResource(reader);
     }
 }
 protected override void OnNavigatedFrom(NavigationEventArgs e)
 {
     base.OnNavigatedFrom(e);
     _graph?.Stop();
     _graph?.Dispose();
     _graph = null;
 }
		protected override async Task CloseMayOverrideAsync()
		{
			var ag = _audioGraph;
			if (ag != null)
			{
				ag.UnrecoverableErrorOccurred -= OnGraph_UnrecoverableErrorOccurred;
				try
				{
					ag.Stop();
				}
				catch { }
				try
				{
					ag.Dispose();
				}
				catch { }
			}
			_audioGraph = null;
			//try
			//{
			//	_deviceInputNode?.Dispose();
			//}
			//catch { }
			//try
			//{
			//	_deviceOutputNode?.Dispose();
			//}
			//catch { }
			//try
			//{
			//	_fileOutputNode?.Dispose();
			//}
			//catch { }
			await Task.CompletedTask;
		}
Example #31
        private static ReverbEffectDefinition CreateSmallRoom(AudioGraph graph)
        {
            ReverbEffectDefinition reverbEffectDefinition = new ReverbEffectDefinition(graph);

            reverbEffectDefinition.WetDryMix           = 100;
            reverbEffectDefinition.ReflectionsDelay    = 5;
            reverbEffectDefinition.ReverbDelay         = 10;
            reverbEffectDefinition.RearDelay           = 5;
            reverbEffectDefinition.PositionLeft        = 6;
            reverbEffectDefinition.PositionRight       = 6;
            reverbEffectDefinition.PositionMatrixLeft  = 27;
            reverbEffectDefinition.PositionMatrixRight = 27;
            reverbEffectDefinition.EarlyDiffusion      = 15;
            reverbEffectDefinition.LateDiffusion       = 15;
            reverbEffectDefinition.LowEQGain           = 8;
            reverbEffectDefinition.LowEQCutoff         = 4;
            reverbEffectDefinition.HighEQGain          = 8;
            reverbEffectDefinition.HighEQCutoff        = 6;
            reverbEffectDefinition.RoomFilterFreq      = 5000;
            reverbEffectDefinition.RoomFilterMain      = -10;
            reverbEffectDefinition.RoomFilterHF        = -6;
            reverbEffectDefinition.ReflectionsGain     = -4;
            reverbEffectDefinition.ReverbGain          = 5;
            reverbEffectDefinition.DecayTime           = 1.10;
            reverbEffectDefinition.Density             = 100;
            reverbEffectDefinition.RoomSize            = 100;
            reverbEffectDefinition.DisableLateField    = false;

            return reverbEffectDefinition;
        }
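
The definition only takes effect once it is attached to a node's effect list, typically a submix node feeding the device output, e.g. `submixNode.EffectDefinitions.Add(CreateSmallRoom(graph));`.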
 // Initializes AudioGraph
 public async Task InitAudioGraph()
 {
     AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
     CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
     audioGraph = result.Graph;
 }
Example #33
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);
            var mediaSource = MediaSource.CreateFromUri(new Uri("ms-appx:///Test/GirlishLover.m4a"));
            await mediaSource.OpenAsync();

            this.mpe.Source = mediaSource;
            this.mpe.MediaPlayer.MediaOpened += this.MediaPlayer_MediaOpened;

            var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Other)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };
            var result = await AudioGraph.CreateAsync(settings);

            this.audioGraph = result.Graph;

            this.outNode = this.audioGraph.CreateFrameOutputNode();

            this.fileNode           = (await this.audioGraph.CreateFileInputNodeAsync(await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Test/GirlishLover.m4a")))).FileInputNode;
            this.fileNode.LoopCount = 0;
            this.fileNode.AddOutgoingConnection(this.outNode);
            this.fileNode.FileCompleted    += this.FileNode_FileCompleted;
            this.audioGraph.QuantumStarted += this.AudioGraph_QuantumStarted;

            this.audioGraph.Start();
        }
Example #34
        public async void CaptureAudio()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _audioGraph = result.Graph;

            var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            _audioFile = await Windows.Storage.ApplicationData.Current.TemporaryFolder
                         .CreateFileAsync("speech", CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(_audioFile, mediaEncodingProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }
            var fileOutputNode = fileOutputNodeResult.FileOutputNode;

            deviceInputNode.AddOutgoingConnection(fileOutputNode);

            _audioGraph.Start();
        }
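
The MP3 written this way is only valid after the file output node is finalized; a minimal stop sketch, assuming the caller kept a reference to `fileOutputNode` (the original method keeps it local):

        // Hypothetical counterpart to CaptureAudio: stop the graph and finalize
        // the file so the encoder flushes its headers.
        public async Task StopCaptureAsync(AudioFileOutputNode fileOutputNode)
        {
            _audioGraph.Stop();

            TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
            if (finalizeResult != TranscodeFailureReason.None)
            {
                // finalization failed; the captured file is not usable
            }
        }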
Example #35
        private AudioMatrix(AudioGraph audioGraph, AudioFileInputNode[,] audioFileInputNodes)
        {
            _audioGraph = audioGraph;
            _audioFileInputNodes = audioFileInputNodes;

            // we have to start audioGraph one time, but we can start/stop individual
            // input nodes as many times as needed
            _audioGraph.Start();
        }
		private async Task CreateAudioGraph()
		{
			var settings = new AudioGraphSettings(AudioRenderCategory.Media);
			var result = await AudioGraph.CreateAsync(settings);
			_graph = result.Graph;
			var deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
			_deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
			_graph.ResetAllNodes();
			_graph.Start();
		}
        private async Task InitializeAsync()
        {
            var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.GameEffects);

            var graphCreationResult = await AudioGraph.CreateAsync(settings);
            _graph = graphCreationResult.Graph;

            var deviceOutputCreationResult = await _graph.CreateDeviceOutputNodeAsync();
            _deviceOutput = deviceOutputCreationResult.DeviceOutputNode;

            _graph.ResetAllNodes();
            _graph.Start();
        }
        private async void Page_Loaded(object sender, RoutedEventArgs e)
        {
            // midi

            var s = MidiInPort.GetDeviceSelector();
            var information = await DeviceInformation.FindAllAsync(s);

            var list = information.ToList();
            // NOTE: hard-coded to the third MIDI input device in the list
            port = await MidiInPort.FromIdAsync(list.ElementAt(2).Id);
            port.MessageReceived += Port_MessageReceived;

            // audio
            var settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            var creation = await AudioGraph.CreateAsync(settings);

            graph = creation.Graph;
            output = await graph.CreateDeviceOutputNodeAsync();

            var encoding = graph.EncodingProperties;
            encoding.ChannelCount = 1;
            input = graph.CreateFrameInputNode(encoding);
            input.AddOutgoingConnection(output.DeviceOutputNode);
            input.Stop();

            input.QuantumStarted += Input_QuantumStarted;

            graph.Start();

            // midi notes (pitch to note)

            float a = 440; // a is 440 hz...
            for (int x = 0; x < 127; ++x)
            {
                notes[x] = (a / 32f) * (float)Math.Pow(2f, ((x - 9f) / 12f));
            }
        }
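
`Port_MessageReceived` is not shown; a minimal sketch that maps MIDI note-on/note-off onto the `notes` table computed above and gates the frame input node (the `frequency` field is an assumption, presumably read by `Input_QuantumStarted`):

        private double frequency;   // assumed field consumed by Input_QuantumStarted

        private void Port_MessageReceived(MidiInPort sender, MidiMessageReceivedEventArgs args)
        {
            switch (args.Message)
            {
                case MidiNoteOnMessage noteOn when noteOn.Velocity > 0:
                    frequency = notes[noteOn.Note];
                    input.Start();
                    break;
                case MidiNoteOnMessage _:          // velocity 0 is treated as note-off
                case MidiNoteOffMessage _:
                    input.Stop();
                    break;
            }
        }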
Example #39
 /// <summary>
 /// Creates a new instance of the <see cref="StatViewModel"/> class.
 /// </summary>
 /// <param name="audioGraph">The audio graph which notifies of stat changes.</param>
 public StatViewModel(AudioGraph audioGraph)
 {
     audioGraph.StatsUpdated += AudioGraph_StatsUpdated;
     Smoothness = "0.0000";
     FractionSamplesLost = ". . .";
 }
Example #40
        private async void initGraph() {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            graph = result.Graph;
            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;


            // NOTE: the picker below is created but never shown; the packaged
            // file loaded further down is what actually gets played.
            FileOpenPicker filePicker = new FileOpenPicker();
            filePicker.SuggestedStartLocation = PickerLocationId.MusicLibrary;
            filePicker.FileTypeFilter.Add(".mp3");
            filePicker.FileTypeFilter.Add(".wav");
            StorageFile file = await GetPackagedFile(null, "audio.mp3");
            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
            fileInput = fileInputResult.FileInputNode;
            fileInput.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode with the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;
            
            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
 private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
 {
     // Recreate the graph and all nodes when this happens
     await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
     {
         sender.Dispose();
         // Re-query for devices
         await PopulateDeviceList();
         // Reset UI
         fileButton.IsEnabled = false;
         recordStopButton.IsEnabled = false;
         recordStopButton.Content = "Record";
         outputDeviceContainer.Background = new SolidColorBrush(Color.FromArgb(255, 74, 74, 74));
         audioPipe1.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
         audioPipe2.Fill = new SolidColorBrush(Color.FromArgb(255, 49, 49, 49));
     });
 }
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);
        }
        // Get the buffer whenever it is processed
        private void AudioGraph_QuantumProcessed(AudioGraph sender, object args)
        {
            if (graph.CompletedQuantumCount % 10 == 0)
            {
                IList<float> points = new List<float>();

                // Get the required properties and fill points array
                sampleRate = graph.EncodingProperties.SampleRate;
                AudioFrame frame = frameOutputNode.GetFrame();

                // Get the buffer into the point collection
                try
                {
                    points = audioProcessing.ProcessFrameOutput(frame);
                }
                catch (Exception e)
                {
                    ShowErrorMessage(e.Message);
                }

                size = points.Count();

                if (size != 0)
                {
                    sizePowerOf2Log = GetNextPowerOf2((uint)size);
                    quantumDuration = (float)Math.Pow(2, sizePowerOf2Log) / sampleRate;
                    uppercutPoint = (float)Math.Round(uppercut * quantumDuration);
                    lowercutPoint = (float)Math.Round(lowercut * quantumDuration);

                    size = (int)Math.Pow(2, sizePowerOf2Log);

                    while (points.Count() < size)
                    {
                        points.Add(0);
                    }
                    
                    // Perform FFT on quantum
                    points = freqAnalyzer.Run(points);

                    //Remove upper and lower limits
                    for (int i = 0; i < (uppercutPoint - lowercutPoint); i++)
                    {
                        points[i] = points[i + (int)lowercutPoint];
                    }
                    while (points.Count() > uppercutPoint - lowercutPoint)
                    {
                        points.RemoveAt(points.Count() - 1);
                    }

                    // Normalize the points
                    float mh = points.Max();
                    float mg = points.Min();

                    if ((mh - mg) / size > 0.001)
                    {
                        for (int i = 0; i < points.Count; i++)
                        {
                            points[i] = points[i] * size / (mh * 2);
                        }
                    }

                    // Store results
                    if(ready)
                    {
                        pointsf.Clear();
                        pointsf.AddRange(points);
                    }
                }
            }
        }
        // Create the AudioGraph
        private async Task CreateAudioGraph()
        {
            // Create a new AudioGraph settings object to store the options; here you can play with latency, output device, etc.
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.SoundEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            settings.DesiredSamplesPerQuantum = desiredSamples;
            settings.DesiredRenderDeviceAudioProcessing = Windows.Media.AudioProcessing.Default;
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                ShowErrorMessage(string.Format("AudioGraph Creation Error because {0}", result.Status.ToString()));
                return;
            }
            graph = result.Graph;


            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                ShowErrorMessage(string.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));
                return;
            }
            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            
            // Create the output node to send to data processing and add the event handler for when a quantum is processed
            frameOutputNode = graph.CreateFrameOutputNode();
            graph.QuantumProcessed += AudioGraph_QuantumProcessed;


            // Link the nodes together
            deviceInputNode.AddOutgoingConnection(frameOutputNode);


            // Because we are using lowest latency setting, we need to handle device disconnection errors
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
		private void OnGraph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
		{
			_messageWriter.LastMessage = args.Error.ToString();
			// Recreate the graph and all nodes when this happens
			//sender.Dispose();
			//DisposeAudioGraph();

			UnrecoverableError?.Invoke(this, EventArgs.Empty);

			// Re-query for devices // LOLLO NO!
			// _messageWriter.LastMessage = await CreateAudioGraphAsync().ConfigureAwait(false);
		}
        public static void StopMicDevice()
        {
            StringBuilder sb = new StringBuilder(260);  // 260 is Windows MAX_PATH as defined in C++; paths can't be longer than this and the plugin knows it, too
            Task.Factory.StartNew(() =>
            {
                MicStopRecording(sb);
                Debug.WriteLine(sb.ToString());
                CheckForErrorOnCall(MicDestroy());

                graph.Dispose();    // unfortunately, the app needs to do this to be able to re-init plugin later
                graph = null;       // this, too
            }
            );
        }
		/// <summary>
		/// Required before starting recording
		/// </summary>
		/// <returns></returns>
		private async Task<string> CreateAudioGraphAsync()
		{
			// var inputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector()); // LOLLO TEST

			_outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());
			if (_outputDevices == null || _outputDevices.Count < 1)
			{
				return "AudioGraph Creation Error: no output devices found";
			}

			AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
			{ QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency, PrimaryRenderDevice = _outputDevices[0] };

			CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
			if (result.Status != AudioGraphCreationStatus.Success)
			{
				// Cannot create graph
				return string.Format("AudioGraph Creation Error because {0}", result.Status.ToString());
			}
			_audioGraph = result.Graph;
			// Because we are using lowest latency setting, we need to handle device disconnection errors
			_audioGraph.UnrecoverableErrorOccurred += OnGraph_UnrecoverableErrorOccurred;

			//// Create a device output node // away, so we get no echo
			//CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _audioGraph.CreateDeviceOutputNodeAsync();
			//if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			//{
			//	// Cannot create device output node
			//	return string.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString());
			//}
			//_deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

			// Create a device input node using the default audio input device
			CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
			if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			{
				// Cannot create device input node
				return string.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString());
			}
			_deviceInputNode = deviceInputNodeResult.DeviceInputNode;

			//// LOLLO set the volume, rather useless coz it is like a mixer and the default value is 1.
			//if (_deviceOutputNode.OutgoingGain < 1.0) _deviceOutputNode.OutgoingGain = 1.0;
			//if (_deviceInputNode.OutgoingGain < 1.0) _deviceInputNode.OutgoingGain = 1.0;

			return string.Empty;
		}
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default setting
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Can't create the graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            submixNode = graph.CreateSubmixNode();
            subMixNode.Background = new SolidColorBrush(Colors.Green);
            submixNode.AddOutgoingConnection(deviceOutputNode);

            echoEffect = new EchoEffectDefinition(graph);
            echoEffect.WetDryMix = 0.7f;
            echoEffect.Feedback = 0.5f;
            echoEffect.Delay = 500.0f;
            submixNode.EffectDefinitions.Add(echoEffect);

            // Disable the effect in the beginning. Enable in response to user action (UI toggle switch)
            submixNode.DisableEffectsByDefinition(echoEffect);

            // All nodes can have an OutgoingGain property
            // Setting the gain on the Submix node attenuates the output of the node
            submixNode.OutgoingGain = 0.5;

            // Graph successfully created. Enable buttons to load files
            fileButton1.IsEnabled = true;
            fileButton2.IsEnabled = true;
        }
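
Since the echo effect starts out disabled, a UI toggle can flip it at run time with `submixNode.EnableEffectsByDefinition(echoEffect);` and turn it back off with `submixNode.DisableEffectsByDefinition(echoEffect);`.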
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                await ShowMessage(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()));
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                await ShowMessage(String.Format("Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()));
                return;
            }

            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;
            await ShowMessage("Device Output Node successfully created");
        }
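To actually hear anything through this graph, a file input node still has to be connected to the device output. A sketch of that follow-up step, assuming the user has already picked a StorageFile (the method name AddFileToGraphAsync is illustrative):

        // Hypothetical follow-up: route a user-selected audio file into the graph
        // created above. "file" is assumed to come from a FileOpenPicker.
        private async Task AddFileToGraphAsync(StorageFile file)
        {
            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);

            if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // Cannot read the input file
                await ShowMessage(String.Format("Cannot read input file because {0}", fileInputResult.Status.ToString()));
                return;
            }

            // Connect the file to the speakers and start the graph
            fileInputResult.FileInputNode.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }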
        private async Task CreateAudioGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            // Index 0 of the list box is a placeholder entry, hence the -1 offset
            settings.PrimaryRenderDevice = outputDevices[outputDevicesListBox.SelectedIndex - 1];

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;
            rootPage.NotifyUser("Graph successfully created!", NotifyType.StatusMessage);

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                outputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output connection successfully created", NotifyType.StatusMessage);
            outputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                rootPage.NotifyUser(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                inputDeviceContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
            rootPage.NotifyUser("Device Input connection successfully created", NotifyType.StatusMessage);
            inputDeviceContainer.Background = new SolidColorBrush(Colors.Green);

            // The graph was created successfully, so enable the button to select a file output
            fileButton.IsEnabled = true;

            // Disable the graph button to prevent accidental clicks
            createGraphButton.IsEnabled = false;

            // Because we are using the lowest-latency setting, we need to handle device disconnection errors
            graph.UnrecoverableErrorOccurred += Graph_UnrecoverableErrorOccurred;
        }
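The outputDevices collection indexed above has to be populated before the graph can be created. A sketch of that initialization using the standard enumeration APIs; the method name PopulateOutputDeviceListAsync and the placeholder list entry are assumptions, but they match the SelectedIndex - 1 offset used above:

        // Hypothetical initialization: enumerate audio render devices so the user can
        // pick a PrimaryRenderDevice. outputDevices is a DeviceInformationCollection.
        private async Task PopulateOutputDeviceListAsync()
        {
            outputDevicesListBox.Items.Clear();
            outputDevicesListBox.Items.Add("-- Pick output device --");

            string audioSelector = MediaDevice.GetAudioRenderSelector();
            outputDevices = await DeviceInformation.FindAllAsync(audioSelector);

            foreach (DeviceInformation device in outputDevices)
            {
                outputDevicesListBox.Items.Add(device.Name);
            }
        }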
Example #52
0
        private static async Task<AudioDeviceOutputNode> CreateAudioDeviceOutputNodeAsync(AudioGraph audio)
        {
            // Create the output node (the speakers in our case)
            var result = await audio.CreateDeviceOutputNodeAsync();
            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Audio device node unavailable.");
            }

            return result.DeviceOutputNode;
        }
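Hypothetical usage of this helper, pairing it with a freshly created graph:

        // Hypothetical caller: create a graph, then attach the default output device.
        var creation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
        if (creation.Status == AudioGraphCreationStatus.Success)
        {
            AudioDeviceOutputNode speakers = await CreateAudioDeviceOutputNodeAsync(creation.Graph);
        }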
        private void AudioGraphQuantumProcessedHandler(AudioGraph sender, object args)
        {
            // Runs once per processed audio quantum; do nothing unless the
            // remote peer is actually receiving audio.
            if (!_friendIsReceivingAudio)
                return;

            // Pull the audio that the frame output node captured this quantum.
            var frame = _toxOutputNode.GetFrame();
            ProcessFrameOutput(frame);
        }
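ProcessFrameOutput is not shown in this example. The usual way to reach the raw samples of an AudioFrame is the IMemoryBufferByteAccess COM interface from the AudioGraph frame samples; a sketch under that assumption (requires unsafe code and System.Runtime.InteropServices; what happens to the samples afterwards is up to the app):

        // COM interface that exposes the raw byte buffer behind an AudioFrame.
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        // Hypothetical sketch of ProcessFrameOutput: lock the frame for reading and
        // walk its float samples, e.g. to hand them to a network encoder.
        private unsafe void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                byte* dataInBytes;
                uint capacityInBytes;
                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                // AudioGraph works in 32-bit float PCM by default, so the buffer
                // holds capacityInBytes / sizeof(float) samples.
                float* dataInFloat = (float*)dataInBytes;
            }
        }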
        // This is an unfortunate workaround: the C++ AudioGraph cannot be started from the UI
        // thread because of its blocking calls, so we create the graph here and pass it to the plugin.
        private static async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                return;
            }
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);    // Create an AudioGraph with default settings
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);             // this graph is bound to this process

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return; // Cannot create graph
            }
            graph = result.Graph;
            CheckForErrorOnCall(MicInitializeDefaultWithGraph((int)streamType, graph)); // Pass the bound graph to the mic plugin; this lets our current process hear audio.
        }
        private async Task InitAudioGraph()
        {
            var encodingProperties = AudioEncodingProperties.CreatePcm((uint) _samplingRate, 1, 16);

            // Don't modify DesiredSamplesPerQuantum! If you do, change KQuantumSize accordingly!
            var settings = new AudioGraphSettings(AudioRenderCategory.Communications)
            {
                EncodingProperties = encodingProperties
            };

            var result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception(result.Status.ToString());
            }

            _audioGraph = result.Graph;
        }
 [DllImport("MicStreamSelector", ExactSpelling = true)]    public static extern int MicInitializeDefaultWithGraph(int category, AudioGraph appGraph); // pass graph from app here to mic plugin
        private async void Graph_UnrecoverableErrorOccurred(AudioGraph sender, AudioGraphUnrecoverableErrorOccurredEventArgs args)
        {
            // Dispose the failed graph on the UI thread; the app must then recreate
            // the graph and all of its nodes before audio can resume.
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                sender.Dispose();
            });
        }
 [DllImport("MicStreamSelector", ExactSpelling = true)]    public static extern int MicInitializeDefaultWithGraph(int category, AudioGraph appGraph); // pass graph from app here to mic plugin
 [DllImport("MicStreamSelector", ExactSpelling = true)]    public static extern int MicInitializeCustomRateWithGraph(int category, int samplerate, AudioGraph appGraph);  // pass graph from app here to mic plugin w/ custom sample rate
Example #59
0
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            // Await the async factory; calling GetResults() on an operation that has
            // not yet completed throws, so the graph must be created asynchronously.
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output
                return;
            }

            deviceOutputNode = deviceOutputResult.DeviceOutputNode;
        }
        private void outputDevicesListBox_SelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            // Index 0 is the placeholder entry, so no real output device is selected
            if (outputDevicesListBox.SelectedIndex == 0)
            {
                createGraphButton.IsEnabled = false;
                outputDevice.Foreground = new SolidColorBrush(Color.FromArgb(255, 110, 110, 110));
                outputDeviceContainer.Background = new SolidColorBrush(Color.FromArgb(255, 74, 74, 74));
                fileButton.IsEnabled = false;
                fileButton.Background = new SolidColorBrush(Color.FromArgb(255, 74, 74, 74));
                inputDeviceContainer.Background = new SolidColorBrush(Color.FromArgb(255, 74, 74, 74));

                // Destroy graph
                if (graph != null)
                {
                    graph.Dispose();
                    graph = null;
                }
            }
            else
            {
                createGraphButton.IsEnabled = true;
                outputDevice.Foreground = new SolidColorBrush(Colors.White);
            }
        }