Example #1
        public void StartAudioOutput(Discord.Audio.AudioInStream audioInStream)
        {
            AudioInStream = audioInStream;

            // Create the audio graph frame input node used for voice output
            // Note: must be 2 channels and 16-bit PCM to match what Discord sends
            #region DO NOT TOUCH

            _FrameInputNode = _AudioGraph.CreateFrameInputNode(
                AudioEncodingProperties.CreatePcm(
                    OpusConvertConstants.SamplingRate,
                    OpusConvertConstants.Channels,
                    16
                    ));

            #endregion

            // Connect to the default output node
            _FrameInputNode.AddOutgoingConnection(_OutputNode);


            _FrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            _FrameInputNode.Start();

            _AudioGraph.Start();
        }
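
The QuantumStarted handler wired up above is not shown in this example. Below is a minimal sketch of what it might look like, assuming Discord's AudioInStream delivers raw 16-bit PCM as a readable Stream (it may return fewer bytes than requested); the buffer access uses the standard IMemoryBufferByteAccess COM interface and requires unsafe code. This is an illustration, not the original author's code.

        // Requires: System.Runtime.InteropServices, Windows.Media, Windows.Media.Audio
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        private unsafe void FrameInputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            // 16-bit PCM: 2 bytes per sample per channel
            int channels = (int)OpusConvertConstants.Channels; // assumed to be 2
            uint bytesNeeded = (uint)(args.RequiredSamples * sizeof(short) * channels);
            if (bytesNeeded == 0)
            {
                return;
            }

            // Assumption: AudioInStream exposes decoded PCM through Stream.Read
            var pcm = new byte[bytesNeeded];
            int read = AudioInStream.Read(pcm, 0, pcm.Length);
            if (read <= 0)
            {
                return;
            }

            // Copy the managed bytes into an AudioFrame and hand it to the node
            var frame = new AudioFrame(bytesNeeded);
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* data, out uint capacity);
                Marshal.Copy(pcm, 0, (IntPtr)data, read);
            }
            sender.AddFrame(frame);
        }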
Example #2
        public async Task Init()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result == null || result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            audioGraph = result.Graph;

            var createAudioDeviceOutputResult = await audioGraph.CreateDeviceOutputNodeAsync();

            if (createAudioDeviceOutputResult == null || createAudioDeviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            deviceOutputNode = createAudioDeviceOutputResult.DeviceOutputNode;

            AudioEncodingProperties audioEncodingProperties = new AudioEncodingProperties();

            audioEncodingProperties.BitsPerSample = 32;
            audioEncodingProperties.ChannelCount  = 2;
            audioEncodingProperties.SampleRate    = 44100;
            audioEncodingProperties.Subtype       = MediaEncodingSubtypes.Float;

            audioFrameInputNode = audioGraph.CreateFrameInputNode(audioEncodingProperties);
            audioFrameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            audioFrameInputNode.AddOutgoingConnection(deviceOutputNode);
            audioGraph.Start();
        }
Example #3
        private async Task InitializeAudioAsync()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16);

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            _graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
            _frameInputNode.AddOutgoingConnection(_deviceOutputNode);


            _frameOutputNode = _graph.CreateFrameOutputNode(nodeEncodingProperties);
            _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

            // Initialize the Frame Input Node in the stopped state
            _frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            _frameInputNode.QuantumStarted += node_QuantumStarted;

            _graph.QuantumProcessed += GraphOnQuantumProcessed;

            // Start the graph since we will only start/stop the frame input node
            _graph.Start();
        }
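
GraphOnQuantumProcessed is referenced above but not shown. A minimal sketch, assuming the intent is to drain the frame output node once per quantum (the forwarding step is illustrative only):

        private void GraphOnQuantumProcessed(AudioGraph sender, object args)
        {
            // Pull whatever the device input delivered during the quantum just processed
            AudioFrame frame = _frameOutputNode.GetFrame();

            // Inspect or forward the captured frame here, e.g. push it back
            // through _frameInputNode with AddFrame for a loopback test.
        }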
Example #4
 // Creates FrameInputNode for taking in audio frames
 private void CreateFrameInputNode()
 {
     // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
     AudioEncodingProperties nodeEncodingProperties = audioGraph.EncodingProperties;
     frameInputNode = audioGraph.CreateFrameInputNode(nodeEncodingProperties);
     // Initialize the Frame Input Node in the stopped state
     frameInputNode.Stop();
     // Hook up an event handler so we can start generating samples when needed
     // This event is triggered when the node is required to provide data
     frameInputNode.QuantumStarted += node_QuantumStarted;
 }
Example #5
        private void CreateToxInputNode()
        {
            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            var nodeEncodingProperties = _audioGraph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            _toxInputNode = _audioGraph.CreateFrameInputNode(nodeEncodingProperties);

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            _toxInputNode.QuantumStarted += ToxInputNodeQuantumStartedHandler;
        }
Example #6
        private void CreateNotes(int noteCount, AudioEncodingProperties props,
                                 AudioDeviceOutputNode outputNode)
        {
            for (int i = 0; i < noteCount; i++)
            {
                double fr = 0;
                if (i < 3)
                {
                    switch (i)
                    {
                    case 0:
                        fr = startC;
                        break;

                    case 1:
                        fr = startE;
                        break;

                    case 2:
                        fr = startG;
                        break;
                    }
                }
                else
                {
                    // One octave above the note added three entries back
                    var lastOfNote = inputNotes.Values.ElementAt(i - 3);
                    fr = lastOfNote.frequency * 2.0;
                }

                var nyQuist = graph.EncodingProperties.SampleRate / 2.0;

                // No need to generate notes above the Nyquist frequency;
                // they would just alias and sound like noise.
                if (fr > nyQuist)
                {
                    break;
                }

                var inNode = graph.CreateFrameInputNode(props);
                inNode.AddOutgoingConnection(outputNode);

                // Initialize the frame input node in the stopped state
                inNode.Stop();

                // Hook up an event handler so we can start generating samples when needed
                // This event is triggered when the node is required to provide data
                inNode.QuantumStarted += node_QuantumStarted;

                var note = new Note(graph, inNode, fr);

                inputNotes.Add(inNode, note);
            }
        }
Example #7
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;

            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
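
The node_QuantumStarted handler attached in several of these examples follows the pattern from the Windows AudioCreation sample: check how many samples the graph needs this quantum, synthesize that many, and pass them in as an AudioFrame. A sketch under those assumptions (the 1 kHz tone, theta field, and amplitude are illustrative; IMemoryBufferByteAccess is the declaration shown after Example #1, and unsafe code must be enabled):

        private double theta = 0; // running phase of the test tone

        private void node_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            // RequiredSamples is how many samples the graph needs to fill this quantum
            uint numSamplesNeeded = (uint)args.RequiredSamples;

            if (numSamplesNeeded != 0)
            {
                AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
                frameInputNode.AddFrame(audioData);
            }
        }

        private unsafe AudioFrame GenerateAudioData(uint samples)
        {
            // The graph runs float PCM, so 4 bytes per (mono) sample
            uint bufferSize = samples * sizeof(float);
            AudioFrame frame = new AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* dataInBytes, out uint capacity);
                float* dataInFloat = (float*)dataInBytes;

                float freq = 1000;       // illustrative tone frequency
                float amplitude = 0.3f;  // illustrative level
                int sampleRate = (int)graph.EncodingProperties.SampleRate;
                double sampleIncrement = (freq * (Math.PI * 2)) / sampleRate;

                for (int i = 0; i < samples; i++)
                {
                    dataInFloat[i] = (float)(amplitude * Math.Sin(theta));
                    theta += sampleIncrement;
                }
            }

            return frame;
        }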
Example #8
        private AudioGraphOutput(AudioGraph audioGraph, AudioDeviceOutputNode deviceOutputNode)
        {
            AudioGraph = audioGraph ?? throw new ArgumentNullException(nameof(audioGraph));
            AudioEncodingProperties nodeEncodingProperties = audioGraph.EncodingProperties;

            //nodeEncodingProperties.ChannelCount = Channels;
            frameInputNode = audioGraph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += Node_QuantumStarted;
            sampleSize = sizeof(float) * AudioGraph.EncodingProperties.ChannelCount;
            sampleCap  = int.MaxValue - (int)(int.MaxValue % sampleSize);
        }
Example #9
        private async Task CreateAudioGraph(uint samplingRate)
        {
            // Create an AudioGraph with default settings
            var encoding = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);

            encoding.Audio = AudioEncodingProperties.CreatePcm(samplingRate, 1, 16);
            var settings = new AudioGraphSettings(AudioRenderCategory.Speech);

            settings.EncodingProperties = encoding.Audio;
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            graph = result.Graph;
            graph.EncodingProperties.SampleRate = samplingRate;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            frameInputNode.AudioFrameCompleted += FrameInputNode_AudioFrameCompleted;
            //frameInputNode.QuantumStarted += node_QuantumStarted;

            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
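
Unlike the other examples, this one uses AudioFrameCompleted, which fires after a frame previously supplied with AddFrame has been fully consumed. A minimal sketch of such a handler, assuming frames are queued somewhere upstream (NextFrame is a hypothetical helper, not part of the original):

        private void FrameInputNode_AudioFrameCompleted(AudioFrameInputNode sender, AudioFrameCompletedEventArgs args)
        {
            // The completed frame has finished playing; feed the next one if available
            AudioFrame next = NextFrame(); // hypothetical queue accessor
            if (next != null)
            {
                sender.AddFrame(next);
            }
        }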
Example #10
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            DeviceInformation  SelectedDevice = DevicesBox.SelectedItem as DeviceInformation;
            AudioGraphSettings settings       = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);


            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            AudioDeviceOutputNode deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other, graph.EncodingProperties, SelectedDevice);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                System.Diagnostics.Debug.WriteLine(String.Format("Audio Device Input unavailable because {0}", deviceInputNodeResult.Status.ToString()));

                return;
            }

            AudioDeviceInputNode deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            frameOutputNode = graph.CreateFrameOutputNode();
            deviceInputNode.AddOutgoingConnection(frameOutputNode);

            AudioFrameInputNode frameInputNode = graph.CreateFrameInputNode();

            frameInputNode.AddOutgoingConnection(deviceOutputNode);

            // Attach to QuantumStarted event in order to receive synchronous updates from audio graph (to capture incoming audio).
            graph.QuantumStarted += GraphOnQuantumProcessed;

            graph.Start();
        }
Example #11
        /// <summary>
        /// Setup an AudioGraph with PCM input node and output for media playback
        /// </summary>
        private async Task CreateAudioGraph()
        {
            AudioGraphSettings     graphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            CreateAudioGraphResult graphResult   = await AudioGraph.CreateAsync(graphSettings);

            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                UpdateUI(() =>
                {
                    this.Messages.Add(new MessageDisplay($"Error in AudioGraph construction: {graphResult.Status}", Sender.Other));
                });
                return;
            }

            audioGraph = graphResult.Graph;

            CreateAudioDeviceOutputNodeResult outputResult = await audioGraph.CreateDeviceOutputNodeAsync();

            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                UpdateUI(() =>
                {
                    this.Messages.Add(new MessageDisplay($"Error in audio OutputNode construction: {outputResult.Status}", Sender.Other));
                });
                return;
            }

            outputNode = outputResult.DeviceOutputNode;

            // Create the FrameInputNode using PCM format; 16kHz, 1 channel, 16 bits per sample
            AudioEncodingProperties nodeEncodingProperties = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            frameInputNode = audioGraph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(outputNode);

            // Initialize the FrameInputNode in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;

            audioGraph.Start();
        }
Example #12
        public static async Task CreateDeviceOutputNode()
        {
            Console.WriteLine("Creating AudioGraphs");
            // Create an AudioGraph configured for 32-bit float, 48 kHz stereo PCM
            AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.GameChat);

            graphsettings.EncodingProperties               = new AudioEncodingProperties();
            graphsettings.EncodingProperties.Subtype       = "Float";
            graphsettings.EncodingProperties.SampleRate    = 48000;
            graphsettings.EncodingProperties.ChannelCount  = 2;
            graphsettings.EncodingProperties.BitsPerSample = 32;
            graphsettings.EncodingProperties.Bitrate       = 3072000; // 48000 samples/s * 2 ch * 32 bits
            CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);

            if (graphresult.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            outgraph = graphresult.Graph;



            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await outgraph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create the FrameInputNode with the same (already stereo) format as the graph
            frameInputNode = outgraph.CreateFrameInputNode(outgraph.EncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameInputNode.Start();
            ready = true;
            outgraph.Start();
        }
Example #13
        public async Task Start()
        {
            m_isFlushing       = false;
            m_isRunning        = false;
            m_waveBufferReader = null;

            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            var result   = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception("AudioGraph creation error: " + result.Status);
            }

            m_audioGraph = result.Graph;


            var outputDeviceResult = await m_audioGraph.CreateDeviceOutputNodeAsync();

            if (outputDeviceResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Unable to create audio playback device: " + outputDeviceResult.Status);
            }

            m_deviceOutputNode = outputDeviceResult.DeviceOutputNode;

            // Create the FrameInputNode at the same format as the graph,
            var nodeEncodingProperties = m_audioGraph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            nodeEncodingProperties.SampleRate   = 16000;
            m_frameInputNode = m_audioGraph.CreateFrameInputNode(nodeEncodingProperties);
            m_frameInputNode.AddOutgoingConnection(m_deviceOutputNode);
            m_frameInputNode.QuantumStarted += OnQuantumStarted;

            m_isRunning  = true;
            m_isFlushing = false;
            m_audioGraph.Start();
        }
Example #14
        private async void Page_Loaded(object sender, RoutedEventArgs e)
        {
            // midi

            var s = MidiInPort.GetDeviceSelector();
            var information = await DeviceInformation.FindAllAsync(s);

            var list = information.ToList();
            port = await MidiInPort.FromIdAsync(list.ElementAt(2).Id);
            port.MessageReceived += Port_MessageReceived;

            // audio
            var settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            var creation = await AudioGraph.CreateAsync(settings);

            graph = creation.Graph;
            output = await graph.CreateDeviceOutputNodeAsync();

            var encoding = graph.EncodingProperties;
            encoding.ChannelCount = 1;
            input = graph.CreateFrameInputNode(encoding);
            input.AddOutgoingConnection(output.DeviceOutputNode);
            input.Stop();

            input.QuantumStarted += Input_QuantumStarted;

            graph.Start();

            // MIDI note number to frequency: f(n) = 440 * 2^((n - 69) / 12),
            // factored here as (440 / 32) * 2^((n - 9) / 12)

            float a = 440; // A4 = 440 Hz
            for (int x = 0; x < 127; ++x)
            {
                notes[x] = (a / 32f) * (float)Math.Pow(2f, ((x - 9f) / 12f));
            }
        }
Example #15
        private async Task<CreateAudioFileOutputNodeResult> SaveAudioToFile(
            StorageFile file,
            IProgress<string> status)
        {
            _finished = false;
            status.Report("Saving audio to file");

            var mediaEncodingProfile =
                CreateMediaEncodingProfile(file);

            if (!_audioData.IsStereo && mediaEncodingProfile.Audio != null)
            {
                mediaEncodingProfile.Audio.ChannelCount = 1;
            }

            // Initialize FileOutputNode
            var result =
                await _audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                return(result);
            }

            _fileOutputNode = result.FileOutputNode;
            _fileOutputNode.Stop();

            // Initialize FrameInputNode and connect it to fileOutputNode
            _frameInputNode = _audioGraph.CreateFrameInputNode(
                // EncodingProprties are different than for input file
                _fileOutputNode.EncodingProperties
                //audioEncodingProperties
                );

            _frameInputNode.AddOutgoingConnection(_fileOutputNode);
            _frameInputNode.Stop();

            // Add a handler which will transfer every audioData sample to audio frame
            _frameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            _audioDataCurrentPosition = 0;

            // Start the graph; it will write the audio file frame by frame,
            // raising QuantumStarted for each quantum
            _audioGraph.Start();
            // don't start fileOutputNode yet because it will record zeros

            // frameInputNode was initialized in the stopped state, so start it now
            _frameInputNode.Start();

            // didn't find a better way to wait for writing to file
            while (!_finished)
            {
                await Task.Delay(50);
            }

            // once all audioData samples are consumed and the graph has stopped
            await _fileOutputNode.FinalizeAsync();

            // clean status and progress
            status.Report("");
            _ioProgress.Report(0);

            return(result);
        }
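
The FrameInputNode_QuantumStarted handler that transfers _audioData into frames and eventually sets _finished is not shown. A rough sketch of its control flow, assuming _audioData exposes its sample count via Length and float samples via an indexer (both assumptions; if the file output format is 16-bit PCM the copy would need to convert to shorts instead), reusing the IMemoryBufferByteAccess declaration shown after Example #1:

        private unsafe void FrameInputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            if (args.RequiredSamples <= 0)
            {
                return;
            }

            // Every sample delivered: stop the nodes and release the waiting loop
            if (_audioDataCurrentPosition >= _audioData.Length) // Length in samples (assumed member)
            {
                _frameInputNode.Stop();
                _audioGraph.Stop();
                _finished = true;
                return;
            }

            int samplesToWrite = Math.Min(args.RequiredSamples, _audioData.Length - _audioDataCurrentPosition);
            var frame = new AudioFrame((uint)(samplesToWrite * sizeof(float)));

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* data, out uint capacity);
                float* samples = (float*)data;

                for (int i = 0; i < samplesToWrite; i++)
                {
                    samples[i] = _audioData[_audioDataCurrentPosition + i]; // indexer assumed
                }
            }

            _audioDataCurrentPosition += samplesToWrite;
            sender.AddFrame(frame);
        }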
Example #16
        /// <summary>
        /// Create output audio graph
        /// </summary>
        /// <param name="deviceId">Output device id; null or "Default" selects the default render device</param>
        public static async Task CreateOutputDeviceNode(string deviceId = null)
        {
            // If a graph already exists for a different output device, dispose it so it can be rebuilt
            if (outgraph != null && OutputDeviceID != outgraph.PrimaryRenderDevice.Id)
            {
                HeavyDisposeOutGraph();
            }
            // Increment use counter
            else
            {
                outGraphCount++;
            }

            Console.WriteLine("Creating AudioGraphs");

            // Create an AudioGraph configured for 32-bit float, 48 kHz stereo PCM
            AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.Media);

            graphsettings.EncodingProperties               = new AudioEncodingProperties();
            graphsettings.EncodingProperties.Subtype       = "Float";
            graphsettings.EncodingProperties.SampleRate    = 48000;
            graphsettings.EncodingProperties.ChannelCount  = 2;
            graphsettings.EncodingProperties.BitsPerSample = 32;
            graphsettings.EncodingProperties.Bitrate       = 3072000; // 48000 samples/s * 2 ch * 32 bits

            // Determine selected device
            DeviceInformation selectedDevice;

            if (deviceId == "Default" || deviceId == null)
            {
                selectedDevice = await DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioRenderId(Windows.Media.Devices.AudioDeviceRole.Default));

                Windows.Media.Devices.MediaDevice.DefaultAudioRenderDeviceChanged += MediaDevice_DefaultAudioRenderDeviceChanged;
            }
            else
            {
                try
                {
                    selectedDevice = await DeviceInformation.CreateFromIdAsync(deviceId);
                }
                catch
                {
                    selectedDevice = await DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioRenderId(Windows.Media.Devices.AudioDeviceRole.Default));

                    deviceId = "Default";
                }
            }

            // Set selected device
            graphsettings.PrimaryRenderDevice = selectedDevice;

            // Create graph
            CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);

            if (graphresult.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            // "Save" graph
            outgraph = graphresult.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await outgraph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create the FrameInputNode with the same (already stereo) format as the graph
            frameInputNode = outgraph.CreateFrameInputNode(outgraph.EncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            OutputDeviceID = deviceId;

            // Begin playing
            frameInputNode.Start();
            ready = true;
            outgraph.Start();
        }
Example #17
        private void CreateNotes(int noteCount, AudioEncodingProperties props,
                                 AudioDeviceOutputNode outputNode)
        {
            NoteMC currentNote = null;

            for (int i = 0; i < noteCount; i++)
            {
                double fr = 0;
                if (i < 3)
                {
                    switch (i)
                    {
                    case 0:
                        fr = startC;
                        break;

                    case 1:
                        fr = startE;
                        break;

                    case 2:
                        fr = startG;
                        break;
                    }
                }
                else
                {
                    var lastOfNote = allFrequencyList[i - 3];
                    fr = lastOfNote * 2.0;
                }

                allFrequencyList.Add(fr);

                var nyQuist = graph.EncodingProperties.SampleRate / 2.0;

                // No need to generate notes above the Nyquist frequency;
                // they would just alias and sound like noise.
                if (fr > nyQuist)
                {
                    break;
                }

                if (currentNote == null ||
                    (currentNote.GetFrequencyCount() >= props.ChannelCount))
                {
                    var inNode = graph.CreateFrameInputNode(props);
                    inNode.AddOutgoingConnection(outputNode);

                    // Initialize the frame input node in the stopped state
                    inNode.Stop();

                    // Hook up an event handler so we can start generating samples when needed
                    // This event is triggered when the node is required to provide data
                    inNode.QuantumStarted += node_QuantumStarted;

                    currentNote = new NoteMC(graph, inNode, fr);

                    inputNotes.Add(inNode, currentNote);
                }
                else
                {
                    currentNote.AddFrequency(fr);
                    currentNote.AddAngle(0.0);
                }
            }
        }
Example #18
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;
            
            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
Example #19
        private async void InitializeAsync()
        {
            await UpdatePlaylistAsync();

            MasterVolume = 100;

            await AudioSourceManager.InitializeAsync(new FileSystem(), "GroorineCore");


            var settings = new AudioGraphSettings(AudioRenderCategory.Media);

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                await new MessageDialog("Can't create AudioGraph! Application will stop...").ShowAsync();
                Application.Current.Exit();
            }


            _graph = result.Graph;



            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                await new MessageDialog("Can't create DeviceOutputNode! Application will stop...").ShowAsync();
                Application.Current.Exit();
            }
            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 2;


            _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
            _frameInputNode.AddOutgoingConnection(_deviceOutputNode);

            _frameInputNode.Stop();
            _player = new Player((int)nodeEncodingProperties.SampleRate);



            _player.PropertyChanged += (sender, args) =>
            {
                switch (args.PropertyName)
                {
                case nameof(_player.IsPlaying):
                    _synchronizationContext.Post(o =>
                    {
                        if (!_player.IsPlaying && !_player.IsPausing && IsPlaying)
                        {
                            IsPlaying = CanStop = false;
                        }
                    }, null);
                    break;
                }
            };


            _frameInputNode.QuantumStarted += (sender, args) =>
            {
                var numSamplesNeeded = (uint)args.RequiredSamples;

                if (numSamplesNeeded != 0)
                {
                    //_synchronizationContext.Post(o =>
                    //{
                    //	foreach (var a in Channels)
                    //		a.Update();
                    AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
                    _frameInputNode.AddFrame(audioData);
                    //}, null);
                }
            };

            _graph.Start();
            _frameInputNode.Start();

            /*
             * _player = new Player();
             *
             * _buffer = _player.CreateBuffer(50);
             *
             * _bwp = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
             * _nativePlayer = new WasapiOutRT(AudioClientShareMode.Shared, 50);
             * _nativePlayer.Init(() => _bwp);
             * _nativePlayer.Play();
             */
            IsInitialized = true;

            /*
             * while (true)
             * {
             *      _player.GetBuffer(_buffer);
             *
             *      var b = ToByte(_buffer);
             *      _bwp.AddSamples(b, 0, b.Length);
             *      while (_bwp.BufferedBytes > _buffer.Length * 2)
             *              await Task.Delay(1);
             * }
             */
        }
Example #20
        void ISynthesizer.SetUp()
        {
            using (WavePlayer player = WavePlayer.CreateWavePlayer())
            {
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            AudioGraph.CreateAsync(settings).AsTask().ContinueWith(graphTask =>
            {
                CreateAudioGraphResult graphResult = graphTask.Result;

                if(graphResult.Status != AudioGraphCreationStatus.Success)
                {
                    this.EmitFailed();
                }
                else
                {
                    graphResult.Graph.CreateDeviceOutputNodeAsync().AsTask().ContinueWith(nodeTask =>
                    {
                        CreateAudioDeviceOutputNodeResult nodeResult = nodeTask.Result;

                        if(nodeResult.Status != AudioDeviceNodeCreationStatus.Success)
                        {
                            this.EmitFailed();
                        }
                        else
                        {
                            _audioGraph = graphResult.Graph;
                            _frameInputNode = _audioGraph.CreateFrameInputNode();
                            _frameInputNode.AddOutgoingConnection(nodeResult.DeviceOutputNode);
                            _frameInputNode.QuantumStarted += this.OnQuantumStarted;
                            _channelsNumber = _audioGraph.EncodingProperties.ChannelCount;
                            _waveSource = new WaveSource(_audioGraph.EncodingProperties.SampleRate, _channelsNumber);
                            this.EmitReady();
                        }
                    });
                }
            });
        }