Exemple #1
0
        /// <summary>
        /// Creates the audio graph and its device output node, then preloads every
        /// configured sound file from the app package into the loaded-file cache.
        /// Silently aborts if the graph or the output node cannot be created.
        /// </summary>
        private async Task InitializeAsync()
        {
            var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }

            _audioGraph = graphResult.Graph;

            var deviceOutputResult = await _audioGraph.CreateDeviceOutputNodeAsync();
            if (deviceOutputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }

            _outputNode = deviceOutputResult.DeviceOutputNode;
            _audioGraph.Start();

            // Resolve each configured asset file and cache it under its key.
            foreach (var entry in _filenames)
            {
                var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///Assets/{entry.Value}"));
                _loaded.Add(entry.Key, file);
            }
        }
Exemple #2
0
        /// <summary>
        /// Builds the audio graph: a device output node plus a file input node for
        /// the packaged "audio.mp3", connects them and starts playback.
        /// NOTE(review): kept as async void to preserve the existing signature, but
        /// exceptions thrown here are unobservable — prefer async Task if callers allow.
        /// </summary>
        private async void initGraph()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            // Fix: the original dereferenced result.Graph without checking the
            // creation status, turning any failure into a NullReferenceException.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

            // The original also constructed a FileOpenPicker here that was never
            // shown or read; that dead code has been removed.
            StorageFile file = await GetPackagedFile(null, "audio.mp3");

            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
            if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }

            fileInput = fileInputResult.FileInputNode;
            fileInput.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }
        /// <summary>
        /// Page entry point: builds the audio graph, creates the file input and
        /// device output nodes, connects them, applies an effect and starts the
        /// graph. The commented lines are alternative node wirings kept from the
        /// sample this page is based on.
        /// </summary>
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            await InitAudioGraph();
            await CreateFileInputNode();

            //await CreateFileOutputNode();
            await CreateDeviceOutputNode();

            //CreateFrameInputNode();
            //CreateFrameOutputNode();
            //CreateSubmixNode();



            //fileInputNode.AddOutgoingConnection(deviceOutputNode, .7);
            // AudioFileOutputNode fileOutputNode;
            //fileInputNode.AddOutgoingConnection(fileOutputNode, 1.0);
            //AudioSubmixNode submix = audioGraph.CreateSubmixNode();
            fileInputNode.AddOutgoingConnection(deviceOutputNode);
            //frameInputNode.AddOutgoingConnection(deviceOutputNode);
            //submix.AddOutgoingConnection(deviceOutputNode);
            //submix.AddOutgoingConnection(frameOutputNode);

            AddEffect();
            //AddCustomEffect();


            audioGraph.Start();
            //frameInputNode.Start();
        }
Exemple #4
0
        /// <summary>
        /// Creates a file input node for <paramref name="file"/> on the shared
        /// graph, connects it to the device output and starts playback from 0s.
        /// Failures to read the file are logged via Debug and abort playback.
        /// </summary>
        /// <param name="file">Audio file to play.</param>
        public static async Task PlayAudio(StorageFile file)
        {
            // Ensure the graph and output device exist before creating the input node.
            await AudioDevices();

            _isAudioPlaying = true;
            CreateAudioFileInputNodeResult fileInputResult = await audioflow.CreateFileInputNodeAsync(file);

            if (AudioFileNodeCreationStatus.Success != fileInputResult.Status)
            {
                // Cannot read input file
                Debug.WriteLine(String.Format("Cannot read input file because {0}", fileInputResult.Status.ToString()));
                _isAudioPlaying = false;
                return;
            }

            // NOTE(review): _isAudioPlaying was set true just above with no await in
            // between, so this branch is unreachable unless another thread clears
            // the flag concurrently — confirm intent before removing.
            if (!_isAudioPlaying)
            {
                Debug.WriteLine("Error detected!");
                return;
            }

            fileInput = fileInputResult.FileInputNode;
            fileInput.FileCompleted += FileInput_FileCompleted;
            try
            {
                // Connecting can throw if the output node is missing or closed;
                // the failure is logged and the graph is started regardless.
                fileInput.AddOutgoingConnection(deviceOuput);
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.ToString());
            }
            fileInput.StartTime = TimeSpan.FromSeconds(0);
            audioflow.Start();
            // NOTE(review): the flag is cleared as soon as the graph starts, not
            // when playback completes — it does not actually track "is playing".
            _isAudioPlaying = false;
        }
        /// <summary>
        /// Creates the audio graph (lowest-latency quantum) and its device output,
        /// loads the sample file nodes and starts playback. Shows a dialog and
        /// aborts if either the graph or the output cannot be created.
        /// </summary>
        private async void InitAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                // pick lowest latency available to devices in the graph
                QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency
            };

            // create the audio graph
            var graphCreation = await AudioGraph.CreateAsync(settings);
            _graph = graphCreation.Graph;
            if (_graph == null)
            {
                // failed to create audio graph
                await new MessageDialog("Failed to create audio graph").ShowAsync();
                return;
            }

            // create the output. You could also create file output here to stream to a temp file or similar
            var outputCreation = await _graph.CreateDeviceOutputNodeAsync();
            _deviceOutput = outputCreation.DeviceOutputNode;
            if (_deviceOutput == null)
            {
                // failed to create audio output
                await new MessageDialog("Failed to create device output").ShowAsync();
                return;
            }

            // load all of the samples into graph nodes
            BuildFileNodes();

            // start playback
            _graph.Start();
        }
Exemple #6
0
        /// <summary>
        /// Lazily builds the audio graph and its device output node. Returns true
        /// when the player is ready, false when any component could not be created.
        /// Safe to call repeatedly; already-initialized instances return immediately.
        /// A muted player builds the graph but does not start it.
        /// </summary>
        public async Task <bool> InitializeAsync()
        {
            if (this.IsInitialized)
            {
                return true;
            }

            var graphCreation = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));
            if (graphCreation.Status != AudioGraphCreationStatus.Success)
            {
                return false;
            }
            _audioGraph = graphCreation.Graph;

            var outputCreation = await _audioGraph.CreateDeviceOutputNodeAsync();
            if (outputCreation.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return false;
            }
            _outputNode = outputCreation.DeviceOutputNode;

            // Keep the graph stopped while muted; Start() happens on unmute.
            if (!this.IsMute)
            {
                _audioGraph.Start();
            }

            this.IsInitialized = true;
            return true;
        }
Exemple #7
0
        /// <summary>
        /// Toggles between recording and stopped, driven by the StartStop button
        /// label. Stopping finalizes the output file; the GetFileName background
        /// turns green on success and red when finalization fails.
        /// </summary>
        private async Task ToggleRecordStop()
        {
            var label = StartStop.Content;

            if (label.Equals("Record"))
            {
                graph.Start();
                StartStop.Content = "Stop";
            }
            else if (label.Equals("Stop"))
            {
                // Good idea to stop the graph to avoid data loss
                graph.Stop();

                TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();
                if (finalizeResult != TranscodeFailureReason.None)
                {
                    // Finalization of file failed. Check result code to see why
                    GetFileName.Background = new SolidColorBrush(Colors.Red);
                    return;
                }

                StartStop.Content = "Record";
                GetFileName.Background = new SolidColorBrush(Colors.Green);
                StartStop.IsEnabled = false;
            }
        }
        /// <summary>
        /// Creates the audio graph with a device (capture) input node and wires a
        /// visualizer pipeline: input -> PlaybackSource -> SourceConverter ->
        /// spectrum control. Throws InvalidOperationException when graph or input
        /// node creation fails.
        /// </summary>
        async void CreateAudioGraphAsync()
        {
            var graphResult = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media));
            if (graphResult.Status != AudioGraphCreationStatus.Success)
            {
                throw new InvalidOperationException($"Graph creation failed {graphResult.Status}");
            }
            _graph = graphResult.Graph;

            var inputNodeResult = await _graph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Media);
            if (inputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException($"Input node creation failed {inputNodeResult.Status}");
            }
            _inputNode = inputNodeResult.DeviceInputNode;

            // Visualization source fed directly by the capture node.
            _source = AudioVisualizer.PlaybackSource.CreateFromAudioNode(_inputNode);

            // Map the spectrum onto 5 octaves (A2..A7), 5 bars per note, with
            // fast-rise/slow-fall smoothing; RMS is used to gate noise.
            _converter = new SourceConverter
            {
                Source           = _source.Source,
                MinFrequency     = 110.0f,     // Note A2
                MaxFrequency     = 3520.0f,    // Note A7
                FrequencyCount   = 12 * 5 * 5, // 5 octaves, 5 bars per note
                FrequencyScale   = ScaleType.Logarithmic,
                SpectrumRiseTime = TimeSpan.FromMilliseconds(20),
                SpectrumFallTime = TimeSpan.FromMilliseconds(200),
                RmsRiseTime      = TimeSpan.FromMilliseconds(20), // fast rise, slow fall
                RmsFallTime      = TimeSpan.FromMilliseconds(500),
                ChannelCount     = 1
            };
            notesSpectrum.Source = _converter;

            _graph.Start();
        }
Exemple #9
0
        /// <summary>
        /// Recording code is taken from UWP samples and slightly reduced
        ///
        /// - see official UWP samples on GitHub
        ///   https://github.com/Microsoft/Windows-universal-samples/blob/master/Samples/AudioCreation/cs/AudioCreation/Scenario2_DeviceCapture.xaml.cs
        ///
        /// Builds the graph, replaces any stale temporary WAV file, attaches a
        /// file output node and starts recording from the device input.
        /// </summary>
        public async Task StartRecording()
        {
            await CreateAudioGraph();

            // Remove any leftover temporary file from a previous run.
            if (await ApplicationData.Current.TemporaryFolder.TryGetItemAsync(TemporaryWaveFile) is StorageFile stale)
            {
                await stale.DeleteAsync(StorageDeleteOption.Default);
            }

            var temporaryFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(TemporaryWaveFile);
            var fileProfile   = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(temporaryFile, fileProfile);
            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                await new MessageDialog("Cannot create output file: " + fileOutputNodeResult.Status).ShowAsync();
                return;
            }
            _fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            _deviceInputNode.AddOutgoingConnection(_fileOutputNode);
            _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

            // Ta da!
            _audioGraph.Start();
        }
        /// <summary>
        /// Plays one of the four loaded sounds, selected by fileIndex modulo 4.
        /// On the very first call the three other nodes are seeked to just before
        /// their end so they do not audibly play when the graph starts.
        /// No-op until initialization has completed.
        /// </summary>
        /// <param name="fileIndex">Non-negative sound selector; wrapped into 0..3.</param>
        public void PlaySound(int fileIndex)
        {
            if (!hasInit)
            {
                return;
            }

            int index = fileIndex % 4;

            if (!isStarted)
            {
                // Signal started
                isStarted = true;

                // Stop other sounds from playing on first playthrough.
                for (int i = 0; i < 4; i++)
                {
                    if (i == index)
                    {
                        continue;
                    }
                    // Fix: the original seeked to Duration.Milliseconds - 1, which
                    // is only the 0-999 ms *component* of the duration, so any file
                    // longer than one second was seeked near its start and still
                    // played. Seek to 1 ms before the true end instead.
                    inputNodes[i].Seek(inputNodes[i].Duration - TimeSpan.FromMilliseconds(1));
                }
                audioGraph.Start();
            }

            if (soundReady[index])
            {
                inputNodes[index].Seek(TimeSpan.FromSeconds(0.0));
                inputNodes[index].Start();
            }
        }
Exemple #11
0
        /// <summary>
        /// Starts (or resumes) playback and arms a one-second timer that raises
        /// position updates until the track ends, at which point the timer is
        /// torn down and OnMediaEnd is raised. No-op when no input node is loaded.
        /// </summary>
        public void Play()
        {
            if (_inputNode == null)
            {
                return;
            }

            _audioGraph?.Start();
            State = MediaPlaybackState.Playing;

            // Drop any previous progress timer before arming a new one.
            _timer?.Dispose();
            _timer = null;

            _timer = new Timer(_ =>
            {
                if (IsEnd)
                {
                    // Track finished: stop ticking and notify listeners once.
                    _timer?.Dispose();
                    _timer = null;
                    OnMediaEnd();
                }
                else
                {
                    OnPositionChanged();
                }
            }, null, TimeSpan.Zero, TimeSpan.FromSeconds(1));
        }
Exemple #12
0
        /// <summary>
        /// Creates the audio graph and device output, then loads every sound bank
        /// sample into file input nodes grouped per bank in InputNodesList.
        /// FileNames[0] holds one-shot samples; FileNames[1] holds looped samples
        /// (LoopCount = null loops indefinitely). Silently aborts — as the original
        /// did — if the graph or the output device cannot be created.
        /// </summary>
        public async Task InitializeSounds()
        {
            soundBankInitializer = new SoundBanksInitializer();

            POneInputNodes = new ObservableCollection <AudioFileInputNode>();
            PTwoInputNodes = new ObservableCollection <AudioFileInputNode>();
            WOneInputNodes = new ObservableCollection <AudioFileInputNode>();
            WTwoInputNodes = new ObservableCollection <AudioFileInputNode>();

            InputNodesList = new ObservableCollection <ObservableCollection <AudioFileInputNode> >
            {
                POneInputNodes,
                PTwoInputNodes,
                WOneInputNodes,
                WTwoInputNodes
            };

            AudioGraphSettings     settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result   = await AudioGraph.CreateAsync(settings);

            // Guard clauses replace the original's deep nesting; failure paths
            // remain silent to preserve existing behavior.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            graph = result.Graph;

            // create the output device
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            // make sure the audio output is available
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            graph.ResetAllNodes();

            foreach (SoundBank soundBank in soundBankInitializer.SoundBanks)
            {
                // Hoisted: the original re-ran IndexOf (an O(banks) scan) once per
                // sample file; the bank's node collection is constant per bank.
                var bankNodes = InputNodesList[soundBankInitializer.SoundBanks.IndexOf(soundBank)];

                // One-shot samples.
                foreach (string fileName in soundBank.FileNames[0])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    bankNodes.Add(FileInputNodesDictionary[fileName]);
                }

                // Looped samples: LoopCount = null means loop forever.
                foreach (string fileName in soundBank.FileNames[1])
                {
                    await CreateInputNodeFromFile("ms-appx:///Assets/AudioSamples/" + fileName);
                    FileInputNodesDictionary[fileName].LoopCount = null;
                    bankNodes.Add(FileInputNodesDictionary[fileName]);
                }
            }

            InitializeEffects();
            graph.Start();
        }
Exemple #13
0
        /*
         * public AudioFrameInputNode CreateAudioInputNode(AudioGraph audioGraph)
         * {
         *  var inputNode = audioGraph.CreateFrameInputNode(AudioEncodingProperties.CreatePcm(16000, 4, 32));
         *  inputNode.QuantumStarted += InputNode_QuantumStarted;
         * }
         *
         * private void InputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
         * {
         *  throw new NotImplementedException();
         * }*/

        /// <summary>
        /// Starts the underlying audio graph on first call; later calls are no-ops.
        /// </summary>
        public void Open()
        {
            if (_isStarted)
            {
                return;
            }

            _audioGraph.Start();
            _isStarted = true;
        }
        /// <summary>
        /// Builds a full-duplex audio graph: a device output driven by a mono
        /// frame input node (generated samples) and a device input feeding a
        /// frame output node (captured samples). The graph runs continuously;
        /// only the frame input node is started/stopped by callers.
        /// Silently aborts if any graph component cannot be created.
        /// </summary>
        private async Task InitializeAudioAsync()
        {
            // Create an AudioGraph, forcing 22.05 kHz mono 16-bit PCM.
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                EncodingProperties = AudioEncodingProperties.CreatePcm(22050, 1, 16)
            };

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _graph = result.Graph;

            // Render (speaker) endpoint.
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            _deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Capture (microphone) endpoint.
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await _graph.CreateDeviceInputNodeAsync(MediaCategory.Other);
            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            _deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Frame nodes use the graph format, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = _graph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;

            _frameInputNode = _graph.CreateFrameInputNode(nodeEncodingProperties);
            _frameInputNode.AddOutgoingConnection(_deviceOutputNode);

            _frameOutputNode = _graph.CreateFrameOutputNode(nodeEncodingProperties);
            _deviceInputNode.AddOutgoingConnection(_frameOutputNode);

            // Initialize the Frame Input Node in the stopped state.
            _frameInputNode.Stop();

            // QuantumStarted fires whenever the node must provide more sample data.
            _frameInputNode.QuantumStarted += node_QuantumStarted;
            _graph.QuantumProcessed += GraphOnQuantumProcessed;

            // Start the graph since we will only start/stop the frame input node.
            _graph.Start();
        }
Exemple #15
0
        /// <summary>
        /// Wraps an already-built audio graph and its grid of file input nodes,
        /// and starts the graph immediately.
        /// </summary>
        /// <param name="audioGraph">Fully constructed graph; started once here.</param>
        /// <param name="audioFileInputNodes">Grid of input nodes, started/stopped individually later.</param>
        private AudioMatrix(AudioGraph audioGraph, AudioFileInputNode[,] audioFileInputNodes)
        {
            _audioGraph = audioGraph;
            _audioFileInputNodes = audioFileInputNodes;

            // The graph is started exactly once here; individual input nodes are
            // started/stopped as many times as needed afterwards.
            _audioGraph.Start();
        }
Exemple #16
0
        /// <summary>
        /// Wraps an already-built audio graph and its grid of file input nodes,
        /// and starts the graph immediately.
        /// </summary>
        /// <param name="audioGraph">Fully constructed graph; started once here.</param>
        /// <param name="audioFileInputNodes">Grid of input nodes, started/stopped individually later.</param>
        private AudioMatrix(AudioGraph audioGraph, AudioFileInputNode[,] audioFileInputNodes)
        {
            _audioGraph          = audioGraph;
            _audioFileInputNodes = audioFileInputNodes;

            // The graph is started exactly once here; individual input nodes are
            // started/stopped as many times as needed afterwards.
            _audioGraph.Start();
        }
		/// <summary>
		/// Creates the audio graph and its device output node, resets all nodes
		/// and starts playback.
		/// </summary>
		/// <exception cref="InvalidOperationException">Graph or output creation failed.</exception>
		private async Task CreateAudioGraph()
		{
			var settings = new AudioGraphSettings(AudioRenderCategory.Media);
			var result = await AudioGraph.CreateAsync(settings);

			// Fix: the original dereferenced result.Graph without checking the
			// creation status, turning any failure into a NullReferenceException.
			if (result.Status != AudioGraphCreationStatus.Success)
			{
				throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
			}
			_graph = result.Graph;

			var deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
			if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
			{
				throw new InvalidOperationException($"Device output creation failed: {deviceOutputNodeResult.Status}");
			}
			_deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

			_graph.ResetAllNodes();
			_graph.Start();
		}
        /// <summary>
        /// Creates the audio graph with a caller-specified quantum size
        /// (bufferLength samples), the device output node and the per-note frame
        /// input nodes, then starts the graph. Status is reported through
        /// rootPage.NotifyUser and the speaker/frame container backgrounds.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            settings.DesiredSamplesPerQuantum = bufferLength;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            Debug.WriteLine($"Set samples per quantum to {graph.SamplesPerQuantum}");

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                // Fix: the original fell through here, dereferenced the null
                // DeviceOutputNode and then reported success anyway.
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;

            nodeEncodingProperties.ChannelCount = 1;
            CreateNotes(noteCount, nodeEncodingProperties, deviceOutputNode);

            double lowNote = inputNotes.Values.OrderBy(p => p.frequency).First().frequency;
            double hiNote  = inputNotes.Values.OrderByDescending(p => p.frequency).First().frequency;

            noteCount = inputNotes.Keys.Count;
            // Buffer length in milliseconds, assuming a 48 kHz graph sample rate.
            var mSLength = 1000.0 * (double)bufferLength / 48000.0;

            setupDescription = $"playing {noteCount} notes in {noteCount / 3} octaves ({lowNote:0.0} -> {hiNote:0.0}), {graph.SamplesPerQuantum} samples, {mSLength:0.0}mS buffers";
            DetailText.Text  = setupDescription;

            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
Exemple #19
0
        }                                                                                                              //todo: Access is denied!!!

        /// <summary>
        /// Debug helper: for every file in the "Dbg" subfolder of
        /// <paramref name="sf"/> whose name starts with "[" and ends with "3"
        /// (e.g. .mp3), wires a file input node straight into a file output node
        /// (output name prefixed with the playback speed factor) and starts the
        /// graph to transcode them. Aborts and notifies the user on any node
        /// creation failure; other exceptions are logged and rethrown.
        /// </summary>
        async Task NewMethod(StorageFolder sf)
        {
            try
            {
                // Diagnostic listing of the folders visible under sf.
                foreach (StorageFolder item in await sf.GetFoldersAsync())
                {
                    Debug.WriteLine(item.Name);
                }

                //var dbgFoldr0 = await sf.CreateFolderAsync("Dbg");
                var dbgFoldr = await sf.GetFolderAsync("Dbg");

                var inpFiles = await dbgFoldr.GetFilesAsync(CommonFileQuery.OrderByName);

                foreach (var inpFile in inpFiles.Where(r => r.Name.StartsWith("[") && r.Name.EndsWith("3")))                 //inpFiles.ForEach(inpFile =>{});
                {
                    var outFile = await dbgFoldr.CreateFileAsync($"{_PlaybackSpeedFactor:N1}-{inpFile.Name}", CreationCollisionOption.ReplaceExisting);

                    var fileInputResult = await _graph.CreateFileInputNodeAsync(inpFile); if (AudioFileNodeCreationStatus.Success != fileInputResult.Status)
                    {
                        notifyUser(String.Format("Cannot read input file because {0}", fileInputResult.Status.ToString())); return;
                    }

                    var fileInput = fileInputResult.FileInputNode;                            //_fileInput.StartTime = TimeSpan.FromSeconds(10);				//_fileInput.EndTime = TimeSpan.FromSeconds(20);
                    //fileInput.PlaybackSpeedFactor = _PlaybackSpeedFactor;

                    var fileOutNodeResult = await _graph.CreateFileOutputNodeAsync(outFile, CreateMediaEncodingProfile(outFile));                     // Operate node at the graph format, but save file at the specified format

                    if (fileOutNodeResult.Status != AudioFileNodeCreationStatus.Success)
                    {
                        notifyUser(string.Format("Cannot create output file because {0}", fileOutNodeResult.Status.ToString())); return;
                    }

                    fileInput.AddOutgoingConnection(fileOutNodeResult.FileOutputNode);
                    //fileInput.AddOutgoingConnection(_deviceOutput);
                    fileInput.FileCompleted += fileInput_FileCompleted;

                    // NOTE(review): this "nogo" section mutates encoding properties
                    // after node creation; the label suggests it was experimental
                    // and had no effect — confirm before relying on it.
                    //nogo{
                    fileInput.EncodingProperties.Bitrate       *= 2;
                    fileInput.EncodingProperties.SampleRate    *= 2;
                    fileInput.EncodingProperties.BitsPerSample *= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.Bitrate       /= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.SampleRate    /= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.BitsPerSample /= 2;
                    //}nogo

                    // Keep the node alive for the duration of the transcode.
                    _fileInputs.Add(fileInput);
                }

                _graph.Start();                             //await Task.Delay(12000);            _graph.Stop();
                notifyUser("Started...");
            }
            catch (Exception ex) { Debug.WriteLine(ex); throw; }
        }
Exemple #20
0
        /// <summary>
        /// Record-button handler. First tap: builds the audio graph, swaps the
        /// button image to the "pushed" state and starts recording. Second tap:
        /// stops the graph, finalizes the output file, sends it to the Bing
        /// Speech API and accumulates a per-phrase accuracy score in the UI.
        /// </summary>
        private async void RecordStart(object sender, TappedRoutedEventArgs e)
        {
            if (!recordButtonPushed)
            {
                recordButtonPushed           = true;
                __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512_pushed.png"));
                await CreateAudioGraph();

                graph.Start();
            }
            else
            {
                recordButtonPushed           = false;
                __start_record_button.Source = new BitmapImage(new Uri(this.BaseUri, "Assets/mic-512.png"));
                graph.Stop();

                TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();

                if (finalizeResult != TranscodeFailureReason.None)
                {
                    // Finalization of file failed. Check result code to see why
                    return;
                }

                Guid requestId = Guid.NewGuid();
                var  Uri       = @"https://speech.platform.bing.com/recognize?version=3.0&requestid=" + requestId.ToString() + @"&appID=D4D52672-91D7-4C74-8AD8-42B1D981415A&format=json&locale=en-US&device.os=Windows%20OS&scenarios=ulm&instanceid=f1efbd27-25fd-4212-9332-77cd63176112";

                // NOTE(review): the result is consumed synchronously, so despite
                // the Async suffix SendRequestAsync presumably returns string, not
                // Task<string> — confirm against its declaration.
                var        resp     = SendRequestAsync(Uri, accessToken, "audio/wav; samplerate=16000", path);
                string     json     = resp;
                ParsedJson jsonResp = JsonConvert.DeserializeObject <ParsedJson>(json);
                // Strip the profanity markup tags from the recognized text.
                json = jsonResp.header.lexical.Replace("<profanity>", "");
                json = json.Replace("</profanity>", "");

                // A completed run resets the accumulated score and the result list.
                if (allDone)
                {
                    precise     = 0;
                    count       = 0;
                    Result.Text = "";
                    allDone     = false;
                }
                // Score this phrase against the expected part and append it.
                var temp = StringDifference(parts[count], json, jsonResp.results[0].confidence);
                precise     += temp;
                Result.Text += json + " - " + temp.ToString("F1") + " %\n";
                if (count + 1 < parts.Length)
                {
                    count++;
                }
                else
                {
                    // All parts done: show the overall accuracy ("Общая точность"
                    // = "overall accuracy") and flag the run as complete.
                    Result.Text += "Общая точность: " + (precise / parts.Length).ToString("F1") + "%\n";
                    allDone      = true;
                }
            }
        }
        /// <summary>
        /// Creates the audio graph and its device output node, resets all nodes
        /// and starts playback.
        /// </summary>
        /// <exception cref="InvalidOperationException">Graph or output creation failed.</exception>
        private async Task CreateAudioGraph()
        {
            var settings = new AudioGraphSettings(AudioRenderCategory.Media);
            var result   = await AudioGraph.CreateAsync(settings);

            // Fix: check the creation status before touching result.Graph — the
            // original turned any failure into a NullReferenceException.
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
            }
            _graph = result.Graph;

            var deviceOutputNodeResult = await _graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException($"Device output creation failed: {deviceOutputNodeResult.Status}");
            }
            _deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

            _graph.ResetAllNodes();
            _graph.Start();
        }
Exemple #22
0
 /// <summary>
 /// Toggles recording: starts the graph when stopped, or stops it and
 /// finalizes the PCM output file when recording. Flips the Recording flag.
 /// </summary>
 public async Task ToggleRecordStop()
 {
     if (Recording)
     {
         // Already recording: stop capturing and finalize the PCM file.
         graph.Stop();
         await pcmFileNode.FinalizeAsync();
     }
     else
     {
         // Not recording yet: start capturing.
         graph.Start();
     }

     Recording = !Recording;
 }
        /// <summary>
        /// Builds the game-effects audio graph with a device output node, resets
        /// all nodes and starts playback.
        /// </summary>
        private async Task InitializeAsync()
        {
            var settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.GameEffects);

            _graph        = (await AudioGraph.CreateAsync(settings)).Graph;
            _deviceOutput = (await _graph.CreateDeviceOutputNodeAsync()).DeviceOutputNode;

            _graph.ResetAllNodes();
            _graph.Start();
        }
Exemple #24
0
        /// <summary>
        /// Page-loaded handler: builds and starts the audio graph, attaches the
        /// custom effect, and arms a 1 Hz timer that mirrors the file input
        /// node's position into the progress label and slider.
        /// </summary>
        private async void PageLoaded(object sender, RoutedEventArgs e)
        {
            await CreateAudioGraph();

            _graph.Start();
            AddCustomEffect();

            var progressTimer = new DispatcherTimer
            {
                Interval = TimeSpan.FromSeconds(1)
            };
            progressTimer.Tick += (s, args) =>
            {
                // Nothing to report until a file is loaded.
                if (_fileInputNode == null)
                {
                    return;
                }
                CurrentProgressPositionLabel.Text = _fileInputNode.Position.ToString(@"m\m\:s\s");
                Progress.Value = _fileInputNode.Position.TotalSeconds;
            };
            progressTimer.Start();
        }
Exemple #25
0
        /// <summary>
        /// Plays the selected audio file: resets the graph, attaches a file input
        /// node whose completion callback stops the graph, rewinds the node and
        /// re-enables the buttons on the UI thread, then wires the node to the
        /// device output and starts playback with the buttons disabled.
        /// </summary>
        private async void playAudioFileButton_Click(object sender, RoutedEventArgs e)
        {
            await ResetAudioGraph();

            var inputNode = await AttachFileInputNode(audioFile, async (AudioFileInputNode senderNode, object args) =>
            {
                // Playback finished: stop and rewind, then re-enable the UI.
                graph.Stop();
                senderNode.Reset();
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                {
                    playAudioFileButton.IsEnabled       = true;
                    transcribeAudioFileButton.IsEnabled = true;
                });
            });

            await AttachDeviceOutputNode(inputNode);

            graph.Start();
            playAudioFileButton.IsEnabled       = false;
            transcribeAudioFileButton.IsEnabled = false;
        }
Exemple #26
0
        /// <summary>
        /// Builds the capture-side graph: creates an input AudioGraph (float,
        /// 48 kHz stereo), a frame output node, and a device input node bound to
        /// the default capture device, connects input -> frame output and starts
        /// both. Failures abort silently.
        /// </summary>
        public static async Task CreateDeviceInputNode()
        {
            Console.WriteLine("Creating AudioGraphs");
            // Create an AudioGraph with default settings
            AudioGraphSettings graphsettings = new AudioGraphSettings(AudioRenderCategory.GameChat);

            // 32-bit float, 48 kHz stereo => 48000 * 2 ch * 32 bits = 3,072,000 bps.
            graphsettings.EncodingProperties               = new AudioEncodingProperties();
            graphsettings.EncodingProperties.Subtype       = "Float";
            graphsettings.EncodingProperties.SampleRate    = 48000;
            graphsettings.EncodingProperties.ChannelCount  = 2;
            graphsettings.EncodingProperties.BitsPerSample = 32;
            graphsettings.EncodingProperties.Bitrate       = 3072000;
            //settings.DesiredSamplesPerQuantum = 960;
            //settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            CreateAudioGraphResult graphresult = await AudioGraph.CreateAsync(graphsettings);

            if (graphresult.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            ingraph = graphresult.Graph;


            // NOTE(review): only nodesettings.EncodingProperties is used below; the
            // DesiredSamplesPerQuantum / QuantumSizeSelectionMode values set here
            // are never passed to the graph — confirm whether that was intended.
            AudioGraphSettings nodesettings = new AudioGraphSettings(AudioRenderCategory.GameChat);

            nodesettings.EncodingProperties       = AudioEncodingProperties.CreatePcm(48000, 2, 16);
            nodesettings.DesiredSamplesPerQuantum = 960;
            nodesettings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.ClosestToDesired;
            // NOTE(review): the frame output node is created with the *output*
            // graph's encoding (outgraph, defined elsewhere) while living on
            // ingraph — presumably to match formats for relaying; verify.
            frameOutputNode         = ingraph.CreateFrameOutputNode(outgraph.EncodingProperties);
            quantum                 = 0;
            ingraph.QuantumStarted += Graph_QuantumStarted;

            // Resolve the system default capture device explicitly.
            Windows.Devices.Enumeration.DeviceInformation selectedDevice =
                await Windows.Devices.Enumeration.DeviceInformation.CreateFromIdAsync(Windows.Media.Devices.MediaDevice.GetDefaultAudioCaptureId(Windows.Media.Devices.AudioDeviceRole.Default));

            CreateAudioDeviceInputNodeResult result =
                await ingraph.CreateDeviceInputNodeAsync(MediaCategory.Media, nodesettings.EncodingProperties, selectedDevice);

            if (result.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }

            deviceInputNode = result.DeviceInputNode;
            deviceInputNode.AddOutgoingConnection(frameOutputNode);
            frameOutputNode.Start();
            ingraph.Start();
        }
Exemple #27
0
        /// <summary>
        /// Plays a song. If nothing is loaded, starts playback immediately; if a song
        /// is already playing, schedules a 5-second cross-fade jump into the new one;
        /// if stopped with stale nodes, tears everything down and restarts.
        /// </summary>
        /// <param name="song">The song to play (or to jump to).</param>
        public async Task Play(SongViewModel song)
        {
            if (mainInputNode == null)
            {
                // Nothing loaded yet: build the input node and start playback.
                mainInputNode = await song.Song.CreateNode(graph);

                mainSong = song;

                NextJump = mainSong.Jumps.FirstOrDefault();
                mainInputNode.AddOutgoingConnection(outputNode);
                graph.Start();
                IsPlaying = true;
            }
            else if (IsPlaying)
            {
                // Already playing: schedule a cross-fade 5 seconds from now,
                // landing 5 seconds into the target song.
                var jump = new JumpViewModel(mainSong)
                {
                    Origin     = mainInputNode.Position + TimeSpan.FromSeconds(5),
                    TargetSong = song,
                    TargetTime = TimeSpan.FromSeconds(5),
                    CrossFade  = TimeSpan.FromSeconds(5)
                };
                // Clamp so the jump never starts past the end of the current song.
                if (mainInputNode.Duration < jump.Origin)
                {
                    jump.Origin     = mainInputNode.Duration;
                    jump.CrossFade  = mainInputNode.Duration - mainInputNode.Position;
                    jump.TargetTime = jump.CrossFade;
                }
                NextJump = jump;
            }
            else
            {
                // Stopped but nodes still attached: dispose them and restart fresh.
                // mainInputNode is known non-null here (the first branch handled null),
                // so the original's redundant re-check was removed.
                mainInputNode.RemoveOutgoingConnection(outputNode);
                mainInputNode.Dispose();
                mainInputNode = null;
                mainSong      = null;

                if (subInputNode != null)
                {
                    subInputNode.RemoveOutgoingConnection(outputNode);
                    subInputNode.Dispose();
                    subInputNode = null;
                    subSong      = null;
                }
                IsFading = false;

                // Recurse: now takes the first branch and starts playback.
                await Play(song);
            }
        }
        /// <summary>
        /// Plays audio from specified file.
        /// </summary>
        /// <param name="fileName">Name of the audio file to play.</param>
        /// <returns>A task that completes once playback has been started.</returns>
        public async Task PlayFromFile(string fileName)
        {
            // NOTE(review): lock (this) is an anti-pattern — external code holding a
            // reference to this object could take the same lock. Prefer a private
            // readonly gate object. The lock only guards the state check and flag
            // flip; it is released before the awaits below.
            lock (this)
            {
                isInitialized.CheckIfFulfills("Speaker", "initialized", true);
                isPlaying = true;
            }

            await CreateFileInputNode(fileName);

            // Route the freshly created file input into the device output and start.
            fileInput.AddOutgoingConnection(deviceOutput);

            audioGraph.Start();
        }
Exemple #29
0
 /// <summary>
 /// Flips graph playback on or off; the button caption doubles as the state flag.
 /// </summary>
 private void TogglePlay()
 {
     // "Start Graph" on the button means the graph is currently stopped.
     bool isStopped = graphButton.Content.Equals("Start Graph");

     if (isStopped)
     {
         graph.Start();
         graphButton.Content = "Stop Graph";
     }
     else
     {
         graph.Stop();
         graphButton.Content = "Start Graph";
     }
 }
Exemple #30
0
        /// <summary>
        /// Toggles the audio graph between running and stopped.
        /// </summary>
        /// <returns>The new state: true when the graph is now running.</returns>
        public bool HitButton()
        {
            // Flip the flag first, then drive the graph to match it.
            isOn = !isOn;

            if (isOn) { ag.Start(); } else { ag.Stop(); }

            return isOn;
        }
        /// <summary>
        /// Prepares the destination file and the audio pipeline, then begins recording.
        /// </summary>
        /// <param name="recordingFilename">File name to record into (replaced if it already exists).</param>
        public async Task StartRecordingAsync(string recordingFilename)
        {
            _outputFilename = recordingFilename;

            // Create (or overwrite) the target file in the app's local folder.
            var localFolder = ApplicationData.Current.LocalFolder;
            _storageFile = await localFolder.CreateFileAsync(_outputFilename, CreationCollisionOption.ReplaceExisting);

            Debug.WriteLine($"StartRecordingAsync : {_storageFile.Path}");

            // Build the pipeline in order: graph, then its file output node, then the feed.
            await InitialiseAudioGraph();
            await InitialiseAudioFileOutputNode();
            await InitialiseAudioFeed();

            _audioGraph.Start();
        }
Exemple #32
0
        /// <summary>
        /// Starts (value == 1) or stops recording. On stop, finalizes the output file
        /// and posts the recording into the comment section.
        /// </summary>
        /// <param name="value">1 to start recording; any other value stops it.</param>
        /// <remarks>
        /// NOTE(review): async void means callers cannot await this or observe its
        /// exceptions; kept for signature compatibility.
        /// </remarks>
        public async void RecordOrStop(int value)
        {
            if (value == 1)
            {
                // Begin recording and show the recording UI.
                secondstimer.Start();
                graph.Start();
                secondscount = 0;
                await Recordings.ShowAsync();
                return;
            }

            // Stop recording and flush/finalize the encoded file.
            secondstimer.Stop();
            graph.Stop();
            TranscodeFailureReason finalizeResult = await fileOutputNode.FinalizeAsync();

            if (finalizeResult != TranscodeFailureReason.None)
            {
                MessageDialog md = new MessageDialog("Error in saving the audio", "OOPS!!");
                await md.ShowAsync();

                return;
            }

            // Fixed user-facing typo ("Sucessfully" -> "Successfully").
            MessageDialog ok = new MessageDialog("Successfully saved", "Hurray!!");
            await ok.ShowAsync();

            UpdateInCommentSection(storageFile.Name);
            com1.Add(new comments {
                empname = pd.emp.name, message = storageFile.Name, dt = DateTime.Now, empid = pd.emp.id, IsFile = true, storagefile = storageFile
            });

            // Rebind to force the list to refresh.
            commentsSection.ItemsSource = null;
            commentsSection.ItemsSource = com1;
            Recordings.Hide();
        }
        /// <summary>
        /// Toggle handler: when unchecked, stops and disposes the recording graph and
        /// plays back the captured file; when checked, builds a fresh recording graph
        /// (default mic -> MP3 file in the AudioFeedback folder) and starts capturing.
        /// </summary>
        async private void ToggleRecord2(object sender, RoutedEventArgs e)
        {
            var btn_record_audio = sender as ToggleButton;

            if (btn_record_audio.IsChecked == false)
            {
                // NOTE(review): the file output node is never FinalizeAsync()'d before
                // the graph is disposed — the MP3 may be left unfinalized. Verify the
                // resulting file plays back correctly; compare with the pattern that
                // awaits FinalizeAsync() before saving.
                _graph_record.Stop();
                _graph_record.Dispose();
                await PlayAudio(_target_file);

                //using the media element to play the sound
                //var raf_stream = await _target_file.OpenReadAsync();
                //media.SetSource(raf_stream, "");
                //media.Play();
            }
            else
            {
                //initialize the audio graph for recording and then start recording
                AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
                settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

                CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    _graph_record = result.Graph;

                    //setup the input
                    var input_node = (await _graph_record.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other)).DeviceInputNode;

                    //setup the output (place where audio will be recorded to)
                    var feedback_folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("AudioFeedback", CreationCollisionOption.OpenIfExists);

                    // GenerateUniqueName keeps earlier recordings instead of overwriting.
                    _target_file = await feedback_folder.CreateFileAsync("audio message.mp3", CreationCollisionOption.GenerateUniqueName);

                    var profile          = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                    var file_output_node = (await _graph_record.CreateFileOutputNodeAsync(_target_file, profile)).FileOutputNode;

                    //direct the input to the output
                    input_node.AddOutgoingConnection(file_output_node);
                    media.Stop();  //stop playback since we are recording
                    _graph_record.Start();
                }
                else
                {
                    await new MessageDialog("Could not initialize recorder").ShowAsync();
                }
            }
        }
Exemple #34
0
 /// <summary>
 /// Starts or stops the graph based on the tracked <c>playing</c> flag.
 /// </summary>
 private void TogglePlay()
 {
     if (playing)
     {
         playing = false;
         graph.Stop();
     }
     else
     {
         System.Diagnostics.Debug.WriteLine("Playing");
         graph.Start();
         playing = true;
     }
 }
        /// <summary>
        /// Wires up a MIDI input port and a low-latency mono frame-input audio graph,
        /// then precomputes an equal-temperament frequency table for MIDI notes.
        /// </summary>
        private async void Page_Loaded(object sender, RoutedEventArgs e)
        {
            // midi

            var s = MidiInPort.GetDeviceSelector();
            var information = await DeviceInformation.FindAllAsync(s);

            var list = information.ToList();
            // NOTE(review): hard-coded device index 2 — throws if fewer than three MIDI
            // inputs exist; confirm this is the intended device on target machines.
            port = await MidiInPort.FromIdAsync(list.ElementAt(2).Id);
            port.MessageReceived += Port_MessageReceived;

            // audio
            // NOTE(review): neither the graph-creation result nor the output-node result
            // is status-checked; a failure surfaces as a null dereference below.
            var settings = new AudioGraphSettings(AudioRenderCategory.GameEffects);
            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            var creation = await AudioGraph.CreateAsync(settings);

            graph = creation.Graph;
            output = await graph.CreateDeviceOutputNodeAsync();

            // Mono frame input; samples are produced on demand in Input_QuantumStarted.
            // Created stopped — the graph runs, the node is started when notes play.
            var encoding = graph.EncodingProperties;
            encoding.ChannelCount = 1;
            input = graph.CreateFrameInputNode(encoding);
            input.AddOutgoingConnection(output.DeviceOutputNode);
            input.Stop();

            input.QuantumStarted += Input_QuantumStarted;

            graph.Start();

            // midi notes (pitch to note)

            // Frequency table: note x -> (440/32) * 2^((x-9)/12), i.e. A4 = 440 Hz.
            // NOTE(review): loop fills indices 0..126 only; MIDI note 127 is left at
            // its default value — confirm whether x <= 127 was intended.
            float a = 440; // a is 440 hz...
            for (int x = 0; x < 127; ++x)
            {
                notes[x] = (a / 32f) * (float)Math.Pow(2f, ((x - 9f) / 12f));
            }
        }
        /// <summary>
        /// Builds the playback graph: a device output fed by a mono frame input node
        /// whose samples are generated on demand in node_QuantumStarted. The graph is
        /// started immediately; only the frame input node is started/stopped later.
        /// </summary>
        private async Task CreateAudioGraph()
        {
            // Create an AudioGraph with default settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                rootPage.NotifyUser(String.Format("AudioGraph Creation Error because {0}", result.Status.ToString()), NotifyType.ErrorMessage);
                return;
            }

            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                rootPage.NotifyUser(String.Format("Audio Device Output unavailable because {0}", deviceOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                speakerContainer.Background = new SolidColorBrush(Colors.Red);
                // BUG FIX: the original fell through here, dereferenced the failed
                // result and then reported success — bail out instead.
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;
            rootPage.NotifyUser("Device Output Node successfully created", NotifyType.StatusMessage);
            speakerContainer.Background = new SolidColorBrush(Colors.Green);

            // Create the FrameInputNode at the same format as the graph, except explicitly set mono.
            AudioEncodingProperties nodeEncodingProperties = graph.EncodingProperties;
            nodeEncodingProperties.ChannelCount = 1;
            frameInputNode = graph.CreateFrameInputNode(nodeEncodingProperties);
            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            frameContainer.Background = new SolidColorBrush(Colors.Green);

            // Initialize the Frame Input Node in the stopped state
            frameInputNode.Stop();

            // Hook up an event handler so we can start generating samples when needed
            // This event is triggered when the node is required to provide data
            frameInputNode.QuantumStarted += node_QuantumStarted;

            // Start the graph since we will only start/stop the frame input node
            graph.Start();
        }
Exemple #37
0
        /// <summary>
        /// Builds a playback graph wired file input -> device output and starts it.
        /// The file always comes from the app package via GetPackagedFile.
        /// </summary>
        /// <remarks>
        /// NOTE(review): async void — exceptions are unobservable by callers; consider
        /// returning Task if the call sites allow it.
        /// </remarks>
        private async void initGraph() {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create the graph — bail out instead of dereferencing null.
                return;
            }
            graph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await graph.CreateDeviceOutputNodeAsync();
            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }
            deviceOutput = deviceOutputNodeResult.DeviceOutputNode;

            // (Removed dead code: a FileOpenPicker was configured here but never used —
            // the file has always come from the package below.)
            StorageFile file = await GetPackagedFile(null, "audio.mp3");
            CreateAudioFileInputNodeResult fileInputResult = await graph.CreateFileInputNodeAsync(file);
            fileInput = fileInputResult.FileInputNode;
            fileInput.AddOutgoingConnection(deviceOutput);
            graph.Start();
        }