Example #1
        public async void CaptureAudio()
        {
            AudioGraphSettings audioGraphSettings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech);
            var result = await AudioGraph.CreateAsync(audioGraphSettings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                return;
            }
            _audioGraph = result.Graph;

            var deviceInputNodeResult = await _audioGraph.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Speech);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                return;
            }
            var deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            _audioFile = await Windows.Storage.ApplicationData.Current.TemporaryFolder
                         .CreateFileAsync("speech", CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(_audioFile, mediaEncodingProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                return;
            }
            var fileOutputNode = fileOutputNodeResult.FileOutputNode;

            deviceInputNode.AddOutgoingConnection(fileOutputNode);

            _audioGraph.Start();
        }
Example #2
        /// <summary>
        /// Recording code is taken from UWP samples and slightly reduced
        ///
        /// - see official UWP samples on GitHub
        ///   https://github.com/Microsoft/Windows-universal-samples/blob/master/Samples/AudioCreation/cs/AudioCreation/Scenario2_DeviceCapture.xaml.cs
        ///
        /// </summary>
        public async Task StartRecording()
        {
            await CreateAudioGraph();

            var temporaryFile = await ApplicationData.Current.TemporaryFolder.TryGetItemAsync(TemporaryWaveFile) as StorageFile;

            if (temporaryFile != null)
            {
                await temporaryFile.DeleteAsync(StorageDeleteOption.Default);
            }

            temporaryFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(TemporaryWaveFile);

            var fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(temporaryFile, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                await new MessageDialog("Cannot create output file: " + fileOutputNodeResult.Status).ShowAsync();
                return;
            }

            _fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            _deviceInputNode.AddOutgoingConnection(_fileOutputNode);
            _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

            // Ta da!
            _audioGraph.Start();
        }
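The UWP sample this was reduced from also stops and finalizes the recording; that part is omitted above. A minimal sketch of the matching stop path, assuming the _audioGraph and _fileOutputNode fields created in StartRecording:

        public async Task StopRecording()
        {
            // Stop pulling audio through the graph before touching the output file
            _audioGraph.Stop();

            // FinalizeAsync flushes and closes the output file; without it the
            // recorded file may be left unplayable
            TranscodeFailureReason finalizeResult = await _fileOutputNode.FinalizeAsync();

            if (finalizeResult != TranscodeFailureReason.None)
            {
                await new MessageDialog("Finalization of the output file failed: " + finalizeResult).ShowAsync();
            }
        }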
Example #3
        //</SnippetCreateFileInputNode>
        //<SnippetCreateFileOutputNode>
        private async Task CreateFileOutputNode()
        {
            FileSavePicker saveFilePicker = new FileSavePicker();

            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            // File can be null if cancel is hit in the file picker
            if (file == null)
            {
                return;
            }

            Windows.Media.MediaProperties.MediaEncodingProfile mediaEncodingProfile;
            switch (file.FileType.ToString().ToLowerInvariant())
            {
            case ".wma":
                mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                break;

            case ".mp3":
                mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                break;

            case ".wav":
                mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                break;

            default:
                throw new ArgumentException();
            }


            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult result = await audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                ShowErrorMessage(result.Status.ToString());
                return;
            }

            fileOutputNode = result.FileOutputNode;
        }
Example #4
        private async void GetFileName_Tapped(object sender, TappedRoutedEventArgs e)
        {
            FileSavePicker fileSavePicker = new FileSavePicker();

            fileSavePicker.FileTypeChoices.Add("MP3 audio", new List <string>()
            {
                ".mp3"
            });
            fileSavePicker.FileTypeChoices.Add("Wave audio", new List <string>()
            {
                ".wav"
            });
            fileSavePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            fileSavePicker.SuggestedStartLocation = PickerLocationId.MusicLibrary;
            StorageFile file = await fileSavePicker.PickSaveFileAsync();

            if (file == null)
            {
                return;
            }

            audioFile          = file;
            AudioFileName.Text = audioFile.Name;
            //rootPage.NotifyUser(String.Format("Recording to {0}", file.Name.ToString()), NotifyType.StatusMessage);
            MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);


            // My addition: create the audio graph before creating the file output node
            await CreateAudioGraph();

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                //rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                //fileButton.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            fileOutputNode         = fileOutputNodeResult.FileOutputNode;
            GetFileName.Background = new SolidColorBrush(Colors.YellowGreen);

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
            StartStop.IsEnabled = true;
        }
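The CreateMediaEncodingProfile(file) helper called above (and in several of the examples below) is not shown. In the UWP AudioCreation sample these snippets are based on, it simply maps the picked file's extension to an encoding profile, roughly as follows:

        private MediaEncodingProfile CreateMediaEncodingProfile(StorageFile file)
        {
            switch (file.FileType.ToString().ToLowerInvariant())
            {
            case ".wma":
                return MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);

            case ".mp3":
                return MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);

            case ".wav":
                return MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            default:
                throw new ArgumentException("Unsupported file type: " + file.FileType);
            }
        }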
Example #5
        private async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                graph.Dispose();
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device input node using the default audio input device
            // (requires the Microphone capability in the app manifest)
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the output file in the app's local folder

            StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
            StorageFile   file          = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            path = file.Path.ToString();

            MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                return;
            }

            fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
Example #6
        // TODO: Access is denied!!!

        async Task NewMethod(StorageFolder sf)
        {
            try
            {
                foreach (StorageFolder item in await sf.GetFoldersAsync())
                {
                    Debug.WriteLine(item.Name);
                }

                //var dbgFoldr0 = await sf.CreateFolderAsync("Dbg");
                var dbgFoldr = await sf.GetFolderAsync("Dbg");

                var inpFiles = await dbgFoldr.GetFilesAsync(CommonFileQuery.OrderByName);

                foreach (var inpFile in inpFiles.Where(r => r.Name.StartsWith("[") && r.Name.EndsWith("3")))
                {
                    var outFile = await dbgFoldr.CreateFileAsync($"{_PlaybackSpeedFactor:N1}-{inpFile.Name}", CreationCollisionOption.ReplaceExisting);

                    var fileInputResult = await _graph.CreateFileInputNodeAsync(inpFile);

                    if (fileInputResult.Status != AudioFileNodeCreationStatus.Success)
                    {
                        notifyUser(String.Format("Cannot read input file because {0}", fileInputResult.Status.ToString()));
                        return;
                    }

                    var fileInput = fileInputResult.FileInputNode;
                    //_fileInput.StartTime = TimeSpan.FromSeconds(10);
                    //_fileInput.EndTime = TimeSpan.FromSeconds(20);
                    //fileInput.PlaybackSpeedFactor = _PlaybackSpeedFactor;

                    // Operate node at the graph format, but save file at the specified format
                    var fileOutNodeResult = await _graph.CreateFileOutputNodeAsync(outFile, CreateMediaEncodingProfile(outFile));

                    if (fileOutNodeResult.Status != AudioFileNodeCreationStatus.Success)
                    {
                        notifyUser(string.Format("Cannot create output file because {0}", fileOutNodeResult.Status.ToString()));
                        return;
                    }

                    fileInput.AddOutgoingConnection(fileOutNodeResult.FileOutputNode);
                    //fileInput.AddOutgoingConnection(_deviceOutput);
                    fileInput.FileCompleted += fileInput_FileCompleted;

                    //nogo{
                    fileInput.EncodingProperties.Bitrate       *= 2;
                    fileInput.EncodingProperties.SampleRate    *= 2;
                    fileInput.EncodingProperties.BitsPerSample *= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.Bitrate       /= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.SampleRate    /= 2;
                    fileOutNodeResult.FileOutputNode.EncodingProperties.BitsPerSample /= 2;
                    //}nogo

                    _fileInputs.Add(fileInput);
                }

                _graph.Start();
                //await Task.Delay(12000);
                //_graph.Stop();
                notifyUser("Started...");
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex);
                throw;
            }
        }
        private async void ToggleRecord2(object sender, RoutedEventArgs e)
        {
            var btn_record_audio = sender as ToggleButton;

            if (btn_record_audio.IsChecked == false)
            {
                _graph_record.Stop();
                _graph_record.Dispose();
                await PlayAudio(_target_file);

                //using the media element to play the sound
                //var raf_stream = await _target_file.OpenReadAsync();
                //media.SetSource(raf_stream, "");
                //media.Play();
            }
            else
            {
                //initialize the audio graph for recording and then start recording
                AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);
                settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

                CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    _graph_record = result.Graph;

                    //setup the input
                    var input_node = (await _graph_record.CreateDeviceInputNodeAsync(Windows.Media.Capture.MediaCategory.Other)).DeviceInputNode;

                    //setup the output (place where audio will be recorded to)
                    var feedback_folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("AudioFeedback", CreationCollisionOption.OpenIfExists);

                    _target_file = await feedback_folder.CreateFileAsync("audio message.mp3", CreationCollisionOption.GenerateUniqueName);

                    var profile          = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                    var file_output_node = (await _graph_record.CreateFileOutputNodeAsync(_target_file, profile)).FileOutputNode;

                    //direct the input to the output
                    input_node.AddOutgoingConnection(file_output_node);
                    media.Stop();  //stop playback since we are recording
                    _graph_record.Start();
                }
                else
                {
                    await new MessageDialog("Could not initialize recorder").ShowAsync();
                }
            }
        }
Example #8
        private async Task InitialiseAudioFileOutputNode()
        {
            var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outputProfile.Audio = AudioEncodingProperties.CreatePcm(SAMPLE_RATE, CHANNEL, BITS_PER_SAMPLE);

            var outputResult = await _audioGraph.CreateFileOutputNodeAsync(_storageFile, outputProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new InvalidOperationException("AudioFileNode creation error!");
            }

            _audioFileOutputNode = outputResult.FileOutputNode;
        }
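SAMPLE_RATE, CHANNEL and BITS_PER_SAMPLE are not defined in this snippet; a plausible definition, assuming the same 16 kHz mono 16-bit PCM format used by the later examples, would be:

        // Assumed values - the original constants are not shown in this snippet
        private const uint SAMPLE_RATE = 16000;       // 16 kHz
        private const uint CHANNEL = 1;               // mono
        private const uint BITS_PER_SAMPLE = 16;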
Example #9
        public async Task Init()
        {
            Recording = false;
            // Select the devices for recording and playback
            var devices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var devicesIn = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector());

            outputDevice = devices[0];
            // Recording settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
                PrimaryRenderDevice = outputDevice,
            };


            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            graph = result.Graph;


            deviceOutputNode = (await graph // Create the output node (playback on the headset)
                                .CreateDeviceOutputNodeAsync())
                               .DeviceOutputNode;

            deviceInputNode = (await graph // Create the input node (microphone) - real-time communication
                               .CreateDeviceInputNodeAsync(MediaCategory.Communications, graph.EncodingProperties, devicesIn[0]))
                              .DeviceInputNode;

            // Create the file that will store the PCM recorded straight from the microphone
            StorageFile pcmfile = await KnownFolders
                                  .MusicLibrary
                                  .CreateFileAsync("PCM_original.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            // 16-bit PCM WAV (medium encoding quality)
            MediaEncodingProfile profile = MediaEncodingProfile.CreateWav(Windows.Media.MediaProperties.AudioEncodingQuality.Medium);


            pcmFileNode = (await graph // Create the output file node
                           .CreateFileOutputNodeAsync(pcmfile, profile))
                          .FileOutputNode;

            // Connect the playback node and the PCM file node to the microphone node,
            // i.e. the signal goes to the headset for playback and to the file for storage at the same time
            deviceInputNode.AddOutgoingConnection(pcmFileNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
        }
Example #10
        private async Task SelectOutputFile()
        {
            FileSavePicker saveFilePicker = new FileSavePicker();

            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            // File can be null if cancel is hit in the file picker
            if (file == null)
            {
                return;
            }

            rootPage.NotifyUser(String.Format("Recording to {0}", file.Name.ToString()), NotifyType.StatusMessage);
            MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                fileButton.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            fileOutputNode        = fileOutputNodeResult.FileOutputNode;
            fileButton.Background = new SolidColorBrush(Colors.YellowGreen);

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
            recordStopButton.IsEnabled = true;
        }
        private async Task CreateFileOutputNode()
        {
            Debug.WriteLine(_storageFolder.Path);
            _mainFile = await _storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            // Operate node at the graph format, but save file at the specified format
            var result = await _audioGraph.CreateFileOutputNodeAsync(_mainFile, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                Debug.WriteLine(result.Status.ToString());
                return;
            }

            _fileOutputNode = result.FileOutputNode;
        }
        async Task StartRecordingAsync()
        {
            try
            {
                recordingFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(Constants.AudioFilename, CreationCollisionOption.ReplaceExisting);

                Debug.WriteLine(recordingFile.Path);

                var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    audioGraph = result.Graph;

                    var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

                    var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low); // 1 channel, 16 bits per sample, 16K sample rate
                    outputProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                    var inputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    var outputResult = await audioGraph.CreateFileOutputNodeAsync(recordingFile, outputProfile);

                    if (outputResult.Status == AudioFileNodeCreationStatus.Success)
                    {
                        audioFileOutputNode = outputResult.FileOutputNode;

                        var inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, inputProfile.Audio, microphone);

                        if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                        {
                            inputResult.DeviceInputNode.AddOutgoingConnection(audioFileOutputNode);
                            audioGraph.Start();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }
Example #13
        public async Task StartRecordAsync()
        {
            _filePath = Path.GetTempFileName();
            var file = await StorageFile.GetFileFromPathAsync(_filePath);

            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Speech));

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception("Couldn't open recorder!");
            }
            _graph = result.Graph;

            var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

            var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            var outputResult = await _graph.CreateFileOutputNodeAsync(file, outProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create output!");
            }

            _outputNode = outputResult.FileOutputNode;
            var inProfile   = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            var inputResult = await _graph.CreateDeviceInputNodeAsync(
                MediaCategory.Speech,
                inProfile.Audio,
                microphone);

            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create device node!");
            }

            inputResult.DeviceInputNode.AddOutgoingConnection(_outputNode);
            _graph.Start();
        }
        private async Task SelectOutputFile()
        {
            var Folder = ApplicationData.Current.LocalFolder;

            Folder = await Folder.CreateFolderAsync("Audio", CreationCollisionOption.OpenIfExists);

            storageFile = await Folder.CreateFileAsync("audio.mp3", CreationCollisionOption.GenerateUniqueName);

            if (storageFile == null)
            {
                return;
            }
            MediaEncodingProfile            fileProfile          = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(storageFile, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in Audio Device");
                await md.ShowAsync();
                return;
            }
            fileOutputNode = fileOutputNodeResult.FileOutputNode;
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
        private async Task<CreateAudioFileOutputNodeResult> SaveAudioToFile(
            StorageFile file,
            IProgress <string> status)
        {
            _finished = false;
            status.Report("Saving audio to file");

            var mediaEncodingProfile =
                CreateMediaEncodingProfile(file);

            if (!_audioData.IsStereo && mediaEncodingProfile.Audio != null)
            {
                mediaEncodingProfile.Audio.ChannelCount = 1;
            }

            // Initialize FileOutputNode
            var result =
                await _audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                return(result);
            }

            _fileOutputNode = result.FileOutputNode;
            _fileOutputNode.Stop();

            // Initialize FrameInputNode and connect it to fileOutputNode
            _frameInputNode = _audioGraph.CreateFrameInputNode(
                // EncodingProperties are different from those of the input file
                _fileOutputNode.EncodingProperties
                //audioEncodingProperties
                );

            _frameInputNode.AddOutgoingConnection(_fileOutputNode);
            _frameInputNode.Stop();

            // Add a handler which will transfer every audioData sample to audio frame
            _frameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            _audioDataCurrentPosition = 0;

            // Start the process which will write the audio file frame by frame
            // and will generate QuantumStarted events
            _audioGraph.Start();
            // don't start fileOutputNode yet because it will record zeros

            // Because we initialized frameInputNode in the stopped state, we need to start it
            _frameInputNode.Start();

            // didn't find a better way to wait for writing to file
            while (!_finished)
            {
                await Task.Delay(50);
            }

            // By this point the audioData samples have ended and the audioGraph has already stopped
            await _fileOutputNode.FinalizeAsync();

            // Clear the status text and progress indicator
            status.Report("");
            _ioProgress.Report(0);

            return(result);
        }
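The FrameInputNode_QuantumStarted handler wired up above is not shown. Below is a minimal sketch of its usual shape, assuming _audioData exposes mono float samples through a hypothetical Samples property; the buffer access follows the documented IMemoryBufferByteAccess pattern and requires System.Runtime.InteropServices plus the project's "Allow unsafe code" option.

        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        private unsafe void FrameInputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            uint samplesNeeded = (uint)args.RequiredSamples;

            if (samplesNeeded == 0)
            {
                return;
            }

            // Assumption: _audioData exposes its samples as a mono float array
            float[] samples = _audioData.Samples;                 // hypothetical accessor
            uint samplesLeft = (uint)(samples.Length - _audioDataCurrentPosition);
            uint samplesToWrite = Math.Min(samplesNeeded, samplesLeft);

            if (_audioDataCurrentPosition == 0)
            {
                // The output node was created stopped (see above) so it does not record
                // zeros; start it together with the first real frame
                _fileOutputNode.Start();
            }

            // One float (4 bytes) per mono sample
            var frame = new AudioFrame((uint)(samplesToWrite * sizeof(float)));

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(out byte* dataInBytes, out uint capacityInBytes);
                float* dataInFloat = (float*)dataInBytes;

                for (int i = 0; i < samplesToWrite; i++)
                {
                    dataInFloat[i] = samples[_audioDataCurrentPosition + i];
                }
            }

            sender.AddFrame(frame);
            _audioDataCurrentPosition += (int)samplesToWrite;

            if (_audioDataCurrentPosition >= samples.Length)
            {
                // All samples have been handed to the graph: stop everything and let
                // the waiting loop in SaveAudioToFile call FinalizeAsync
                _frameInputNode.Stop();
                _audioGraph.Stop();
                _finished = true;
            }
        }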
        public async Task InitializeUWPAudio()
        {
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
            outputDevices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            foreach (DeviceInformation dev in outputDevices)
            {
                if (dev.Name.Contains(OMENHeadset))
                {
                    settings.PrimaryRenderDevice = dev;
                }
            }


            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            uwpAudioGraph = result.Graph;

            // Create a device output node
            CreateAudioDeviceOutputNodeResult deviceOutputNodeResult = await uwpAudioGraph.CreateDeviceOutputNodeAsync();

            if (deviceOutputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device output node
                return;
            }

            deviceOutputNode = deviceOutputNodeResult.DeviceOutputNode;

            // Create a device input node using the default audio input device
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await uwpAudioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;
#if false
            //For File recording not use now.
            FileSavePicker saveFilePicker = new FileSavePicker();
            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await uwpAudioGraph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                //rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                //fileButton.Background = new SolidColorBrush(Colors.Red);
                return;
            }
            fileOutputNode = fileOutputNodeResult.FileOutputNode;
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
#endif

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
        }
Example #17
        public MainPageViewModel()
        {
            _musicFiles = new ObservableCollection <StorageFile>();
            CurrentFile = new GroorineFileViewModel(null);
            //_player = new Player();
            InitializeAsync();
            DeleteCommand = new DelegateCommand(async(o) =>
            {
                if (!(o is StorageFile))
                {
                    return;
                }
                var sf = o as StorageFile;
                MusicFiles.Remove(sf);
                await sf.DeleteAsync();
            });

            ExportCommand = new DelegateCommand(async(o) =>
            {
                if (!(o is StorageFile))
                {
                    return;
                }
                var sf  = o as StorageFile;
                var fsp = new FileSavePicker();
                fsp.FileTypeChoices.Add("Wave Audio", new List <string> {
                    ".wav"
                });
                fsp.FileTypeChoices.Add("Windows Media Audio", new List <string> {
                    ".wma"
                });
                fsp.FileTypeChoices.Add("MPEG 3 Audio", new List <string> {
                    ".mp3"
                });
                fsp.FileTypeChoices.Add("MPEG 4 Audio", new List <string> {
                    ".m4a"
                });
                fsp.SuggestedFileName = sf.DisplayName;
                fsp.CommitButtonText  = "Bounce";

                StorageFile file = await fsp.PickSaveFileAsync();
                if (file == null)
                {
                    return;
                }

                MediaEncodingProfile mediaEncodingProfile;
                switch (file.FileType.ToString().ToLowerInvariant())
                {
                case ".wma":
                    mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                    break;

                case ".mp3":
                    mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                    break;

                case ".wav":
                    mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    break;

                case ".m4a":
                    mediaEncodingProfile = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.High);
                    break;

                default:
                    throw new ArgumentException();
                }

                CreateAudioFileOutputNodeResult result = await _graph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

                if (result.Status != AudioFileNodeCreationStatus.Success)
                {
                    // FileOutputNode creation failed
                    await new MessageDialog("We couldn't create FileOutputNode, so we failed to bounce.").ShowAsync();
                    return;
                }

                AudioFileOutputNode node = result.FileOutputNode;

                _graph.Stop();

                _frameInputNode.AddOutgoingConnection(node);
                Stop();
                _player.Load(SmfParser.Parse(await sf.OpenStreamForReadAsync()));

                Play();

                _graph.Start();
                var a = new BouncingDialog();

#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                a.ShowAsync();
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed



                while (_player.IsPlaying)
                {
                    await Task.Delay(1);
                }
                _graph.Stop();

                await node.FinalizeAsync();

                _graph.Start();

                a.Hide();
                await new MessageDialog("Bouncing has successfully finished!").ShowAsync();
            });
        }