Code example #1
        /// <summary>
        /// Recording code is taken from UWP samples and slightly reduced
        ///
        /// - see official UWP samples on GitHub
        ///   https://github.com/Microsoft/Windows-universal-samples/blob/master/Samples/AudioCreation/cs/AudioCreation/Scenario2_DeviceCapture.xaml.cs
        ///
        /// </summary>
        public async Task StartRecording()
        {
            await CreateAudioGraph();

            var temporaryFile = await ApplicationData.Current.TemporaryFolder.TryGetItemAsync(TemporaryWaveFile) as StorageFile;

            if (temporaryFile != null)
            {
                await temporaryFile.DeleteAsync(StorageDeleteOption.Default);
            }

            temporaryFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(TemporaryWaveFile);

            var fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(temporaryFile, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                await new MessageDialog("Cannot create output file: " + fileOutputNodeResult.Status).ShowAsync();
                return;
            }

            _fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            _deviceInputNode.AddOutgoingConnection(_fileOutputNode);
            _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

            // Ta da!
            _audioGraph.Start();
        }
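
A possible companion for stopping the recording started in example #1 is sketched below; it is not part of the original snippet. It assumes the same _audioGraph and _fileOutputNode fields and the TemporaryWaveFile name used above, and follows the stop/finalize pattern shown in example #5 (the output file is only playable after FinalizeAsync has run).

        public async Task<StorageFile> StopRecording()
        {
            // Stop the graph so no more samples are captured
            _audioGraph.Stop();

            // Flush the encoder and write the WAV header; without this the file cannot be played back
            await _fileOutputNode.FinalizeAsync();

            _audioGraph.Dispose();
            _audioGraph = null;

            // Hand the finished temporary file back to the caller
            return await ApplicationData.Current.TemporaryFolder.GetFileAsync(TemporaryWaveFile);
        }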
Code example #2
File: MainPage.xaml.cs Project: zholobov/windows-uwp
        private async Task CreateFileOutputNode()
        {
            FileSavePicker saveFilePicker = new FileSavePicker();

            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            // File can be null if cancel is hit in the file picker
            if (file == null)
            {
                return;
            }

            Windows.Media.MediaProperties.MediaEncodingProfile mediaEncodingProfile;
            switch (file.FileType.ToString().ToLowerInvariant())
            {
            case ".wma":
                mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                break;

            case ".mp3":
                mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                break;

            case ".wav":
                mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                break;

            default:
                throw new ArgumentException();
            }


            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult result = await audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                ShowErrorMessage(result.Status.ToString());
                return;
            }

            fileOutputNode = result.FileOutputNode;
        }
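
Examples #3, #9, #15, and #16 call a CreateMediaEncodingProfile(file) helper that is not included in this listing. A minimal sketch, assuming it simply mirrors the extension switch above:

        private MediaEncodingProfile CreateMediaEncodingProfile(StorageFile file)
        {
            switch (file.FileType.ToLowerInvariant())
            {
            case ".wma":
                return MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);

            case ".mp3":
                return MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);

            case ".wav":
                return MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            default:
                throw new ArgumentException("Unsupported file type: " + file.FileType);
            }
        }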
Code example #3
        private async void GetFileName_Tapped(object sender, TappedRoutedEventArgs e)
        {
            FileSavePicker fileSavePicker = new FileSavePicker();

            fileSavePicker.FileTypeChoices.Add("MP3 audio", new List <string>()
            {
                ".mp3"
            });
            fileSavePicker.FileTypeChoices.Add("Wave audio", new List <string>()
            {
                ".wav"
            });
            fileSavePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            fileSavePicker.SuggestedStartLocation = PickerLocationId.MusicLibrary;
            StorageFile file = await fileSavePicker.PickSaveFileAsync();

            if (file == null)
            {
                return;
            }

            audioFile          = file;
            AudioFileName.Text = audioFile.Name;
            //rootPage.NotifyUser(String.Format("Recording to {0}", file.Name.ToString()), NotifyType.StatusMessage);
            MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);


            // Addition: create the audio graph before attaching the file output node

            await CreateAudioGraph();

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                //rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                //fileButton.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            fileOutputNode         = fileOutputNodeResult.FileOutputNode;
            GetFileName.Background = new SolidColorBrush(Colors.YellowGreen);

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
            StartStop.IsEnabled = true;
        }
Code example #4
        private async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                graph.Dispose();
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device input node using the default audio input device (requires the Microphone capability in the app manifest)
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the output file in the app's local folder

            StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
            StorageFile   file          = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            path = file.Path;

            MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                return;
            }

            fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
Code example #5
        public async Task <string> EndRecordAsync()
        {
            _graph.Stop();
            await _outputNode.FinalizeAsync();

            _outputNode = null;
            _graph.Dispose();
            _graph = null;

            return _filePath;
        }
Code example #6
        public async Task StartRecording()
        {
            //Create file in ApplicationData LocalFolder
            try
            {
                var file = await ApplicationData.Current.LocalFolder.CreateFileAsync("MyVoice.wav", CreationCollisionOption.ReplaceExisting);

                if (file != null)
                {
                    //Create AudioGraph for Speech audio
                    var result = await AudioGraph.CreateAsync(
                        new AudioGraphSettings(AudioRenderCategory.Speech));

                    if (result.Status == AudioGraphCreationStatus.Success)
                    {
                        this.graph = result.Graph;

                        var microphone = await DeviceInformation.CreateFromIdAsync(
                            MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

                        // A WAV profile at AudioEncodingQuality.Low gives only 1 channel
                        var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);
                        // PCM encoding: 16 kHz sample rate, mono, 16 bits per sample
                        outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                        //Create File Output Node
                        var outputResult = await this.graph.CreateFileOutputNodeAsync(file, outProfile);

                        if (outputResult.Status == AudioFileNodeCreationStatus.Success)
                        {
                            this.outputNode = outputResult.FileOutputNode;

                            // Create the device input node using the graph's own encoding properties
                            var inputResult = await this.graph.CreateDeviceInputNodeAsync(
                                MediaCategory.Speech,
                                this.graph.EncodingProperties,
                                microphone);

                            if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                            {
                                inputResult.DeviceInputNode.AddOutgoingConnection(this.outputNode);

                                this.graph.Start();
                            }
                        }
                    }
                }
            }
            catch (UnauthorizedAccessException e)
            {
                Debug.WriteLine("Unauthorized exception when recording " + e.Message);
            }
        }
Code example #7
        private async Task InitialiseAudioFileOutputNode()
        {
            var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outputProfile.Audio = AudioEncodingProperties.CreatePcm(this.SAMPLE_RATE, this.CHANNEL, this.BITS_PER_SAMPLE);

            var outputResult = await this._audioGraph.CreateFileOutputNodeAsync(this._storageFile, outputProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new MicrophoneServiceException("AudioFileNode creation error !");
            }

            this._audioFileOutputNode = outputResult.FileOutputNode;
        }
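
The SAMPLE_RATE, CHANNEL and BITS_PER_SAMPLE members used here (and again in example #10) are not shown in the listing. A plausible declaration, assuming the 16 kHz / mono / 16-bit format that examples #6 and #13 pass to CreatePcm; the actual values may differ per project:

        // Assumed values; adjust to the format the project actually records in
        private readonly uint SAMPLE_RATE = 16000;     // samples per second
        private readonly uint CHANNEL = 1;             // mono
        private readonly uint BITS_PER_SAMPLE = 16;    // 16-bit PCM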
Code example #8
        public async Task Init()
        {
            Recording = false;
            // Select the devices for recording and playback
            var devices = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioRenderSelector());

            var devicesIn = await DeviceInformation.FindAllAsync(MediaDevice.GetAudioCaptureSelector());

            outputDevice = devices[0];
            // Recording settings
            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media)
            {
                //QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency,
                PrimaryRenderDevice = outputDevice,
            };


            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            graph = result.Graph;


            deviceOutputNode = (await graph // Create the output node (playback through the headset)
                                .CreateDeviceOutputNodeAsync())
                               .DeviceOutputNode;

            deviceInputNode = (await graph // Create the input node (microphone) - real-time communication
                               .CreateDeviceInputNodeAsync(MediaCategory.Communications, graph.EncodingProperties, devicesIn[0]))
                              .DeviceInputNode;

            // Create the file that will store the PCM captured directly from the microphone
            StorageFile pcmfile = await KnownFolders
                                  .MusicLibrary
                                  .CreateFileAsync("PCM_original.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            // 16-bit PCM WAV at 44.1 kHz (medium quality)
            MediaEncodingProfile profile = MediaEncodingProfile.CreateWav(Windows.Media.MediaProperties.AudioEncodingQuality.Medium);


            pcmFileNode = (await graph // Create the file output node
                           .CreateFileOutputNodeAsync(pcmfile, profile))
                          .FileOutputNode;

            // Connect the playback node and the PCM file node to the microphone node,
            // i.e. route the signal to the headset and into the file at the same time
            deviceInputNode.AddOutgoingConnection(pcmFileNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
        }
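
Example #8 builds and wires the graph but never starts it, and the Recording flag suggests a separate toggle elsewhere. A minimal sketch of such start/stop methods (the names here are hypothetical), assuming the same graph, pcmFileNode and Recording members and the stop/finalize pattern from examples #1 and #5:

        public void StartRecording()
        {
            // Begin capture: audio flows from the microphone to the headset and into the PCM file
            graph.Start();
            Recording = true;
        }

        public async Task StopRecording()
        {
            graph.Stop();

            // Finalize the file output node so the WAV header is written and the file becomes playable
            await pcmFileNode.FinalizeAsync();
            Recording = false;
        }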
Code example #9
        private async Task SelectOutputFile()
        {
            FileSavePicker saveFilePicker = new FileSavePicker();

            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            // File can be null if cancel is hit in the file picker
            if (file == null)
            {
                return;
            }

            rootPage.NotifyUser(String.Format("Recording to {0}", file.Name.ToString()), NotifyType.StatusMessage);
            MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                rootPage.NotifyUser(String.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString()), NotifyType.ErrorMessage);
                fileButton.Background = new SolidColorBrush(Colors.Red);
                return;
            }

            fileOutputNode        = fileOutputNodeResult.FileOutputNode;
            fileButton.Background = new SolidColorBrush(Colors.YellowGreen);

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
            deviceInputNode.AddOutgoingConnection(deviceOutputNode);
            recordStopButton.IsEnabled = true;
        }
Code example #10
File: MicrophoneHelper.cs Project: qazyj/BABERECEN
        /// <summary>
        /// Initialize the audio file output node
        /// </summary>
        /// <returns></returns>
        private async Task InitialiseAudioFileOutputNode()
        {
            if (_audioGraph == null)
            {
                return;
            }
            var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            outputProfile.Audio = AudioEncodingProperties.CreatePcm(SAMPLE_RATE, CHANNEL, BITS_PER_SAMPLE);

            var outputResult = await _audioGraph.CreateFileOutputNodeAsync(_storageFile, outputProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new InvalidOperationException("AudioFileNode creation error !");
            }

            _audioFileOutputNode = outputResult.FileOutputNode;
        }
Code example #11
        private async Task CreateFileOutputNode()
        {
            Debug.WriteLine(_storageFolder.Path);
            _mainFile = await _storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            var mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            // Operate node at the graph format, but save file at the specified format
            var result = await _audioGraph.CreateFileOutputNodeAsync(_mainFile, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                Debug.WriteLine(result.Status.ToString());
                return;
            }

            _fileOutputNode = result.FileOutputNode;
        }
Code example #12
        async Task StartRecordingAsync()
        {
            try
            {
                recordingFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(Constants.AudioFilename, CreationCollisionOption.ReplaceExisting);

                Debug.WriteLine(recordingFile.Path);

                var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Media));

                if (result.Status == AudioGraphCreationStatus.Success)
                {
                    audioGraph = result.Graph;

                    var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

                    var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low); // 1 channel, 16 bits per sample, 16K sample rate
                    outputProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                    var inputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    var outputResult = await audioGraph.CreateFileOutputNodeAsync(recordingFile, outputProfile);

                    if (outputResult.Status == AudioFileNodeCreationStatus.Success)
                    {
                        audioFileOutputNode = outputResult.FileOutputNode;

                        var inputResult = await audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, inputProfile.Audio, microphone);

                        if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                        {
                            inputResult.DeviceInputNode.AddOutgoingConnection(audioFileOutputNode);
                            audioGraph.Start();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
        }
Code example #13
        public async Task StartRecordAsync()
        {
            _filePath = Path.GetTempFileName();
            var file = await StorageFile.GetFileFromPathAsync(_filePath);

            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(AudioRenderCategory.Speech));

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new Exception("Couldn't open recorder!");
            }
            _graph = result.Graph;

            var microphone = await DeviceInformation.CreateFromIdAsync(MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

            var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            var outputResult = await _graph.CreateFileOutputNodeAsync(file, outProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create output!");
            }

            _outputNode = outputResult.FileOutputNode;
            var inProfile   = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            var inputResult = await _graph.CreateDeviceInputNodeAsync(
                MediaCategory.Speech,
                inProfile.Audio,
                microphone);

            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new Exception("Couldn't create device node!");
            }

            inputResult.DeviceInputNode.AddOutgoingConnection(_outputNode);
            _graph.Start();
        }
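
Examples #5 and #13 share the _graph, _outputNode and _filePath fields and appear to come from the same recorder class. A brief usage sketch (the RecordFiveSecondsAsync wrapper is hypothetical), assuming both methods live on the same object:

        public async Task<string> RecordFiveSecondsAsync()
        {
            await StartRecordAsync();                  // build the graph and start capturing (example #13)
            await Task.Delay(TimeSpan.FromSeconds(5)); // record for five seconds
            return await EndRecordAsync();             // stop, finalize and return the file path (example #5)
        }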
Code example #14
        private async Task SelectOutputFile()
        {
            var Folder = ApplicationData.Current.LocalFolder;

            Folder = await Folder.CreateFolderAsync("Audio", CreationCollisionOption.OpenIfExists);

            storageFile = await Folder.CreateFileAsync("audio.mp3", CreationCollisionOption.GenerateUniqueName);

            if (storageFile == null)
            {
                return;
            }
            MediaEncodingProfile            fileProfile          = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(storageFile, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                MessageDialog md = new MessageDialog("Error in Audio Device");
                await md.ShowAsync();
                return;
            }
            fileOutputNode = fileOutputNodeResult.FileOutputNode;
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
Code example #15
		/// <summary>
		/// Required before starting recording
		/// </summary>
		/// <param name="file"></param>
		/// <returns></returns>
		private async Task<string> SetFileAsync(StorageFile file)
		{
			if (file == null)
			{
				return "file is empty";
			}

			MediaEncodingProfile fileProfile = CreateMediaEncodingProfile(file);

			// Operate node at the graph format, but save file at the specified format
			CreateAudioFileOutputNodeResult fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(file, fileProfile); // LOLLO NOTE this fails on the phone with mp3, not with wav
			// CreateAudioFileOutputNodeResult fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(file); // this does not fail but it records some crap that cannot be played back, on the phone
			if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
			{
				// FileOutputNode creation failed
				return string.Format("Cannot create output file because {0}", fileOutputNodeResult.Status.ToString());
			}
			_fileOutputNode = fileOutputNodeResult.FileOutputNode;

			// Connect the input node to both output nodes
			_deviceInputNode.AddOutgoingConnection(_fileOutputNode);
			//_deviceInputNode.AddOutgoingConnection(_deviceOutputNode); // left out so we get no echo (no live monitoring)

			return string.Empty;
		}
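
The note in example #15 says CreateFileOutputNodeAsync fails on the phone with an mp3 profile but not with wav. One possible workaround, not taken from the original code, is to retry with a WAV profile when the first attempt does not succeed; note that the saved data would then be WAV even if the picked file has an .mp3 extension.

        // Hypothetical fallback, assuming the same _audioGraph field as example #15
        CreateAudioFileOutputNodeResult fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(file, fileProfile);
        if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
        {
            // Retry with a plain WAV profile, which the note above reports as working on the phone
            var fallbackProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
            fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(file, fallbackProfile);
        }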
Code example #16
        async Task<CreateAudioFileOutputNodeResult> SaveAudioToFile(
            StorageFile file,
            IProgress <string> status)
        {
            _finished = false;
            status.Report("Saving audio to file");

            var mediaEncodingProfile =
                CreateMediaEncodingProfile(file);

            if (!_audioData.IsStereo && mediaEncodingProfile.Audio != null)
            {
                mediaEncodingProfile.Audio.ChannelCount = 1;
            }

            // Initialize FileOutputNode
            var result =
                await _audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                return result;
            }

            _fileOutputNode = result.FileOutputNode;
            _fileOutputNode.Stop();

            // Initialize FrameInputNode and connect it to fileOutputNode
            _frameInputNode = _audioGraph.CreateFrameInputNode(
                // EncodingProperties differ from those of the input file
                _fileOutputNode.EncodingProperties
                //audioEncodingProperties
                );

            _frameInputNode.AddOutgoingConnection(_fileOutputNode);
            _frameInputNode.Stop();

            // Add a handler which will transfer every audioData sample to audio frame
            _frameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            _audioDataCurrentPosition = 0;

            // Start the process that writes the audio file frame by frame
            // and raises QuantumStarted events
            _audioGraph.Start();
            // don't start fileOutputNode yet because it will record zeros

            // Because frameInputNode was initialized in the stopped state, it must be started explicitly
            _frameInputNode.Start();

            // Poll until writing to the file has finished (no better way found to wait for it)
            while (!_finished)
            {
                await Task.Delay(50);
            }

            // When the audioData samples are exhausted and the audio graph has already stopped
            await _fileOutputNode.FinalizeAsync();

            // clean status and progress
            status.Report("");
            _ioProgress.Report(0);

            return result;
        }
Code example #17
        public MainPageViewModel()
        {
            _musicFiles = new ObservableCollection <StorageFile>();
            CurrentFile = new GroorineFileViewModel(null);
            //_player = new Player();
            InitializeAsync();
            DeleteCommand = new DelegateCommand(async(o) =>
            {
                if (!(o is StorageFile))
                {
                    return;
                }
                var sf = o as StorageFile;
                MusicFiles.Remove(sf);
                await sf.DeleteAsync();
            });

            ExportCommand = new DelegateCommand(async(o) =>
            {
                if (!(o is StorageFile))
                {
                    return;
                }
                var sf  = o as StorageFile;
                var fsp = new FileSavePicker();
                fsp.FileTypeChoices.Add("Wave Audio", new List <string> {
                    ".wav"
                });
                fsp.FileTypeChoices.Add("Windows Media Audio", new List <string> {
                    ".wma"
                });
                fsp.FileTypeChoices.Add("MPEG 3 Audio", new List <string> {
                    ".mp3"
                });
                fsp.FileTypeChoices.Add("MPEG 4 Audio", new List <string> {
                    ".m4a"
                });
                fsp.SuggestedFileName = sf.DisplayName;
                fsp.CommitButtonText  = "Bounce";

                StorageFile file = await fsp.PickSaveFileAsync();
                if (file == null)
                {
                    return;
                }

                MediaEncodingProfile mediaEncodingProfile;
                switch (file.FileType.ToString().ToLowerInvariant())
                {
                case ".wma":
                    mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                    break;

                case ".mp3":
                    mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                    break;

                case ".wav":
                    mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    break;

                case ".m4a":
                    mediaEncodingProfile = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.High);
                    break;

                default:
                    throw new ArgumentException();
                }

                CreateAudioFileOutputNodeResult result = await _graph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

                if (result.Status != AudioFileNodeCreationStatus.Success)
                {
                    // FileOutputNode creation failed
                    await new MessageDialog("We couldn't create FileOutputNode, so we failed to bounce.").ShowAsync();
                    return;
                }

                AudioFileOutputNode node = result.FileOutputNode;

                _graph.Stop();

                _frameInputNode.AddOutgoingConnection(node);
                Stop();
                _player.Load(SmfParser.Parse(await sf.OpenStreamForReadAsync()));

                Play();

                _graph.Start();
                var a = new BouncingDialog();

#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
                a.ShowAsync();
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed



                while (_player.IsPlaying)
                {
                    await Task.Delay(1);
                }
                _graph.Stop();

                await node.FinalizeAsync();

                _graph.Start();

                a.Hide();
                await new MessageDialog("Bouncing has successfully finished!").ShowAsync();
            });
        }