Example #1
            public async Task StartAsync()
            {
                m_isRecording = true;

                if (m_isVideo)
                {
                    var profile       = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                    var rotationAngle = CameraRotationHelper.ConvertSimpleOrientationToClockwiseDegrees(_rotationHelper.GetCameraCaptureOrientation());
                    profile.Video.Properties.Add(new Guid("C380465D-2271-428C-9B83-ECEA3B4A85C1"), PropertyValue.CreateInt32(rotationAngle));

                    m_lowLag = await m_mediaCapture.PrepareLowLagRecordToStorageFileAsync(profile, m_file);

                    await m_lowLag.StartAsync();
                }
                else
                {
                    var wavEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                    wavEncodingProfile.Audio.BitsPerSample = 16;
                    wavEncodingProfile.Audio.SampleRate    = 48000;
                    wavEncodingProfile.Audio.ChannelCount  = 1;

                    m_opusSink = await OpusCodec.CreateMediaSinkAsync(m_file);

                    await m_mediaCapture.StartRecordToCustomSinkAsync(wavEncodingProfile, m_opusSink);
                }
            }
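The matching stop path is not shown above. A minimal sketch of what it might look like, assuming the same m_isVideo, m_isRecording, m_lowLag and m_mediaCapture fields (the real class may do additional cleanup):

            // Hypothetical counterpart to StartAsync().
            public async Task StopAsync()
            {
                if (m_isVideo)
                {
                    await m_lowLag.StopAsync();              // stop the low-lag recording
                    await m_lowLag.FinishAsync();            // release the LowLagMediaRecording
                }
                else
                {
                    await m_mediaCapture.StopRecordAsync();  // stop recording to the custom Opus sink
                }

                m_isRecording = false;
            }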
Example #2
        public static async Task StartRecord()
        {
            if (!IsRecording)
            {
                IsRecording     = true;
                _isAudioPlaying = true;
                await Audio.PlayAudio(beep);

                _isAudioPlaying = false;

                if (recordStorageFile == null)
                {
                    recordStorageFile = await Windows.Storage.KnownFolders.MusicLibrary.CreateFileAsync("recording.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);
                }

                MediaEncodingProfile recordProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                recordProfile.Audio.BitsPerSample = 16;
                recordProfile.Audio.ChannelCount  = 1;
                recordProfile.Audio.SampleRate    = 16000;

                try
                {
                    await str.FlushAsync();

                    await _mediaCapture.StartRecordToStreamAsync(recordProfile, str.AsRandomAccessStream());
                }
                catch (Exception e)
                {
                    Debug.WriteLine(e.ToString());
                }
                //await _mediaCapture.StartRecordToStorageFileAsync(recordProfile, recordStorageFile);
                recordStorageFile = null;
            }
        }
Example #3
        private Microphone()
        {
            wavEncodingProfile       = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            wavEncodingProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            Initialize();
        }
Example #4
        private async void Record_click(object sender, RoutedEventArgs e)
        {
            MediaEncodingProfile encodingProfile = null;

            switch (selectedFormat)
            {
            case AudioEncodingFormat.Mp3:
                encodingProfile = MediaEncodingProfile.CreateMp3(SelectedQuality);
                break;

            case AudioEncodingFormat.Mp4:
                encodingProfile = MediaEncodingProfile.CreateM4a(SelectedQuality);
                break;

            case AudioEncodingFormat.Wma:
                encodingProfile = MediaEncodingProfile.CreateWav(SelectedQuality);
                break;

            case AudioEncodingFormat.Avi:
                encodingProfile = MediaEncodingProfile.CreateMp3(SelectedQuality);
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }

            AudioStream = new InMemoryRandomAccessStream();
            await CaptureMedia.StartRecordToStreamAsync(encodingProfile, AudioStream);

            UpdateRecordingControls(RecordingMode.Recording);
            DishTImer.Start();
        }
Example #5
        /// <summary>
        /// Captures audio from the microphone for the specified amount of time.
        /// </summary>
        /// <param name="ct"></param>
        /// <param name="timeToRecord">Amount of time to record.</param>
        /// <returns></returns>
        public async Task <Stream> RecordAsync(CancellationToken ct, TimeSpan timeToRecord)
        {
            MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Audio
            };

            MediaCapture audioCapture = new MediaCapture();
            await audioCapture.InitializeAsync(settings);

            var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            var buffer = new InMemoryRandomAccessStream();
            await audioCapture.StartRecordToStreamAsync(outProfile, buffer);

            await Task.Delay(timeToRecord, ct);

            await audioCapture.StopRecordAsync();

            IRandomAccessStream audio = null;

            try
            {
                audio = buffer.CloneStream();
                return(this.FixWavPcmStream(audio));
            }
            finally
            {
                audio?.Dispose();   // guard against CloneStream having thrown before audio was assigned
            }
        }
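A possible call site for RecordAsync (the recorder instance, the five-second duration and the cancellation source are illustrative, not part of the original):

        // Record five seconds of 16 kHz mono PCM and hand the resulting WAV stream to a caller.
        using (var cts = new CancellationTokenSource())
        {
            Stream wav = await recorder.RecordAsync(cts.Token, TimeSpan.FromSeconds(5));
            // ... feed 'wav' to a speech service, save it to a file, etc.
        }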
Example #6
        //void logo_PointerExited(object sender, PointerRoutedEventArgs e)
        //{
        //    logo.PointerExit();
        //}

        //void logo_PointerEntered(object sender, PointerRoutedEventArgs e)
        //{
        //    logo.PointerEnter();
        //}

        async void logo_Tapped(object sender, TappedRoutedEventArgs e)
        {
            if (!b)
            {
                //StartRecord();
                file = await folder.CreateFileAsync(DateTime.Now.ToString("yyyy-MM-dd hh-mm-ss") + ".wav", CreationCollisionOption.GenerateUniqueName);

                await micphone.StartRecordToStorageFileAsync(MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto), file);

                b = true;
                logo.StartFlash();
            }
            else
            {
                //StopRecord();
                await micphone.StopRecordAsync();

                var stream = await file.OpenAsync(FileAccessMode.Read);

                mediaElement.SetSource(stream, file.FileType);
                mediaElement.AutoPlay = false;
                b = false;
                logo.StopFlash();

                obs.Add(file.Name);
            }
        }
Example #7
        internal async Task <bool> Record()
        {
            if (isRecording)
            {
                Debug.WriteLine("\nCurrent recording is not yet finished,so skip this cycle");
                NumberOfMissedRecordings++;
                return(isRecording);
            }

            var currentDate = DateTime.UtcNow;

            try
            {
                var recordProfileM4a = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.Auto);
                var recordProfileWav = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);
                recordProfileWav.Audio = AudioEncodingProperties.CreatePcm(8000, 1, 16);
                InMemoryRandomAccessStream recordedStream = new InMemoryRandomAccessStream();
                await StartRecordingToStream(recordProfileWav, recordedStream);

                await Task.Delay(recordingTimeInMilliSeconds);   // non-blocking wait; Thread.Sleep would block the async method
                await StopRecording();

                // TODO: send it to Azure
                return(isRecording);
            }
            catch (Exception ex)
            {
                Debug.WriteLine("\n" + ex.Message);
                Cleanup();
                return(isRecording);
            }
        }
Example #8
        public async Task StartAsync()
        {
            if (m_mediaCapture != null)
            {
                throw new InvalidOperationException("Cannot start while recording");
            }

            m_mediaCapture = new MediaCapture();
            await m_mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings()
            {
                MediaCategory        = MediaCategory.Speech,
                AudioProcessing      = AudioProcessing.Default,
                MemoryPreference     = MediaCaptureMemoryPreference.Auto,
                SharingMode          = MediaCaptureSharingMode.SharedReadOnly,
                StreamingCaptureMode = StreamingCaptureMode.Audio,
            });

            m_opusSink = await OpusCodec.CreateMediaSinkAsync(m_file);

            var wawEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

            wawEncodingProfile.Audio.BitsPerSample = 16;
            wawEncodingProfile.Audio.SampleRate    = 48000;
            wawEncodingProfile.Audio.ChannelCount  = 1;
            await m_mediaCapture.StartRecordToCustomSinkAsync(wawEncodingProfile, m_opusSink);
        }
Example #9
        /// <summary>
        /// Starts the audio stream.
        /// </summary>
        public async Task Start()
        {
            try
            {
                if (!Active)
                {
                    await Init();

                    var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);
                    profile.Audio = AudioEncodingProperties.CreatePcm((uint)SampleRate, (uint)ChannelCount, (uint)BitsPerSample);


                    await capture.StartRecordToStreamAsync(profile, stream);

                    Active = true;
                    OnActiveChanged?.Invoke(this, true);

                    _ = Task.Run(() => Record());
                }
            }
            catch (Exception ex)
            {
                Active = false;
                System.Diagnostics.Debug.WriteLine("Error in AudioStream.Start(): {0}", ex);
                throw;
            }
        }
Example #10
        public async Task <object> StreamingMicRecognizeAsync(int seconds)
        {
            _mediaCapture = await GetMediaCapture();

            if (_mediaCapture == null)
            {
                Debug.WriteLine("No microphone!");
                return(null);
            }

            await SetupSpeechClient();

            _isRecording = true;

            var printResponses = PrintResponses();

            var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);

            profile.Audio.SampleRate    = (uint)16000;             // Samples per second
            profile.Audio.BitsPerSample = (uint)16;                // bits per sample
            profile.Audio.ChannelCount  = (uint)1;                 // channels

            var stream = new AudioAmplitudeStream(_streamingCall); // custom stream implementation
            await _mediaCapture.StartRecordToStreamAsync(profile, stream);

            stream.AmplitudeReading += AmplitudeReading; // get an amplitude event

            return(null);
        }
Example #11
        private async void recordBtn_Click(object sender, RoutedEventArgs e)
        {
            if (record)
            {
                //recording already in progress
            }
            else
            {
                await RecordProcess();

                //await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateMp3(AudioEncodingQuality.Auto), buffer);

                var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);

                profile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                await capture.StartRecordToStreamAsync(profile, buffer);

                if (record)
                {
                    throw new InvalidOperationException();
                }
                record = true;
                recordBtn.IsEnabled = false;
            }
        }
Example #12
        /// <summary>
        /// Recording code is taken from UWP samples and slightly reduced
        ///
        /// - see official UWP samples on GitHub
        ///   https://github.com/Microsoft/Windows-universal-samples/blob/master/Samples/AudioCreation/cs/AudioCreation/Scenario2_DeviceCapture.xaml.cs
        ///
        /// </summary>
        public async Task StartRecording()
        {
            await CreateAudioGraph();

            var temporaryFile = await ApplicationData.Current.TemporaryFolder.TryGetItemAsync(TemporaryWaveFile) as StorageFile;

            if (temporaryFile != null)
            {
                await temporaryFile.DeleteAsync(StorageDeleteOption.Default);
            }

            temporaryFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(TemporaryWaveFile);

            var fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            var fileOutputNodeResult = await _audioGraph.CreateFileOutputNodeAsync(temporaryFile, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                await new MessageDialog("Cannot create output file: " + fileOutputNodeResult.Status).ShowAsync();
                return;
            }

            _fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            _deviceInputNode.AddOutgoingConnection(_fileOutputNode);
            _deviceInputNode.AddOutgoingConnection(_deviceOutputNode);

            // Ta da!
            _audioGraph.Start();
        }
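The UWP sample the comment points to also finalizes the file output node when recording ends. A minimal stop counterpart, assuming the same _audioGraph and _fileOutputNode fields (a sketch, not the original project's code):

        // Hypothetical StopRecording(): FinalizeAsync flushes the encoder and closes the WAV file.
        public async Task StopRecording()
        {
            _audioGraph.Stop();

            var finalizeResult = await _fileOutputNode.FinalizeAsync();
            if (finalizeResult != TranscodeFailureReason.None)
            {
                await new MessageDialog("Cannot finalize output file: " + finalizeResult).ShowAsync();
            }
        }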
Example #13
        public async static Task <StorageFile> FromStorageFile(StorageFile source)
        {
            MediaTranscoder         transcoder = new MediaTranscoder();
            MediaEncodingProfile    profile    = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
            CancellationTokenSource cts        = new CancellationTokenSource();

            string      fileName = String.Format("TempFile_{0}.wav", Guid.NewGuid());
            StorageFile output   = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(fileName);

            if (source == null || output == null)
            {
                return(null);
            }
            try
            {
                var preparedTranscodeResult = await transcoder.PrepareFileTranscodeAsync(source, output, profile);

                if (preparedTranscodeResult.CanTranscode)
                {
                    var progress = new Progress <double>((percent) => { });
                    await preparedTranscodeResult.TranscodeAsync().AsTask(cts.Token, progress);
                }
                return(output);
            }
            catch
            {
                return(null);
            }
        }
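A hedged usage sketch for the transcoding helper above (sourceMp3File is an illustrative StorageFile, not part of the original):

        // Convert any decodable audio file into a temporary WAV copy.
        StorageFile wavCopy = await FromStorageFile(sourceMp3File);
        if (wavCopy == null)
        {
            // the source was null, the temp file could not be created, or transcoding failed
        }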
Example #14
        private async Task InitAudioMeterAsync()
        {
            var result = await AudioGraph.CreateAsync(new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Speech));

            if (result.Status == AudioGraphCreationStatus.Success)
            {
                this.audioGraph = result.Graph;

                var audioDevice = (AudioDeviceComboBox.SelectedItem as ComboBoxItem);
                if (audioDevice == null)
                {
                    return;
                }

                var microphone = await DeviceInformation.CreateFromIdAsync(audioDevice.Tag.ToString());

                var inProfile   = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                var inputResult = await this.audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Speech, inProfile.Audio, microphone);

                if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
                {
                    var msg = new MessageDialog("Device is not available");
                    await msg.ShowAsync();

                    return;
                }

                this.audioGraph.Start();

                var source = PlaybackSource.CreateFromAudioNode(inputResult.DeviceInputNode);
                AudioDiscreteVUBar.Source = source.Source;
            }
        }
Example #15
        private async void HyperlinkButton_Click_1(object sender, RoutedEventArgs e)
        {
            if (!recording)
            {
                try
                {
                    CaptureMedia = new MediaCapture();
                    var captureSettings = new MediaCaptureInitializationSettings();
                    captureSettings.StreamingCaptureMode = StreamingCaptureMode.Audio;
                    await CaptureMedia.InitializeAsync(captureSettings);

                    CaptureMedia.Failed += CaptureMedia_Failed;
                    CaptureMedia.RecordLimitationExceeded += CaptureMedia_RecordLimitationExceeded;

                    DishTImer          = new DispatcherTimer();
                    DishTImer.Interval = new TimeSpan(0, 0, 0, 0, 100);
                    DishTImer.Tick    += DishTImer_Tick;

                    AudioStream = new InMemoryRandomAccessStream();
                    try
                    {
                        MediaEncodingProfile encodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
                        await CaptureMedia.StartRecordToStreamAsync(encodingProfile, AudioStream);
                    }
                    catch
                    {
                        MediaEncodingProfile encodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
                        await CaptureMedia.StartRecordToStreamAsync(encodingProfile, AudioStream);
                    }

                    DishTImer.Start();
                    recording = true;
                }
                catch { /* ignore capture initialization failures */ }
            }
            else
            {
                await CaptureMedia.StopRecordAsync();

                RecordButton.Text         = App.GetString("/Dialogs/SavingAudio");
                recording                 = false;
                RecordHyperlink.IsEnabled = false;
                DishTImer.Stop();
                var mediaFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync("Audio.mp3", CreationCollisionOption.GenerateUniqueName);

                using (var dataReader = new DataReader(AudioStream.GetInputStreamAt(0)))
                {
                    await dataReader.LoadAsync((uint)AudioStream.Size);

                    byte[] buffer = new byte[(int)AudioStream.Size];
                    dataReader.ReadBytes(buffer);
                    await FileIO.WriteBytesAsync(mediaFile, buffer);

                    RecordButton.Text         = App.GetString("/Dialogs/AdvancedRecordSoundTB");
                    RecordHyperlink.IsEnabled = true;
                }
                AddAttachement(mediaFile);
            }
        }
Example #16
        private async void btnStartRecord_Click(object sender, RoutedEventArgs e)
        {
            btnStartRecord.IsEnabled = !btnStartRecord.IsEnabled;
            btnStopRecord.IsEnabled  = !btnStopRecord.IsEnabled;

            await RecordProcess();

            await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateWav(AudioEncodingQuality.High), buffer);
        }
Example #17
        //</SnippetCreateFileInputNode>
        //<SnippetCreateFileOutputNode>
        private async Task CreateFileOutputNode()
        {
            FileSavePicker saveFilePicker = new FileSavePicker();

            saveFilePicker.FileTypeChoices.Add("Pulse Code Modulation", new List <string>()
            {
                ".wav"
            });
            saveFilePicker.FileTypeChoices.Add("Windows Media Audio", new List <string>()
            {
                ".wma"
            });
            saveFilePicker.FileTypeChoices.Add("MPEG Audio Layer-3", new List <string>()
            {
                ".mp3"
            });
            saveFilePicker.SuggestedFileName = "New Audio Track";
            StorageFile file = await saveFilePicker.PickSaveFileAsync();

            // File can be null if cancel is hit in the file picker
            if (file == null)
            {
                return;
            }

            Windows.Media.MediaProperties.MediaEncodingProfile mediaEncodingProfile;
            switch (file.FileType.ToString().ToLowerInvariant())
            {
            case ".wma":
                mediaEncodingProfile = MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                break;

            case ".mp3":
                mediaEncodingProfile = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                break;

            case ".wav":
                mediaEncodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                break;

            default:
                throw new ArgumentException();
            }


            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult result = await audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                ShowErrorMessage(result.Status.ToString());
                return;
            }

            fileOutputNode = result.FileOutputNode;
        }
Example #18
        public async Task Start()
        {
            Capture = await MediaCaptureHandler.Init(MediaCategory, AudioProcessingType);

            Profile       = MediaEncodingProfile.CreateWav(Quality);
            Profile.Audio = AudioEncodingProperties.CreatePcm(SampleRate, ChannelCount, BitsPerSample);

            memoryBuffer = new InMemoryRandomAccessStream();
            await Capture.StartRecordToStreamAsync(Profile, memoryBuffer);
        }
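A matching stop is not part of the snippet; assuming the same Capture and memoryBuffer fields, it might look like this (a sketch, not the library's actual API):

        // Hypothetical Stop(): end the capture and rewind the buffer so the WAV data can be read.
        public async Task Stop()
        {
            await Capture.StopRecordAsync();
            memoryBuffer.Seek(0);   // readers start at the RIFF header
        }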
Example #19
        private async Task CreateAudioGraph()
        {
            if (graph != null)
            {
                graph.Dispose();
            }

            AudioGraphSettings settings = new AudioGraphSettings(AudioRenderCategory.Media);

            settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.SystemDefault;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);

            if (result.Status != AudioGraphCreationStatus.Success)
            {
                // Cannot create graph
                return;
            }

            graph = result.Graph;

            // Create a device input node using the default audio input device (requires the Microphone capability in the app manifest)
            CreateAudioDeviceInputNodeResult deviceInputNodeResult = await graph.CreateDeviceInputNodeAsync(MediaCategory.Other);

            if (deviceInputNodeResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                // Cannot create device input node
                return;
            }

            deviceInputNode = deviceInputNodeResult.DeviceInputNode;

            // Create the output file

            StorageFolder storageFolder = Windows.Storage.ApplicationData.Current.LocalFolder;
            StorageFile   file          = await storageFolder.CreateFileAsync("sample.wav", Windows.Storage.CreationCollisionOption.ReplaceExisting);

            path = file.Path.ToString();

            MediaEncodingProfile fileProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

            // Operate node at the graph format, but save file at the specified format
            CreateAudioFileOutputNodeResult fileOutputNodeResult = await graph.CreateFileOutputNodeAsync(file, fileProfile);

            if (fileOutputNodeResult.Status != AudioFileNodeCreationStatus.Success)
            {
                // FileOutputNode creation failed
                return;
            }

            fileOutputNode = fileOutputNodeResult.FileOutputNode;

            // Connect the input node to both output nodes
            deviceInputNode.AddOutgoingConnection(fileOutputNode);
        }
Example #20
        private async void OnButtonClicked(object sender, RoutedEventArgs e)
        {
            if (!_recording)
            {
                MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = StreamingCaptureMode.Audio
                };

                _capture = new MediaCapture();
                await _capture.InitializeAsync(settings);

                _capture.RecordLimitationExceeded += async(MediaCapture s) =>
                {
                    await new MessageDialog("Record limtation exceeded", "Error").ShowAsync();
                };

                _capture.Failed += async(MediaCapture s, MediaCaptureFailedEventArgs args) =>
                {
                    await new MessageDialog("Media capture failed: " + args.Message, "Error").ShowAsync();
                };

                _buffer = new InMemoryRandomAccessStream();
                var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);
                profile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16); // Must be mono (1 channel)
                await _capture.StartRecordToStreamAsync(profile, _buffer);

                TheButton.Content = "Verify";
                _recording        = true;
            }
            else // Recording
            {
                if (_capture != null && _buffer != null && _id != null && _id != Guid.Empty)
                {
                    await _capture.StopRecordAsync();

                    IRandomAccessStream stream = _buffer.CloneStream();
                    var client = new SpeakerVerificationServiceClient(_key);

                    var response = await client.VerifyAsync(stream.AsStream(), _id);

                    string message = String.Format("Result: {0}, Confidence: {1}", response.Result, response.Confidence);
                    await new MessageDialog(message).ShowAsync();

                    _capture.Dispose();
                    _capture = null;

                    _buffer.Dispose();
                    _buffer = null;
                }

                TheButton.Content = "Start";
                _recording        = false;
            }
        }
Example #21
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            if (CaptureMedia == null)
            {
                btnRecognition.Content = "Stop Voice Recognition";
                CaptureMedia           = new MediaCapture();
                var captureInitSettings = new MediaCaptureInitializationSettings();
                captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Audio;
                await CaptureMedia.InitializeAsync(captureInitSettings);

                CaptureMedia.Failed += MediaCaptureOnFailed;
                CaptureMedia.RecordLimitationExceeded += MediaCaptureOnRecordLimitationExceeded;

                MediaEncodingProfile encodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
                AudioStream = new InMemoryRandomAccessStream();

                await CaptureMedia.StartRecordToStreamAsync(encodingProfile, AudioStream);
            }
            else
            {
                btnRecognition.Content = "Start Voice Recognition";
                await CaptureMedia.StopRecordAsync();

                var    auth  = new Authenticator();
                string token = await auth.Authenticate("97ca907166a84fa7baf8e3a7a3faca3f");

                var a = new SpeechToText();
                a.AuthorizationToken = token;

                byte[] buffer = null;

                //var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///whatstheweatherlike.wav"));
                //using (IRandomAccessStreamWithContentType stream = await file.OpenReadAsync())
                //{
                //    buffer = new byte[stream.Size];

                //    using (DataReader reader = new DataReader(stream))
                //    {
                //        await reader.LoadAsync((uint)stream.Size);
                //        reader.ReadBytes(buffer);
                //    }
                //}

                using (var dataReader = new DataReader(AudioStream.GetInputStreamAt(0)))
                {
                    await dataReader.LoadAsync((uint)AudioStream.Size);

                    buffer = new byte[(int)AudioStream.Size];
                    dataReader.ReadBytes(buffer);
                }

                var response = await a.Recognize(CancellationToken.None, buffer);
            }
        }
Example #22
        private async void VoiceCaptureButton_Click(object sender, RoutedEventArgs e)
        {
            string output;

            // Start recording
            if (VoiceRecordSym == true)
            {
                _memoryBuffer = new InMemoryRandomAccessStream();
                VoiceCaptureButton.FontFamily = new FontFamily("Segoe UI");
                VoiceCaptureButton.Content    = "停止录音";
                VoiceRecordSym = false;
                if (IsRecording)
                {
                    throw new InvalidOperationException("Recording already in progress!");
                }
                MediaCaptureInitializationSettings settings =
                    new MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = StreamingCaptureMode.Audio
                };
                _mediaCapture = new MediaCapture();
                await _mediaCapture.InitializeAsync(settings);

                // Write the recording into _memoryBuffer
                await _mediaCapture.StartRecordToStreamAsync(MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto), _memoryBuffer);

                IsRecording = true;
            }
            // Stop recording
            else
            {
                await _mediaCapture.StopRecordAsync();

                IsRecording = false;
                VoiceCaptureButton.FontFamily = new FontFamily("Segoe MDL2 Assets");
                VoiceCaptureButton.Content    = "\xE1D6";
                VoiceRecordSym       = true;
                progessRing.IsActive = true;
                Input.IsReadOnly     = true;
                // Convert the InMemoryRandomAccessStream to a Stream
                Stream tempStream = WindowsRuntimeStreamExtensions.AsStreamForRead(_memoryBuffer.GetInputStreamAt(0));
                using (var stream = new MemoryStream())
                {
                    tempStream.CopyTo(stream);
                    VoiceToText voiceToText = new VoiceToText();
                    // Pass the stream to the VoiceToText helper
                    output = await voiceToText.ReadVoice(stream, "yue");
                }
                //tempStream.Position = 0;
                progessRing.IsActive = false;
                Input.IsReadOnly     = false;
                Input.Text          += output;
            }
        }
Example #23
        public async Task StartRecording()
        {
            //Create file in ApplicationData LocalFolder
            try
            {
                var file = await ApplicationData.Current.LocalFolder.CreateFileAsync("MyVoice.wav", CreationCollisionOption.ReplaceExisting);

                if (file != null)
                {
                    //Create AudioGraph for Speech audio
                    var result = await AudioGraph.CreateAsync(
                        new AudioGraphSettings(AudioRenderCategory.Speech));

                    if (result.Status == AudioGraphCreationStatus.Success)
                    {
                        this.graph = result.Graph;

                        var microphone = await DeviceInformation.CreateFromIdAsync(
                            MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default));

                        //.WAV file AudioQuality.Low gives only 1 channel
                        var outProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);
                        // PCM encoding, 16 kHz sample rate, mono, 16 bits per sample
                        outProfile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);

                        //Create File Output Node
                        var outputResult = await this.graph.CreateFileOutputNodeAsync(file, outProfile);

                        if (outputResult.Status == AudioFileNodeCreationStatus.Success)
                        {
                            this.outputNode = outputResult.FileOutputNode;

                            var inProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

                            var inputResult = await this.graph.CreateDeviceInputNodeAsync(
                                MediaCategory.Speech,
                                this.graph.EncodingProperties,
                                microphone);

                            if (inputResult.Status == AudioDeviceNodeCreationStatus.Success)
                            {
                                inputResult.DeviceInputNode.AddOutgoingConnection(this.outputNode);

                                this.graph.Start();
                            }
                        }
                    }
                }
            }
            catch (UnauthorizedAccessException e)
            {
                Debug.WriteLine("Unauthorized exception when recording " + e.Message);
            }
        }
Example #24
        private async void RecordButton_Click(object sender, RoutedEventArgs e)
        {
            MediaEncodingProfile encodingProfile = null;

            encodingProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            m_audioStream = new InMemoryRandomAccessStream();
            await m_mediaCapture.StartRecordToStreamAsync(encodingProfile, m_audioStream);

            UpdateRecordingControls(RecordingMode.Recording);
            m_timer.Start();
        }
Example #25
        MediaEncodingProfile CreateMediaEncodingProfile(StorageFile file)
        {
            switch (file.FileType.ToString().ToLowerInvariant())
            {
            case ".wma": return(MediaEncodingProfile.CreateWma(AudioEncodingQuality.High));

            case ".mp3": return(MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High));

            case ".wav": return(MediaEncodingProfile.CreateWav(AudioEncodingQuality.High));

            default: throw new ArgumentException();
            }
        }
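One way this helper might be wired up, assuming an already-initialized MediaCapture named capture (illustrative only):

        // Pick an encoding profile that matches the target file's extension, then record into it.
        MediaEncodingProfile profile = CreateMediaEncodingProfile(file);
        await capture.StartRecordToStorageFileAsync(profile, file);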
Example #26
        public async void StartRecord(object sender, RoutedEventArgs e)
        {
            if (isRecording)
            {
                return;
            }

            // Start recording
            await capture.StartRecordToStreamAsync(MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto), buffer);

            isRecording     = true;
            recordStartTime = DateTime.UtcNow;
            timer.Start();
        }
Example #27
        public async Task <Signal> LoadSignalAsync(StorageFile file)
        {
            Stream stream;

            // if the user opens a WAV file, load its contents directly
            if (file.Name.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
            {
                stream = await file.OpenStreamForReadAsync();
            }
            // otherwise transcode to wave
            else
            {
                var transcoder = new MediaTranscoder();
                var profile    = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);

                var temporaryFile = await ApplicationData.Current.TemporaryFolder.TryGetItemAsync(TemporaryWaveFile) as StorageFile;

                if (temporaryFile != null)
                {
                    await temporaryFile.DeleteAsync(StorageDeleteOption.Default);
                }

                temporaryFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(TemporaryWaveFile);

                if (temporaryFile == null)
                {
                    return(null);
                }

                var preparedTranscodeResult = await transcoder.PrepareFileTranscodeAsync(file, temporaryFile, profile);

                if (preparedTranscodeResult.CanTranscode)
                {
                    await preparedTranscodeResult.TranscodeAsync();
                }
                else
                {
                    await new MessageDialog("Error: could not convert to wave!").ShowAsync();
                }

                stream = await temporaryFile.OpenStreamForReadAsync();
            }

            var signal = new Signal();

            await Task.Run(() => signal.Load(stream));

            return(signal);
        }
Example #28
        private int intDelay = 200;  // Record time in milliseconds ...
        // A shorter time increases repetitive noise ...
        // A longer time creates an echo ...
        // I think 200 ms is the optimum ...

        // If you use headphones, the best time is 500..1000 ms ...


        private async void btnStart_Click(object sender, RoutedEventArgs e)
        {
            blnStart            = true;
            btnStart.Visibility = Visibility.Collapsed;
            btnStop.Visibility  = Visibility.Visible;

            textBlock.Visibility = Visibility.Visible;

            mediaCaptureAudioPrimery = new Windows.Media.Capture.MediaCapture();

            var settings = new Windows.Media.Capture.MediaCaptureInitializationSettings();

            settings.StreamingCaptureMode = Windows.Media.Capture.StreamingCaptureMode.Audio;
            settings.MediaCategory        = Windows.Media.Capture.MediaCategory.Other;
            settings.AudioProcessing      = Windows.Media.AudioProcessing.Default; // Use only Default

            await mediaCaptureAudioPrimery.InitializeAsync(settings);

            recordProfile = MediaEncodingProfile.CreateWav(Windows.Media.MediaProperties.AudioEncodingQuality.Low);


            while (blnStart)  // Repeat until stopped ...
            {
                try
                {
                    msIRAS0     = new MemoryStream();
                    streamIRAS0 = msIRAS0.AsRandomAccessStream();                                        // New Stream ...
                    await mediaCaptureAudioPrimery.StartRecordToStreamAsync(recordProfile, streamIRAS0); // write audio to the first stream ...

                    await Task.Delay(intDelay);

                    await mediaCaptureAudioPrimery.StopRecordAsync();   // Stop first stream
                    await PlayThreadMethod(streamIRAS0);                // Play from first stream

                    msIRAS1     = new MemoryStream();
                    streamIRAS1 = msIRAS1.AsRandomAccessStream();                                        // Second stream ...
                    await mediaCaptureAudioPrimery.StartRecordToStreamAsync(recordProfile, streamIRAS1); // switch recording to the second stream ...

                    await Task.Delay(intDelay);

                    await mediaCaptureAudioPrimery.StopRecordAsync();
                    await PlayThreadMethod(streamIRAS1);                // Play the second stream
                }
                catch (Exception ex)
                {
                    Stop();
                }
            }
        }
Example #29
        private async Task InitialiseAudioFileOutputNode()
        {
            var outputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Low);

            outputProfile.Audio = AudioEncodingProperties.CreatePcm(this.SAMPLE_RATE, this.CHANNEL, this.BITS_PER_SAMPLE);

            var outputResult = await this._audioGraph.CreateFileOutputNodeAsync(this._storageFile, outputProfile);

            if (outputResult.Status != AudioFileNodeCreationStatus.Success)
            {
                throw new MicrophoneServiceException("AudioFileNode creation error !");
            }

            this._audioFileOutputNode = outputResult.FileOutputNode;
        }
Example #30
        private async Task InitialiseAudioFeed()
        {
            var defaultAudioCaptureId = MediaDevice.GetDefaultAudioCaptureId(AudioDeviceRole.Default);
            var microphone            = await DeviceInformation.CreateFromIdAsync(defaultAudioCaptureId);

            var inputProfile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
            var inputResult  = await this._audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Media, inputProfile.Audio, microphone);

            if (inputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new MicrophoneServiceException("AudioDeviceNode creation error !");
            }

            inputResult.DeviceInputNode.AddOutgoingConnection(this._audioFileOutputNode);
        }
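Examples #29 and #30 only build the output and input nodes; a plausible call order that ties them together, assuming _audioGraph and _storageFile are already created, is:

        // Build the file output node first so the input node has something to connect to.
        await InitialiseAudioFileOutputNode();   // Example #29
        await InitialiseAudioFeed();             // Example #30
        this._audioGraph.Start();                // microphone audio now flows into the WAV file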