Example #1
            public AudioOutputStreamClip(string name, PullAudioOutputStream stream, WaveFormat format)
            {
                Name = name;

                _stream = stream;
                _format = format;
            }
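A minimal usage sketch for the constructor above, assuming an NAudio-style WaveFormat and an ActivityReceivedEventArgs (here called activityEventArgs) carrying the bot's TTS audio; the clip name and the 16 kHz / 16-bit / mono format are assumptions chosen to match the WAV header built in Example #2.

            // Hypothetical usage: wrap the bot's TTS PullAudioOutputStream in the clip type above.
            var clip = new AudioOutputStreamClip(
                "BotResponse",                  // assumed clip name
                activityEventArgs.Audio,        // PullAudioOutputStream from the Speech SDK event args
                new WaveFormat(16000, 16, 1));  // 16 kHz, 16-bit, mono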
Example #2
        // Waits for and accumulates all audio associated with a given PullAudioOutputStream and then plays it to the
        // MediaElement. Long spoken audio will create extra latency and a streaming playback solution (that plays
        // audio while it continues to be received) should be used -- see the samples for examples of this.
        private void SynchronouslyPlayActivityAudio(PullAudioOutputStream activityAudio)
        {
            var playbackStreamWithHeader = new MemoryStream();

            playbackStreamWithHeader.Write(Encoding.ASCII.GetBytes("RIFF"), 0, 4);        // ChunkID
            playbackStreamWithHeader.Write(BitConverter.GetBytes(UInt32.MaxValue), 0, 4); // ChunkSize: max
            playbackStreamWithHeader.Write(Encoding.ASCII.GetBytes("WAVE"), 0, 4);        // Format
            playbackStreamWithHeader.Write(Encoding.ASCII.GetBytes("fmt "), 0, 4);        // Subchunk1ID
            playbackStreamWithHeader.Write(BitConverter.GetBytes(16), 0, 4);              // Subchunk1Size: PCM
            playbackStreamWithHeader.Write(BitConverter.GetBytes(1), 0, 2);               // AudioFormat: PCM
            playbackStreamWithHeader.Write(BitConverter.GetBytes(1), 0, 2);               // NumChannels: mono
            playbackStreamWithHeader.Write(BitConverter.GetBytes(16000), 0, 4);           // SampleRate: 16kHz
            playbackStreamWithHeader.Write(BitConverter.GetBytes(32000), 0, 4);           // ByteRate
            playbackStreamWithHeader.Write(BitConverter.GetBytes(2), 0, 2);               // BlockAlign
            playbackStreamWithHeader.Write(BitConverter.GetBytes(16), 0, 2);              // BitsPerSample: 16-bit
            playbackStreamWithHeader.Write(Encoding.ASCII.GetBytes("data"), 0, 4);        // Subchunk2ID
            playbackStreamWithHeader.Write(BitConverter.GetBytes(UInt32.MaxValue), 0, 4); // Subchunk2Size

            byte[] pullBuffer = new byte[2056];

            uint lastRead = 0;

            do
            {
                lastRead = activityAudio.Read(pullBuffer);
                playbackStreamWithHeader.Write(pullBuffer, 0, (int)lastRead);
            } while (lastRead == pullBuffer.Length);

            var task = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                mediaElement.SetSource(playbackStreamWithHeader.AsRandomAccessStream(), "audio/wav");
                mediaElement.Play();
            });
        }
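A hedged sketch of how the method above might be wired up; the connector field name and the subscribing method are assumptions, while DialogServiceConnector.ActivityReceived, ActivityReceivedEventArgs.HasAudio, and ActivityReceivedEventArgs.Audio are the Speech SDK members also used in the later examples.

        // Hypothetical wiring: play each spoken bot response as it arrives.
        // "connector" is an assumed DialogServiceConnector field on the same page class.
        private void SubscribeToBotAudio()
        {
            connector.ActivityReceived += (sender, e) =>
            {
                if (e.HasAudio)
                {
                    SynchronouslyPlayActivityAudio(e.Audio);
                }
            };
        }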
        /// <summary>
        /// Write TTS Audio to WAV file.
        /// </summary>
        /// <param name="audio"> TTS Audio.</param>
        /// <param name="baseFileName"> File name where this test is specified. </param>
        /// <param name="dialogID">The value of the DialogID in the input test file.</param>
        /// <param name="turnID">The value of the TurnID in the input test file.</param>
        /// <param name="indexActivityWithAudio">Index value of the current TTS response.</param>
        private int WriteAudioToWAVfile(PullAudioOutputStream audio, string baseFileName, string dialogID, int turnID, int indexActivityWithAudio)
        {
            FileStream fs = null;
            string     testFileOutputFolder = Path.Combine(this.appsettings.OutputFolder, baseFileName + "Output");
            string     wAVFolderPath        = Path.Combine(testFileOutputFolder, ProgramConstants.WAVFileFolderName);
            int        durationInMS         = 0;

            if (indexActivityWithAudio == 0)
            {
                // First TTS WAV file to be written, create the WAV File Folder
                Directory.CreateDirectory(wAVFolderPath);
            }

            this.outputWAV = Path.Combine(wAVFolderPath, baseFileName + "-BotResponse-" + dialogID + "-" + turnID + "-" + indexActivityWithAudio + ".WAV");
            byte[] buff = new byte[MaxSizeOfTtsAudioInBytes];
            uint   bytesReadtofile;

            try
            {
                fs = File.Create(this.outputWAV);
                fs.Write(new byte[WavHeaderSizeInBytes]);
                while ((bytesReadtofile = audio.Read(buff)) > 0)
                {
                    fs.Write(buff, 0, (int)bytesReadtofile);
                }

                WriteWavHeader(fs);
            }
            catch (Exception e)
            {
                Trace.TraceError(e.ToString());
            }
            finally
            {
                fs?.Close();
            }

            WaveFileReader waveFileReader = new WaveFileReader(this.outputWAV);

            durationInMS = (int)waveFileReader.TotalTime.TotalMilliseconds;
            waveFileReader.Dispose();
            return durationInMS;
        }
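WriteWavHeader is called above but not shown. The following is a hedged reconstruction, assuming WavHeaderSizeInBytes is 44 and the same 16 kHz / 16-bit / mono PCM layout that Example #2 writes byte by byte; it seeks back to the start of the file and sizes the RIFF header to the audio that was just written.

        // Hypothetical reconstruction of the WriteWavHeader helper used above.
        private void WriteWavHeader(FileStream fs)
        {
            int dataLength = (int)fs.Length - WavHeaderSizeInBytes;

            fs.Seek(0, SeekOrigin.Begin);
            fs.Write(Encoding.ASCII.GetBytes("RIFF"), 0, 4);        // ChunkID
            fs.Write(BitConverter.GetBytes(36 + dataLength), 0, 4); // ChunkSize
            fs.Write(Encoding.ASCII.GetBytes("WAVE"), 0, 4);        // Format
            fs.Write(Encoding.ASCII.GetBytes("fmt "), 0, 4);        // Subchunk1ID
            fs.Write(BitConverter.GetBytes(16), 0, 4);              // Subchunk1Size: PCM
            fs.Write(BitConverter.GetBytes((short)1), 0, 2);        // AudioFormat: PCM
            fs.Write(BitConverter.GetBytes((short)1), 0, 2);        // NumChannels: mono
            fs.Write(BitConverter.GetBytes(16000), 0, 4);           // SampleRate: 16 kHz
            fs.Write(BitConverter.GetBytes(32000), 0, 4);           // ByteRate
            fs.Write(BitConverter.GetBytes((short)2), 0, 2);        // BlockAlign
            fs.Write(BitConverter.GetBytes((short)16), 0, 2);       // BitsPerSample: 16-bit
            fs.Write(Encoding.ASCII.GetBytes("data"), 0, 4);        // Subchunk2ID
            fs.Write(BitConverter.GetBytes(dataLength), 0, 4);      // Subchunk2Size
        }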
        private async void DialogServiceConnector_ActivityReceived(object sender, ActivityReceivedEventArgs e)
        {
            var json     = e.Activity;
            var activity = JsonConvert.DeserializeObject<Activity>(json);

            if (e.HasAudio)
            {
                UpdateUI(() =>
                {
                    this.Messages.Add(new MessageDisplay("Audio received", Sender.Other));
                });

                audioStream = e.Audio;
                frameInputNode.Start();
            }

            await UpdateActivity(json);

            UpdateUI(() =>
            {
                this.Activities.Add(new ActivityDisplay(json, activity, Sender.Bot));
            });
        }
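The handler above only starts frameInputNode once audio arrives; a hedged sketch of how the audio graph and node might be created elsewhere in the same class follows. The AudioGraph calls are standard UWP APIs (Windows.Media.Audio, Windows.Media.Render, Windows.Media.MediaProperties), but the field names and the 16 kHz / 16-bit / mono encoding are assumptions chosen to match the bot's TTS output.

        // Hypothetical one-time setup of the audio graph used by the handler above.
        private async Task InitializeAudioGraphAsync()
        {
            var graphResult = await AudioGraph.CreateAsync(
                new AudioGraphSettings(AudioRenderCategory.Speech));
            var graph = graphResult.Graph;

            var outputResult = await graph.CreateDeviceOutputNodeAsync();

            // Encoding assumed to match the 16 kHz, 16-bit, mono TTS stream.
            var encoding = AudioEncodingProperties.CreatePcm(16000, 1, 16);

            frameInputNode = graph.CreateFrameInputNode(encoding);
            frameInputNode.AddOutgoingConnection(outputResult.DeviceOutputNode);
            frameInputNode.Stop(); // started later, once e.Audio has been captured into audioStream

            // A QuantumStarted handler (not shown) would pull bytes from audioStream
            // into AudioFrames and feed them to frameInputNode.
            graph.Start();
        }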
Example #5
 public WaveProvider(PullAudioOutputStream stream, WaveFormat format)
 {
     WaveFormat = format;
     _stream    = stream;
 }
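The constructor above suggests an NAudio IWaveProvider implementation; a hedged sketch of the Read member such a provider also needs follows. PullAudioOutputStream.Read fills its argument from index zero, so a temporary buffer is used when a non-zero offset is requested; everything beyond the constructor shown above is an assumption.

 // Hypothetical IWaveProvider.Read implementation for the provider above.
 public int Read(byte[] buffer, int offset, int count)
 {
     var temp = new byte[count];
     int bytesRead = (int)_stream.Read(temp);

     Buffer.BlockCopy(temp, 0, buffer, offset, bytesRead);
     return bytesRead;
 }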
Example #6
 public PullStream(PullAudioOutputStream stream)
 {
     _stream = stream;
 }
        public void PlayStream(PullAudioOutputStream stream)
        {
            _streamList.Enqueue(stream);

            EnsureIsPlaying();
        }
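EnsureIsPlaying is referenced above but not shown. The following is a heavily hedged sketch that assumes _streamList is a ConcurrentQueue<PullAudioOutputStream>, _isPlaying is an int flag field, and PlaySingleStream is a hypothetical helper that plays one stream to completion.

        // Hypothetical implementation: start a single background loop that drains the queue.
        private void EnsureIsPlaying()
        {
            if (Interlocked.CompareExchange(ref _isPlaying, 1, 0) != 0)
            {
                return; // a playback loop is already running
            }

            Task.Run(() =>
            {
                while (_streamList.TryDequeue(out PullAudioOutputStream next))
                {
                    PlaySingleStream(next); // blocks until this stream finishes playing (hypothetical)
                }

                Interlocked.Exchange(ref _isPlaying, 0);
            });
        }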
Example #8
 /// <summary>
 /// Initializes a new instance of the <see cref="DirectLineSpeechAudioOutputStream"/> class.
 /// </summary>
 /// <param name="audioSource"> The PullAudioOutputStream that should be read from. </param>
 public DirectLineSpeechAudioOutputStream(PullAudioOutputStream audioSource, DialogAudio format)
     : base(format)
 {
     this.audioSource = audioSource;
 }
Example #9
 public void Dispose()
 {
     _stream?.Dispose();
     _stream = null;
 }