internal static bool Skip(SpeechSession session, int desyncMilliseconds, out int deltaSamples, out int deltaDesyncMilliseconds)
        {
            //We're too far behind where we ought to be to resync with speed adjustment. Skip ahead to where we should be.
            if (desyncMilliseconds > ResetDesync.TotalMilliseconds)
            {
                Log.Warn("Playback desync ({0}ms) beyond recoverable threshold; resetting stream to current time", desyncMilliseconds);

                deltaSamples            = desyncMilliseconds * session.OutputWaveFormat.SampleRate / 1000;
                deltaDesyncMilliseconds = -desyncMilliseconds;

                // skip through the session the required number of samples
                // we allocate here, but we are already in an error case rather than normal operation
                return session.Read(new ArraySegment<float>(new float[deltaSamples]));
            }

            //We're too far ahead of where we ought to be to resync with speed adjustment. Insert silent frames to resync
            if (desyncMilliseconds < -ResetDesync.TotalMilliseconds)
            {
                Log.Error("Playback desync ({0}ms) AHEAD beyond recoverable threshold", desyncMilliseconds);
            }

            deltaSamples            = 0;
            deltaDesyncMilliseconds = 0;
            return false;
        }
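A rough sketch of how a playback loop might drive this helper is shown below. The surrounding names (_session handling, _playedSamples, _desyncMilliseconds, CalculateDesyncMilliseconds, EndSession) are assumptions made for illustration; only Skip itself comes from the code above.

        // Hypothetical caller: measure desync, let Skip discard samples if needed, then apply the deltas
        private void UpdatePlayback(SpeechSession session)
        {
            // Positive values mean playback has fallen behind where it should be (assumed convention)
            var desyncMilliseconds = CalculateDesyncMilliseconds();

            int deltaSamples, deltaDesyncMilliseconds;
            var complete = Skip(session, desyncMilliseconds, out deltaSamples, out deltaDesyncMilliseconds);

            // Account for the discarded samples and the desync that was corrected
            _playedSamples      += deltaSamples;
            _desyncMilliseconds += deltaDesyncMilliseconds;

            // If discarding reached the end of the session, stop playing it
            if (complete)
                EndSession(session);
        }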
Example #2
        public void Play(SpeechSession session)
        {
            if (Session != null)
            {
                throw Log.CreatePossibleBugException("Attempted to play a session when one is already playing", "C4F19272-994D-4025-AAEF-37BB62685C2E");
            }

            Log.Debug("Began playback of speech session. id={0}", session.Context.Id);

            if (DebugSettings.Instance.EnablePlaybackDiagnostics && DebugSettings.Instance.RecordFinalAudio)
            {
                var filename = string.Format("Dissonance_Diagnostics/Output_{0}_{1}_{2}", session.Context.PlayerName, session.Context.Id, DateTime.UtcNow.ToFileTime());
                Interlocked.Exchange(ref _diagnosticOutput, new AudioFileWriter(filename, session.OutputWaveFormat));
            }

            _sessionLock.EnterWriteLock();
            try
            {
                ApplyReset();
                Session = session;
            }
            finally
            {
                _sessionLock.ExitWriteLock();
            }
        }
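For context, a minimal sketch of the matching read side is shown below, assuming the same _sessionLock reader/writer lock guards Session when another thread (e.g. the audio thread) samples it. The method name and shape are illustrative, not taken from the library.

        // Illustrative only: snapshot the current session under a read lock
        private SpeechSession? TryGetSession()
        {
            _sessionLock.EnterReadLock();
            try
            {
                return Session;
            }
            finally
            {
                _sessionLock.ExitReadLock();
            }
        }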
        internal static bool Filter(SpeechSession session, [NotNull] float[] output, int channels, [NotNull] float[] temp, [CanBeNull] AudioFileWriter diagnosticOutput, out float arv)
        {
            //Read out data from source (exactly as much as we need for one channel)
            var samplesRequired = output.Length / channels;
            var complete        = session.Read(new ArraySegment<float>(temp, 0, samplesRequired));

            //Write the audio we're about to play to the diagnostics writer (on disk)
            if (diagnosticOutput != null)
            {
                diagnosticOutput.WriteSamples(new ArraySegment<float>(temp, 0, samplesRequired));
            }

            //Step through samples, stretching them (i.e. play mono input in all output channels)
            float accumulator = 0;
            var   sampleIndex = 0;

            for (var i = 0; i < output.Length; i += channels)
            {
                //Get a single sample from the source data
                var sample = temp[sampleIndex++];

                //Accumulate the sum of the audio signal
                accumulator += Mathf.Abs(sample);

                //Multiply the sample into every output channel
                for (var c = 0; c < channels; c++)
                {
                    output[i + c] *= sample;
                }
            }

            arv = accumulator / output.Length;

            return complete;
        }
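A sketch of how a Unity component might call this from its audio callback follows. The fields (_session, _temp, _arv, _diagnosticOutput) and the end-of-session handling are assumptions for the example; _temp must hold at least data.Length / channels samples.

        // Hypothetical Unity audio thread wiring; field names are assumptions for illustration
        private void OnAudioFilterRead(float[] data, int channels)
        {
            var session = _session;
            if (session == null)
            {
                Array.Clear(data, 0, data.Length);
                return;
            }

            // Mix the speech into the buffer and capture the average rectified value of the signal
            float arv;
            var complete = Filter(session.Value, data, channels, _temp, _diagnosticOutput, out arv);
            _arv = arv;

            // When the session reports completion, stop playing it
            if (complete)
                _session = null;
        }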
        /// <summary>
        ///     Starts a new speech session and adds it to the queue for playback
        /// </summary>
        /// <param name="format">The frame format.</param>
        /// <param name="now">Current time, or null for DateTime.UtcNow</param>
        /// <param name="jitter">Jitter estimator, or null for this stream to estimate its own jitter</param>
        public void StartSession(FrameFormat format, DateTime?now = null, [CanBeNull] IJitterEstimator jitter = null)
        {
            if (PlayerName == null)
            {
                throw Log.CreatePossibleBugException("Attempted to `StartSession` but `PlayerName` is null", "0C0F3731-8D6B-43F6-87C1-33CEC7A26804");
            }

            _active = GetOrCreateDecoderPipeline(format, _volumeProvider);

            var session = SpeechSession.Create(new SessionContext(PlayerName, unchecked(_currentId++)), jitter ?? this, _active, _active, now ?? DateTime.UtcNow);

            _awaitingActivation.Enqueue(session);

            Log.Debug("Created new speech session with buffer time of {0}ms", session.Delay.TotalMilliseconds);
        }
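A hypothetical call site might look like the following; the surrounding names (playbackStream, OnSpeechStarted, sharedJitterEstimator) are assumptions, and only StartSession itself comes from the code above.

        // Illustrative only: begin playback when the network layer reports incoming speech
        private void OnSpeechStarted(FrameFormat frameFormat)
        {
            // Defaults: current time is DateTime.UtcNow and the stream estimates its own jitter
            playbackStream.StartSession(frameFormat);

            // Alternatively, share one jitter estimate across streams:
            // playbackStream.StartSession(frameFormat, jitter: sharedJitterEstimator);
        }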
Example #5
        internal static bool Filter(SpeechSession session, float[] data, int channels, float[] temp, [CanBeNull] AudioFileWriter diagnosticOutput, out float arv, out int samplesRead, bool multiply)
        {
            //Read out data from source (exactly as much as we need for one channel)
            var samplesRequired = data.Length / channels;
            var complete        = session.Read(new ArraySegment<float>(temp, 0, samplesRequired));

            if (diagnosticOutput != null)
            {
                diagnosticOutput.WriteSamples(new ArraySegment<float>(temp, 0, samplesRequired));
            }

            float accumulator = 0;

            //Step through samples, stretching them (i.e. play mono input in all output channels)
            var sampleIndex = 0;

            for (var i = 0; i < data.Length; i += channels)
            {
                //Get a single sample from the source data
                var sample = temp[sampleIndex++];

                //Accumulate the sum of the audio signal
                accumulator += Mathf.Abs(sample);

                //Write the sample into every output channel (multiplying or overwriting, depending on the flag)
                for (var c = 0; c < channels; c++)
                {
                    if (multiply)
                    {
                        data[i + c] *= sample;
                    }
                    else
                    {
                        data[i + c] = sample;
                    }
                }
            }

            arv         = accumulator / data.Length;
            samplesRead = samplesRequired;

            return complete;
        }
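The multiply flag chooses between scaling whatever is already in the buffer by the voice signal (presumably useful when the buffer has been pre-filled, e.g. with per-channel gains) and overwriting the buffer with the raw samples. A minimal sketch of the two call patterns, with made-up buffer names:

        // Illustrative only; data and _temp are assumptions for the example
        float arv;
        int samplesRead;

        // Scale the existing buffer contents by the voice signal
        Filter(session, data, channels, _temp, null, out arv, out samplesRead, multiply: true);

        // Or overwrite the buffer with the raw voice signal
        Filter(session, data, channels, _temp, null, out arv, out samplesRead, multiply: false);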
Example #6
        internal static bool Skip(SpeechSession session, int desyncMilliseconds, out int deltaSamples, out int deltaDesyncMilliseconds)
        {
            //If we're really far out of sync just skip forward the playback
            if (desyncMilliseconds > ResetDesync.TotalMilliseconds)
            {
                Log.Warn("Playback desync ({0}ms) beyond recoverable threshold; resetting stream to current time", desyncMilliseconds);

                deltaSamples            = desyncMilliseconds * session.OutputWaveFormat.SampleRate / 1000;
                deltaDesyncMilliseconds = -desyncMilliseconds;

                // skip through the session the required number of samples
                // we allocate here, but we are already in an error case rather than normal operation
                return session.Read(new ArraySegment<float>(new float[deltaSamples]));
            }

            deltaSamples            = 0;
            deltaDesyncMilliseconds = 0;
            return false;
        }
Example #7
        internal static bool Skip(SpeechSession session, int desyncMilliseconds, out int deltaSamples, out int deltaDesyncMilliseconds)
        {
            //We're too far behind where we ought to be to resync with speed adjustment. Skip ahead to where we should be.
            if (desyncMilliseconds > ResetDesync.TotalMilliseconds)
            {
                Log.Warn("Playback desync ({0}ms) beyond recoverable threshold; resetting stream to current time", desyncMilliseconds);

                deltaSamples            = desyncMilliseconds * session.OutputWaveFormat.SampleRate / 1000;
                deltaDesyncMilliseconds = -desyncMilliseconds;

                //Read out a load of data and discard it, forcing ourselves back into sync
                //If reading completes the session, exit early.
                var toRead = deltaSamples;
                while (toRead > 0)
                {
                    var count = Math.Min(toRead, DesyncFixBuffer.Length);
                    if (session.Read(new ArraySegment<float>(DesyncFixBuffer, 0, count)))
                    {
                        return true;
                    }
                    toRead -= count;
                }

                //We completed all the reads so obviously none of the reads finished the session
                return false;
            }

            //We're too far ahead of where we ought to be to resync with speed adjustment. Insert silent frames to resync
            if (desyncMilliseconds < -ResetDesync.TotalMilliseconds)
            {
                Log.Error("Playback desync ({0}ms) AHEAD beyond recoverable threshold", desyncMilliseconds);
            }

            deltaSamples            = 0;
            deltaDesyncMilliseconds = 0;
            return false;
        }
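This variant avoids allocating on the skip path by draining into a preallocated scratch buffer. The declarations below are a guess at what ResetDesync and DesyncFixBuffer might look like; the actual threshold and buffer size in the library may differ.

        // Assumed declarations, for illustration only
        private static readonly TimeSpan ResetDesync = TimeSpan.FromSeconds(1);

        // Preallocated scratch buffer so that skipping ahead does not allocate on the audio path
        private static readonly float[] DesyncFixBuffer = new float[1024];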