// Reconfigures the audio pipeline for a new libspotify sample format:
// rebuilds the wave buffer, then signals the format change downstream.
private void SetAudioFormat(libspotify.sp_audioformat format)
 {
     LogTo.Trace("STDP: SetAudioFormat()");
     // (Re)create or clear the buffered wave provider for this format.
     SetupBuffer(format);
     // NOTE(review): _newFormat and _init are delegates assigned elsewhere in
     // this class; presumably they re-attach _wave to the output device and
     // restart playback — confirm against the code that sets them.
     _newFormat(_wave);
     _init();
 }
        // Handles a chunk of PCM audio delivered by the session.  Returns the
        // number of bytes consumed; returning 0 asks the caller to redeliver
        // the same data later.
        private int Session_OnAudioDataArrived(byte[] buffer, libspotify.sp_audioformat format)
        {
            // Rebuild the pipeline when no buffer exists yet or the incoming
            // format no longer matches the one the buffer was created for.
            // (Short-circuit keeps _waveFormat from being read before setup.)
            bool needsSetup = !_bufferset
                || format.channels != _waveFormat.Channels
                || format.sample_rate != _waveFormat.SampleRate;
            if (needsSetup)
            {
                SetAudioFormat(format);
            }

            // An empty delivery flushes any queued audio.
            if (buffer.Length == 0)
            {
                _wave.ClearBuffer();
                return(0);
            }

            // Refuse data that would overflow the wave buffer; the caller
            // will redeliver these bytes once space frees up.
            int freeBytes = _wave.BufferLength - _wave.BufferedBytes;
            if (buffer.Length > freeBytes)
            {
                return(0);
            }

            _wave.AddSamples(buffer, 0, buffer.Length);
            return(buffer.Length);
        }
Exemple #3
0
        // libspotify music_delivery callback: marshals the delivered PCM frames
        // out of native memory and forwards them to OnAudioDataArrived.
        // Returns the number of frames consumed.
        private static int music_delivery(IntPtr sessionPtr, IntPtr formatPtr, IntPtr framesPtr, int num_frame)
        {
            // API 11 fires this several times after a track ends with
            // num_frame == 22050 and all-zero buffers, which would append
            // silence to the track.  That data is ignored downstream in
            // SpotifyTrackDataDataPipe (after the end_of_track callback).

            if (num_frame == 0)
            {
                return(0);
            }

            var format = (libspotify.sp_audioformat)Marshal.PtrToStructure(formatPtr, typeof(libspotify.sp_audioformat));

            // 16-bit interleaved PCM: frames * 2 bytes * channel count.
            var pcm = new byte[num_frame * sizeof(Int16) * format.channels];
            Marshal.Copy(framesPtr, pcm, 0, pcm.Length);

            Session.OnAudioDataArrived?.Invoke(format, pcm, num_frame);

            return(num_frame);
        }
Exemple #4
0
        // libspotify music_delivery callback: copies the delivered PCM frames
        // into managed memory, raises OnAudioDataArrived, and queues the audio
        // on the NAudio sound buffer.  Returns the number of frames consumed.
        private static int music_delivery(IntPtr sessionPtr, IntPtr formatPtr, IntPtr framesPtr, int num_frame)
        {
            // Nothing delivered, nothing consumed.
            if (num_frame == 0)
            {
                return(0);
            }

            var format = (libspotify.sp_audioformat)Marshal.PtrToStructure(formatPtr, typeof(libspotify.sp_audioformat));

            // 16-bit interleaved PCM: frames * 2 bytes * channel count.
            var samples = new byte[num_frame * sizeof(Int16) * format.channels];
            Marshal.Copy(framesPtr, samples, 0, samples.Length);

            Session.OnAudioDataArrived?.Invoke(samples, format, num_frame);

            // Lazily create the sound output the first time audio arrives,
            // since the format is only known at this point.
            if (soundBuffer == null)
            {
                SetupSoundOutput(format);
            }

            soundBuffer.AddSamples(samples, 0, samples.Length);

            return(num_frame);
        }
        // libspotify music_delivery callback: resolves the managed Session for
        // this native session pointer, copies the PCM frames, and raises the
        // MusicDelivery event.  Returns the frame count the handler consumed.
        private static int MusicDeliveryCallback(IntPtr sessionPtr, IntPtr formatPtr, IntPtr framesPtr, int num_frames)
        {
            Session s = GetSession(sessionPtr);
            if (s == null)
            {
                return(0);
            }

            var format = (libspotify.sp_audioformat)Marshal.PtrToStructure(formatPtr, typeof(libspotify.sp_audioformat));

            // Copy the delivered frames out of native memory; a zero-frame
            // delivery is represented by an empty array.
            byte[] samplesBytes;
            if (num_frames > 0)
            {
                // 2 bytes per sample (16-bit PCM), interleaved across channels.
                samplesBytes = new byte[num_frames * format.channels * 2];
                Marshal.Copy(framesPtr, samplesBytes, 0, samplesBytes.Length);
            }
            else
            {
                samplesBytes = new byte[0];
            }

            var e = new MusicDeliveryEventArgs(format.channels, format.sample_rate, samplesBytes, num_frames);
            s.OnMusicDelivery(e);

            // The event handler records how many frames it actually took.
            return(e.ConsumedFrames);
        }
Exemple #6
0
        // Builds a 16-bit PCM NAudio output chain (BufferedWaveProvider +
        // waveOut) matching the given libspotify audio format.
        private static void SetupSoundOutput(libspotify.sp_audioformat format)
        {
            const int bitsPerSample = 16;

            // One frame = one sample per channel.
            int bytesPerFrame = format.channels * (bitsPerSample / 8);

            var waveFormat = WaveFormat.CreateCustomFormat(
                WaveFormatEncoding.Pcm,
                format.sample_rate,
                format.channels,
                format.sample_rate * bytesPerFrame, // average bytes per second
                bytesPerFrame,                      // block align
                bitsPerSample);

            soundBuffer = new BufferedWaveProvider(waveFormat)
            {
                // Hold up to ten seconds of audio ahead of playback.
                BufferDuration = TimeSpan.FromSeconds(10),
            };
            waveOut.Init(soundBuffer);
        }
        // Ensures the buffered wave provider matches the given format.  If a
        // buffer with the same channel count and sample rate already exists it
        // is reused (after clearing any queued audio); otherwise a fresh
        // 16-bit provider with a ten-second buffer is created.
        private static void SetupBuffer(libspotify.sp_audioformat format)
        {
            LogTo.Trace("STDP: SetupBuffer()");
            _bufferset = true;

            bool sameFormat = _wave != null
                && _waveFormat != null
                && format.channels == _waveFormat.Channels
                && format.sample_rate == _waveFormat.SampleRate;
            if (sameFormat)
            {
                LogTo.Trace("Buffer already setup for this waveformat, clearing and continuing");
                _wave.ClearBuffer();
                return;
            }

            LogTo.Trace("Buffer not setup for this waveformat");
            _waveFormat = new WaveFormat(format.sample_rate, 16, format.channels);
            _wave       = new BufferedWaveProvider(_waveFormat)
            {
                BufferDuration = TimeSpan.FromSeconds(10),
            };
        }
Exemple #8
0
        // Receives decoded PCM from the session and appends it to the capture
        // buffer, finishing the encode when the stream appears to have ended.
        static void Session_OnAudioDataArrived(libspotify.sp_audioformat fmt, byte[] obj, int num_frame)
        {
            // Once the stream is marked complete, drop any further data.
            if (audioStreamComplete)
            {
                return;
            }

            // Frame counts of exactly 22050, above 44100, or zero fall outside
            // the normal delivery range and mark libspotify's end-of-track
            // silence padding: treat the stream as done and finalize.
            bool endOfStream = num_frame == 22050 || num_frame > 44100 || num_frame == 0;
            if (endOfStream)
            {
                audioStreamComplete = true;
                staticfmt           = fmt;
                FinishEncode();
                return;
            }

            buf.Write(obj, 0, obj.Length);

            // Report progress on the UI thread, if a form is attached.
            frm?.BeginInvoke((Delegate) new MethodInvoker(() => frm.SetStatus("Fetched " + buf.Length + " bytes")));
        }