/// <summary>
/// Creates (but does not start) the worker thread that pumps the decoder:
/// drains decoded PCM frames to <c>SampleDecoded</c> subscribers and feeds
/// queued encoded AAC packets into the decoder.
/// </summary>
/// <returns>An unstarted <see cref="Thread"/> running the decode loop.</returns>
public override Thread DecodingThread()
        {
            return new Thread(() =>
            {
                while (true)
                {
                    bool didWork = false;

                    // Dequeue decoded frames; ret == 0 signals a frame was produced.
                    int ret = DequeueDecodedFrame(out byte[] audioSampleData);
                    if (ret == 0)
                    {
                        SampleDecoded?.Invoke(new PCMSample(audioSampleData));
                        didWork = true;
                    }

                    // Feed the next encoded packet, if any, to the decoder.
                    // NOTE(review): Queue<T> is not thread-safe; if PushData runs on
                    // another thread this Count/Dequeue pair can race — the catch
                    // below only papers over that. Consider ConcurrentQueue<T>.
                    try
                    {
                        if (encodedDataQueue.Count > 0)
                        {
                            AACFrame frame = encodedDataQueue.Dequeue();
                            EnqueuePacketForDecoding(frame.RawData);
                            didWork = true;
                        }
                    }
                    catch (InvalidOperationException e)
                    {
                        Debug.WriteLine($"FFmpegAudio Loop: {e}");
                    }

                    // Fix: the original loop busy-spun at 100% CPU whenever both
                    // queues were empty; yield briefly when there was nothing to do.
                    if (!didWork)
                    {
                        Thread.Sleep(1);
                    }
                }
            });
        }
// Example #2
// 0
        /// <summary>
        /// Assembles an AAC frame from raw audio data and queues it for the
        /// codec, if a codec has been created.
        /// </summary>
        /// <param name="data">Freshly received raw audio payload.</param>
        public void ConsumeAudioData(Packets.AudioData data)
        {
            AACFrame frame = AudioAssembler.AssembleAudioFrame(
                data,
                AACProfile.LC,
                (int)_audioFormat.SampleRate,
                (byte)_audioFormat.Channels);

            // Fix: guard against a failed assembly before enqueueing —
            // AssembleAudioFrame can return null (the event-args overload of
            // this method performs the same check).
            if (frame == null)
            {
                return;
            }

            if (_audioCodec != null)
            {
                _audioFrameQueue.Enqueue(frame);
            }
        }
        /// <summary>
        /// Lazily initializes the decoder on first use, then hands the frame's
        /// raw AAC bytes to the audio queue.
        /// </summary>
        /// <param name="data">Assembled AAC frame whose <c>RawData</c> is consumed.</param>
        public void FeedAudioData(AACFrame data)
        {
            if (!_initialized)
            {
                Initialize();
            }

            // NOTE(review): PushFront makes this newest-first (LIFO) — confirm the
            // consumer expects that; PushBack would preserve arrival order.
            _audioQueue.PushFront(data.RawData);

            // (Removed a large block of commented-out Android MediaCodec code:
            // dead code belongs in version control history, not the source.)
        }
// Example #4
// 0
        /// <summary>
        /// Called by NanoClient on freshly received data: assembles an AAC frame
        /// and pushes it to the audio handler for decoding.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="args">Carries the raw audio payload to assemble.</param>
        public void ConsumeAudioData(object sender, AudioDataEventArgs args)
        {
            // TODO: Sorting
            AACFrame frame = _audioAssembler.AssembleAudioFrame(
                data: args.AudioData,
                profile: AACProfile.LC,
                samplingFreq: (int)_audioFormat.SampleRate,
                channels: (byte)_audioFormat.Channels);

            // Fix: the null check originally ran AFTER frame.GetCodecSpecificData()
            // was dereferenced, so a failed assembly threw NullReferenceException.
            // Bail out first.
            if (frame == null)
            {
                return;
            }

            // One-time decoder setup from the first frame's codec-specific data.
            if (!_audioContextInitialized)
            {
                _audioHandler.UpdateCodecParameters(frame.GetCodecSpecificData());
                _audioContextInitialized = true;
            }

            // Enqueue encoded audio data in decoder
            _audioHandler.PushData(frame);
        }
 /// <summary>Queues an encoded AAC frame for the decoding loop to consume.</summary>
 /// <param name="data">The assembled AAC frame to enqueue.</param>
 public void PushData(AACFrame data)
 {
     encodedDataQueue.Enqueue(data);
 }