Example 1
        public void encode(byte[] data, int offset, int size)
        {
            if (!running)
            {
                return;
            }
            if (size == 0)
            {
                return;
            }
            int buffer_index = -1;

            lock (availableBuffers)
            {
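                // availableBuffers is assumed to be filled with free input-buffer indices
                // by the encoder's OnInputBufferAvailable callback (not shown in this snippet)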
                if (availableBuffers.Count > 0)
                {
                    buffer_index = availableBuffers[0];
                    availableBuffers.RemoveAt(0);
                }
            }

            if (buffer_index > -1)
            {
                var ib = audioEncoder.GetInputBuffer(buffer_index);
                ib.Clear();

                // Copy only the requested slice of the source array into the codec buffer
                ib.Put(data, offset, size);

                // The copied data starts at position 0 of the input buffer, so the buffer
                // offset passed to QueueInputBuffer is 0 rather than the source-array offset
                audioEncoder.QueueInputBuffer(buffer_index, 0, size, 0, 0);
            }
            return;
        }
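Example 1 above and Example 3 below assume a class-level availableBuffers list of free input-buffer indices and an outputBuffers list of encoded chunks. The callback that maintains those collections is not part of the snippets; the following is a minimal sketch, under the assumption that the encoder runs in asynchronous mode, with an illustrative class name, constructor, and field wiring.

    // Minimal sketch (assumption): the asynchronous callback that keeps availableBuffers
    // and outputBuffers up to date for encode() / encodeAndSend(). Names are illustrative.
    using System.Collections.Generic;
    using Android.Media;

    class AudioEncoderCallback : MediaCodec.Callback
    {
        private readonly List<int> availableBuffers;      // free input-buffer indices
        private readonly List<byte[]> outputBuffers;      // encoded chunks waiting to be sent

        public AudioEncoderCallback(List<int> availableBuffers, List<byte[]> outputBuffers)
        {
            this.availableBuffers = availableBuffers;
            this.outputBuffers = outputBuffers;
        }

        public override void OnInputBufferAvailable(MediaCodec codec, int index)
        {
            // Remember the free input buffer; the audio thread dequeues it under the same lock
            lock (availableBuffers)
            {
                availableBuffers.Add(index);
            }
        }

        public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
        {
            // Copy the encoded bytes out, then hand the buffer back to the codec
            var ob = codec.GetOutputBuffer(index);
            var chunk = new byte[info.Size];
            ob.Position(info.Offset);
            ob.Get(chunk, 0, info.Size);
            lock (outputBuffers)
            {
                outputBuffers.Add(chunk);
            }
            codec.ReleaseOutputBuffer(index, false);
        }

        public override void OnError(MediaCodec codec, MediaCodec.CodecException e) { }

        public override void OnOutputFormatChanged(MediaCodec codec, MediaFormat format) { }
    }

In asynchronous mode the encoder would receive this callback through MediaCodec.SetCallback before Start; the original code presumably wires up something equivalent.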
Example 2
        /// <summary>
        /// Called when MediaCodec wants a new frame to decode
        /// </summary>
        /// <param name="codec">Codec.</param>
        /// <param name="index">Index.</param>
        public override void OnInputBufferAvailable(MediaCodec codec, int index)
        {
            if (_videoQueue.Size < 1)
            {
                // FIXME: Is it proper to enqueue an empty
                // buffer like this?
                codec.QueueInputBuffer(index, 0, 0, 0, MediaCodecBufferFlags.None);
                return;
            }

            var data = _videoQueue.Back();

            _videoQueue.PopBack();
            if (data != null)
            {
                // Get pre-allocated buffer from MediaCodec
                Java.Nio.ByteBuffer buffer = codec.GetInputBuffer(index);

                // Stuff in our raw framedata
                buffer.Put(data);

                // Tell the decoder to process the frame
                codec.QueueInputBuffer(index, 0, data.Length, 0, MediaCodecBufferFlags.None);
            }
        }
Example 3
    public void encodeAndSend(int num_bytes)
    {
        if (num_bytes > 0)
        {
            int buffer_index = -1;

            lock (availableBuffers)
            {
                if (availableBuffers.Count > 0)
                {
                    buffer_index = availableBuffers[0];
                    availableBuffers.RemoveAt(0);
                }
            }

            if (buffer_index > -1)
            {
                var ib = audioEncoder.GetInputBuffer(buffer_index);
                ib.Clear();

                ib.Put(buffer, 0, num_bytes);   // copy only the first num_bytes of the member array 'buffer'

                audioEncoder.QueueInputBuffer(buffer_index, 0, num_bytes, 0, 0);
            }
        }

        byte[] data_to_send = null;
        lock (outputBuffers)
        {
            int total_size = 0;
            foreach (var buf in outputBuffers)
            {
                total_size += buf.Length;
            }

            if (total_size >= 400)
            {
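                // At least 400 encoded bytes have accumulated: concatenate all
                // pending chunks and deliver them in a single callback below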
                data_to_send = new byte[total_size];
                int data_written = 0;
                foreach (var buf in outputBuffers)
                {
                    Array.Copy(buf, 0, data_to_send, data_written, buf.Length);
                    data_written += buf.Length;
                }
                outputBuffers.Clear();
            }
        }
        if (data_to_send != null)
        {
            OnSoundDataReceived(data_to_send);
        }
    }
Example 4
        public override void OnInputBufferAvailable(MediaCodec codec, int index)
        {
            H264Frame frame;
            bool      success = _videoFrameQueue.TryDequeue(out frame);

            if (!success)
            {
                codec.QueueInputBuffer(index, 0, 0, 0, 0);
                return;
            }

            Java.Nio.ByteBuffer buffer = codec.GetInputBuffer(index);
            buffer.Put(frame.RawData);

            // tell the decoder to process the frame
            codec.QueueInputBuffer(index, 0, frame.RawData.Length, 0, 0);
        }
Example 5
    private void decode(int buffer_index, byte[] data)
    {
        if (!running)
        {
            return;
        }
        try
        {
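            // MediaCodec calls can throw (e.g. IllegalStateException) if the decoder has
            // been stopped or released; the catch below absorbs those cases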
            var ib = audioDecoder.GetInputBuffer(buffer_index);
            ib.Clear();

            ib.Put(data);

            audioDecoder.QueueInputBuffer(buffer_index, 0, data.Length, 0, 0);
        }
        catch (Exception e)
        {
            Logging.error("Exception occurred in audio decoder: " + e);
        }
    }
Example 6
        private Thread GetDecoderThread()
        {
            var decoderThread = new Thread(() =>
            {
                try
                {
                    while (!_disposed)
                    {
                        int inputBufferIndex = _mediaCodec.DequeueInputBuffer(50000);
                        if (inputBufferIndex >= 0)
                        {
                            ByteBuffer inputBuffer = _mediaCodec.GetInputBuffer(inputBufferIndex);
                            if (inputBuffer != null)
                            {
                                // Wait until an encoded sample is available or the codec is being disposed
                                byte[] sample = null;
                                do
                                {
                                    sample = _audioQueue.Size < 1 ? null : _audioQueue.Back();
                                } while (sample == null && !_disposed);

                                if (sample != null)
                                {
                                    // Remove the sample only after it has actually been taken;
                                    // during disposal the queue may be empty
                                    _audioQueue.PopBack();
                                    inputBuffer.Put(sample, 0, sample.Length);
                                    _mediaCodec.QueueInputBuffer(inputBufferIndex, 0, sample.Length, 0, MediaCodecBufferFlags.None);
                                }
                            }
                        }
                    }
                }
                catch (ThreadInterruptedException)
                {
                    // Ignore Thread got interrupted from outside
                }
            });

            decoderThread.Daemon   = true;
            decoderThread.Priority = Thread.MaxPriority;
            return decoderThread;
        }
Example 7
        private void OnInputBufferAvailableInner(MediaCodec mc, int inputBufferId)
        {
            byte[] encoded_data = null;
            // Poll for the next encoded frame until one arrives or the session is closed.
            // Note: this blocks the codec's callback thread, so the 500 ms poll interval
            // directly limits how quickly input buffers can be filled.
            while ((encoded_data = mCallbackObj.getEncodedFrameData()) == null && !isClosed)
            {
                Thread.Sleep(500);
            }
            if (isClosed)
            {
                return;
            }
            Console.WriteLine("OnInputBufferAvailable: got encoded data!");

            if (encoded_data != null)
            {
                int sampleSize = encoded_data.Length;
                if (sampleSize > 0)
                {
                    ByteBuffer inputBuffer = mDecoder.GetInputBuffer(inputBufferId);
                    inputBuffer.Put(encoded_data);

                    if (frameCounter == 0)
                    {
                        Console.WriteLine("feed a frame contains SSP and PSP");
                        mDecoder.QueueInputBuffer(inputBufferId, 0, sampleSize, 0, MediaCodec.BufferFlagCodecConfig);
                    }
                    else
                    {
                        Console.WriteLine("QueueInputBuffer inputIndex=" + inputBufferId.ToString());
                        mDecoder.QueueInputBuffer(inputBufferId, 0, sampleSize, frameCounter * 1000 /* 1FPS */, 0);
                    }
                }
                else
                {
                    Console.WriteLine("QueueInputBuffer set MediaCodec.BufferFlagEndOfStream");
                    mDecoder.QueueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BufferFlagEndOfStream);
                }
                frameCounter++;
            }
        }
Example 8
        public async void rtspClientStart()
        {
            rtspCancel = new CancellationTokenSource();

            var url = "rtsp://192.168.0.10:8554/H264Video";

            String       now    = DateTime.Now.ToString("yyyyMMdd_HHmmss");
            MemoryStream fs_vps = null; // holds the VPS/SPS/PPS codec configuration
            MemoryStream fs_v   = null; // holds the video NALs of the current access unit
            MemoryStream fs_a   = null; // audio stream (unused in this example)

            h264 = false;
            h265 = false;
            bool spsdone = false;

            RTSPClient c = new RTSPClient();

            // The SPS/PPS comes from the SDP data
            // or it is the first SPS/PPS from the H264 video stream
            c.Received_SPS_PPS += (byte[] sps, byte[] pps) => {
                h264 = true;
                if (fs_vps == null)
                {
                    String filename = "rtsp_capture_" + now + ".264";
                    fs_vps = new MemoryStream();
                }

                if (fs_vps != null)
                {
                    fs_vps.SetLength(0);
                    fs_vps.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4);  // Write Start Code
                    fs_vps.Write(sps, 0, sps.Length);
                    fs_vps.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4);  // Write Start Code
                    fs_vps.Write(pps, 0, pps.Length);
                }
            };

            c.Received_VPS_SPS_PPS += (byte[] vps, byte[] sps, byte[] pps) => {
                h265 = true;
                if (fs_vps == null)
                {
                    String filename = "rtsp_capture_" + now + ".265";
                    fs_vps = new MemoryStream();
                }

                if (fs_vps != null)
                {
                    fs_vps.SetLength(0);
                    fs_vps.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4); // Write Start Code
                    fs_vps.Write(vps, 0, vps.Length);                          // Video parameter set
                    fs_vps.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4); // Write Start Code
                    fs_vps.Write(sps, 0, sps.Length);                          // Sequence Parameter Set
                    fs_vps.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4); // Write Start Code
                    fs_vps.Write(pps, 0, pps.Length);                          // Picture Parameter Set
                }
            };

            // Video NALs. May also include the SPS and PPS in-band for H264
            c.Received_NALs += (List <byte[]> nal_units) =>
            {
                foreach (byte[] nal_unit in nal_units)
                {
                    // Output some H264 stream information
                    if (h264 && nal_unit.Length > 0)
                    {
                        int    nal_ref_idc   = (nal_unit[0] >> 5) & 0x03;
                        int    nal_unit_type = nal_unit[0] & 0x1F;
                        String description   = "";
                        if (nal_unit_type == 1)
                        {
                            description = "NON IDR NAL";
                        }
                        else if (nal_unit_type == 5)
                        {
                            description = "IDR NAL";
                        }
                        else if (nal_unit_type == 6)
                        {
                            description = "SEI NAL";
                        }
                        else if (nal_unit_type == 7)
                        {
                            description = "SPS NAL";
                        }
                        else if (nal_unit_type == 8)
                        {
                            description = "PPS NAL";
                        }
                        else if (nal_unit_type == 9)
                        {
                            description = "ACCESS UNIT DELIMITER NAL";
                        }
                        else
                        {
                            description = "OTHER NAL";
                        }
                        //Console.WriteLine("NAL Ref = " + nal_ref_idc + " NAL Type = " + nal_unit_type + " " + description);
                    }

                    // Output some H265 stream information
                    if (h265 && nal_unit.Length > 0)
                    {
                        int    nal_unit_type = (nal_unit[0] >> 1) & 0x3F;
                        String description   = "";
                        if (nal_unit_type == 1)
                        {
                            description = "NON IDR NAL";
                        }
                        else if (nal_unit_type == 19)
                        {
                            description = "IDR NAL";
                        }
                        else if (nal_unit_type == 32)
                        {
                            description = "VPS NAL";
                        }
                        else if (nal_unit_type == 33)
                        {
                            description = "SPS NAL";
                        }
                        else if (nal_unit_type == 34)
                        {
                            description = "PPS NAL";
                        }
                        else if (nal_unit_type == 39)
                        {
                            description = "SEI NAL";
                        }
                        else
                        {
                            description = "OTHER NAL";
                        }
                        //Console.WriteLine("NAL Type = " + nal_unit_type + " " + description);
                    }

                    // the decoder needs the SPS/PPS configuration first; skip the rest of this batch until it arrives
                    if (!h264 && !h265)
                    {
                        return;
                    }

                    if (!spsdone)
                    {
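                        // Queue the parameter sets once, flagged as codec config, before any frame data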
                        if (callbacks == null || callbacks.buffers.Count == 0)
                        {
                            return;
                        }
                        var index  = callbacks.buffers.Pop();
                        var buffer = codec.GetInputBuffer(index);
                        buffer.Clear();
                        buffer.Put(fs_vps.ToArray());
                        codec.QueueInputBuffer(index, 0, (int)fs_vps.Length, 0, MediaCodecBufferFlags.CodecConfig);
                        spsdone = true;

                        fs_v = new MemoryStream();
                    }

                    if (fs_v != null)
                    {
                        fs_v.Write(new byte[] { 0x00, 0x00, 0x00, 0x01 }, 0, 4); // Write Start Code
                        fs_v.Write(nal_unit, 0, nal_unit.Length);                // Write NAL
                    }

                    if (callbacks == null || fs_v == null || callbacks.buffers.Count == 0)
                    {
                        return;
                    }
                    try
                    {
                        var index  = callbacks.buffers.Pop();
                        var buffer = codec.GetInputBuffer(index);
                        buffer.Clear();
                        buffer.Put(fs_v.ToArray());
                        codec.QueueInputBuffer(index, 0, (int)fs_v.Length, 0, MediaCodecBufferFlags.None);
                        fs_v.SetLength(0);
                    }
                    catch
                    {
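                        // swallow any error for this access unit and keep processing the stream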
                    }
                }
            };

            // run the RTSP client on a separate task and keep it alive
            Task.Run(() =>
            {
                while (rtspCancel != null)
                {
                    try
                    {
                        if (rtspCancel.Token.IsCancellationRequested)
                        {
                            return;
                        }

                        c.Connect(url, RTSPClient.RTP_TRANSPORT.UDP);
                        var lastrtp = 0;
                        int cnt     = 0;
                        while (!c.StreamingFinished())
                        {
                            rtsprunning = true;
                            Thread.Sleep(500);
                            // exit if cancellation was requested
                            if (rtspCancel.Token.IsCancellationRequested)
                            {
                                c.Stop();
                                return;
                            }

                            // give up once more than five 500 ms polls have passed with no new RTP packets
                            if (lastrtp == c.rtp_count && cnt++ > 5)
                            {
                                c.Stop();
                                rtspCancel = null;
                                return;
                            }

                            lastrtp = c.rtp_count;
                        }

                        rtsprunning = false;
                    }
                    catch
                    {
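                        // swallow connection errors; the outer loop retries the RTSP connection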
                    }
                }
            });
        }
Example 9
        private void ExtractMedia()
        {
            if (MediaDecoder == null)
            {
                throw new InvalidOperationException("The Media Codec Extractor has not been initialized");
            }

            if (!isInitialized)
            {
                throw new InvalidOperationException("The Media Codec has not been initialized for a media");
            }

            var bufferInfo      = new MediaCodec.BufferInfo();
            var waitDefaultTime = TimeSpan.FromMilliseconds(10);

            MediaDecoder.Start();
            while (true)
            {
                var waitTime = waitDefaultTime; // time to wait at the end of the loop iteration

                //Process the commands
                if (ProcessCommandsAndUpdateCurrentState())
                {
                    waitTime = TimeSpan.Zero;
                }

                // terminate the thread on disposal
                if (currentState == SchedulerAsyncCommandEnum.Dispose)
                {
                    return;
                }

                //=================================================================================================
                //Extract video inputs
                if (!inputExtractionDone)
                {
                    int inputBufIndex = MediaDecoder.DequeueInputBuffer(0);
                    if (inputBufIndex >= 0)
                    {
                        waitTime = TimeSpan.Zero;
                        var inputBuffer = MediaDecoder.GetInputBuffer(inputBufIndex);

                        // Read the sample data into the ByteBuffer.  This neither respects nor updates inputBuf's position, limit, etc.
                        int chunkSize = mediaExtractor.ReadSampleData(inputBuffer, 0);
                        if (chunkSize > 0)
                        {
                            if (mediaExtractor.SampleTrackIndex != mediaTrackIndex)
                            {
                                throw new Exception($"Got media sample from track {mediaExtractor.SampleTrackIndex}, track expected {mediaTrackIndex}");
                            }

                            MediaDecoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, mediaExtractor.SampleTime, 0);
                            mediaExtractor.Advance();
                        }
                        else // End of stream -- send empty frame with EOS flag set.
                        {
                            MediaDecoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                            inputExtractionDone = true;
                        }
                    }
                    else
                    {
                        //do nothing: the input buffer queue is full (we need to output them first)
                    }
                }

                //=================================================================================================
                // Process the output buffers
                if (ShouldProcessDequeueOutput(ref waitTime))
                {
                    int indexOutput = MediaDecoder.DequeueOutputBuffer(bufferInfo, 0);
                    switch (indexOutput)
                    {
                    case (int)MediaCodecInfoState.TryAgainLater:        // decoder not ready yet (haven't processed input yet)
                    case (int)MediaCodecInfoState.OutputBuffersChanged: //deprecated: we just ignore it
                        break;

                    case (int)MediaCodecInfoState.OutputFormatChanged:
                        Logger.Verbose("decoder output format changed: " + MediaDecoder.OutputFormat.ToString());
                        break;

                    default:     // the index of the output buffer

                        if (indexOutput < 0)
                        {
                            Logger.Warning("unexpected index from decoder.dequeueOutputBuffer: " + indexOutput);
                            isEOF = true;
                            break;
                        }

                        if ((bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                        {
                            isEOF = true;
                            MediaDecoder.ReleaseOutputBuffer(indexOutput, false);
                            break;
                        }

                        MediaCurrentTime = TimeSpanExtensions.FromMicroSeconds(bufferInfo.PresentationTimeUs);

                        ProcessOutputBuffer(bufferInfo, indexOutput);

                        break;
                    }
                }

                if (waitTime > TimeSpan.Zero)
                {
                    // sleep required time to avoid active looping
                    // Note: do not sleep more than 'waitDefaultTime' to continue processing play commands
                    Utilities.Sleep(TimeSpanExtensions.Min(waitDefaultTime, waitTime));
                }
            }
        }
Example 10
        private bool ExtractSomeAudioData(out bool endOfFile)
        {
            endOfFile = extractionOutputDone;
            if (endOfFile)
            {
                return false;
            }

            var hasExtractedData = false;

            int TimeoutUs = 20000;

            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

            if (!extractionInputDone)
            {
                int inputBufIndex = audioMediaDecoder.DequeueInputBuffer(TimeoutUs);
                if (inputBufIndex >= 0)
                {
                    Java.Nio.ByteBuffer inputBuffer = audioMediaDecoder.GetInputBuffer(inputBufIndex);

                    //Read the sample data into the ByteBuffer.  This neither respects nor updates inputBuf's position, limit, etc.
                    int chunkSize = audioMediaExtractor.ReadSampleData(inputBuffer, 0);
                    if (chunkSize < 0)
                    {
                        //End of stream: send empty frame with EOS flag set
                        audioMediaDecoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                        extractionInputDone = true;
                        //Logger.Verbose("sent input EOS");
                    }
                    else
                    {
                        if (audioMediaExtractor.SampleTrackIndex != trackIndexAudio)
                        {
                            Logger.Warning(string.Format("got audio sample from track {0}, expected {1}", audioMediaExtractor.SampleTrackIndex, trackIndexAudio));
                        }

                        var presentationTimeMicroSeconds = audioMediaExtractor.SampleTime;
                        audioMediaDecoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, presentationTimeMicroSeconds, 0);

                        audioMediaExtractor.Advance();
                    }
                }
                else
                {
                    //do nothing: the input buffer queue is full (we need to output them first)
                    //continue;
                }
            }

            int decoderStatus = audioMediaDecoder.DequeueOutputBuffer(info, TimeoutUs);

            switch (decoderStatus)
            {
            case (int)MediaCodecInfoState.TryAgainLater:
            {
                Logger.Verbose("no output from decoder available");
                break;
            }

            case (int)MediaCodecInfoState.OutputFormatChanged:
            {
                MediaFormat newFormat    = audioMediaDecoder.OutputFormat;
                string      newFormatStr = newFormat.ToString();
                Logger.Verbose("audio decoder output format changed: " + newFormatStr);
                break;
            }

            case (int)MediaCodecInfoState.OutputBuffersChanged:
            {
                //deprecated: we just ignore it
                break;
            }

            default:
            {
                if (decoderStatus < 0)
                {
                    throw new InvalidOperationException(string.Format("unexpected result from audio decoder.DequeueOutputBuffer: {0}", decoderStatus));
                }

                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Logger.Verbose("audio: output EOS");
                    extractionOutputDone = true;
                }

                if (info.Size > 0)
                {
                    hasExtractedData = true;
                    var buffer           = audioMediaDecoder.GetOutputBuffer(decoderStatus);
                    var presentationTime = TimeSpanExtensions.FromMicroSeconds(info.PresentationTimeUs);

                    if (StorageBuffer.CountDataBytes + info.Size <= StorageBuffer.Data.Length)
                    {
                        buffer.Get(StorageBuffer.Data, StorageBuffer.CountDataBytes, info.Size); // Read the buffer all at once
                        buffer.Clear();                                                          // MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
                        buffer.Position(0);

                        if (StorageBuffer.CountDataBytes == 0)
                        {
                            StorageBuffer.PresentationTime = presentationTime;
                        }

                        StorageBuffer.CountDataBytes += info.Size;
                    }
                    else
                    {
                        Logger.Error("The storage buffer has reached full capacity. Current data will be dropped");
                    }
                }

                audioMediaDecoder.ReleaseOutputBuffer(decoderStatus, false);
                break;
            }
            }

            endOfFile = extractionOutputDone;
            return hasExtractedData;
        }
Example 11
        private void RtspClient_FrameReceived(object sender, RtspClientSharp.RawFrames.RawFrame e)
        {
            if (rtspCancel.Token.IsCancellationRequested)
            {
                return;
            }
            //Console.WriteLine("Got Frame " + e.ToString());

            switch (e)
            {
            case RawH264IFrame h264IFrame:
            {
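                // An I-frame arrives with its SPS/PPS: queue the parameter sets as a
                // codec-config buffer first, then queue the frame data in a second input buffer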
                if (callbacks.buffers.Count == 0)
                {
                    return;
                }
                var index  = callbacks.buffers.Pop();
                var buffer = codec.GetInputBuffer(index);
                buffer.Clear();
                // Respect the segment's Offset/Count: the backing array may be shared with other data
                buffer.Put(h264IFrame.SpsPpsSegment.Array, h264IFrame.SpsPpsSegment.Offset, h264IFrame.SpsPpsSegment.Count);
                codec.QueueInputBuffer(index, 0, h264IFrame.SpsPpsSegment.Count, 0, MediaCodecBufferFlags.CodecConfig);

                if (callbacks.buffers.Count == 0)
                {
                    return;
                }
                index  = callbacks.buffers.Pop();
                buffer = codec.GetInputBuffer(index);
                buffer.Clear();
                buffer.Put(h264IFrame.FrameSegment.Array, h264IFrame.FrameSegment.Offset, h264IFrame.FrameSegment.Count);
                codec.QueueInputBuffer(index, 0, h264IFrame.FrameSegment.Count, 0, MediaCodecBufferFlags.None);

                iframestart = false;
                break;
            }

            case RawH264PFrame h264PFrame:
            {
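                // Drop P-frames until the first I-frame (and its SPS/PPS) has been queued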
                if (iframestart)
                {
                    return;
                }
                if (callbacks.buffers.Count == 0)
                {
                    return;
                }
                var index  = callbacks.buffers.Pop();
                var buffer = codec.GetInputBuffer(index);
                buffer.Clear();
                buffer.Put(h264PFrame.FrameSegment.Array, h264PFrame.FrameSegment.Offset, h264PFrame.FrameSegment.Count);
                codec.QueueInputBuffer(index, 0, h264PFrame.FrameSegment.Count, 0, MediaCodecBufferFlags.None);
                break;
            }

            case RawJpegFrame jpegFrame:
            case RawAACFrame aacFrame:
            case RawG711AFrame g711AFrame:
            case RawG711UFrame g711UFrame:
            case RawPCMFrame pcmFrame:
            case RawG726Frame g726Frame:
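                // Audio and JPEG frames are not decoded in this example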
                break;
            }
        }