Exemplo n.º 1
0
        /// <summary>
        /// Resamples one decoded audio frame through decoder.swrCtx to the configured
        /// output sample rate/format and copies the interleaved result into
        /// mFrame.audioData.
        /// </summary>
        /// <param name="decoder">Audio decoder owning swrCtx and the reusable destination buffers.</param>
        /// <param name="mFrame">Receives the converted samples in audioData.</param>
        /// <param name="frame">Decoded source frame (input sample rate/format).</param>
        /// <returns>Last FFmpeg return code: &gt;= 0 on success, negative on failure (-1 on exception).</returns>
        public static int ProcessAudioFrame(Decoder decoder, MediaFrame mFrame, AVFrame *frame)
        {
            /* References
             *
             * https://www.programmersought.com/article/70255648018/
             * https://ffmpeg.org/doxygen/2.4/resampling__audio_8c_source.html
             *
             * Notes
             *
             * Currently output sample rate = input sample rate which means in case we change that we should review that actually works properly
             * If frame->nb_samples are known initially or by decoding on frame we can avoid av_samples_alloc_array_and_samples here and transfer it to SetupAudio
             */

            int ret = 0;

            try
            {
                int dst_nb_samples;

                // m_max_dst_nb_samples == -1 marks the destination buffers as stale
                // (first frame, or reset by the owner); (re)allocate them here.
                if (decoder.m_max_dst_nb_samples == -1)
                {
                    if (decoder.m_dst_data != null && (IntPtr)(*decoder.m_dst_data) != IntPtr.Zero)
                    {
                        // NOTE(review): this frees only plane 0's sample buffer; the
                        // pointer array allocated by av_samples_alloc_array_and_samples
                        // appears to be leaked when nulled here - TODO confirm.
                        av_freep(&decoder.m_dst_data[0]); decoder.m_dst_data = null;
                    }

                    // Worst-case output sample count for this frame at the target rate.
                    decoder.m_max_dst_nb_samples = (int)av_rescale_rnd(frame->nb_samples, decoder.opt.audio.SampleRate, decoder.codecCtx->sample_rate, AVRounding.AV_ROUND_UP);

                    fixed(byte ***dst_data = &decoder.m_dst_data)
                    fixed(int *dst_linesize = &decoder.m_dst_linesize)
                    // NOTE(review): a negative ret (allocation failure) is not checked;
                    // it would surface as an exception further down - TODO confirm.
                    ret = av_samples_alloc_array_and_samples(dst_data, dst_linesize, decoder.opt.audio.Channels, decoder.m_max_dst_nb_samples, decoder.opt.audio.SampleFormat, 0);
                }

                fixed(int *dst_linesize = &decoder.m_dst_linesize)
                {
                    // Account for samples buffered inside the resampler (swr_get_delay)
                    // when sizing this conversion's output.
                    dst_nb_samples = (int)av_rescale_rnd(swr_get_delay(decoder.swrCtx, decoder.codecCtx->sample_rate) + frame->nb_samples, decoder.opt.audio.SampleRate, decoder.codecCtx->sample_rate, AVRounding.AV_ROUND_UP);

                    // Grow the destination buffer if this frame needs more room than allocated.
                    if (dst_nb_samples > decoder.m_max_dst_nb_samples)
                    {
                        av_freep(&decoder.m_dst_data[0]);
                        ret = av_samples_alloc(decoder.m_dst_data, dst_linesize, decoder.opt.audio.Channels, (int)dst_nb_samples, decoder.opt.audio.SampleFormat, 0);
                    }

                    // swr_convert returns the number of samples actually written per channel.
                    ret = swr_convert(decoder.swrCtx, decoder.m_dst_data, dst_nb_samples, (byte **)&frame->data, frame->nb_samples);
                    if (ret < 0)
                    {
                        return(ret);
                    }

                    // Byte size of the converted data (ret samples, aligned to 1).
                    int dst_data_len = av_samples_get_buffer_size(dst_linesize, decoder.opt.audio.Channels, ret, decoder.opt.audio.SampleFormat, 1);

                    // Copy plane 0 only; presumably the output sample format is
                    // interleaved so all channels live in plane 0 - TODO confirm.
                    mFrame.audioData = new byte[dst_data_len]; Marshal.Copy((IntPtr)(*decoder.m_dst_data), mFrame.audioData, 0, mFrame.audioData.Length);    //Marshal.FreeHGlobal((IntPtr)(*m_dst_data));
                }
            } catch (Exception e) { ret = -1; Log("Error[" + (ret).ToString("D4") + "], Func: ProcessAudioFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }
Exemplo n.º 2
0
        /// <summary>
        /// Re-synchronizes playback after pausing and flushing the decoder.
        /// With ms == -1 it waits for the video decoder to expose a frame and
        /// seeks to that frame's timestamp; otherwise it seeks to the given ms.
        /// Resumes the decode loop if the context is still in PLAY.
        /// </summary>
        /// <param name="ms">Target position in milliseconds, or -1 to align to the next available video frame.</param>
        public void ReSync(long ms = -1)
        {
            if (status == Status.NOTSET)
            {
                return;
            }

            Pause();
            decoder.Flush();

            if (ms != -1)
            {
                Seek(ms);
            }
            else
            {
                // Spin until the video decoder has a peekable frame to align to.
                MediaFrame vFrame;
                while (true)
                {
                    decCtx.vDecoder.frames.TryPeek(out vFrame);
                    if (vFrame != null)
                    {
                        break;
                    }

                    Thread.Sleep(5);
                }

                if (forcePause)
                {
                    return;
                }

                Seek(vFrame.timestamp / 10000); // ticks -> ms
            }

            if (decCtx.status == Status.PLAY)
            {
                decoder.decodeARE.Set();
            }
        }
Exemplo n.º 3
0
        /// <summary>
        /// Converts a decoded AVSubtitle into a MediaFrame: extracts the text of
        /// the first rect, parses SSA styling into subStyles and computes the
        /// display duration. Bitmap subtitles are not supported.
        /// </summary>
        /// <param name="decoder">Owning subtitle decoder (logging context only).</param>
        /// <param name="mFrame">Frame that receives text, subStyles and duration.</param>
        /// <param name="sub">Decoded subtitle; should contain at least one rect.</param>
        /// <returns>0 on success, -1 on failure or unsupported subtitle type.</returns>
        public static int ProcessSubsFrame(Decoder decoder, MediaFrame mFrame, AVSubtitle *sub)
        {
            int ret = 0;

            try
            {
                // Defensive: callers are expected to have checked num_rects, but
                // avoid dereferencing rects[0] when no rects are present.
                if (sub->num_rects < 1)
                {
                    return(-1);
                }

                string           line  = "";
                AVSubtitleRect **rects = sub->rects;
                AVSubtitleRect * cur   = rects[0];

                switch (cur->type)
                {
                case AVSubtitleType.SUBTITLE_ASS:
                    line = Utils.BytePtrToStringUTF8(cur->ass);
                    break;

                case AVSubtitleType.SUBTITLE_TEXT:
                    // Plain-text subtitles carry their payload in 'text';
                    // 'ass' is only populated for ASS subtitles.
                    line = Utils.BytePtrToStringUTF8(cur->text);
                    break;

                case AVSubtitleType.SUBTITLE_BITMAP:
                    Log("Subtitles BITMAP -> Not Implemented yet");

                    return(-1);
                }

                mFrame.text      = Subtitles.SSAtoSubStyles(line, out List <OSDMessage.SubStyle> subStyles);
                mFrame.subStyles = subStyles;
                // Display times are in ms relative to the subtitle's pts.
                mFrame.duration  = (int)(sub->end_display_time - sub->start_display_time);

                //Log("SUBS ......... " + Utils.TicksToTime(mFrame.timestamp));
            } catch (Exception e) { ret = -1; Log("Error[" + (ret).ToString("D4") + "], Func: ProcessSubsFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }
Exemplo n.º 4
0
        /// <summary>
        /// Reads packets from the demuxer until one video frame is decoded and
        /// enqueued, returning that frame's timestamp. Audio/subtitle packets
        /// encountered on the way are handed (with ownership) to their decoders'
        /// packet queues.
        /// </summary>
        /// <returns>The first decoded video frame's timestamp, or -1 on read/send failure or interrupt.</returns>
        public long GetVideoFrame()
        {
            int  ret;
            long firstTs = -1;

            while (interrupt != 1)
            {
                AVPacket *pkt = av_packet_alloc();
                ret = av_read_frame(demuxer.fmtCtx, pkt);
                if (ret != 0)
                {
                    av_packet_free(&pkt); // was leaked when av_read_frame failed
                    return(-1);
                }

                // Drop packets from streams we are not decoding.
                if (!demuxer.enabledStreams.Contains(pkt->stream_index))
                {
                    av_packet_free(&pkt);
                    continue;
                }

                switch (demuxer.fmtCtx->streams[pkt->stream_index]->codecpar->codec_type)
                {
                case AVMEDIA_TYPE_AUDIO:
                    // Ownership of pkt transfers to the audio decoder's queue.
                    aDecoder.packets.Enqueue((IntPtr)pkt);

                    break;

                case AVMEDIA_TYPE_VIDEO:
                    lock (device)
                        ret = avcodec_send_packet(vDecoder.codecCtx, pkt);
                    av_packet_free(&pkt);

                    if (ret != 0)
                    {
                        return(-1);
                    }

                    // Drain all frames produced by this packet.
                    while (interrupt != 1)
                    {
                        AVFrame *frame = av_frame_alloc();
                        lock (device)
                            ret = avcodec_receive_frame(vDecoder.codecCtx, frame);

                        if (ret == 0)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            // Prefer best_effort_timestamp; fall back to pts when unset.
                            mFrame.pts       = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;
                            mFrame.timestamp = ((long)(mFrame.pts * vDecoder.info.Timebase) - demuxer.streams[vDecoder.st->index].StartTime) + opt.audio.LatencyTicks;

                            if (mFrame.pts == AV_NOPTS_VALUE)
                            {
                                av_frame_free(&frame);
                                continue;
                            }

                            if (firstTs == -1)
                            {
                                // First software frame while hw accel was expected =>
                                // the codec fell back to software decoding.
                                if (vDecoder.hwAccelSuccess && frame->hw_frames_ctx == null)
                                {
                                    vDecoder.hwAccelSuccess = false;
                                }
                                firstTs = mFrame.timestamp;
                            }

                            if (MediaFrame.ProcessVideoFrame(vDecoder, mFrame, frame) != 0)
                            {
                                mFrame = null;
                            }
                            if (mFrame != null)
                            {
                                vDecoder.frames.Enqueue(mFrame);
                            }

                            //Log(Utils.TicksToTime((long)(mFrame.pts * avs.streams[video.st->index].timebase)));

                            av_frame_free(&frame);
                            continue;
                        }

                        av_frame_free(&frame);
                        break;
                    }

                    break;

                case AVMEDIA_TYPE_SUBTITLE:
                    // Ownership of pkt transfers to the subtitle decoder's queue.
                    sDecoder.packets.Enqueue((IntPtr)pkt);

                    break;

                default:
                    av_packet_free(&pkt);
                    break;
                }

                if (firstTs != -1)
                {
                    break;
                }
            }

            return(firstTs);
        }
Exemplo n.º 5
0
        /// <summary>
        /// Main loop of this decoder's thread. Blocks on decodeARE, then pulls
        /// packets from the demuxer queue and decodes them — audio/video via the
        /// avcodec send/receive API, subtitles via avcodec_decode_subtitle2 —
        /// enqueuing the resulting MediaFrames until paused, drained (EOF) or
        /// too many consecutive errors occur.
        /// </summary>
        public void Decode()
        {
            //int xf = 0;
            AVPacket *pkt;

            while (true)
            {
                if (status != Status.END)
                {
                    status = Status.READY;
                }
                decodeARE.Reset();
                decodeARE.WaitOne();
                status     = Status.PLAY;
                forcePause = false;
                bool shouldStop    = false;
                int  allowedErrors = decCtx.opt.demuxer.MaxErrors;
                int  ret           = -1;

                Log("Started");

                // Wait for demuxer to come up
                if (demuxer.status == Status.READY)
                {
                    demuxer.demuxARE.Set();
                    while (!demuxer.isPlaying && demuxer.status != Status.END)
                    {
                        Thread.Sleep(1);
                    }
                }

                while (true)
                {
                    // No Packets || Max Frames Brakes
                    if (packets.Count == 0 ||
                        (type == Type.Audio && frames.Count > decCtx.opt.audio.MaxDecodedFrames) ||
                        (type == Type.Video && frames.Count > decCtx.opt.video.MaxDecodedFrames) ||
                        (type == Type.Subs && frames.Count > decCtx.opt.subs.MaxDecodedFrames))
                    {
                        shouldStop = false;
                        //isWaiting   = true;

                        do
                        {
                            if (!decCtx.isPlaying || forcePause) // Proper Pause
                            {
                                Log("Pausing"); shouldStop = true; break;
                            }
                            else if (packets.Count == 0 && demuxer.status == Status.END) // Drain
                            {
                                Log("Draining"); break;
                            }
                            //else if (packets.Count == 0 && (!demuxer.isPlaying || demuxer.isWaiting)) // No reason to run
                            else if (packets.Count == 0 && (!demuxer.isPlaying || ((!isEmbedded || type == Type.Video) && demuxer.isWaiting))) // No reason to run
                            {
                                Log("Exhausted " + isPlaying); shouldStop = true; break;
                            }

                            Thread.Sleep(10);
                        } while (packets.Count == 0 ||
                                 (type == Type.Audio && frames.Count > decCtx.opt.audio.MaxDecodedFrames) ||
                                 (type == Type.Video && frames.Count > decCtx.opt.video.MaxDecodedFrames) ||
                                 (type == Type.Subs && frames.Count > decCtx.opt.subs.MaxDecodedFrames));

                        //isWaiting = false;
                        if (shouldStop)
                        {
                            break;
                        }
                    }

                    if (packets.Count == 0 && demuxer.status == Status.END)
                    {
                        if (type == Type.Video)
                        {
                            // Check case pause while draining
                            Log("Draining...");
                            pkt = null; // null packet puts the codec into drain mode
                        }
                        else
                        {
                            status = Status.END;
                            Log("EOF");
                            break;
                        }
                    }
                    else
                    {
                        packets.TryDequeue(out IntPtr pktPtr);
                        pkt = (AVPacket *)pktPtr;

                        if (type == Type.Subs)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            mFrame.pts       = pkt->pts;
                            mFrame.timestamp = (long)((mFrame.pts * info.Timebase)) + opt.audio.LatencyTicks + opt.subs.DelayTicks;
                            //Log(Utils.TicksToTime((long)(mFrame.pts * demuxer.streams[st->index].timebase)) + " | pts -> " + mFrame.pts);
                            //xf++;

                            if (mFrame.pts == AV_NOPTS_VALUE)
                            {
                                av_packet_free(&pkt);
                                continue;
                            }

                            int        gotFrame = 0;
                            AVSubtitle sub      = new AVSubtitle();

                            // drain mode todo
                            // pkt->data set to NULL && pkt->size = 0 until it stops returning subtitles
                            ret = avcodec_decode_subtitle2(codecCtx, &sub, &gotFrame, pkt);
                            if (ret < 0)
                            {
                                allowedErrors--;
                                Log($"[ERROR-2] {Utils.ErrorCodeToMsg(ret)} ({ret})");

                                av_packet_free(&pkt); // was leaked on decode errors

                                if (allowedErrors == 0)
                                {
                                    Log("[ERROR-0] Too many errors!");
                                    break;
                                }

                                continue;
                            }

                            if (gotFrame < 1 || sub.num_rects < 1)
                            {
                                // Release whatever the decoder allocated plus the
                                // packet (both were leaked here before).
                                if (gotFrame > 0)
                                {
                                    avsubtitle_free(&sub);
                                }
                                av_packet_free(&pkt);
                                continue;
                            }

                            MediaFrame.ProcessSubsFrame(this, mFrame, &sub);

                            frames.Enqueue(mFrame);
                            avsubtitle_free(&sub);
                            av_packet_free(&pkt);

                            continue;
                        }
                    }

                    lock (demuxer.decCtx.device)
                        ret = avcodec_send_packet(codecCtx, pkt);

                    if (ret != 0 && ret != AVERROR(EAGAIN))
                    {
                        if (ret == AVERROR_EOF)
                        {
                            status = Status.END;
                            Log("EOF");
                            av_packet_free(&pkt); // no-op when pkt is null (drain)
                            break;
                        }
                        else
                        //if (ret == AVERROR_INVALIDDATA) // We also get Error number -16976906 occurred
                        {
                            allowedErrors--;
                            Log($"[ERROR-2] {Utils.ErrorCodeToMsg(ret)} ({ret})");

                            av_packet_free(&pkt); // was leaked on send errors

                            if (allowedErrors == 0)
                            {
                                Log("[ERROR-0] Too many errors!");
                                break;
                            }

                            continue;
                        }
                    }

                    // NOTE(review): when avcodec_send_packet returns AVERROR(EAGAIN)
                    // the packet was NOT consumed, so freeing it here drops it.
                    // Likely rare since the receive loop below drains the codec
                    // fully before the next send - TODO confirm.
                    av_packet_free(&pkt);

                    // Drain all frames produced by the packet just sent.
                    while (true)
                    {
                        lock (demuxer.decCtx.device)
                            ret = avcodec_receive_frame(codecCtx, frame);

                        if (ret == 0)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            // Prefer best_effort_timestamp; fall back to pts when unset.
                            mFrame.pts = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;

                            if (mFrame.pts == AV_NOPTS_VALUE)
                            {
                                av_frame_unref(frame);
                                continue;
                            }

                            //Log(Utils.TicksToTime((long)(mFrame.pts * demuxer.streams[st->index].Timebase)) + " | pts -> " + mFrame.pts);

                            if (type == Type.Video)
                            {
                                // Software frame while hw accel was expected =>
                                // the codec fell back to software decoding.
                                if (hwAccelSuccess && frame->hw_frames_ctx == null)
                                {
                                    hwAccelSuccess = false;
                                }
                                mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - demuxer.streams[st->index].StartTime) + opt.audio.LatencyTicks;
                                if (MediaFrame.ProcessVideoFrame(this, mFrame, frame) != 0)
                                {
                                    mFrame = null;
                                }
                            }
                            else // Audio
                            {
                                mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - demuxer.streams[st->index].StartTime) + opt.audio.DelayTicks + (demuxer.streams[st->index].StartTime - demuxer.decCtx.vDecoder.info.StartTime);
                                if (MediaFrame.ProcessAudioFrame(this, mFrame, frame) < 0)
                                {
                                    mFrame = null;
                                }
                            }

                            if (mFrame != null)
                            {
                                frames.Enqueue(mFrame);
                                //xf++;
                            }

                            av_frame_unref(frame);
                            continue;
                        }

                        av_frame_unref(frame);
                        break;
                    }

                    if (ret == AVERROR_EOF)
                    {
                        status = Status.END;
                        Log("EOF");
                        // Flag the whole context as ended once the sibling decoder
                        // is no longer playing either.
                        if (type == Type.Video && decCtx.aDecoder.status != Status.PLAY)
                        {
                            Log("EOF All"); decCtx.status = Status.END;
                        }
                        else if (type == Type.Audio && decCtx.vDecoder.status != Status.PLAY)
                        {
                            Log("EOF All"); decCtx.status = Status.END;
                        }
                        break;
                    }

                    if (ret != AVERROR(EAGAIN))
                    {
                        Log($"[ERROR-3] {Utils.ErrorCodeToMsg(ret)} ({ret})"); break;
                    }
                }

                Log($"Done {(allowedErrors == decCtx.opt.demuxer.MaxErrors ? "" : $"[Errors: {decCtx.opt.demuxer.MaxErrors - allowedErrors}]")}");
            }
        }
Exemplo n.º 6
0
        /// <summary>
        /// Uploads one decoded video frame into Direct3D textures on mFrame:
        /// hardware frames are copied GPU-side into textureHW, YUV420P software
        /// frames become three single-channel planes (textureY/U/V), and any
        /// other software format is converted to the configured pixel format
        /// via sws_scale into textureRGB.
        /// </summary>
        /// <param name="decoder">Video decoder owning the device, codec context and scaler state.</param>
        /// <param name="mFrame">Frame that receives the created texture(s).</param>
        /// <param name="frame">Decoded source frame.</param>
        /// <returns>0 on success, -1 on failure or unsupported path.</returns>
        public static int ProcessVideoFrame(Decoder decoder, MediaFrame mFrame, AVFrame *frame)
        {
            int ret = 0;

            try
            {
                // Hardware Frame (NV12|P010)   | CopySubresourceRegion FFmpeg -> textureHW -> VideoProcessBlt RGBA
                if (decoder.hwAccelSuccess)
                {
                    // data[0] is the D3D texture pointer, data[1] the subresource index.
                    decoder.textureFFmpeg     = new Texture2D((IntPtr)frame->data.ToArray()[0]);
                    decoder.textDescHW.Format = decoder.textureFFmpeg.Description.Format;
                    mFrame.textureHW          = new Texture2D(decoder.decCtx.device, decoder.textDescHW);

                    lock (decoder.decCtx.device)
                        decoder.decCtx.device.ImmediateContext.CopySubresourceRegion(decoder.textureFFmpeg, (int)frame->data.ToArray()[1], new ResourceRegion(0, 0, 0, mFrame.textureHW.Description.Width, mFrame.textureHW.Description.Height, 1), mFrame.textureHW, 0);

                    return(ret);
                }

                // Software Frame (YUV420P)     | YUV byte* -> Device YUV (srv R8 * 3) -> PixelShader YUV->RGBA
                else if (frame->format == (int)AVPixelFormat.AV_PIX_FMT_YUV420P)
                {
                    decoder.textDescYUV.Width  = decoder.codecCtx->width;
                    decoder.textDescYUV.Height = decoder.codecCtx->height;

                    // Chroma planes are half height in 4:2:0, hence height / 2.
                    DataStream dsY = new DataStream(frame->linesize.ToArray()[0] * decoder.codecCtx->height, true, true);
                    DataStream dsU = new DataStream(frame->linesize.ToArray()[1] * decoder.codecCtx->height / 2, true, true);
                    DataStream dsV = new DataStream(frame->linesize.ToArray()[2] * decoder.codecCtx->height / 2, true, true);

                    DataBox dbY = new DataBox();
                    DataBox dbU = new DataBox();
                    DataBox dbV = new DataBox();

                    dbY.DataPointer = dsY.DataPointer;
                    dbU.DataPointer = dsU.DataPointer;
                    dbV.DataPointer = dsV.DataPointer;

                    // Row pitch must match FFmpeg's per-plane stride, not the width.
                    dbY.RowPitch = frame->linesize.ToArray()[0];
                    dbU.RowPitch = frame->linesize.ToArray()[1];
                    dbV.RowPitch = frame->linesize.ToArray()[2];

                    dsY.WriteRange((IntPtr)frame->data.ToArray()[0], dsY.Length);
                    dsU.WriteRange((IntPtr)frame->data.ToArray()[1], dsU.Length);
                    dsV.WriteRange((IntPtr)frame->data.ToArray()[2], dsV.Length);

                    // Luma plane at full size, then shrink the shared descriptor
                    // for the half-resolution chroma planes.
                    mFrame.textureY            = new Texture2D(decoder.decCtx.device, decoder.textDescYUV, new DataBox[] { dbY });
                    decoder.textDescYUV.Width  = decoder.codecCtx->width / 2;
                    decoder.textDescYUV.Height = decoder.codecCtx->height / 2;

                    mFrame.textureU = new Texture2D(decoder.decCtx.device, decoder.textDescYUV, new DataBox[] { dbU });
                    mFrame.textureV = new Texture2D(decoder.decCtx.device, decoder.textDescYUV, new DataBox[] { dbV });

                    // NOTE(review): these DataStreams are not disposed when an
                    // exception is thrown above (no finally) - TODO confirm.
                    Utilities.Dispose(ref dsY);
                    Utilities.Dispose(ref dsU);
                    Utilities.Dispose(ref dsV);
                }

                // Software Frame (OTHER/sws_scale) | X byte* -> Sws_Scale RGBA -> Device RGA
                else if (!decoder.hwAccelSuccess)
                {
                    // Lazily create the scaler and its destination buffer on first use.
                    if (decoder.swsCtx == null)
                    {
                        decoder.outData       = new byte_ptrArray4();
                        decoder.outLineSize   = new int_array4();
                        decoder.outBufferSize = av_image_get_buffer_size(decoder.opt.video.PixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);
                        Marshal.FreeHGlobal(decoder.outBufferPtr);
                        decoder.outBufferPtr = Marshal.AllocHGlobal(decoder.outBufferSize);

                        av_image_fill_arrays(ref decoder.outData, ref decoder.outLineSize, (byte *)decoder.outBufferPtr, decoder.opt.video.PixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);

                        int vSwsOptFlags = decoder.opt.video.SwsHighQuality ? DecoderContext.SCALING_HQ : DecoderContext.SCALING_LQ;
                        decoder.swsCtx = sws_getContext(decoder.codecCtx->coded_width, decoder.codecCtx->coded_height, decoder.codecCtx->pix_fmt, decoder.codecCtx->width, decoder.codecCtx->height, decoder.opt.video.PixelFormat, vSwsOptFlags, null, null, null);
                        if (decoder.swsCtx == null)
                        {
                            Log($"[ProcessVideoFrame|RGB] [ERROR-1] Failed to allocate SwsContext"); return(ret);
                        }
                    }

                    sws_scale(decoder.swsCtx, frame->data, frame->linesize, 0, frame->height, decoder.outData, decoder.outLineSize);

                    DataStream ds = new DataStream(decoder.outLineSize[0] * decoder.codecCtx->height, true, true);
                    DataBox    db = new DataBox();

                    db.DataPointer = ds.DataPointer;
                    db.RowPitch    = decoder.outLineSize[0];
                    ds.WriteRange((IntPtr)decoder.outData.ToArray()[0], ds.Length);

                    mFrame.textureRGB = new Texture2D(decoder.decCtx.device, decoder.textDescRGB, new DataBox[] { db });
                    Utilities.Dispose(ref ds);
                }

                return(ret);
            } catch (Exception e) { ret = -1;  Log("Error[" + (ret).ToString("D4") + "], Func: ProcessVideoFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }