Code example #1
File: MediaFrame.cs Project: midnite8177/Flyleaf
        public static int ProcessAudioFrame(Decoder decoder, MediaFrame mFrame, AVFrame *frame)
        {
            /* References
             *
             * https://www.programmersought.com/article/70255648018/
             * https://ffmpeg.org/doxygen/2.4/resampling__audio_8c_source.html
             *
             * Notes
             *
             * The output sample rate currently equals the input sample rate; if that ever changes, this code should be reviewed to make sure it still works properly.
             * If frame->nb_samples were known up front (or after decoding one frame), av_samples_alloc_array_and_samples could be avoided here and moved into SetupAudio.
             */

            int ret = 0;

            try
            {
                int dst_nb_samples;

                if (decoder.m_max_dst_nb_samples == -1)
                {
                    if (decoder.m_dst_data != null && (IntPtr)(*decoder.m_dst_data) != IntPtr.Zero)
                    {
                        av_freep(&decoder.m_dst_data[0]); decoder.m_dst_data = null;
                    }

                    decoder.m_max_dst_nb_samples = (int)av_rescale_rnd(frame->nb_samples, decoder.codecCtx->sample_rate, decoder.codecCtx->sample_rate, AVRounding.AV_ROUND_UP);

                    fixed(byte ***dst_data = &decoder.m_dst_data)
                    fixed(int *dst_linesize = &decoder.m_dst_linesize)
                    ret = av_samples_alloc_array_and_samples(dst_data, dst_linesize, Decoder.AOutChannels, decoder.m_max_dst_nb_samples, Decoder.AOutSampleFormat, 0);
                }

                fixed(int *dst_linesize = &decoder.m_dst_linesize)
                {
                    dst_nb_samples = (int)av_rescale_rnd(swr_get_delay(decoder.swrCtx, decoder.codecCtx->sample_rate) + frame->nb_samples, decoder.codecCtx->sample_rate, decoder.codecCtx->sample_rate, AVRounding.AV_ROUND_UP);

                    if (dst_nb_samples > decoder.m_max_dst_nb_samples)
                    {
                        av_freep(&decoder.m_dst_data[0]);
                        ret = av_samples_alloc(decoder.m_dst_data, dst_linesize, Decoder.AOutChannels, (int)dst_nb_samples, Decoder.AOutSampleFormat, 0);
                    }

                    ret = swr_convert(decoder.swrCtx, decoder.m_dst_data, dst_nb_samples, (byte **)&frame->data, frame->nb_samples);
                    if (ret < 0)
                    {
                        return(ret);
                    }

                    int dst_data_len = av_samples_get_buffer_size(dst_linesize, Decoder.AOutChannels, ret, Decoder.AOutSampleFormat, 1);

                    mFrame.audioData = new byte[dst_data_len];
                    Marshal.Copy((IntPtr)(*decoder.m_dst_data), mFrame.audioData, 0, mFrame.audioData.Length);
                }
            } catch (Exception e) { ret = -1; Log("Error[" + (ret).ToString("D4") + "], Func: ProcessAudioFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }
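
For reference, the dst_nb_samples computation above follows FFmpeg's resampling_audio.c example. A minimal sketch of the general form, assuming an initialized SwrContext and hypothetical inRate/outRate locals (in the method above both are codecCtx->sample_rate, which turns the rescale into a pass-through):

        // Worst-case output sample count: samples still buffered inside the
        // resampler (swr_get_delay, reported in 1/inRate units) plus this
        // frame's samples, rescaled to the output rate and rounded up.
        long delay          = swr_get_delay(swrCtx, inRate);
        int  dst_nb_samples = (int)av_rescale_rnd(delay + frame->nb_samples,
                                                  outRate, inRate, AVRounding.AV_ROUND_UP);
        // With outRate == inRate this reduces to delay + frame->nb_samples.
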
Code example #2
        public void ReSync(long ms = -1)
        {
            if (status == Status.None)
            {
                return;
            }

            Pause();
            decoder.Flush();

            if (ms == -1)
            {
                MediaFrame vFrame = null;
                do
                {
                    decCtx.vDecoder.frames.TryPeek(out vFrame);
                    if (vFrame != null)
                    {
                        break;
                    }
                    else
                    {
                        Thread.Sleep(5);
                    }
                } while (vFrame == null && !forcePause); // stop waiting once a forced pause is requested

                if (forcePause)
                {
                    return;
                }
                Seek(vFrame.timestamp / 10000); // timestamp is in 100 ns ticks, Seek() takes milliseconds
            }
            else
            {
                Seek(ms);
            }

            if (decCtx.status == Status.Playing)
            {
                decoder.decodeARE.Set();
            }
        }
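
A note on the time units used here and throughout the other examples: frame timestamps are kept in .NET ticks (100 ns), which is why ReSync divides by 10000 to get milliseconds for Seek(). A hypothetical helper, not part of the project, showing the pts-to-ticks conversion that info.Timebase appears to encapsulate (AVRational comes from FFmpeg.AutoGen):

        // One second is 10,000,000 ticks; one pts unit is time_base.num/den seconds.
        static long PtsToTicks(long pts, AVRational timeBase)
            => (long)(pts * (10_000_000.0 * timeBase.num / timeBase.den));
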
Code example #3
File: MediaFrame.cs Project: midnite8177/Flyleaf
        public static int ProcessSubsFrame(Decoder decoder, MediaFrame mFrame, AVSubtitle *sub)
        {
            int ret = 0;

            try
            {
                string           line = "";
                byte[]           buffer;
                AVSubtitleRect **rects = sub->rects;
                AVSubtitleRect * cur   = rects[0];

                switch (cur->type)
                {
                case AVSubtitleType.SUBTITLE_ASS:
                    buffer = new byte[1024];
                    line   = Utils.BytePtrToStringUTF8(cur->ass);
                    break;

                case AVSubtitleType.SUBTITLE_TEXT:
                    buffer = new byte[1024];
                    line   = Utils.BytePtrToStringUTF8(cur->ass);

                    break;

                case AVSubtitleType.SUBTITLE_BITMAP:
                    Log("Subtitles BITMAP -> Not Implemented yet");

                    return(-1);
                }

                mFrame.text      = SSAtoSubStyles(line, out List <SubStyle> subStyles);
                mFrame.subStyles = subStyles;
                mFrame.duration  = (int)(sub->end_display_time - sub->start_display_time);

                //Log("SUBS ......... " + Utils.TicksToTime(mFrame.timestamp));
            } catch (Exception e) { ret = -1; Log("Error[" + (ret).ToString("D4") + "], Func: ProcessSubsFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }
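
For context on what SSAtoSubStyles has to deal with: in recent FFmpeg versions the rect->ass payload is an ASS event line of the form "ReadOrder,Layer,Style,Name,MarginL,MarginR,MarginV,Effect,Text" (older versions prefixed the dialogue header and timing fields). A hypothetical helper, not the project's parser, that extracts just the text portion under that assumption:

        // Split on the first 8 commas only, so commas inside the text survive.
        // Override tags such as {\i1} remain embedded in the returned string.
        static string AssEventText(string ass)
        {
            string[] parts = ass.Split(new[] { ',' }, 9);
            return parts.Length == 9 ? parts[8] : ass;
        }
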
Code example #4
        public void Decode()
        {
            //int xf = 0;
            AVPacket *pkt;

            while (!stopThread)
            {
                if (status != Status.Ended)
                {
                    status = Status.Paused;
                }
                decodeARE.Reset();
                decodeARE.WaitOne();
                if (stopThread)
                {
                    stopThread = false; break;
                }
                forcePause = false;
                status     = Status.Playing;
                bool shouldStop    = false;
                int  allowedErrors = decCtx.cfg.decoder.MaxErrors;
                int  ret           = -1;

                Log("Started");

                // Wait for demuxer to come up
                if (demuxer.status == Status.Paused)
                {
                    demuxer.demuxARE.Set();
                    while (!demuxer.isPlaying && demuxer.status != Status.Ended && !forcePause && decCtx.isPlaying)
                    {
                        Thread.Sleep(1);
                    }
                }

                while (!stopThread)
                {
                    // No packets available, or max queued frames reached
                    if (packets.Count == 0 ||
                        (type == MediaType.Audio && frames.Count > decCtx.cfg.decoder.MaxAudioFrames) ||
                        (type == MediaType.Video && frames.Count > decCtx.cfg.decoder.MaxVideoFrames) ||
                        (type == MediaType.Subs && frames.Count > decCtx.cfg.decoder.MaxSubsFrames))
                    {
                        shouldStop = false;
                        //isWaiting   = true;

                        do
                        {
                            if (!decCtx.isPlaying || forcePause) // Proper Pause
                            {
                                Log("Pausing"); shouldStop = true; break;
                            }
                            else if (packets.Count == 0 && demuxer.status == Status.Ended) // Drain
                            {
                                Log("Draining"); break;
                            }
                            //else if (packets.Count == 0 && (!demuxer.isPlaying || demuxer.isWaiting)) // No reason to run
                            else if (packets.Count == 0 && (!demuxer.isPlaying || ((!isEmbedded || type == MediaType.Video) && demuxer.isWaiting))) // No reason to run
                            {
                                Log("Exhausted " + isPlaying); shouldStop = true; break;
                            }

                            Thread.Sleep(10);
                        } while (packets.Count == 0 ||
                                 (type == MediaType.Audio && frames.Count > decCtx.cfg.decoder.MaxAudioFrames) ||
                                 (type == MediaType.Video && frames.Count > decCtx.cfg.decoder.MaxVideoFrames) ||
                                 (type == MediaType.Subs && frames.Count > decCtx.cfg.decoder.MaxSubsFrames));

                        //isWaiting = false;
                        if (shouldStop)
                        {
                            break;
                        }
                    }

                    if (!decCtx.isPlaying || forcePause)
                    {
                        break;
                    }

                    if (packets.Count == 0 && demuxer.status == Status.Ended)
                    {
                        if (type == MediaType.Video)
                        {
                            // Check case pause while draining
                            Log("Draining...");
                            pkt = null;
                        }
                        else
                        {
                            status = Status.Ended;
                            Log("EOF");
                            break;
                        }
                    }
                    else
                    {
                        packets.TryDequeue(out IntPtr pktPtr);
                        pkt = (AVPacket *)pktPtr;

                        if (type == MediaType.Subs)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            mFrame.pts       = pkt->pts;
                            mFrame.timestamp = (long)((mFrame.pts * info.Timebase)) + decCtx.cfg.audio.LatencyTicks + decCtx.cfg.subs.DelayTicks;
                            //Log(Utils.TicksToTime((long)(mFrame.pts * demuxer.streams[st->index].timebase)) + " | pts -> " + mFrame.pts);
                            //xf++;

                            if (mFrame.pts == AV_NOPTS_VALUE)
                            {
                                av_packet_free(&pkt);
                                continue;
                            }

                            int        gotFrame = 0;
                            AVSubtitle sub      = new AVSubtitle();

                            // drain mode todo
                            // pkt->data set to NULL && pkt->size = 0 until it stops returning subtitles
                            ret = avcodec_decode_subtitle2(codecCtx, &sub, &gotFrame, pkt);
                            if (ret < 0)
                            {
                                allowedErrors--;
                                Log($"[ERROR-2] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");

                                if (allowedErrors == 0)
                                {
                                    Log("[ERROR-0] Too many errors!");
                                    break;
                                }

                                continue;
                            }

                            if (gotFrame < 1 || sub.num_rects < 1)
                            {
                                continue;
                            }

                            MediaFrame.ProcessSubsFrame(this, mFrame, &sub);

                            frames.Enqueue(mFrame);
                            avsubtitle_free(&sub);
                            av_packet_free(&pkt);

                            continue;
                        }
                    }

                    ret = avcodec_send_packet(codecCtx, pkt);

                    if (ret != 0 && ret != AVERROR(EAGAIN))
                    {
                        if (ret == AVERROR_EOF)
                        {
                            status = Status.Ended;
                            Log("EOF");
                            break;
                        }
                        else
                        //if (ret == AVERROR_INVALIDDATA) // We also get Error number -16976906 occurred
                        {
                            allowedErrors--;
                            Log($"[ERROR-2] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");

                            if (allowedErrors == 0)
                            {
                                Log("[ERROR-0] Too many errors!");
                                break;
                            }

                            continue;
                        }
                    }

                    av_packet_free(&pkt);

                    while (true)
                    {
                        ret = avcodec_receive_frame(codecCtx, frame);

                        if (ret == 0)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            mFrame.pts = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;

                            if (mFrame.pts == AV_NOPTS_VALUE)
                            {
                                av_frame_unref(frame);
                                continue;
                            }

                            //Log(Utils.TicksToTime((long)(mFrame.pts * demuxer.streams[st->index].Timebase)) + " | pts -> " + mFrame.pts);

                            if (type == MediaType.Video)
                            {
                                if (hwAccelSuccess && frame->hw_frames_ctx == null)
                                {
                                    Log("HW Acceleration Failed 2");
                                    hwAccelSuccess = false;
                                    decCtx.renderer.FrameResized();
                                }
                                mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - info.StartTime) + decCtx.cfg.audio.LatencyTicks;
                                if (MediaFrame.ProcessVideoFrame(this, mFrame, frame) != 0)
                                {
                                    mFrame = null;
                                }
                            }
                            else // Audio
                            {
                                mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - info.StartTime) + decCtx.cfg.audio.DelayTicks + (info.StartTime - demuxer.decCtx.vDecoder.info.StartTime);
                                if (MediaFrame.ProcessAudioFrame(this, mFrame, frame) < 0)
                                {
                                    mFrame = null;
                                }
                            }

                            if (mFrame != null)
                            {
                                frames.Enqueue(mFrame);
                                //xf++;
                            }

                            av_frame_unref(frame);
                            continue;
                        }

                        av_frame_unref(frame);
                        break;
                    }

                    if (ret == AVERROR_EOF)
                    {
                        status = Status.Ended;
                        Log("EOF");
                        if (type == MediaType.Video && decCtx.aDecoder.status != Status.Playing)
                        {
                            Log("EOF All"); decCtx.status = Status.Ended;
                        }
                        else if (type == MediaType.Audio && decCtx.vDecoder.status != Status.Playing)
                        {
                            Log("EOF All"); decCtx.status = Status.Ended;
                        }
                        break;
                    }

                    if (ret != AVERROR(EAGAIN))
                    {
                        Log($"[ERROR-3] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})"); break;
                    }
                }

                Log($"Done {(allowedErrors == decCtx.cfg.decoder.MaxErrors ? "" : $"[Errors: {decCtx.cfg.decoder.MaxErrors - allowedErrors}]")}");
            }
        }
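
The loop above is FFmpeg's standard send/receive decoding pattern with the project's queueing and error bookkeeping layered on top. A stripped-down sketch of just that pattern, assuming codecCtx, pkt and frame are already allocated (FFmpeg.AutoGen call names):

        // Feed one packet, then drain every frame the decoder can produce from it.
        int ret = avcodec_send_packet(codecCtx, pkt); // pkt == null enters drain mode
        while (ret >= 0)
        {
            ret = avcodec_receive_frame(codecCtx, frame);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                break; // needs more input, or fully drained

            // ... hand the decoded frame off ...
            av_frame_unref(frame);
        }
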
Code example #5
File: MediaFrame.cs Project: midnite8177/Flyleaf
        public static int ProcessVideoFrame(Decoder decoder, MediaFrame mFrame, AVFrame *frame)
        {
            int ret = 0;

            try
            {
                // Hardware Frame (NV12|P010)   | CopySubresourceRegion FFmpeg Texture Array -> Device Texture[1] (NV12|P010) / SRV (RX_RXGX) -> PixelShader (Y_UV)
                if (decoder.hwAccelSuccess)
                {
                    decoder.textureFFmpeg   = new Texture2D((IntPtr)frame->data.ToArray()[0]);
                    decoder.textDesc.Format = decoder.textureFFmpeg.Description.Format;
                    mFrame.textures         = new Texture2D[1];
                    mFrame.textures[0]      = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc);
                    decoder.decCtx.renderer.device.ImmediateContext.CopySubresourceRegion(decoder.textureFFmpeg, (int)frame->data.ToArray()[1], new ResourceRegion(0, 0, 0, mFrame.textures[0].Description.Width, mFrame.textures[0].Description.Height, 1), mFrame.textures[0], 0);

                    return(ret);
                }

                // Software Frame (8-bit YUV)   | YUV byte* -> Device Texture[3] (RX) / SRV (RX_RX_RX) -> PixelShader (Y_U_V)
                else if (decoder.info.PixelFormatType == PixelFormatType.Software_Handled)
                {
                    mFrame.textures = new Texture2D[3];

                    // YUV Planar [Y0 ...] [U0 ...] [V0 ....]
                    if (decoder.info.IsPlanar)
                    {
                        DataBox db = new DataBox();
                        db.DataPointer     = (IntPtr)frame->data.ToArray()[0];
                        db.RowPitch        = frame->linesize.ToArray()[0];
                        mFrame.textures[0] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc, new DataBox[] { db });

                        db                 = new DataBox();
                        db.DataPointer     = (IntPtr)frame->data.ToArray()[1];
                        db.RowPitch        = frame->linesize.ToArray()[1];
                        mFrame.textures[1] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { db });

                        db                 = new DataBox();
                        db.DataPointer     = (IntPtr)frame->data.ToArray()[2];
                        db.RowPitch        = frame->linesize.ToArray()[2];
                        mFrame.textures[2] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { db });
                    }

                    // YUV Packed ([Y0U0Y1V0] ....)
                    else
                    {
                        DataStream dsY = new DataStream(decoder.textDesc.Width * decoder.textDesc.Height, true, true);
                        DataStream dsU = new DataStream(decoder.textDescUV.Width * decoder.textDescUV.Height, true, true);
                        DataStream dsV = new DataStream(decoder.textDescUV.Width * decoder.textDescUV.Height, true, true);
                        DataBox    dbY = new DataBox();
                        DataBox    dbU = new DataBox();
                        DataBox    dbV = new DataBox();

                        dbY.DataPointer = dsY.DataPointer;
                        dbU.DataPointer = dsU.DataPointer;
                        dbV.DataPointer = dsV.DataPointer;

                        dbY.RowPitch = decoder.textDesc.Width;
                        dbU.RowPitch = decoder.textDescUV.Width;
                        dbV.RowPitch = decoder.textDescUV.Width;

                        long totalSize = frame->linesize.ToArray()[0] * decoder.textDesc.Height;

                        byte *dataPtr = frame->data.ToArray()[0];

                        // Packed layout (e.g. Y0 U0 Y1 V0): Y starts at offset 0, U at 1
                        // and V at 3, each advancing by its own component step.
                        for (int i = 0; i < totalSize; i += decoder.info.Comp0Step)
                        {
                            dsY.WriteByte(*(dataPtr + i));
                        }

                        for (int i = 1; i < totalSize; i += decoder.info.Comp1Step)
                        {
                            dsU.WriteByte(*(dataPtr + i));
                        }

                        for (int i = 3; i < totalSize; i += decoder.info.Comp2Step)
                        {
                            dsV.WriteByte(*(dataPtr + i));
                        }

                        mFrame.textures[0] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc, new DataBox[] { dbY });
                        mFrame.textures[1] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { dbU });
                        mFrame.textures[2] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDescUV, new DataBox[] { dbV });

                        Utilities.Dispose(ref dsY); Utilities.Dispose(ref dsU); Utilities.Dispose(ref dsV);
                    }
                }

                // Software Frame (OTHER/sws_scale) | X byte* -> Sws_Scale RGBA -> Device Texture[1] (RGBA) / SRV (RGBA) -> PixelShader (RGBA)
                else
                {
                    if (decoder.swsCtx == null)
                    {
                        decoder.textDesc.Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm;
                        decoder.outData         = new byte_ptrArray4();
                        decoder.outLineSize     = new int_array4();
                        decoder.outBufferSize   = av_image_get_buffer_size(Decoder.VOutPixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);
                        Marshal.FreeHGlobal(decoder.outBufferPtr);
                        decoder.outBufferPtr = Marshal.AllocHGlobal(decoder.outBufferSize);
                        av_image_fill_arrays(ref decoder.outData, ref decoder.outLineSize, (byte *)decoder.outBufferPtr, Decoder.VOutPixelFormat, decoder.codecCtx->width, decoder.codecCtx->height, 1);

                        int vSwsOptFlags = decoder.decCtx.cfg.video.SwsHighQuality ? DecoderContext.SCALING_HQ : DecoderContext.SCALING_LQ;
                        decoder.swsCtx = sws_getContext(decoder.codecCtx->coded_width, decoder.codecCtx->coded_height, decoder.codecCtx->pix_fmt, decoder.codecCtx->width, decoder.codecCtx->height, Decoder.VOutPixelFormat, vSwsOptFlags, null, null, null);
                        if (decoder.swsCtx == null)
                        {
                            Log($"[ProcessVideoFrame|RGB] [ERROR-1] Failed to allocate SwsContext"); return(ret);
                        }
                    }

                    sws_scale(decoder.swsCtx, frame->data, frame->linesize, 0, frame->height, decoder.outData, decoder.outLineSize);

                    DataBox db = new DataBox();
                    db.DataPointer     = (IntPtr)decoder.outData.ToArray()[0];
                    db.RowPitch        = decoder.outLineSize[0];
                    mFrame.textures    = new Texture2D[1];
                    mFrame.textures[0] = new Texture2D(decoder.decCtx.renderer.device, decoder.textDesc, new DataBox[] { db });
                }

                return(ret);
            } catch (Exception e) { ret = -1;  Log("Error[" + (ret).ToString("D4") + "], Func: ProcessVideoFrame(), Msg: " + e.Message + " - " + e.StackTrace); }

            return(ret);
        }
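
The hardware branch relies on FFmpeg's AV_PIX_FMT_D3D11 frame layout, documented in pixfmt.h: data[0] holds the ID3D11Texture2D pointer (usually a texture array shared by the decoder) and data[1] the array slice index. A minimal sketch of just that copy, assuming device and textDesc are set up as in the code above:

        // AV_PIX_FMT_D3D11: data[0] = ID3D11Texture2D*, data[1] = array slice index.
        var src   = new Texture2D((IntPtr)frame->data.ToArray()[0]);
        int slice = (int)frame->data.ToArray()[1];
        var dst   = new Texture2D(device, textDesc);

        // Copy the decoded slice out of FFmpeg's texture array into our own
        // texture so the decoder can recycle its surface.
        device.ImmediateContext.CopySubresourceRegion(
            src, slice,
            new ResourceRegion(0, 0, 0, dst.Description.Width, dst.Description.Height, 1),
            dst, 0);
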
Code example #6
        public long GetVideoFrame()
        {
            int  ret;
            long firstTs = -1;

            while (interrupt != 1)
            {
                AVPacket *pkt = av_packet_alloc();
                ret = av_read_frame(demuxer.fmtCtx, pkt);
                if (ret != 0)
                {
                    av_packet_free(&pkt); // don't leak the packet on EOF/read failure
                    return(-1);
                }

                if (!demuxer.enabledStreams.Contains(pkt->stream_index))
                {
                    av_packet_free(&pkt);
                    continue;
                }

                switch (demuxer.fmtCtx->streams[pkt->stream_index]->codecpar->codec_type)
                {
                case AVMEDIA_TYPE_AUDIO:
                    aDecoder.packets.Enqueue((IntPtr)pkt);

                    break;

                case AVMEDIA_TYPE_VIDEO:
                    ret = avcodec_send_packet(vDecoder.codecCtx, pkt);
                    av_packet_free(&pkt);

                    if (ret != 0)
                    {
                        return(-1);
                    }

                    while (interrupt != 1)
                    {
                        AVFrame *frame = av_frame_alloc();
                        ret = avcodec_receive_frame(vDecoder.codecCtx, frame);

                        if (ret == 0)
                        {
                            MediaFrame mFrame = new MediaFrame();
                            mFrame.pts       = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;
                            mFrame.timestamp = ((long)(mFrame.pts * vDecoder.info.Timebase) - vDecoder.info.StartTime) + cfg.audio.LatencyTicks;

                            if (mFrame.pts == AV_NOPTS_VALUE || frame->pict_type != AVPictureType.AV_PICTURE_TYPE_I)
                            {
                                if (frame->pict_type != AVPictureType.AV_PICTURE_TYPE_I)
                                {
                                    Log($"Invalid Seek to Keyframe, skip... {frame->pict_type} | {frame->key_frame.ToString()}");
                                }
                                av_frame_free(&frame);
                                continue;
                            }

                            if (firstTs == -1)
                            {
                                if (vDecoder.hwAccelSuccess && frame->hw_frames_ctx == null)
                                {
                                    Log("HW Acceleration Failed 2");
                                    vDecoder.hwAccelSuccess = false;
                                    renderer.FrameResized();
                                }
                                firstTs = mFrame.timestamp;
                            }

                            if (MediaFrame.ProcessVideoFrame(vDecoder, mFrame, frame) != 0)
                            {
                                mFrame = null;
                            }
                            if (mFrame != null)
                            {
                                vDecoder.frames.Enqueue(mFrame);
                            }

                            //Log(Utils.TicksToTime((long)(mFrame.pts * avs.streams[video.st->index].timebase)));

                            av_frame_free(&frame);
                            continue;
                        }

                        av_frame_free(&frame);
                        break;
                    }

                    break;

                case AVMEDIA_TYPE_SUBTITLE:
                    sDecoder.packets.Enqueue((IntPtr)pkt);

                    break;

                default:
                    av_packet_free(&pkt);
                    break;
                }

                if (firstTs != -1)
                {
                    break;
                }
            }

            return(firstTs);
        }
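
GetVideoFrame is the kind of routine you would call right after repositioning the demuxer, to decode forward to the first keyframe and learn its timestamp. A hypothetical usage sketch (videoStreamIndex and ms are illustrative locals; the seek/flush calls are standard FFmpeg API):

        // Seek near the target (ms -> stream time_base units), flush the decoder,
        // then decode until the first keyframe and read back its timestamp in ticks.
        long seekTs = av_rescale_q(ms * 1000,                         // ms -> µs
                                   new AVRational { num = 1, den = 1000000 },
                                   fmtCtx->streams[videoStreamIndex]->time_base);
        av_seek_frame(fmtCtx, videoStreamIndex, seekTs, AVSEEK_FLAG_BACKWARD);
        avcodec_flush_buffers(vDecoder.codecCtx);

        long firstTs = GetVideoFrame();                               // -1 on failure/EOF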