/// <summary>
/// Decoder thread loop. Waits on <c>decodeARE</c> until signalled, then pulls demuxed
/// packets from <c>packets</c>, decodes them with FFmpeg and enqueues the resulting
/// <c>MediaFrame</c>s into <c>frames</c> until paused, stopped, drained or errored out.
/// Subtitles go through avcodec_decode_subtitle2; audio/video go through the
/// send_packet / receive_frame API. Runs until <c>stopThread</c> is set.
/// </summary>
public void Decode()
{
    AVPacket *pkt;

    while (!stopThread)
    {
        if (status != Status.Ended) { status = Status.Paused; }

        // Park here until someone kicks the decoder (play/seek/resume).
        decodeARE.Reset();
        decodeARE.WaitOne();
        if (stopThread) { stopThread = false; break; }

        forcePause          = false;
        status              = Status.Playing;
        bool shouldStop     = false;
        int  allowedErrors  = decCtx.cfg.decoder.MaxErrors; // error budget before giving up
        int  ret            = -1;

        Log("Started");

        // Wait for demuxer to come up
        if (demuxer.status == Status.Paused)
        {
            demuxer.demuxARE.Set();
            while (!demuxer.isPlaying && demuxer.status != Status.Ended && !forcePause && decCtx.isPlaying)
                Thread.Sleep(1);
        }

        while (!stopThread)
        {
            // No Packets || Max Frames Brakes: throttle while the input queue is empty
            // or the output frame queue is already at its configured cap.
            if (packets.Count == 0
                || (type == MediaType.Audio && frames.Count > decCtx.cfg.decoder.MaxAudioFrames)
                || (type == MediaType.Video && frames.Count > decCtx.cfg.decoder.MaxVideoFrames)
                || (type == MediaType.Subs  && frames.Count > decCtx.cfg.decoder.MaxSubsFrames))
            {
                shouldStop = false;

                do
                {
                    if (!decCtx.isPlaying || forcePause) // Proper Pause
                    {
                        Log("Pausing");
                        shouldStop = true;
                        break;
                    }
                    else if (packets.Count == 0 && demuxer.status == Status.Ended) // Drain
                    {
                        Log("Draining");
                        break;
                    }
                    // No reason to run: demuxer stopped, or (for non-embedded/video streams) it is idle-waiting.
                    else if (packets.Count == 0 && (!demuxer.isPlaying || ((!isEmbedded || type == MediaType.Video) && demuxer.isWaiting)))
                    {
                        Log("Exhausted " + isPlaying);
                        shouldStop = true;
                        break;
                    }

                    Thread.Sleep(10);
                } while (packets.Count == 0
                    || (type == MediaType.Audio && frames.Count > decCtx.cfg.decoder.MaxAudioFrames)
                    || (type == MediaType.Video && frames.Count > decCtx.cfg.decoder.MaxVideoFrames)
                    || (type == MediaType.Subs  && frames.Count > decCtx.cfg.decoder.MaxSubsFrames));

                if (shouldStop) break;
            }

            if (!decCtx.isPlaying || forcePause) break;

            if (packets.Count == 0 && demuxer.status == Status.Ended)
            {
                if (type == MediaType.Video)
                {
                    // Check case pause while draining: a null packet puts the codec into drain mode.
                    Log("Draining...");
                    pkt = null;
                }
                else
                {
                    status = Status.Ended;
                    Log("EOF");
                    break;
                }
            }
            else
            {
                packets.TryDequeue(out IntPtr pktPtr);
                pkt = (AVPacket *)pktPtr;

                if (type == MediaType.Subs)
                {
                    MediaFrame mFrame = new MediaFrame();
                    mFrame.pts       = pkt->pts;
                    mFrame.timestamp = (long)((mFrame.pts * info.Timebase)) + decCtx.cfg.audio.LatencyTicks + decCtx.cfg.subs.DelayTicks;

                    if (mFrame.pts == AV_NOPTS_VALUE)
                    {
                        av_packet_free(&pkt);
                        continue;
                    }

                    int gotFrame = 0;
                    AVSubtitle sub = new AVSubtitle();

                    // drain mode todo
                    // pkt->data set to NULL && pkt->size = 0 until it stops returning subtitles
                    ret = avcodec_decode_subtitle2(codecCtx, &sub, &gotFrame, pkt);
                    if (ret < 0)
                    {
                        av_packet_free(&pkt); // fix: packet was leaked on decode failure
                        allowedErrors--;
                        Log($"[ERROR-2] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");

                        if (allowedErrors == 0) { Log("[ERROR-0] Too many errors!"); break; }

                        continue;
                    }

                    if (gotFrame < 1 || sub.num_rects < 1)
                    {
                        // fix: release the decoded subtitle (if any) and the packet instead of leaking them
                        if (gotFrame > 0) avsubtitle_free(&sub);
                        av_packet_free(&pkt);
                        continue;
                    }

                    MediaFrame.ProcessSubsFrame(this, mFrame, &sub);
                    frames.Enqueue(mFrame);

                    avsubtitle_free(&sub);
                    av_packet_free(&pkt);
                    continue; // subtitles never reach the send/receive path below
                }
            }

            ret = avcodec_send_packet(codecCtx, pkt);

            if (ret != 0 && ret != AVERROR(EAGAIN))
            {
                av_packet_free(&pkt); // fix: packet was leaked on every send_packet failure path

                if (ret == AVERROR_EOF)
                {
                    status = Status.Ended;
                    Log("EOF");
                    break;
                }
                else // We also get Error number -16976906 occurred
                {
                    allowedErrors--;
                    Log($"[ERROR-2] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");

                    if (allowedErrors == 0) { Log("[ERROR-0] Too many errors!"); break; }

                    continue;
                }
            }

            av_packet_free(&pkt);

            // Pull every frame the codec has ready for this packet.
            while (true)
            {
                ret = avcodec_receive_frame(codecCtx, frame);
                if (ret == 0)
                {
                    MediaFrame mFrame = new MediaFrame();
                    mFrame.pts = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;

                    if (mFrame.pts == AV_NOPTS_VALUE)
                    {
                        av_frame_unref(frame);
                        continue;
                    }

                    if (type == MediaType.Video)
                    {
                        // HW frames context vanished -> hardware decoding silently fell back to software.
                        if (hwAccelSuccess && frame->hw_frames_ctx == null)
                        {
                            Log("HW Acceleration Failed 2");
                            hwAccelSuccess = false;
                            decCtx.renderer.FrameResized();
                        }

                        mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - info.StartTime) + decCtx.cfg.audio.LatencyTicks;

                        if (MediaFrame.ProcessVideoFrame(this, mFrame, frame) != 0) mFrame = null;
                    }
                    else // Audio
                    {
                        // Audio is offset against the video stream's start time so A/V line up.
                        mFrame.timestamp = ((long)(mFrame.pts * info.Timebase) - info.StartTime) + decCtx.cfg.audio.DelayTicks + (info.StartTime - demuxer.decCtx.vDecoder.info.StartTime);

                        if (MediaFrame.ProcessAudioFrame(this, mFrame, frame) < 0) mFrame = null;
                    }

                    if (mFrame != null) frames.Enqueue(mFrame);

                    av_frame_unref(frame);
                    continue;
                }

                av_frame_unref(frame);
                break;
            }

            if (ret == AVERROR_EOF)
            {
                status = Status.Ended;
                Log("EOF");

                // If the sibling decoder is also done, the whole context has ended.
                if (type == MediaType.Video && decCtx.aDecoder.status != Status.Playing)
                {
                    Log("EOF All");
                    decCtx.status = Status.Ended;
                }
                else if (type == MediaType.Audio && decCtx.vDecoder.status != Status.Playing)
                {
                    Log("EOF All");
                    decCtx.status = Status.Ended;
                }

                break;
            }

            if (ret != AVERROR(EAGAIN))
            {
                Log($"[ERROR-3] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");
                break;
            }
        }

        Log($"Done {(allowedErrors == decCtx.cfg.decoder.MaxErrors ? "" : $"[Errors: {decCtx.cfg.decoder.MaxErrors - allowedErrors}]")}");
    }
}
/// <summary>
/// Reads packets after a seek until the first decodable video keyframe (I-frame)
/// is produced, enqueueing audio/subtitle packets to their decoders on the way.
/// </summary>
/// <returns>
/// The timestamp (ticks, latency-adjusted) of the first keyframe, or -1 on
/// read/send failure or when interrupted before a keyframe was found.
/// </returns>
public long GetVideoFrame()
{
    int  ret;
    long firstTs = -1;

    while (interrupt != 1)
    {
        AVPacket *pkt = av_packet_alloc();
        ret = av_read_frame(demuxer.fmtCtx, pkt);
        if (ret != 0)
        {
            av_packet_free(&pkt); // fix: packet was leaked on read failure
            return(-1);
        }

        // Skip streams we are not playing.
        if (!demuxer.enabledStreams.Contains(pkt->stream_index))
        {
            av_packet_free(&pkt);
            continue;
        }

        switch (demuxer.fmtCtx->streams[pkt->stream_index]->codecpar->codec_type)
        {
            case AVMEDIA_TYPE_AUDIO:
                // Ownership of pkt transfers to the audio decoder's queue.
                aDecoder.packets.Enqueue((IntPtr)pkt);
                break;

            case AVMEDIA_TYPE_VIDEO:
                ret = avcodec_send_packet(vDecoder.codecCtx, pkt);
                av_packet_free(&pkt);
                if (ret != 0) return(-1);

                while (interrupt != 1)
                {
                    AVFrame *frame = av_frame_alloc();
                    ret = avcodec_receive_frame(vDecoder.codecCtx, frame);
                    if (ret == 0)
                    {
                        MediaFrame mFrame = new MediaFrame();
                        mFrame.pts       = frame->best_effort_timestamp == AV_NOPTS_VALUE ? frame->pts : frame->best_effort_timestamp;
                        mFrame.timestamp = ((long)(mFrame.pts * vDecoder.info.Timebase) - vDecoder.info.StartTime) + cfg.audio.LatencyTicks;

                        // Only a keyframe with a valid pts is acceptable as the seek landing point.
                        if (mFrame.pts == AV_NOPTS_VALUE || frame->pict_type != AVPictureType.AV_PICTURE_TYPE_I)
                        {
                            if (frame->pict_type != AVPictureType.AV_PICTURE_TYPE_I)
                                Log($"Invalid Seek to Keyframe, skip... {frame->pict_type} | {frame->key_frame.ToString()}");

                            av_frame_free(&frame);
                            continue;
                        }

                        if (firstTs == -1)
                        {
                            // HW frames context vanished -> hardware decoding fell back to software.
                            // (fix: removed an identical, unreachable duplicate of this check)
                            if (vDecoder.hwAccelSuccess && frame->hw_frames_ctx == null)
                            {
                                Log("HW Acceleration Failed 2");
                                vDecoder.hwAccelSuccess = false;
                                renderer.FrameResized();
                            }

                            firstTs = mFrame.timestamp;
                        }

                        if (MediaFrame.ProcessVideoFrame(vDecoder, mFrame, frame) != 0) mFrame = null;
                        if (mFrame != null) vDecoder.frames.Enqueue(mFrame);

                        av_frame_free(&frame);
                        continue;
                    }

                    av_frame_free(&frame);
                    break;
                }

                break;

            case AVMEDIA_TYPE_SUBTITLE:
                // Ownership of pkt transfers to the subtitle decoder's queue.
                sDecoder.packets.Enqueue((IntPtr)pkt);
                break;

            default:
                av_packet_free(&pkt);
                break;
        }

        if (firstTs != -1) break;
    }

    return(firstTs);
}