/// <summary>
/// Create an audio converter from explicit destination output parameters.
/// </summary>
/// <param name="dstFormat">destination sample format</param>
/// <param name="dstChannels">destination channel count (layout is derived from it)</param>
/// <param name="dstNbSamples">destination samples per frame</param>
/// <param name="dstSampleRate">destination sample rate in Hz</param>
public SampleConverter(AVSampleFormat dstFormat, int dstChannels, int dstNbSamples, int dstSampleRate)
{
    DstFormat = dstFormat;
    DstChannels = dstChannels;
    DstNbSamples = dstNbSamples;
    DstSampleRate = dstSampleRate;
    // Derive a default channel layout from the channel count.
    DstChannelLayout = FFmpegHelper.GetChannelLayout(dstChannels);
    dstFrame = new AudioFrame(dstChannels, dstNbSamples, dstFormat, dstSampleRate);
    AudioFifo = new AudioFifo(dstFormat, dstChannels);
}
/// <summary>
/// Copy a video frame's first plane into a GDI+ <see cref="Bitmap"/>.
/// BGRA frames become 32bpp ARGB bitmaps; any other format is written as 24bpp RGB.
/// </summary>
/// <param name="frame">source frame (BGRA or a 24-bit packed format)</param>
/// <returns>a new bitmap containing the frame's pixel data</returns>
private static Bitmap BgraToBitmap(VideoFrame frame)
{
    int w = frame.Width;
    int h = frame.Height;
    PixelFormat gdiFormat = (AVPixelFormat)frame.AVFrame.format == AVPixelFormat.AV_PIX_FMT_BGRA
        ? PixelFormat.Format32bppArgb
        : PixelFormat.Format24bppRgb;
    Bitmap bitmap = new Bitmap(w, h, gdiFormat);
    BitmapData locked = bitmap.LockBits(new Rectangle(0, 0, w, h), ImageLockMode.WriteOnly, bitmap.PixelFormat);
    // Copy the narrower of the two strides so neither buffer is overrun.
    int bytesPerRow = Math.Min(locked.Stride, frame.Linesize[0]);
    FFmpegHelper.CopyPlane(frame.Data[0], frame.Linesize[0], locked.Scan0, locked.Stride, bytesPerRow, h);
    bitmap.UnlockBits(locked);
    return bitmap;
}
/// <summary>
/// Convert an <see cref="AudioFrame"/> to an OpenCvSharp <see cref="Mat"/>.
/// Planar audio becomes one mat row per channel; packed audio becomes a single
/// multi-channel row. 64-bit integer samples are stored as CV_64F because
/// OpenCV has no 64-bit signed integer element type.
/// </summary>
/// <param name="frame">source audio frame</param>
/// <returns>a new mat holding the frame's sample data</returns>
/// <exception cref="FFmpegException">the sample format has no OpenCV mapping</exception>
private static Mat AudioFrameToMat(AudioFrame frame)
{
    AVSampleFormat sampleFormat = (AVSampleFormat)frame.AVFrame.format;
    bool isPlanar = ffmpeg.av_sample_fmt_is_planar(sampleFormat) != 0;
    int rows = isPlanar ? frame.AVFrame.channels : 1;          // one row per plane
    int matChannels = isPlanar ? 1 : frame.AVFrame.channels;   // interleaved samples share a row
    int bytesPerBlock = ffmpeg.av_get_bytes_per_sample(sampleFormat) * (isPlanar ? 1 : frame.AVFrame.channels);
    int bytesPerRow = frame.AVFrame.nb_samples * bytesPerBlock;

    MatType elementType;
    switch (sampleFormat)
    {
        case AVSampleFormat.AV_SAMPLE_FMT_U8:
        case AVSampleFormat.AV_SAMPLE_FMT_U8P:
            elementType = MatType.CV_8UC(matChannels);
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_S16:
        case AVSampleFormat.AV_SAMPLE_FMT_S16P:
            elementType = MatType.CV_16SC(matChannels);
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_S32:
        case AVSampleFormat.AV_SAMPLE_FMT_S32P:
            elementType = MatType.CV_32SC(matChannels);
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_FLT:
        case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
            elementType = MatType.CV_32FC(matChannels);
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_DBL:
        case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
        // opencv not have 64S, use 64F
        case AVSampleFormat.AV_SAMPLE_FMT_S64:
        case AVSampleFormat.AV_SAMPLE_FMT_S64P:
            elementType = MatType.CV_64FC(matChannels);
            break;
        default:
            throw new FFmpegException(FFmpegException.NotSupportFormat);
    }

    Mat mat = new Mat(rows, frame.AVFrame.nb_samples, elementType);
    for (int plane = 0; plane < rows; plane++)
    {
        FFmpegHelper.CopyMemory(mat.Data + plane * bytesPerRow, frame.Data[plane], bytesPerRow);
    }
    return mat;
}
/// <summary>
/// Convert a <see cref="Mat"/> back to an <see cref="AudioFrame"/>.
/// Planar layouts are expected as one mat row per channel; packed layouts as a
/// single multi-channel row (the inverse of <c>AudioFrameToMat</c>).
/// </summary>
/// <param name="mat">source mat; width is the sample count per channel</param>
/// <param name="srctFormat">target sample format of the new frame</param>
/// <param name="sampleRate">target sample rate in Hz</param>
/// <returns>a new audio frame filled from the mat's data</returns>
private static AudioFrame MatToAudioFrame(Mat mat, AVSampleFormat srctFormat, int sampleRate)
{
    // Packed data carries the channel count in the mat's channels;
    // planar data carries it in the row count.
    int channelCount = mat.NumberOfChannels > 1 ? mat.NumberOfChannels : mat.Height;
    bool planar = ffmpeg.av_sample_fmt_is_planar(srctFormat) > 0;
    int rowBytes = mat.Step;
    AudioFrame frame = new AudioFrame(channelCount, mat.Width, srctFormat, sampleRate);
    int planes = planar ? channelCount : 1;
    for (int p = 0; p < planes; p++)
    {
        FFmpegHelper.CopyMemory(frame.Data[p], mat.DataPointer + p * rowBytes, rowBytes);
    }
    return frame;
}
/// <summary>
/// Convert an <see cref="AudioFrame"/> to an Emgu CV <see cref="Mat"/>.
/// Planar audio becomes one mat row per channel; packed audio becomes a single
/// multi-channel row. 64-bit integer samples are stored as Cv64F because Emgu CV
/// has no 64-bit signed integer depth.
/// </summary>
/// <param name="frame">source audio frame</param>
/// <returns>a new mat holding the frame's sample data</returns>
/// <exception cref="FFmpegException">the sample format has no Emgu CV mapping</exception>
private static Mat AudioFrameToMat(AudioFrame frame)
{
    DepthType dstType;
    switch ((AVSampleFormat)frame.AVFrame.format)
    {
        case AVSampleFormat.AV_SAMPLE_FMT_U8:
        case AVSampleFormat.AV_SAMPLE_FMT_U8P:
            dstType = DepthType.Cv8U;
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_S16:
        case AVSampleFormat.AV_SAMPLE_FMT_S16P:
            dstType = DepthType.Cv16S;
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_S32:
        case AVSampleFormat.AV_SAMPLE_FMT_S32P:
            dstType = DepthType.Cv32S;
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_FLT:
        case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
            dstType = DepthType.Cv32F;
            break;
        case AVSampleFormat.AV_SAMPLE_FMT_DBL:
        case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
        // emgucv not have S64, use 64F
        case AVSampleFormat.AV_SAMPLE_FMT_S64:
        case AVSampleFormat.AV_SAMPLE_FMT_S64P:
            dstType = DepthType.Cv64F;
            break;
        default:
            throw new FFmpegException(FFmpegException.NotSupportFormat);
    }
    int planar = ffmpeg.av_sample_fmt_is_planar((AVSampleFormat)frame.AVFrame.format);
    int planes = planar != 0 ? frame.AVFrame.channels : 1;
    int block_align = ffmpeg.av_get_bytes_per_sample((AVSampleFormat)frame.AVFrame.format) * (planar != 0 ? 1 : frame.AVFrame.channels);
    int stride = frame.AVFrame.nb_samples * block_align;
    Mat mat = new Mat(planes, frame.AVFrame.nb_samples, dstType, (planar != 0 ? 1 : frame.AVFrame.channels));
    for (int i = 0; i < planes; i++)
    {
        // BUG FIX: copy FROM the frame INTO the mat (destination first, matching the
        // OpenCvSharp AudioFrameToMat and the inverse MatToAudioFrame). The original
        // call had the arguments reversed, overwriting the source frame with the
        // uninitialized mat and returning a blank mat.
        FFmpegHelper.CopyMemory(mat.DataPointer + i * stride, frame.Data[i], stride);
    }
    return (mat);
}
/// <summary>
/// Create an audio converter whose output parameters are copied from an
/// existing destination frame; the frame itself is reused as the conversion target.
/// </summary>
/// <param name="dstFrame">frame supplying format, channels, layout, samples and rate</param>
public SampleConverter(AudioFrame dstFrame)
{
    ffmpeg.av_frame_make_writable(dstFrame).ThrowIfError();
    DstFormat = (AVSampleFormat)dstFrame.AVFrame.format;
    DstChannels = dstFrame.AVFrame.channels;
    DstNbSamples = dstFrame.AVFrame.nb_samples;
    DstSampleRate = dstFrame.AVFrame.sample_rate;
    // A frame may carry no layout; fall back to one derived from the channel count.
    DstChannelLayout = dstFrame.AVFrame.channel_layout;
    if (DstChannelLayout == 0)
    {
        DstChannelLayout = FFmpegHelper.GetChannelLayout(DstChannels);
    }
    base.dstFrame = dstFrame;
    AudioFifo = new AudioFifo(DstFormat, DstChannels);
}
/// <summary>
/// Convert a <see cref="PixelFormat.Format32bppArgb"/> or
/// <see cref="PixelFormat.Format24bppRgb"/> bitmap to a <see cref="VideoFrame"/>.
/// </summary>
/// <param name="bitmap">source bitmap; must be 24bpp RGB or 32bpp ARGB</param>
/// <returns>a new BGR24 or BGRA frame filled from the bitmap</returns>
/// <exception cref="FFmpegException">the bitmap's pixel format is unsupported</exception>
public static VideoFrame ToVideoFrame(this Bitmap bitmap)
{
    bool supported = bitmap.PixelFormat == PixelFormat.Format24bppRgb
        || bitmap.PixelFormat == PixelFormat.Format32bppArgb;
    if (!supported)
    {
        throw new FFmpegException(FFmpegException.NotSupportFormat);
    }
    AVPixelFormat pixFmt = bitmap.PixelFormat == PixelFormat.Format24bppRgb
        ? AVPixelFormat.AV_PIX_FMT_BGR24
        : AVPixelFormat.AV_PIX_FMT_BGRA;
    int w = bitmap.Width;
    int h = bitmap.Height;
    VideoFrame frame = new VideoFrame(w, h, pixFmt);
    BitmapData locked = bitmap.LockBits(new Rectangle(0, 0, w, h), ImageLockMode.ReadOnly, bitmap.PixelFormat);
    // Copy the narrower of the two strides so neither buffer is overrun.
    int bytesPerRow = Math.Min(locked.Stride, frame.Linesize[0]);
    FFmpegHelper.CopyPlane(locked.Scan0, locked.Stride, frame.Data[0], frame.Linesize[0], bytesPerRow, h);
    bitmap.UnlockBits(locked);
    return frame;
}
/// <summary>
/// Create an audio converter whose output parameters are copied from an
/// audio codec context.
/// </summary>
/// <param name="dstCodec">audio codec supplying format, channels, layout, frame size and rate</param>
/// <exception cref="FFmpegException">the codec is not an audio codec</exception>
public SampleConverter(MediaCodec dstCodec)
{
    if (dstCodec.Type != AVMediaType.AVMEDIA_TYPE_AUDIO)
    {
        throw new FFmpegException(FFmpegException.CodecTypeError);
    }
    DstFormat = dstCodec.AVCodecContext.sample_fmt;
    DstChannels = dstCodec.AVCodecContext.channels;
    DstNbSamples = dstCodec.AVCodecContext.frame_size;
    DstSampleRate = dstCodec.AVCodecContext.sample_rate;
    // A codec context may carry no layout; fall back to one derived from the channel count.
    DstChannelLayout = dstCodec.AVCodecContext.channel_layout;
    if (DstChannelLayout == 0)
    {
        DstChannelLayout = FFmpegHelper.GetChannelLayout(DstChannels);
    }
    dstFrame = new AudioFrame(DstChannels, DstNbSamples, DstFormat, DstSampleRate);
    AudioFifo = new AudioFifo(DstFormat, DstChannels);
}
/// <summary>
/// Lazily allocate and initialize the swresample context on first use,
/// taking the source parameters from <paramref name="srcFrame"/>.
/// Does nothing once the context exists or while disposing.
/// </summary>
/// <param name="srcFrame">frame supplying the source format, layout and rate</param>
private void SwrCheckInit(MediaFrame srcFrame)
{
    if (pSwrContext != null || isDisposing)
    {
        return;
    }
    AVFrame* input = srcFrame;
    AVFrame* output = dstFrame; // keep the implicit conversion the original performed
    // A frame may carry no layout; fall back to one derived from the channel count.
    ulong inLayout = input->channel_layout;
    if (inLayout == 0)
    {
        inLayout = FFmpegHelper.GetChannelLayout(input->channels);
    }
    // With no explicit destination rate, keep the source rate.
    int outRate = DstSampleRate == 0 ? input->sample_rate : DstSampleRate;
    pSwrContext = ffmpeg.swr_alloc_set_opts(
        null,
        (long)DstChannelLayout, DstFormat, outRate,
        (long)inLayout, (AVSampleFormat)input->format, input->sample_rate,
        0, null);
    ffmpeg.swr_init(pSwrContext).ThrowIfError();
}
/// <summary>
/// Create an audio encoder, deriving the channel layout from a channel count
/// and delegating to the layout-based overload.
/// </summary>
/// <param name="audioCodec">codec id of the encoder</param>
/// <param name="flags">output format flags passed through to the encoder setup</param>
/// <param name="channels">channel count used to derive the layout</param>
/// <param name="sampleRate">sample rate in Hz; 0 lets the overload pick a default</param>
/// <param name="bitRate">target bit rate; 0 lets the overload pick a default</param>
/// <param name="format">sample format; NONE lets the overload pick a default</param>
public static MediaEncoder CreateAudioEncode(AVCodecID audioCodec, int flags, int channels, int sampleRate = 0, long bitRate = 0, AVSampleFormat format = AVSampleFormat.AV_SAMPLE_FMT_NONE)
{
    var channelLayout = FFmpegHelper.GetChannelLayout(channels);
    return CreateAudioEncode(audioCodec, flags, channelLayout, sampleRate, bitRate, format);
}