Code example #1
        private static Mat AudioFrameToMat(AudioFrame frame)
        {
            // Planar formats keep one channel per data plane; packed formats interleave
            // all channels in a single plane. Each plane becomes one Mat row below.
            int planar      = ffmpeg.av_sample_fmt_is_planar((AVSampleFormat)frame.AVFrame.format);
            int planes      = planar != 0 ? frame.AVFrame.channels : 1;
            int block_align = ffmpeg.av_get_bytes_per_sample((AVSampleFormat)frame.AVFrame.format) * (planar != 0 ? 1 : frame.AVFrame.channels);
            int stride      = frame.AVFrame.nb_samples * block_align;
            int channels    = planar != 0 ? 1 : frame.AVFrame.channels;

            MatType dstType;

            switch ((AVSampleFormat)frame.AVFrame.format)
            {
            case AVSampleFormat.AV_SAMPLE_FMT_U8:
            case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                dstType = MatType.CV_8UC(channels);
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_S16:
            case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                dstType = MatType.CV_16SC(channels);
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_S32:
            case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                dstType = MatType.CV_32SC(channels);
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_FLT:
            case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                dstType = MatType.CV_32FC(channels);
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_DBL:
            case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
            // OpenCV has no 64-bit integer Mat depth, so fall back to CV_64F
            case AVSampleFormat.AV_SAMPLE_FMT_S64:
            case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                dstType = MatType.CV_64FC(channels);
                break;

            default:
                throw new FFmpegException(FFmpegException.NotSupportFormat);
            }

            Mat mat = new Mat(planes, frame.AVFrame.nb_samples, dstType);

            for (int i = 0; i < planes; i++)
            {
                FFmpegHelper.CopyMemory(mat.Data + i * stride, frame.Data[i], stride);
            }
            return mat;
        }
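A quick illustration of the layout math inside AudioFrameToMat: a planar format gets one plane (one Mat row) per channel with block_align equal to the per-sample size, while a packed format interleaves every channel into a single plane. The sketch below is a hypothetical, standalone check with hard-coded numbers for 32-bit float audio; it only mirrors what av_sample_fmt_is_planar and av_get_bytes_per_sample would report for AV_SAMPLE_FMT_FLTP and AV_SAMPLE_FMT_FLT, so it runs without the native FFmpeg libraries.

        // 2 channels x 1024 samples of 32-bit float audio.
        const int channels       = 2;
        const int nbSamples      = 1024;
        const int bytesPerSample = 4;                               // av_get_bytes_per_sample for FLT/FLTP

        // Planar (AV_SAMPLE_FMT_FLTP): one plane per channel.
        int planarPlanes     = channels;                            // 2 Mat rows
        int planarBlockAlign = bytesPerSample;                      // 4 bytes
        int planarStride     = nbSamples * planarBlockAlign;        // 4096 bytes per row

        // Packed (AV_SAMPLE_FMT_FLT): one interleaved plane.
        int packedPlanes     = 1;                                   // 1 Mat row, 2 channels per element
        int packedBlockAlign = bytesPerSample * channels;           // 8 bytes
        int packedStride     = nbSamples * packedBlockAlign;        // 8192 bytes

        Console.WriteLine($"planar: {planarPlanes} row(s) x {planarStride} bytes, packed: {packedPlanes} row(s) x {packedStride} bytes");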
Code example #2
        private static AudioFrame MatToAudioFrame(Mat mat, AVSampleFormat srcFormat, int sampleRate)
        {
            // Packed data carries its channel count in the Mat channels; planar data uses one row per channel.
            int        channels = mat.NumberOfChannels > 1 ? mat.NumberOfChannels : mat.Height;
            AudioFrame frame    = new AudioFrame(channels, mat.Width, srcFormat, sampleRate);
            bool       isPlanar = ffmpeg.av_sample_fmt_is_planar(srcFormat) > 0;
            int        stride   = mat.Step;

            for (int i = 0; i < (isPlanar ? channels : 1); i++)
            {
                FFmpegHelper.CopyMemory(frame.Data[i], mat.DataPointer + i * stride, stride);
            }
            return frame;
        }
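A hypothetical call into MatToAudioFrame for planar float audio, assuming the Emgu CV Mat(rows, cols, depth, channels) constructor and that the AudioFrame constructor used above allocates the frame's data buffers (neither is shown in the listing):

        // 2 channels x 1024 samples, one channel per row, single-channel 32-bit float Mat.
        using (Mat samples = new Mat(2, 1024, DepthType.Cv32F, 1))
        {
            // ... fill `samples` with audio data, one channel per row ...
            AudioFrame frame = MatToAudioFrame(samples, AVSampleFormat.AV_SAMPLE_FMT_FLTP, 44100);
            // frame now holds a copy of the samples and can be handed to an encoder.
        }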
Code example #3
        private static Mat AudioFrameToMat(AudioFrame frame)
        {
            DepthType dstType;

            switch ((AVSampleFormat)frame.AVFrame.format)
            {
            case AVSampleFormat.AV_SAMPLE_FMT_U8:
            case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                dstType = DepthType.Cv8U;
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_S16:
            case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                dstType = DepthType.Cv16S;
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_S32:
            case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                dstType = DepthType.Cv32S;
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_FLT:
            case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                dstType = DepthType.Cv32F;
                break;

            case AVSampleFormat.AV_SAMPLE_FMT_DBL:
            case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
            // Emgu CV has no 64-bit integer depth, so fall back to Cv64F
            case AVSampleFormat.AV_SAMPLE_FMT_S64:
            case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                dstType = DepthType.Cv64F;
                break;

            default:
                throw new FFmpegException(FFmpegException.NotSupportFormat);
            }

            int planar      = ffmpeg.av_sample_fmt_is_planar((AVSampleFormat)frame.AVFrame.format);
            int planes      = planar != 0 ? frame.AVFrame.channels : 1;
            int block_align = ffmpeg.av_get_bytes_per_sample((AVSampleFormat)frame.AVFrame.format) * (planar != 0 ? 1 : frame.AVFrame.channels);
            int stride      = frame.AVFrame.nb_samples * block_align;

            Mat mat = new Mat(planes, frame.AVFrame.nb_samples, dstType, (planar != 0 ? 1 : frame.AVFrame.channels));

            for (int i = 0; i < planes; i++)
            {
                // Copy each frame plane into the corresponding Mat row (destination first, matching examples #1 and #2).
                FFmpegHelper.CopyMemory(mat.DataPointer + i * stride, frame.Data[i], stride);
            }
            return mat;
        }
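Round-tripping through the two Emgu CV helpers (#3 then #2) should reproduce the original layout. A minimal sketch, assuming the AudioFrame constructor seen above allocates writable buffers for the requested format:

        // Packed 16-bit stereo: 1 plane, so the Mat comes out as 1 x 1024 with 2 channels (Cv16S).
        AVSampleFormat fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
        AudioFrame src = new AudioFrame(2, 1024, fmt, 48000);
        // ... fill src.Data[0] with 1024 interleaved stereo samples ...

        using (Mat mat = AudioFrameToMat(src))
        {
            AudioFrame dst = MatToAudioFrame(mat, fmt, 48000);
            // dst has the same channel count, sample count and sample format as src.
        }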