Example #1
        private void Load(Stream stream)
        {
            if (!stream.CanRead)
            {
                throw new ApplicationException("Unable to read stream");
            }

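            // open the media source and gather stream information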
            ffContext = FFmpegContext.Read(stream);
            ffContext.FindStreamInfo();

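            // select the video stream for decoding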
            ffContext.SelectStream(AVMediaType.AVMEDIA_TYPE_VIDEO);

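            // cache the basic video parameters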
            width     = ffContext.imageWidth;
            height    = ffContext.imageHeight;
            frametime = ffContext.videoFrametime;
            length    = ffContext.videoDuration;

            if (width <= 0 || height <= 0)
            {
                throw new ApplicationException("Invalid video size: " + width.ToString() + "x" + height.ToString());
            }

            // set up resamplers and other format converters if needed
            ffContext.ConvertToFormat(AVPixelFormat.AV_PIX_FMT_BGRA);

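            // one frame's worth of BGRA pixels (4 bytes per pixel)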
            bytes = new byte[width * height * 4];
        }
Example #2
        public static byte[] SoundFromFileResample(string path, int sampleRate, int channels, ushort sampleFormatSDL, ResampleQuality resampleQuality = ResampleQuality.High)
        {
            AVSampleFormat targetFormat;

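            // map the requested SDL sample format to the FFmpeg equivalent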
            switch (sampleFormatSDL)
            {
            case SDL.AUDIO_S16:
                targetFormat = AVSampleFormat.AV_SAMPLE_FMT_S16;
                break;

            case SDL.AUDIO_F32:
                targetFormat = AVSampleFormat.AV_SAMPLE_FMT_FLT;
                break;

            case SDL.AUDIO_S32:
                targetFormat = AVSampleFormat.AV_SAMPLE_FMT_S32;
                break;

            default:
                throw new ApplicationException("Could not map SDL audio format to AVSampleFormat: " + sampleFormatSDL.ToString());
            }

            using (FFmpegContext ffContext = FFmpegContext.Read(path))
            {
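                // select the audio stream for decoding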
                ffContext.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);

                // set up resamplers and other format converters if needed
                ffContext.ConvertToFormat(targetFormat, sampleRate, channels, resampleQuality);

                // FFmpeg only approximates stream durations, so this size is
                // an estimate, but it is usually close to the real length.
                byte[] bytes = new byte[ffContext.audioBytesTotal];

                // read all data from frames
                long offset = 0;
                while (ffContext.ReadFrame())
                {
                    long frameSize = ffContext.GetFrameBufferSize();
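                    // grow the buffer if the size estimate was too small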
                    if (offset + frameSize > bytes.Length)
                    {
                        Array.Resize(ref bytes, (int)(offset + frameSize));
                    }

                    offset += ffContext.GetFrameData(ref bytes, (int)offset);
                }

                return bytes;
            }
        }
Example #3
        public static byte[] SoundFromFile(string path, out int sampleRate, out int channels, out ushort sampleFormatSDL)
        {
            using (FFmpegContext ffContext = FFmpegContext.Read(path))
            {
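                // select the audio stream for decoding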
                ffContext.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);

                sampleRate = ffContext.audioSampleRate;
                channels   = ffContext.audioChannels;

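                // map the decoded FFmpeg sample format to the matching SDL audio format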
                AVSampleFormat sampleFormat = ffContext.audioSampleFormat;
                switch (sampleFormat)
                {
                case AVSampleFormat.AV_SAMPLE_FMT_S16:
                    sampleFormatSDL = SDL.AUDIO_S16;
                    break;

                case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                    sampleFormatSDL = SDL.AUDIO_F32;
                    break;

                case AVSampleFormat.AV_SAMPLE_FMT_S32:
                    sampleFormatSDL = SDL.AUDIO_S32;
                    break;

                default:
                    throw new ApplicationException("Could not map AVSampleFormat to SDL audio format: " + sampleFormat.ToString());
                }

                // FFmpeg only approximates stream durations, so this size is
                // an estimate, but it is usually close to the real length.
                byte[] bytes = new byte[ffContext.audioBytesTotal];

                // read all data from frames
                long offset = 0;
                while (ffContext.ReadFrame())
                {
                    long frameSize = ffContext.GetFrameBufferSize();
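                    // grow the buffer if the size estimate was too small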
                    if (offset + frameSize > bytes.Length)
                    {
                        Array.Resize(ref bytes, (int)(offset + frameSize));
                    }

                    offset += ffContext.GetFrameData(ref bytes, (int)offset);
                }

                return bytes;
            }
        }