/// <summary>
/// Decodes the audio stream of a media file and resamples it to the requested
/// sample rate, channel count and SDL sample format.
/// </summary>
/// <param name="path">Path of the media file to decode.</param>
/// <param name="sampleRate">Target sample rate in Hz.</param>
/// <param name="channels">Target channel count.</param>
/// <param name="sampleFormatSDL">Target SDL audio format (AUDIO_S16, AUDIO_F32 or AUDIO_S32).</param>
/// <param name="resampleQuality">Quality preset passed to the resampler.</param>
/// <returns>Raw interleaved sample data in the requested format.</returns>
/// <exception cref="ApplicationException">Thrown when the SDL format has no AVSampleFormat equivalent.</exception>
public static byte[] SoundFromFileResample(string path, int sampleRate, int channels, ushort sampleFormatSDL, ResampleQuality resampleQuality = ResampleQuality.High)
{
    // Map the SDL format to the equivalent FFmpeg sample format.
    AVSampleFormat targetFormat;
    switch (sampleFormatSDL)
    {
        case SDL.AUDIO_S16:
            targetFormat = AVSampleFormat.AV_SAMPLE_FMT_S16;
            break;
        case SDL.AUDIO_F32:
            targetFormat = AVSampleFormat.AV_SAMPLE_FMT_FLT;
            break;
        case SDL.AUDIO_S32:
            targetFormat = AVSampleFormat.AV_SAMPLE_FMT_S32;
            break;
        default:
            throw new ApplicationException("Could not map SDL audio format to AVSampleFormat: " + sampleFormatSDL.ToString());
    }

    using (FFmpegContext ffContext = FFmpegContext.Read(path))
    {
        ffContext.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);

        // setup resamplers and other format converters if needed
        ffContext.ConvertToFormat(targetFormat, sampleRate, channels, resampleQuality);

        // FFmpeg only approximates stream durations but is
        // usually not far from the real duration.
        byte[] bytes = new byte[ffContext.audioBytesTotal];

        // read all data from frames
        long offset = 0;
        while (ffContext.ReadFrame())
        {
            long frameSize = ffContext.GetFrameBufferSize();
            if (offset + frameSize > bytes.Length)
            {
                // Estimate undershot: grow to fit this frame.
                Array.Resize(ref bytes, (int)(offset + frameSize));
            }
            offset += ffContext.GetFrameData(ref bytes, (int)offset);
        }

        // Estimate overshot: trim the unused tail so callers do not
        // receive (and play back) trailing zero bytes as silence.
        if (offset < bytes.Length)
        {
            Array.Resize(ref bytes, (int)offset);
        }

        return bytes;
    }
}
/// <summary>
/// Decodes the audio stream of a media file in its native format and reports
/// that format to the caller via the out parameters.
/// </summary>
/// <param name="path">Path of the media file to decode.</param>
/// <param name="sampleRate">Receives the stream's sample rate in Hz.</param>
/// <param name="channels">Receives the stream's channel count.</param>
/// <param name="sampleFormatSDL">Receives the SDL audio format equivalent of the stream's sample format.</param>
/// <returns>Raw interleaved sample data in the stream's native format.</returns>
/// <exception cref="ApplicationException">Thrown when the stream's AVSampleFormat has no SDL equivalent.</exception>
public static byte[] SoundFromFile(string path, out int sampleRate, out int channels, out ushort sampleFormatSDL)
{
    using (FFmpegContext ffContext = FFmpegContext.Read(path))
    {
        ffContext.SelectStream(AVMediaType.AVMEDIA_TYPE_AUDIO);

        sampleRate = ffContext.audioSampleRate;
        channels = ffContext.audioChannels;

        // Map the FFmpeg sample format to the equivalent SDL format.
        AVSampleFormat sampleFormat = ffContext.audioSampleFormat;
        switch (sampleFormat)
        {
            case AVSampleFormat.AV_SAMPLE_FMT_S16:
                sampleFormatSDL = SDL.AUDIO_S16;
                break;
            case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                sampleFormatSDL = SDL.AUDIO_F32;
                break;
            case AVSampleFormat.AV_SAMPLE_FMT_S32:
                sampleFormatSDL = SDL.AUDIO_S32;
                break;
            default:
                throw new ApplicationException("Could not map AVSampleFormat to SDL audio format: " + sampleFormat.ToString());
        }

        // FFmpeg only approximates stream durations but is
        // usually not far from the real duration.
        byte[] bytes = new byte[ffContext.audioBytesTotal];

        // read all data from frames
        long offset = 0;
        while (ffContext.ReadFrame())
        {
            long frameSize = ffContext.GetFrameBufferSize();
            if (offset + frameSize > bytes.Length)
            {
                // Estimate undershot: grow to fit this frame.
                Array.Resize(ref bytes, (int)(offset + frameSize));
            }
            offset += ffContext.GetFrameData(ref bytes, (int)offset);
        }

        // Estimate overshot: trim the unused tail so callers do not
        // receive (and play back) trailing zero bytes as silence.
        if (offset < bytes.Length)
        {
            Array.Resize(ref bytes, (int)offset);
        }

        return bytes;
    }
}
/// <summary>
/// Releases the FFmpeg context and shuts down the background load thread.
/// Safe to call more than once.
/// </summary>
public void Dispose()
{
    // Wake and join the loader thread BEFORE tearing down the FFmpeg
    // context: the original order disposed ffContext first, letting a
    // still-running load thread touch a disposed object.
    if (loadThread != null)
    {
        // Unblock the thread in case it is waiting for the next-frame signal.
        nextFrameEvent.Set();
        if (loadThread.IsAlive)
        {
            loadThread.Join();
        }
        // Clear the field so a repeated Dispose does not join again.
        loadThread = null;
    }

    if (ffContext != null)
    {
        ffContext.Dispose();
        ffContext = null;
    }
}
/// <summary>
/// Starts playback, or restarts it from the beginning when frames have
/// already been presented.
/// </summary>
public void Start()
{
    // Nothing presented yet: the decoder just needs a kick to begin.
    if (presentedFrames == 0)
    {
        nextFrameEvent.Set();
        return;
    }

    // TODO: seek back to first frame
    // Restart path: tear down the old context, reset playback state
    // and reopen the file from scratch.
    ffContext.Dispose();
    ffContext = null;

    presentedFrames = 0;
    decodedFrames = 0;
    currentTime = 0.0;
    nextFramePts = 0.0;

    Load(path);
    nextFrameEvent.Set();
}
/// <summary>
/// Creates an FFmpeg context for writing to <paramref name="path"/>.
/// </summary>
/// <param name="path">Destination file path.</param>
/// <returns>A new <see cref="FFmpegContext"/> bound to the given path.</returns>
public static FFmpegContext Write(string path) => new FFmpegContext(path);