/// <summary>
/// Converts a wave file to an MP3 file on disk, resampling first when the
/// platform MP3 encoder does not accept the source sample rate.
/// </summary>
/// <param name="waveFileName">Path of the input wave file.</param>
/// <param name="mp3FileName">Path of the MP3 file to create.</param>
public void ConvertFromWave(string waveFileName, string mp3FileName)
{
    var encoderFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!encoderFormats.Any())
    {
        ColorConsole.WriteLine("The current platform does not support mp3 encoding.", ConsoleColor.DarkRed);
        return;
    }

    IWaveSource audioSource;
    try
    {
        audioSource = CodecFactory.Instance.GetCodec(waveFileName);

        // Resample when no encoder format accepts the source sample rate.
        // NOTE(review): the channel log line always prints "2", but only the
        // sample rate is actually changed here — the channel count is untouched.
        if (encoderFormats.All(f => f.SampleRate != audioSource.WaveFormat.SampleRate
                                    && f.Channels == audioSource.WaveFormat.Channels))
        {
            int targetRate = encoderFormats
                .OrderBy(f => Math.Abs(audioSource.WaveFormat.SampleRate - f.SampleRate))
                .First(f => f.Channels == audioSource.WaveFormat.Channels)
                .SampleRate;

            ColorConsole.WriteLine("Samplerate {0} -> {1}", ConsoleColor.DarkRed, audioSource.WaveFormat.SampleRate, targetRate);
            ColorConsole.WriteLine("Channels {0} -> {1}", ConsoleColor.DarkRed, audioSource.WaveFormat.Channels, 2);
            audioSource = audioSource.ChangeSampleRate(targetRate);
        }
    }
    catch (Exception)
    {
        ColorConsole.WriteLine("Mp3 Format Not Supported", ConsoleColor.DarkRed);
        return;
    }

    using (audioSource)
    using (var mp3Encoder = MediaFoundationEncoder.CreateMP3Encoder(audioSource.WaveFormat, mp3FileName))
    {
        // Pump roughly one second of audio per iteration.
        var chunk = new byte[audioSource.WaveFormat.BytesPerSecond];
        int bytesRead;
        while ((bytesRead = audioSource.Read(chunk, 0, chunk.Length)) > 0)
        {
            mp3Encoder.Write(chunk, 0, bytesRead);
            ColorConsole.WriteLine("Wav -> Mp3 Progress: {0:P}/{1:P}", ConsoleColor.DarkGreen,
                (double)audioSource.Position / audioSource.Length, 1);
        }
    }
}
/// <summary>
/// Converts an <see cref="IWaveSource"/> (e.g. decoded from an .mp4 video) to an .mp3
/// file saved at PATH + videoTitle + ".mp3", then deletes the matching .mp4 file.
/// </summary>
/// <param name="source">Decoded audio source, e.g. CSCore.Codecs.CodecFactory.Instance.GetCodec(new Uri(video.Uri)).</param>
/// <param name="videoTitle">The video title, used as the output file name.</param>
/// <returns>true when the conversion failed; false on success.</returns>
private static bool ConvertToMp3(IWaveSource source, string videoTitle)
{
    var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!supportedFormats.Any())
    {
        Console.WriteLine("The current platform does not support mp3 encoding.");
        return true;
    }

    try
    {
        // The encoder does not support the input sample rate -> convert to the
        // closest supported rate with the same channel count.
        if (supportedFormats.All(
            x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
        {
            int sampleRate = supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                             .First(x => x.Channels == source.WaveFormat.Channels)
                             .SampleRate;
            Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
            Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
            source = source.ChangeSampleRate(sampleRate);
        }
    }
    catch (Exception)
    {
        // BUG FIX: the sibling converters in this file report unsupported
        // formats via their failure return path; previously this method let
        // the exception escape to the caller instead.
        Console.WriteLine("Format not supported.");
        return true;
    }

    using (source)
    {
        using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, PATH + videoTitle + ".mp3"))
        {
            // Pump roughly one second of audio per iteration.
            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                encoder.Write(buffer, 0, read);
            }
        }
    }
    // File.Delete is a no-op when the .mp4 does not exist.
    File.Delete(PATH + videoTitle + ".mp4");
    return false;
}
/// <summary>
/// Encodes a WAV byte buffer to AAC via Media Foundation and returns the encoded bytes.
/// </summary>
/// <param name="audioFile">Raw contents of a WAV file.</param>
/// <returns>The AAC-encoded audio.</returns>
/// <exception cref="ApplicationException">Thrown when the platform has no AAC encoder.</exception>
public byte[] FromWav(byte[] audioFile)
{
    var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MPEG_HEAAC);
    _logger.Verbose("Checking for support of AAC encoding.");
    if (!supportedFormats.Any())
    {
        _logger.Verbose("The current platform does not support AAC encoding.");
        throw new ApplicationException("Current platform does not support AAC encoding.");
    }

    MemoryStream inStream = null;
    MemoryStream outStream = null;
    IWaveSource source = null;
    // Disposing `source` inside the using block also covers the wrapped
    // decoder; this flag prevents a double Dispose in the finally block.
    bool sourceDisposed = false;
    try
    {
        _logger.Verbose("Creating input stream and decoder.");
        inStream = new MemoryStream(audioFile);
        source = new CSCore.MediaFoundation.MediaFoundationDecoder(inStream);

        // In case the encoder does not support the input sample rate, convert
        // to the closest supported rate with the same channel count.
        _logger.Verbose("Searching for the optimal sample rate.");
        _logger.Verbose($"Input wave format: {source.WaveFormat.ToString()}");
        int sampleRate = supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                         .First(x => x.Channels == source.WaveFormat.Channels)
                         .SampleRate;
        if (source.WaveFormat.SampleRate != sampleRate)
        {
            _logger.Verbose($"Changing sample rate of the source: {source.WaveFormat.SampleRate} -> {sampleRate}.");
            source = source.ChangeSampleRate(sampleRate);
        }

        _logger.Verbose("Encoding WAV to AAC");
        outStream = new MemoryStream();
        using (source)
        {
            using (var encoder = MediaFoundationEncoder.CreateAACEncoder(source.WaveFormat, outStream))
            {
                // Pump roughly one second of audio per iteration.
                byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                int read;
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    encoder.Write(buffer, 0, read);
                }
            }
            sourceDisposed = true;
        }
        _logger.Verbose("Encoding is complete");
        // MemoryStream.ToArray is valid even after the stream is closed.
        return outStream.ToArray();
    }
    finally
    {
        _logger.Verbose("Cleaning up resources");
        if (inStream != null)
        {
            inStream.Dispose();
        }
        if (source != null && !sourceDisposed)
        {
            source.Dispose();
        }
        if (outStream != null)
        {
            // BUG FIX: this branch previously called inStream.Dispose() a
            // second time and leaked outStream; dispose the output stream.
            outStream.Dispose();
        }
    }
}
/// <summary>
/// Console entry point: converts the .wav file named by the first argument to
/// MP3, buffering the encoded audio in memory, then writes it to d:\output.mp3.
/// </summary>
static void Main(string[] args)
{
    // BUG FIX: removed the debug override `args = new string[] { "C:\\fam.wav" };`
    // which clobbered the real command-line input.
    if (args.Length < 1 || !File.Exists(args[0]) ||
        // ReSharper disable once PossibleNullReferenceException
        !Path.GetExtension(args[0]).Equals(".wav", StringComparison.InvariantCultureIgnoreCase))
    {
        Console.WriteLine("Invalid input.");
        return;
    }

    var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!supportedFormats.Any())
    {
        Console.WriteLine("The current platform does not support mp3 encoding.");
        return;
    }

    IWaveSource source;
    try
    {
        Console.WriteLine("getting source");
        source = CodecFactory.Instance.GetCodec(args[0]);
        Console.WriteLine("Source retreived");
        Console.WriteLine(source.ToString());
        if (supportedFormats.All(
            x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
        {
            //the encoder does not support the input sample rate -> convert it to any supported samplerate
            //choose the best sample rate with stereo (in order to make simple, we always use stereo in this sample)
            int sampleRate = supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                             .First(x => x.Channels == source.WaveFormat.Channels)
                             .SampleRate;
            Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
            Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
            source = source.ChangeSampleRate(sampleRate);
        }
    }
    catch (Exception)
    {
        Console.WriteLine("Format not supported.");
        return;
    }

    // Encode into memory first, then persist to disk.
    byte[] mp3Bytes;
    using (source)
    using (var memStream = new MemoryStream())
    {
        using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, memStream))
        {
            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                encoder.Write(buffer, 0, read);
                Console.CursorLeft = 0;
                Console.Write("{0:P}/{1:P}", (double)source.Position / source.Length, 1);
            }
        }
        // BUG FIX: the buffered bytes were previously copied to the file
        // BEFORE the encoder was disposed, so the MP3 container could be
        // left unfinalized. MemoryStream.ToArray is documented to work even
        // after the stream has been closed, so reading here is safe.
        mp3Bytes = memStream.ToArray();
    }
    File.WriteAllBytes("d:\\output.mp3", mp3Bytes);

    Console.WriteLine("done");
    // BUG FIX: was `while (true) { }`, a busy-wait that pinned a CPU core;
    // wait for a key press to keep the console window open instead.
    Console.ReadKey();
}
/// <summary>
/// Console entry point: converts the .wav file named by the first argument to
/// "output.mp3" in the working directory, resampling first when the platform
/// MP3 encoder does not accept the source sample rate.
/// </summary>
static void Main(string[] args)
{
    bool validInput = args.Length >= 1
        && File.Exists(args[0])
        // ReSharper disable once PossibleNullReferenceException
        && Path.GetExtension(args[0]).Equals(".wav", StringComparison.InvariantCultureIgnoreCase);
    if (!validInput)
    {
        Console.WriteLine("Invalid input.");
        return;
    }

    var encoderFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!encoderFormats.Any())
    {
        Console.WriteLine("The current platform does not support mp3 encoding.");
        return;
    }

    IWaveSource waveSource;
    try
    {
        waveSource = CodecFactory.Instance.GetCodec(args[0]);

        // No encoder format accepts the source sample rate -> resample to the
        // closest supported rate with a matching channel count.
        if (encoderFormats.All(f => f.SampleRate != waveSource.WaveFormat.SampleRate
                                    && f.Channels == waveSource.WaveFormat.Channels))
        {
            int targetRate = encoderFormats
                .OrderBy(f => Math.Abs(waveSource.WaveFormat.SampleRate - f.SampleRate))
                .First(f => f.Channels == waveSource.WaveFormat.Channels)
                .SampleRate;
            Console.WriteLine("Samplerate {0} -> {1}", waveSource.WaveFormat.SampleRate, targetRate);
            Console.WriteLine("Channels {0} -> {1}", waveSource.WaveFormat.Channels, 2);
            waveSource = waveSource.ChangeSampleRate(targetRate);
        }
    }
    catch (Exception)
    {
        Console.WriteLine("Format not supported.");
        return;
    }

    using (waveSource)
    using (var mp3Encoder = MediaFoundationEncoder.CreateMP3Encoder(waveSource.WaveFormat, "output.mp3"))
    {
        // Pump roughly one second of audio per iteration, showing progress
        // on a single console line.
        var chunk = new byte[waveSource.WaveFormat.BytesPerSecond];
        int bytesRead;
        while ((bytesRead = waveSource.Read(chunk, 0, chunk.Length)) > 0)
        {
            mp3Encoder.Write(chunk, 0, bytesRead);
            Console.CursorLeft = 0;
            Console.Write("{0:P}/{1:P}", (double)waveSource.Position / waveSource.Length, 1);
        }
    }
}
/// <summary>
/// Converts a .wav file to an .mp3 file via Media Foundation, logging write
/// progress in ten-percent increments.
/// </summary>
/// <param name="wavFilePath">Path of the existing input .wav file.</param>
/// <param name="mp3FileName">Path of the .mp3 file to create.</param>
/// <returns>true on success; false when the input file is missing, the
/// platform cannot encode MP3, or the input format is unsupported.</returns>
private bool ConvertWavToMp3(string wavFilePath, string mp3FileName)
{
    if (!File.Exists(wavFilePath))
    {
        Log.Error($"Unable to find wav file {wavFilePath}");
        return false;
    }

    var encoderFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!encoderFormats.Any())
    {
        Log.Error("The current platform does not support mp3 encoding.");
        return false;
    }

    IWaveSource waveSource;
    try
    {
        waveSource = CodecFactory.Instance.GetCodec(wavFilePath);

        // No encoder format accepts the source sample rate -> resample to the
        // closest supported rate with a matching channel count.
        if (encoderFormats.All(f => f.SampleRate != waveSource.WaveFormat.SampleRate
                                    && f.Channels == waveSource.WaveFormat.Channels))
        {
            int targetRate = encoderFormats
                .OrderBy(f => Math.Abs(waveSource.WaveFormat.SampleRate - f.SampleRate))
                .First(f => f.Channels == waveSource.WaveFormat.Channels)
                .SampleRate;
            Log.Info($"Samplerate {waveSource.WaveFormat.SampleRate} -> {targetRate}");
            Log.Info($"Channels {waveSource.WaveFormat.Channels} -> {2}");
            waveSource = waveSource.ChangeSampleRate(targetRate);
        }
    }
    catch (Exception ex)
    {
        Log.Error("Format not supported.", ex);
        return false;
    }

    using (waveSource)
    using (var mp3Encoder = MediaFoundationEncoder.CreateMP3Encoder(waveSource.WaveFormat, mp3FileName))
    {
        Log.Info($"\nWriting {mp3FileName}. . .");
        // Pump roughly one second of audio per iteration.
        var chunk = new byte[waveSource.WaveFormat.BytesPerSecond];
        int bytesRead;
        double nextLogAt = .10;      // log once each time progress crosses this threshold
        double logStep = .10;        // ten-percent increments
        while ((bytesRead = waveSource.Read(chunk, 0, chunk.Length)) > 0)
        {
            mp3Encoder.Write(chunk, 0, bytesRead);
            Console.CursorLeft = 0;
            double progress = (double)waveSource.Position / waveSource.Length;
            if (progress >= nextLogAt)
            {
                Log.Info(string.Format("{0:P}/{1:P}", progress, 1));
                nextLogAt += logStep;
            }
        }
    }
    return true;
}