/// <summary>
/// Picks the encoder output media type whose bitrate is closest to the requested
/// one, among types whose channel count matches the input and whose sample rate
/// is at least the input's. Returns null when no candidate qualifies.
/// </summary>
/// <param name="audioSubtype">Media Foundation audio subtype GUID to query encoders for.</param>
/// <param name="inputFormat">Format of the audio to be encoded.</param>
/// <param name="desiredBitRate">Target bitrate in bits per second.</param>
public static MediaType SelectMediaType(Guid audioSubtype, WaveFormat inputFormat, int desiredBitRate)
{
    MediaType best = null;
    var bestDelta = int.MaxValue;
    foreach (var candidate in MediaFoundationEncoder.GetOutputMediaTypes(audioSubtype))
    {
        if (candidate.SampleRate < inputFormat.SampleRate || candidate.ChannelCount != inputFormat.Channels)
        {
            continue;
        }
        // AverageBytesPerSecond * 8 converts the media type's rate to bits/sec.
        var delta = Math.Abs(desiredBitRate - candidate.AverageBytesPerSecond * 8);
        // Strict '<' keeps the first of equally-close candidates, matching a
        // stable OrderBy(...).FirstOrDefault() selection.
        if (delta < bestDelta)
        {
            bestDelta = delta;
            best = candidate;
        }
    }
    return best;
}
/// <summary>
/// Stops the recording: disposes the wave-in device, snapshots the captured
/// stream into <c>Data</c>, and — when MP3 output was requested — re-encodes
/// the WAV bytes to MP3 via Media Foundation using temp files.
/// </summary>
private void Stop()
{
    if (IsStopped)
    {
        return;
    }
    IsStopped = true;

    WaveIn?.Dispose();
    WaveIn = null;

    if (Settings.Format is not (AudioFormat.Wav or AudioFormat.Mp3) || Stream is null)
    {
        return;
    }

    Stream.Position = 0;
    Data = Stream.ToArray();

    if (Settings.Format is not AudioFormat.Mp3)
    {
        return;
    }

    // MediaFoundationReader/Encoder work on file paths, so round-trip the
    // in-memory WAV through a temp file pair.
    var path1 = Path.GetTempFileName();
    var path2 = $"{path1}.mp3";
    try
    {
        File.WriteAllBytes(path1, Data);
        using var reader = new MediaFoundationReader(path1);

        // FIX: prefer a media type that matches the source's sample rate and
        // channel count instead of blindly taking the first advertised MP3
        // type, which may be incompatible with the input; fall back to the
        // first type only when no exact match exists.
        var mediaTypes = MediaFoundationEncoder
            .GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_MP3);
        var mediaType = mediaTypes
            .FirstOrDefault(mt => mt.SampleRate == reader.WaveFormat.SampleRate
                               && mt.ChannelCount == reader.WaveFormat.Channels)
            ?? mediaTypes.First();

        using var encoder = new MediaFoundationEncoder(mediaType);
        encoder.Encode(path2, reader);
        Data = File.ReadAllBytes(path2);
    }
    finally
    {
        // Best-effort cleanup of both temp files, even when encoding throws.
        foreach (var path in new[] { path1, path2 })
        {
            if (File.Exists(path))
            {
                File.Delete(path);
            }
        }
    }
}
/// <summary>
/// Window load handler: switches to the settings view when no recording
/// device is configured, and exits the application when the platform
/// advertises no MP3 encoder.
/// </summary>
private void AppWindow_Load(object sender, EventArgs e)
{
    if (AppPreferences.RecordingDevice is "")
    {
        SwitchView();
    }

    var mp3Supported = MediaFoundationEncoder
        .GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_MP3)
        .Any();
    if (!mp3Supported)
    {
        MessageBox.Show("The current platform does not support mp3 encoding.\nApplication will close.");
        Application.Exit();
    }
}
/// <summary>
/// Debug helper: encodes the given WAV once per compatible MP3 media type
/// (matching sample rate and channel count), producing one numbered output
/// file per type for manual comparison.
/// </summary>
public static void DevDbgPoc__________(string wav)
{
    try
    {
        var sourceFormat = getWaveFormat(wav);
        var compatible = MediaFoundationEncoder
            .GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_MP3)
            .Where(candidate => candidate != null
                && candidate.SampleRate == sourceFormat.SampleRate
                && candidate.ChannelCount == sourceFormat.Channels);
        var index = 0;
        foreach (var candidate in compatible)
        {
            index++;
            encode(wav, $@"{wav} - {index} - {ShortDescription(candidate)}.mp3", candidate);
        }
    }
    catch (Exception e)
    {
        Debug.WriteLine($@"Not a supported input file ({e.Message})");
    }
}
/// <summary>
/// Queries the encoder media types available for the currently selected
/// output format and caches them; inserts a "Not Supported" placeholder
/// entry when no encoder is found.
/// </summary>
private void TryGetSupportedMediaTypes()
{
    var mediaTypes = new List<MediaTypeViewModel>();
    foreach (var mf in MediaFoundationEncoder.GetOutputMediaTypes(SelectedOutputFormat.Guid))
    {
        mediaTypes.Add(new MediaTypeViewModel(mf));
    }
    if (mediaTypes.Count == 0)
    {
        mediaTypes.Add(new MediaTypeViewModel()
        {
            Name = "Not Supported",
            Description = "No encoder found for this output type"
        });
    }
    allMediaTypes[SelectedOutputFormat.Guid] = mediaTypes;
}
/// <summary>
/// Creates an MP3-file from the downloaded video file
/// </summary>
/// <param name="bytesPerSecond">Audio bitrate in bytes per second</param>
/// <param name="input">Path of the source media file</param>
/// <param name="output">Path of the MP3 file to create</param>
private void PerformConversion(int bytesPerSecond, string input, string output)
{
    // FIX: removed the single-entry Dictionary that was written and then
    // immediately read back with the same key — pure dead indirection.
    // Keep audio properties from the original video file
    var supportedMediaTypes = MediaFoundationEncoder
        .GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_MP3)
        .Where(m => m != null)
        .Where(m => m.SampleRate == inputWaveFormat.SampleRate)
        .Where(m => m.ChannelCount == inputWaveFormat.Channels)
        .ToList();

    // Prefer an exact bitrate match; otherwise fall back to any compatible type.
    var mediaType = supportedMediaTypes.FirstOrDefault(m => m.AverageBytesPerSecond == bytesPerSecond)
                    ?? supportedMediaTypes.FirstOrDefault();
    if (mediaType != null)
    {
        using (var reader = new MediaFoundationReader(input))
        {
            using (var encoder = new MediaFoundationEncoder(mediaType))
            {
                encoder.Encode(output, reader);
            }
        }
    }

    // Cleanup before throwing cancellation
    if (tokenSource.IsCancellationRequested)
    {
        File.Delete(output);
        File.Delete(input);
        tokenSource.Token.ThrowIfCancellationRequested();
    }

    UpdateWorkStatus(WorkStatus.Finished);
}
/// <summary>
/// Converts a WAV file to MP3, choosing the first MP3 media type whose
/// sample rate and channel count match the source. Does nothing when no
/// compatible encoder type exists; logs and rethrows on failure.
/// </summary>
/// <param name="wav">Path of the source WAV file.</param>
/// <param name="mp3">Output path; defaults to the WAV path with ".mp3" appended.</param>
public static void ConvertWavToMp3(string wav, string mp3 = null)
{
    try
    {
        mp3 ??= $"{wav}.mp3";
        var sourceFormat = getWaveFormat(wav);
        var match = MediaFoundationEncoder
            .GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_MP3)
            .FirstOrDefault(m => m != null
                && m.SampleRate == sourceFormat.SampleRate
                && m.ChannelCount == sourceFormat.Channels);
        if (match != null)
        {
            encode(wav, mp3, match);
        }
    }
    catch (Exception ex)
    {
        // Log under the current method's name, break into an attached
        // debugger, then let the caller see the original exception.
        System.Diagnostics.Trace.WriteLine(ex.Message, System.Reflection.MethodInfo.GetCurrentMethod().Name);
        if (System.Diagnostics.Debugger.IsAttached)
        {
            System.Diagnostics.Debugger.Break();
        }
        throw;
    }
}
/// <summary>
/// Filters one audio file through the configured DSP chain and writes the
/// result to <paramref name="outputFilePath"/>: checks freshness, validates
/// block alignment, encodes the filtered stream, copies tags, and logs timing.
/// </summary>
/// <param name="inputFilePath">Path of the audio file to read.</param>
/// <param name="outputFilePath">Path of the filtered file to produce.</param>
private void FilterFile(string inputFilePath, string outputFilePath)
{
    // nothing to do if the output file already exists and is newer than both the input file and Cross Time DSP's app.config file
    if (File.Exists(outputFilePath))
    {
        DateTime inputLastWriteTimeUtc = File.GetLastWriteTimeUtc(inputFilePath);
        DateTime outputLastWriteTimeUtc = File.GetLastWriteTimeUtc(outputFilePath);
        if ((outputLastWriteTimeUtc > inputLastWriteTimeUtc) && (outputLastWriteTimeUtc > this.Configuration.LastWriteTimeUtc))
        {
            this.log.ReportVerbose("'{0}': skipped as '{1}' is newer.", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath));
            return;
        }
    }

    // get input
    DateTime processingStartedUtc = DateTime.UtcNow;
    // NOTE(review): inputStream is not wrapped in a using; on the early return
    // below it is never disposed. Presumably FilterStream()/outputStream takes
    // ownership on the success path — confirm before changing.
    MediaFoundationReader inputStream = new MediaFoundationReader(inputFilePath);
    if (Constant.SampleBlockSizeInBytes % inputStream.WaveFormat.BlockAlign != 0)
    {
        // NOTE(review): this format string uses {0} twice and {1} once but is
        // given three arguments — the block size placeholder looks like it was
        // meant to be {1} and the alignment {2}; as written the alignment value
        // is never printed. Confirm and fix the format indices.
        this.log.ReportError("'{0}': cannot be processed as sample block size of {0} bytes is not an exact multiple of the input block alignment of {1} bytes.", Path.GetFileName(inputFilePath), Constant.SampleBlockSizeInBytes, inputStream.WaveFormat.BlockAlign);
        return;
    }

    // ensure output directory exists so that output file write succeeds
    string outputDirectoryPath = Path.GetDirectoryName(outputFilePath);
    if (String.IsNullOrEmpty(outputDirectoryPath) == false && Directory.Exists(outputDirectoryPath) == false)
    {
        Directory.CreateDirectory(outputDirectoryPath);
    }

    StreamPerformance streamMetrics;
    using (WaveStream outputStream = this.FilterStream(inputStream, out streamMetrics))
    {
        // do the filtering
        if (this.Stopping)
        {
            // if the stop flag was set during filtering outputStream will be null
            return;
        }

        // write output file
        MediaType outputMediaType;
        if (this.Configuration.Output.Encoding == Encoding.Wave)
        {
            // work around NAudio bug: MediaFoundationEncoder supports Wave files but GetOutputMediaTypes() fails on Wave
            outputMediaType = new MediaType(outputStream.WaveFormat);
        }
        else
        {
            // take the first encoder media type whose bit depth, channel count,
            // and sample rate all match the filtered stream's format
            List<MediaType> outputMediaTypes = MediaFoundationEncoder.GetOutputMediaTypes(Constant.EncodingGuids[this.Configuration.Output.Encoding]).Where(mediaType => mediaType.BitsPerSample == outputStream.WaveFormat.BitsPerSample && mediaType.ChannelCount == outputStream.WaveFormat.Channels && mediaType.SampleRate == outputStream.WaveFormat.SampleRate).ToList();
            if ((outputMediaTypes == null) || (outputMediaTypes.Count < 1))
            {
                this.log.ReportError("'{0}': no media type found for {1} bits per sample, {2} channels, at {3} kHz.", Path.GetFileName(inputFilePath), outputStream.WaveFormat.BitsPerSample, outputStream.WaveFormat.Channels, outputStream.WaveFormat.SampleRate);
                return;
            }
            outputMediaType = outputMediaTypes[0];
        }

        // encode, timing just the encode phase for the report below
        MediaFoundationEncoder outputEncoder = new MediaFoundationEncoder(outputMediaType);
        streamMetrics.EncodingStartedUtc = DateTime.UtcNow;
        outputEncoder.Encode(outputFilePath, outputStream);
        streamMetrics.EncodingStoppedUtc = DateTime.UtcNow;
    }

    // copy metadata: read tags from the input file, then apply them to the output
    Tag inputMetadata;
    using (FileStream inputMetadataStream = new FileStream(inputFilePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        using (TagFile inputTagFile = TagFile.Create(new StreamFileAbstraction(inputMetadataStream.Name, inputMetadataStream, inputMetadataStream)))
        {
            inputMetadata = inputTagFile.Tag;
        }
    }
    using (FileStream outputMetadataStream = new FileStream(outputFilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read))
    {
        using (TagFile outputTagFile = TagFile.Create(new StreamFileAbstraction(outputMetadataStream.Name, outputMetadataStream, outputMetadataStream)))
        {
            if (this.TryApplyMetadata(inputMetadata, outputTagFile.Tag))
            {
                outputTagFile.Save();
            }
        }
    }

    // report total wall-clock time plus a per-phase breakdown whose shape
    // depends on which filtering passes (reverse/forward) actually ran
    DateTime processingStoppedUtc = DateTime.UtcNow;
    TimeSpan encodingTime = streamMetrics.EncodingStoppedUtc - streamMetrics.EncodingStartedUtc;
    TimeSpan processingTime = processingStoppedUtc - processingStartedUtc;
    if (streamMetrics.HasReverseTime)
    {
        TimeSpan reverseBufferTime = streamMetrics.ReverseBufferCompleteUtc - streamMetrics.StartTimeUtc;
        TimeSpan reverseProcessingTime = streamMetrics.ReverseTimeCompleteUtc - streamMetrics.ReverseBufferCompleteUtc;
        if (streamMetrics.HasForwardTime)
        {
            TimeSpan forwardProcessingTime = streamMetrics.CompleteTimeUtc - streamMetrics.ReverseTimeCompleteUtc;
            this.log.ReportVerbose("'{0}' to '{1}' in {2} (buffer {3}, reverse {4}, forward {5}, encode {6}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), reverseBufferTime.ToString(Constant.ElapsedTimeFormat), reverseProcessingTime.ToString(Constant.ElapsedTimeFormat), forwardProcessingTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
        }
        else
        {
            this.log.ReportVerbose("'{0}' to '{1}' in {2} (buffer {3}, reverse {4}, encode {5}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), reverseBufferTime.ToString(Constant.ElapsedTimeFormat), reverseProcessingTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
        }
    }
    else
    {
        TimeSpan filteringTime = streamMetrics.CompleteTimeUtc - streamMetrics.StartTimeUtc;
        this.log.ReportVerbose("'{0}' to '{1}' in {2} (load+filter {3}, encode {4}).", Path.GetFileName(inputFilePath), Path.GetFileName(outputFilePath), processingTime.ToString(Constant.ElapsedTimeFormat), filteringTime.ToString(Constant.ElapsedTimeFormat), encodingTime.ToString(Constant.ElapsedTimeFormat));
    }
}