Example #1
        protected override void OnOpen()
        {
            _validated = Context.QueryString["token"] == _token;
            if (!_validated)
            {
                Context.WebSocket.Close(4000);
                return; // do not start the capture for an unauthenticated client
            }
            _capture = new WasapiLoopbackCapture(0, new WaveFormat());
            _capture.Initialize();
            _capture.Start();
            var wsStream = new WebSocketStream(this);

            Console.WriteLine($"Captured audio format: {_capture.WaveFormat}");
            IWriteable encoder = null;

            switch (_format)
            {
            case AudioFormat.AAC:
                encoder = new AacEncoder(_capture.WaveFormat, wsStream, 128000,
                                         TranscodeContainerTypes.MFTranscodeContainerType_ADTS);
                break;

            case AudioFormat.MP3:
                encoder = MediaFoundationEncoder.CreateMP3Encoder(_capture.WaveFormat, wsStream, 320000);
                break;
            }

            _capture.DataAvailable += (sender, e) => { encoder?.Write(e.Data, e.Offset, e.ByteCount); };
        }
Example #2
        /// <summary>
        /// Create a new file based on the given filename and start recording to it.
        /// Filename must include its full path.
        /// </summary>
        /// <param name="fileName">The name of a file to be created. Include its full path</param>
        /// <param name="codec">The codec to record in</param>
        /// <param name="bitRate">The bitrate of the file</param>
        /// <param name="channels">The channels to record</param>
        public void StartCapture(string fileName, AvailableCodecs codec, int bitRate, Channels channels)
        {
            if (!ReadyToRecord())
            {
                throw new NullReferenceException("There is no SoundInSource configured for the recorder.");
            }

            fileName = $"{fileName}.{codec.ToString().ToLower()}";

            WaveFormat waveSource;

            switch (channels)
            {
            case Channels.Mono:
                waveSource = _soundInSource.ToMono().WaveFormat;
                break;

            case Channels.Stereo:
                waveSource = _soundInSource.ToStereo().WaveFormat;
                break;

            default:
                throw new ArgumentException("The selected channel option could not be found.");
            }

            switch (codec)
            {
            case AvailableCodecs.MP3:
                _writer = MediaFoundationEncoder.CreateMP3Encoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.AAC:
                _writer = MediaFoundationEncoder.CreateAACEncoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.WMA:
                _writer = MediaFoundationEncoder.CreateWMAEncoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.WAV:
                _writer = new WaveWriter(fileName, waveSource);
                break;

            default:
                throw new ArgumentException("The specified codec was not found.");
            }

            byte[] buffer = new byte[waveSource.BytesPerSecond];

            _soundInSource.DataAvailable += (s, e) =>
            {
                int read = _waveStream.Read(buffer, 0, buffer.Length);
                _writer.Write(buffer, 0, read);
            };

            // Start recording
            _soundInSource.SoundIn.Start();
            _state = RecordingState.Recording;
        }
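A hypothetical stop counterpart to the StartCapture method above (not part of the original example): it reuses the _soundInSource, _writer and _state fields shown there and assumes the RecordingState enum has a Stopped member.

        public void StopCapture()
        {
            if (_state != RecordingState.Recording)
            {
                return;
            }

            // stop delivering DataAvailable callbacks before disposing the writer
            _soundInSource.SoundIn.Stop();

            // disposing the writer flushes and finalizes the encoded file
            _writer.Dispose();
            _state = RecordingState.Stopped; // assumed enum member
        }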
Example #3
        public HttpResponseMessage MakeCoolTrack([FromForm] KaraokeElems value)
        {
            string name = value.TrackName;
            // Get the uploaded voice recording
            var voiceUpload = value.VoiceFile;

            try
            {
                var voiceFile = "Assets/Yours/" + voiceUpload.FileName.Substring(0, voiceUpload.FileName.Length - 4) +
                                "-your-voice.mp3";
                var instFile = "Assets/Nirvana Smells Like Teen Spirit Inst.mp3";

                var trackFile = "Assets/Yours/" + voiceUpload.FileName.Substring(0, voiceUpload.FileName.Length - 4) +
                                "-your-track.mp3";

                // Save the uploaded voice recording on the server
                if (voiceUpload.Length > 0)
                {
                    using (var fileStream = new FileStream(voiceFile, FileMode.Create))
                    {
                        voiceUpload.CopyTo(fileStream);
                    }
                }

                var voiceWaveSource = CodecFactory.Instance.GetCodec(voiceFile);
                var instWaveSource  = CodecFactory.Instance.GetCodec(instFile);

                using (var cAurio = new AudioControler())
                {
                    VolumeSource vol1, vol2;
                    var          yourTrack = cAurio.MixAudioAndVoice(instWaveSource, out vol1, voiceWaveSource, out vol2);

                    vol1.Volume = 0.5f;
                    vol2.Volume = 0.5f;

                    using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(yourTrack.WaveFormat, trackFile))
                    {
                        byte[] buffer = new byte[yourTrack.WaveFormat.BytesPerSecond];
                        int    read;
                        while ((read = yourTrack.Read(buffer, 0, buffer.Length)) > 0)
                        {
                            encoder.Write(buffer, 0, read);

                            //Console.CursorLeft = 0;
                            //Console.Write("{0:P}/{1:P}", (double) yourTrack.Position / yourTrack.Length, 1);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                throw new HttpResponseException(HttpStatusCode.InternalServerError);
            }

            return(new HttpResponseMessage(HttpStatusCode.OK));
        }
Example #4
 public static void ToAudioAgain(string base64Chunks, string outputFile, WaveFormat wav)
 {
     using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(wav, outputFile))
     {
         foreach (var element in base64Chunks.GetByteChunks())
         {
             encoder.Write(element, 0, element.Length);
         }
     }
 }
Example #5
 public void CanEncodeToMP3()
 {
     using (var source = GlobalTestConfig.TestWav2S())
     {
         using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, _targetfilename))
         {
             MediaFoundationEncoder.EncodeWholeSource(encoder, source);
         }
     }
 }
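A minimal standalone sketch of the same EncodeWholeSource pattern, assuming CSCore's CodecFactory and MediaFoundationEncoder; the input and output paths are placeholders.

 public static void ConvertWavToMp3Sketch()
 {
     using (var source = CSCore.Codecs.CodecFactory.Instance.GetCodec("input.wav"))
     using (var encoder = CSCore.MediaFoundation.MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, "output.mp3"))
     {
         // EncodeWholeSource reads the source to its end and writes every block to the encoder,
         // replacing the manual buffer loop used in several of the other examples.
         CSCore.MediaFoundation.MediaFoundationEncoder.EncodeWholeSource(encoder, source);
     }
 }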
Example #6
        async Task ISonosRequestHandler.HandleRequest(HttpListenerContext ctx)
        {
            // stream out the audio content to Sonos
            ctx.Response.ContentType = "audio/mp3";
            await ctx.Response.OutputStream.WriteAsync(new byte[1] {
                0
            }, 0, 1);

            var cancellationToken = new CancellationTokenSource();
            var memoryOffset      = 0;

            using (var memoryStream = new MemoryStream())
            {
                using (var audioEncoder = MediaFoundationEncoder.CreateMP3Encoder(_audioListener.WaveFormat, memoryStream))
                {
                    async void OnAudioAvailable(object sender, DataAvailableEventArgs e)
                    {
                        audioEncoder.Write(e.Data, e.Offset, e.ByteCount);
                        try
                        {
                            if (memoryStream.TryGetBuffer(out ArraySegment <byte> buffer))
                            {
                                var newOffset = (int)memoryStream.Position;
                                var length    = (int)newOffset - memoryOffset;

                                await ctx.Response.OutputStream.WriteAsync(buffer.Array, (int)memoryOffset, length);

                                memoryOffset = newOffset;
                            }

                            await ctx.Response.OutputStream.FlushAsync();
                        }
                        catch
                        {
                            cancellationToken.Cancel();
                        }
                    }

                    try
                    {
                        _audioListener.AudioAvailable += OnAudioAvailable;
                        while (!cancellationToken.IsCancellationRequested)
                        {
                            await Task.Delay(1000);
                        }
                    }
                    finally
                    {
                        _audioListener.AudioAvailable -= OnAudioAvailable;
                    }
                }
            }
        }
Example #7
        protected override void OnOpen()
        {
            _validated = Context.QueryString["token"] == _token;
            var capture = new WasapiLoopbackCapture();

            capture.Initialize();
            capture.Start();
            var wsStream = new WebSocketStream(this);
            var encoder  = MediaFoundationEncoder.CreateMP3Encoder(capture.WaveFormat, wsStream);

            capture.DataAvailable += (sender, e) => encoder.Write(e.Data, e.Offset, e.ByteCount);
        }
Example #8
        public static void StartRecording(String fileName, int bitRate = 192000)
        {
            capture = new WasapiLoopbackCapture();

            capture.Initialize();

            wasapiCaptureSource = new SoundInSource(capture);
            stereoSource        = wasapiCaptureSource.ToStereo();

            switch (System.IO.Path.GetExtension(fileName))
            {
            case ".mp3":
                encoderWriter = MediaFoundationEncoder.CreateMP3Encoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wma":
                encoderWriter = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".aac":
                encoderWriter = MediaFoundationEncoder.CreateAACEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wav":
                waveWriter = new WaveWriter(fileName, capture.WaveFormat);
                writerType = WriterType.WaveWriter;
                break;
            }

            switch (writerType)
            {
            case WriterType.EncoderWriter:
                capture.DataAvailable += (s, e) =>
                {
                    encoderWriter.Write(e.Data, e.Offset, e.ByteCount);
                };
                break;

            case WriterType.WaveWriter:
                capture.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };
                break;
            }

            // Start recording
            capture.Start();
        }
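A hypothetical StopRecording counterpart (not part of the original example), reusing the static capture, encoderWriter, waveWriter and writerType fields from StartRecording above.

        public static void StopRecording()
        {
            // stop the loopback capture so no further DataAvailable events arrive
            capture.Stop();
            capture.Dispose();

            // disposing the writer finalizes the encoded file (MP3/WMA/AAC) or the WAV header
            switch (writerType)
            {
            case WriterType.EncoderWriter:
                encoderWriter.Dispose();
                break;

            case WriterType.WaveWriter:
                waveWriter.Dispose();
                break;
            }
        }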
Example #9
        public void CanEncodeToMP3()
        {
            string targetfilename = Path.ChangeExtension(testfile, "test.mp3");

            if (File.Exists(targetfilename))
            {
                File.Delete(targetfilename);
            }

            using (var source = Codecs.CodecFactory.Instance.GetCodec(testfile))
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, targetfilename))
                {
                    MediaFoundationEncoder.EncodeWholeSource(encoder, source);
                    //Thread.Sleep(5000);
                }
            }
        }
Example #10
        public void ConvertFromWave(string waveFileName, string mp3FileName)
        {
            var mediaTypes = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);

            if (!mediaTypes.Any())
            {
                ColorConsole.WriteLine("The current platform does not support mp3 encoding.", ConsoleColor.DarkRed);
                return;
            }

            IWaveSource source;

            try
            {
                source = CodecFactory.Instance.GetCodec(waveFileName);

                if (mediaTypes.All(x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
                {
                    int sampleRate = mediaTypes.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate)).First(x => x.Channels == source.WaveFormat.Channels).SampleRate;
                    ColorConsole.WriteLine("Samplerate {0} -> {1}", ConsoleColor.DarkRed, source.WaveFormat.SampleRate, sampleRate);
                    ColorConsole.WriteLine("Channels {0} -> {1}", ConsoleColor.DarkRed, source.WaveFormat.Channels, 2);
                    source = source.ChangeSampleRate(sampleRate);
                }
            }
            catch (Exception)
            {
                ColorConsole.WriteLine("Mp3 Format Not Supported", ConsoleColor.DarkRed);
                return;
            }

            using (source)
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, mp3FileName))
                {
                    byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                    int    read;
                    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        encoder.Write(buffer, 0, read);
                        ColorConsole.WriteLine("Wav -> Mp3 Progress: {0:P}/{1:P}", ConsoleColor.DarkGreen, (double)source.Position / source.Length, 1);
                    }
                }
            }
        }
Example #11
        private void StartRecordBtn_Clicked(object sender, RoutedEventArgs ev)
        {
            capture = new WasapiLoopbackCapture();
            capture.Initialize();
            DateTime date     = DateTime.Now;
            string   filename = string.Format(@"F:\Share\DailyMeetingRecord\{0:yyyy_MM_dd}.mp3", date);

            //create an mp3 encoder to write the data to
            writer = MediaFoundationEncoder.CreateMP3Encoder(capture.WaveFormat, filename);
            //setup an eventhandler to receive the recorded data
            capture.DataAvailable += (s, e) =>
            {
                //save the recorded audio
                writer.Write(e.Data, e.Offset, e.ByteCount);
            };
            //start recording
            capture.Start();
            this.StartRecordBtn.IsEnabled = false;
            this.StopRecordBtn.IsEnabled  = true;
        }
Example #12
        /// <summary>
        /// Converts an IWaveSource (e.g. the audio track of an .mp4) to .mp3 at a 192 kbit/s bitrate and saves it to PATH using the video title.
        /// </summary>
        /// <param name="source">An IWaveSource, e.g. CSCore.Codecs.CodecFactory.Instance.GetCodec(new Uri(video.Uri))</param>
        /// <param name="videoTitle">The video title, used as the output file name</param>
        /// <returns>True if the current platform does not support mp3 encoding; otherwise false.</returns>
        private static bool ConvertToMp3(IWaveSource source, string videoTitle)
        {
            var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);

            if (!supportedFormats.Any())
            {
                Console.WriteLine("The current platform does not support mp3 encoding.");
                return(true);
            }

            if (supportedFormats.All(
                    x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
            {
                int sampleRate =
                    supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                    .First(x => x.Channels == source.WaveFormat.Channels)
                    .SampleRate;

                Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
                Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
                source = source.ChangeSampleRate(sampleRate);
            }
            using (source)
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, PATH + videoTitle + ".mp3"))
                {
                    byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                    int    read;
                    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        encoder.Write(buffer, 0, read);

                        //Console.CursorLeft = 0;
                        //Console.Write("{0:P}/{1:P}", (double)source.Position / source.Length, 1);
                    }
                }
            }
            File.Delete(PATH + videoTitle + ".mp4");
            return(false);
        }
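A hedged usage sketch for ConvertToMp3, following its own doc comment; the URL and title are placeholders, and the call assumes it runs inside the same class since the method is private.

        private static void ConvertToMp3UsageSketch()
        {
            // obtain the audio of a video as an IWaveSource, as suggested by the doc comment above
            IWaveSource videoSource =
                CSCore.Codecs.CodecFactory.Instance.GetCodec(new Uri("https://example.com/video.mp4"));

            // ConvertToMp3 returns true when the platform cannot encode mp3, false on success
            bool unsupported = ConvertToMp3(videoSource, "ExampleVideoTitle");
            Console.WriteLine(unsupported ? "mp3 encoding not supported" : "done");
        }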
Example #13
        public void startRecording(MMDevice micDevice, MMDevice speakDevice)
        {
            isRecording = true;
            window.LockUI();
            playSilence();
            makeFileNames();

            micCapture        = new WasapiCapture();
            micCapture.Device = micDevice;
            micCapture.Initialize();

            speakCapture        = new WasapiLoopbackCapture();
            speakCapture.Device = speakDevice;
            speakCapture.Initialize();

            micSource = new SoundInSource(micCapture);

            micWriter = MediaFoundationEncoder.CreateMP3Encoder(micSource.WaveFormat, micFileName);
            byte[] micBuffer = new byte[micSource.WaveFormat.BytesPerSecond];
            micSource.DataAvailable += (s, e) =>
            {
                int read = micSource.Read(micBuffer, 0, micBuffer.Length);
                micWriter.Write(micBuffer, 0, read);
            };

            micCapture.Start();

            speakSource = new SoundInSource(speakCapture);
            speakWriter = MediaFoundationEncoder.CreateMP3Encoder(speakSource.WaveFormat, speakFileName);
            byte[] speakBuffer = new byte[speakSource.WaveFormat.BytesPerSecond];
            speakSource.DataAvailable += (s, e) =>
            {
                int read = speakSource.Read(speakBuffer, 0, speakBuffer.Length);
                speakWriter.Write(speakBuffer, 0, read);
            };

            speakCapture.Start();
        }
Example #14
        public MP3Recorder(string filename)
        {
            if (File.Exists(filename))
            {
                File.Delete(filename);
            }
            wasapiCapture_ = new WasapiCapture();
            wasapiCapture_.Initialize();
            var wasapiCaptureSource = new SoundInSource(wasapiCapture_);

            stereoSource_ = wasapiCaptureSource.ToStereo();
            writer_       = MediaFoundationEncoder.CreateMP3Encoder(stereoSource_.WaveFormat, filename);
            byte[] buffer = new byte[stereoSource_.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = stereoSource_.Read(buffer, 0, buffer.Length);
                writer_.Write(buffer, 0, read);
            };
            wasapiCapture_.Start();
        }
Example #15
        private static async void OnImportSoundCommand(bool forceMono)
        {
            var asset     = _currentEditor.Asset;
            var soundWave = (SoundWaveAsset)asset.RootObject;

            byte chunkIndex = 0;

            if (soundWave.Localization != null && soundWave.Localization.Count > 0)
            {
                var english = soundWave.Localization[0];
                chunkIndex = soundWave.RuntimeVariations[english.FirstVariationIndex].ChunkIndex;
            }

            var chunk = soundWave.Chunks[chunkIndex];

            var chunkEntry = App.AssetManager.GetChunkEntry(chunk.ChunkId);

            var ofd = new OpenFileDialog();

            ofd.Filter      = "All Media Files|*.wav;*.mp3";
            ofd.Title       = "Open Audio";
            ofd.Multiselect = true;

            if (ofd.ShowDialog(_mainWindow) != true)
            {
                return;
            }

            FrostyTask2.Begin("Importing Chunk", "");
            await Task.Run(() =>
            {
                var imports        = new List <SoundImport>();
                var totalChunkSize = 0u;

                foreach (var fileName in ofd.FileNames)
                {
                    var tempFile = Path.GetTempFileName();
#if DEBUG
                    var tempFile2 = Path.GetTempFileName();

                    var decoder = new CSCore.Ffmpeg.FfmpegDecoder(fileName);
                    var source  = decoder.ChangeSampleRate(48000);
                    if (forceMono)
                    {
                        // ToMono returns a new source, so reassign it to keep the downmix
                        source = source.ToMono();
                    }

                    using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat,
                                                                                 tempFile2))
                    {
                        var buffer = new byte[source.WaveFormat.BytesPerSecond];
                        int read;
                        while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                        {
                            encoder.Write(buffer, 0, read);
                        }
                    }
#else
                    var tempFile2 = fileName;
#endif

                    try
                    {
                        var fileNameEncoded = EncodeParameterArgument(tempFile2);
                        App.Logger.Log($"- \"{fileNameEncoded}\"");
                        var result = Primrose.Utility.ExternalTool.Run(@"dandev-el3.exe", $"{fileNameEncoded} -o {tempFile}",
                                                                       out var stdout, out var stderr);

                        if (!string.IsNullOrEmpty(stderr))
                        {
                            App.Logger.LogError(stderr);
                        }

                        if (result == 0)
                        {
                            using (var nativeReader =
                                       new NativeReader(
                                           new FileStream(tempFile, FileMode.Open, FileAccess.Read)))
                            {
                                var end = nativeReader.ReadToEnd();

                                if (end.Length > 0 && end[0] == 0x48)
                                {
                                    var lines = stdout.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

                                    foreach (var line in lines)
                                    {
                                        App.Logger.Log("> " + line);
                                    }

                                    var chunkSize =
                                        uint.Parse(lines.First(l => l.StartsWith("ChunkSize:")).Substring(10));
                                    var segmentLength =
                                        float.Parse(lines.First(l => l.StartsWith("SegmentLength:")).Substring(15));

                                    imports.Add(new SoundImport
                                    {
                                        SourceFile    = fileName,
                                        Chunk         = end,
                                        ChunkOffset   = totalChunkSize,
                                        ChunkSize     = chunkSize,
                                        SegmentLength = segmentLength
                                    });
                                    totalChunkSize += chunkSize;
                                }
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        App.Logger.LogError(e.Message);
                        App.Logger.LogError(e.StackTrace);
                    }
                    finally
                    {
                        try { File.Delete(tempFile); } catch (Exception) { /* ignored */ }
                        try { File.Delete(tempFile2); } catch (Exception) { /* ignored */ }
                    }
                }

                if (soundWave.Localization.Count > 0)
                {
                    var english = soundWave.Localization[0];

                    if (soundWave.Localization.Count > 1)
                    {
                        soundWave.Localization.RemoveRange(1, soundWave.Localization.Count - 1);
                    }

                    english.FirstVariationIndex = 0;
                    english.VariationCount      = (ushort)imports.Count;
                }

                soundWave.Segments.Clear();
                soundWave.RuntimeVariations.Clear();

                var combinedChunk = new byte[totalChunkSize];

                for (var i = 0; i < imports.Count; i++)
                {
                    var import = imports[i];

                    App.Logger.Log($"Import[{i}] - SampleOffset: {import.ChunkOffset} - SegmentLength: {import.SegmentLength} - SourceFile: {import.SourceFile}");

                    soundWave.Segments.Add(new SoundWaveVariationSegment
                    {
                        SamplesOffset   = import.ChunkOffset,
                        SeekTableOffset = uint.MaxValue,
                        SegmentLength   = import.SegmentLength,
                    });

                    soundWave.RuntimeVariations.Add(new SoundWaveRuntimeVariation
                    {
                        ChunkIndex            = chunkIndex,
                        FirstLoopSegmentIndex = 0,
                        FirstSegmentIndex     = (ushort)i,
                        LastLoopSegmentIndex  = 0,
                        PersistentDataSize    = 0,
                        SegmentCount          = 1,
                        Weight = 100,
                    });

                    Debug.Assert(import.ChunkSize == import.Chunk.Length);

                    Array.Copy(import.Chunk, 0, combinedChunk, (int)import.ChunkOffset, import.ChunkSize);
                }

                App.AssetManager.ModifyChunk(chunkEntry.Id, combinedChunk);

                chunk.ChunkSize = totalChunkSize;
                asset.Update();
                _app.Dispatcher.InvokeAsync(() =>
                {
                    App.AssetManager.ModifyEbx(_currentEditor.AssetEntry.Name, asset);
                });
            });

            FrostyTask2.End();
        }
Example #16
        static void Main(string[] args)
        {
            if (args.Length < 1 || !File.Exists(args[0]) ||
                // ReSharper disable once PossibleNullReferenceException
                !Path.GetExtension(args[0]).Equals(".wav", StringComparison.InvariantCultureIgnoreCase))
            {
                Console.WriteLine("Invalid input.");
                return;
            }

            var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);

            if (!supportedFormats.Any())
            {
                Console.WriteLine("The current platform does not support mp3 encoding.");
                return;
            }

            IWaveSource source;

            try
            {
                source = CodecFactory.Instance.GetCodec(args[0]);

                if (
                    supportedFormats.All(
                        x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
                {
                    //the encoder does not support the input sample rate -> convert it to any supported samplerate
                    //choose the best sample rate with stereo (in order to make simple, we always use stereo in this sample)
                    int sampleRate =
                        supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                        .First(x => x.Channels == source.WaveFormat.Channels)
                        .SampleRate;

                    Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
                    Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
                    source = source.ChangeSampleRate(sampleRate);
                }
            }
            catch (Exception)
            {
                Console.WriteLine("Format not supported.");
                return;
            }

            using (source)
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, "output.mp3"))
                {
                    byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                    int    read;
                    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        encoder.Write(buffer, 0, read);

                        Console.CursorLeft = 0;
                        Console.Write("{0:P}/{1:P}", (double)source.Position / source.Length, 1);
                    }
                }
            }
        }
Example #17
        static void Main(string[] args)
        {
            args = new string[] { "C:\\fam.wav" };
            if (args.Length < 1 || !File.Exists(args[0]) ||
                // ReSharper disable once PossibleNullReferenceException
                !Path.GetExtension(args[0]).Equals(".wav", StringComparison.InvariantCultureIgnoreCase))
            {
                Console.WriteLine("Invalid input.");
                return;
            }

            var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);

            if (!supportedFormats.Any())
            {
                Console.WriteLine("The current platform does not support mp3 encoding.");
                return;
            }

            IWaveSource source;

            try
            {
                Console.WriteLine("getting source");
                source = CodecFactory.Instance.GetCodec(args[0]);
                Console.WriteLine("Source retreived");

                Console.WriteLine(source.ToString());
                if (
                    supportedFormats.All(
                        x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
                {
                    //the encoder does not support the input sample rate -> convert it to any supported samplerate
                    //choose the best sample rate with stereo (in order to make simple, we always use stereo in this sample)
                    int sampleRate =
                        supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                        .First(x => x.Channels == source.WaveFormat.Channels)
                        .SampleRate;

                    Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
                    Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
                    source = source.ChangeSampleRate(sampleRate);
                }
            }
            catch (Exception)
            {
                Console.WriteLine("Format not supported.");
                return;
            }

            //use memstream to write to instead of file
            var memStream = new MemoryStream();

            using (source)
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, memStream))
                {
                    byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                    int    read;
                    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        encoder.Write(buffer, 0, read);

                        Console.CursorLeft = 0;
                        Console.Write("{0:P}/{1:P}", (double)source.Position / source.Length, 1);
                    }


                    //write memstream contents to file
                    var fileStream = File.Open("d:\\output.mp3", FileMode.Create);
                    memStream.Seek(0, SeekOrigin.Begin);
                    memStream.CopyTo(fileStream);

                    fileStream.Close();
                    memStream.Close();
                }
            }
            Console.WriteLine("done");
            while (true)
            {
            }
        }
Example #18
        private bool ConvertWavToMp3(string wavFilePath, string mp3FileName)
        {
            if (!File.Exists(wavFilePath))
            {
                Log.Error($"Unable to find wav file {wavFilePath}");
                return(false);
            }

            var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);

            if (!supportedFormats.Any())
            {
                Log.Error("The current platform does not support mp3 encoding.");
                return(false);
            }

            IWaveSource source;

            try
            {
                source = CodecFactory.Instance.GetCodec(wavFilePath);

                if (
                    supportedFormats.All(
                        x => x.SampleRate != source.WaveFormat.SampleRate && x.Channels == source.WaveFormat.Channels))
                {
                    //the encoder does not support the input sample rate -> convert it to any supported samplerate
                    //choose the best sample rate with stereo (in order to make simple, we always use stereo in this sample)
                    int sampleRate =
                        supportedFormats.OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
                        .First(x => x.Channels == source.WaveFormat.Channels)
                        .SampleRate;

                    Log.Info($"Samplerate {source.WaveFormat.SampleRate} -> {sampleRate}");
                    Log.Info($"Channels {source.WaveFormat.Channels} -> {2}");
                    source = source.ChangeSampleRate(sampleRate);
                }
            }
            catch (Exception ex)
            {
                Log.Error("Format not supported.", ex);
                return(false);
            }

            using (source)
            {
                using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, mp3FileName))
                {
                    Log.Info($"\nWriting {mp3FileName}. . .");
                    byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
                    int    read;
                    double logMessageThreshold = .10;
                    double logMessageIncrement = .10;
                    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        encoder.Write(buffer, 0, read);

                        Console.CursorLeft = 0;
                        var writePercentage = (double)source.Position / source.Length;
                        if (writePercentage >= logMessageThreshold)
                        {
                            string writePercentageLogMessage = string.Format("{0:P}/{1:P}", writePercentage, 1);
                            Log.Info(writePercentageLogMessage);
                            logMessageThreshold += logMessageIncrement;
                        }
                    }
                }
            }
            return(true);
        }