/// <summary>
/// Transcode an audio file into a new container/encoding.
/// </summary>
/// <param name="input">input audio file path</param>
/// <param name="output">output audio file path</param>
/// <param name="outChannels">output channel count (default: stereo)</param>
/// <param name="outSampleRate">output sample rate in Hz (default: 44100)</param>
public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
{
    using (MediaWriter writer = new MediaWriter(output))
    using (MediaReader reader = new MediaReader(input))
    {
        // Locate the first audio stream in the input (throws if none exists).
        int audioIndex = reader.First(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
        writer.Initialize();

        // Template frame matching the encoder's format; the converter
        // reshapes every source frame into this layout.
        AudioFrame dst = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dst);

        // Audio pts counts samples and must be monotonically increasing.
        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var srcframe in reader[audioIndex].ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(srcframe))
                {
                    // BUG FIX: stamp the frame with the running total BEFORE
                    // adding its own samples. The original incremented first,
                    // so the first frame got pts = nb_samples instead of 0 and
                    // the whole stream played one frame late. FFmpeg convention
                    // is pts = number of samples preceding the frame.
                    dstframe.Pts = pts;
                    pts += dstframe.AVFrame.nb_samples;
                    foreach (var outpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(outpacket);
                    }
                }
            }
        }
        // Drain any buffered packets and finalize the container.
        writer.FlushMuxer();
    }
}
/// <summary>
/// Encode 1000 audio frames, each generated from a Mat, into
/// <paramref name="output"/> as stereo 44.1 kHz audio.
/// </summary>
/// <param name="output">output audio file path</param>
public EncodeAudioByMat(string output)
{
    using (MediaWriter writer = new MediaWriter(output))
    {
        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, 2, 44100));
        writer.Initialize();

        // Template frame matching the encoder's format.
        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        using (Mat mat = CreateMat(writer[0].Codec.AVCodecContext.channels))
        {
            // Audio pts counts samples and must be monotonically increasing.
            long pts = 0;
            for (int i = 0; i < 1000; i++)
            {
                foreach (var item in converter.Convert(mat.ToAudioFrame(dstSampleRate: writer[0].Codec.AVCodecContext.sample_rate)))
                {
                    // BUG FIX: stamp the frame BEFORE advancing the counter so
                    // the first frame starts at pts 0 (the original incremented
                    // first, shifting the stream late by one frame).
                    item.Pts = pts;
                    pts += item.NbSamples;
                    foreach (var packet in writer[0].WriteFrame(item))
                    {
                        writer.WritePacket(packet);
                    }
                }
            }
        }
        // Drain any buffered packets and finalize the container.
        writer.FlushMuxer();
    }
}
/// <summary>
/// Reads up to <paramref name="count"/> samples from the underlying wave
/// stream, converting them to 32-bit IEEE float samples in
/// <paramref name="buffer"/> when the stream is PCM-encoded.
/// </summary>
/// <param name="buffer">destination buffer for the float samples</param>
/// <param name="count">number of samples to read</param>
/// <returns>The value returned by the underlying read call.</returns>
/// <exception cref="NotSupportedException">
/// The stream encoding is neither PCM nor IEEE float, or the PCM bit depth
/// is not 8, 16, or 32.
/// </exception>
private int readAsFloat(float[] buffer, int count)
{
    var encoding = waveStream.Format.Encoding;

    // IEEE float needs no conversion: copy straight into the caller's buffer.
    if (encoding == WaveFormatEncoding.IeeeFloat)
    {
        return read(buffer, count);
    }

    if (encoding != WaveFormatEncoding.Pcm)
    {
        throw new NotSupportedException("The wave format isn't supported");
    }

    // PCM: read into a scratch block of the stream's native sample width,
    // then widen every sample to IeeeFloat.
    int reads;
    switch (waveStream.Format.BitsPerSample)
    {
        case 8:
            byte[] bytes = new byte[bufferSize];
            reads = read(bytes, count);
            SampleConverter.Convert(bytes, buffer);
            break;

        case 16:
            short[] shorts = new short[bufferSize];
            reads = read(shorts, count);
            SampleConverter.Convert(shorts, buffer);
            break;

        case 32:
            int[] ints = new int[bufferSize];
            reads = read(ints, count);
            SampleConverter.Convert(ints, buffer);
            break;

        default:
            throw new NotSupportedException();
    }

    return reads;
}
/// <summary>
/// Record audio from a DirectShow capture device into <paramref name="outputFile"/>.
/// <para>
/// First call with <paramref name="inputDeviceName"/> = null: the available
/// device names are printed to the console and the method returns.
/// </para>
/// <para>
/// Then call again with the real device name to start recording.
/// </para>
/// <para>To stop recording, exit the console.</para>
/// </summary>
/// <param name="outputFile">output audio file path</param>
/// <param name="inputDeviceName">dshow audio device name, or null to list devices</param>
public RecordingAudio(string outputFile, string inputDeviceName = null)
{
    // Route ffmpeg log output to the console.
    FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
    // Register capture devices so the "dshow" input format can be opened.
    FFmpegHelper.RegisterDevice();

    var dshowInput = new InFormat("dshow");
    // List all "dshow" devices on the console; ffmpeg does not support
    // enumerating device names programmatically.
    MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);

    if (string.IsNullOrWhiteSpace(inputDeviceName))
    {
        return; // listing-only mode
    }

    // NOTE: DO NOT delete the "audio=" prefix — dshow requires it.
    using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();
        // NOTE(review): other members in this file use MediaEncoder.CreateAudioEncode;
        // confirm whether MediaEncode here is intentional or a stale API name.
        writer.AddStream(MediaEncode.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
        writer.Initialize();

        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        // Audio pts counts samples and must be monotonically increasing.
        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in stream.ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(frame))
                {
                    // BUG FIX: the original stamped and wrote the template
                    // `dstFrame` instead of the converted `dstframe`, encoding
                    // the wrong frame entirely. Also stamp pts BEFORE advancing
                    // so the first frame starts at 0.
                    dstframe.Pts = pts;
                    pts += dstframe.AVFrame.nb_samples;
                    foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(dstpacket);
                    }
                }
            }
        }
        // Drain any buffered packets and finalize the container.
        writer.FlushMuxer();
    }
}
/// <summary>
/// Convert one source audio frame and write the resulting packets to the
/// output's audio stream.
/// </summary>
/// <param name="audioFrame">source audio frame</param>
/// <exception cref="NotSupportedException">
/// Thrown when the writer has no audio stream (audioIndex &lt; 0).
/// </exception>
public void WriteAudioFrame(AudioFrame audioFrame)
{
    if (audioIndex < 0)
    {
        throw new NotSupportedException();
    }

    foreach (var dstframe in sampleConverter.Convert(audioFrame))
    {
        // BUG FIX: advance the running pts by the CONVERTED frame's sample
        // count. The original added the SOURCE frame's NbSamples once per
        // converted frame, over-counting whenever Convert() yields more than
        // one frame or resamples. Stamp before advancing so the first frame
        // starts at pts 0.
        dstframe.Pts = lastAudioPts;
        lastAudioPts += dstframe.NbSamples;
        foreach (var packet in writer[audioIndex].WriteFrame(dstframe))
        {
            writer.WritePacket(packet);
        }
    }
}
/// <summary>
/// Converts every sample of the source signal to 32-bit IEEE float and
/// stores the result in the destination signal.
/// </summary>
/// <param name="sourceData">input signal (16-bit or 32-bit integer samples)</param>
/// <param name="destinationData">output signal (32-bit IEEE float samples)</param>
protected override void ProcessFilter(Signal sourceData, Signal destinationData)
{
    int channels = sourceData.Channels;
    int length = sourceData.Length;
    SampleFormat dstFormat = destinationData.SampleFormat;
    SampleFormat srcFormat = sourceData.SampleFormat;

    // NOTE(review): unsupported format combinations silently do nothing,
    // matching the original behavior — confirm this is intentional.
    if (dstFormat == SampleFormat.Format32BitIeeeFloat)
    {
        float dst;
        if (srcFormat == SampleFormat.Format16Bit)
        {
            short src;
            for (int c = 0; c < channels; c++)
            {
                for (int i = 0; i < length; i++)
                {
                    src = (short)sourceData.GetSample(c, i);
                    SampleConverter.Convert(src, out dst);
                    // BUG FIX: the original computed `dst` and discarded it,
                    // leaving destinationData untouched. Store the converted
                    // sample in the destination.
                    destinationData.SetSample(c, i, dst);
                }
            }
        }
        else if (srcFormat == SampleFormat.Format32Bit)
        {
            int src;
            for (int c = 0; c < channels; c++)
            {
                for (int i = 0; i < length; i++)
                {
                    src = (int)sourceData.GetSample(c, i);
                    SampleConverter.Convert(src, out dst);
                    // BUG FIX: same as above — persist the converted sample.
                    destinationData.SetSample(c, i, dst);
                }
            }
        }
    }
}