Example #1
        private unsafe void OnProcessSampleEvent(int mediaTypeID, uint dwSampleFlags, long llSampleTime, long llSampleDuration, uint dwSampleSize, ref byte[] sampleBuffer)
        {
            try
            {
                if (mediaTypeID == 0)
                {
                    if (_vpxEncoder == null)
                    {
                        logger.Warn("Video sample cannot be processed as the VPX encoder has not yet received the frame size.");
                    }
                    else
                    {
                        byte[] vpxEncodedBuffer = null;

                        unsafe
                        {
                            fixed(byte *p = sampleBuffer)
                            {
                                int encodeResult = _vpxEncoder.Encode(p, (int)dwSampleSize, 1, ref vpxEncodedBuffer);

                                if (encodeResult != 0)
                                {
                                    logger.Warn("VPX encode of video sample failed.");
                                }
                            }
                        }

                        // Don't hand consumers a null buffer if the encode failed.
                        if (vpxEncodedBuffer != null)
                        {
                            OnMediaSampleReady?.Invoke(MediaSampleTypeEnum.VP8, _vp8Timestamp, vpxEncodedBuffer);
                        }

                        //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                        _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                    }
                }
                else
                {
                    uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                    byte[] mulawSample = new byte[sampleDuration];
                    int    sampleIndex = 0;

                    // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
                    for (int index = 0; index < sampleBuffer.Length; index += 2)
                    {
                        var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                        mulawSample[sampleIndex++] = ulawByte;
                    }

                    OnMediaSampleReady?.Invoke(MediaSampleTypeEnum.Mulaw, _mulawTimestamp, mulawSample);

                    //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                    _mulawTimestamp += sampleDuration;
                }
            }
            catch (Exception excp)
            {
                logger.Warn("Exception MfSampleGrabber_OnProcessSampleEvent. " + excp.Message);
            }
        }
Example #2
        // Encode G.711
        private void button2_Click(object sender, EventArgs e)
        {
            try {
                if (this.currentAudio == null)
                {
                    throw new Exception("Вы не выбрали файл для кодирования.");
                }
                if (codecToEncode.SelectedItem == null)
                {
                    throw new Exception("Вы не выбрали кодэк.");
                }
            } catch (Exception ex) {
                MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            SaveFileDialog save = new SaveFileDialog();

            save.Filter = "Wave File (*.wav)|*.wav;";
            if (save.ShowDialog() != DialogResult.OK)
            {
                return;
            }
            Codecs codec = (codecToEncode.SelectedIndex == 0) ? Codecs.ALAW : Codecs.MULAW;

            byte[] samples = new byte[this.currentAudio.ShortSamples.Length];
            for (int i = 0; i < this.currentAudio.ShortSamples.Length; i++)
            {
                if (codec == Codecs.ALAW)
                {
                    samples[i] = ALawEncoder.LinearToALawSample(this.currentAudio.ShortSamples[i]);
                }
                else if (codec == Codecs.MULAW)
                {
                    samples[i] = MuLawEncoder.LinearToMuLawSample(this.currentAudio.ShortSamples[i]);
                }
            }
            WaveFormat format = null;

            if (codec == Codecs.ALAW)
            {
                format = WaveFormat.CreateALawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
            }
            else if (codec == Codecs.MULAW)
            {
                format = WaveFormat.CreateMuLawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
            }
            // Dispose the writer so the WAV header gets finalised.
            using (WaveFileWriter writer = new WaveFileWriter(save.FileName, format))
            {
                writer.Write(samples, 0, samples.Length);
            }
            DialogResult dres = MessageBox.Show("The audio file was saved successfully. Open the file?", "File saved", MessageBoxButtons.YesNo, MessageBoxIcon.Question);

            if (dres == DialogResult.Yes)
            {
                this.decodeG711(save.FileName, codec);
            }
        }
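The WaveFormat chosen above is what makes the output playable: the raw A-law/mu-law bytes carry no framing, so the WAV header has to declare the matching format tag. A short sketch of what the two NAudio factory methods produce (the comments state the known WAVE-spec tags):

        WaveFormat mu = WaveFormat.CreateMuLawFormat(8000, 1);
        // mu.Encoding      == WaveFormatEncoding.MuLaw (WAVE_FORMAT_MULAW, 0x0007)
        // mu.BitsPerSample == 8

        WaveFormat a = WaveFormat.CreateALawFormat(8000, 1);
        // a.Encoding       == WaveFormatEncoding.ALaw  (WAVE_FORMAT_ALAW, 0x0006)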
Example #3
        public byte[] Encode(byte[] data, int offset, int length)
        {
            byte[] encoded  = new byte[length / 2];
            int    outIndex = 0;

            for (int n = 0; n < length; n += 2)
            {
                encoded[outIndex++] = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(data, offset + n));
            }
            return(encoded);
        }
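A hedged usage sketch for the Encode method above. The listing does not show the enclosing type, so "codec" stands in for whatever instance exposes it, and the PCM values are made up:

        short[] pcm = { 0, 1000, -1000, short.MaxValue };
        byte[] pcmBytes = new byte[pcm.Length * 2];
        Buffer.BlockCopy(pcm, 0, pcmBytes, 0, pcmBytes.Length);

        // One mu-law byte per 16-bit sample: the output is half the input's byte length.
        byte[] encoded = codec.Encode(pcmBytes, 0, pcmBytes.Length);   // encoded.Length == 4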
Example #4
    /// <summary>
    /// Converts the 16-bit ACM stream to 8-bit MuLaw on read.
    /// </summary>
    /// <param name="destinationBuffer">The destination buffer to output into.</param>
    /// <param name="offset">The offset to store at.</param>
    /// <param name="readingCount">The requested size to read.</param>
    /// <returns></returns>
    public int Read(byte[] destinationBuffer, int offset, int readingCount)
    {
        // The 16-bit source needs twice as many bytes as the mu-law output.
        var sizeOfPcmBuffer = readingCount * 2;

        sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sizeOfPcmBuffer);
        var sourceBytesRead = ieeeToPcm.Read(sourceBuffer, 0, sizeOfPcmBuffer);
        var samplesRead     = sourceBytesRead / 2;
        var outIndex        = 0;

        for (var n = 0; n < sizeOfPcmBuffer; n += 2)
        {
            destinationBuffer[outIndex++] = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sourceBuffer, offset + n));
        }
        return(samplesRead);
    }
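Since each output byte consumes two source bytes, a caller wanting a 20 ms telephony packet at 8 kHz would request 160 bytes from this Read (a sketch; "muLawStream" is a stand-in for whatever object exposes the method above):

        var packet = new byte[160];
        int written = muLawStream.Read(packet, 0, packet.Length);   // pulls 320 PCM bytes internally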
Example #5
        /// <summary>
        /// Event handler for receiving an audio sample that is ready for encoding, packaging into RTP and sending to the remote end
        /// of the VoIP call.
        /// </summary>
        private void RTPChannelSampleAvailable(object sender, WaveInEventArgs e)
        {
            TimeSpan samplePeriod = DateTime.Now.Subtract(_lastInputSampleReceivedAt);

            _lastInputSampleReceivedAt = DateTime.Now;
            _inputSampleCount++;

            _audioLogger.Debug(_inputSampleCount + " sample period " + samplePeriod.TotalMilliseconds + "ms,  sample bytes " + e.BytesRecorded + ".");

            byte[] sample      = new byte[e.Buffer.Length / 2];
            int    sampleIndex = 0;

            for (int index = 0; index < e.Buffer.Length; index += 2)
            {
                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(e.Buffer, index));
                sample[sampleIndex++] = ulawByte;
            }

            m_rtpChannel.SendRTPRaw(sample);
        }
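For context: with 16-bit mono capture, the encoded packet is half the recorded byte count, so a 20 ms buffer at 8 kHz yields the standard 160-byte G.711 payload. A quick check of that arithmetic (the capture format is an assumption, not shown above):

        const int sampleRate = 8000;                             // G.711 clock
        const int periodMs = 20;
        int capturedBytes = sampleRate / 1000 * periodMs * 2;    // 320 bytes of 16-bit PCM
        int payloadBytes = capturedBytes / 2;                    // 160 mu-law bytes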
Example #6
        /// <summary>
        /// Sends a sample from a signal generator generated waveform.
        /// </summary>
        private void SendSignalGeneratorSample(object state)
        {
            lock (_audioStreamTimer)
            {
                int inputBufferSize  = RTP_TIMESTAMP_RATE / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                int outputBufferSize = RTP_TIMESTAMP_RATE / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;

                // Get the signal generator to generate the samples and then convert from
                // signed linear to PCM.
                float[] linear = new float[inputBufferSize];
                _signalGenerator.Read(linear, 0, inputBufferSize);
                short[] pcm = linear.Select(x => (short)(x * 32767f)).ToArray();

                byte[] encodedSample = new byte[outputBufferSize];

                for (int index = 0; index < inputBufferSize; index++)
                {
                    encodedSample[index] = MuLawEncoder.LinearToMuLawSample(pcm[index]);
                }

                _peerConnection.SendAudioFrame((uint)outputBufferSize, _peerConnection.GetSendingFormat(SDPMediaTypesEnum.audio).FormatCodec.GetHashCode(), encodedSample);
            }
        }
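The float-to-short conversion above scales the generator's [-1.0, 1.0] output into the 16-bit range before compressing, which is why inputBufferSize and outputBufferSize match: one float sample in, one mu-law byte out. The same two steps in isolation:

        float f = 0.5f;                                       // generator output in [-1.0, 1.0]
        short pcm16 = (short)(f * 32767f);                    // 16383
        byte muLaw = MuLawEncoder.LinearToMuLawSample(pcm16); // one byte per sample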
Example #7
        /// <summary>
        /// Starts the Media Foundation sampling.
        /// </summary>
        private unsafe void SampleMp4Media()
        {
            try
            {
                logger.LogDebug("Starting mp4 media sampling thread.");

                _isMp4Sampling = true;

                VpxEncoder vpxEncoder     = null;
                uint       vp8Timestamp   = 0;
                uint       mulawTimestamp = 0;

                while (!_exit)
                {
                    if (OnMp4MediaSampleReady == null)
                    {
                        logger.LogDebug("No active clients, media sampling paused.");
                        break;
                    }
                    else
                    {
                        byte[] sampleBuffer = null;
                        var    sample       = _mediaSource.GetSample(ref sampleBuffer);

                        if (sample != null && sample.HasVideoSample)
                        {
                            if (vpxEncoder == null ||
                                (vpxEncoder.GetWidth() != sample.Width || vpxEncoder.GetHeight() != sample.Height || vpxEncoder.GetStride() != sample.Stride))
                            {
                                if (vpxEncoder != null)
                                {
                                    vpxEncoder.Dispose();
                                }

                                vpxEncoder = InitialiseVpxEncoder((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                            }

                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            // Skip the notification if the encode failed and left the buffer null.
                            if (vpxEncodedBuffer != null)
                            {
                                OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.video, vp8Timestamp, vpxEncodedBuffer);
                            }

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                        else if (sample != null && sample.HasAudioSample)
                        {
                            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                            byte[] mulawSample = new byte[sampleDuration];
                            int    sampleIndex = 0;

                            for (int index = 0; index < sampleBuffer.Length; index += 2)
                            {
                                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                                mulawSample[sampleIndex++] = ulawByte;
                            }

                            OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, mulawTimestamp, mulawSample);

                            //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                            mulawTimestamp += sampleDuration;
                        }
                    }
                }

                // The encoder is only created once a video sample arrives, so it can be null here.
                vpxEncoder?.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception SampleMp4Media. " + excp.Message);
            }
            finally
            {
                logger.LogDebug("mp4 sampling thread stopped.");
                _isMp4Sampling = false;
            }
        }
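Note the two clocks at work above: the audio timestamp advances by the mu-law sample count (one tick per sample on the 8 kHz G.711 clock), while the video timestamp advances by a fixed VP8_TIMESTAMP_SPACING per frame. The constant's value is not shown, but VP8 over RTP uses a 90 kHz clock, so for an assumed 30 fps source it would be:

        const int vp8ClockRate = 90000;                   // RTP clock rate for VP8
        const int assumedFps = 30;                        // assumption; not in the listing
        int timestampSpacing = vp8ClockRate / assumedFps; // 3000 ticks per frame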
Example #8

        byte EncodeMuLaw(short _sample)
        {
            return MuLawEncoder.LinearToMuLawSample(_sample);
        }
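The matching decoder lives alongside the encoder in NAudio.Codecs, which makes a round-trip check easy. Mu-law quantizes, so the decoded value approximates rather than equals the input:

        short original = 1000;
        byte compressed = MuLawEncoder.LinearToMuLawSample(original);
        short decoded = MuLawDecoder.MuLawToLinearSample(compressed);   // close to 1000, not exact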
Example #9

        public void OnDataAvailable(object sender, WaveInEventArgs e)
        {
            _waitingForData = false;

            var outPos     = 0;
            var floatIndex = 0;

            try
            {
                CheckBuffers(e.BytesRecorded);

                // Note: Data ratio of stereo 32-bit 48KHz to mono 16-bit 8KHz is 24:1
                //var bytesReceived = e.BytesRecorded;
                //var outputSize    = bytesReceived / 24;
                //var outArray      = new byte[(int)Math.Ceiling(outputSize / 2d) * 2];

                // Note: Data ratio of stereo 32-bit 48KHz to mono 8-bit 8KHz is 48:1
                var bytesReceived = e.BytesRecorded;
                var outputSize    = bytesReceived / 48;
                var outArray      = new byte[(int)Math.Ceiling(outputSize / 2d) * 2];

                // 48KHz
                // 32Kbps
                //var i = 0;
                //var j = 0;
                //var k = 0;
                //var o = new float[e.BytesRecorded / 8];

                //while (i < e.BytesRecorded)
                //{
                //    var left  = BitConverter.ToSingle(e.Buffer, i);
                //    var right = BitConverter.ToSingle(e.Buffer, i + 4);
                //    var mono  = (left + right) * 0.5F;

                //    o[j] = mono;

                //    i = i + 8;
                //    j = j + 1;
                //}

                //var result = Downsample(o);

                //for (k = 0; k < result.Length; k++)
                //{
                //    outArray[k] = MuLawEncoder.LinearToMuLawSample((short) (result[k] * MaxValue));
                //}

                // #1 Resample to 8KHz
                var waveBuffer = Downsample(e);

                while (floatIndex < waveBuffer.FloatBufferCount)
                {
                    // #2 Convert to Mono
                    var leftSample  = waveBuffer.FloatBuffer[floatIndex++];
                    var rightSample = waveBuffer.FloatBuffer[floatIndex++];
                    var monoSample  = ConvertToMono(leftSample, rightSample);

                    // #3 Convert to short and then mu-law
                    outArray[outPos++] = MuLawEncoder.LinearToMuLawSample((short)(monoSample * MaxValue));
                }

                if (DataAvailable != null)
                {
                    foreach (var delDelegate in DataAvailable.GetInvocationList())
                    {
                        delDelegate.DynamicInvoke(this, new DataEventArgs(outArray, outPos));
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
                Console.WriteLine($"{nameof(outPos)}: {outPos}, {nameof(floatIndex)}: {floatIndex}");
                throw;
            }
            _waitingForData = true;
        }
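The 48:1 ratio in the comment above follows directly from the format change, as this hedged breakdown shows:

        int bytesPerInputFrame = 2 /*channels*/ * 4 /*bytes per float*/;   // 8
        int downsampleFactor = 48000 / 8000;                               // 6
        int ratio = bytesPerInputFrame * downsampleFactor;                 // 48 input bytes per mu-law byte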
Example #10
        /// <summary>
        /// Starts the Media Foundation sampling.
        /// </summary>
        private static unsafe void StartMedia()
        {
            try
            {
                logger.LogDebug("Starting media sampling thread.");

                _isSampling = true;

                while (true)
                {
                    if (OnMediaSampleReady == null)
                    {
                        logger.LogDebug("No active clients, media sampling paused.");
                        break;
                    }
                    else
                    {
                        byte[] sampleBuffer = null;
                        var    sample       = _mediaSource.GetSample(ref sampleBuffer);

                        if (sample != null && sample.HasVideoSample)
                        {
                            if (_vpxEncoder == null ||
                                (_vpxEncoder.GetWidth() != sample.Width || _vpxEncoder.GetHeight() != sample.Height || _vpxEncoder.GetStride() != sample.Stride))
                            {
                                OnVideoResolutionChanged((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                            }

                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = _vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            // Skip the notification if the encode failed and left the buffer null.
                            if (vpxEncodedBuffer != null)
                            {
                                OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.video, _vp8Timestamp, vpxEncodedBuffer);
                            }

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                        else if (sample != null && sample.HasAudioSample)
                        {
                            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                            byte[] mulawSample = new byte[sampleDuration];
                            int    sampleIndex = 0;

                            // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
                            for (int index = 0; index < sampleBuffer.Length; index += 2)
                            {
                                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                                mulawSample[sampleIndex++] = ulawByte;
                            }

                            OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, _mulawTimestamp, mulawSample);

                            //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                            _mulawTimestamp += sampleDuration;
                        }
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception OnProcessSampleEvent. " + excp.Message);
            }
            finally
            {
                logger.LogDebug("Media sampling thread stopped.");

                _isSampling = false;
            }
        }