Example #1
        /// <summary>
        /// Starts buffering the audio data from the remote track in an <see cref="AudioTrackReadBuffer"/>.
        /// </summary>
        /// <remarks>
        /// WebRTC audio tracks produce an audio frame every 10 ms.
        /// If you want the audio frames to be buffered (and optionally resampled) automatically,
        /// and you want the application to control when new audio data is read, create an
        /// <see cref="AudioTrackReadBuffer"/> using <see cref="CreateReadBuffer"/>.
        /// If you want to process the audio frames as soon as they are received, without conversions,
        /// subscribe to <see cref="AudioFrameReady"/> instead.
        /// </remarks>
        public AudioTrackReadBuffer CreateReadBuffer()
        {
            uint res = RemoteAudioTrackInterop.RemoteAudioTrack_CreateReadBuffer(_nativeHandle,
                                                                                 out RemoteAudioTrackInterop.ReadBufferHandle readBufferHandle);

            Utils.ThrowOnErrorCode(res);
            return new AudioTrackReadBuffer(readBufferHandle);
        }
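
The remarks above describe two consumption models. The following is a minimal usage sketch, not part of the library source; the remoteTrack variable is assumed for illustration, and the AudioFrameReady lambda parameter type is left implicit:

// Pull model: buffer (and optionally resample) the audio, then read it on the
// application's own schedule via AudioTrackReadBuffer.Read().
AudioTrackReadBuffer readBuffer = remoteTrack.CreateReadBuffer();

// Push model: raw 10 ms frames are delivered to the callback as soon as they arrive.
remoteTrack.AudioFrameReady += (frame) =>
{
    // Process the frame immediately, without buffering or resampling.
};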
Example #2
 /// <summary>
 /// Fill <paramref name="samplesOut"/> with samples from the internal buffer.
 /// </summary>
 /// <remarks>
 /// This method reads the internal buffer starting from the oldest data.
 /// If the internal buffer is exhausted (underrun), <paramref name="samplesOut"/>
 /// is padded according to the value of <paramref name="padBehavior"/>.
 ///
 /// This method should be called regularly to consume the audio data as it is
 /// received. Note that the internal buffer can overrun (and some frames can be
 /// dropped) if this is not called frequently enough.
 /// </remarks>
 /// <param name="sampleRate">
 /// Desired sample rate, in Hz. Data in the buffer is resampled if this differs from
 /// the native track sample rate.
 /// </param>
 /// <param name="numChannels">
 /// Desired number of channels. Should be 1 or 2. Data in the buffer is split or averaged
 /// if this differs from the number of channels of the native track.
 /// </param>
 /// <param name="samplesOut">
 /// Will be filled with the samples read from the internal buffer. The function will
 /// try to fill the entire length of the array.
 /// </param>
 /// <param name="numSamplesRead">
 /// Set to the effective number of samples read.
 /// This will generally be equal to the length of <paramref name="samplesOut"/>, but can be
 /// less in case of underrun.
 /// </param>
 /// <param name="hasOverrun">
 /// Set to <c>true</c> if frames have been dropped from the internal
 /// buffer between the previous call to <c>Read</c> and this one.
 /// </param>
 /// <param name="padBehavior">Controls how <paramref name="samplesOut"/> is padded in case of underrun.</param>
 public void Read(int sampleRate, int numChannels,
                  float[] samplesOut, out int numSamplesRead, out bool hasOverrun,
                  PadBehavior padBehavior = PadBehavior.PadWithZero)
 {
     RemoteAudioTrackInterop.AudioTrackReadBuffer_Read(_nativeHandle,
                                                       sampleRate, numChannels, padBehavior, samplesOut, samplesOut.Length, out numSamplesRead, out mrsBool has_overrun_res);
     hasOverrun = (bool)has_overrun_res;
 }
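
As the remarks note, Read should be polled regularly to avoid overruns. The loop below is only a sketch of such a consumer; the readBuffer and running variables, the 16 kHz mono format, and the 10 ms cadence are assumptions for illustration:

using System.Threading;

// Pull 10 ms of 16 kHz mono audio per iteration (160 samples).
var samples = new float[160];
while (running)
{
    readBuffer.Read(16000, 1, samples,
        out int numSamplesRead, out bool hasOverrun, PadBehavior.PadWithZero);
    if (hasOverrun)
    {
        // The consumer fell behind and frames were dropped; read more often
        // or consume larger chunks.
    }
    // Forward samples[0..numSamplesRead) to the application's audio pipeline;
    // on underrun the remainder is already zero-padded.
    Thread.Sleep(10);
}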
Example #3
 private void UnregisterInteropCallbacks()
 {
     if (_selfHandle != IntPtr.Zero)
     {
         // Registering a null callback with the native layer removes the current one.
         RemoteAudioTrackInterop.RemoteAudioTrack_RegisterFrameCallback(_nativeHandle, null, IntPtr.Zero);
         Utils.ReleaseWrapperRef(_selfHandle);
         _selfHandle          = IntPtr.Zero;
         _interopCallbackArgs = null;
     }
 }
Example #4
 private void RegisterInteropCallbacks()
 {
     _interopCallbackArgs = new RemoteAudioTrackInterop.InteropCallbackArgs()
     {
         Track         = this,
         FrameCallback = RemoteAudioTrackInterop.FrameCallback,
     };
     // Keep the managed wrapper reachable from native code while the callback is registered.
     _selfHandle = Utils.MakeWrapperRef(this);
     RemoteAudioTrackInterop.RemoteAudioTrack_RegisterFrameCallback(
         _nativeHandle, _interopCallbackArgs.FrameCallback, _selfHandle);
 }
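
The register/unregister pair above exposes the managed wrapper to native code through an opaque IntPtr produced by Utils.MakeWrapperRef and released by Utils.ReleaseWrapperRef. The actual helpers are not shown in these excerpts; the class below is only a plausible sketch of that GCHandle pattern, under the assumption that this is roughly how the handle is produced and released:

using System;
using System.Runtime.InteropServices;

internal static class WrapperRefSketch
{
    // Allocate a GCHandle so native code can hold an opaque handle to the
    // managed wrapper without the GC collecting it.
    public static IntPtr MakeWrapperRef(object wrapper)
        => GCHandle.ToIntPtr(GCHandle.Alloc(wrapper));

    // Recover the managed wrapper inside the static native-to-managed callback.
    public static object GetWrapper(IntPtr handleRef)
        => GCHandle.FromIntPtr(handleRef).Target;

    // Free the handle once the native callback has been unregistered.
    public static void ReleaseWrapperRef(IntPtr handleRef)
        => GCHandle.FromIntPtr(handleRef).Free();
}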
Example #5
 /// <summary>
 /// Returns whether the track is output directly to the system audio device.
 /// </summary>
 public bool IsOutputToDevice()
 {
      return (bool)RemoteAudioTrackInterop.RemoteAudioTrack_IsOutputToDevice(_nativeHandle);
 }
Example #6
 /// <summary>
 /// Enable or disable the output of the audio track directly to the audio device used by WebRTC.
 /// </summary>
 /// <remarks>
 /// The default behavior is for every remote audio frame to be passed to
 /// remote audio frame callbacks, as well as output automatically to the
 /// audio device used by WebRTC. If <c>false</c> is passed to this function, remote
 /// audio frames will still be received and passed to callbacks, but won't be
 /// output to the audio device.
 ///
 /// NOTE: Changing the default behavior is not supported on UWP.
 /// </remarks>
 /// <param name="output"><c>true</c> to output the audio to the device used by WebRTC,
 /// <c>false</c> to stop outputting it while still delivering frames to callbacks.</param>
 public void OutputToDevice(bool output)
 {
     RemoteAudioTrackInterop.RemoteAudioTrack_OutputToDevice(_nativeHandle, (mrsBool)output);
 }
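
An application that renders the remote audio itself (for example to spatialize it) can disable the automatic device output while still receiving frames. A short usage sketch, with the remoteTrack variable assumed for illustration:

// Stop automatic playback through the WebRTC audio device; frames keep
// flowing to AudioFrameReady callbacks and read buffers. Not supported on UWP.
remoteTrack.OutputToDevice(false);
bool playsDirectly = remoteTrack.IsOutputToDevice(); // now false

// Render the audio manually, e.g. through a read buffer.
AudioTrackReadBuffer readBuffer = remoteTrack.CreateReadBuffer();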