/// <summary>
/// Opens a read-only stream over the region of the backing file that belongs
/// to <paramref name="part"/>.
/// </summary>
/// <param name="part">Describes the offset and length of the file section to expose.</param>
/// <param name="parameters">Supplies the read buffer size for the underlying file stream.</param>
/// <param name="operationInfo">Async operation context (not consumed here).</param>
/// <returns>A proxy stream limited to the part's length, positioned at its start.</returns>
async Task <Stream> openStream(Part part, StreamParameters parameters, AsyncOperationInfo operationInfo)
{
    // Share read/write so other handles on the same file are not blocked.
    var fileStream = new FileStream(
        _path,
        FileMode.Open,
        FileAccess.Read,
        FileShare.ReadWrite,
        parameters.ReadBufferSize);
    fileStream.Position = part.Position;

    // NOTE(review): the method is async with no awaits (compiler warning CS1998);
    // kept as-is so any constructor exception is delivered through the returned
    // Task rather than synchronously, which callers may rely on.
    // The proxy presumably takes ownership of fileStream and disposes it — confirm.
    return new SectionedStreamProxy(fileStream, part.Length);
}
/// <summary>
/// Terminates the PortAudio library and, when termination succeeds, releases
/// every tracked native allocation and clears the stream bookkeeping tables.
/// </summary>
/// <returns>The error code reported by <c>Pa_Terminate</c>.</returns>
public static ErrorCode Terminate()
{
    var result = (ErrorCode)Pa_Terminate();

    // On failure, leave all native allocations in place so a retry is possible.
    if (result != ErrorCode.NoError)
    {
        return result;
    }

    // Free the pair of unmanaged buffers held for each stream
    // (presumably the input/output parameter structs — confirm against Stream ctor).
    foreach (Tuple <IntPtr, IntPtr> pair in StreamParameters.Values)
    {
        NativeMemoryUtils.Free(pair.Item1);
        NativeMemoryUtils.Free(pair.Item2);
    }

    StreamParameters.Clear();
    StreamCallbacks.Clear();
    StreamUserDatas.Clear();
    UserDataObjects.Clear();
    NativeMemoryUtils.FreeAll();

    return result;
}
/// <summary>
/// Starts the media capturing/source devices.
/// </summary>
/// <remarks>
/// Idempotent: subsequent calls after a successful start are no-ops. If start-up
/// fails part-way, <c>_isStarted</c> is reset so the caller can retry.
/// </remarks>
/// <exception cref="ApplicationException">Thrown when no audio output device is available.</exception>
public override async Task Start()
{
    if (_isStarted)
    {
        return;
    }

    // Set before the await to guard against re-entrant Start calls, but roll it
    // back on failure — the original code left it stuck at true after an
    // exception, turning every later Start() into a silent no-op.
    _isStarted = true;

    try
    {
        _sendingAudioFormat = base.GetSendingFormat(SDPMediaTypesEnum.audio);

        await base.Start();

        PortAudio.Initialize();

        var outputDevice = PortAudio.DefaultOutputDevice;
        if (outputDevice == PortAudio.NoDevice)
        {
            throw new ApplicationException("No audio output device available.");
        }

        // NOTE(review): input device is hard-coded to 0 while the output device is
        // resolved via DefaultOutputDevice and validated — presumably this should
        // use PortAudio.DefaultInputDevice with a NoDevice check too; confirm.
        StreamParameters stmInParams = new StreamParameters
        {
            device = 0,
            channelCount = 2,
            sampleFormat = SampleFormat.Float32
        };
        StreamParameters stmOutParams = new StreamParameters
        {
            device = outputDevice,
            channelCount = 2,
            sampleFormat = SampleFormat.Float32
        };

        // Combined audio capture and render.
        _audioIOStream = new Stream(stmInParams, stmOutParams, AUDIO_SAMPLING_RATE,
            AUDIO_SAMPLE_BUFFER_LENGTH, StreamFlags.NoFlag, AudioSampleAvailable, null);
        _audioIOStream.Start();

        if (_rtpAudioTimestampPeriod == 0)
        {
            _rtpAudioTimestampPeriod = (uint)(SDPMediaFormatInfo.GetClockRate(SDPMediaFormatsEnum.PCMU) / AUDIO_SAMPLE_BUFFER_LENGTH);
        }
    }
    catch
    {
        // Allow a retry after a failed start-up.
        _isStarted = false;
        throw;
    }
}