/// <summary>
/// Audio-thread callback: copies one channel of the observed stream into _data
/// when new data has been requested via _needsData.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
{
    if( _needsData == false )
        return;

    if( !emptyData )
    {
        int sourceChannels = stream.NbOfChannels; //May change!
        int length = stream.BufferSizePerChannel;

        if( sourceChannels == 1 )
        {
            System.Array.Copy( data, offset, _data, 0, length );
        }
        else //deinterlace
        {
            int i = 0;

            // FIX: the original loop condition was ( offset < length ), comparing an
            // absolute read index against the interleaved length alone. With a non-zero
            // initial offset this under-reads the buffer. Bound the loop by the end of
            // the interleaved segment instead, mirroring the mono branch's use of offset.
            int end = offset + length * sourceChannels;
            offset += observedChannel;

            while( offset < end )
            {
                _data[ i ] = data[ offset ];
                offset += sourceChannels;
                i++;
            }
        }
    }

    _receivedZeroData = emptyData;
    _needsData = false;
    _dataIsUpdated = true;
}
/// <summary>
/// Re-resolves which audio thread stream this component should observe, based on the
/// current observeTrack / observedAudioStreamComp / observedChannel settings, and moves
/// the subscription from the previously observed stream to the new one.
/// </summary>
private void UpdateObservedStream()
{
    IGATAudioThreadStream stream = null;

    if (observeTrack)
    {
        // Track observation requires the referenced component to be a GATPlayer.
        GATPlayer player = observedAudioStreamComp as GATPlayer;
        if (player == null)
        {
            Debug.LogWarning("Could not find Player to observe track " + observedAudioStreamComp.name);
            return;
        }
        // observedChannel doubles as the track index when observing a track.
        GATTrack track = player.GetTrack(observedChannel);
        stream = (( IGATAudioThreadStreamOwner )track).GetAudioThreadStream(0);
    }
    else if (observedAudioStreamComp != null)
    {
        // The component may expose a stream directly, or own one or more streams.
        stream = observedAudioStreamComp as IGATAudioThreadStream;
        if (stream == null)
        {
            IGATAudioThreadStreamOwner streamOwner;
            streamOwner = observedAudioStreamComp as IGATAudioThreadStreamOwner;
            if (streamOwner != null)
            {
                // Fall back to the owner's first stream.
                stream = streamOwner.GetAudioThreadStream(0);
            }

            if (stream == null)
            {
                // Not a stream provider at all: warn and restore the last valid reference.
                Debug.LogWarning("Could not find IGATAudioThreadStream or IGATAudioThreadStreamOwner on GameObject " + observedAudioStreamComp.name);
                observedAudioStreamComp = _cachedStreamComp;
                return;
            }
        }
    }

    // Unsubscribe from the old stream before subscribing to the new one.
    if (_observedStream != null)
    {
        _observedStream.RemoveAudioThreadStreamClient(this);
    }

    if (stream != null)
    {
        stream.AddAudioThreadStreamClient(this);
    }
    else
    {
        // Nothing to observe anymore: reset flags and notify derived classes.
        _dataIsUpdated = false;
        _needsData = true;
        HandleNoMoreData();
    }

    _observedStream = stream;
    _cachedStreamComp = observedAudioStreamComp;
}
/// <summary>
/// Creates a cache recorder for the given stream.
/// </summary>
/// <param name="stream">Observed stream, mono or interleaved multichannel.</param>
/// <param name="caches">Target containers; exactly one per stream channel.</param>
/// <param name="handler">Optional callback invoked once the caches are full.</param>
public GATAudioThreadStreamToCache( IGATAudioThreadStream stream, GATData[] caches, AtEndHandler handler = null )
{
    _stream = stream;
    _onEnd = handler;
    Caches = caches;
    _numFramesPerRead = stream.BufferSizePerChannel;
}
/// <summary>
/// Creates a cache recorder for the given stream.
/// </summary>
/// <param name="stream">Observed stream, mono or interleaved multichannel.</param>
/// <param name="caches">Target containers; exactly one per stream channel.</param>
/// <param name="handler">Optional callback invoked once the caches are full.</param>
public GATAudioThreadStreamToCache(IGATAudioThreadStream stream, GATData[] caches, AtEndHandler handler = null)
{
    _stream = stream;
    _onEnd = handler;
    Caches = caches;
    _numFramesPerRead = stream.BufferSizePerChannel;
}
/// <summary>
/// Tears down the splitter: unsubscribes from the source stream and releases the
/// shared buffer. Safe to call more than once.
/// </summary>
/// <param name="explicitely">Unused here; kept for the dispose-pattern signature.
/// NOTE(review): managed members are touched regardless of the flag — confirm this is
/// never invoked from a finalizer.</param>
protected void Dispose(bool explicitely)
{
    if (_disposed)
        return;

    // Unsubscribe first so no audio-thread callback arrives mid-teardown.
    _sourceStream.RemoveAudioThreadStreamClient(this);
    _sourceStream = null;

    // Hand the shared buffer back to the allocator.
    _sharedBuffer.Release();
    _sharedBuffer = null;

    _disposed = true;
}
/// <summary>
/// The splitter immediately subscribes to the source stream and re-broadcasts each of
/// its channels as a separate mono sub stream.
/// </summary>
public GATAudioThreadStreamSplitter(IGATAudioThreadStream stream, GATDataAllocationMode bufferAllocationMode)
{
    _sourceStreamChannels = stream.NbOfChannels;
    if (_sourceStreamChannels < 2)
    {
        Debug.LogWarning("source stream is mono: " + stream.StreamName);
    }

    _sharedBufferSize = stream.BufferSizePerChannel;
    IntPtr outputBufferPointer = IntPtr.Zero;

    if (bufferAllocationMode == GATDataAllocationMode.Unmanaged)
    {
        // Plain managed allocation: no native pointer is available for the proxies.
        _sharedBufferArray = new float[_sharedBufferSize];
        _sharedBuffer = new GATData(_sharedBufferArray);
    }
    else
    {
        _sharedBuffer = bufferAllocationMode == GATDataAllocationMode.Fixed
            ? GATManager.GetFixedDataContainer(_sharedBufferSize, "StreamSplitter buffer")
            : GATManager.GetDataContainer(_sharedBufferSize);

        _sharedBufferArray = _sharedBuffer.ParentArray;
        outputBufferPointer = _sharedBuffer.GetPointer();
    }

    _memOffset = _sharedBuffer.MemOffset;

    // One mono proxy stream per source channel.
    _streamProxies = new GATAudioThreadStreamProxy[_sourceStreamChannels];
    for (int channel = 0; channel < _sourceStreamChannels; channel++)
    {
        _streamProxies[channel] = new GATAudioThreadStreamProxy(_sharedBufferSize, 1, outputBufferPointer, _sharedBuffer.MemOffset, stream.StreamName + " split " + channel);
    }

    stream.AddAudioThreadStreamClient(this);
    _sourceStream = stream;
}
/// <summary>
/// The splitter immediately subscribes to the source stream and re-broadcasts each of
/// its channels as a separate mono sub stream.
/// </summary>
public GATAudioThreadStreamSplitter( IGATAudioThreadStream stream, GATDataAllocationMode bufferAllocationMode )
{
    _sourceStreamChannels = stream.NbOfChannels;
    if( _sourceStreamChannels < 2 )
    {
        Debug.LogWarning( "source stream is mono: " + stream.StreamName );
    }

    _sharedBufferSize = stream.BufferSizePerChannel;
    IntPtr outputBufferPointer = IntPtr.Zero;

    if( bufferAllocationMode == GATDataAllocationMode.Unmanaged )
    {
        // Plain managed allocation: no native pointer is available for the proxies.
        _sharedBufferArray = new float[ _sharedBufferSize ];
        _sharedBuffer = new GATData( _sharedBufferArray );
    }
    else
    {
        _sharedBuffer = bufferAllocationMode == GATDataAllocationMode.Fixed
            ? GATManager.GetFixedDataContainer( _sharedBufferSize, "StreamSplitter buffer" )
            : GATManager.GetDataContainer( _sharedBufferSize );

        _sharedBufferArray = _sharedBuffer.ParentArray;
        outputBufferPointer = _sharedBuffer.GetPointer();
    }

    _memOffset = _sharedBuffer.MemOffset;

    // One mono proxy stream per source channel.
    _streamProxies = new GATAudioThreadStreamProxy[ _sourceStreamChannels ];
    for( int channel = 0; channel < _sourceStreamChannels; channel++ )
    {
        _streamProxies[ channel ] = new GATAudioThreadStreamProxy( _sharedBufferSize, 1, outputBufferPointer, _sharedBuffer.MemOffset, stream.StreamName + " split " + channel );
    }

    stream.AddAudioThreadStreamClient( this );
    _sourceStream = stream;
}
// We subscribe to the player's stream in OnEnable, and unsubscribe in OnDisable.
void OnEnable()
{
    // Audio thread stream access is protected behind an explicit implementation of
    // IGATAudioThreadStreamOwner.GetAudioThreadStream().
    // Handling of streams is delicate: the callback they provide is on the audio thread
    // and needs special care.
    // We first cast the default player to IGATAudioThreadStreamOwner in order to get
    // access to audio stream getter methods.
    IGATAudioThreadStreamOwner streamOwner = ( IGATAudioThreadStreamOwner )GATManager.DefaultPlayer;
    _observedStream = streamOwner.GetAudioThreadStream(0);

    // The point of this tutorial is to demonstrate stereo capture!
    if (_observedStream.NbOfChannels != 2)
    {
        // FIX: corrected "ouptut" typo in the user-facing error message.
        Debug.LogError("This tutorial only works with stereo output!");
        Destroy(this);
        return;
    }

    // Subscribe to the stream: we will now receive the HandleAudioThreadStream callback.
    _observedStream.AddAudioThreadStreamClient(this);
}
/// <summary>
/// Call from derived classes to attempt to get a valid stream from the
/// streamComponent and store it in _stream.
/// </summary>
/// <exception cref="GATException">Thrown when no suitable player, track, owner or
/// stream index can be resolved.</exception>
protected void GetStream()
{
    if( streamComponent == null )
    {
        streamComponent = gameObject.GetComponent( typeof( IGATAudioThreadStreamOwner ) );
    }

    if( streamIsTrack )
    {
        GATPlayer player = streamComponent as GATPlayer;
        if( player == null )
        {
            throw new GATException( "Cannot find GATPlayer to observe track stream. " );
        }

        if( streamIndex >= player.NbOfTracks )
        {
            throw new GATException( "Track does not exist!" );
        }

        GATTrack track = player.GetTrack( streamIndex );
        _stream = track.GetAudioThreadStream( 0 );
    }
    else
    {
        IGATAudioThreadStreamOwner owner = streamComponent as IGATAudioThreadStreamOwner;

        // FIX: validate owner and streamIndex BEFORE dereferencing owner. The original
        // called owner.GetAudioThreadStream() first, so a missing stream component threw
        // NullReferenceException instead of the intended GATException.
        if( owner == null )
        {
            throw new GATException( "Component is not a stream!" );
        }

        if( streamIndex >= owner.NbOfStreams )
        {
            throw new GATException( "Requested stream index does not exist." );
        }

        _stream = owner.GetAudioThreadStream( streamIndex );
    }
}
/// <summary>
/// Call from derived classes to attempt to get a valid stream from the
/// streamComponent and store it in _stream.
/// </summary>
/// <exception cref="GATException">Thrown when no suitable player, track, owner or
/// stream index can be resolved.</exception>
protected void GetStream()
{
    if (streamComponent == null)
    {
        streamComponent = gameObject.GetComponent(typeof(IGATAudioThreadStreamOwner));
    }

    if (streamIsTrack)
    {
        GATPlayer player = streamComponent as GATPlayer;
        if (player == null)
        {
            throw new GATException("Cannot find GATPlayer to observe track stream. ");
        }

        if (streamIndex >= player.NbOfTracks)
        {
            throw new GATException("Track does not exist!");
        }

        GATTrack track = player.GetTrack(streamIndex);
        _stream = track.GetAudioThreadStream(0);
    }
    else
    {
        IGATAudioThreadStreamOwner owner = streamComponent as IGATAudioThreadStreamOwner;

        // FIX: validate owner and streamIndex BEFORE dereferencing owner. The original
        // called owner.GetAudioThreadStream() first, so a missing stream component threw
        // NullReferenceException instead of the intended GATException.
        if (owner == null)
        {
            throw new GATException("Component is not a stream!");
        }

        if (streamIndex >= owner.NbOfStreams)
        {
            throw new GATException("Requested stream index does not exist.");
        }

        _stream = owner.GetAudioThreadStream(streamIndex);
    }
}
/// <summary>
/// Audio-thread callback: computes peak levels for the player's interleaved master
/// stream, or for the matching track stream, and flags the GUI for repaint.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
{
    // Player master stream: one peak level per interleaved output channel.
    if( stream == _playerStream )
    {
        for( int channel = 0; channel < _playerChannelsLevels.Length; channel++ )
        {
            _playerChannelsLevels[ channel ] = GATMaths.GetAbsMaxValueFromInterleaved( data, offset, stream.BufferSizePerChannel * stream.NbOfChannels, channel, stream.NbOfChannels );
        }
        _shouldRepaint = true;
        return;
    }

    // Otherwise, locate which observed track stream this callback belongs to.
    int trackIndex = -1;
    for( int j = 0; j < _trackStreams.Length; j++ )
    {
        if( _trackStreams[ j ] == stream )
        {
            trackIndex = j;
            break;
        }
    }

    if( trackIndex < 0 )
    {
        return;
    }

    // Empty buffers report a zero level; otherwise compute the peak.
    _trackLevels[ trackIndex ] = emptyData ? 0f : GATMaths.GetAbsMaxValue( data, offset, GATInfo.AudioBufferSizePerChannel );
    _shouldRepaint = true;
}
/// <summary>
/// Builds per-track stream/level/filter info arrays and subscribes this client to
/// every available track stream plus the player's master stream.
/// </summary>
void SetupTracksInfo()
{
    int nbOfTracks = _player.NbOfTracks;
    _trackStreams = new IGATAudioThreadStream[nbOfTracks];
    _trackLevels = new float[nbOfTracks];
    _trackFiltersInfo = new TrackFiltersInfo[nbOfTracks];

    for (int i = 0; i < nbOfTracks; i++)
    {
        GATTrack track = _player.GetTrack(i);

        // FIX: the original called track.GetAudioThreadStream(0) BEFORE its null
        // check, so a null track crashed instead of being skipped.
        if (track == null)
        {
            continue;
        }

        _trackStreams[i] = track.GetAudioThreadStream(0);
        if (_trackStreams[i] != null)
        {
            _trackStreams[i].AddAudioThreadStreamClient(this);
        }

        _trackFiltersInfo[i] = new TrackFiltersInfo(track.FiltersHandler);
    }

    _playerStream = (( IGATAudioThreadStreamOwner )_player).GetAudioThreadStream(0);
    _playerChannelsLevels = new float[GATInfo.NbOfChannels];
    _playerFiltersInfo = new TrackFiltersInfo(_player.FiltersHandler);
    if (_playerStream != null)
    {
        _playerStream.AddAudioThreadStreamClient(this);
    }
}
/// <summary>
/// Audio-thread callback: computes peak levels for the player's interleaved master
/// stream, or for the matching track stream, and flags the GUI for repaint.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    // Player master stream: one peak level per interleaved output channel.
    if (stream == _playerStream)
    {
        for (int channel = 0; channel < _playerChannelsLevels.Length; channel++)
        {
            _playerChannelsLevels[channel] = GATMaths.GetAbsMaxValueFromInterleaved(data, offset, stream.BufferSizePerChannel * stream.NbOfChannels, channel, stream.NbOfChannels);
        }
        _shouldRepaint = true;
        return;
    }

    // Otherwise, locate which observed track stream this callback belongs to.
    int trackIndex = -1;
    for (int j = 0; j < _trackStreams.Length; j++)
    {
        if (_trackStreams[j] == stream)
        {
            trackIndex = j;
            break;
        }
    }

    if (trackIndex < 0)
    {
        return;
    }

    // Empty buffers report a zero level; otherwise compute the peak.
    _trackLevels[trackIndex] = emptyData ? 0f : GATMaths.GetAbsMaxValue(data, offset, GATInfo.AudioBufferSizePerChannel);
    _shouldRepaint = true;
}
/// <summary>
/// Audio-thread callback: once the scheduled dspTime start is reached, forwards the
/// stream's buffers to the async writer, optionally stopping after a fixed frame count.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    int framesToWrite = stream.BufferSizePerChannel;

    // Not writing yet: decide whether the scheduled start falls within this buffer.
    if (_writing == false)
    {
        double dspTime = AudioSettings.dspTime;
        double nextDspTime = dspTime + GATInfo.AudioBufferDuration;

        // A target in the past is clamped to now so writing starts immediately.
        if (_targetDspTime < dspTime)
        {
            _targetDspTime = dspTime;
        }

        if (nextDspTime > _targetDspTime)
        {
            if (_waiting)
            {
                _waiting = false;
                _writing = true;

                // Start part-way through this buffer for sample accuracy.
                int frameOffsetInBuffer = ( int )((_targetDspTime - dspTime) * GATInfo.OutputSampleRate);
                offset += frameOffsetInBuffer * stream.NbOfChannels;
                framesToWrite -= frameOffsetInBuffer;
            }
            else
            {
                return;
            }
        }
        else
        {
            return;
        }
    }

    // Fixed-length recording: clamp the final write and finish.
    if (_recFixedFrames > 0 && (_writtenFrames + framesToWrite > _recFixedFrames))
    {
        framesToWrite = _recFixedFrames - _writtenFrames;
        _writer.WriteStreamAsync(data, offset, framesToWrite);
        EndWriting();
        return;
    }

    _writer.WriteStreamAsync(data, offset, framesToWrite);
    _writtenFrames += framesToWrite;
}
/// <summary>
/// Builds per-track stream/level/filter info arrays and subscribes this client to
/// every available track stream plus the player's master stream.
/// </summary>
void SetupTracksInfo()
{
    int nbOfTracks = _player.NbOfTracks;
    _trackStreams = new IGATAudioThreadStream[ nbOfTracks ];
    _trackLevels = new float[ nbOfTracks ];
    _trackFiltersInfo = new TrackFiltersInfo[ nbOfTracks ];

    for( int i = 0; i < nbOfTracks; i++ )
    {
        GATTrack track = _player.GetTrack( i );

        // FIX: the original called track.GetAudioThreadStream( 0 ) BEFORE its null
        // check, so a null track crashed instead of being skipped.
        if( track == null )
        {
            continue;
        }

        _trackStreams[ i ] = track.GetAudioThreadStream( 0 );
        if( _trackStreams[ i ] != null )
        {
            _trackStreams[ i ].AddAudioThreadStreamClient( this );
        }

        _trackFiltersInfo[ i ] = new TrackFiltersInfo( track.FiltersHandler );
    }

    _playerStream = ( ( IGATAudioThreadStreamOwner )_player ).GetAudioThreadStream( 0 );
    _playerChannelsLevels = new float[ GATInfo.NbOfChannels ];
    _playerFiltersInfo = new TrackFiltersInfo( _player.FiltersHandler );
    if( _playerStream != null )
    {
        _playerStream.AddAudioThreadStreamClient( this );
    }
}
/// <summary>
/// Audio-thread callback: de-interleaves the source buffer one channel at a time into
/// the shared buffer and rebroadcasts each channel through its mono proxy stream.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool isEmptyData, IGATAudioThreadStream stream)
{
    int endIndex = _memOffset + _sharedBufferSize;

    for (int channel = 0; channel < _sourceStreamChannels; channel++)
    {
        GATAudioThreadStreamProxy proxy = _streamProxies[channel];

        // Only de-interleave when someone is listening and there is actual data.
        if (proxy.HasClient && !isEmptyData)
        {
            int readIndex = offset + channel;
            for (int writeIndex = _memOffset; writeIndex < endIndex; writeIndex++)
            {
                _sharedBufferArray[writeIndex] = data[readIndex];
                readIndex += _sourceStreamChannels;
            }
        }

        // Clients inspect the empty flag themselves, so the buffer needn't be cleared.
        proxy.BroadcastStream(_sharedBufferArray, _memOffset, isEmptyData);
    }
}
/// <summary>
/// Audio-thread callback: de-interleaves the source buffer one channel at a time into
/// the shared buffer and rebroadcasts each channel through its mono proxy stream.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool isEmptyData, IGATAudioThreadStream stream )
{
    int endIndex = _memOffset + _sharedBufferSize;

    for( int channel = 0; channel < _sourceStreamChannels; channel++ )
    {
        GATAudioThreadStreamProxy proxy = _streamProxies[ channel ];

        // Only de-interleave when someone is listening and there is actual data.
        if( proxy.HasClient && !isEmptyData )
        {
            int readIndex = offset + channel;
            for( int writeIndex = _memOffset; writeIndex < endIndex; writeIndex++ )
            {
                _sharedBufferArray[ writeIndex ] = data[ readIndex ];
                readIndex += _sourceStreamChannels;
            }
        }

        // Clients inspect the empty flag themselves, so the buffer needn't be cleared.
        proxy.BroadcastStream( _sharedBufferArray, _memOffset, isEmptyData );
    }
}
/// <summary>
/// Audio-thread callback: once the scheduled dspTime start is reached, de-interleaves
/// the stream into the caches (copying or overdub-mixing), wrapping around when
/// looping or unsubscribing when the caches are full.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    int pos = _vPosition;
    int framesToCopy = _numFramesPerRead;
    int i;
    int numCaches = _caches.Length;
    double dspTime = AudioSettings.dspTime;

    // Not caching yet: check whether the scheduled start lands in this buffer.
    if (_vDoCache == false)
    {
        // A target in the past is clamped to now.
        if (_targetDspTime < dspTime)
        {
            _targetDspTime = dspTime;
        }

        if (_targetDspTime >= dspTime && _targetDspTime < dspTime + GATInfo.AudioBufferDuration)
        {
            if (_waiting)
            {
                _waiting = false;
                _vDoCache = true;

                // Start part-way through this buffer for sample accuracy.
                int frameOffsetInBuffer = ( int )((_targetDspTime - dspTime) * GATInfo.OutputSampleRate);
                framesToCopy = stream.BufferSizePerChannel - frameOffsetInBuffer;
                offset += frameOffsetInBuffer * stream.NbOfChannels;
            }
            else
            {
                return;
            }
        }
        else
        {
            return;
        }
    }

    // Does this buffer reach the end of the cache?
    if (pos + _numFramesPerRead >= _cacheFrames)
    {
        framesToCopy = _cacheFrames - pos;

        if (Loop)
        {
            // Write the tail that still fits, then wrap to position 0 and let the
            // main copy below write the remainder at the start of the cache.
            for (i = 0; i < numCaches; i++)
            {
                if (_overdub)
                {
                    _caches[i].MixFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                }
                else
                {
                    _caches[i].CopyFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                }
            }

            pos = 0;
            offset += framesToCopy * stream.NbOfChannels;
            framesToCopy = _numFramesPerRead - framesToCopy;
        }
        else
        {
            // Not looping: stop caching and unsubscribe from the stream.
            _vDoCache = false;
            _stream.RemoveAudioThreadStreamClient(this);
        }

        // NOTE: fired from the audio thread.
        if (_onEnd != null)
        {
            _onEnd(_caches, Loop);
        }
    }

    // Main copy: de-interleave one channel per cache, mixing when overdubbing.
    for (i = 0; i < numCaches; i++)
    {
        if (_overdub)
        {
            _caches[i].MixFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
        }
        else
        {
            _caches[i].CopyFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
        }
    }

    pos += framesToCopy;
    _vPosition = pos;
}
/// <summary>
/// Tears down the splitter: unsubscribes from the source stream and releases the
/// shared buffer. Safe to call more than once.
/// </summary>
/// <param name="explicitely">Unused here; kept for the dispose-pattern signature.
/// NOTE(review): managed members are touched regardless of the flag — confirm this is
/// never invoked from a finalizer.</param>
protected void Dispose( bool explicitely )
{
    if( _disposed )
    {
        return;
    }

    // Unsubscribe first so no audio-thread callback arrives mid-teardown.
    _sourceStream.RemoveAudioThreadStreamClient( this );
    _sourceStream = null;

    // Hand the shared buffer back to the allocator.
    _sharedBuffer.Release();
    _sharedBuffer = null;

    _disposed = true;
}
/// <summary>
/// Audio-thread callback: once the scheduled dspTime start is reached, de-interleaves
/// the stream into the caches (copying or overdub-mixing), wrapping around when
/// looping or unsubscribing when the caches are full.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
{
    int pos = _vPosition;
    int framesToCopy = _numFramesPerRead;
    int i;
    int numCaches = _caches.Length;
    double dspTime = AudioSettings.dspTime;

    // Not caching yet: check whether the scheduled start lands in this buffer.
    if( _vDoCache == false )
    {
        // A target in the past is clamped to now.
        if( _targetDspTime < dspTime )
        {
            _targetDspTime = dspTime;
        }

        if( _targetDspTime >= dspTime && _targetDspTime < dspTime + GATInfo.AudioBufferDuration )
        {
            if( _waiting )
            {
                _waiting = false;
                _vDoCache = true;

                // Start part-way through this buffer for sample accuracy.
                int frameOffsetInBuffer = ( int )( ( _targetDspTime - dspTime ) * GATInfo.OutputSampleRate );
                framesToCopy = stream.BufferSizePerChannel - frameOffsetInBuffer;
                offset += frameOffsetInBuffer * stream.NbOfChannels;
            }
            else
            {
                return;
            }
        }
        else
        {
            return;
        }
    }

    // Does this buffer reach the end of the cache?
    if( pos + _numFramesPerRead >= _cacheFrames )
    {
        framesToCopy = _cacheFrames - pos;

        if( Loop )
        {
            // Write the tail that still fits, then wrap to position 0 and let the
            // main copy below write the remainder at the start of the cache.
            for( i = 0; i < numCaches; i++ )
            {
                if( _overdub )
                {
                    _caches[ i ].MixFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                }
                else
                {
                    _caches[ i ].CopyFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                }
            }

            pos = 0;
            offset += framesToCopy * stream.NbOfChannels;
            framesToCopy = _numFramesPerRead - framesToCopy;
        }
        else
        {
            // Not looping: stop caching and unsubscribe from the stream.
            _vDoCache = false;
            _stream.RemoveAudioThreadStreamClient( this );
        }

        // NOTE: fired from the audio thread.
        if( _onEnd != null )
            _onEnd( _caches, Loop );
    }

    // Main copy: de-interleave one channel per cache, mixing when overdubbing.
    for( i = 0; i < numCaches; i++ )
    {
        if( _overdub )
        {
            _caches[ i ].MixFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
        }
        else
        {
            _caches[ i ].CopyFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
        }
    }

    pos += framesToCopy;
    _vPosition = pos;
}
/// <summary>
/// Re-resolves which audio thread stream this component should observe, based on the
/// current observeTrack / observedAudioStreamComp / observedChannel settings, and moves
/// the subscription from the previously observed stream to the new one.
/// </summary>
private void UpdateObservedStream()
{
    IGATAudioThreadStream stream = null;

    if( observeTrack )
    {
        // Observing a track: the referenced component must be a GATPlayer.
        GATPlayer player = observedAudioStreamComp as GATPlayer;
        if( player == null )
        {
            Debug.LogWarning( "Could not find Player to observe track " + observedAudioStreamComp.name );
            return;
        }

        // observedChannel doubles as the track index when observing a track.
        GATTrack track = player.GetTrack( observedChannel );
        stream = ( ( IGATAudioThreadStreamOwner )track ).GetAudioThreadStream( 0 );
    }
    else if( observedAudioStreamComp != null )
    {
        // The component may expose a stream directly, or own one or more streams.
        stream = observedAudioStreamComp as IGATAudioThreadStream;
        if( stream == null )
        {
            IGATAudioThreadStreamOwner streamOwner;
            streamOwner = observedAudioStreamComp as IGATAudioThreadStreamOwner;
            if( streamOwner != null )
            {
                // Fall back to the owner's first stream.
                stream = streamOwner.GetAudioThreadStream( 0 );
            }

            if( stream == null )
            {
                // Not a stream provider at all: warn and restore the last valid reference.
                Debug.LogWarning( "Could not find IGATAudioThreadStream or IGATAudioThreadStreamOwner on GameObject " + observedAudioStreamComp.name );
                observedAudioStreamComp = _cachedStreamComp;
                return;
            }
        }
    }

    // Unsubscribe from the old stream before subscribing to the new one.
    if( _observedStream != null )
    {
        _observedStream.RemoveAudioThreadStreamClient( this );
    }

    if( stream != null )
    {
        stream.AddAudioThreadStreamClient( this );
    }
    else
    {
        // Nothing to observe anymore: reset flags and notify derived classes.
        _dataIsUpdated = false;
        _needsData = true;
        HandleNoMoreData();
    }

    _observedStream = stream;
    _cachedStreamComp = observedAudioStreamComp;
}
/// <summary>
/// Audio-thread callback: once the scheduled dspTime start is reached, forwards the
/// stream's buffers to the async writer, optionally stopping after a fixed frame count.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
{
    int framesToWrite = stream.BufferSizePerChannel;

    // Not writing yet: decide whether the scheduled start falls within this buffer.
    if( _writing == false )
    {
        double dspTime = AudioSettings.dspTime;
        double nextDspTime = dspTime + GATInfo.AudioBufferDuration;

        // A target in the past is clamped to now so writing starts immediately.
        if( _targetDspTime < dspTime )
            _targetDspTime = dspTime;

        if( nextDspTime > _targetDspTime )
        {
            if( _waiting )
            {
                _waiting = false;
                _writing = true;

                // Start part-way through this buffer for sample accuracy.
                int frameOffsetInBuffer = ( int )( ( _targetDspTime - dspTime ) * GATInfo.OutputSampleRate );
                offset += frameOffsetInBuffer * stream.NbOfChannels;
                framesToWrite -= frameOffsetInBuffer;
            }
            else
            {
                return;
            }
        }
        else
        {
            return;
        }
    }

    // Fixed-length recording: clamp the final write and finish.
    if( _recFixedFrames > 0 && ( _writtenFrames + framesToWrite > _recFixedFrames ) )
    {
        framesToWrite = _recFixedFrames - _writtenFrames;
        _writer.WriteStreamAsync( data, offset, framesToWrite );
        EndWriting();
        return;
    }

    _writer.WriteStreamAsync( data, offset, framesToWrite );
    _writtenFrames += framesToWrite;
}
/// <summary>
/// Audio-thread callback: snapshots the latest buffer reference, its offset and the
/// empty flag for later consumption.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    _streamDataEmpty = emptyData;
    _streamOffset = offset;
    _streamBuffer = data;
}
/// <summary>
/// Audio-thread callback: while in the Recording state, de-interleaves the stereo
/// stream into _leftData / _rightData until the pre-allocated containers are full.
/// </summary>
public void HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    if (_state == State.Recording) // Monitor the state
    {
        bool atEnd = false;
        int appliedLength = stream.BufferSizePerChannel;

        // at the end of the recording, the length we will copy will be
        // smaller than buffer size.
        if (appliedLength > _leftData.Count - _recOffset) // We've reached the end of the recording, adjust applied length
        {
            atEnd = true;
            appliedLength = _leftData.Count - _recOffset;
        }

        // copy each channel to the GATData objects. CopyFromInterlaced handles de-interleaving.
        // NOTE(review): this overload takes no source-offset argument, unlike the
        // six-argument form used elsewhere — presumably it reads from the start of
        // data; confirm the stream always delivers offset == 0 here.
        _leftData.CopyFromInterlaced(data, appliedLength, _recOffset, 0, 2);
        _rightData.CopyFromInterlaced(data, appliedLength, _recOffset, 1, 2);

        if (atEnd) // Done, reset the _didFade flag and change state.
        {
            // We will fade the data to avoid pops, but not on the audio thread -
            // there's no reason to do it before playback.
            _didFade = false;
            _state = State.IdleRecInMemory;
            return;
        }

        _recOffset += appliedLength;
    }
}
/// <summary>
/// Audio-thread callback: copies one channel of the observed stream into _data
/// when new data has been requested via _needsData.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
{
    if (_needsData == false)
    {
        return;
    }

    if (!emptyData)
    {
        int sourceChannels = stream.NbOfChannels; //May change!
        int length = stream.BufferSizePerChannel;

        if (sourceChannels == 1)
        {
            System.Array.Copy(data, offset, _data, 0, length);
        }
        else //deinterlace
        {
            int i = 0;

            // FIX: the original loop condition was (offset < length), comparing an
            // absolute read index against the interleaved length alone. With a non-zero
            // initial offset this under-reads the buffer. Bound the loop by the end of
            // the interleaved segment instead, mirroring the mono branch's use of offset.
            int end = offset + length * sourceChannels;
            offset += observedChannel;

            while (offset < end)
            {
                _data[i] = data[offset];
                offset += sourceChannels;
                i++;
            }
        }
    }

    _receivedZeroData = emptyData;
    _needsData = false;
    _dataIsUpdated = true;
}
/// <summary>
/// Unsubscribes from the observed stream when the component is disabled: the stream
/// must never call back into a disabled component.
/// </summary>
void OnDisable()
{
    _observedStream.RemoveAudioThreadStreamClient(this); // Unsubscribing is vital!
    _observedStream = null;
}
/// <summary>
/// Audio-thread callback: snapshots the latest buffer reference, its offset and the
/// empty flag for later consumption.
/// </summary>
void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
{
    _streamDataEmpty = emptyData;
    _streamOffset = offset;
    _streamBuffer = data;
}
/// <summary>
/// Prepares streaming of the given stream into the given track. Nothing is routed
/// until Start() is called.
/// </summary>
public GATAudioThreadStreamToTrack( GATTrack track, IGATAudioThreadStream stream, bool exclusive )
{
    _exclusive = exclusive;
    _stream = stream;
    _track = track;
}
/// <summary>
/// Prepares streaming of the given stream into the given track. Nothing is routed
/// until Start() is called.
/// </summary>
public GATAudioThreadStreamToTrack(GATTrack track, IGATAudioThreadStream stream, bool exclusive)
{
    _exclusive = exclusive;
    _stream = stream;
    _track = track;
}