/// <summary>
/// The splitter will begin broadcasting its
/// sub streams immediately.
/// </summary>
public GATAudioThreadStreamSplitter( IGATAudioThreadStream stream, GATDataAllocationMode bufferAllocationMode )
{
    int i;

    _sourceStreamChannels = stream.NbOfChannels;
    if( _sourceStreamChannels < 2 )
    {
        Debug.LogWarning( "source stream is mono: " + stream.StreamName );
    }

    IntPtr outputBufferPointer = IntPtr.Zero;

    _sharedBufferSize = stream.BufferSizePerChannel;

    if( bufferAllocationMode == GATDataAllocationMode.Unmanaged )
    {
        // Unmanaged mode: wrap a freshly allocated array directly, no pointer is retrieved.
        _sharedBufferArray = new float[ _sharedBufferSize ];
        _sharedBuffer      = new GATData( _sharedBufferArray );
    }
    else
    {
        if( bufferAllocationMode == GATDataAllocationMode.Fixed )
        {
            _sharedBuffer = GATManager.GetFixedDataContainer( _sharedBufferSize, "StreamSplitter buffer" );
        }
        else
        {
            _sharedBuffer = GATManager.GetDataContainer( _sharedBufferSize );
        }

        _sharedBufferArray  = _sharedBuffer.ParentArray;
        outputBufferPointer = _sharedBuffer.GetPointer();
    }

    _memOffset = _sharedBuffer.MemOffset;

    // One mono proxy per source channel; all proxies share the same buffer, pointer and offset.
    _streamProxies = new GATAudioThreadStreamProxy[ _sourceStreamChannels ];

    for( i = 0; i < _sourceStreamChannels; i++ )
    {
        _streamProxies[ i ] = new GATAudioThreadStreamProxy( _sharedBufferSize, 1, outputBufferPointer, _sharedBuffer.MemOffset, ( stream.StreamName + " split " + i ) );
    }

    stream.AddAudioThreadStreamClient( this );

    _sourceStream = stream;
}
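/* Usage sketch (not part of the original source): constructing a splitter from an existing
   interleaved stream. Only the constructor signature and GATDataAllocationMode.Fixed come
   from the code above; the wrapper class and the way the stream is obtained are assumptions
   standing in for however a project exposes an IGATAudioThreadStream. */
public static class SplitterUsageSketch
{
    // 'stream' is assumed to come from elsewhere in the project
    // (any IGATAudioThreadStream implementer).
    public static GATAudioThreadStreamSplitter SplitInterleavedStream( IGATAudioThreadStream stream )
    {
        // The splitter starts broadcasting its mono sub streams as soon as it is constructed;
        // Fixed allocation takes the shared buffer from GATManager, matching the
        // GATDataAllocationMode.Fixed branch of the constructor above.
        return new GATAudioThreadStreamSplitter( stream, GATDataAllocationMode.Fixed );
    }
}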
protected virtual void OnEnable()
{
    if( GATInfo.NbOfChannels == 0 )
        return;

    if( _player != null ) //Object has just been deserialized, only setup transient objects
    {
        _panInfo = new GATDynamicPanInfo( _player, true );
        _panInfo.SetGains( _gains );

        _trackBuffer = GATManager.GetFixedDataContainer( GATInfo.AudioBufferSizePerChannel, "track" + TrackNb + " buffer" );

        _audioThreadStreamProxy = new GATAudioThreadStreamProxy( GATInfo.AudioBufferSizePerChannel, 1, _trackBuffer.GetPointer(), _trackBuffer.MemOffset, ( "Track " + _trackNb + " stream" ) );

        _player.onPlayerWillMix += PlayerWillBeginMixing;

        _mute = _nextMute; //only _nextMute is serialized

        _active = true;
    }
}
void Awake()
{
    AudioSource audio = GetComponent<AudioSource>();
    audio.playOnAwake = false;

#if UNITY_5
    if (audio.clip != null)
    {
        audio.clip = null;
#if GAT_DEBUG
        Debug.LogWarning("As of Unity 5, GATPlayer's AudioSource's clip should be null");
#endif
    }
#else
    if (GATManager.UniqueInstance.SupportedSampleRates == GATManager.SampleRatesSupport.All)
    {
        if (audio.clip == null || audio.clip.frequency != GATInfo.OutputSampleRate)
        {
            if (audio.clip != null)
            {
#if UNITY_EDITOR
                if (Application.isPlaying)
                {
                    Destroy(audio.clip);
                }
                else
                {
                    DestroyImmediate(audio.clip);
                }
#else
                Destroy(audio.clip);
#endif
            }

            audio.clip = AudioClip.Create("G-Audio", 1, GATInfo.NbOfChannels, GATInfo.OutputSampleRate, true, false);
        }
    }
    else
    {
        if (GATInfo.OutputSampleRate != 44100)
        {
            Debug.LogError("Supported sample rate setting is set to Only44100, but current output sample rate is " + GATInfo.OutputSampleRate + ". Disabling player.");
            this.enabled = false;
        }
        else if (audio.clip != null)
        {
#if UNITY_EDITOR
            if (Application.isPlaying)
            {
                Destroy(audio.clip);
            }
            else
            {
                DestroyImmediate(audio.clip);
            }
#else
            Destroy(audio.clip);
#endif
        }
    }
#endif

    // ************** Initialize serialized objects only if needed **************
    if (_FiltersHandler == null)
    {
        InitFilters();
    }

    if (_tracks == null)
    {
        _tracks = new List<GATTrack>(4);
    }

    // ************** Initialize transient objects always **************
    _scheduledSamples = new SampleQueue();
    _samplesToEnqueue = new SampleQueue();
    _discardedSamples = new SampleQueue();
    _playingSamples   = new PlayingSamplesQueue(this);

    _pool = new Stack<BufferedSample>(30);
    int i;
    for (i = 0; i < 30; i++)
    {
        _pool.Push(new BufferedSample());
    }

    _audioThreadStreamProxy = new GATAudioThreadStreamProxy(GATInfo.AudioBufferSizePerChannel, GATInfo.NbOfChannels, GATAudioBuffer.AudioBufferPointer, 0, ("GATPlayer " + gameObject.name));
}
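/* Setup sketch (not part of the original source): a GATPlayer host object as Awake() above
   expects it - an AudioSource on the same GameObject, with playOnAwake and the clip left for
   the player to manage. The helper class and method names below are assumptions. */
using UnityEngine;

public static class GATPlayerSetupSketch
{
    public static GATPlayer CreatePlayerObject()
    {
        GameObject go = new GameObject( "G-Audio Player" );
        go.AddComponent<AudioSource>();   // fetched by Awake() via GetComponent<AudioSource>()
        return go.AddComponent<GATPlayer>(); // Awake() then builds its queues, sample pool and stream proxy
    }
}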
protected virtual void Awake()
{
    _audioThreadStreamProxy = new GATAudioThreadStreamProxy( GATInfo.AudioBufferSizePerChannel, GATInfo.NbOfChannels, GATAudioBuffer.AudioBufferPointer, 0, "MicrophoneStream" );
    _source = this.GetComponent< AudioSource >();
}