コード例 #1
0
        /// <summary>
        /// Audio-thread callback: captures one buffer of the observed channel
        /// into _data whenever the main thread has requested fresh data.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
        {
            if( _needsData == false )
                return;

            if( !emptyData )
            {
                int sourceChannels = stream.NbOfChannels; //May change!
                int length 		   = stream.BufferSizePerChannel;

                if( sourceChannels == 1 )
                {
                    System.Array.Copy ( data, offset, _data, 0, length );
                }
                else //deinterlace the observed channel out of the interleaved buffer
                {
                    int i = 0;
                    // BUGFIX: the loop bound must account for the initial offset into data.
                    // The previous bound ( offset < length * sourceChannels ) copied too few
                    // samples whenever the stream's offset was non-zero.
                    int end = offset + length * sourceChannels;
                    offset += observedChannel;
                    while( offset < end )
                    {
                        _data[i] = data[ offset ];
                        offset += sourceChannels;
                        i++;
                    }
                }
            }

            _receivedZeroData = emptyData;
            _needsData		  = false;
            _dataIsUpdated    = true;
        }
コード例 #2
0
        /// <summary>
        /// Resolves the stream to observe from observedAudioStreamComp
        /// (a track of a GATPlayer, a stream component, or a stream owner),
        /// swaps the subscription over to it, and caches the component.
        /// </summary>
        private void UpdateObservedStream()
        {
            IGATAudioThreadStream stream = null;

            if (observeTrack)
            {
                GATPlayer player = observedAudioStreamComp as GATPlayer;
                if (player == null)
                {
                    Debug.LogWarning("Could not find Player to observe track " + observedAudioStreamComp.name);
                    return;
                }

                GATTrack track = player.GetTrack(observedChannel);

                // BUGFIX: GetTrack may not return a valid track for this index;
                // guard before casting to avoid a NullReferenceException.
                if (track == null)
                {
                    Debug.LogWarning("Could not find track " + observedChannel + " on player " + observedAudioStreamComp.name);
                    return;
                }

                stream = (( IGATAudioThreadStreamOwner )track).GetAudioThreadStream(0);
            }
            else if (observedAudioStreamComp != null)
            {
                // The component may be a stream itself, or own one.
                stream = observedAudioStreamComp as IGATAudioThreadStream;

                if (stream == null)
                {
                    IGATAudioThreadStreamOwner streamOwner;
                    streamOwner = observedAudioStreamComp as IGATAudioThreadStreamOwner;
                    if (streamOwner != null)
                    {
                        stream = streamOwner.GetAudioThreadStream(0);
                    }

                    if (stream == null)
                    {
                        Debug.LogWarning("Could not find IGATAudioThreadStream or IGATAudioThreadStreamOwner on GameObject " + observedAudioStreamComp.name);
                        observedAudioStreamComp = _cachedStreamComp; // roll back to the last valid component
                        return;
                    }
                }
            }

            // Swap subscriptions: detach from the old stream, attach to the new one.
            if (_observedStream != null)
            {
                _observedStream.RemoveAudioThreadStreamClient(this);
            }

            if (stream != null)
            {
                stream.AddAudioThreadStreamClient(this);
            }
            else
            {
                // Nothing left to observe: flag data as stale and notify.
                _dataIsUpdated = false;
                _needsData     = true;
                HandleNoMoreData();
            }

            _observedStream   = stream;
            _cachedStreamComp = observedAudioStreamComp;
        }
コード例 #3
0
        /// <summary>
        /// Initializes a new instance of the <see cref="GATAudioThreadStreamToCache"/> class.
        /// </summary>
        /// <param name="stream">The observed multichannel or mono stream.</param>
        /// <param name="caches">The caches to record to, one per channel must be provided.</param>
        /// <param name="handler">Optional callback fired when the cache is full.</param>
        public GATAudioThreadStreamToCache( IGATAudioThreadStream stream, GATData[] caches, AtEndHandler handler = null )
        {
            _stream           = stream;
            _onEnd            = handler;
            _numFramesPerRead = stream.BufferSizePerChannel; // one read per audio buffer
            Caches            = caches;
        }
コード例 #4
0
        /// <summary>
        /// Initializes a new instance of the <see cref="GATAudioThreadStreamToCache"/> class.
        /// </summary>
        /// <param name="stream">The observed multichannel or mono stream.</param>
        /// <param name="caches">The caches to record to, one per channel must be provided.</param>
        /// <param name="handler">Optional callback fired when the cache is full.</param>
        public GATAudioThreadStreamToCache(IGATAudioThreadStream stream, GATData[] caches, AtEndHandler handler = null)
        {
            Caches  = caches;
            _stream = stream;
            _onEnd  = handler;

            // Snapshot the per-channel buffer size once; reads use this fixed length.
            _numFramesPerRead = stream.BufferSizePerChannel;
        }
コード例 #5
0
        /// <summary>
        /// Releases the shared buffer and detaches from the source stream.
        /// Safe to call multiple times: subsequent calls are no-ops.
        /// </summary>
        protected void Dispose(bool explicitely)
        {
            if (_disposed)
                return;

            // Detach from the audio thread first so no callback touches the freed buffer.
            _sourceStream.RemoveAudioThreadStreamClient(this);
            _sourceStream = null;

            _sharedBuffer.Release();
            _sharedBuffer = null;

            _disposed = true;
        }
コード例 #6
0
        /// <summary>
        /// The splitter will begin broadcasting it's
        /// sub streams immediately.
        /// </summary>
        public GATAudioThreadStreamSplitter(IGATAudioThreadStream stream, GATDataAllocationMode bufferAllocationMode)
        {
            _sourceStreamChannels = stream.NbOfChannels;

            if (_sourceStreamChannels < 2)
            {
                Debug.LogWarning("source stream is mono: " + stream.StreamName);
            }

            _sharedBufferSize = stream.BufferSizePerChannel;

            IntPtr outputBufferPointer = IntPtr.Zero;

            if (bufferAllocationMode == GATDataAllocationMode.Unmanaged)
            {
                // Plain managed array: no pointer is available in this mode.
                _sharedBufferArray = new float[_sharedBufferSize];
                _sharedBuffer      = new GATData(_sharedBufferArray);
            }
            else
            {
                _sharedBuffer = (bufferAllocationMode == GATDataAllocationMode.Fixed)
                    ? GATManager.GetFixedDataContainer(_sharedBufferSize, "StreamSplitter buffer")
                    : GATManager.GetDataContainer(_sharedBufferSize);

                _sharedBufferArray  = _sharedBuffer.ParentArray;
                outputBufferPointer = _sharedBuffer.GetPointer();
            }

            _memOffset = _sharedBuffer.MemOffset;

            // One mono proxy stream per source channel, all sharing the same buffer.
            _streamProxies = new GATAudioThreadStreamProxy[_sourceStreamChannels];

            for (int channel = 0; channel < _sourceStreamChannels; channel++)
            {
                _streamProxies[channel] = new GATAudioThreadStreamProxy(_sharedBufferSize, 1, outputBufferPointer, _sharedBuffer.MemOffset, (stream.StreamName + " split " + channel));
            }

            _sourceStream = stream;
            stream.AddAudioThreadStreamClient(this);
        }
コード例 #7
0
        /// <summary>
        /// The splitter will begin broadcasting it's 
        /// sub streams immediately. 
        /// </summary>
        public GATAudioThreadStreamSplitter( IGATAudioThreadStream stream, GATDataAllocationMode bufferAllocationMode )
        {
            _sourceStreamChannels = stream.NbOfChannels;

            if( _sourceStreamChannels < 2 )
            {
                Debug.LogWarning( "source stream is mono: " + stream.StreamName );
            }

            _sharedBufferSize = stream.BufferSizePerChannel;

            IntPtr outputBufferPointer = IntPtr.Zero;

            if( bufferAllocationMode == GATDataAllocationMode.Unmanaged )
            {
                // Unmanaged mode wraps a plain managed array; no pointer available.
                _sharedBufferArray = new float[ _sharedBufferSize ];
                _sharedBuffer      = new GATData( _sharedBufferArray );
            }
            else
            {
                _sharedBuffer = ( bufferAllocationMode == GATDataAllocationMode.Fixed )
                    ? GATManager.GetFixedDataContainer( _sharedBufferSize, "StreamSplitter buffer" )
                    : GATManager.GetDataContainer( _sharedBufferSize );

                _sharedBufferArray  = _sharedBuffer.ParentArray;
                outputBufferPointer = _sharedBuffer.GetPointer();
            }

            _memOffset = _sharedBuffer.MemOffset;

            // Create one mono proxy per source channel; they all broadcast from the shared buffer.
            _streamProxies = new GATAudioThreadStreamProxy[ _sourceStreamChannels ];

            for( int channel = 0; channel < _sourceStreamChannels; channel++ )
            {
                _streamProxies[ channel ] = new GATAudioThreadStreamProxy( _sharedBufferSize, 1, outputBufferPointer, _sharedBuffer.MemOffset, ( stream.StreamName + " split " + channel ) );
            }

            _sourceStream = stream;
            stream.AddAudioThreadStreamClient( this );
        }
コード例 #8
0
ファイル: Example_04.cs プロジェクト: uniphonic/G-Audio
        void OnEnable()         // We subscribe to the player's stream in OnEnable, and unsubscribe in OnDisable
        {
            // audio thread stream access is protected behind an explicit implementation of IGATAudioThreadStreamOwner.GetAudioThreadStream()
            // Handling of streams is delicate: the callback they provide is on the audio thread and needs special care.
            // We first cast the default player to IGATAudioThreadStreamOwner in order to get access to audio stream getter methods.
            IGATAudioThreadStreamOwner streamOwner = ( IGATAudioThreadStreamOwner )GATManager.DefaultPlayer;

            _observedStream = streamOwner.GetAudioThreadStream(0);

            // The point of this tutorial is to demonstrate stereo capture!
            if (_observedStream.NbOfChannels != 2)
            {
                // BUGFIX: corrected typo in user-facing error message ("ouptut" -> "output").
                Debug.LogError("This tutorial only works with stereo output!");
                Destroy(this);
                return;
            }
            _observedStream.AddAudioThreadStreamClient(this);               //Subscribe to the stream: we will now receive the HandleAudioThreadStream callback.
        }
コード例 #9
0
ファイル: AGATStreamObserver.cs プロジェクト: gregzo/G-Audio
        /// <summary>
        /// Call from derived classes to attempt to
        /// get a valid stream from the streamComponent and
        /// store it in _stream.
        /// </summary>
        protected void GetStream()
        {
            if( streamComponent == null )
            {
                streamComponent = gameObject.GetComponent( typeof( IGATAudioThreadStreamOwner ) );
            }

            if( streamIsTrack )
            {
                GATPlayer player = streamComponent as GATPlayer;
                if( player == null )
                {
                    throw new GATException( "Cannot find GATPlayer to observe track stream. " );
                }

                if( streamIndex >= player.NbOfTracks )
                {
                    throw new GATException( "Track does not exist!" );
                }

                GATTrack track = player.GetTrack( streamIndex );

                _stream = track.GetAudioThreadStream( 0 );
            }
            else
            {
                IGATAudioThreadStreamOwner owner = streamComponent as IGATAudioThreadStreamOwner;

                // BUGFIX: validate owner and index BEFORE dereferencing. The original
                // called owner.GetAudioThreadStream first, so a null owner or invalid
                // index threw NullReferenceException instead of the intended GATException.
                if( owner == null )
                {
                    throw new GATException( "Component is not a stream!" );
                }

                if( streamIndex >= owner.NbOfStreams )
                {
                    throw new GATException( "Requested stream index does not exist." );
                }

                _stream = owner.GetAudioThreadStream( streamIndex );
            }
        }
コード例 #10
0
        /// <summary>
        /// Call from derived classes to attempt to
        /// get a valid stream from the streamComponent and
        /// store it in _stream.
        /// </summary>
        protected void GetStream()
        {
            if (streamComponent == null)
            {
                streamComponent = gameObject.GetComponent(typeof(IGATAudioThreadStreamOwner));
            }

            if (streamIsTrack)
            {
                GATPlayer player = streamComponent as GATPlayer;
                if (player == null)
                {
                    throw new GATException("Cannot find GATPlayer to observe track stream. ");
                }

                if (streamIndex >= player.NbOfTracks)
                {
                    throw new GATException("Track does not exist!");
                }

                GATTrack track = player.GetTrack(streamIndex);

                _stream = track.GetAudioThreadStream(0);
            }
            else
            {
                IGATAudioThreadStreamOwner owner = streamComponent as IGATAudioThreadStreamOwner;

                // BUGFIX: validate owner and index BEFORE dereferencing. The original
                // called owner.GetAudioThreadStream first, so a null owner or invalid
                // index threw NullReferenceException instead of the intended GATException.
                if (owner == null)
                {
                    throw new GATException("Component is not a stream!");
                }

                if (streamIndex >= owner.NbOfStreams)
                {
                    throw new GATException("Requested stream index does not exist.");
                }

                _stream = owner.GetAudioThreadStream(streamIndex);
            }
        }
コード例 #11
0
ファイル: GATPlayerInspector.cs プロジェクト: gregzo/G-Audio
    /// <summary>
    /// Audio-thread callback: updates peak levels for the player's
    /// interleaved stream or for a single monitored track stream.
    /// </summary>
    void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
    {
        // Player stream: compute one peak level per output channel from the interleaved buffer.
        if( stream == _playerStream )
        {
            for( int channel = 0; channel < _playerChannelsLevels.Length; channel++ )
            {
                _playerChannelsLevels[ channel ] = GATMaths.GetAbsMaxValueFromInterleaved( data, offset, stream.BufferSizePerChannel * stream.NbOfChannels, channel, stream.NbOfChannels );
            }
            _shouldRepaint = true;
            return;
        }

        // Otherwise, identify which monitored track stream fired the callback.
        int trackIndex = -1;
        for( int i = 0; i < _trackStreams.Length; i++ )
        {
            if( _trackStreams[ i ] == stream )
            {
                trackIndex = i;
                break;
            }
        }

        if( trackIndex == -1 )
            return; // not a stream we monitor

        // Empty buffers report a zero level; otherwise take the absolute peak.
        _trackLevels[ trackIndex ] = emptyData ? 0f : GATMaths.GetAbsMaxValue( data, offset, GATInfo.AudioBufferSizePerChannel );
        _shouldRepaint = true;
    }
コード例 #12
0
    /// <summary>
    /// Caches track and player streams, per-track levels and filter info,
    /// and subscribes this inspector to every available stream.
    /// </summary>
    void SetupTracksInfo()
    {
        GATTrack track;
        int      nbOfTracks;

        nbOfTracks        = _player.NbOfTracks;
        _trackStreams     = new IGATAudioThreadStream[nbOfTracks];
        _trackLevels      = new float[nbOfTracks];
        _trackFiltersInfo = new TrackFiltersInfo[nbOfTracks];

        for (int i = 0; i < nbOfTracks; i++)
        {
            track = _player.GetTrack(i);

            // BUGFIX: check track for null BEFORE dereferencing it. The original
            // called track.GetAudioThreadStream(0) first, making the later
            // null check unreachable in the case it was meant to protect.
            if (track != null)
            {
                _trackStreams[i]     = track.GetAudioThreadStream(0);
                _trackFiltersInfo[i] = new TrackFiltersInfo(track.FiltersHandler);
            }

            if (_trackStreams[i] != null)
            {
                _trackStreams[i].AddAudioThreadStreamClient(this);
            }
        }

        _playerStream         = (( IGATAudioThreadStreamOwner )_player).GetAudioThreadStream(0);
        _playerChannelsLevels = new float[GATInfo.NbOfChannels];
        _playerFiltersInfo    = new TrackFiltersInfo(_player.FiltersHandler);

        if (_playerStream != null)
        {
            _playerStream.AddAudioThreadStreamClient(this);
        }
    }
コード例 #13
0
    /// <summary>
    /// Audio-thread callback: refreshes the peak level of the player's
    /// interleaved output, or of the single track stream that fired.
    /// </summary>
    void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
    {
        if (stream == _playerStream)
        {
            // Interleaved player output: one peak per channel.
            int channelCount = stream.NbOfChannels;
            int totalSamples = stream.BufferSizePerChannel * channelCount;

            for (int channel = 0; channel < _playerChannelsLevels.Length; channel++)
            {
                _playerChannelsLevels[channel] = GATMaths.GetAbsMaxValueFromInterleaved(data, offset, totalSamples, channel, channelCount);
            }

            _shouldRepaint = true;
            return;
        }

        // Locate the track stream that produced this buffer.
        int trackIndex = -1;
        for (int i = 0; i < _trackStreams.Length; i++)
        {
            if (_trackStreams[i] == stream)
            {
                trackIndex = i;
                break;
            }
        }

        if (trackIndex < 0)
        {
            return; // unknown stream, nothing to update
        }

        // Empty data maps to a zero level; otherwise measure the absolute peak.
        _trackLevels[trackIndex] = emptyData ? 0f : GATMaths.GetAbsMaxValue(data, offset, GATInfo.AudioBufferSizePerChannel);
        _shouldRepaint           = true;
    }
コード例 #14
0
        /// <summary>
        /// Audio-thread callback: streams the incoming buffer to the async wav
        /// writer. Before writing starts, waits until the scheduled start time
        /// (_targetDspTime) falls inside the current buffer, then begins writing
        /// mid-buffer at the corresponding frame.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
        {
            int framesToWrite = stream.BufferSizePerChannel;

            if (_writing == false)
            {
                double dspTime     = AudioSettings.dspTime;
                double nextDspTime = dspTime + GATInfo.AudioBufferDuration;

                // A start time in the past is clamped to "now".
                if (_targetDspTime < dspTime)
                {
                    _targetDspTime = dspTime;
                }

                // Start only when the target time falls within the current buffer.
                if (nextDspTime > _targetDspTime)
                {
                    if (_waiting)
                    {
                        _waiting = false;
                        _writing = true;

                        // Convert the remaining time delta into a frame offset so
                        // writing begins exactly at the scheduled dsp time.
                        // NOTE(review): offset advances by frames * NbOfChannels
                        // (interleaved samples) while framesToWrite decreases by
                        // frames — presumably WriteStreamAsync takes a frame count;
                        // confirm against the writer's API.
                        int frameOffsetInBuffer = ( int )((_targetDspTime - dspTime) * GATInfo.OutputSampleRate);
                        offset        += frameOffsetInBuffer * stream.NbOfChannels;
                        framesToWrite -= frameOffsetInBuffer;
                    }
                    else
                    {
                        return;
                    }
                }
                else
                {
                    return;
                }
            }

            // Fixed-length recording: clamp the final write and finish.
            if (_recFixedFrames > 0 && (_writtenFrames + framesToWrite > _recFixedFrames))
            {
                framesToWrite = _recFixedFrames - _writtenFrames;
                _writer.WriteStreamAsync(data, offset, framesToWrite);
                EndWriting();

                return;
            }

            _writer.WriteStreamAsync(data, offset, framesToWrite);
            _writtenFrames += framesToWrite;
        }
コード例 #15
0
ファイル: GATPlayerInspector.cs プロジェクト: gregzo/G-Audio
    /// <summary>
    /// Caches track and player streams, per-track levels and filter info,
    /// and subscribes this inspector to every available stream.
    /// </summary>
    void SetupTracksInfo()
    {
        GATTrack track;
        int nbOfTracks;

        nbOfTracks 			= _player.NbOfTracks;
        _trackStreams 		= new IGATAudioThreadStream[ nbOfTracks ];
        _trackLevels  		= new float[ nbOfTracks ];
        _trackFiltersInfo	= new TrackFiltersInfo[ nbOfTracks ];

        for( int i = 0; i < nbOfTracks; i++ )
        {
            track = _player.GetTrack( i );

            // BUGFIX: check track for null BEFORE dereferencing it. The original
            // called track.GetAudioThreadStream( 0 ) first, making the later
            // null check unreachable in the case it was meant to protect.
            if( track != null )
            {
                _trackStreams[ i ]     = track.GetAudioThreadStream( 0 );
                _trackFiltersInfo[ i ] = new TrackFiltersInfo( track.FiltersHandler );
            }

            if( _trackStreams[ i ] != null )
            {
                _trackStreams[ i ].AddAudioThreadStreamClient( this );
            }
        }

        _playerStream 			= ( ( IGATAudioThreadStreamOwner )_player ).GetAudioThreadStream( 0 );
        _playerChannelsLevels 	= new float[ GATInfo.NbOfChannels ];
        _playerFiltersInfo 		= new TrackFiltersInfo( _player.FiltersHandler );

        if( _playerStream != null )
        {
            _playerStream.AddAudioThreadStreamClient( this );
        }
    }
コード例 #16
0
        /// <summary>
        /// Audio-thread callback: de-interlaces each source channel into the
        /// shared buffer and rebroadcasts it through the per-channel proxies.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool isEmptyData, IGATAudioThreadStream stream)
        {
            for (int channel = 0; channel < _sourceStreamChannels; channel++)
            {
                GATAudioThreadStreamProxy proxy = _streamProxies[channel];

                // De-interlace only when someone is listening and there is data to copy.
                if (proxy.HasClient && !isEmptyData)
                {
                    int readIndex  = offset + channel;
                    int writeIndex = _memOffset;
                    int writeEnd   = _memOffset + _sharedBufferSize;

                    for (; writeIndex < writeEnd; writeIndex++, readIndex += _sourceStreamChannels)
                    {
                        _sharedBufferArray[writeIndex] = data[readIndex];
                    }
                }

                // Clients check the empty flag themselves, so the buffer need not be cleared.
                proxy.BroadcastStream(_sharedBufferArray, _memOffset, isEmptyData);
            }
        }
コード例 #17
0
        /// <summary>
        /// Audio-thread callback: copies each channel out of the interleaved
        /// source buffer and forwards it to that channel's proxy stream.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool isEmptyData, IGATAudioThreadStream stream )
        {
            for( int channel = 0; channel < _sourceStreamChannels; channel++ )
            {
                GATAudioThreadStreamProxy channelProxy = _streamProxies[ channel ];

                // Skip the copy when nobody listens to this channel or the buffer carries no data.
                if( channelProxy.HasClient && !isEmptyData )
                {
                    int readIndex  = offset + channel;
                    int writeIndex = _memOffset;
                    int writeEnd   = _memOffset + _sharedBufferSize;

                    for( ; writeIndex < writeEnd; writeIndex++ )
                    {
                        _sharedBufferArray[ writeIndex ] = data[ readIndex ];
                        readIndex += _sourceStreamChannels;
                    }
                }

                // The empty flag travels with the broadcast, so stale buffer contents are harmless.
                channelProxy.BroadcastStream( _sharedBufferArray, _memOffset, isEmptyData );
            }
        }
コード例 #18
0
        /// <summary>
        /// Audio-thread callback: copies (or mixes, when _overdub) the interleaved
        /// stream into the per-channel caches. Supports a scheduled start time
        /// (_targetDspTime), looping back to the cache start, and stopping when
        /// the cache is full.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
        {
            int    pos          = _vPosition;
            int    framesToCopy = _numFramesPerRead;
            int    i;
            int    numCaches = _caches.Length;
            double dspTime   = AudioSettings.dspTime;

            // Not yet caching: decide whether recording starts within this buffer.
            if (_vDoCache == false)
            {
                // A start time in the past is clamped to "now".
                if (_targetDspTime < dspTime)
                {
                    _targetDspTime = dspTime;
                }

                // Start only when the target time falls inside the current buffer.
                if (_targetDspTime >= dspTime && _targetDspTime < dspTime + GATInfo.AudioBufferDuration)
                {
                    if (_waiting)
                    {
                        _waiting  = false;
                        _vDoCache = true;

                        // Begin mid-buffer at the frame matching the scheduled dsp time.
                        int frameOffsetInBuffer = ( int )((_targetDspTime - dspTime) * GATInfo.OutputSampleRate);

                        framesToCopy = stream.BufferSizePerChannel - frameOffsetInBuffer;
                        offset      += frameOffsetInBuffer * stream.NbOfChannels;
                    }
                    else
                    {
                        return;
                    }
                }
                else
                {
                    return;
                }
            }

            // End-of-cache handling: this buffer would overrun the cache.
            if (pos + _numFramesPerRead >= _cacheFrames)
            {
                framesToCopy = _cacheFrames - pos;

                if (Loop)
                {
                    // Copy the tail that fits, then wrap: the loop below writes the
                    // remainder of the buffer at the start of the caches.
                    for (i = 0; i < numCaches; i++)
                    {
                        if (_overdub)
                        {
                            _caches[i].MixFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                        }
                        else
                        {
                            _caches[i].CopyFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                        }
                    }

                    pos          = 0;
                    offset      += framesToCopy * stream.NbOfChannels;
                    framesToCopy = _numFramesPerRead - framesToCopy;
                }
                else
                {
                    // Not looping: stop caching and detach from the stream.
                    // NOTE(review): the final partial copy below still runs after
                    // unsubscribing — presumably intentional to flush the tail.
                    _vDoCache = false;

                    _stream.RemoveAudioThreadStreamClient(this);
                }

                // Notify listeners that the cache end was reached (Loop tells them whether recording continues).
                if (_onEnd != null)
                {
                    _onEnd(_caches, Loop);
                }
            }

            // Normal copy (or the wrapped remainder when looping).
            for (i = 0; i < numCaches; i++)
            {
                if (_overdub)
                {
                    _caches[i].MixFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                }
                else
                {
                    _caches[i].CopyFromInterlaced(data, offset, framesToCopy, pos, i, numCaches);
                }
            }

            pos += framesToCopy;

            _vPosition = pos;
        }
コード例 #19
0
        /// <summary>
        /// Detaches from the source stream and releases the shared buffer.
        /// Subsequent calls are no-ops.
        /// </summary>
        protected void Dispose( bool explicitely )
        {
            if( _disposed )
            {
                return;
            }

            // Unsubscribe first so the audio thread cannot touch the released buffer.
            _sourceStream.RemoveAudioThreadStreamClient( this );
            _sourceStream = null;

            _sharedBuffer.Release();
            _sharedBuffer = null;

            _disposed = true;
        }
コード例 #20
0
        /// <summary>
        /// Audio-thread callback: copies (or mixes, when _overdub) the interleaved
        /// stream into the per-channel caches. Supports a scheduled start time
        /// (_targetDspTime), looping back to the cache start, and stopping when
        /// the cache is full.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
        {
            int pos = _vPosition;
            int framesToCopy = _numFramesPerRead;
            int i;
            int numCaches = _caches.Length;
            double dspTime = AudioSettings.dspTime;

            // Not yet caching: decide whether recording starts within this buffer.
            if( _vDoCache == false  )
            {
                // A start time in the past is clamped to "now".
                if( _targetDspTime < dspTime )
                {
                    _targetDspTime = dspTime;
                }

                // Start only when the target time falls inside the current buffer.
                if( _targetDspTime >= dspTime  && _targetDspTime < dspTime + GATInfo.AudioBufferDuration )
                {
                    if( _waiting )
                    {
                        _waiting  = false;
                        _vDoCache = true;

                        // Begin mid-buffer at the frame matching the scheduled dsp time.
                        int frameOffsetInBuffer = ( int )( ( _targetDspTime - dspTime ) * GATInfo.OutputSampleRate );

                        framesToCopy = stream.BufferSizePerChannel - frameOffsetInBuffer;
                        offset += frameOffsetInBuffer * stream.NbOfChannels;
                    }
                    else
                    {
                        return;
                    }
                }
                else
                {
                    return;
                }
            }

            // End-of-cache handling: this buffer would overrun the cache.
            if( pos + _numFramesToCopy >= _cacheFrames )
            {
                framesToCopy = _cacheFrames - pos;

                if( Loop )
                {
                    // Copy the tail that fits, then wrap: the loop below writes
                    // the remainder of the buffer at the start of the caches.
                    for( i = 0; i < numCaches; i++ )
                    {
                        if( _overdub )
                        {
                            _caches[ i ].MixFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                        }
                        else
                        {
                            _caches[ i ].CopyFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                        }

                    }

                    pos = 0;
                    offset += framesToCopy * stream.NbOfChannels;
                    framesToCopy = _numFramesPerRead - framesToCopy;
                }
                else
                {
                    // Not looping: stop caching and detach from the stream.
                    // NOTE(review): the final partial copy below still runs after
                    // unsubscribing — presumably intentional to flush the tail.
                    _vDoCache = false;

                    _stream.RemoveAudioThreadStreamClient( this );
                }

                // Notify listeners that the cache end was reached (Loop tells them whether recording continues).
                if( _onEnd != null )
                    _onEnd( _caches, Loop );
            }

            // Normal copy (or the wrapped remainder when looping).
            for( i = 0; i < numCaches; i++ )
            {
                if( _overdub )
                {
                    _caches[ i ].MixFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                }
                else
                {
                    _caches[ i ].CopyFromInterlaced( data, offset, framesToCopy, pos, i, numCaches );
                }
            }

            pos += framesToCopy;

            _vPosition = pos;
        }
コード例 #21
0
        /// <summary>
        /// Resolves the stream to observe from observedAudioStreamComp
        /// (a track of a GATPlayer, a stream component, or a stream owner),
        /// swaps the subscription over to it, and caches the component.
        /// </summary>
        private void UpdateObservedStream()
        {
            IGATAudioThreadStream stream = null;

            if( observeTrack )
            {
                GATPlayer player = observedAudioStreamComp as GATPlayer;
                if( player == null )
                {
                    Debug.LogWarning( "Could not find Player to observe track " + observedAudioStreamComp.name );
                    return;
                }

                GATTrack track = player.GetTrack( observedChannel );

                // BUGFIX: GetTrack may not return a valid track for this index;
                // guard before casting to avoid a NullReferenceException.
                if( track == null )
                {
                    Debug.LogWarning( "Could not find track " + observedChannel + " on player " + observedAudioStreamComp.name );
                    return;
                }

                stream = ( ( IGATAudioThreadStreamOwner )track ).GetAudioThreadStream( 0 );
            }
            else if( observedAudioStreamComp != null )
            {
                // The component may be a stream itself, or own one.
                stream = observedAudioStreamComp as IGATAudioThreadStream;

                if( stream == null )
                {
                    IGATAudioThreadStreamOwner streamOwner;
                    streamOwner = observedAudioStreamComp as IGATAudioThreadStreamOwner;
                    if( streamOwner != null )
                    {
                        stream = streamOwner.GetAudioThreadStream( 0 );
                    }

                    if( stream == null )
                    {
                        Debug.LogWarning( "Could not find IGATAudioThreadStream or IGATAudioThreadStreamOwner on GameObject " + observedAudioStreamComp.name );
                        observedAudioStreamComp = _cachedStreamComp; // roll back to the last valid component
                        return;
                    }
                }
            }

            // Swap subscriptions: detach from the old stream, attach to the new one.
            if( _observedStream != null )
            {
                _observedStream.RemoveAudioThreadStreamClient( this );
            }

            if( stream != null )
            {
                stream.AddAudioThreadStreamClient( this );
            }
            else
            {
                // Nothing left to observe: flag data as stale and notify.
                _dataIsUpdated = false;
                _needsData = true;
                HandleNoMoreData();
            }

            _observedStream = stream;
            _cachedStreamComp = observedAudioStreamComp;
        }
コード例 #22
0
ファイル: StreamToWavModule.cs プロジェクト: gregzo/G-Audio
        /// <summary>
        /// Audio-thread callback: streams the incoming buffer to the async wav
        /// writer. Before writing starts, waits until the scheduled start time
        /// (_targetDspTime) falls inside the current buffer, then begins writing
        /// mid-buffer at the corresponding frame.
        /// </summary>
        void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
        {
            int framesToWrite = stream.BufferSizePerChannel;

            if( _writing == false  )
            {
                double dspTime 		= AudioSettings.dspTime;
                double nextDspTime  = dspTime + GATInfo.AudioBufferDuration;

                // A start time in the past is clamped to "now".
                if( _targetDspTime < dspTime )
                    _targetDspTime = dspTime;

                // Start only when the target time falls within the current buffer.
                if( nextDspTime > _targetDspTime )
                {
                    if( _waiting )
                    {
                        _waiting = false;
                        _writing = true;

                        // Convert the remaining time delta into a frame offset so
                        // writing begins exactly at the scheduled dsp time.
                        // NOTE(review): offset advances by frames * NbOfChannels
                        // (interleaved samples) while framesToWrite decreases by
                        // frames — presumably WriteStreamAsync takes a frame count;
                        // confirm against the writer's API.
                        int frameOffsetInBuffer = ( int )( ( _targetDspTime - dspTime ) * GATInfo.OutputSampleRate );
                        offset += frameOffsetInBuffer * stream.NbOfChannels;
                        framesToWrite -= frameOffsetInBuffer;
                    }
                    else
                    {
                        return;
                    }
                }
                else
                {
                    return;
                }
            }

            // Fixed-length recording: clamp the final write and finish.
            if( _recFixedFrames > 0 && ( _writtenFrames + framesToWrite > _recFixedFrames ) )
            {
                framesToWrite = _recFixedFrames - _writtenFrames;
                _writer.WriteStreamAsync( data, offset, framesToWrite );
                EndWriting();

                return;
            }

            _writer.WriteStreamAsync( data, offset, framesToWrite );
            _writtenFrames += framesToWrite;
        }
コード例 #23
0
 /// <summary>
 /// Audio-thread callback: snapshots the latest buffer reference, its
 /// offset, and the empty flag for later consumption.
 /// </summary>
 void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
 {
     _streamDataEmpty = emptyData;
     _streamOffset    = offset;
     _streamBuffer    = data;
 }
コード例 #24
0
ファイル: Example_04.cs プロジェクト: uniphonic/G-Audio
        /// <summary>
        /// Audio-thread callback: while in the Recording state, de-interleaves
        /// the stereo buffer into _leftData / _rightData until the recording
        /// is full, then switches to the IdleRecInMemory state.
        /// NOTE(review): the offset parameter is never used — presumably the
        /// observed stream always delivers data starting at index 0; confirm.
        /// </summary>
        public void HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
        {
            if (_state == State.Recording)              // Monitor the state
            {
                bool atEnd         = false;
                int  appliedLength = stream.BufferSizePerChannel;                // at the end of the recording, the length we will copy will be
                // smaller than buffer size.

                if (appliedLength > _leftData.Count - _recOffset)                  // We've reached the end of the recording, adjust applied length
                {
                    atEnd         = true;
                    appliedLength = _leftData.Count - _recOffset;
                }

                _leftData.CopyFromInterlaced(data, appliedLength, _recOffset, 0, 2);                   // copy each channel to the GATData objects. CopyFromInterlaced handles de-interleaving.
                _rightData.CopyFromInterlaced(data, appliedLength, _recOffset, 1, 2);

                if (atEnd)                  // Done, reset the _didFade flag and change state.
                {
                    _didFade = false;       // We will fade the data to avoid pops, but not on the audio thread - there's no reason to do it before playback.
                    _state   = State.IdleRecInMemory;
                    return;
                }

                // Advance the write position for the next buffer.
                _recOffset += appliedLength;
            }
        }
コード例 #25
0
        void IGATAudioThreadStreamClient.HandleAudioThreadStream(float[] data, int offset, bool emptyData, IGATAudioThreadStream stream)
        {
            if (_needsData == false)
            {
                return;
            }

            if (!emptyData)
            {
                int sourceChannels = stream.NbOfChannels;                 //May change!
                int length         = stream.BufferSizePerChannel;

                if (sourceChannels == 1)
                {
                    System.Array.Copy(data, offset, _data, 0, length);
                }
                else                 //deinterlace
                {
                    int i = 0;
                    length *= sourceChannels;
                    offset += observedChannel;
                    while (offset < length)
                    {
                        _data[i] = data[offset];
                        offset  += sourceChannels;
                        i++;
                    }
                }
            }

            _receivedZeroData = emptyData;
            _needsData        = false;
            _dataIsUpdated    = true;
        }
コード例 #26
0
ファイル: Example_04.cs プロジェクト: uniphonic/G-Audio
 /// <summary>
 /// Unsubscribes from the observed stream. Unsubscribing is vital: the
 /// audio thread must not call back into a disabled component.
 /// </summary>
 void OnDisable()
 {
     // BUGFIX: OnDisable can run when OnEnable never subscribed (e.g. the
     // component was destroyed early), so guard against a null stream.
     if (_observedStream != null)
     {
         _observedStream.RemoveAudioThreadStreamClient(this);               // Unsubscribing is vital!
         _observedStream = null;
     }
 }
コード例 #27
0
 /// <summary>
 /// Audio-thread callback: records the most recent buffer, its offset,
 /// and whether it carried any data.
 /// </summary>
 void IGATAudioThreadStreamClient.HandleAudioThreadStream( float[] data, int offset, bool emptyData, IGATAudioThreadStream stream )
 {
     _streamOffset    = offset;
     _streamBuffer    = data;
     _streamDataEmpty = emptyData;
 }
コード例 #28
0
 /// <summary>
 /// Streaming to the specified track will not begin until Start() is called.
 /// </summary>
 public GATAudioThreadStreamToTrack( GATTrack track, IGATAudioThreadStream stream, bool exclusive )
 {
     // Store the routing configuration; no subscription happens here.
     _exclusive = exclusive;
     _stream    = stream;
     _track     = track;
 }
コード例 #29
0
 /// <summary>
 /// Streaming to the specified track will not begin until Start() is called.
 /// </summary>
 public GATAudioThreadStreamToTrack(GATTrack track, IGATAudioThreadStream stream, bool exclusive)
 {
     // Only captures the configuration — streaming starts later via Start().
     _track     = track;
     _exclusive = exclusive;
     _stream    = stream;
 }