Example #1
        // Unity audio-thread callback: encodes the incoming interleaved samples
        // to ambisonic form and hands the results off through pooled buffers.
        void OnAudioFilterRead(float[] samples, int channelCount)
        {
            // _activeSamples and the buffer queues are shared state; serialize access.
            lock (this)
            {
                int coeffCount = Ambisonic.GetCoeffCount(_order);
                if (_sink != null && coeffCount > 0)
                {
                    int samplesOffset = 0;
                        // While there are samples left to process
                    while (samplesOffset < samples.Length)
                    {
                        // If the pending buffer is full, move it to the full list
                        if (_activeSamples != null && _activeSamples.Length == _activeSampleIndex)
                        {
                            _fullBuffers.Enqueue(_activeSamples);
                            _activeSamples     = null;
                            _activeSampleIndex = 0;
                        }
                        // Grab a fresh pending buffer from the empty pool
                        if (_activeSamples == null && _emptyBuffers.Count > 0)
                        {
                            _activeSamples = _emptyBuffers.Dequeue();
                        }
                        if (_activeSamples == null)
                        {
                            // No free buffer is available; the remaining samples are lost!
                            break;
                        }
                        // Frames of input still to consume, and the number of ambisonic
                        // samples (coeffCount per frame) they would produce.
                        int remainingFrameCount  = (samples.Length - samplesOffset) / channelCount;
                        int generatedSampleCount = remainingFrameCount * coeffCount;
                        int remainingSampleSpace = (_activeSamples.Length - _activeSampleIndex);

                        // Clamp to whatever fits in the active buffer, rounded down to whole frames.
                        int samplesToProcess = Mathf.Min(remainingSampleSpace, generatedSampleCount);
                        // TODO: should we specify Floor/Ceil rounding behaviour?
                        int framesToProcess = samplesToProcess / coeffCount;
                        generatedSampleCount = framesToProcess * coeffCount;

                        if (framesToProcess > 0)
                        {
                            // Encode framesToProcess mono frames into ambisonic coefficients,
                            // written into the active buffer starting at _activeSampleIndex.
                            NativePlugin.EncodeMonoToAmbisonic(_sourceInstance, samples, samplesOffset, framesToProcess, channelCount, _activeSamples, _activeSampleIndex, _activeSamples.Length, _order);
                            _activeSampleIndex += generatedSampleCount;
                            samplesOffset      += framesToProcess * channelCount;
                        }
                        else
                        {
                            // The active buffer cannot fit even a single frame; log state and bail out.
                            Debug.Log($"coeffCount={coeffCount} framesToProcess={framesToProcess} remainingSampleSpace={remainingSampleSpace} samplesOffset={samplesOffset}/{samples.Length}");
                            break;
                        }
                    }
                }
            }
        }
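
The callback above only fills buffers; something else in the class has to pre-allocate the pool and drain _fullBuffers off the audio thread. The sketch below is not part of the original example, just a minimal illustration of how that producer/consumer pairing could look. It assumes a fixed pool size, field names taken from the snippet above, and a hypothetical _sink.SendFrames method standing in for whatever sink API the plugin actually exposes.

        // Fields assumed to exist in the surrounding class (names mirror the example above).
        readonly Queue<float[]> _emptyBuffers = new Queue<float[]>();
        readonly Queue<float[]> _fullBuffers  = new Queue<float[]>();

        // Hypothetical pool setup: pre-allocate a few reusable buffers.
        const int BufferCount = 4;
        const int BufferSize  = 4096; // assumed; should be a multiple of the coefficient count

        void Awake()
        {
            for (int i = 0; i < BufferCount; i++)
            {
                _emptyBuffers.Enqueue(new float[BufferSize]);
            }
        }

        // Hypothetical consumer, running on the main thread: hand full buffers
        // to the sink and recycle them back into the empty pool.
        void Update()
        {
            lock (this)
            {
                while (_fullBuffers.Count > 0)
                {
                    float[] buffer = _fullBuffers.Dequeue();
                    _sink.SendFrames(buffer);      // assumed sink call, not from the original code
                    _emptyBuffers.Enqueue(buffer);
                }
            }
        }

Recycling fixed-size arrays through the two queues keeps the audio callback allocation-free, which is why the encoder writes into a pre-existing buffer instead of allocating per call.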