Example No. 1
        void StartCapture()
        {
            #if UNITY_EDITOR
            if (UnityEditor.EditorApplication.isPaused)
            {
                return;
            }
            #endif

            Debug.Assert(_outSamples == null);
            Debug.Assert(_wavWriter == null);

            _pendingSampleCount = 0;
            int coeffCount = Ambisonic.GetCoeffCount(_order);
            Debug.Assert(coeffCount == 4 || coeffCount == 9 || coeffCount == 16);

            lock (this)
            {
                foreach (AmbisonicSource source in _sources)
                {
                    SetupSource(source);
                }

                string path = Path.Combine(Application.persistentDataPath, _filename);
                if (_capture)
                {
                    path = _capture.LastFilePath + ".wav";
                }
                Debug.Log("[AVProMovieCapture] Writing Ambisonic WAV to " + path);
                _wavWriter = new WavWriter(path, coeffCount, AudioSettings.outputSampleRate, WavWriter.SampleFormat.Float32);

                _outSamples = new float[coeffCount * AudioSettings.outputSampleRate * 1];                       // 1 second buffer
            }
        }
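
Note on the coefficient-count assert above: an ambisonic signal of order n carries (n + 1)^2 coefficients, so orders 1 to 3 give exactly the 4, 9 or 16 channels allowed. A minimal sketch of that relationship (the real Ambisonic.GetCoeffCount takes the library's AmbisonicOrder enum, so the plain int parameter here is an assumption):

        // Sketch only: channel (coefficient) count for ambisonic order n is (n + 1)^2,
        // i.e. 4 for first order, 9 for second order, 16 for third order.
        static int CoeffCountForOrder(int order)
        {
            return (order + 1) * (order + 1);
        }
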
Example No. 2
        void UpdateCoefficients()
        {
            Ambisonic.PolarCoord p = new Ambisonic.PolarCoord();
            p.FromCart(_position);

            lock (this)
            {
                float[] normaliseWeights = Ambisonic.GetNormalisationWeights(_normalisation);
                NativePlugin.UpdateAmbisonicWeights(_sourceInstance, p.azimuth, p.elevation, _order, _channelOrder, normaliseWeights);
            }
        }
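
Here p.FromCart converts the source position into the azimuth/elevation pair passed to the native plugin. A minimal sketch of such a conversion, assuming the file's using UnityEngine directive and Unity's +Z-forward convention (the real Ambisonic.PolarCoord may use a different convention):

        // Sketch only: Cartesian position to azimuth/elevation in radians.
        struct PolarSketch
        {
            public float azimuth;
            public float elevation;

            public void FromCart(Vector3 pos)
            {
                float radius = pos.magnitude;
                // Horizontal angle around the up axis, measured from +Z towards +X.
                azimuth = Mathf.Atan2(pos.x, pos.z);
                // Vertical angle above or below the horizontal plane.
                elevation = (radius > 0.0f) ? Mathf.Asin(pos.y / radius) : 0.0f;
            }
        }
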
Example No. 3
        void OnAudioFilterRead(float[] samples, int channelCount)
        {
            lock (this)
            {
                int coeffCount = Ambisonic.GetCoeffCount(_order);
                if (_sink != null && coeffCount > 0)
                {
                    int samplesOffset = 0;
                    // While there are samples left to process
                    while (samplesOffset < samples.Length)
                    {
                        // If the pending buffer is full, move it to the full list
                        if (_activeSamples != null && _activeSamples.Length == _activeSampleIndex)
                        {
                            _fullBuffers.Enqueue(_activeSamples);
                            _activeSamples     = null;
                            _activeSampleIndex = 0;
                        }
                        // Assign a new pending queue
                        if (_activeSamples == null && _emptyBuffers.Count > 0)
                        {
                            _activeSamples = _emptyBuffers.Dequeue();
                        }
                        if (_activeSamples == null)
                        {
                            // Remaining samples are lost!
                            break;
                        }
                        int remainingFrameCount  = (samples.Length - samplesOffset) / channelCount;
                        int generatedSampleCount = remainingFrameCount * coeffCount;
                        int remainingSampleSpace = (_activeSamples.Length - _activeSampleIndex);

                        int samplesToProcess = Mathf.Min(remainingSampleSpace, generatedSampleCount);
                        // TODO: should we specify Floor/Ceil rounding behaviour?
                        int framesToProcess = samplesToProcess / coeffCount;
                        generatedSampleCount = framesToProcess * coeffCount;

                        if (framesToProcess > 0)
                        {
                            NativePlugin.EncodeMonoToAmbisonic(_sourceInstance, samples, samplesOffset, framesToProcess, channelCount, _activeSamples, _activeSampleIndex, _activeSamples.Length, _order);
                            _activeSampleIndex += generatedSampleCount;
                            samplesOffset      += framesToProcess * channelCount;
                        }
                        else
                        {
                            // Not enough space in the active buffer for even one frame; log diagnostics and stop.
                            Debug.Log(coeffCount + " " + framesToProcess + "   " + remainingSampleSpace + " >>  " + samplesOffset + " /  " + samples.Length);
                            break;
                        }
                    }
                }
            }
        }
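
EncodeMonoToAmbisonic itself lives in the native plugin, but conceptually each mono input frame is expanded into coeffCount weighted copies of the sample, with weights derived from the source direction. As a conceptual sketch only (not the plugin's actual code), the first-order weights in ACN channel order with SN3D normalisation are:

        // Sketch only: encode one mono sample s at azimuth az / elevation el (radians)
        // into a first-order ACN/SN3D frame of 4 coefficients. Higher orders follow the
        // same pattern with more weighted copies per frame.
        static void EncodeFirstOrder(float s, float az, float el, float[] outFrame)
        {
            float cosEl = Mathf.Cos(el);
            outFrame[0] = s;                           // W (ACN 0)
            outFrame[1] = s * Mathf.Sin(az) * cosEl;   // Y (ACN 1)
            outFrame[2] = s * Mathf.Sin(el);           // Z (ACN 2)
            outFrame[3] = s * Mathf.Cos(az) * cosEl;   // X (ACN 3)
        }
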
Example No. 4
        internal void Setup(AmbisonicOrder order, AmbisonicChannelOrder channelOrder, AmbisonicNormalisation normalisation, int bufferCount)
        {
            Debug.Assert(bufferCount > 1 && bufferCount < 100);
            lock (this)
            {
                _order         = order;
                _channelOrder  = channelOrder;
                _normalisation = normalisation;
                int sampleCount = Ambisonic.GetCoeffCount(order) * AudioSettings.outputSampleRate / 10;                 // 1/10 second buffer

                _activeSampleIndex = 0;
                _activeSamples     = null;
                _fullBuffers.Clear();
                _emptyBuffers.Clear();
                for (int i = 0; i < bufferCount; i++)
                {
                    float[] buffer = new float[sampleCount];
                    _emptyBuffers.Enqueue(buffer);
                }

                UpdateCoefficients();
            }
        }
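
Each buffer allocated here holds 1/10 of a second of interleaved ambisonic output, so the pool covers bufferCount × 100 ms in total. For example, at a 48 kHz output rate with second-order ambisonics (9 coefficients), one buffer is 9 * 48000 / 10 = 43,200 floats (about 169 KB), and the consumer must drain _fullBuffers within that bufferCount × 100 ms window or OnAudioFilterRead (Example No. 3) starts dropping samples.
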
Example No. 5
        void SetupSource(AmbisonicSource source)
        {
            source.Setup(_order, Ambisonic.GetChannelOrder(_format), Ambisonic.GetNormalisation(_format), _bufferCount);
            source.FlushBuffers();
        }