Example #1
        void Start()
        {
            Application.targetFrameRate = 60;
            c = Constant.CreateDefault();
            ImpulseResponses.LoadAll(c);

            waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/DrumLoop2.wav");
            debugButton.AddButton("Drum1", () =>
            {
                waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/DrumLoop2.wav");
            });
            debugButton.AddButton("Drum2", () =>
            {
                waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/TightFunkBreak-mono.wav");
            });
            debugButton.AddButton("Ochestra\nStrings", () =>
            {
                waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/OchestraStrings-mono.wav");
            });
            debugButton.AddButton("Siren", () =>
            {
                waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/PoliseCarSiren-mono.wav");
            });
            debugButton.AddButton("Stop", () =>
            {
                isPlaying = false;
                audioClipStreamingPlayer.Stop();
            });
            positionCircle.onTouched += OnTouched;
            overlapAddLeft            = new OverlapAdd(c);
            overlapAddRight           = new OverlapAdd(c);
            bufferSample              = new float[c.blockSamples];
            audioClipStreamingPlayer.Initialize(c, this);
        }
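The handler wired to positionCircle.onTouched is not shown above. As a minimal sketch (assuming the event passes a 2D touch position, and that currentAngle is a field the streaming player reads), it could snap the touch to the 5-degree grid that ImpulseResponses.LoadAll populates and then schedule playback:

        void OnTouched(Vector2 position)
        {
            // Convert the touched position to a heading in degrees and snap it to the
            // 5-degree grid used by ImpulseResponses.LoadAll, wrapped into [0, 360).
            float degrees = Mathf.Atan2(position.x, position.y) * Mathf.Rad2Deg;
            currentAngle  = ((Mathf.RoundToInt(degrees / 5f) * 5) % 360 + 360) % 360; // assumed field

            if (!isPlaying)
            {
                isPlaying = true;
                // Schedule slightly ahead of the current DSP clock.
                audioClipStreamingPlayer.Play(AudioSettings.dspTime + 0.1);
            }
        }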
Example #2
        /// <summary>
        /// Generates AudioClip-compatible data from a wav file
        /// </summary>
        public static WaveAudioClip CreateWavAudioClip(string path)
        {
            var clip = new WaveAudioClip();
            var wav  = WaveReader.Load(Resources.Load<TextAsset>(path).bytes);

            clip.samples   = wav.data.Length;
            clip.channels  = wav.channels;
            clip.frequency = (int)wav.sampleRate;
            clip.waveData  = new float[wav.data.Length];
            // Convert the 16-bit samples to the -1 to 1 range
            for (int i = 0; i < wav.data.Length; ++i)
            {
                clip.waveData[i] = (float)wav.data[i] / Int16.MaxValue;
            }
            return clip;
        }
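CreateWavAudioClip returns the decoded samples in the layout Unity's own AudioClip expects. A hedged usage sketch (wrapping the result in a UnityEngine.AudioClip is mine; it assumes waveData holds interleaved samples and that this runs inside a MonoBehaviour):

        var wave      = WaveAudioClip.CreateWavAudioClip("Bytes/DrumLoop2.wav");
        // AudioClip.Create takes the per-channel length, so divide the total
        // (interleaved) sample count by the channel count.
        var unityClip = AudioClip.Create("DrumLoop2",
                                         wave.samples / wave.channels,
                                         wave.channels,
                                         wave.frequency,
                                         false);
        unityClip.SetData(wave.waveData, 0);
        GetComponent<AudioSource>().PlayOneShot(unityClip);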
Example #3
        /// <summary>
        /// Test for AudioClipStreamingPlayer
        /// </summary>
        private void AudioClipStreamingPlayerTest()
        {
            if (!audioClipStreamingPlayerTestFlg)
            {
                return;
            }

            audioClipStreamingPlayer.Initialize(Constant.CreateDefault(), this);
            waveAudioClip = WaveAudioClip.CreateWavAudioClip("Bytes/DrumLoop2.wav");

            debugButton.AddButton("AudioClipStreamingPlayerTest", () =>
            {
                audioClipStreamingPlayer.Play(AudioSettings.dspTime + 1.0f);
            });
            debugButton.AddButton("AudioClipStreamingPlayerTest Stop", () =>
            {
                audioClipStreamingPlayer.Stop();
            });
        }
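        // Hypothetical variation (not part of the original source): instead of the fixed
        // one-second delay used above, schedule playback one DSP buffer ahead of the clock
        // so it starts as soon as the mixer can honour it.
        private void PlayWithMinimalLatency()
        {
            int bufferLength, numBuffers;
            AudioSettings.GetDSPBufferSize(out bufferLength, out numBuffers);
            double lead = (double)(bufferLength * numBuffers) / AudioSettings.outputSampleRate;
            audioClipStreamingPlayer.Play(AudioSettings.dspTime + lead);
        }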
        /// <summary>
        /// Loads all impulse responses
        /// </summary>
        public static void LoadAll(Constant c)
        {
            dictionary.Clear();

            Fft fft = new Fft(c.blockSize);

            for (int i = 0; i < 360; i += 5)
            {
                // Debug.Log($"Load angle:{i}");
                var ir     = new Data(c.blockSize);
                var clip_l = WaveAudioClip.CreateWavAudioClip($"Bytes/elev0/L0e{i:000}a.wav");
                Debug.Assert(clip_l.samples == c.impulseResponseSamples);
                clip_l.GetData(ir.channelLX, 0, c.impulseResponseSamples);
                fft.Forward(ir.channelLX, ir.channelLY);
                var clip_r = WaveAudioClip.CreateWavAudioClip($"Bytes/elev0/R0e{i:000}a.wav");
                Debug.Assert(clip_r.samples == c.impulseResponseSamples);
                clip_r.GetData(ir.channelRX, 0, c.impulseResponseSamples);
                fft.Forward(ir.channelRX, ir.channelRY);
                ir.angle      = i;
                dictionary[i] = ir;
            }
        }
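LoadAll precomputes a frequency-domain impulse-response pair for every 5-degree azimuth at elevation 0 and keys it by angle. A minimal lookup sketch (GetNearest is a hypothetical name; it assumes the same dictionary field) that snaps an arbitrary angle onto that grid:

        /// <summary>
        /// Hypothetical helper: returns the stored impulse response closest to the given angle.
        /// </summary>
        public static Data GetNearest(float angleDegrees)
        {
            // Snap to the 5-degree grid populated by LoadAll and wrap into [0, 360).
            int key = ((Mathf.RoundToInt(angleDegrees / 5f) * 5) % 360 + 360) % 360;
            return dictionary[key];
        }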