Example #1
        internal static void ImportAudioThreaded(string path)
        {
            AudioDecodeWorker worker = new AudioDecodeWorker(path,
                                                             Path.Combine(Application.persistentDataPath, "TempConversion"),
                                                             (sampleArray, channels, sampleRate) =>
            {
                AudioClip clip = AudioClip.Create(Path.GetFileNameWithoutExtension(path),
                                                  sampleArray.Length / channels, channels, sampleRate, false);
                clip.SetData(sampleArray, 0);
                _source.clip = clip;
                _samples     = sampleArray;
            });

            HyperThreadDispatcher.StartWorker(worker);
        }
Example #2
 static AudioClip SoundLoader_GetAudioClip(On.SoundLoader.orig_GetAudioClip orig, SoundLoader self, int i)
 {
     if (NewSounds.FmttmAudio.ContainsKey(i))
     {
         WAV       wav       = new WAV(GetWavData(i));
         AudioClip audioClip = AudioClip.Create(NewSounds.FmttmAudio[i].ToString(), wav.SampleCount, wav.ChannelCount,
                                                wav.Frequency, false, false);
         audioClip.SetData(wav.LeftChannel, 0);
         return(audioClip);
     }
     else
     {
         return(orig(self, i));
     }
 }
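
The WAV helper type used above (and again in Examples #4, #23 and #26) is not part of these snippets. As a rough guide only, a minimal sketch of what such a parser is assumed to do for uncompressed 16-bit PCM data might look like this:

    // Hypothetical sketch of the WAV helper assumed by these examples: it parses an
    // uncompressed 16-bit PCM RIFF/WAVE byte array into normalized float samples.
    public class WAV
    {
        public float[] LeftChannel  { get; private set; }
        public int     ChannelCount { get; private set; }
        public int     SampleCount  { get; private set; }
        public int     Frequency    { get; private set; }

        public WAV(byte[] wav)
        {
            ChannelCount = System.BitConverter.ToInt16(wav, 22); // channel count is stored at byte 22
            Frequency    = System.BitConverter.ToInt32(wav, 24); // sample rate at byte 24

            int pos = 44;                                        // assumes a canonical 44-byte header
            SampleCount = (wav.Length - pos) / 2 / ChannelCount; // 2 bytes per 16-bit sample

            LeftChannel = new float[SampleCount];
            for (int i = 0; i < SampleCount; i++)
            {
                // Take the first channel of each frame and scale it to the -1..1 range.
                LeftChannel[i] = System.BitConverter.ToInt16(wav, pos) / 32768f;
                pos += 2 * ChannelCount;
            }
        }
    }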
Example #3
    void PlaySubclip()
    {
        var       start         = 0f;
        var       duration      = .7f;
        var       frequency     = this.clip.frequency;
        int       samplesLength = (int)(frequency * duration);
        AudioClip newClip       = AudioClip.Create(this.clip.name + "-sub", samplesLength, 1, frequency, false);

        float[] data = new float[samplesLength];
        this.clip.GetData(data, (int)(frequency * start));
        newClip.SetData(data, 0);
        this.notesSource.clip  = newClip;
        this.notesSource.pitch = 1 + ((this.game.SelectedCount() - 1) / 2f);
        this.notesSource.Play();
    }
Example #4
    /// <summary>
    /// Transform the byte array from an MP3 file into an AudioClip.
    /// </summary>
    /// <param name="data">Raw bytes of the MP3 file</param>
    /// <returns>The decoded AudioClip</returns>
    public static AudioClip FromMp3Data(byte[] data)
    {
        // Load the data into a stream
        MemoryStream mp3stream = new MemoryStream(data);
        // Convert the data in the stream to WAV format
        Mp3FileReader mp3audio   = new Mp3FileReader(mp3stream);
        WaveStream    waveStream = WaveFormatConversionStream.CreatePcmStream(mp3audio);
        // Convert to WAV data
        WAV       wav       = new WAV(AudioMemStream(waveStream).ToArray());
        AudioClip audioClip = AudioClip.Create("testSound", wav.SampleCount, 1, wav.Frequency, false);

        audioClip.SetData(wav.LeftChannel, 0);
        // Return the clip
        return(audioClip);
    }
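
FromMp3Data also relies on an AudioMemStream helper that is not shown. A plausible sketch, assuming NAudio's WaveFileWriter, is to write the decoded PCM stream into an in-memory WAV image that the WAV parser can then consume:

    // Hypothetical sketch of AudioMemStream (requires: using System.IO; using NAudio.Wave;).
    // It copies a WaveStream into a MemoryStream prefixed with a standard WAV header.
    private static MemoryStream AudioMemStream(WaveStream waveStream)
    {
        MemoryStream outputStream = new MemoryStream();
        using (WaveFileWriter waveFileWriter = new WaveFileWriter(outputStream, waveStream.WaveFormat))
        {
            byte[] bytes = new byte[waveStream.Length];
            waveStream.Position = 0;
            waveStream.Read(bytes, 0, bytes.Length);
            waveFileWriter.Write(bytes, 0, bytes.Length);
            waveFileWriter.Flush();
        }
        return outputStream;   // MemoryStream.ToArray() still works after the writer closes it
    }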
Example #5
    private IEnumerator RecordingHandler()
    {
        m_recordingDone = false;
        m_Recording     = Microphone.Start(m_MicrophoneID, false, m_RecordingBufferSize, m_RecordingHZ);
        yield return(null);

        if (m_Recording == null)
        {
            yield break;
        }

        while (m_Recording != null)
        {
            int writePos = Microphone.GetPosition(m_MicrophoneID);
            if (writePos > m_Recording.samples || !Microphone.IsRecording(m_MicrophoneID))
            {
                StopRecording();
            }
            if (m_recordingDone)
            {
                float[] samples = new float[writePos];

                Microphone.End(m_MicrophoneID);

                m_Recording.GetData(samples, 0);

                m_mostRecentClip = AudioClip.Create("clipy", writePos, 1, m_RecordingHZ, false);
                m_mostRecentClip.SetData(samples, 0);

                string filename = Path.Combine("temp", Webserver.GenerateFileName(IAAPlayer.playerObject.GetComponent <NetworkIdentity>().netId.ToString()));
                DownloadHandlerBuffer handler = new DownloadHandlerBuffer();
                yield return(StartCoroutine(Webserver.singleton.Upload(filename, m_mostRecentClip, handler)));

                //yield return new WaitUntil(() => handler.isDone == true);

                // create a new sound object
                IAAPlayer.playerObject.GetComponent <MakeSoundObject>().CmdSpawnSoundObject("", 1f, Vector3.one, Quaternion.identity, filename, true);
                yield break;
            }
            else
            {
                yield return(new WaitUntil(() => m_recordingDone == true));
            }
        }

        yield break;
    }
Example #6
    /// <summary>
    /// Stop recording.
    /// </summary>
    /// <returns>The recording result state; the audio save path is returned via filePath.</returns>
    public AudioRecordResultState StopRecord(out string filePath)
    {
        Debug.Log("stop record---------------");

        //Capture the current clip data
        isRecording = false;
        if (recordTimer < 0.5f)
        {
            filePath = null;
            return(AudioRecordResultState.TooShort);
        }

        int position  = Microphone.GetPosition(null);
        var soundData = new float[recordedClip.samples * recordedClip.channels];

        recordedClip.GetData(soundData, 0);

        //Create shortened array for the data that was used for recording
        var newData = new float[position * recordedClip.channels];


        //Copy the used samples to a new array
        for (int i = 0; i < newData.Length; i++)
        {
            newData[i] = soundData[i];
        }

        //One does not simply shorten an AudioClip,
        //    so we make a new one with the appropriate length
        recordedClip = AudioClip.Create(recordedClip.name,
                                        position,
                                        recordedClip.channels,
                                        recordedClip.frequency,
                                        false);

        recordedClip.SetData(newData, 0);        //Give it the data from the old clip

        //Replace the old clip
        Microphone.End(null);

        //save to disk
        string recordedAudioPath;

        byte[] data = WavUtility.FromAudioClip(recordedClip, out recordedAudioPath, true);
        filePath = recordedAudioPath;

        return(AudioRecordResultState.Success);
    }
Example #7
    private static AudioClip createAudioFromWave(float[] wave, LeanAudioOptions options)
    {
        float time = wave[wave.Length - 2];

        float[] audioArr = new float[(int)(options.frequencyRate * time)];

        int   waveIter        = 0;
        float subWaveDiff     = wave[waveIter];
        float subWaveTimeLast = 0f;
        float subWaveTime     = wave[waveIter];
        float waveHeight      = wave[waveIter + 1];

        for (int i = 0; i < audioArr.Length; i++)
        {
            float passedTime = (float)i / (float)options.frequencyRate;
            if (passedTime > wave[waveIter])
            {
                subWaveTimeLast = wave[waveIter];
                waveIter       += 2;
                subWaveDiff     = wave[waveIter] - wave[waveIter - 2];
                waveHeight      = wave[waveIter + 1];
                // Debug.Log("passed wave i:"+i);
            }
            subWaveTime = passedTime - subWaveTimeLast;
            float ratioElapsed = subWaveTime / subWaveDiff;

            float value = Mathf.Sin(ratioElapsed * Mathf.PI);
            //if(i<25)
            //	Debug.Log("passedTime:"+passedTime+" value:"+value+" ratioElapsed:"+ratioElapsed+" subWaveTime:"+subWaveTime+" subWaveDiff:"+subWaveDiff);

            value *= waveHeight;

            audioArr[i] = value;
            // Debug.Log("pt:"+pt+" i:"+i+" val:"+audioArr[i]+" len:"+audioArr.Length);
        }

        int lengthSamples = audioArr.Length;

                #if !UNITY_3_5 && !UNITY_4_0 && !UNITY_4_0_1 && !UNITY_4_1 && !UNITY_4_2 && !UNITY_4_3 && !UNITY_4_5 && !UNITY_4_6
        AudioClip audioClip = AudioClip.Create("Generated Audio", lengthSamples, 1, options.frequencyRate, false);
                #else
        bool      is3dSound = false;
        AudioClip audioClip = AudioClip.Create("Generated Audio", lengthSamples, 1, options.frequencyRate, is3dSound, false);
                #endif
        audioClip.SetData(audioArr, 0);

        return(audioClip);
    }
Example #8
    void Update()
    {
        foreach (ExternalSong s in GameSettings.userPlaylist)
        {
            if (!s.loaded && !s.waitingToLoad && !s.failedToLoad)
            {
                // lets wait for this one first.
                break;
            }
            if (!s.loaded && s.waitingToLoad && !s.failedToLoad)
            {
                s.LoadAudio();
                break;
            }
        }
        int padding = 2;
        int offset  = 4;

        foreach (ExternalSong song in GameSettings.userPlaylist)
        {
            if (song.loaded)
            {
                song.Y = -24;
            }
            else
            {
                song.alpha = 1.0f;
                song.Y     = offset;
                offset    += 24 + padding;
            }
            if (song.waitingToLoad)
            {
                song.alpha = 0.0f;
            }
            song.animation_alpha = Mathf.Lerp(song.animation_alpha, song.alpha, Time.deltaTime * 3.0f);
            song.animation_Y     = Mathf.Lerp(song.animation_Y, song.Y, Time.deltaTime * 3.0f);
            if (song.type == "MP3")
            {
                if (song.loaded && !song.MP3_setData)
                {
                    AudioClip audioClip = AudioClip.Create(song.song.name, song.MP3_data.Length / 2, 2, song.MP3_freq, false, false);
                    audioClip.SetData(song.MP3_data, 0);
                    song.song.clip   = audioClip;
                    song.MP3_setData = true;
                }
            }
        }
    }
Example #9
        // Hack for 3d spatialization
        void Awake()
        {
            AudioClip _clip = AudioClip.Create("_clip", 1024, 2, 44100, true);

            float[] samples = new float[1024];
            for (int i = 0; i < 1024; i++)
            {
                samples[i] = 1;
            }
            _clip.SetData(samples, 0);

            AudioSource _source = GetComponent <AudioSource>();

            _source.clip = _clip;
            _source.Play();
        }
Example #10
        public override object WriteTo(object obj)
        {
            if (obj == null)
            {
                return(null);
            }

            AudioClip o = (AudioClip)obj;

            o.SetData(m_data, 0);
            if (!o.preloadAudioData)
            {
                o.LoadAudioData();
            }
            return(base.WriteTo(obj));
        }
Example #11
    AudioClip trimAudioClip(AudioClip c, int position)
    {
        var data = new float[c.samples * c.channels];

        c.GetData(data, 0);
        var newData = new float[position * c.channels];

        for (int i = 0; i < newData.Length; i++)
        {
            newData [i] = data [i];
        }
        AudioClip newClip = AudioClip.Create(c.name, position, c.channels, c.frequency, false, false);

        newClip.SetData(newData, 0);
        return(newClip);
    }
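
For context, this kind of trim is typically applied right after stopping a microphone capture. A hedged usage sketch (names are illustrative, and micClip is assumed to come from an earlier Microphone.Start call):

    // Illustrative only: trim a padded microphone buffer down to what was actually recorded.
    AudioClip StopAndTrim(AudioClip micClip)
    {
        int recordedSamples = Microphone.GetPosition(null); // last sample written by the default mic
        Microphone.End(null);
        return trimAudioClip(micClip, recordedSamples);     // drop the unused tail of the buffer
    }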
Example #12
        //---------------------------------------------------------------------------
        // create AudioClip by ranged raw data
        //---------------------------------------------------------------------------
        public AudioClip Create(
            string name,
            float[] ranged_data,
            int samples,
            int channels,
            int frequency,
            bool isStream
            )
        {
            AudioClip clip = AudioClip.Create(name, samples, channels, frequency, isStream);

            // set data to clip
            clip.SetData(ranged_data, 0);

            return(clip);
        }
Example #13
        /// <summary>
        /// If the default device is recording, ends the recording session and trims the default audio clip produced.
        /// </summary>
        public void StopRecording()
        {
            if (Microphone.IsRecording(null))
            {
                m_ForcedStopRecording = true;
                Microphone.End(null);
                float recordingLengthInSeconds = Time.time - m_RecordingStartTime;

                // Trim the default audio clip produced by UnityEngine.Microphone to fit the actual recording length.
                var samples = new float[Mathf.CeilToInt(m_RecordedAudio.frequency * recordingLengthInSeconds)];
                m_RecordedAudio.GetData(samples, 0);
                m_RecordedAudio = AudioClip.Create("TrimmedAudio", samples.Length,
                                                   m_RecordedAudio.channels, m_RecordedAudio.frequency, false);
                m_RecordedAudio.SetData(samples, 0);
            }
        }
Example #14
        /// <summary>
        /// Trim an audio file to the segment between startTime and endTime.
        /// </summary>
        public static AudioClip TrimAudioFiles(AudioClip audioClip, int startTime, int endTime, int samplingRate)
        {
            float[] samples_one = new float[audioClip.samples];

            audioClip.GetData(samples_one, 0);

            float[] samples_two = new float[(endTime - startTime) * samplingRate];

            // Copy the startTime..endTime segment into the new buffer.
            Array.Copy(samples_one, startTime * samplingRate, samples_two, 0, (endTime - startTime) * samplingRate);

            AudioClip newAudioClip = AudioClip.Create(audioClip.name, samplingRate * (endTime - startTime), 1, samplingRate, false);

            newAudioClip.SetData(samples_two, 0);

            return(newAudioClip);
        }
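
As a hedged usage sketch, keeping only the segment from 2 s to 5 s of a 44.1 kHz clip would look like this (the values are placeholders):

        // Illustrative only: the result holds (5 - 2) * 44100 = 132300 samples.
        AudioClip KeepMiddle(AudioClip source)
        {
            return TrimAudioFiles(source, 2, 5, 44100);
        }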
Example #15
        /// <summary>
        /// Cut off the blank section, keeping only the first time * samplingRate samples.
        /// </summary>
        public static AudioClip CutBlankSection(AudioClip audioClip, int time, int samplingRate)
        {
            float[] samples_one = new float[audioClip.samples];

            audioClip.GetData(samples_one, 0);

            float[] samples_two = new float[time * samplingRate];

            Array.Copy(samples_one, 0, samples_two, 0, time * samplingRate);

            AudioClip newAudioClip = AudioClip.Create(audioClip.name, samplingRate * time, 1, samplingRate, false);

            newAudioClip.SetData(samples_two, 0);

            return(newAudioClip);
        }
Example #16
    public void Initialize(string name, int nbOfSamples, int channels, int frequency, float[] data, int BPM, int BPB, float BeginLoop, float EndLoop, List <TransitionData> _Transitions, int nextTransitionId)
    {
        clip = AudioClip.Create(name, nbOfSamples, channels, frequency, false);
        clip.SetData(data, 0);

        this.BPM       = BPM;
        this.BPB       = BPB;
        this.BeginLoop = BeginLoop;
        this.EndLoop   = EndLoop;

        this.Transitions = _Transitions;

        this.nextTransitionId = nextTransitionId;

        this.name = name;
    }
Example #17
    AudioClip LoadSong(string path)
    {
        string filename = Path.GetFileNameWithoutExtension(path);

        AudioFileReader afr = new AudioFileReader(path);
        // AudioFileReader.Length is in bytes and its samples are 32-bit floats (4 bytes each),
        // so divide by 4 to get the number of float samples to read.
        int sampleCount = (int)(afr.Length / 4);

        float[] audioData = new float[sampleCount];
        afr.Read(audioData, 0, sampleCount);

        AudioClip song = AudioClip.Create(filename, sampleCount / afr.WaveFormat.Channels, afr.WaveFormat.Channels, afr.WaveFormat.SampleRate, false);

        song.SetData(audioData, 0);

        return(song);
    }
Example #18
    public static AudioClip generateAudioFromCurve(AnimationCurve curve, int frequencyRate = 44100)
    {
        float time = curve[curve.length - 1].time;

        float[] array = new float[(int)((float)frequencyRate * time)];
        for (int i = 0; i < array.Length; i++)
        {
            float time2 = (float)i / (float)frequencyRate;
            array[i] = curve.Evaluate(time2);
        }
        int       lengthSamples = array.Length;
        AudioClip audioClip     = AudioClip.Create("Generated Audio", lengthSamples, 1, frequencyRate, stream: false);

        audioClip.SetData(array, 0);
        return(audioClip);
    }
Example #19
    private void MixAudioFloatExample()
    {
        float[] firstAudioInFloatFormat = new float[_firstAudio.samples * _firstAudio.channels];
        _firstAudio.GetData(firstAudioInFloatFormat, 0);

        float[] secondAudioInFloatFormat = new float[_secondAudio.samples * _secondAudio.channels];
        _secondAudio.GetData(secondAudioInFloatFormat, 0);

        float[] mixedFloatArray = MixAndClampFloatBuffers(firstAudioInFloatFormat, secondAudioInFloatFormat);

        AudioClip mixedClip = AudioClip.Create("Combine", mixedFloatArray.Length, _firstAudio.channels, _secondAudio.frequency, false);

        mixedClip.SetData(mixedFloatArray, 0);

        SavWav.Save("MixedClip", mixedClip);
    }
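
MixAndClampFloatBuffers is not shown in this example. A minimal sketch of what it is assumed to do, summing the two buffers sample by sample and clamping the result to the valid -1..1 range:

    // Hypothetical sketch of the mixing helper used above.
    private float[] MixAndClampFloatBuffers(float[] bufferA, float[] bufferB)
    {
        int length = Mathf.Max(bufferA.Length, bufferB.Length);
        float[] mixed = new float[length];

        for (int i = 0; i < length; i++)
        {
            float a = i < bufferA.Length ? bufferA[i] : 0f;
            float b = i < bufferB.Length ? bufferB[i] : 0f;
            mixed[i] = Mathf.Clamp(a + b, -1f, 1f); // clamp so the sum never clips
        }
        return mixed;
    }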
Example #20
    void CreateTone2(float note)
    {
        for (int i = 0; i < samples.Length; i++)
        {
            samples[i] = Mathf.Cos(Mathf.PI * 3 * i * note / sampleFrequecy);
        }

        clip = AudioClip.Create("Note", samples.Length, 1, sampleFrequecy, false);
        clip.SetData(samples, 0);

        //the second note


        //Creating own methods then referencing it in void start
        //Create an array for the notes, Create a tempo; create sounds at certain intervals. Coroutine/invokerepeating to have a beat. Synthesise tones
    }
Example #21
        /// <summary>
        /// Convert a short array to an audio clip
        /// </summary>
        /// <param name="data">The short array representing an audio clip</param>
        /// <param name="channels">How many channels in the audio data</param>
        /// <param name="frequency">The recording frequency of the audio data</param>
        /// <param name="threedimensional">Whether the audio clip should be 3D</param>
        /// <param name="gain">How much to boost the volume (1.0 = unchanged)</param>
        /// <returns>An AudioClip</returns>
        public static AudioClip ShortsToAudioClip(short[] data, int channels, int frequency, bool threedimensional, float gain)
        {
            float[] samples = new float[data.Length];

            for (int i = 0; i < samples.Length; i++)
            {
                // Convert to a float in the -1 to 1 range (16-bit PCM full scale is 32767).
                int c = (int)data[i];
                samples[i] = ((float)c / 32767.0f) * gain;
            }

            AudioClip clip = AudioClip.Create("clip", data.Length / channels, channels, frequency, threedimensional, false);

            clip.SetData(samples, 0);
            return(clip);
        }
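
A hedged usage sketch, assuming the 16-bit PCM samples arrive from something like a network voice packet (the method and parameter values are placeholders):

        // Illustrative only: play received 16-bit PCM as mono 16 kHz 2D audio at unity gain.
        void PlayReceivedPcm(short[] pcm, AudioSource source)
        {
            AudioClip clip = ShortsToAudioClip(pcm, 1, 16000, false, 1.0f);
            source.clip = clip;
            source.Play();
        }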
Example #22
        private static AudioClip generateAudioClip(string name, int lengthSamples, int sampleFrequency, float soundFrequency)
        {
            soundFrequency = Mathf.Clamp(soundFrequency, -1000f, 1000f); //REALLY IMPORTANT!

            float[] data = new float[lengthSamples];
            for (int i = 0; i < data.Length; i++)
            {
                data[i] = .25f * (float)Math.Sin((2f * Math.PI * soundFrequency) / sampleFrequency * i);
            }

            AudioClip audioClip = AudioClip.Create(name, lengthSamples, 1, sampleFrequency, false);

            audioClip.SetData(data, 0);

            return(audioClip);
        }
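
For example, a one-second 440 Hz test tone at a 44.1 kHz sample rate could be generated like this (the values are illustrative):

        // Illustrative only: 44100 samples at 44100 Hz gives one second of a 440 Hz sine tone.
        private static AudioClip GenerateTestBeep()
        {
            return generateAudioClip("beep", 44100, 44100, 440f);
        }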
Example #23
        public static AudioClip GetMp3Audio(string name, byte[] data)
        {
            // Load the data into a stream
            MemoryStream mp3stream = new MemoryStream(data);
            // Convert the data in the stream to WAV format
            Mp3FileReader mp3audio = new Mp3FileReader(mp3stream);
            // Convert to WAV data
            WAV wav = new WAV(AudioMemStream(mp3audio).ToArray());

            Debug.Log(wav);
            AudioClip audioClip = AudioClip.Create(name, wav.SampleCount, 1, wav.Frequency, false);

            audioClip.SetData(wav.LeftChannel, 0);
            // Return the clip
            return(audioClip);
        }
Example #24
    public static AudioClip generateAudioFromCurve(AnimationCurve curve, int frequencyRate = 44100)
    {
        float item = curve[curve.length - 1].time;

        float[] singleArray = new float[(int)((float)frequencyRate * item)];
        for (int i = 0; i < (int)singleArray.Length; i++)
        {
            float single = (float)i / (float)frequencyRate;
            singleArray[i] = curve.Evaluate(single);
        }
        int       length    = (int)singleArray.Length;
        AudioClip audioClip = AudioClip.Create("Generated Audio", length, 1, frequencyRate, false);

        audioClip.SetData(singleArray, 0);
        return(audioClip);
    }
Example #25
        private void Update()
        {
            CheckForErrorOnCall(MicStream.MicSetGain(InputGain));
            audioSource.volume = HearSelf ? 1.0f : 0.0f;

            try
            {
                audioDataMutex.WaitOne();

                var connection = GetActiveConnection();
                hasServerConnection = (connection != null);
                if (hasServerConnection)
                {
                    while (micBuffer.UsedCapacity >= 4 * AudioPacketSize)
                    {
                        TransmitAudio(connection);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.LogError(e.Message);
            }
            finally
            {
                audioDataMutex.ReleaseMutex();
            }

            #region DebugInfo
            if (SaveTestClip && testCircularBuffer.UsedCapacity == testCircularBuffer.TotalCapacity)
            {
                float[] testBuffer = new float[testCircularBuffer.UsedCapacity / 4];
                testCircularBuffer.Read(testBuffer, 0, testBuffer.Length * 4);
                testCircularBuffer.Reset();
                TestClip = AudioClip.Create("testclip", testBuffer.Length / 2, 2, 48000, false);
                TestClip.SetData(testBuffer, 0);
                if (!testSource)
                {
                    GameObject testObj = new GameObject("testclip");
                    testObj.transform.parent = transform;
                    testSource = testObj.AddComponent <AudioSource>();
                }
                testSource.PlayClip(TestClip);
                SaveTestClip = false;
            }
            #endregion
        }
Example #26
    void Start()
    {
        // This is a workaround for the game to work on a system that's not on English
        System.Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;

        HoundCloudRequester requester = new HoundCloudRequester(Settings.Instance.clientId, Settings.Instance.clientKey, Settings.Instance.userId);

        RequestInfoJSON.TypeClientVersion client_version = new RequestInfoJSON.TypeClientVersion();
        client_version.key     = 0;
        client_version.choice0 = "1.0";

        // RequestInfoJSON class contains metadata about the current request, like session_id, request id, client_version, etc
        RequestInfoJSON request_info = new RequestInfoJSON();

        request_info.setUnitPreference(RequestInfoJSON.TypeUnitPreference.UnitPreference_US);
        request_info.setRequestID(Guid.NewGuid().ToString());
        request_info.setClientVersion(client_version);


        // We need these two lines to get the vocalized intro of the game. The parameter Claire sets the voice type
        // and we need to set the ResponseAudioShortOrLong field to tell the API we want it to vocalize the answer
        request_info.setResponseAudioVoice("Claire");
        request_info.setResponseAudioShortOrLong(RequestInfoJSON.stringToResponseAudioShortOrLong("Long"));

        // The HoundServerJSON class is used to handle all server responses
        HoundServerJSON hound_result;

        hound_result = requester.do_text_request("speak_this", null, request_info);
        CommandResultJSON my_answer   = hound_result.getAllResults()[0];
        string            bytes_audio = my_answer.getResponseAudioBytes();

        byte[] bytes = System.Convert.FromBase64String(bytes_audio);

        // Play the clip back
        audioSource = GetComponent <AudioSource>();

        Debug.Log("Intro started...");

        WAV       wav       = new WAV(bytes);
        AudioClip audioClip = AudioClip.Create("testSound", wav.SampleCount, 1, wav.Frequency, false, false);

        audioClip.SetData(wav.LeftChannel, 0);
        audioSource.clip = audioClip;
        audioSource.Play();

        Debug.Log("Intro finished!");
    }
Example #27
    // Start is called before the first frame update
    void Start()
    {
        float x = 3;
        float y = 10;

        Debug.Log(add(x, y));

        // IntPtr emuOut = IntPtr.Zero;

        var path = Application.streamingAssetsPath + "/sample2.nsf";

        Debug.Log(path);
        GmeReader reader = new GmeReader(path);



        int size = reader.TrackInfo.playLength;

        // IntPtr wavPtr = AquesTalk_Synthe(ref aqtk_voice, koeSjisBytes, ref size);
        Debug.Log("size : " + size);

        // Success check
        // if (wavPtr == IntPtr.Zero)
        // {
        //     Debug.LogError("ERROR: Voice synthesis failed. Invalid characters may have been used.");
        // }

        // Copy to the managed side so the data can be handled from C#
        byte[] byte_data = new byte[size];
        reader.Read(byte_data, 0, size);
        // Marshal.Copy(wavPtr, byte_data, 0, size);

        // Free the unmanaged pointer as soon as it is no longer needed
        // AquesTalk_FreeWave(wavPtr);

        // Convert to a float array
        float[] float_data = CreateRangedRawData(byte_data, 0, size / 2, 1, BIT_16);

        // Create the AudioClip
        AudioClip audioClip = AudioClip.Create("gme", float_data.Length, 2, 44100, false);

        audioClip.SetData(float_data, 0);
        m_Audio.clip = audioClip;

        // Play it
        m_Audio.Play();
    }
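
CreateRangedRawData and BIT_16 are defined elsewhere in that project. A sketch of what they are assumed to do, converting a range of little-endian 16-bit PCM bytes into floats in the -1..1 range:

    // Hypothetical sketch of the conversion helper used above.
    const int BIT_16 = 16;

    static float[] CreateRangedRawData(byte[] rawData, int offsetSamples, int samples, int channels, int bitDepth)
    {
        // Only 16-bit little-endian PCM is handled in this sketch.
        float[] rangedData = new float[samples * channels];
        int byteOffset = offsetSamples * channels * (bitDepth / 8);

        for (int i = 0; i < rangedData.Length; i++)
        {
            short sample = System.BitConverter.ToInt16(rawData, byteOffset + i * 2);
            rangedData[i] = sample / 32768f; // scale to the -1..1 range
        }
        return rangedData;
    }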
Example #28
    /// <summary>
    /// Stop recording.
    /// </summary>
    public void EndRecording()
    {
        if (!Microphone.IsRecording(deviceName: micName))
        {
            Debug.Log("Recording has not been started");
            return;
        }

        Debug.Log("Stopping recording");

        // Get the microphone's current recording position
        int position = Microphone.GetPosition(micName);

        // Stop recording
        Microphone.End(deviceName: micName);
        RecordingStartButtonScript.recordedFlg = true;

        // Allocate a temporary buffer and copy the audio data out of the AudioClip
        float[] soundData = new float[audioSource.clip.samples * audioSource.clip.channels];
        audioSource.clip.GetData(soundData, 0);

        // Allocate a new buffer sized to hold only the samples up to position.
        float[] newData = new float[position * audioSource.clip.channels];

        // Copy only the samples up to position
        for (int i = 0; i < newData.Length; i++)
        {
            newData[i] = soundData[i];
        }

        // Create a new AudioClip instance and set the audio data on it
        AudioClip newClip = AudioClip.Create(audioSource.clip.name, position, audioSource.clip.channels, audioSource.clip.frequency, false);

        newClip.SetData(newData, 0);
        AudioClip.Destroy(audioSource.clip);
        audioSource.clip = newClip;

        // Save to a WAV file
        DateTime dt           = DateTime.Now;
        string   dtStr        = dt.ToString("yyyyMMddHHmmss");
        string   fileFullPath = Path.Combine(Application.persistentDataPath, "audiofile_" + dtStr + ".wav");

        if (!SaveAudioSourceWav.Save(fileFullPath, audioSource.clip))
        {
            Debug.Log("Could not save the recorded audio file");
        }
    }
Example #29
        private static AudioClip ToAudioClip(byte[] fileBytes, int offsetSamples = 0, string name = "Audio")
        {
            int    subchunk1   = BitConverter.ToInt32(fileBytes, 16);
            UInt16 audioFormat = BitConverter.ToUInt16(fileBytes, 20);

            // NB: Only uncompressed PCM wav files are supported.
            string formatCode = FormatCode(audioFormat);

            Debug.AssertFormat(audioFormat == 1 || audioFormat == 65534, "Detected format code '{0}' {1}, but only PCM and WaveFormatExtensible uncompressed formats are currently supported.", audioFormat, formatCode);

            UInt16 channels   = BitConverter.ToUInt16(fileBytes, 22);
            int    sampleRate = BitConverter.ToInt32(fileBytes, 24);
            //int byteRate = BitConverter.ToInt32 (fileBytes, 28);
            //UInt16 blockAlign = BitConverter.ToUInt16 (fileBytes, 32);
            UInt16 bitDepth = BitConverter.ToUInt16(fileBytes, 34);

            int headerOffset = 16 + 4 + subchunk1 + 4;
            int subchunk2    = BitConverter.ToInt32(fileBytes, headerOffset);

            float[] data;
            switch (bitDepth)
            {
            case 8:
                data = Convert8BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;

            case 16:
                data = Convert16BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;

            case 24:
                data = Convert24BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;

            case 32:
                data = Convert32BitByteArrayToAudioClipData(fileBytes, headerOffset, subchunk2);
                break;

            default:
                throw new Exception(bitDepth + " bit depth isn't supported");
            }

            AudioClip audioClip = AudioClip.Create(name, data.Length, (int)channels, sampleRate, false);

            audioClip.SetData(data, 0);
            return(audioClip);
        }
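
The ConvertXXBitByteArrayToAudioClipData helpers belong to the same WavUtility-style code and are not shown here. A sketch of the 16-bit variant, under the assumption that headerOffset points at the data subchunk's size field as computed above:

        // Hypothetical sketch of the 16-bit converter referenced above.
        private static float[] Convert16BitByteArrayToAudioClipData(byte[] source, int headerOffset, int dataSize)
        {
            int pos         = headerOffset + sizeof(int); // skip the subchunk2 size field itself
            int sampleCount = dataSize / sizeof(Int16);

            float[] data = new float[sampleCount];
            for (int i = 0; i < sampleCount; i++)
            {
                data[i] = (float)BitConverter.ToInt16(source, pos + i * sizeof(Int16)) / Int16.MaxValue;
            }
            return data;
        }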
Example #30
    public AudioClip Combine(List <AudioClip> clips)
    {
        if (clips == null || clips.Count == 0)
        {
            return(null);
        }

        int length = 0;

        for (int i = 0; i < clips.Count; i++)
        {
            if (clips[i] == null)
            {
                continue;
            }

            length += clips[i].samples * clips[i].channels;
        }

        float[] data = new float[length];
        length = 0;
        for (int i = 0; i < clips.Count; i++)
        {
            if (clips[i] == null)
            {
                continue;
            }

            float[] buffer = new float[clips[i].samples * clips[i].channels];
            clips[i].GetData(buffer, 0);
            //System.Buffer.BlockCopy(buffer, 0, data, length, buffer.Length);
            buffer.CopyTo(data, length);
            length += buffer.Length;
        }

        if (length == 0)
        {
            return(null);
        }

        AudioClip result = AudioClip.Create("Combine", length, mTempAudioClip.channels, mTempAudioClip.frequency, false,
                                            false);

        result.SetData(data, 0);

        return(result);
    }