GetData() public method

Fills the supplied array with raw, interleaved sample data from the AudioClip, starting at the sample frame given by offsetSamples, and returns true if the data could be read.

public GetData ( float[] data, int offsetSamples ) : bool
data float[]
offsetSamples int
return bool
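
Before the project examples, a minimal usage sketch (the class name CopyClipSamples and its clip field are illustrative, not part of the Unity API): allocate one float per sample per channel and pass the starting sample frame as offsetSamples.

using UnityEngine;

public class CopyClipSamples : MonoBehaviour
{
    public AudioClip clip;   // assign any AudioClip in the Inspector

    void Start()
    {
        if (clip == null) return;

        // GetData expects one float per sample per channel, interleaved, in the range [-1, 1].
        float[] buffer = new float[clip.samples * clip.channels];

        // offsetSamples is the sample frame to start reading from (0 = start of the clip).
        bool ok = clip.GetData(buffer, 0);
        Debug.Log("GetData returned " + ok + ", copied " + buffer.Length + " floats");
    }
}

GetData may fail (leaving the buffer unfilled) for clips whose import settings do not expose readable sample data; this is the error that Example #22 below works around by temporarily switching the importer load type.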
Code example #1
File: CAudioSystem.cs Project: nulhax/VOID
	//Returns data from an AudioClip as a byte array.
	public static byte[] GetClipData(AudioClip _clip)
	{
		//Get data
		float[] floatData = new float[_clip.samples * _clip.channels];
		_clip.GetData(floatData,0);			
		
		//convert to byte array
		byte[] byteData = new byte[floatData.Length * 4];
		Buffer.BlockCopy(floatData, 0, byteData, 0, byteData.Length);
		
		return(byteData);
	}	
Code example #2
    private IEnumerator _StartMicrophone()
    {
        deviceName = PlayerPrefs.GetString("VJMicrophone.deviceName", "");

        while(micSemaphor > 0) {

            string currentDeviceName = deviceName;
            CheckSampleRate(currentDeviceName);

            Debug.Log("Microphone starting: " + currentDeviceName + " sampling rate:" + sampleRate);

            micClip = Microphone.Start(currentDeviceName, true, clipLength, sampleRate);
            float[] samples = new float[(int)(analysisWindow * sampleRate)];
            audio.clip = micClip;
            audio.Play();

            while (currentDeviceName == deviceName && micSemaphor > 0) {
                yield return 0;

                int position = Microphone.GetPosition(deviceName);
                if (position < samples.Length) position += clipLength * sampleRate;
                micClip.GetData(samples, position - samples.Length);
                audio.timeSamples = position;

                float rms = 0.0f;
                foreach (float lvl in samples) {
                    rms += lvl * lvl;
                }
                rms = Mathf.Sqrt(rms / samples.Length);

                level = Mathf.Clamp01( 0.5f * (2.0f + Mathf.Log10(rms)) );
            }

            audio.Stop();
            audio.clip = null;
            Debug.Log("Microphone stopping: " + currentDeviceName );
            Microphone.End(currentDeviceName);
        }
    }
Code example #3
        public static AudioClip Concat(this AudioClip audioClip, AudioClip otherClip)
        {
            int length = audioClip.samples >= otherClip.samples ? audioClip.samples : otherClip.samples;
            AudioClip clipSum = AudioClip.Create(audioClip.name + " + " + otherClip.name, length, audioClip.channels, audioClip.frequency, false);

            float[] dataSum;
            float[] otherData;

            if (audioClip.samples >= otherClip.samples)
            {
                dataSum = new float[audioClip.samples];
                audioClip.GetData(dataSum, 0);
                otherData = new float[otherClip.samples];
                otherClip.GetData(otherData, 0);
            }
            else
            {
                dataSum = new float[otherClip.samples];
                otherClip.GetData(dataSum, 0);
                otherData = new float[audioClip.samples];
                audioClip.GetData(otherData, 0);
            }

            for (int i = 0; i < otherData.Length; i++)
                dataSum[i] += otherData[i];

            clipSum.SetData(dataSum, 0);

            return clipSum;
        }
Code example #4
	public static AudioClip TrimSilence(AudioClip clip, float min) {
		var samples = new float[clip.samples];

		clip.GetData(samples, 0);

		return TrimSilence(new List<float>(samples), min, clip.channels, clip.frequency);
	}
Code example #5
    /// <summary>
    /// Creates a haptics clip from the specified audio clip.
    /// </summary>
    public OVRHapticsClip(AudioClip audioClip, int channel = 0)
    {
        float[] audioData = new float[audioClip.samples * audioClip.channels];
        audioClip.GetData(audioData, 0);

        InitializeFromAudioFloatTrack(audioData, audioClip.frequency, audioClip.channels, channel);
    }
Code example #6
 static public int GetData(IntPtr l)
 {
     try {
                     #if DEBUG
         var    method     = System.Reflection.MethodBase.GetCurrentMethod();
         string methodName = GetMethodName(method);
                     #if UNITY_5_5_OR_NEWER
         UnityEngine.Profiling.Profiler.BeginSample(methodName);
                     #else
         Profiler.BeginSample(methodName);
                     #endif
                     #endif
         UnityEngine.AudioClip self = (UnityEngine.AudioClip)checkSelf(l);
         System.Single[]       a1;
         checkArray(l, 2, out a1);
         System.Int32 a2;
         checkType(l, 3, out a2);
         var ret = self.GetData(a1, a2);
         pushValue(l, true);
         pushValue(l, ret);
         return(2);
     }
     catch (Exception e) {
         return(error(l, e));
     }
             #if DEBUG
     finally {
                     #if UNITY_5_5_OR_NEWER
         UnityEngine.Profiling.Profiler.EndSample();
                     #else
         Profiler.EndSample();
                     #endif
     }
             #endif
 }
Code example #7
    private float[] GetAudioClipSamples(AudioClip clip)
    {
        float[] buffer = new float[clip.samples * clip.channels];

        clip.GetData(buffer, 0);

        return buffer;
    }
Code example #8
File: Sampler.cs Project: cstoquer/oodio-unity
        //▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄
        public Sampler(AudioClip sample)
        {
            output = new AudioSignal();
            pos = 0;

            // get samples data
            len = sample.samples;
            data = new float[len];
            sample.GetData (data, 0);
        }
Code example #9
 /// <summary>
 /// Write the specified value using the writer.
 /// </summary>
 /// <param name="value">Value.</param>
 /// <param name="writer">Writer.</param>
 public override void Write(object value, ISaveGameWriter writer)
 {
     UnityEngine.AudioClip audioClip = (UnityEngine.AudioClip)value;
     float[] data = new float[audioClip.samples];
     audioClip.GetData(data, 0);
     writer.WriteProperty("data", data);
     writer.WriteProperty("channels", audioClip.channels);
     writer.WriteProperty("frequency", audioClip.frequency);
     writer.WriteProperty("name", audioClip.name);
     writer.WriteProperty("hideFlags", audioClip.hideFlags);
 }
Code example #10
File: SaveWav.cs Project: MedStarSiTEL/UnityTrauma
	public static AudioClip TrimSilence(AudioClip clip, float min) 
	{
		// do nothing if clip size is 0
		if ( clip.samples == 0 )
			return clip;
		
		var samples = new float[clip.samples];
 
		clip.GetData(samples, 0);

		return TrimSilence(new List<float>(samples), min, clip.channels, clip.frequency);
	}
Code example #11
File: Sound_Converter.cs Project: theslimreaper/DnD
    public string ConvertSoundToString(AudioClip clip)
    {
        string base64 = "";
        if (clip != null)
        {
            float[] fBytes = new float[clip.samples * clip.channels];
            byte[] bytes = new byte[fBytes.Length * 4];
            clip.GetData(fBytes, 0);
            Buffer.BlockCopy(fBytes, 0, bytes, 0, bytes.Length);

            base64 = Convert.ToBase64String(bytes);
        }
         return base64;
    }
Code example #12
 static int QPYX_GetData_YXQP(IntPtr L_YXQP)
 {
     try
     {
         ToLua.CheckArgsCount(L_YXQP, 3);
         UnityEngine.AudioClip QPYX_obj_YXQP = (UnityEngine.AudioClip)ToLua.CheckObject(L_YXQP, 1, typeof(UnityEngine.AudioClip));
         float[] QPYX_arg0_YXQP = ToLua.CheckNumberArray <float>(L_YXQP, 2);
         int     QPYX_arg1_YXQP = (int)LuaDLL.luaL_checknumber(L_YXQP, 3);
         bool    QPYX_o_YXQP    = QPYX_obj_YXQP.GetData(QPYX_arg0_YXQP, QPYX_arg1_YXQP);
         LuaDLL.lua_pushboolean(L_YXQP, QPYX_o_YXQP);
         return(1);
     }
     catch (Exception e_YXQP)                {
         return(LuaDLL.toluaL_exception(L_YXQP, e_YXQP));
     }
 }
Code example #13
 static int GetData(IntPtr L)
 {
     try
     {
         ToLua.CheckArgsCount(L, 3);
         UnityEngine.AudioClip obj = (UnityEngine.AudioClip)ToLua.CheckObject(L, 1, typeof(UnityEngine.AudioClip));
         float[] arg0 = ToLua.CheckNumberArray <float>(L, 2);
         int     arg1 = (int)LuaDLL.luaL_checknumber(L, 3);
         obj.GetData(arg0, arg1);
         return(0);
     }
     catch (Exception e)
     {
         return(LuaDLL.toluaL_exception(L, e));
     }
 }
Code example #14
 static public int GetData(IntPtr l)
 {
     try {
         UnityEngine.AudioClip self = (UnityEngine.AudioClip)checkSelf(l);
         System.Single[]       a1;
         checkArray(l, 2, out a1);
         System.Int32 a2;
         checkType(l, 3, out a2);
         self.GetData(a1, a2);
         pushValue(l, true);
         return(1);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Code example #15
 static public int GetData(IntPtr l)
 {
     try{
         UnityEngine.AudioClip self = (UnityEngine.AudioClip)checkSelf(l);
         System.Single[]       a1;
         checkType(l, 2, out a1);
         System.Int32 a2;
         checkType(l, 3, out a2);
         self.GetData(a1, a2);
         return(0);
     }
     catch (Exception e) {
         LuaDLL.luaL_error(l, e.ToString());
         return(0);
     }
 }
Code example #16
 static int GetData(IntPtr L)
 {
     try
     {
         ToLua.CheckArgsCount(L, 3);
         UnityEngine.AudioClip obj = (UnityEngine.AudioClip)ToLua.CheckObject <UnityEngine.AudioClip>(L, 1);
         float[] arg0 = ToLua.CheckNumberArray <float>(L, 2);
         int     arg1 = (int)LuaDLL.luaL_checkinteger(L, 3);
         bool    o    = obj.GetData(arg0, arg1);
         LuaDLL.lua_pushboolean(L, o);
         return(1);
     }
     catch (Exception e)
     {
         return(LuaDLL.toluaL_exception(L, e));
     }
 }
Code example #17
    static int GetData(IntPtr L)
    {
#if UNITY_EDITOR
        ToluaProfiler.AddCallRecord("UnityEngine.AudioClip.GetData");
#endif
        try
        {
            ToLua.CheckArgsCount(L, 3);
            UnityEngine.AudioClip obj = (UnityEngine.AudioClip)ToLua.CheckObject(L, 1, typeof(UnityEngine.AudioClip));
            float[] arg0 = ToLua.CheckNumberArray <float>(L, 2);
            int     arg1 = (int)LuaDLL.luaL_checknumber(L, 3);
            bool    o    = obj.GetData(arg0, arg1);
            LuaDLL.lua_pushboolean(L, o);
            return(1);
        }
        catch (Exception e)
        {
            return(LuaDLL.toluaL_exception(L, e));
        }
    }
Code example #18
    public static Byte[] ConvertClipToByte(AudioClip clip)
    {
        if (clip == null)
        {
            Debug.Log("GetClipData audio.clip is null");
            return null;
        }

        float[] samples = new float[clip.samples];

        clip.GetData(samples, 0);


        Byte[] outData = new byte[samples.Length * 2];
        //Int16[] intData = new Int16[samples.Length];
        //converting in 2 float[] steps to Int16[], //then Int16[] to Byte[]

        int rescaleFactor = 32767; //to convert float to Int16

        for (int i = 0; i < samples.Length; i++)
        {
            short temshort = (short)(samples[i] * rescaleFactor);

            Byte[] temdata = System.BitConverter.GetBytes(temshort);

            outData[i * 2] = temdata[0];
            outData[i * 2 + 1] = temdata[1];


        }
        if (outData == null || outData.Length <= 0)
        {
            Debug.Log("GetClipData intData is null");
            return null;
        }
        //return intData;
        return outData;
    }
Code example #19
File: Analyzer.cs Project: DomCristaldi/Coda
        //public int numPartitions = 10000;
        //public float overlapPercent = 0.5f;
        //public float threshold = 1 - 0.75f; //larger float values are more strict
        //public float beatDetectionOverlapPercent = 0.5f;
        /// <summary>
        /// Processes raw audio data to find average energy for overlapping partitions.
        /// </summary>
        /// <returns>The FFT data array.</returns>
        /// <param name="clip">Audio clip to process.</param>
        /// <param name="numPartitions">Number of pieces to split the song into for analysis</param>
        /// <param name="overlapPercent">The percentage which the partitions overlap each other</param>
        public double[] ProcessAudio(AudioClip clip, int numPartitions, float overlapPercent)
        {
            _averages = new double[(int)(numPartitions / overlapPercent) - 1];
            int samplesPerPartition = (int)(clip.samples / numPartitions);

            int numDivisions = (int)(numPartitions / overlapPercent) - 1;
            //Because the partitions overlap, the number of iterations is the number of partitions multiplied by the inverse of the overlap percent
            for (int i = 0; i < numDivisions; i++) {

                float[] samples = new float[samplesPerPartition];
                int input = i * ((int) (samples.Length * overlapPercent)); //the offset to start getting song data increases by overlapPercent as i is incremented
                clip.GetData(samples, input);

                //the raw partition data is run through the Blackman-Harris windowing function
                for (int n = 0; n < samples.Length; n++) {
                    samples [n] *= _a0 - _a1 * Mathf.Cos ((2 * Mathf.PI * n) / samples.Length - 1) + _a2 * Mathf.Cos ((4 * Mathf.PI * n) / samples.Length - 1) - _a3 * Mathf.Cos ((6 * Mathf.PI * n) / samples.Length - 1);
                }

                FFT2 FFT = new FFT2 ();
                FFT.init ((uint)Mathf.Log(samplesPerPartition,2));
                //our array of floats is converted to an array of doubles for use in the FFT function
                double[] double_samples = samples.ToList ().ConvertAll<double> (new System.Converter<float, double> (F2D)).ToArray ();
                //runs our sample data through a Fast Fourier Transform to convert it to the frequency domain
                FFT.run (double_samples, new double[samples.Length], false);

                //gets the average value for this partition and adds it to an array.
                //when all of the partitions are completed, averages will contain data for the entire song
                double avg = double_samples.Average ();
                _averages[i] = avg;

            }

            return _averages;
        }
Code example #20
    public void Play(AudioClip clip)
    {
        samples_         = clip.samples;
        df_              = (float)clip.frequency / sampleNum;
        updateMouthTime_ = (float)clip.length * sampleNum / samples_;
        delayCnt_        = (int)(delayTime / updateMouthTime_); // delay

        rawData_  = new float[samples_ * clip.channels];
        clip.GetData(rawData_, 0);

        isTalking_ = true;
        Clear();

        playClip_ = AudioClip.Create("tmp", samples_, clip.channels, clip.frequency,
            true, true, OnAudioRead, OnAudioSetPosition);
        var pos = (playingPosition) ? playingPosition.transform.position : transform.position;
        AudioSource.PlayClipAtPoint(playClip_, pos);
    }
Code example #21
File: WaveDisplay.cs Project: mmandel/8Nights2
    public void SetAudioData(AudioClip clip)
    {
        // Clear out previous channel displays.
        channelDisplays.Clear();

        float[] rawData = new float[clip.samples * clip.channels];
        clip.GetData(rawData, 0);

        int numChannelsToDraw = Mathf.Min(clip.channels, MAX_CHANNELS_TO_DRAW);
        for (int i = 0; i < numChannelsToDraw; ++i)
        {
            float[] channelData = new float[clip.samples];

            for (int rawSampleIdx = i, channelSampleIdx = 0; rawSampleIdx < rawData.Length; rawSampleIdx += clip.channels, ++channelSampleIdx)
            {
                channelData[channelSampleIdx] = rawData[rawSampleIdx];
            }

            channelDisplays.Add(new ChannelDisplay(channelData));
        }
    }
Code example #22
    public static Texture2D CreatePreview(AudioClip aud, int width, int height, Color color, PreviewType previewType)
    {
        int step = Mathf.CeilToInt((aud.samples * aud.channels) / width);
        float[] samples = new float[aud.samples * aud.channels];

        //workaround to prevent the error in the function getData when Audio Importer loadType is "compressed in memory"
        string path = AssetDatabase.GetAssetPath(aud);
        AudioImporter audioImporter = AssetImporter.GetAtPath(path) as AudioImporter;
        AudioImporterLoadType audioLoadTypeBackup = audioImporter.loadType;
        audioImporter.loadType = AudioImporterLoadType.StreamFromDisc;
        AssetDatabase.ImportAsset(path);

        //getData after the loadType changed
        aud.GetData(samples, 0);

        //restore the loadType
        audioImporter.loadType = audioLoadTypeBackup;
        AssetDatabase.ImportAsset(path);

        Texture2D img = new Texture2D(width, height, TextureFormat.RGBA32, false);

        if (previewType == PreviewType.wave)
        {

            Color[] xy = new Color[width * height];
            for (int x = 0; x < width * height; x++)
            {
                xy[x] = new Color(0, 0, 0, 0);
                //xy[x] = new Color(0, 1, 0, 0.2f);
            }

            img.SetPixels(xy);

            int i = 0;
            while (i < width)
            {
                int barHeight = Mathf.CeilToInt(Mathf.Clamp(Mathf.Abs(samples[i * step]) * height, 0, height));
                int add = samples[i * step] > 0 ? 1 : -1;
                for (int j = 0; j < barHeight; j++)
                {
                    img.SetPixel(i, Mathf.FloorToInt(height / 2) - (Mathf.FloorToInt(barHeight / 2) * add) + (j * add), color);
                }
                ++i;

            }

            img.Apply();
        }
        else if (previewType == PreviewType.bar)
        {
            img = new Texture2D(width, 1, TextureFormat.RGBA32, false);
            int i = 0;
            while (i < width)
            {
                //int barHeight = Mathf.CeilToInt(Mathf.Clamp(Mathf.Abs(samples[i * step]) * height, 0, height));
                //int add = samples[i * step] > 0 ? 1 : -1;
                float colorIntensity = Mathf.Clamp(Mathf.Abs(samples[i * step]) * 10f, 0, 1);
                Color colorReturn = new Color(color.r / colorIntensity, color.g / colorIntensity, color.b / colorIntensity, colorIntensity / 4f);
                img.SetPixel(i, 0, colorReturn);
                ++i;
            }
            img.Apply();

        }
        else if (previewType == PreviewType.both)
        {
            /*
            Color[] xy = new Color[width * height];
            for (int x = 0; x < width * height; x++)
            {
                xy[x] = new Color(1, 0, 0, 1);
                //xy[x] = new Color(0, 1, 0, 0.2f);
            }
            img.SetPixels(xy);
            */
            int i = 0;
            while (i < width)
            {
                int barHeight = Mathf.CeilToInt(Mathf.Clamp(Mathf.Abs(samples[i * step]) * height, 0, height));
                int add = samples[i * step] > 0 ? 1 : -1;

                float colorIntensity = Mathf.Clamp(Mathf.Abs(samples[i * step]) * 10f, 0, 1);

                Color colorReturn = new Color(color.r / colorIntensity, color.g / colorIntensity, color.b / colorIntensity, colorIntensity / 6f);

                for (int j = 0; j < height; j++)
                {
                    img.SetPixel(i, j, colorReturn);
                }

                for (int j = 0; j < barHeight; j++)
                {
                    img.SetPixel(i, Mathf.FloorToInt(height / 2) - (Mathf.FloorToInt(barHeight / 2) * add) + (j * add), color);
                }
                ++i;

            }

            img.Apply();

        }
        return img;
    }
Code example #23
	static void ConvertAndWrite(FileStream fileStream, AudioClip clip) {

		var samples = new float[clip.samples];

		clip.GetData(samples, 0);

		Int16[] intData = new Int16[samples.Length];
		//converting in 2 float[] steps to Int16[], //then Int16[] to Byte[]

		Byte[] bytesData = new Byte[samples.Length * 2];
		//bytesData array is twice the size of
		//dataSource array because a float converted in Int16 is 2 bytes.

		int rescaleFactor = 32767; //to convert float to Int16

		for (int i = 0; i<samples.Length; i++) {
			intData[i] = (short) (samples[i] * rescaleFactor);
			Byte[] byteArr = new Byte[2];
			byteArr = BitConverter.GetBytes(intData[i]);
			byteArr.CopyTo(bytesData, i * 2);
		}

		fileStream.Write(bytesData, 0, bytesData.Length);
	}
Code example #24
File: Audio.cs Project: BasmanovDaniil/Whoosh
        public static AudioClip MedianFilter(AudioClip clip, int window)
        {
            var data = new float[clip.samples];
            clip.GetData(data, 0);

            data = MedianFilter(data, window);

            var filtered = AudioClip.Create("MedianFilter(" + clip.name + ")", clip.samples, 1, clip.frequency, false,
                false);
            filtered.SetData(data, 0);
            return filtered;
        }
Code example #25
		/// <summary>
		/// Writes a sound file (encrypted), keeping memory usage low even for fairly large files.
		/// Note: encoding sound data on read/write is very slow and memory-hungry, so it is not recommended.
		/// Unless it is truly necessary, prefer doing the IO without encoding.
		/// </summary>
		/// <param name="path">File path</param>
		/// <param name="audioClip">Sound to write</param>
		/// <returns>Success or failure</returns>
		public override bool WriteSound(string path, AudioClip audioClip)
		{
			try
			{
				audioHeader[(int)SoundHeader.Samples] = audioClip.samples;
				audioHeader[(int)SoundHeader.Frequency] = audioClip.frequency;
				audioHeader[(int)SoundHeader.Channels] = audioClip.channels;

				int audioSize = audioClip.samples * audioClip.channels;
				using (FileStream fstream = new FileStream(path, FileMode.Create, FileAccess.Write))
				{
					//Write the header
					Buffer.BlockCopy(audioHeader, 0, workBufferArray, 0, audioHeaderSize);
					CustomEncodeNoCompress(CryptKeyBytes, workBufferArray, 0, audioHeaderSize);
					fstream.Write(workBufferArray, 0, audioHeaderSize);

					int offsetSamples = 0;
					while (true)
					{
						//Write a fixed-size chunk at a time
						int countSample = Math.Min(audioSamplesWorkArray.Length, audioSize - offsetSamples);

						audioClip.GetData(audioSamplesWorkArray, offsetSamples / audioClip.channels);

						//Convert the sound's sample data into the work buffer
						for (int i = 0; i < countSample; i++)
						{
							audioShortWorkArray[i] = (short)(short.MaxValue * audioSamplesWorkArray[i]);
						}
						int count = countSample * 2;
						Buffer.BlockCopy(audioShortWorkArray, 0, workBufferArray, 0, count);

						//Encrypt
						CustomEncodeNoCompress(CryptKeyBytes, workBufferArray, 0, count);
						//Write
						fstream.Write(workBufferArray, 0, count);
						offsetSamples += countSample;
						if (offsetSamples >= audioSize) break;
					}
				}
				return true;
			}
			catch (Exception e)
			{
				Debug.LogError(e.ToString());
				return false;
			}
		}
Code example #26
File: AudioPostProcessor.cs Project: fengqk/Art
	bool IsStream(AudioClip clip, out float[] samples)
	{
		samples = new float[clip.samples * clip.channels];
		
		clip.GetData(samples, 0);

		bool allZero = true;
		for (int i = 0; i < samples.Length; i++)
		{
			if (samples[i] != 0)
			{
				allZero = false;
				break;
			}
		}
		return allZero;
	}
Code example #27
	//Get the data from each clip and compare it sample by sample.
	private static bool compareClips(AudioClip clip1,AudioClip clip2)
	{
		bool sameClip = false;

		//check to make sure we have at least one clip already playing.
		if(clip1 == null)
		{
			return sameClip;
		}

		//Set up the arrays for our clip data and the get that data.
		float[] firstClipSamples = new float[clip1.samples * clip1.channels];
		float[] secondClipSamples = new float[clip2.samples * clip2.channels];
		
		clip1.GetData(firstClipSamples, 0);
		clip2.GetData(secondClipSamples, 0);

		//this is done to ensure that we don't go out of bounds in either array;
		if(firstClipSamples.Length != secondClipSamples.Length)
		{
			return sameClip;
		}

		//compare the two arrays index by index for equality
		for(int i = 0; i < firstClipSamples.Length; i ++)
		{
			if(firstClipSamples[i] == secondClipSamples[i])
			{
				sameClip = true;
			}
			else
				sameClip = false;
		}
		return sameClip;
	}
Code example #28
File: Audio.cs Project: BasmanovDaniil/Whoosh
        public static AudioClip Volume(AudioClip clip, float volume)
        {
            var data = new float[clip.samples];
            clip.GetData(data, 0);

            data = Volume(data, volume);

            var filtered = AudioClip.Create(clip.name, clip.samples, 1, clip.frequency, false, false);
            filtered.SetData(data, 0);
            return filtered;
        }
Code example #29
    public void RequestSpeech(AudioClip audio, GameObject receiver, string callback)
    {
        float[] clipData = new float[audio.samples * audio.channels];
        audio.GetData(clipData, 0);
        WaveGen.WaveFormatChunk format = new WaveGen().MakeFormat(audio);
        
        try
        {
            string filename = GetTempFileName() + ".wav";
            FileStream stream = File.OpenWrite(filename);
            new WaveGen().Write(clipData, format, stream);
            stream.Close();

            Debug.Log("Request Start time: " + DateTime.Now.ToLongTimeString());

            if (requestFactory == null)
            {
                requestFactory = BuildRequestFactory(RequestFactory.ScopeTypes.Speech);
            }

            if (clientToken ==  null)
            {
                clientToken = GetAccessToken();
            }

            if (null != clientToken)
            {
                requestFactory.ClientCredential = clientToken;
            }

            ATT_MSSDK.Speechv3.SpeechResponse response = SpeechToTextService(filename, "Generic", "audio/wav");
            string speechOutput = response.Recognition.NBest[0].ResultText;
            if (clientToken == null)
            {
                clientToken = requestFactory.ClientCredential;
                SaveAccessToken();
            }

            Debug.Log("Response received time: " + DateTime.Now.ToLongTimeString());
            showProcess = false;
            Debug.Log("response: " + speechOutput);
            File.Delete(filename);
        }
        catch (System.Exception e)
        {
            Debug.LogError(e);
        }
    }
Code example #30
    void Update()
    {
        AudioEventContainer audioEventContainer = Selection.activeObject as AudioEventContainer;
        if ( audioEventContainer == null ) {
            return;
        }
        activeAudioEventContainer = audioEventContainer;

        AudioClip obj = audioEventContainer.clip;
        if ( obj != null && obj != activeAudioClip ) {
            activeAudioClip = obj;
            activeAudioClipData = new float[activeAudioClip.samples * activeAudioClip.channels];
            activeClipChannelOneData = new float[activeAudioClip.samples];
            if ( activeAudioClip.channels == 2 ) {
                activeClipChannelTwoData = new float[activeAudioClip.samples];
            }

            activeAudioClip.GetData( activeAudioClipData, 0 );

            int c = 0;
            for ( int i = 0; i < activeAudioClipData.Length; i += 2 ) {
                activeClipChannelOneData[c] = activeAudioClipData[i];
                ++c;
            }

            if ( activeAudioClip.channels == 2 ) {
                c = 0;
                for ( int j = 1; j < activeAudioClipData.Length; j += 2 ) {
                    activeClipChannelTwoData[c] = activeAudioClipData[j];
                    ++c;
                }
            }
            dirty = true;

            selectionPosition = 0;
            selectedIndex = 0;
            selectedTime = 0;
        }

        if ( activeAudioClip != null ) {
            float percent = selectedIndex / (float)activeClipChannelOneData.Length;
            selectedTime = percent * activeAudioClip.length;
        }
    }
Code example #31
    private void lipSync(AudioClip clip, int frame)
    {
        frame = ((int)(frame / NUMFRAMESLIPSYNC)) * NUMFRAMESLIPSYNC;
        int init = Mathf.CeilToInt(Globals.MILISPERFRAME * clip.frequency);
        float[] samples = new float[init * clip.channels * 3];
        init *= frame % (int)(clip.length * Globals.FRAMESPERSECOND);

        clip.GetData(samples, init);
        float midSamples = 0;
        for(int i = 0; i < samples.Length; ++i) {
            midSamples += Mathf.Abs(samples[i]);
        }
        midSamples /= samples.Length;

        Component[] children = GetComponentsInChildren<Component>(true);
        foreach(Component child in children) {
            if((child.name.StartsWith("exp_") && child.name.EndsWith("_m")) || child.name.StartsWith("lipsync_"))
                child.gameObject.SetActive(false);
            if(midSamples < 0.05f && mMouthClose && child.name == mExpression + "_m")
                child.gameObject.SetActive(true);
            else if(midSamples < 0.05f && !mMouthClose && child.name == "lipsync_00")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.05f && midSamples < 0.1f && child.name == "lipsync_06")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.1f && midSamples < 0.15f && child.name == "lipsync_07")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.15f && midSamples < 0.2f && child.name == "lipsync_01")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.2f && midSamples < 0.25f && child.name == "lipsync_04")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.25f && midSamples < 0.3f && child.name == "lipsync_05")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.3f && midSamples < 0.35f && child.name == "lipsync_06")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.35f && midSamples < 0.4f && child.name == "lipsync_07")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.4f && midSamples < 0.45f && child.name == "lipsync_08")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.45f && child.name == "lipsync_09")
                child.gameObject.SetActive(true);

            /*if(midSamples < 0.05f && mMouthClose && child.name == mExpression + "_m")
                child.gameObject.SetActive(true);
            else if(midSamples < 0.05f && !mMouthClose && child.name == "lipsync_00")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.05f && midSamples < 0.1f && child.name == "lipsync_01")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.1f && midSamples < 0.15f && child.name == "lipsync_02")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.15f && midSamples < 0.2f && child.name == "lipsync_03")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.2f && midSamples < 0.25f && child.name == "lipsync_04")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.25f && midSamples < 0.3f && child.name == "lipsync_05")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.3f && midSamples < 0.35f && child.name == "lipsync_06")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.35f && midSamples < 0.4f && child.name == "lipsync_07")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.4f && midSamples < 0.45f && child.name == "lipsync_08")
                child.gameObject.SetActive(true);
            else if(midSamples >= 0.45f && child.name == "lipsync_09")
                child.gameObject.SetActive(true);*/

            /*if(midSamples<0.01f && child.name==mExpression+"_m")
            child.gameObject.SetActive(true);
            else if(midSamples>=0.01f && midSamples<0.05f && child.name=="lipsync_01")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.05f && midSamples<0.1f && child.name=="lipsync_02")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.1f && midSamples<0.15f && child.name=="lipsync_03")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.15f && midSamples<0.2f && child.name=="lipsync_04")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.2f && midSamples<0.25f && child.name=="lipsync_05")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.25f && midSamples<0.3f && child.name=="lipsync_06")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.3f && midSamples<0.35f && child.name=="lipsync_07")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.35f && midSamples<0.4f && child.name=="lipsync_08")
                child.gameObject.SetActive(true);
            else if(midSamples>=0.4f && child.name=="lipsync_09")
                child.gameObject.SetActive(true);*/
        }
    }
Code example #32
    public void Callibration(AudioClip clip, Vowel vowel)
    {
        int   samples = clip.samples;
        float df      = (float)clip.frequency / sampleNum;
        var   rawData = new float[samples * clip.channels];
        clip.GetData(rawData, 0);

        float f1  = 0.0f;
        float f2  = 0.0f;
        int   num = 0;
        for (int i = 0; i < samples / sampleNum; ++i) {
            if (sampleNum * (i + 1) >= samples) break;
            var input = new float[sampleNum];
            System.Array.Copy(rawData, sampleNum * i, input, 0, sampleNum);
            if ( GetVolume(input) > minVolume) {
                var formantIndices = GetFormantIndices(input);
                f1 += formantIndices.x * df;
                f2 += formantIndices.y * df;
                ++num;
            }
        }
        f1 /= num;
        f2 /= num;

        switch (vowel) {
            case Vowel.A : aCenterF1 = f1; aCenterF2 = f2; break;
            case Vowel.I : iCenterF1 = f1; iCenterF2 = f2; break;
            case Vowel.U : uCenterF1 = f1; uCenterF2 = f2; break;
            case Vowel.E : eCenterF1 = f1; eCenterF2 = f2; break;
            case Vowel.O : oCenterF1 = f1; oCenterF2 = f2; break;
        }
    }
Code example #33
File: Audio.cs Project: BasmanovDaniil/Whoosh
        public static AudioClip Mix(AudioClip a, AudioClip b)
        {
            var aData = new float[a.samples];
            a.GetData(aData, 0);

            var bData = new float[b.samples];
            b.GetData(bData, 0);

            var mixData = Mix(aData, bData);

            var clip = AudioClip.Create("Mix(" + a.name + ", " + b.name + ")", mixData.Length, 1, a.frequency, false,
                false);
            clip.SetData(mixData, 0);
            return clip;
        }
Code example #34
	public void Play(AudioClip clip)
	{
		if (isTalking_) {
			Debug.LogWarning("Now talking!");
			return;
		}

		samples_ = clip.samples;
		df_ = (float) clip.frequency / sampleNum;
		updateMouthTime_ = clip.length * sampleNum / samples_;
		delayCnt_ = (int) (delayTime / updateMouthTime_);

		rawData_ = new float[samples_ * clip.channels];
		clip.GetData(rawData_, 0);

		isTalking_ = true;
		Clear();

		position_ = 0;
		playClip_ = clip;
		audio_.clip = playClip_;
		audio_.Play();
	}
Code example #35
    public void Play(AudioClip clip)
    {
        if (isTalking_) {
            Debug.LogWarning("Now talking!");
            return;
        }

        samples_ = clip.samples;
        df_ = (float) clip.frequency / sampleNum;
        updateMouthTime_ = clip.length * sampleNum / samples_;
        delayCnt_ = (int) (delayTime / updateMouthTime_);

        rawData_ = new float[samples_ * clip.channels];
        clip.GetData(rawData_, 0);

        isTalking_ = true;
        Clear();

#if (UNITY_PRO_LICENSE && USE_PRO_FUNCTION)
        position_ = 0;
        playClip_ = clip;
#else
        playClip_ = AudioClip.Create("tmp", samples_, clip.channels, clip.frequency,
            is3dSound, true, OnAudioRead, OnAudioSetPosition);
#endif
        audio_.clip = playClip_;
        audio_.Play();
    }
Code example #36
File: LeanAudio.cs Project: 4026/worldgen
    public static void printOutAudioClip( AudioClip audioClip, ref AnimationCurve curve, float scaleX = 1f )
    {
        // Debug.Log("Audio channels:"+audioClip.channels+" frequency:"+audioClip.frequency+" length:"+audioClip.length+" samples:"+audioClip.samples);
        float[] samples = new float[audioClip.samples * audioClip.channels];
        audioClip.GetData(samples, 0);
        int i = 0;

        Keyframe[] frames = new Keyframe[samples.Length];
        while (i < samples.Length) {
           frames[i] = new Keyframe( (float)i * scaleX, samples[i] );
           ++i;
        }
        curve = new AnimationCurve( frames );
    }
Code example #37
 public void SetAudioClip(AudioClip newClip)
 {
     clip = newClip;
     if (clip != null)
     {
         _clipChannels = clip.channels;
         _clipData = new float[clip.samples * _clipChannels];
         clip.GetData(_clipData, 0);
     }
     else _clipData = null;
 }