示例#1
0
        /// <summary>
        /// Tracks an in-flight swipe: maps the gesture progress to a fractional shelf
        /// index, allows one page of overshoot past either end, and records which way
        /// the shelf is moving before committing the new index.
        /// </summary>
        /// <param name="progress">Signed transition progress; positive progress moves the index down from <c>StartIndex</c>.</param>
        public override void OnTransitionProgress(double progress)
        {
            // Clamp to [-1, MaxPageIndex + 1] so the shelf can rubber-band one page past each edge.
            var nextIndex = MathX.Clamp(StartIndex - progress, -1, HBS.Search.Callback.MaxPageIndex + 1);

            if (nextIndex > Shelf.SelectedIndex)
            {
                SwipeDirection = Direction.Right;
            }
            else
            {
                SwipeDirection = Direction.Left;
            }

            Shelf.SelectedIndex = nextIndex;
        }
示例#2
0
        /// <summary>
        /// Builds the drag transform for the current horizontal gesture: interpolates
        /// from identity toward a one-view-width translation in the drag direction.
        /// </summary>
        /// <param name="cummulatedDeltaX">Accumulated horizontal delta; sign gives direction, magnitude the progress (assumed normalized so 1 == a full width — TODO confirm with caller).</param>
        /// <returns>A matrix between identity (no drag) and the full-width translation.</returns>
        private IMatrix GetTransform(double cummulatedDeltaX)
        {
            var from     = MxM.Identity;
            var to       = MxM.Create(Math.Sign(cummulatedDeltaX) * FVCM.ActualSize.Width, 0);
            var progress = MathX.Clamp(Math.Abs(cummulatedDeltaX), 0, 1);

            return MxM.Progress(ref from, ref to, progress);
        }
示例#3
0
        /// <summary>
        /// Snaps the shelf to a whole page when the swipe transition finishes: rounds
        /// the fractional index in the direction of the swipe, clamps it to the valid
        /// page range, and animates the shelf to that page.
        /// NOTE(review): the "Tranistion" typo comes from the base-class virtual; renaming would break the override.
        /// </summary>
        public override void OnTranistionCompleted()
        {
            var index = Shelf.SelectedIndex;

            // Round toward the page being revealed by the swipe.
            if (SwipeBehaviour.Direction == Direction.Left)
            {
                index = Math.Ceiling(index);
            }
            else if (SwipeBehaviour.Direction == Direction.Right)
            {
                index = Math.Floor(index);
            }

            index = MathX.Clamp(index, 0, HBS.Search.Callback.MaxPageIndex);

            Shelf.AnimateSelectedIndexTo(index, 0.5, AnimationTransitions.CircEaseOut);
        }
示例#4
0
 //Record one chunk from the voice audio of each user
 //For every user with a registered AudioOutput, pulls one engine-sized chunk of
 //samples from that output's audio stream, clamps the samples to [-1, 1], and
 //hands the chunk to the user's recorder for conversion and writing.
 public void RecordAudio()
 {
     //Debug.Log("Recording audio");
     foreach (var item in audio_outputs)
     {
         AudioOutput audio_output = item.Value;
         RefID       user_id      = item.Key;
         if (audio_output != null)
         {
             //Resize the shared scratch buffer to the engine's current chunk size (discarding old contents).
             buffer.EnsureSize <float>(metagen_comp.Engine.AudioSystem.BufferSize, false);
             if (audio_output.Source.Target != null)
             {
                 //AudioSystemConnector.InformOfDSPTime(AudioSettings.dspTime);
                 //NOTE(review): presumably advances the engine's audio clock so the
                 //stream below has fresh samples to read — confirm against engine docs.
                 FrooxEngine.Engine.Current.AudioRead();
                 audio_sources_ready = true;
                 AudioStream <MonoSample> stream = (AudioStream <MonoSample>)audio_output.Source.Target;
                 stream.Read <MonoSample>(buffer.AsMonoBuffer());
                 //if (buffer.Length > stream.MissedSamples)
                 //{
                 //    buffer = buffer.Take(buffer.Length - stream.MissedSamples).ToArray();
                 //buffer[0] = 0f;
                 //buffer[1] = 0f;
                 //buffer[2] = 0f;
                 //buffer[3] = 0f;
                 //Console.WriteLine("[{0}]", string.Join(", ", buffer));
                 //Clamp every sample into the valid [-1, 1] range before writing.
                 for (int i = 0; i < buffer.Length; i++)
                 {
                     buffer[i] = MathX.Clamp(buffer[i], -1, 1);
                 }
                 //UniLog.Log(buffer.Length);
                 //Synchronous write; the Task-based variants below were earlier async attempts.
                 audio_recorders[user_id].ConvertAndWrite(buffer);
                 //Task.Run(() => { audio_recorders[user_id].ConvertAndWrite(buffer); });
                 //metagen_comp.StartTask(async () => { audio_recorders[user_id].ConvertAndWrite(buffer); });
                 //}
             }
             else
             {
                 UniLog.Log("Audio Output Source target was null! (hmm should we restart it?). Did it happen coz a user left (in which case we shouldn't restart it), or something else?");
                 //UniLog.Log("Restarting audio recording coz audio output source target was null!");
                 //StopRecording();
                 //StartRecording();
             }
         }
     }
 }
示例#5
0
        /// <summary>
        /// Snaps the shelf to a whole page after the swipe transition: rounds the
        /// fractional index toward the swipe direction, clamps to the valid range, and
        /// animates there. When the snap animation completes, performance mode is
        /// switched back off.
        /// NOTE(review): the "Tranistion" typo comes from the base-class virtual; renaming would break the override.
        /// </summary>
        public override void OnTranistionCompleted()
        {
            var index = Shelf.SelectedIndex;

            // Round toward the page being revealed by the swipe.
            if (SwipeBehaviour.Direction == Direction.Left)
            {
                index = Math.Ceiling(index);
            }
            else if (SwipeBehaviour.Direction == Direction.Right)
            {
                index = Math.Floor(index);
            }

            index = MathX.Clamp(index, 0, HBS.Search.Callback.MaxPageIndex);

            var animation = Shelf.AnimateSelectedIndexTo(index, 0.5, AnimationTransitions.CircEaseOut);

            // NOTE(review): assumes TogglePerformance(false) restores the normal
            // rendering mode once the snap finishes — confirm its semantics.
            animation.Complete += (sender, e) => TogglePerformance(false);
        }
示例#6
0
 //Record one chunk from the voice audio of each user
 //Voice-activity-gated recording: each user's chunk is written only while that
 //user is "recording". Recording starts when a non-silent chunk arrives (the
 //previous chunk is prepended so the onset isn't clipped) and stops once the
 //number of consecutive all-zero chunks exceeds zero_buffer_num_threshold.
 public void InteractAudio()
 {
     foreach (var item in audio_outputs)
     {
         AudioOutput audio_output = item.Value;
         RefID       user_id      = item.Key;
         if (audio_output != null)
         {
             //Resize the shared scratch buffer to the engine's current chunk size (discarding old contents).
             buffer.EnsureSize <float>(metagen_comp.Engine.AudioSystem.BufferSize, false);
             if (audio_output.Source.Target != null)
             {
                 //NOTE(review): presumably advances the engine's audio clock so the
                 //stream below has fresh samples to read — confirm against engine docs.
                 FrooxEngine.Engine.Current.AudioRead();
                 audio_sources_ready = true;
                 AudioStream <MonoSample> stream = (AudioStream <MonoSample>)audio_output.Source.Target;
                 stream.Read <MonoSample>(buffer.AsMonoBuffer());
                 //Clamp into [-1, 1] and detect silence in a single pass.
                 //(Replaces the old Math.Pow square-and-compare loop: the squared max
                 //was only ever tested against zero, which is equivalent to asking
                 //whether any sample is non-zero.)
                 bool has_signal = false;
                 for (int i = 0; i < buffer.Length; i++)
                 {
                     buffer[i] = MathX.Clamp(buffer[i], -1, 1);
                     if (buffer[i] != 0f)
                     {
                         has_signal = true;
                     }
                 }
                 if (isRecording[user_id])
                 {
                     audio_recorders[user_id].ConvertAndWrite(buffer);
                     //Counting the number of consecutive all-zero buffers.
                     //NOTE(review): number_of_zero_buffers is shared across all users in
                     //this loop; with more than one user the counts interleave — confirm
                     //whether this should be per-user (e.g. Dictionary<RefID, int>).
                     if (has_signal)
                     {
                         number_of_zero_buffers = 0;
                     }
                     else
                     {
                         number_of_zero_buffers += 1;
                     }
                     if (number_of_zero_buffers > zero_buffer_num_threshold)
                     {
                         StopWritingFile(user_id);
                     }
                 }
                 else if (has_signal)
                 {
                     StartWritingFile(user_id);
                     if (prev_buffer != null)
                     {
                         //Prepend the chunk just before the detected onset.
                         audio_recorders[user_id].ConvertAndWrite(prev_buffer);
                     }
                     audio_recorders[user_id].ConvertAndWrite(buffer);
                 }
                 //BUGFIX: 'buffer' is a shared scratch array overwritten by the next
                 //stream.Read, so storing the reference ('prev_buffer = buffer;') made
                 //prev_buffer alias the *current* chunk — the real previous chunk was
                 //lost and the onset chunk got written twice. Keep a copy instead.
                 //(assumes buffer/prev_buffer are float[] — confirm field declarations)
                 if (prev_buffer == null || prev_buffer.Length != buffer.Length)
                 {
                     prev_buffer = new float[buffer.Length];
                 }
                 Array.Copy(buffer, prev_buffer, buffer.Length);
             }
             else
             {
                 UniLog.Log("Audio Output Source target was null! (hmm should we restart it?). Did it happen coz a user left (in which case we shouldn't restart it), or something else?");
             }
         }
     }
 }