Example #1
    private void SendMicData(RecordingEvent recordingEvent)
    {
        if (recordingEvent.NewSampleCount >= clientSideMicSampleRecorder.SampleRate.Value - 1)
        {
            Debug.LogError("Attempt to send complete mic buffer at once");
            return;
        }
        // Copy from float array to byte array. Each float sample occupies sizeof(float) bytes.
        byte[] newByteData = new byte[recordingEvent.NewSampleCount * sizeof(float)];
        Buffer.BlockCopy(
            recordingEvent.MicSamples, recordingEvent.NewSamplesStartIndex * sizeof(float),
            newByteData, 0,
            recordingEvent.NewSampleCount * sizeof(float));

        try
        {
            // DateTime now = DateTime.Now;
            // Debug.Log($"Send data: {newByteData.Length} bytes ({recordingEvent.NewSampleCount} samples) at {now}:{now.Millisecond}");
            clientMicDataSenderNetworkStream.Write(newByteData, 0, newByteData.Length);
        }
        catch (Exception e)
        {
            Debug.LogException(e);
            Debug.LogError($"Failed sending mic data: {newByteData.Length} bytes ({recordingEvent.NewSampleCount} samples)");
            clientSideConnectRequestManager.CloseConnectionAndReconnect();
        }
    }
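
The sender turns the newest float samples into a byte array before writing them to the NetworkStream. The receiving side is not shown in these examples; the following is a minimal sketch of how received bytes could be converted back into float samples with Buffer.BlockCopy, assuming a hypothetical ReceiveMicData helper and that only whole floats are read (System.Net.Sockets.NetworkStream):

    // Hypothetical receiving counterpart to SendMicData (not part of the project code shown here).
    // Reads available bytes from the stream and converts them back to float samples.
    private float[] ReceiveMicData(NetworkStream stream, byte[] receiveBuffer)
    {
        int receivedByteCount = stream.Read(receiveBuffer, 0, receiveBuffer.Length);

        // Only complete floats are converted; sizeof(float) bytes per sample,
        // mirroring the Buffer.BlockCopy call on the sending side.
        int sampleCount = receivedByteCount / sizeof(float);
        float[] samples = new float[sampleCount];
        Buffer.BlockCopy(receiveBuffer, 0, samples, 0, sampleCount * sizeof(float));
        return samples;
    }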
Example #2
    protected override void OnRecordingEvent(RecordingEvent recordingEvent)
    {
        // Detect the pitch of the sample
        int newSampleLength = recordingEvent.NewSampleCount;

        bufferedNotAnalyzedSampleCount += newSampleLength;

        // Wait until enough new samples are buffered
        if (bufferedNotAnalyzedSampleCount < MinSampleCountToUse)
        {
            return;
        }

        // Do not analyze more than necessary
        if (bufferedNotAnalyzedSampleCount > MaxSampleCountToUse)
        {
            bufferedNotAnalyzedSampleCount = MaxSampleCountToUse;
        }

        // Analyze the newest portion of the not-yet-analyzed MicSamples
        int        startIndex = recordingEvent.MicSamples.Length - bufferedNotAnalyzedSampleCount;
        int        endIndex   = recordingEvent.MicSamples.Length;
        PitchEvent pitchEvent = AudioSamplesAnalyzer.ProcessAudioSamples(recordingEvent.MicSamples, startIndex, endIndex, MicProfile);

        bufferedNotAnalyzedSampleCount = 0;

        // Notify listeners
        pitchEventStream.OnNext(pitchEvent);
    }
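
pitchEventStream.OnNext suggests that pitch events are published through a reactive subject. A minimal sketch of a subscriber, assuming pitchEventStream is exposed as an IObservable<PitchEvent> (e.g., a System.Reactive or UniRx subject) and that PitchEvent has a MidiNote property; both are assumptions, since the stream's declaration is not shown here:

    // Illustrative consumer of the pitch event stream (assumed IObservable<PitchEvent>).
    private IDisposable SubscribeToPitchEvents(IObservable<PitchEvent> pitchEventStream)
    {
        return pitchEventStream.Subscribe(pitchEvent =>
        {
            if (pitchEvent == null)
            {
                // Assumption: the analyzer may report null when no pitch was detected.
                return;
            }
            Debug.Log($"Detected pitch (midi note): {pitchEvent.MidiNote}");
        });
    }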
Example #3
    private void UpdateRecording()
    {
        if (!IsRecording)
        {
            return;
        }

        if (micAudioClip == null)
        {
            Debug.LogError("AudioClip for microphone is null");
            StopRecording();
            return;
        }

        // Fill buffer with raw sample data from microphone
        int currentSamplePosition = UnityEngine.Microphone.GetPosition(MicProfile.Name);

        micAudioClip.GetData(MicSamples, currentSamplePosition);

        // Process the portion that has been buffered by Unity since the last frame.
        // New samples come into the buffer "from the right", i.e., highest index holds the newest sample.
        int newSamplesCount      = GetNewSampleCountInCircularBuffer(lastSamplePosition, currentSamplePosition);
        int newSamplesStartIndex = MicSamples.Length - newSamplesCount;
        int newSamplesEndIndex   = MicSamples.Length - 1;

        ApplyMicAmplification(MicSamples, newSamplesStartIndex, newSamplesEndIndex);

        // Notify listeners
        RecordingEvent recordingEvent = new RecordingEvent(MicSamples, newSamplesStartIndex, newSamplesEndIndex);

        recordingEventStream.OnNext(recordingEvent);

        lastSamplePosition = currentSamplePosition;
    }
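
GetNewSampleCountInCircularBuffer is referenced but not shown. A minimal sketch of such a helper, assuming the circular buffer has the same length as MicSamples and the write position wraps to index 0 at the end of the buffer:

    // Number of samples written since the last known position, accounting for wrap-around.
    // Sketch of the helper that is not shown in these examples; the actual implementation may differ.
    private int GetNewSampleCountInCircularBuffer(int lastSamplePosition, int currentSamplePosition)
    {
        if (currentSamplePosition >= lastSamplePosition)
        {
            // The write position did not wrap around since the last frame.
            return currentSamplePosition - lastSamplePosition;
        }
        // The write position wrapped around to the start of the buffer.
        return (MicSamples.Length - lastSamplePosition) + currentSamplePosition;
    }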
Example #4
 private void HandleNewMicSamples(RecordingEvent recordingEvent)
 {
     if (serverMicDataReceiverEndPoint != null &&
         clientMicDataSender != null &&
         clientMicDataSenderNetworkStream != null)
     {
         SendMicData(recordingEvent);
     }
 }
Example #5
    protected override void OnRecordingEvent(RecordingEvent recordingEvent)
    {
        int   firstBeatToAnalyze = nextBeatToAnalyze;
        float positionInSongInMillisConsideringMicDelay = (float)songAudioPlayer.PositionInSongInMillis - MicDelayInMillis;
        int   currentBeatConsideringMicDelay            = (int)BpmUtils.MillisecondInSongToBeat(songMeta, positionInSongInMillisConsideringMicDelay);

        for (int beatToAnalyze = firstBeatToAnalyze; beatToAnalyze < currentBeatConsideringMicDelay; beatToAnalyze++)
        {
            AnalyzeBeatAndNotify(beatToAnalyze);
            nextBeatToAnalyze = beatToAnalyze + 1;
        }
    }
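
BpmUtils.MillisecondInSongToBeat is not included in these examples. The following is a sketch of a typical conversion from a position in the song (in milliseconds) to a beat number, assuming the tempo is given in beats per minute and beat 0 starts after a gap in milliseconds; the project's actual BpmUtils and SongMeta fields may differ:

    // Converts a position in the song to a (fractional) beat number.
    // Assumption: bpm is beats per minute, gapInMillis is the offset of beat 0 in milliseconds.
    public static double MillisecondInSongToBeat(double bpm, double gapInMillis, double positionInMillis)
    {
        double millisPerBeat = 60000.0 / bpm;
        return (positionInMillis - gapInMillis) / millisPerBeat;
    }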
Example #6
        public static async Task ProcessEvent(RecordingEvent ev, UrlHelper url, HttpContextBase context, dynamic viewBag)
        {
            if (ev.State != RecordingState.Complete)
            {
                return;
            }
            var user      = (ApplicationUser)viewBag.User;
            var dbContext = context.GetOwinContext().Get<ApplicationDbContext>();
            var recording = await Recording.Get(ev.RecordingId);

            user.Greeting = recording.Media;
            await dbContext.SaveChangesAsync();
        }
Example #7
    private void ApplyAmplificationAndNotifyListeners(int newSamplesCount)
    {
        // The buffer is always overwritten completely by Unity. Thus, amplification has to be applied to the whole buffer again.
        // The buffer is filled "from the right", i.e., highest index holds the newest sample.
        if (micProfile.Amplification > 0)
        {
            ApplyAmplification(MicSamples, 0, MicSamples.Length - 1, micProfile.AmplificationMultiplier);
        }

        // Notify listeners
        if (newSamplesCount <= 0)
        {
            return;
        }
        int            newSamplesStartIndex = MicSamples.Length - newSamplesCount;
        int            newSamplesEndIndex   = MicSamples.Length - 1;
        RecordingEvent recordingEvent       = new RecordingEvent(MicSamples, newSamplesStartIndex, newSamplesEndIndex);

        recordingEventStream.OnNext(recordingEvent);
    }
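
ApplyAmplification itself is not shown. A minimal sketch of applying a gain factor to a range of samples, assuming the samples are normalized floats in [-1, 1] that should be clamped to avoid clipping; the project's actual implementation may differ:

    // Multiplies the samples in [startIndex, endIndex] by the given factor and clamps the result
    // to the valid range for normalized audio samples. Sketch of the helper not shown above.
    private static void ApplyAmplification(float[] samples, int startIndex, int endIndex, float multiplier)
    {
        for (int i = startIndex; i <= endIndex; i++)
        {
            samples[i] = Mathf.Clamp(samples[i] * multiplier, -1f, 1f);
        }
    }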
Example #8
    private void UpdateRecording()
    {
        if (!IsRecording.Value)
        {
            return;
        }

        if (micAudioClip == null)
        {
            Debug.LogError("AudioClip for microphone is null");
            StopRecording();
            return;
        }

        // Fill buffer with raw sample data from microphone
        int currentSamplePosition = Microphone.GetPosition(DeviceName.Value);

        micAudioClip.GetData(MicSampleBuffer, currentSamplePosition);
        if (currentSamplePosition == lastSamplePosition)
        {
            // No new samples yet (or all samples changed, which is unlikely because the buffer has a length of 1 second and FPS should be > 1).
            return;
        }

        // Process the portion that has been buffered by Unity since the last frame.
        // New samples come into the buffer "from the right", i.e., highest index holds the newest sample.
        int newSamplesCount      = GetNewSampleCountInCircularBuffer(lastSamplePosition, currentSamplePosition);
        int newSamplesStartIndex = MicSampleBuffer.Length - newSamplesCount;
        int newSamplesEndIndex   = MicSampleBuffer.Length - 1;

        // Notify listeners
        RecordingEvent recordingEvent = new RecordingEvent(MicSampleBuffer, newSamplesStartIndex, newSamplesEndIndex);

        recordingEventStream.OnNext(recordingEvent);

        lastSamplePosition = currentSamplePosition;
    }
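
The recording setup that creates micAudioClip is not part of these examples. A minimal sketch of how a looping one-second microphone clip could be started with Unity's Microphone API (the StartRecording signature, field names, and the 1 second buffer length are assumptions based on the comments above):

    // Starts recording into a looping AudioClip with a 1 second buffer, which matches the
    // circular-buffer handling in UpdateRecording. Sketch only; not the project's actual code.
    private void StartRecording(string deviceName, int sampleRate)
    {
        // Microphone.Start(deviceName, loop, lengthSec, frequency)
        micAudioClip = Microphone.Start(deviceName, true, 1, sampleRate);
        lastSamplePosition = 0;
    }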
Example #9
 protected void OnRecordingEvent(RecordingEventArgs e)
 {
     RecordingEvent?.Invoke(this, e);
 }
 private async Task ProcessCalledToContactEvent(RecordingEvent ev, TinyIoCContainer container)
 {
     if (ev.Status == "complete")
     {
         var dbContext = container.Resolve<DatabasebContext>();
         var call = await Call.Get(ev.CallId);
         var recording = await Bandwidth.Net.Model.Recording.Get(ev.RecordingId);
         var items = recording.Media.Split('/');
         var name = items[items.Length - 1];
         dbContext.Recordings.Add(new Recording
         {
             Url = "/media/" + name,
             StartTime = ev.StartTime,
             EndTime = ev.EndTime,
             PhoneNumber = call.To
         });
         await dbContext.SaveChangesAsync();
     }
 }
 private async Task ProcessCalledToOwnerEvent(RecordingEvent ev, TinyIoCContainer container)
 {
     if (ev.Status == "complete")
     {
         var dbContext = container.Resolve<DatabasebContext>();
         var recording = await Bandwidth.Net.Model.Recording.Get(ev.RecordingId);
         var items = recording.Media.Split('/');
         var name = items[items.Length - 1];
         dbContext.VoiceMessages.Add(new VoiceMessage
         {
             Url = $"/media/{name}",
             StartTime = ev.StartTime,
             EndTime = ev.EndTime,
             PhoneNumber = AgentPhoneNumber
         });
         await dbContext.SaveChangesAsync();
     }
 }
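
The first method of this example raises a plain .NET event. A minimal sketch of how the event declaration and a subscriber could look, assuming an illustrative RecordingEventArgs type and a Recorder class that owns the event (none of these names are confirmed by the snippet):

 // Illustrative event declaration and subscription; names and properties are assumptions.
 public class RecordingEventArgs : EventArgs
 {
     public string RecordingId { get; set; }
 }

 public class Recorder
 {
     // Raised whenever a recording-related update arrives.
     public event EventHandler<RecordingEventArgs> RecordingEvent;

     protected void OnRecordingEvent(RecordingEventArgs e)
     {
         RecordingEvent?.Invoke(this, e);
     }
 }

 // Usage: recorder.RecordingEvent += (sender, e) => Console.WriteLine($"Recording: {e.RecordingId}");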