示例#1
0
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // In-place echo/delay effect: each output sample is a dry/wet mix of
            // the input and a delayed sample pulled from _echoBuffer; the input
            // (plus feedback of the delayed sample) is pushed back onto the queue.
            // NOTE(review): assumes _echoBuffer was pre-seeded with enough zero
            // samples to establish the delay length — Dequeue on an empty queue
            // would throw. Confirm in the effect's initialization.
            AudioFrame frame = context.InputFrame;

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.ReadWrite))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);
                    float *dataInFloat       = (float *)dataInBytes;
                    // buffer.Length (valid bytes), not capacity, determines how many samples to process
                    int    dataInFloatLength = (int)buffer.Length / sizeof(float);

                    // read parameters once so they stay constant for the whole frame
                    float currentWet      = this.Mix;
                    float currentDry      = 1.0f - currentWet;
                    float currentFeedback = this.Feedback;

                    // Process audio data
                    float sample, echoSample, outSample;
                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        // read values
                        sample     = dataInFloat[i];
                        echoSample = _echoBuffer.Dequeue();

                        // compute output sample (dry + wet mix), written in place
                        outSample      = (currentDry * sample) + (currentWet * echoSample);
                        dataInFloat[i] = outSample;

                        // compute delay sample: input plus scaled feedback of the echo
                        echoSample = sample + (currentFeedback * echoSample);
                        _echoBuffer.Enqueue(echoSample);
                    }
                }
        }
        private unsafe AudioFrame ReadAudioData(uint samples)
        {
            // Creates an AudioFrame and fills it with raw bytes read from
            // audioStream; stops frameInputNode once the stream is exhausted.
            // Buffer size is (number of samples) * (size of each sample)
            // NOTE(review): sizeof(byte) * 2 allocates 2 bytes per sample —
            // confirm this matches the stream's actual sample format (a
            // float-based graph would need sizeof(float)).
            uint       bufferSize = samples * sizeof(byte) * 2;
            AudioFrame frame      = new Windows.Media.AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    // Read audio data from the stream and copy it to the AudioFrame buffer.
                    // readBytes is sized to the frame capacity, so the copy below cannot overrun.
                    var  readBytes = new byte[capacityInBytes];
                    uint bytesRead = audioStream.Read(readBytes);

                    if (bytesRead == 0)
                    {
                        // End of stream: stop feeding the input node.
                        frameInputNode.Stop();
                    }

                    for (int i = 0; i < bytesRead; i++)
                    {
                        dataInBytes[i] = readBytes[i];
                    }
                }

            return(frame);
        }
示例#3
0
        public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // In-place equalizer: runs every sample of the frame through the
            // cascade of per-channel filter stages stored in `filters`.
            unsafe
            {
                AudioFrame inputFrame = context.InputFrame;

                using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.ReadWrite))
                    using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                    {
                        ((IMemoryBufferByteAccess)inputReference).GetBuffer(out byte *inputDataInBytes, out uint inputCapacity);

                        float *inputDataInFloat  = (float *)inputDataInBytes;
                        int    dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                        // Process audio data
                        for (int n = 0; n < dataInFloatLength; n++)
                        {
                            // Samples are interleaved, so n % channels selects the
                            // channel this sample belongs to.
                            int ch = n % channels;

                            // cascaded filter to perform eq: each band's filter is
                            // applied in sequence to the same sample
                            for (int band = 0; band < bandCount; band++)
                            {
                                inputDataInFloat[n] = filters[ch, band].Transform(inputDataInFloat[n]);
                            }
                        }
                    }
            }
        }
示例#4
0
        /// <summary>
        /// When audioFrameUpdateMinimum is reached by audioFrameUpdateCount, this method gets the current audio frame, obtains the data from it
        /// and calculates the raw audio level from -100 to 0, reporting it via NewRawSoundLevel.
        /// </summary>
        private static unsafe void Graph_QuantumStarted(AudioGraph sender, object args)
        {
            audioFrameUpdateCount++;
            if (audioFrameUpdateCount >= audioFrameUpdateMinimum)
            {
                AudioFrame audioFrame = frameOutputNode.GetFrame();
                float[]    floatData;
                using (AudioBuffer audioBuffer = audioFrame.LockBuffer(AudioBufferAccessMode.Write))
                    using (IMemoryBufferReference reference = audioBuffer.CreateReference())
                    {
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);

                        float *unsafeFloatData = (float *)dataInBytes;
                        floatData = new float[capacity / sizeof(float)];

                        // Copy the samples out of the unsafe buffer so the lock can
                        // be released before the level computation below.
                        for (int i = 0; i < floatData.Length; i++)
                        {
                            floatData[i] = unsafeFloatData[i];
                        }
                    }

                // FIX: guard against an empty frame. The original divided by
                // floatData.Length unconditionally, so a zero-length frame
                // produced 0/0 = NaN which was then passed to NewRawSoundLevel.
                if (floatData.Length > 0)
                {
                    // Mean absolute amplitude converted to a dB-style level.
                    double soundLevel = 0f;
                    foreach (float sample in floatData)
                    {
                        soundLevel += Math.Abs(sample);
                    }
                    soundLevel = Math.Log10(soundLevel / floatData.Length) * 20;

                    NewRawSoundLevel(soundLevel);
                }

                audioFrameUpdateCount = 0;
            }
        }
示例#5
0
        unsafe private void ProcessAudioFrame(AudioMediaFrame audioMediaFrame)
        {
            // Locks the media frame's audio buffer for reading and derives the
            // sample count from the frame duration and sample rate.
            // NOTE(review): dataInFloat and sampleCount are computed but never
            // used here — presumably a skeleton to be filled in with actual
            // per-sample processing.
            using (AudioFrame audioFrame = audioMediaFrame.GetAudioFrame())
                using (AudioBuffer buffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
                    using (IMemoryBufferReference reference = buffer.CreateReference())
                    {
                        byte * dataInBytes;
                        uint   capacityInBytes;
                        float *dataInFloat;


                        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                        // The requested format was float
                        dataInFloat = (float *)dataInBytes;

                        // Get the number of samples by multiplying the duration by sampling rate:
                        // duration [s] x sampling rate [samples/s] = # samples

                        // Duration can be gotten off the frame reference OR the audioFrame
                        TimeSpan duration = audioMediaFrame.FrameReference.Duration;

                        // frameDurMs is in milliseconds, while SampleRate is given per second.
                        // NOTE(review): TotalMilliseconds is truncated to a whole
                        // uint here, so sub-millisecond precision is lost and
                        // sampleCount may be off by a few samples per frame.
                        uint frameDurMs  = (uint)duration.TotalMilliseconds;
                        uint sampleRate  = audioMediaFrame.AudioEncodingProperties.SampleRate;
                        uint sampleCount = (frameDurMs * sampleRate) / 1000;
                    }
        }
示例#6
0
        /// <summary>
        /// Generates a frame of sine-wave data for the necessary quantity of samples.
        /// </summary>
        /// <param name="samples">Sample count</param>
        /// <returns>AudioFrame of sample count</returns>
        public static unsafe AudioFrame GenerateAudioData(uint samples)
        {
            // Buffer size is (number of samples) * (size of each sample) * (number of channels)
            uint       bufferSize = samples * sizeof(float) * 2;
            AudioFrame frame      = new AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint _);

                    // Cast to float since the data we are generating is float
                    float *dataInFloat = (float *)dataInBytes;

                    float  freq            = 17000; // choosing to generate frequency of 17kHz
                    float  amplitude       = 0.3f;
                    int    sampleRate      = (int)outgraph.EncodingProperties.SampleRate;
                    // Phase advance per sample: 2*pi*f / fs
                    double sampleIncrement = (freq * (Math.PI * 2)) / sampleRate;

                    // Generate a 17kHz sine wave and populate the values in the memory buffer
                    // NOTE(review): only `samples` floats are written although the
                    // buffer was sized for samples * 2 (two channels); the second
                    // half stays zeroed — confirm whether both channels should be
                    // filled (interleaved) here.
                    for (int i = 0; i < samples; i++)
                    {
                        double sinValue = amplitude * Math.Sin(theta);
                        dataInFloat[i] = (float)sinValue;
                        // theta is not local, so phase carries across calls and
                        // consecutive frames stay continuous
                        theta         += sampleIncrement;
                    }
                }

            return(frame);
        }
示例#7
0
        //unsafe private void M_AudioGraph_QuantumProcessed(AudioGraph sender, object args)
        //{
        //}

        unsafe private void M_AudioGraph_QuantumStarted(AudioGraph sender, object args)
        {
            // Copies the current quantum's samples into m_QuantumSamples,
            // records the buffer sizes, then asks AudioCanvas to redraw.
            // draw every n frames
            //if (fctr++ % 5 == 0)
            //{
            using (AudioFrame audioFrame = this.m_AudioFrameOutputNode.GetFrame())
                using (AudioBuffer audioBuffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
                    using (IMemoryBufferReference memBufferRef = audioBuffer.CreateReference())
                    {
                        // NOTE(review): `as` cast without a null check — if the
                        // reference did not implement IMemoryBufferByteAccess this
                        // would throw NullReferenceException at GetBuffer below.
                        IMemoryBufferByteAccess byteAccess = memBufferRef as IMemoryBufferByteAccess;

                        byte *byteBuffer;
                        uint  capacity;

                        byteAccess.GetBuffer(out byteBuffer, out capacity);

                        float *floatBuffer = (float *)byteBuffer;

                        // Copy one full quantum (all channels, interleaved).
                        // NOTE(review): assumes m_QuantumSamples is at least
                        // SamplesPerQuantum * ChannelCount long and that the
                        // buffer holds that many floats — confirm where both are sized.
                        for (int i = 0; i < this.m_AudioGraph.SamplesPerQuantum * this.m_AudioGraph.EncodingProperties.ChannelCount; i++)
                        {
                            this.m_QuantumSamples[i] = floatBuffer[i];
                        }

                        // Expose raw sizes for diagnostics/display.
                        this.m_Capacity = capacity;
                        this.m_abCap    = audioBuffer.Capacity;
                        this.m_abLen    = audioBuffer.Length;
                    }
            AudioCanvas.Invalidate();
            //}
        }
示例#8
0
        private unsafe AudioFrame GenerateAudioData(uint samples)
        {
            // Pulls 16-bit (short) samples from _player and converts them to
            // floats in a newly allocated two-channel AudioFrame.
            var bufferSize = samples * sizeof(float) * 2;
            var frame      = new AudioFrame(bufferSize);

            // Reuse the staging array across calls; only reallocate when the
            // requested size changes.
            _buffer = _buffer?.Length != samples * 2 ? new short[samples * 2] : _buffer;
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    float *dataInFloat;
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                    dataInFloat = (float *)dataInBytes;
                    _player.GetBuffer(_buffer);

                    for (var i = 0; i < _buffer.Length; i++)
                    {
                        // 0.00003 ≈ 1/32768: scales 16-bit PCM into roughly [-1, 1)
                        dataInFloat[i] = _buffer[i] * 0.00003f;                  // multiplying is said to be faster (than dividing)
                    }

                    //foreach (float f in _buffer.Select(a => a * 0.00003f))
                    //	*dataInFloat++ = f;
                }

            return(frame);
        }
        unsafe internal AudioFrame GenerateAudioData(uint samples)
        {
            // One float per sample: this node generates single-channel (mono)
            // audio. For multi-channel, the size would be multiplied by the
            // channel count.
            uint       byteCount = samples * sizeof(float);
            AudioFrame frame     = new Windows.Media.AudioFrame(byteCount);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Grab the raw byte pointer backing the AudioFrame.
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *rawBytes, out uint _);

                    // The graph consumes 32-bit float PCM, so write through a float view.
                    float *samplesOut = (float *)rawBytes;

                    const float amplitude  = 0.3f;
                    int         sampleRate = (int)parentGraph.EncodingProperties.SampleRate;
                    // Phase advance per sample: 2*pi*f / fs.
                    double phaseStep = (frequency * (Math.PI * 2)) / sampleRate;

                    // Fill the buffer with a sine wave, carrying the phase in
                    // `angle` across calls so consecutive frames are continuous.
                    for (uint n = 0; n < samples; n++)
                    {
                        samplesOut[n] = (float)(amplitude * Math.Sin(angle));
                        angle        += phaseStep;
                    }
                }

            return frame;
        }
示例#10
0
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // Copies the incoming interleaved stereo samples into _chart so a
            // consumer (via propertySet["chart"]) can render them.
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte *inputDataInBytes;
                    uint  inputCapacity;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);

                    float *inputDataInFloat = (float *)inputDataInBytes;

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    // FIX: re-allocate when the quantum size changes. The original
                    // allocated _chart once from the first frame's length and would
                    // throw IndexOutOfRangeException if a later frame were larger.
                    if (_chart == null || _chart.Length != dataInFloatLength)
                    {
                        _chart = new float[dataInFloatLength];
                        propertySet["chart"] = _chart;
                    }

                    // FIX: `i + 1 < length` guards the right-channel access so an
                    // odd sample count cannot index past the end of the arrays.
                    for (int i = 0; i + 1 < dataInFloatLength; i += 2)
                    {
                        _chart[i]     = inputDataInFloat[i];     // left
                        _chart[i + 1] = inputDataInFloat[i + 1]; // right
                    }
                }
        }
示例#11
0
 unsafe private void ProcessInputFrame(AudioFrame frame)
 {
     // Appends the first channel's samples from this frame to audioData,
     // advancing audioDataCurrentPosition; extra samples are silently
     // dropped once audioData is full.
     using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
     using (IMemoryBufferReference reference = buffer.CreateReference())
     {
         // We get data from current buffer
         ((IMemoryBufferByteAccess)reference).GetBuffer(
             out byte* dataInBytes,
             out uint capacityInBytes
             );
         // We discard first frame; it's full of zeros because of latency
         if (audioGraph.CompletedQuantumCount == 1) return;
         float* dataInFloat = (float*)dataInBytes;
         uint capacityInFloat = capacityInBytes / sizeof(float);
         // Number of channels defines step between samples in buffer
         uint step = fileInputNode.EncodingProperties.ChannelCount;
         // We transfer audio samples from buffer into audioData;
         // stepping by the channel count keeps only the first channel
         // of the interleaved data
         for (uint i = 0; i < capacityInFloat; i += step)
         {
             if (audioDataCurrentPosition < audioData.Length)
             {
                 audioData[audioDataCurrentPosition] = dataInFloat[i];
                 audioDataCurrentPosition++;
             }
         }
     }
 }
示例#12
0
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            // Measures the peak amplitude of this quantum, maps it to a 0-255
            // brightness value, and shows the result in the UI.
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // get hold of the buffer pointer
                    byte *dataInBytes;
                    uint  capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes,
                                                                   out capacityInBytes);

                    var dataInFloat = (float *)dataInBytes;

                    // Find the peak (largest absolute sample) in this quantum.
                    float max = 0;
                    for (int n = 0; n < graph.SamplesPerQuantum; n++)
                    {
                        max = Math.Max(Math.Abs(dataInFloat[n]), max);
                    }
                    currentPeak = max;

                    float x = currentPeak * 1000;

                    double Bri = Math.Pow(x, 3);                // Sensitivity slider value

                    // FIX: clamp before the byte cast. For loud audio (peak near
                    // 1.0) Bri reaches ~1e9 and the unchecked (byte) conversion
                    // wrapped to an arbitrary value, making the light flicker
                    // instead of saturating at full brightness.
                    byte Brightness = (byte)Math.Round(Math.Min(Bri, 255d), 0);

                    Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        OutputText.Text = Brightness.ToString();
                    });
                }
        }
        public unsafe void ProcessFrame(ProcessAudioFrameContext context)
        {
            // Runs every input sample through ProcessFilterSample and writes
            // the result to the output frame; the input frame is not modified.
            AudioFrame inputFrame  = context.InputFrame;
            AudioFrame outputFrame = context.OutputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                   outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                       outputReference = outputBuffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out var inputDataInBytes, out _);
                    ((IMemoryBufferByteAccess)outputReference).GetBuffer(out var outputDataInBytes, out _);

                    float *inputDataInFloat  = (float *)inputDataInBytes;
                    float *outputDataInFloat = (float *)outputDataInBytes;

                    // Process audio data
                    // NOTE(review): assumes the output buffer is at least as large
                    // as the input buffer — confirm the effect's pipeline guarantees this.
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        // var inputData = inputDataInFloat[i] * (1.0f - Mix);
                        var inputData = inputDataInFloat[i];
                        outputDataInFloat[i] = ProcessFilterSample(inputData);
                    }
                }
        }
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // Computes the RMS level of one channel of the input frame and
            // publishes it (in dB) as VolumeInDecibels.
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = context.InputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte * inputInBytes;
                    uint   inputCapacity;
                    float *inputInFloats;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputInBytes, out inputCapacity);

                    inputInFloats = (float *)inputInBytes;
                    int   inputLength = (int)inputBuffer.Length / sizeof(float);
                    float sum         = 0;

                    // Only process one channel for now (will average out unless the audio is severely unbalanced between left/right)
                    for (int i = 0; i < inputLength; i += 2)
                    {
                        sum += (inputInFloats[i] * inputInFloats[i]);
                    }
                    // NOTE(review): silence gives rms == 0 and Log10(0) == -Infinity;
                    // an empty buffer gives 0/0 == NaN — confirm consumers of
                    // VolumeInDecibels tolerate both values.
                    double rms = Math.Sqrt(sum / (inputLength / 2));
                    this.VolumeInDecibels = 20 * Math.Log10(rms);
                }
        }
示例#15
0
        unsafe private static void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Obtain the raw byte pointer backing this AudioFrame.
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *rawBytes, out uint byteCapacity);

                    float *samplePtr   = (float *)rawBytes;
                    int    sampleCount = (int)(byteCapacity / sizeof(float));

                    // Snapshot the samples into a managed array so subscribers can
                    // keep using them after the buffer lock is released.
                    var samples = new float[sampleCount];
                    for (int n = 0; n < sampleCount; n++)
                    {
                        samples[n] = samplePtr[n];
                    }

                    InputRecieved?.Invoke(null, samples);
                }
        }
示例#16
0
        /// <summary>
        /// Handle frame of mic input: snapshots the samples, computes FFT band
        /// levels (unless muted), and raises InputRecieved with the raw data.
        /// </summary>
        /// <param name="frame">Microphone audio frame</param>
        private static unsafe void ProcessFrameOutput(AudioFrame frame)
        {
            float[] dataInFloats;
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);

                    float *dataInFloat = (float *)dataInBytes;
                    dataInFloats = new float[capacityInBytes / sizeof(float)];

                    // Copy out of the unsafe buffer so the data survives the lock.
                    for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                    {
                        dataInFloats[i] = dataInFloat[i];
                    }
                }

            // Don't bother if muted
            if (LocalState.VoiceState.SelfMute || LocalState.VoiceState.ServerMute)
            {
                AudioInSpec1   = 0;
                AudioInSpec2   = 0;
                AudioInSpec3   = 0;
                AudioInSpec4   = 0;
                AudioInSpec5   = 0;
                AudioInSpec6   = 0;
                AudioInSpec7   = 0;
                AudioInSpec8   = 0;
                AudioInSpec9   = 0;
                AudioInAverage = 0;
            }
            else
            {
                // Determine FFT data
                List <float[]> amplitudeData = FFT.Processing.HelperMethods.ProcessFrameOutput(frame);
                List <float[]> channelData   = FFT.Processing.HelperMethods.GetFftData(FFT.Processing.HelperMethods.ConvertTo512(amplitudeData, ingraph), ingraph);

                float[] leftChannel = channelData[1];

                // Assign each FFT data out channel
                AudioInSpec1   = HelperMethods.Max(leftChannel, 0, 1);
                AudioInSpec2   = HelperMethods.Max(leftChannel, 2, 3);
                AudioInSpec3   = HelperMethods.Max(leftChannel, 3, 4);
                AudioInSpec4   = HelperMethods.Max(leftChannel, 4, 5);
                AudioInSpec5   = HelperMethods.Max(leftChannel, 5, 6);
                AudioInSpec6   = HelperMethods.Max(leftChannel, 7, 8);
                AudioInSpec7   = HelperMethods.Max(leftChannel, 9, 10);
                AudioInSpec8   = HelperMethods.Max(leftChannel, 10, 12);
                AudioInSpec9   = HelperMethods.Max(leftChannel, 14, 26);
                // FIX: the original summed AudioInSpec5 twice (ten terms divided
                // by nine), skewing the average toward band 5.
                AudioInAverage = (AudioInSpec1 + AudioInSpec2 + AudioInSpec3 + AudioInSpec4 + AudioInSpec5 + AudioInSpec6 + AudioInSpec7 + AudioInSpec8 + AudioInSpec9) / 9;
            }

            InputRecieved?.Invoke(null, dataInFloats);
        }
示例#17
0
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // Splits the incoming buffer into video-frame-sized slices and
            // enqueues one (left dB, right dB) volume pair per video vertical
            // blank into the shared "dataQueue" property.
            const int videoFrameRate = 60; // TODO: we should probably measure this

            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte * inputInBytes;
                    uint   inputCapacity;
                    float *inputInFloats;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputInBytes, out inputCapacity);

                    inputInFloats = (float *)inputInBytes;
                    int inputLengthSamples = (int)inputBuffer.Length / sizeof(float);

                    // Audio samples per video vertical blank at the assumed frame rate.
                    int samplesPervBlank = (int)((float)currentEncodingProperties.SampleRate / (float)videoFrameRate);

                    // FIX: use TotalMilliseconds, not Milliseconds. Milliseconds is
                    // only the millisecond COMPONENT (0-999) of the TimeSpan, so
                    // any frame lasting one second or more computed far too few
                    // vBlanks and dropped volume data.
                    int numVBlanksForCurrentAudioBuffer = (int)Math.Ceiling(((float)context.InputFrame.Duration.Value.TotalMilliseconds / ((1.0f / (float)videoFrameRate) * 1000)));

                    var volumeSetLeft  = new double[numVBlanksForCurrentAudioBuffer];
                    var volumeSetRight = new double[numVBlanksForCurrentAudioBuffer];

                    //Left Channel
                    CalcAudioVolumedBPerVBlank(inputInFloats, inputLengthSamples, samplesPervBlank, volumeSetLeft, 0, (int)currentEncodingProperties.ChannelCount);

                    if (currentEncodingProperties.ChannelCount == 2)
                    {
                        //Right Channel
                        CalcAudioVolumedBPerVBlank(inputInFloats, inputLengthSamples, samplesPervBlank, volumeSetRight, 1, (int)currentEncodingProperties.ChannelCount);
                    }

                    // Publish under the shared lock so the UI reader sees a
                    // consistent queue.
                    lock (PassthroughEffect.GetBadLock())
                    {
                        for (var i = 0; i < numVBlanksForCurrentAudioBuffer; i++)
                        {
                            ((Queue <Tuple <double, double> >) this.propertySet["dataQueue"]).Enqueue(new Tuple <double, double>(volumeSetLeft[i], volumeSetRight[i]));
                        }
                    }
                }
        }
        internal unsafe VisualizationData(AudioFrame frame)
        {
            // Locks the frame's buffer and keeps the lock alive for the
            // lifetime of this object so m_pData remains valid.
            // NOTE(review): m_Buffer and m_BufferReference are IDisposable and
            // are stored as fields — confirm this type disposes them (e.g. in
            // its own Dispose), otherwise the buffer lock leaks.
            m_Buffer          = frame.LockBuffer(AudioBufferAccessMode.Read);
            m_BufferReference = m_Buffer.CreateReference();
            byte *pData;
            uint  capacity;

            ((IMemoryBufferByteAccess)m_BufferReference).GetBuffer(out pData, out capacity);
            m_pData        = (float *)pData;
            // Number of valid floats (based on Length, the valid-data size).
            m_DataCapacity = m_Buffer.Length / sizeof(float);

            // Step between data elements, read from a well-known extended
            // property GUID — presumably written by the frame's producer; verify.
            m_DataStep = (uint)frame.ExtendedProperties["{3F692E37-FC20-48DD-93D2-2234E1B1AA23}"];
        }
示例#19
0
        //</SnippetQuantumProcessed>


        //<SnippetProcessFrameOutput>
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            // Documentation snippet: demonstrates how to obtain a float pointer
            // to an AudioFrame's underlying buffer. The pointer is acquired but
            // intentionally not used further here.
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    float *dataInFloat;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    dataInFloat = (float *)dataInBytes;
                }
        }
示例#20
0
        unsafe public static void AddFrame(float[] framedata, uint samples)
        {
            // Wraps a caller-supplied block of interleaved stereo float samples
            // in a new AudioFrame and queues it on frameInputNode.
            // No-op until the graph has been initialized.
            if (!ready)
            {
                return;
            }
            AudioFrame frame = new AudioFrame(samples * 2 * sizeof(float));

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                    // Cast to float since the data we are generating is float
                    float *dataInFloat = (float *)dataInBytes;

                    // FIX: bound the copy by both the source array length and the
                    // destination capacity. The original trusted `samples * 2`
                    // blindly and read past the end of framedata through a fixed
                    // pointer whenever the array was shorter than samples * 2.
                    long copyCount = Math.Min((long)samples * 2, framedata.Length);
                    copyCount = Math.Min(copyCount, capacityInBytes / sizeof(float));

                    for (int i = 0; i < copyCount; i++)
                    {
                        dataInFloat[i] = framedata[i];
                    }
                }
            frameInputNode.AddFrame(frame);
        }
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            // Copies the frame's float samples into the dataInFloat field, then
            // notifies the peer how many bytes arrived.
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;


                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    // FIX: iterate over the number of FLOATS, not bytes. The
                    // original loop ran to capacityInBytes while reading 4-byte
                    // floats, reading ~4x past the end of the audio buffer and
                    // overrunning the dataInFloat destination as well.
                    uint   floatCount = capacityInBytes / sizeof(float);
                    float *samplesIn  = (float *)dataInBytes;

                    // NOTE(review): assumes the dataInFloat field holds at least
                    // floatCount elements — confirm where it is sized.
                    for (int i = 0; i < floatCount; i++)
                    {
                        dataInFloat[i] = samplesIn[i];
                    }
                    SendHello(capacityInBytes);
                }
        }
示例#22
0
        // </SnippetMixProperty>

        // <SnippetProcessFrame>
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            // One-tap echo: output = dry input scaled by (1 - Mix) plus a
            // delayed sample from the circular echoBuffer scaled by Mix. The
            // buffer always holds the most recent echoBuffer.Length input
            // samples, so the delay equals the buffer length.
            AudioFrame inputFrame  = context.InputFrame;
            AudioFrame outputFrame = context.OutputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                   outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                       outputReference = outputBuffer.CreateReference())
                {
                    byte *inputDataInBytes;
                    byte *outputDataInBytes;
                    uint  inputCapacity;
                    uint  outputCapacity;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);
                    ((IMemoryBufferByteAccess)outputReference).GetBuffer(out outputDataInBytes, out outputCapacity);

                    float *inputDataInFloat  = (float *)inputDataInBytes;
                    float *outputDataInFloat = (float *)outputDataInBytes;

                    float inputData;
                    float echoData;

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        // dry portion scaled by (1 - Mix), wet (delayed) by Mix
                        inputData            = inputDataInFloat[i] * (1.0f - this.Mix);
                        echoData             = echoBuffer[currentActiveSampleIndex] * this.Mix;
                        outputDataInFloat[i] = inputData + echoData;
                        // overwrite the slot just consumed with the current input,
                        // so it replays one full buffer length later
                        echoBuffer[currentActiveSampleIndex] = inputDataInFloat[i];
                        currentActiveSampleIndex++;

                        if (currentActiveSampleIndex == echoBuffer.Length)
                        {
                            // Wrap around (after one second of samples)
                            currentActiveSampleIndex = 0;
                        }
                    }
                }
        }
        /// <summary>
        /// Audio effect callback that publishes the most recent input sample to a
        /// shared property set (for other app components) and to a composition
        /// property set (for animation). The audio stream itself is not modified.
        /// </summary>
        /// <param name="context">Provides the input audio frame.</param>
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
            using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)inputReference).GetBuffer(out byte* inputDataInBytes, out uint inputCapacity);

                float* inputDataInFloat = (float*)inputDataInBytes;

                // Process audio data
                int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                for (int i = 0; i < dataInFloatLength; i++)
                {
                    float inputData = inputDataInFloat[i];

                    lock (badLock)
                    {
                        // The dictionary-style indexer inserts or replaces in one
                        // operation, avoiding the original ContainsKey + indexer
                        // double lookup on every sample.
                        propertySet["InputDataRaw"] = inputData;
                    }

                    // Null-conditional: the composition property set may not be
                    // wired up yet when processing starts.
                    compositionPropertySet?.InsertScalar("InputData", inputData * 500);
                }
            }
        }
示例#24
0
        /// <summary>
        /// Allocates an AudioFrame sized for the requested sample count across all
        /// channels and fills it by repeatedly calling FillBuffer.
        /// </summary>
        /// <param name="samples">Number of sample frames to generate per channel.</param>
        /// <returns>The filled audio frame.</returns>
        private unsafe AudioFrame GenerateAudioData(uint samples)
        {
            uint       samplesMCh = samples * AudioGraph.EncodingProperties.ChannelCount;
            uint       bufferSize = sizeof(float) * samplesMCh;
            AudioFrame frame      = new AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);
                    long remaining = bufferSize;
                    do
                    {
                        // Fill in chunks of at most sampleCap bytes.
                        var read = FillBuffer(remaining > sampleCap ? sampleCap : (int)remaining, dataInBytes);
                        if (read <= 0)
                        {
                            // FillBuffer produced no data (e.g. source exhausted).
                            // The original loop would spin forever here; bail out
                            // and leave the rest of the frame as allocated.
                            break;
                        }
                        dataInBytes += read;
                        remaining   -= read;
                    } while (remaining > 0);
                }

            return(frame);
        }
示例#25
0
        /// <summary>
        /// Captures the leading samples of an output frame into the shared data
        /// list (e.g. for visualization).
        /// </summary>
        /// <param name="frame">Audio frame produced by the graph's output node.</param>
        unsafe void ProcessFrameOutput(AudioFrame frame)
        {
            // Read access is sufficient: the frame is only inspected, never modified.
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);

                    float *dataInFloat = (float *)dataInBytes;

                    data.Clear();

                    // The original loop unconditionally read indices 0..32, which
                    // overruns the buffer whenever a frame carries fewer than 33
                    // float samples. Clamp to the samples actually present.
                    int sampleCount = Math.Min(33, (int)(capacityInBytes / sizeof(float)));

                    for (int i = 0; i < sampleCount; i++)
                    {
                        data.Add(dataInFloat[i]);
                    }
                }
        }
示例#26
0
        /// <summary>
        /// Converts the float samples of an output frame to 16-bit PCM and enqueues
        /// them for consumption elsewhere (e.g. streaming or file output).
        /// </summary>
        /// <param name="frame">Audio frame produced by the graph's output node.</param>
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);

                    // buffer.Length is the count of VALID bytes; capacityInBytes is
                    // the allocated size and may be larger, so bounding the loop by
                    // capacity (as the original did) can read uninitialized data.
                    // Computing the count once also avoids the repeated "/4".
                    int sampleCount = (int)(buffer.Length / sizeof(float));

                    if (sampleCount > 0)
                    {
                        float *dataInFloats = (float *)dataInBytes;

                        short[] shorts = new short[sampleCount];

                        for (int i = 0; i < sampleCount; i++)
                        {
                            // Map [-1.0, 1.0] floats onto the signed 16-bit range.
                            shorts[i] = (short)((65535 * dataInFloats[i] - 1) / 2);
                        }

                        bufferQueue.Enqueue(shorts);
                    }
                }
        }
        /// <summary>
        /// Builds an AudioFrame for the requested number of samples, draining queued
        /// 16-bit samples from SamplesBuffer (normalized to [-1, 1] floats) and
        /// zero-padding the remainder when the queue runs short.
        /// </summary>
        /// <param name="requiredSamples">Number of sample frames requested by the graph.</param>
        /// <returns>The generated audio frame.</returns>
        unsafe private AudioFrame GenerateAudioData(int requiredSamples)
        {
            // Total element count across channels, then the byte size (one 32-bit
            // float per element).
            uint elementCount = (uint)requiredSamples * NumChannels;
            uint byteCount    = elementCount * sizeof(float);
            AudioFrame frame  = new AudioFrame(byteCount);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the raw buffer and view it as float samples.
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *rawBytes, out uint rawCapacity);
                    float *sampleData = (float *)rawBytes;

                    lock (SamplesBuffer)
                    {
                        long available = Math.Min(elementCount, SamplesBuffer.Count);
                        long index     = 0;

                        // Drain queued 16-bit samples, scaling into [-1, 1].
                        while (index < available)
                        {
                            sampleData[index] = (float)SamplesBuffer.Dequeue() / short.MaxValue;
                            index++;
                        }

                        // Queue exhausted before the frame was full: pad with silence.
                        while (index < elementCount)
                        {
                            sampleData[index] = 0f;
                            index++;
                        }
                    }
                }

            return(frame);
        }
示例#28
0
        /// <summary>
        /// Skeleton audio effect callback: acquires read access to the input frame
        /// and write access to the output frame, exposes both as float pointers,
        /// and computes the sample count. No samples are actually processed — this
        /// is the scaffold that real per-sample logic plugs into.
        /// </summary>
        /// <param name="context">Provides the input and output audio frames.</param>
        public void ProcessFrame(ProcessAudioFrameContext context)
        {
            unsafe
            {
                AudioFrame inputFrame  = context.InputFrame;
                AudioFrame outputFrame = context.OutputFrame;

                using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                       outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                    using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                           outputReference = outputBuffer.CreateReference())
                    {
                        byte *sourceBytes;
                        byte *targetBytes;
                        uint  sourceCapacity;
                        uint  targetCapacity;

                        // Obtain the raw byte buffers behind both references.
                        ((IMemoryBufferByteAccess)inputReference).GetBuffer(out sourceBytes, out sourceCapacity);
                        ((IMemoryBufferByteAccess)outputReference).GetBuffer(out targetBytes, out targetCapacity);

                        // View the buffers as 32-bit float samples.
                        float *sourceSamples = (float *)sourceBytes;
                        float *targetSamples = (float *)targetBytes;

                        // Number of float samples available in the input buffer.
                        int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);
                    }
            }
        }
示例#29
0
        /// <summary>
        /// Audio effect callback implementing a stereo balance control: attenuates
        /// the left channel when Balance is positive and the right channel when
        /// Balance is negative. Assumes interleaved stereo (L, R) float samples.
        /// </summary>
        /// <param name="context">Provides the input and output audio frames.</param>
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame  = context.InputFrame;
            AudioFrame outputFrame = context.OutputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                   outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                       outputReference = outputBuffer.CreateReference())
                {
                    byte *inputDataInBytes;
                    byte *outputDataInBytes;
                    uint  inputCapacity;
                    uint  outputCapacity;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);
                    ((IMemoryBufferByteAccess)outputReference).GetBuffer(out outputDataInBytes, out outputCapacity);

                    float *inputDataInFloat  = (float *)inputDataInBytes;
                    float *outputDataInFloat = (float *)outputDataInBytes;

                    // Read the Balance property once per frame (the original read it
                    // four times per sample pair) and precompute per-channel gains.
                    float balance   = this.Balance;
                    float leftGain  = (balance < 0) ? 1 : (1.0f - balance);
                    float rightGain = (balance > 0) ? 1 : (1.0f + balance);

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    // The "i + 1 <" bound guards against an odd sample count, which
                    // in the original would read and write one float past the end of
                    // both buffers via the i + 1 index.
                    for (int i = 0; i + 1 < dataInFloatLength; i += 2)
                    {
                        outputDataInFloat[i]     = inputDataInFloat[i] * leftGain;
                        outputDataInFloat[i + 1] = inputDataInFloat[i + 1] * rightGain;
                    }
                }
        }
    /// <summary>
    /// Extracts the first 5 channels from a multi-channel audio media frame and
    /// writes them, prefixed with a 3-word header, to audioStream.
    /// </summary>
    /// <param name="audioMediaFrame">Audio media frame obtained from a media frame reader.</param>
    unsafe private void ProcessAudioFrame(AudioMediaFrame audioMediaFrame)
    {
        using (AudioFrame audioFrame = audioMediaFrame.GetAudioFrame())
            using (AudioBuffer buffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                    float *dataInFloat = (float *)dataInBytes;

                    // Derive the per-channel sample count from frame duration and
                    // sample rate. TotalMilliseconds is truncated to a whole number,
                    // so frames not aligned to 1 ms may be slightly undercounted.
                    TimeSpan duration    = audioMediaFrame.FrameReference.Duration;
                    uint     frameDurMs  = (uint)duration.TotalMilliseconds;
                    uint     sampleRate  = audioMediaFrame.AudioEncodingProperties.SampleRate;
                    uint     sampleCount = (frameDurMs * sampleRate) / 1000;

                    // Only send input signals of 1st~5th channel
                    // Payload layout: 3 uint header words (channel count = 5,
                    // sample count, sample rate) followed by 5 * sampleCount floats.
                    byte[] buf = new byte[4 * (5 * sampleCount + 3)];
                    fixed(byte *pBufByte = buf)
                    {
                        // Two typed views over the same pinned payload buffer.
                        float *pBufFloat = (float *)pBufByte;
                        uint * pBufUint  = (uint *)pBufByte;

                        pBufUint[0] = 5;
                        pBufUint[1] = sampleCount;
                        pBufUint[2] = sampleRate;
                        for (uint i = 0; i < 5 * sampleCount; i++)
                        {
                            uint frameIdx   = i / 5;
                            uint channelIdx = i % 5;
                            // Source stride of 11 floats per sample frame — assumes
                            // an 11-channel interleaved capture format (device
                            // specific); TODO confirm against the encoding properties.
                            pBufFloat[3 + 5 * frameIdx + channelIdx] = dataInFloat[11 * frameIdx + channelIdx];
                        }
                    }

                    // NOTE(review): fire-and-forget async write — the returned
                    // operation is not awaited, so failures go unobserved.
                    audioStream.WriteAsync(buf.AsBuffer());
                }
    }