Example #1
        unsafe void Graph_QuantumStarted(AudioGraph sender, object args)
        {
            // we'll only broadcast if we're actively monitoring audio packets
            if (!Active)
            {
                return;
            }

            try
            {
                // get an audio frame from the output node
                AudioFrame frame = outputNode.GetFrame();

                if (frame.Duration?.Milliseconds == 0)                 // discard any empty frames
                {
                    return;
                }

                using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                    using (IMemoryBufferReference reference = buffer.CreateReference())
                    {
                        // Get the buffer from the AudioFrame
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);

                        // convert the bytes into float
                        float *dataInFloat = (float *)dataInBytes;

                        if (audioBytes == null)
                        {
                            audioBytes = new byte [buffer.Length * broadcastSize / 2];                     // buffer length * # of frames we want to accrue / 2 (because we're transforming float audio to Int 16)
                        }

                        for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                        {
                            // convert the float into a double byte for 16 bit PCM
                            var     shortVal   = AudioFunctions.FloatToInt16(dataInFloat [i]);
                            byte [] chunkBytes = BitConverter.GetBytes(shortVal);

                            audioBytes [bufferPosition++] = chunkBytes [0];
                            audioBytes [bufferPosition++] = chunkBytes [1];
                        }

                        // we want to wait until we accrue <broadcastSize> # of frames and then broadcast them
                        //	in practice, this will take us from 20ms chunks to 100ms chunks and result in more accurate audio level calculations
                        //	we could maybe use the audiograph latency settings to achieve similar results but this seems to work well
                        if (bufferPosition == audioBytes.Length || !Active)
                        {
                            // broadcast the audio data to any listeners
                            OnBroadcast?.Invoke(this, audioBytes);

                            audioBytes     = null;
                            bufferPosition = 0;
                        }
                    }
            }
            catch (Exception ex)
            {
                OnException?.Invoke(this, new Exception($"AudioStream.Graph_QuantumStarted() :: Error: {ex.Message}"));
            }
        }
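
Note: every example on this page casts the IMemoryBufferReference obtained from CreateReference() to IMemoryBufferByteAccess. That interface is a COM interop interface rather than part of the projected WinRT surface, so the project has to declare it itself and be compiled with unsafe code enabled. A minimal declaration, using the GUID documented for IMemoryBufferByteAccess, looks like this:

        using System.Runtime.InteropServices;

        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            // Returns a raw pointer to the underlying buffer plus its capacity in bytes.
            void GetBuffer(out byte *buffer, out uint capacity);
        }
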
Example #2
        private unsafe AudioFrame GenerateAudioData(uint samples)
        {
            var bufferSize = samples * sizeof(float) * 2;
            var frame      = new AudioFrame(bufferSize);

            _buffer = _buffer?.Length != samples * 2 ? new short[samples * 2] : _buffer;
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    float *dataInFloat;
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                    dataInFloat = (float *)dataInBytes;
                    _player.GetBuffer(_buffer);

                    for (var i = 0; i < _buffer.Length; i++)
                    {
                        dataInFloat[i] = _buffer[i] * 0.00003f;                  // multiplication is reportedly faster than division here
                    }

                    //foreach (float f in _buffer.Select(a => a * 0.00003f))
                    //	*dataInFloat++ = f;
                }

            return(frame);
        }
Example #3
        unsafe private void ProcessAudioFrame(AudioMediaFrame audioMediaFrame)
        {
            using (AudioFrame audioFrame = audioMediaFrame.GetAudioFrame())
                using (AudioBuffer buffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
                    using (IMemoryBufferReference reference = buffer.CreateReference())
                    {
                        byte * dataInBytes;
                        uint   capacityInBytes;
                        float *dataInFloat;


                        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                        // The requested format was float
                        dataInFloat = (float *)dataInBytes;

                        // Get the number of samples by multiplying the duration by sampling rate:
                        // duration [s] x sampling rate [samples/s] = # samples

                        // Duration can be gotten off the frame reference OR the audioFrame
                        TimeSpan duration = audioMediaFrame.FrameReference.Duration;

                        // frameDurMs is in milliseconds, while SampleRate is given per second.
                        uint frameDurMs  = (uint)duration.TotalMilliseconds;
                        uint sampleRate  = audioMediaFrame.AudioEncodingProperties.SampleRate;
                        uint sampleCount = (frameDurMs * sampleRate) / 1000;
                    }
        }
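
The millisecond arithmetic above truncates the frame duration to whole milliseconds before multiplying by the sample rate. A small helper sketch that computes the count from the full duration instead (the helper and its name are illustrative, not part of the original sample):

        // Sketch: derive the sample count without truncating the duration to whole milliseconds.
        private static uint GetSampleCount(TimeSpan duration, uint sampleRate)
        {
            return (uint)Math.Round(duration.TotalSeconds * sampleRate);
        }
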
Example #4
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame frame = context.InputFrame;

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.ReadWrite))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);
                    float *dataInFloat       = (float *)dataInBytes;
                    int    dataInFloatLength = (int)buffer.Length / sizeof(float);

                    // read parameters once
                    float currentWet      = this.Mix;
                    float currentDry      = 1.0f - currentWet;
                    float currentFeedback = this.Feedback;

                    // Process audio data
                    float sample, echoSample, outSample;
                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        // read values
                        sample     = dataInFloat[i];
                        echoSample = _echoBuffer.Dequeue();

                        // compute output sample
                        outSample      = (currentDry * sample) + (currentWet * echoSample);
                        dataInFloat[i] = outSample;

                        // compute delay sample
                        echoSample = sample + (currentFeedback * echoSample);
                        _echoBuffer.Enqueue(echoSample);
                    }
                }
        }
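
Example #4 assumes _echoBuffer is a Queue<float> (System.Collections.Generic) that has been pre-filled with silence, so Dequeue() always succeeds and the echo arrives after the intended delay. A plausible initialization sketch; the method name and delay parameter are assumptions, not part of the original effect:

        // Hypothetical setup for the delay line used above: pre-load one delay-length of
        // zero samples so Dequeue() never underflows.
        private Queue<float> _echoBuffer;

        private void InitEchoBuffer(uint sampleRate, uint channelCount, double delaySeconds)
        {
            int delaySamples = (int)(sampleRate * channelCount * delaySeconds);
            _echoBuffer = new Queue<float>(delaySamples);
            for (int i = 0; i < delaySamples; i++)
            {
                _echoBuffer.Enqueue(0f);
            }
        }
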
Example #5
        //unsafe private void M_AudioGraph_QuantumProcessed(AudioGraph sender, object args)
        //{
        //}

        unsafe private void M_AudioGraph_QuantumStarted(AudioGraph sender, object args)
        {
            // draw every n frames
            //if (fctr++ % 5 == 0)
            //{
            using (AudioFrame audioFrame = this.m_AudioFrameOutputNode.GetFrame())
                using (AudioBuffer audioBuffer = audioFrame.LockBuffer(AudioBufferAccessMode.Read))
                    using (IMemoryBufferReference memBufferRef = audioBuffer.CreateReference())
                    {
                        IMemoryBufferByteAccess byteAccess = memBufferRef as IMemoryBufferByteAccess;

                        byte *byteBuffer;
                        uint  capacity;

                        byteAccess.GetBuffer(out byteBuffer, out capacity);

                        float *floatBuffer = (float *)byteBuffer;

                        for (int i = 0; i < this.m_AudioGraph.SamplesPerQuantum * this.m_AudioGraph.EncodingProperties.ChannelCount; i++)
                        {
                            this.m_QuantumSamples[i] = floatBuffer[i];
                        }

                        this.m_Capacity = capacity;
                        this.m_abCap    = audioBuffer.Capacity;
                        this.m_abLen    = audioBuffer.Length;
                    }
            AudioCanvas.Invalidate();
            //}
        }
Example #6
        public void ProcessFrame(ProcessAudioFrameContext context)
        {
            unsafe
            {
                AudioFrame inputFrame = context.InputFrame;

                using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.ReadWrite))
                    using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                    {
                        ((IMemoryBufferByteAccess)inputReference).GetBuffer(out byte *inputDataInBytes, out uint inputCapacity);

                        float *inputDataInFloat  = (float *)inputDataInBytes;
                        int    dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                        // Process audio data
                        for (int n = 0; n < dataInFloatLength; n++)
                        {
                            int ch = n % channels;

                            // cascaded filter to perform eq
                            for (int band = 0; band < bandCount; band++)
                            {
                                inputDataInFloat[n] = filters[ch, band].Transform(inputDataInFloat[n]);
                            }
                        }
                    }
            }
        }
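
Example #6 pushes each sample through a cascade of per-channel, per-band filters via filters[ch, band].Transform(...). The filter type itself is not shown; below is a minimal direct-form-I biquad with a Transform method of that shape (coefficients are assumed to be already normalized by a0; this is an illustration, not the original class):

        // Minimal biquad sketch matching the Transform(float) shape used above.
        class BiquadFilter
        {
            private readonly float b0, b1, b2, a1, a2;   // normalized coefficients
            private float x1, x2, y1, y2;                // previous inputs and outputs

            public BiquadFilter(float b0, float b1, float b2, float a1, float a2)
            {
                this.b0 = b0; this.b1 = b1; this.b2 = b2; this.a1 = a1; this.a2 = a2;
            }

            public float Transform(float x)
            {
                // Direct form I difference equation.
                float y = b0 * x + b1 * x1 + b2 * x2 - a1 * y1 - a2 * y2;
                x2 = x1; x1 = x;
                y2 = y1; y1 = y;
                return y;
            }
        }
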
Example #7
        unsafe private static void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    float *dataInFloat;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    dataInFloat = (float *)dataInBytes;
                    float[] dataInFloats = new float[capacityInBytes / sizeof(float)];

                    for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                    {
                        dataInFloats[i] = dataInFloat[i];
                    }



                    InputRecieved?.Invoke(null, dataInFloats);
                }
        }
Example #8
        /// <summary>
        /// When audioFrameUpdateCount reaches audioFrameUpdateMinimum, this method gets the current audio frame, reads its data,
        /// and calculates the raw audio level, roughly from -100 to 0.
        /// </summary>
        private static unsafe void Graph_QuantumStarted(AudioGraph sender, object args)
        {
            audioFrameUpdateCount++;
            if (audioFrameUpdateCount >= audioFrameUpdateMinimum)
            {
                AudioFrame audioFrame = frameOutputNode.GetFrame();
                float[]    floatData;
                using (AudioBuffer audioBuffer = audioFrame.LockBuffer(AudioBufferAccessMode.Write))
                    using (IMemoryBufferReference reference = audioBuffer.CreateReference())
                    {
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacity);

                        float *unsafeFloatData = (float *)dataInBytes;
                        floatData = new float[capacity / sizeof(float)];

                        for (int i = 0; i < capacity / sizeof(float); i++)
                        {
                            floatData[i] = unsafeFloatData[i];
                        }
                    }

                double soundLevel = 0f;
                foreach (float sample in floatData)
                {
                    soundLevel += Math.Abs(sample);
                }
                soundLevel = Math.Log10(soundLevel / floatData.Length) * 20;

                NewRawSoundLevel(soundLevel);

                audioFrameUpdateCount = 0;
            }
        }
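
The summary above describes a raw level from -100 to 0, but Math.Log10 returns negative infinity for a completely silent frame and nothing caps the upper end. A small clamp helper that could be applied before calling NewRawSoundLevel (the helper is illustrative, with the -100 floor taken from the summary):

        // Sketch: clamp the computed level into the -100..0 range described above.
        private static double ClampRawSoundLevel(double soundLevel)
        {
            if (double.IsNegativeInfinity(soundLevel) || soundLevel < -100)
            {
                return -100;
            }
            return Math.Min(soundLevel, 0);
        }
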
Example #9
        unsafe internal static AudioFrame GetAudioFrame(DataReader reader)
        {
            var numBytes = reader.UnconsumedBufferLength;

            var headerSize = 44;
            var bytes      = new byte[headerSize];

            reader.ReadBytes(bytes);

            var        numSamples = (uint)(numBytes - headerSize);
            AudioFrame frame      = new AudioFrame(numSamples);

            using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    Int16 *dataInInt16 = (Int16 *)dataInBytes;

                    for (int i = 0; i < capacityInBytes / sizeof(Int16); i++)
                    {
                        dataInInt16[i] = reader.ReadInt16();
                    }
                }

            return(frame);
        }
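
Example #9 skips a 44-byte WAV header and then reads 16-bit samples straight from the DataReader. WAV sample data is little-endian while DataReader defaults to big-endian, so the reader's ByteOrder should be set before calling the method. A usage sketch; the StorageFile parameter and method name are assumptions:

        // Usage sketch: build a little-endian DataReader over a WAV file and hand it to
        // GetAudioFrame above. 'file' is an assumed StorageFile supplied by the caller.
        internal static async Task<AudioFrame> LoadWavFrameAsync(StorageFile file)
        {
            IBuffer fileBuffer = await FileIO.ReadBufferAsync(file);
            DataReader reader  = DataReader.FromBuffer(fileBuffer);
            reader.ByteOrder   = ByteOrder.LittleEndian;   // WAV samples are little-endian
            return GetAudioFrame(reader);
        }
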
Example #10
        /// <summary>
        /// Generates audio data (a 17 kHz sine wave) for the required number of samples
        /// </summary>
        /// <param name="samples">Sample count</param>
        /// <returns>AudioFrame containing the requested samples</returns>
        public static unsafe AudioFrame GenerateAudioData(uint samples)
        {
            // Buffer size is (number of samples) * (size of each sample) * (number of channels)
            uint       bufferSize = samples * sizeof(float) * 2;
            AudioFrame frame      = new AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint _);

                    // Cast to float since the data we are generating is float
                    float *dataInFloat = (float *)dataInBytes;

                    float  freq            = 17000; // choosing to generate frequency of 17kHz
                    float  amplitude       = 0.3f;
                    int    sampleRate      = (int)outgraph.EncodingProperties.SampleRate;
                    double sampleIncrement = (freq * (Math.PI * 2)) / sampleRate;

                    // Generate a 17kHz sine wave and populate the values in the memory buffer
                    for (int i = 0; i < samples; i++)
                    {
                        double sinValue = amplitude * Math.Sin(theta);
                        dataInFloat[i] = (float)sinValue;
                        theta         += sampleIncrement;
                    }
                }

            return(frame);
        }
        private unsafe AudioFrame ReadAudioData(uint samples)
        {
            // Buffer size is (number of samples) * (size of each sample)
            uint       bufferSize = samples * sizeof(byte) * 2;
            AudioFrame frame      = new Windows.Media.AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    // Read audio data from the stream and copy it to the AudioFrame buffer
                    var  readBytes = new byte[capacityInBytes];
                    uint bytesRead = audioStream.Read(readBytes);

                    if (bytesRead == 0)
                    {
                        frameInputNode.Stop();
                    }

                    for (int i = 0; i < bytesRead; i++)
                    {
                        dataInBytes[i] = readBytes[i];
                    }
                }

            return(frame);
        }
Example #12
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // get hold of the buffer pointer
                    byte *dataInBytes;
                    uint  capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes,
                                                                   out capacityInBytes);

                    var dataInFloat = (float *)dataInBytes;

                    // examine
                    float max = 0;
                    for (int n = 0; n < graph.SamplesPerQuantum; n++)
                    {
                        max = Math.Max(Math.Abs(dataInFloat[n]), max);
                    }
                    currentPeak = max;

                    float x = currentPeak * 1000;

                    double Bri = Math.Pow(x, 3);                // Sensitivity slider value

                    byte Brightness = (byte)Math.Round(Bri, 0); // Calculating to a 0 - 255 value to control the light brightness

                    Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        OutputText.Text = Brightness.ToString();
                    });
                }
        }
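
In Example #12 the cubed peak can exceed 255, and the unchecked cast to byte then wraps around instead of saturating at full brightness. A saturating conversion sketch (the helper is illustrative, not part of the original handler):

        // Sketch: saturate the brightness at 255 instead of wrapping on loud peaks.
        private static byte ToBrightness(double bri)
        {
            return (byte)Math.Min(255.0, Math.Max(0.0, Math.Round(bri)));
        }
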
Example #13
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte *inputDataInBytes;
                    uint  inputCapacity;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);

                    float *inputDataInFloat = (float *)inputDataInBytes;

                    float inputDataL;
                    float inputDataR;

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    if (_chart == null)
                    {
                        _chart = new float[dataInFloatLength];
                        propertySet["chart"] = _chart;
                    }
                    for (int i = 0; i < dataInFloatLength; i += 2)
                    {
                        inputDataL    = inputDataInFloat[i];
                        inputDataR    = inputDataInFloat[i + 1];
                        _chart[i]     = inputDataL;
                        _chart[i + 1] = inputDataR;
                    }
                }
        }
        public unsafe void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame  = context.InputFrame;
            AudioFrame outputFrame = context.OutputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                   outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                       outputReference = outputBuffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out var inputDataInBytes, out _);
                    ((IMemoryBufferByteAccess)outputReference).GetBuffer(out var outputDataInBytes, out _);

                    float *inputDataInFloat  = (float *)inputDataInBytes;
                    float *outputDataInFloat = (float *)outputDataInBytes;

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        // var inputData = inputDataInFloat[i] * (1.0f - Mix);
                        var inputData = inputDataInFloat[i];
                        outputDataInFloat[i] = ProcessFilterSample(inputData);
                    }
                }
        }
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = context.InputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte * inputInBytes;
                    uint   inputCapacity;
                    float *inputInFloats;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputInBytes, out inputCapacity);

                    inputInFloats = (float *)inputInBytes;
                    int   inputLength = (int)inputBuffer.Length / sizeof(float);
                    float sum         = 0;

                    // Only process one channel for now (will average out unless the audio is severely unbalanced between left/right)
                    for (int i = 0; i < inputLength; i += 2)
                    {
                        sum += (inputInFloats[i] * inputInFloats[i]);
                    }
                    double rms = Math.Sqrt(sum / (inputLength / 2));
                    this.VolumeInDecibels = 20 * Math.Log10(rms);
                }
        }
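
The RMS example above yields negative infinity decibels for a silent buffer (log10 of zero). If callers need a finite value, a floored conversion like the sketch below works; the 1e-6 epsilon (about -120 dB) is an assumption:

        // Sketch: guarded RMS-to-decibel conversion with an assumed 1e-6 floor.
        private static double RmsToDecibels(double rms)
        {
            return 20 * Math.Log10(Math.Max(rms, 1e-6));
        }
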
Example #16
 unsafe private void ProcessInputFrame(AudioFrame frame)
 {
     using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
     using (IMemoryBufferReference reference = buffer.CreateReference())
     {
         // We get data from current buffer
         ((IMemoryBufferByteAccess)reference).GetBuffer(
             out byte* dataInBytes,
             out uint capacityInBytes
             );
         // We discard first frame; it's full of zeros because of latency
         if (audioGraph.CompletedQuantumCount == 1) return;
         float* dataInFloat = (float*)dataInBytes;
         uint capacityInFloat = capacityInBytes / sizeof(float);
         // Number of channels defines step between samples in buffer
         uint step = fileInputNode.EncodingProperties.ChannelCount;
         // We transfer audio samples from buffer into audioData
         for (uint i = 0; i < capacityInFloat; i += step)
         {
             if (audioDataCurrentPosition < audioData.Length)
             {
                 audioData[audioDataCurrentPosition] = dataInFloat[i];
                 audioDataCurrentPosition++;
             }
         }
     }
 }
        unsafe internal AudioFrame GenerateAudioData(uint samples)
        {
            // Buffer size is (number of samples) * (size of each sample)
            // We choose to generate single channel (mono) audio. For multi-channel, multiply by number of channels
            uint       bufferSize = samples * sizeof(float);
            AudioFrame frame      = new Windows.Media.AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    float *dataInFloat;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    // Cast to float since the data we are generating is float
                    dataInFloat = (float *)dataInBytes;

                    float  amplitude       = 0.3f;
                    int    sampleRate      = (int)parentGraph.EncodingProperties.SampleRate;
                    double sampleIncrement = (frequency * (Math.PI * 2)) / sampleRate;

                    // Generate a sine wave and populate the values in the memory buffer
                    for (int i = 0; i < samples; i++)
                    {
                        double sinValue = amplitude * Math.Sin(angle);
                        dataInFloat[i] = (float)sinValue;
                        angle         += sampleIncrement;
                    }
                }

            return(frame);
        }
Example #18
        /// <summary>
        /// Handle frame of mic input
        /// </summary>
        /// <param name="frame"></param>
        private static unsafe void ProcessFrameOutput(AudioFrame frame)
        {
            float[] dataInFloats;
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *dataInBytes, out uint capacityInBytes);

                    float *dataInFloat = (float *)dataInBytes;
                    dataInFloats = new float[capacityInBytes / sizeof(float)];

                    for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                    {
                        dataInFloats[i] = dataInFloat[i];
                    }
                }

            // Don't bother if muted
            if (LocalState.VoiceState.SelfMute || LocalState.VoiceState.ServerMute)
            {
                AudioInSpec1   = 0;
                AudioInSpec2   = 0;
                AudioInSpec3   = 0;
                AudioInSpec4   = 0;
                AudioInSpec5   = 0;
                AudioInSpec6   = 0;
                AudioInSpec7   = 0;
                AudioInSpec8   = 0;
                AudioInSpec9   = 0;
                AudioInAverage = 0;
            }
            else
            {
                // Determine FFT data
                List <float[]> amplitudeData = FFT.Processing.HelperMethods.ProcessFrameOutput(frame);
                List <float[]> channelData   = FFT.Processing.HelperMethods.GetFftData(FFT.Processing.HelperMethods.ConvertTo512(amplitudeData, ingraph), ingraph);

                float[] leftChannel = channelData[1];

                // Assign each FFT data out channel
                AudioInSpec1   = HelperMethods.Max(leftChannel, 0, 1);
                AudioInSpec2   = HelperMethods.Max(leftChannel, 2, 3);
                AudioInSpec3   = HelperMethods.Max(leftChannel, 3, 4);
                AudioInSpec4   = HelperMethods.Max(leftChannel, 4, 5);
                AudioInSpec5   = HelperMethods.Max(leftChannel, 5, 6);
                AudioInSpec6   = HelperMethods.Max(leftChannel, 7, 8);
                AudioInSpec7   = HelperMethods.Max(leftChannel, 9, 10);
                AudioInSpec8   = HelperMethods.Max(leftChannel, 10, 12);
                AudioInSpec9   = HelperMethods.Max(leftChannel, 14, 26);
                AudioInAverage = (AudioInSpec1 + AudioInSpec2 + AudioInSpec3 + AudioInSpec4 + AudioInSpec5 + AudioInSpec6 + AudioInSpec7 + AudioInSpec8 + AudioInSpec9) / 9;
            }

            InputRecieved?.Invoke(null, dataInFloats);
        }
Example #19
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            //foreach (var item in context.InputFrame.ExtendedProperties.Keys)
            //{
            //    Debug.WriteLine(item);
            //}


            const int videoFrameRate = 60; // TODO: we should probably measure this

            //Debug.WriteLine(sw.ElapsedMilliseconds.ToString());
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
                {
                    byte * inputInBytes;
                    uint   inputCapacity;
                    float *inputInFloats;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputInBytes, out inputCapacity);

                    inputInFloats = (float *)inputInBytes;
                    int inputLengthSamples = (int)inputBuffer.Length / sizeof(float);

                    int samplesPervBlank = (int)((float)currentEncodingProperties.SampleRate / (float)videoFrameRate);

                    int numVBlanksForCurrentAudioBuffer = (int)Math.Ceiling(((float)context.InputFrame.Duration.Value.Milliseconds / ((1.0f / (float)videoFrameRate) * 1000)));

                    var volumeSetLeft  = new double[numVBlanksForCurrentAudioBuffer];
                    var volumeSetRight = new double[numVBlanksForCurrentAudioBuffer];

                    //Left Channel
                    CalcAudioVolumedBPerVBlank(inputInFloats, inputLengthSamples, samplesPervBlank, volumeSetLeft, 0, (int)currentEncodingProperties.ChannelCount);

                    if (currentEncodingProperties.ChannelCount == 2)
                    {
                        //Right Channel
                        CalcAudioVolumedBPerVBlank(inputInFloats, inputLengthSamples, samplesPervBlank, volumeSetRight, 1, (int)currentEncodingProperties.ChannelCount);
                    }

                    lock (PassthroughEffect.GetBadLock())
                    {
                        for (var i = 0; i < numVBlanksForCurrentAudioBuffer; i++)
                        {
                            ((Queue <Tuple <double, double> >) this.propertySet["dataQueue"]).Enqueue(new Tuple <double, double>(volumeSetLeft[i], volumeSetRight[i]));
                        }
                        //((Queue<Double[]>)this.propertySet["AudioVolumeLeftQueue"]).Enqueue(volumeSetLeft);
                        //((Queue<Double[]>)this.propertySet["AudioVolumeRightQueue"]).Enqueue(volumeSetRight);
                        //this.propertySet["VolumeLeft"] = volumeSetLeft;
                        //this.propertySet["VolumeRight"] = volumeSetRight;
                    }
                }
        }
Example #20
        // Graphics / Buffer helper functions

        public unsafe int[,] GetColorDistribution(SoftwareBitmap haystack, Color needle, int[] tolerance, int sparcity = 2)
        {
            int w = haystack.PixelWidth;
            int h = haystack.PixelHeight;
            int r = needle.R;
            int g = needle.G;
            int b = needle.B;

            int[,] dA = new int[w * h, 2];
            int dAPointer = 0;

            BitmapBuffer           buffer       = haystack.LockBuffer(BitmapBufferAccessMode.Read);
            BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
            IMemoryBufferReference reference    = buffer.CreateReference();

            ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *bufferData, out uint capacity);



            int pos;

            for (int x = 0; x < w; x += sparcity)
            {
                for (int y = 0; y < h; y += sparcity)
                {
                    pos = bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x;
                    int rC = bufferData[pos + 2];
                    int gC = bufferData[pos + 1];
                    int bC = bufferData[pos];
                    if (Math.Abs(rC - r) <= tolerance[0] && Math.Abs(gC - g) <= tolerance[1] && Math.Abs(bC - b) <= tolerance[2])
                    {
                        dA[dAPointer, 0] = x;
                        dA[dAPointer, 1] = y;
                        dAPointer++;
                    }
                }
            }
            dAPointer--;

            reference.Dispose();
            buffer.Dispose();

            int[,] distributionArray = new int[dAPointer + 1, 2];

            for (int p = dAPointer; p >= 0; p--)
            {
                distributionArray[p, 0] = dA[p, 0];
                distributionArray[p, 1] = dA[p, 1];
            }

            return(distributionArray);
        }
Example #21
 public BufferMemoryManager(IMemoryBuffer buffer)
 {
     reference = buffer?.CreateReference() ?? throw new ArgumentNullException(nameof(buffer));
     if (reference is IMemoryBufferByteAccess)
     {
         byteAccess = (IMemoryBufferByteAccess)reference;
     }
     else
     {
         reference.Dispose();
         throw new ArgumentException("buffer does not implement IMemoryBufferByteAccess");
     }
 }
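
Example #21 shows only the constructor. If BufferMemoryManager is meant to wrap the WinRT buffer as a System.Buffers.MemoryManager<byte> (an assumption, since the base class is not shown), the remaining members might be sketched like this:

 // Sketch of the rest of the type, assuming it derives from MemoryManager<byte>.
 public sealed unsafe class BufferMemoryManagerSketch : MemoryManager<byte>
 {
     private readonly IMemoryBufferReference reference;
     private readonly IMemoryBufferByteAccess byteAccess;

     public BufferMemoryManagerSketch(IMemoryBuffer buffer)
     {
         reference = buffer?.CreateReference() ?? throw new ArgumentNullException(nameof(buffer));
         byteAccess = reference as IMemoryBufferByteAccess;
         if (byteAccess == null)
         {
             reference.Dispose();
             throw new ArgumentException("buffer does not implement IMemoryBufferByteAccess");
         }
     }

     public override Span<byte> GetSpan()
     {
         byteAccess.GetBuffer(out byte* data, out uint capacity);
         return new Span<byte>(data, (int)capacity);
     }

     public override MemoryHandle Pin(int elementIndex = 0)
     {
         // The WinRT reference already keeps the memory alive for its lifetime.
         byteAccess.GetBuffer(out byte* data, out uint capacity);
         return new MemoryHandle(data + elementIndex);
     }

     public override void Unpin() { }

     protected override void Dispose(bool disposing)
     {
         if (disposing) reference.Dispose();
     }
 }
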
Example #22
        //</SnippetQuantumProcessed>


        //<SnippetProcessFrameOutput>
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte * dataInBytes;
                    uint   capacityInBytes;
                    float *dataInFloat;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    dataInFloat = (float *)dataInBytes;
                }
        }
Example #23
        unsafe public static void AddFrame(float[] framedata, uint samples)
        {
            if (!ready)
            {
                return;
            }
            //if (!started)
            //{
            //    //graph.Start();
            //    //started = true;
            //}
            AudioFrame frame = new AudioFrame(samples * 2 * sizeof(float));

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
                    // Cast to float since the data we are generating is float
                    float *dataInFloat = (float *)dataInBytes;
                    fixed(float *frames = framedata)
                    {
                        for (int i = 0; i < samples * 2; i++)
                        {
                            dataInFloat[i] = frames[i];
                        }
                    }
                }
            //List<float[]> amplitudeData = FFT.Processing.HelperMethods.ProcessFrameOutput(frame);
            //List<float[]> channelData = FFT.Processing.HelperMethods.GetFftData(FFT.Processing.HelperMethods.ConvertTo512(amplitudeData, outgraph), outgraph);

            //float[] leftChannel = channelData[1];

            //AudioSpec1 = HelperMethods.Max(leftChannel, 0, 1);
            //AudioSpec2 = HelperMethods.Max(leftChannel, 2, 3);
            //AudioSpec3 = HelperMethods.Max(leftChannel, 3, 4);
            //AudioSpec4 = HelperMethods.Max(leftChannel, 4, 5);
            //AudioSpec5 = HelperMethods.Max(leftChannel, 5, 6);
            //AudioSpec6 = HelperMethods.Max(leftChannel, 7, 8);
            //AudioSpec7 = HelperMethods.Max(leftChannel, 9, 10);
            //AudioSpec8 = HelperMethods.Max(leftChannel, 10, 12);
            //AudioSpec9 = HelperMethods.Max(leftChannel, 14, 26);
            frameInputNode.AddFrame(frame);
        }
Example #24
        unsafe internal static void SendAudioFrameNative(AudioFrame frame, DataWriter writer)
        {
            using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;

                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    float *dataInFloat = (float *)dataInBytes;

                    for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                    {
                        writer.WriteInt16(FloatToInt16(dataInFloat[i]));
                    }
                }
        }
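
Example #24 (like the AudioFunctions.FloatToInt16 call in Example #1) relies on a float-to-Int16 helper that is not shown. A plausible sketch, assuming the usual clamp-and-scale conversion from float samples in [-1, 1] to 16-bit PCM:

        // Sketch of the assumed conversion helper: clamp to [-1, 1], then scale to Int16 range.
        internal static short FloatToInt16(float sample)
        {
            if (sample > 1.0f) sample = 1.0f;
            if (sample < -1.0f) sample = -1.0f;
            return (short)(sample * short.MaxValue);
        }
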
Example #25
        // </SnippetMixProperty>

        // <SnippetProcessFrame>
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame  = context.InputFrame;
            AudioFrame outputFrame = context.OutputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read),
                   outputBuffer = outputFrame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference inputReference = inputBuffer.CreateReference(),
                       outputReference = outputBuffer.CreateReference())
                {
                    byte *inputDataInBytes;
                    byte *outputDataInBytes;
                    uint  inputCapacity;
                    uint  outputCapacity;

                    ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);
                    ((IMemoryBufferByteAccess)outputReference).GetBuffer(out outputDataInBytes, out outputCapacity);

                    float *inputDataInFloat  = (float *)inputDataInBytes;
                    float *outputDataInFloat = (float *)outputDataInBytes;

                    float inputData;
                    float echoData;

                    // Process audio data
                    int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                    for (int i = 0; i < dataInFloatLength; i++)
                    {
                        inputData            = inputDataInFloat[i] * (1.0f - this.Mix);
                        echoData             = echoBuffer[currentActiveSampleIndex] * this.Mix;
                        outputDataInFloat[i] = inputData + echoData;
                        echoBuffer[currentActiveSampleIndex] = inputDataInFloat[i];
                        currentActiveSampleIndex++;

                        if (currentActiveSampleIndex == echoBuffer.Length)
                        {
                            // Wrap around (after one second of samples)
                            currentActiveSampleIndex = 0;
                        }
                    }
                }
        }
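
The wrap-around comment in Example #25 implies that echoBuffer holds one second of samples. One way to size it, following the usual IBasicAudioEffect pattern (the SetEncodingProperties body here is a sketch, not taken from the original class):

        private float[] echoBuffer;
        private int currentActiveSampleIndex;

        public void SetEncodingProperties(AudioEncodingProperties encodingProperties)
        {
            // One second of samples at the negotiated sample rate, matching the
            // "wrap around after one second" comment above; for interleaved multi-channel
            // data, multiply by encodingProperties.ChannelCount.
            echoBuffer = new float[encodingProperties.SampleRate];
            currentActiveSampleIndex = 0;
        }
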
Example #26
        public unsafe void SetPixelColor(SoftwareBitmap swBmp, int x, int y, Color color)
        {
            BitmapBuffer           buffer       = swBmp.LockBuffer(BitmapBufferAccessMode.Write);
            BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);

            if (x >= 0 && x < bufferLayout.Width && y >= 0 && y < bufferLayout.Height)
            {
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *bufferData, out uint capacity);

                    bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 0] = color.B;
                    bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 1] = color.G;
                    bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 2] = color.R;
                    bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 3] = color.A;
                }
            }
            buffer.Dispose();
        }
Example #27
        unsafe AudioFrame GenerateAudioData(byte[] readedData, uint audioDataLength)
        {
            AudioFrame frame = new Windows.Media.AudioFrame((uint)audioDataLength);

            using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    for (int i = 0; i < audioDataLength; i++)
                    {
                        dataInBytes[i] = readedData[i];
                    }
                }

            return(frame);
        }
        unsafe private void ProcessFrameOutput(AudioFrame frame)
        {
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    byte *dataInBytes;
                    uint  capacityInBytes;


                    // Get the buffer from the AudioFrame
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                    for (int i = 0; i < capacityInBytes / sizeof(float); i++)
                    {
                        dataInFloat[i] = *((float *)dataInBytes + i);
                    }
                    SendHello(capacityInBytes);
                }
        }
Example #29
        public unsafe Color GetPixelColor(SoftwareBitmap swBmp, int x, int y)
        {
            BitmapBuffer           buffer       = swBmp.LockBuffer(BitmapBufferAccessMode.Read);
            BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
            Color color = new Color();

            if (x >= 0 && x < bufferLayout.Width && y >= 0 && y < bufferLayout.Height)
            {
                using (IMemoryBufferReference reference = buffer.CreateReference())
                {
                    ((IMemoryBufferByteAccess)reference).GetBuffer(out byte *bufferData, out uint capacity);

                    color.B = bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 0];
                    color.G = bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 1];
                    color.R = bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 2];
                    color.A = bufferData[bufferLayout.StartIndex + bufferLayout.Stride * y + 4 * x + 3];
                }
            }
            buffer.Dispose();
            return(color);
        }
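
Examples #26 and #29 index the pixel buffer with a 4-bytes-per-pixel BGRA layout, so the SoftwareBitmap must be created (or converted) as Bgra8. A usage sketch that round-trips one pixel, assuming both helpers live on the same class:

        // Usage sketch: create a Bgra8 bitmap, write a pixel, and read it back.
        public void PixelRoundTripDemo()
        {
            var bmp = new SoftwareBitmap(BitmapPixelFormat.Bgra8, 64, 64, BitmapAlphaMode.Premultiplied);

            SetPixelColor(bmp, 10, 10, Colors.Red);
            Color readBack = GetPixelColor(bmp, 10, 10);   // expected to equal Colors.Red
        }
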
Example #30
        unsafe public void ProcessFrame(ProcessAudioFrameContext context)
        {
            AudioFrame inputFrame = context.InputFrame;

            using (AudioBuffer inputBuffer = inputFrame.LockBuffer(AudioBufferAccessMode.Read))
            using (IMemoryBufferReference inputReference = inputBuffer.CreateReference())
            {
                byte* inputDataInBytes;
                uint inputCapacity;

                ((IMemoryBufferByteAccess)inputReference).GetBuffer(out inputDataInBytes, out inputCapacity);

                float* inputDataInFloat = (float*)inputDataInBytes;

                // Process audio data
                int dataInFloatLength = (int)inputBuffer.Length / sizeof(float);

                for (int i = 0; i < dataInFloatLength; i++)
                {
                    float inputData = inputDataInFloat[i];

                    lock (badLock)
                    {
                        if (propertySet.ContainsKey("InputDataRaw"))
                        {
                            propertySet["InputDataRaw"] = inputData;
                        }
                        else
                        {
                            propertySet.Add("InputDataRaw", inputData);
                        }
                    }

                    if (compositionPropertySet != null)
                    {
                        compositionPropertySet.InsertScalar("InputData", inputData * 500);
                    }
                }
            }
        }
 public IMemoryBufferReferenceEvents(IMemoryBufferReference This)
 {
     this.This = This;
 }