        private void FrameInputNode_QuantumStarted(
            AudioFrameInputNode sender,
            FrameInputNodeQuantumStartedEventArgs args)
        {
            if (_audioDataCurrentPosition == 0)
            {
                _fileOutputNode.Start();
            }

            // regardless of how many samples the graph requests, fill one full quantum
            var frame = ProcessOutputFrame(_audioGraph.SamplesPerQuantum);

            _frameInputNode.AddFrame(frame);

            if (_finished)
            {
                _fileOutputNode?.Stop();
                _audioGraph?.Stop();
            }

            // don't report progress on every quantum
            if (_audioGraph == null)
            {
                return;
            }
            if (_audioGraph.CompletedQuantumCount % 100 == 0)
            {
                var dProgress =
                    (double)100 *
                    _audioDataCurrentPosition /
                    _audioData.LengthSamples();
                _ioProgress?.Report(dProgress);
            }
        }
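
        // ProcessOutputFrame is called above but not shown in this fragment. The sketch
        // below only illustrates the usual AudioFrameInputNode pattern, it is not the
        // project's actual code: it assumes mono float samples, that _audioData exposes
        // LengthSamples() (used above) plus a hypothetical GetOutputSample(int) accessor,
        // that unsafe code is enabled, and that the IMemoryBufferByteAccess interface
        // declared after the #endregion below is available.
        private unsafe AudioFrame ProcessOutputFrame(int requiredSamples)
        {
            // one 32-bit float per sample
            var bufferSizeInBytes = (uint)(requiredSamples * sizeof(float));
            var frame = new AudioFrame(bufferSizeInBytes);

            using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (var reference = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)reference).GetBuffer(
                    out byte* dataInBytes,
                    out uint capacityInBytes);

                var dataInFloat = (float*)dataInBytes;

                for (var i = 0; i < requiredSamples; i++)
                {
                    if (_audioDataCurrentPosition < _audioData.LengthSamples())
                    {
                        // copy the next sample into the frame
                        dataInFloat[i] = _audioData.GetOutputSample(_audioDataCurrentPosition);
                        _audioDataCurrentPosition++;
                    }
                    else
                    {
                        // pad the last quantum with silence and signal completion
                        dataInFloat[i] = 0f;
                        _finished = true;
                    }
                }
            }

            return frame;
        }
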
        //private unsafe void ProcessFrameOutput(AudioFrame frame)
        //{
        //    using (var buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
        //    using (var reference = buffer.CreateReference())
        //    {
        //        byte* dataInBytes;
        //        uint capacityInBytes;
        //        float* dataInFloat;

        //        // Get the buffer from the AudioFrame
        //        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

        //        // calculate the peak level of this quantum
        //        dataInFloat = (float*)dataInBytes;
        //        float max = 0;
        //        for (int i = 0; i < _audioGraph.SamplesPerQuantum; i++)
        //        {
        //            max = Math.Max(Math.Abs(dataInFloat[i]), max);
        //        }

        //        ProzessEvent(_detector.Calculate(max));
        //    }
        //}
        #endregion
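
        // Both the commented-out ProcessFrameOutput above and the ProcessOutputFrame sketch
        // reach the raw sample buffer by casting an IMemoryBufferReference to the COM
        // interface below. This is the declaration documented for
        // Windows.Foundation.IMemoryBufferByteAccess (it may already live elsewhere in the
        // project); it needs using System.Runtime.InteropServices.
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        private unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }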

        private void ProzessEvent(MicrophoneInput a)
        {
            if (a.State == 1)
            {
                Debug.WriteLine(a.Value + " : " + a.State);
            }


            if (a.State == 1 && _isRecoding == false)
            {
                Debug.WriteLine("STARTED RECORDING!");
                _fileOutputNode.Reset();
                _fileOutputNode.Start();
                _isRecoding = true;
            }

            a.IsRecording = _isRecoding;
            _interacor.OnNoiseDetected(a);

            if (_isRecoding)
            {
                _count++;
                _lastRecoring.Add(a.State);
            }
            // record noise for at least 5 seconds before deciding whether to stop
            if (_count < FiveSecounds || !_isRecoding)
            {
                return;
            }
            // if there was enough sound during these 5 seconds, keep recording
            if (_lastRecoring.Fold((b) => b.Sum()) > BufferTol && _count < 2 * FiveSecounds)
            {
                return;
            }
            _count      = 0;
            _isRecoding = false;
            _fileOutputNode.Stop();

            SaveAndCleanGraph();
        }
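
        // SaveAndCleanGraph is called above but not shown in this fragment. A minimal
        // sketch of what it could do, assuming the recorded file should be finalized and
        // the graph released so a fresh one can be built for the next recording;
        // FinalizeAsync, Stop and Dispose are real AudioFileOutputNode/AudioGraph members,
        // while the Clear() call on _lastRecoring is an assumption about that buffer's API.
        private async void SaveAndCleanGraph()
        {
            _lastRecoring.Clear();

            // flush the recorded samples to the output file
            await _fileOutputNode.FinalizeAsync();

            // release the graph; assume it is rebuilt before the next recording session
            _audioGraph.Stop();
            _audioGraph.Dispose();
            _audioGraph = null;
        }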
        private async Task<CreateAudioFileOutputNodeResult> SaveAudioToFile(
            StorageFile file,
            IProgress<string> status)
        {
            _finished = false;
            status.Report("Saving audio to file");

            var mediaEncodingProfile =
                CreateMediaEncodingProfile(file);

            if (!_audioData.IsStereo && mediaEncodingProfile.Audio != null)
            {
                mediaEncodingProfile.Audio.ChannelCount = 1;
            }

            // Initialize FileOutputNode
            var result =
                await _audioGraph.CreateFileOutputNodeAsync(file, mediaEncodingProfile);

            if (result.Status != AudioFileNodeCreationStatus.Success)
            {
                return result;
            }

            _fileOutputNode = result.FileOutputNode;
            _fileOutputNode.Stop();

            // Initialize FrameInputNode and connect it to fileOutputNode
            _frameInputNode = _audioGraph.CreateFrameInputNode(
                // EncodingProperties differ from those of the input file
                _fileOutputNode.EncodingProperties
                //audioEncodingProperties
                );

            _frameInputNode.AddOutgoingConnection(_fileOutputNode);
            _frameInputNode.Stop();

            // Add a handler which will transfer every audioData sample to an audio frame
            _frameInputNode.QuantumStarted += FrameInputNode_QuantumStarted;

            _audioDataCurrentPosition = 0;

            // Start the process that writes the audio file frame by frame
            // and raises QuantumStarted events
            _audioGraph.Start();
            // don't start fileOutputNode yet because it will record zeros

            // frameInputNode was created in the stopped state, so start it now
            _frameInputNode.Start();

            // didn't find a better way to wait until the file has been written
            while (!_finished)
            {
                await Task.Delay(50);
            }

            // by now the audioData samples have ended and the audioGraph has already stopped
            await _fileOutputNode.FinalizeAsync();

            // clear status and progress reporting
            status.Report("");
            _ioProgress.Report(0);

            return result;
        }
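
        // CreateMediaEncodingProfile is called at the top of SaveAudioToFile but not shown
        // in this fragment. A typical implementation, following the pattern from Microsoft's
        // AudioCreation sample, derives the profile from the file extension; treat this as a
        // sketch rather than the project's exact code (needs using Windows.Media.MediaProperties).
        private MediaEncodingProfile CreateMediaEncodingProfile(StorageFile file)
        {
            switch (file.FileType.ToLowerInvariant())
            {
                case ".wav":
                    return MediaEncodingProfile.CreateWav(AudioEncodingQuality.High);
                case ".mp3":
                    return MediaEncodingProfile.CreateMp3(AudioEncodingQuality.High);
                case ".wma":
                    return MediaEncodingProfile.CreateWma(AudioEncodingQuality.High);
                default:
                    throw new ArgumentException("Unsupported output format", nameof(file));
            }
        }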