Example no. 1
        public void Play()
        {
            dynamic ctx = _context;

            if (ctx.state == "suspended" || ctx.state == "interrupted")
            {
                ctx.resume();
            }

            // create an empty buffer source (silence)
            _buffer = _context.CreateBuffer(2, BufferSize, _context.SampleRate);

            // create a script processor node which will replace the silence with the generated audio
            _audioNode = _context.CreateScriptProcessor(BufferSize, 0, 2);
            _audioNode.OnAudioProcess = (Action<AudioProcessingEvent>)GenerateSound;

            _circularBuffer.Clear();

            RequestBuffers();
            _finished      = false;
            _source        = _context.CreateBufferSource();
            _source.Buffer = _buffer;
            _source.Loop   = true;
            _source.Connect(_audioNode, 0, 0);
            _source.Start(0);
            _audioNode.Connect(_context.Destination, 0, 0);
        }
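
The GenerateSound callback wired up above is not shown in these examples. A minimal sketch of what such a handler might look like, assuming dynamic access to the underlying JavaScript event (as Play() already does with the context) and an assumed _circularBuffer.Read(float[], int, int) signature; it copies buffered, interleaved samples into the two output channels:

        // Sketch only: fill both output channels from the circular buffer,
        // padding with silence when not enough samples are available.
        private void GenerateSound(AudioProcessingEvent e)
        {
            dynamic evt = e; // raw JS event, as with the context in Play()
            dynamic left  = evt.outputBuffer.getChannelData(0);
            dynamic right = evt.outputBuffer.getChannelData(1);

            var samples = new float[BufferSize * 2];                        // interleaved stereo
            var read    = _circularBuffer.Read(samples, 0, samples.Length); // assumed signature

            for (var i = 0; i < BufferSize; i++)
            {
                left[i]  = i * 2 < read     ? samples[i * 2]     : 0f;
                right[i] = i * 2 + 1 < read ? samples[i * 2 + 1] : 0f;
            }

            // ask the producer for more data so the next callback has samples ready
            RequestBuffers();
        }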
        public void Pause()
        {
            if (_source != null)
            {
                _source.Stop(0);
                _source.Disconnect(0);
            }
            _source = null;

            if (_audioNode != null)
            {
                _audioNode.Disconnect(0);
            }
            _audioNode = null;
        }
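
These examples also rely on a CircularSampleBuffer that is not shown. A minimal, self-contained sketch of such a ring buffer for float samples, with the Clear() call used above plus assumed Read/Write methods (not the library's actual implementation):

    // Minimal ring buffer for audio samples (sketch under the assumptions above).
    public class CircularSampleBuffer
    {
        private readonly float[] _buffer;
        private int _writePos;
        private int _readPos;

        public int Count { get; private set; }

        public CircularSampleBuffer(int size)
        {
            _buffer = new float[size];
        }

        public void Clear()
        {
            _readPos = 0;
            _writePos = 0;
            Count = 0;
        }

        public int Write(float[] data, int offset, int count)
        {
            var written = 0;
            while (written < count && Count < _buffer.Length)
            {
                _buffer[_writePos] = data[offset + written++];
                _writePos = (_writePos + 1) % _buffer.Length;
                Count++;
            }
            return written;
        }

        public int Read(float[] data, int offset, int count)
        {
            var read = 0;
            while (read < count && Count > 0)
            {
                data[offset + read++] = _buffer[_readPos];
                _readPos = (_readPos + 1) % _buffer.Length;
                Count--;
            }
            return read;
        }
    }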
Example no. 3
        public void Open()
        {
            _finished = false;

            _circularBuffer = new CircularSampleBuffer(BufferSize * BufferCount);

            JsContext.JsCode("window.AudioContext = window.AudioContext || window.webkitAudioContext");
            _context = new AudioContext();

            _latency = (BufferSize * 1000) / (2 * _context.sampleRate);
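            // Worked example with assumed values (BufferSize = 4096, sampleRate = 44100 Hz):
            //   _latency = (4096 * 1000) / (2 * 44100) ≈ 46 ms of buffered audio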

            // create an empty buffer source (silence)
            _buffer = _context.createBuffer(2, BufferSize, _context.sampleRate);

            // create a script processor node which will replace the silence with the generated audio
            _audioNode = _context.createScriptProcessor(BufferSize, 0, 2);
            _audioNode.onaudioprocess = GenerateSound;

            OnReadyChanged(true);
        }
        public void Open()
        {
            _finished = false;

            _circularBuffer = new CircularSampleBuffer(BufferSize * BufferCount);

            JsContext.JsCode("window.AudioContext = window.AudioContext || window.webkitAudioContext");
            _context = new AudioContext();

            // possible fix for Web Audio in iOS 9 (issue #4)
            dynamic ctx = _context;

            if (ctx.state == "suspended")
            {
                EventListener resume = null;
                resume = e =>
                {
                    ctx.resume();
                    HtmlContext.window.setTimeout(() =>
                    {
                        if (ctx.state == "running")
                        {
                            HtmlContext.document.body.removeEventListener("touchend", resume, false);
                        }
                    }, 0);
                };
                HtmlContext.document.body.addEventListener("touchend", resume, false);
            }

            // create an empty buffer source (silence)
            _buffer = _context.createBuffer(2, BufferSize, _context.sampleRate);

            // create a script processor node which will replace the silence with the generated audio
            _audioNode = _context.createScriptProcessor(BufferSize, 0, 2);
            _audioNode.onaudioprocess = GenerateSound;

            Ready();
        }
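
Taken together, a caller would presumably open the player once and then toggle playback with Play() and Pause(). A hedged usage sketch (the declaring type is not shown in these examples, so the AudioPlayer name is assumed):

        // Hypothetical usage of the methods shown above.
        var player = new AudioPlayer(); // assumed name of the declaring type
        player.Open();                  // create the AudioContext, buffers and script processor
        player.Play();                  // start the looping silent source that drives GenerateSound
        // ... later, e.g. from a pause button:
        player.Pause();                 // stop and disconnect the source and the processor node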