Example #1
        public static async void AttachToDocument(FragmentShader vs)
        {
            Native.body.style.margin = "0px";
            Native.body.style.backgroundColor = "blue";


            var mAudioContext = new AudioContext();
            var gl = new WebGLRenderingContext(alpha: true);

            if (gl == null)
            {

                new IHTMLPre {
                    // https://code.google.com/p/chromium/issues/detail?id=294207
                    "Rats! WebGL hit a snag. \n WebGL: Unavailable.\n GPU process was unable to boot. \n restart chrome.",

                    // chrome sends us to about:blank?
                    //new IHTMLAnchor {

                    //	target = "_blank",

                    //	href = "about:gpu", innerText = "about:gpu",

                    //	// http://tirania.org/blog/archive/2009/Jul-27-1.html
                    //	//onclick += de
                    //}
                    //.With(a => {  a.onclick += e => { e.preventDefault();  Native.window.open("about:gpu"); }; } )


                }.AttachToDocument();
                return;
            }

            Native.body.Clear();
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;

            var c = gl.canvas.AttachToDocument();

            #region oncontextlost
            gl.oncontextlost +=
                e =>
                {
                    //[12144:10496:0311 / 120850:ERROR: gpu_watchdog_thread.cc(314)] : The GPU process hung. Terminating after 10000 ms.
                    //   GpuProcessHostUIShim: The GPU process crashed!
                    gl.canvas.Orphanize();

                    new IHTMLPre {
                        // https://code.google.com/p/chromium/issues/detail?id=294207
                        @"Rats! WebGL hit a snag.
oncontextlost.
The GPU process hung. Terminating. 
check chrome://gpu for log messages.  
do we have a stack trace?

" + new { e.statusMessage } ,

                        // chrome sends us to about:blank?
                        //new IHTMLAnchor {

                        //	target = "_blank",

                        //	href = "about:gpu", innerText = "about:gpu",

                        //	// http://tirania.org/blog/archive/2009/Jul-27-1.html
                        //	//onclick += de
                        //}
                        //.With(a => {  a.onclick += e => { e.preventDefault();  Native.window.open("about:gpu"); }; } )


                    }.AttachToDocument();
                };
            #endregion


            #region onresize
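            // jsc async-event pattern: the do/while body runs once immediately,
            // then again each time the awaited onresize event fires.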
            new { }.With(
                async delegate
                {
                    do
                    {
                        c.width = Native.window.Width;
                        c.height = Native.window.Height;
                        c.style.SetSize(c.width, c.height);
                    }
                    while (await Native.window.async.onresize);
                }
            );
            #endregion




            #region CaptureMouse
            var mMouseOriX = 0;
            var mMouseOriY = 0;
            var mMousePosX = 0;
            var mMousePosY = 0;

            c.onmousedown += ev =>
            {
                mMouseOriX = ev.CursorX;
                mMouseOriY = c.height - ev.CursorY;
                mMousePosX = mMouseOriX;
                mMousePosY = mMouseOriY;

                ev.CaptureMouse();
            };

            c.onmousemove += ev =>
            {
                if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
                {
                    mMousePosX = ev.CursorX;
                    // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\InputMouseByIq\InputMouseByIq\Shaders\Program.frag
                    //mMousePosY = ev.CursorY;
                    mMousePosY = c.height - ev.CursorY;
                }
            };


            c.onmouseup += ev =>
            {
                mMouseOriX = -Math.Abs(mMouseOriX);
                mMouseOriY = -Math.Abs(mMouseOriY);
            };
            #endregion

            var mEffect = new ChromeShaderToyColumns.Library.ShaderToy.Effect(
                mAudioContext,
                gl,

                callback: delegate
                {
                    new IHTMLPre { "at callback" }.AttachToDocument();

                },
                obj: null,
                forceMuted: false,
                forcePaused: false
            );
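
            // MakeHeader_Image builds the uniform header and the GLSL footer;
            // NewShader_Image then compiles header + fragment shader + footer
            // into a program (see the EffectPass examples below).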

            mEffect.mPasses[0].MakeHeader_Image();
            mEffect.mPasses[0].NewShader_Image(vs);

            var sw = Stopwatch.StartNew();
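            // render loop: awaiting Native.window.async.onframe presumably maps to
            // requestAnimationFrame, so Paint_Image runs once per display frame.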
            do
            {
                mEffect.mPasses[0].Paint_Image(
                    sw.ElapsedMilliseconds / 1000.0f,

                    mMouseOriX,
                    mMouseOriY,
                    mMousePosX,
                    mMousePosY


                );

                // flush queued GL commands to the GPU; the frame is presented when control returns to the browser
                gl.flush();

            }
            while (await Native.window.async.onframe);

        }
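A minimal caller for the method above might look like this (a sketch; Shaders.ProgramFragmentShader is the jsc-generated fragment shader type used by the later examples):

        public Application(IApp page)
        {
            // hand the jsc-generated fragment shader to the host method above
            AttachToDocument(new Shaders.ProgramFragmentShader());
        }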
Example #2
            // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
            public EffectPass(
                AudioContext wa = null,
                gl gl = null,

                string precission = null,
                bool supportDerivatives = false,
                RefreshTexturThumbailDelegate callback = null,
                object obj = null,
                bool forceMuted = false,
                bool forcePaused = false,


                // createQuadVBO
                // ARRAY_BUFFER
                WebGLBuffer quadVBO = null,
                GainNode outputGainNode = null
                )
            {
                //new IHTMLPre { "enter EffectPass" }.AttachToDocument();


                // frame counter; incremented at the end of each Paint_Image call
                var mFrame = 0;

                this.MakeHeader_Image = delegate
                {
                    #region MakeHeader_Image
                    //new IHTMLPre { "enter MakeHeader_Image" }.AttachToDocument();


                    var header = precission;
                    var headerlength = 3;

                    if (supportDerivatives) { header += "#extension GL_OES_standard_derivatives : enable\n"; headerlength++; }

                    header += "uniform vec3      iResolution;\n" +
                              "uniform float     iGlobalTime;\n" +
                              "uniform float     iChannelTime[4];\n" +
                              "uniform vec4      iMouse;\n" +
                              "uniform vec4      iDate;\n" +
                              "uniform float     iSampleRate;\n" +
                              "uniform vec3      iChannelResolution[4];\n";

                    // not used by the hosted shader, but by our injected middle-layer code on the gpu.
                    // gpu code injection. first take.
                    //"uniform float     fZoom;\n";

                    headerlength += 7;

                    for (var i = 0; i < mInputs.Length; i++)
                    {
                        var inp = mInputs[i];

                        // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyVRCardboardGrid\ChromeShaderToyVRCardboardGrid\Application.cs

                        //if (inp != null && inp.mInfo.mType == "cubemap")
                        if (inp is samplerCube)
                        {
                            new IHTMLPre { "add MakeHeader_Image samplerCube" }.AttachToDocument();
                            header += "uniform samplerCube iChannel" + i + ";\n";
                        }
                        else
                        {
                            //new IHTMLPre { "add MakeHeader_Image sampler2D" }.AttachToDocument();
                            header += "uniform sampler2D iChannel" + i + ";\n";
                        }

                        headerlength++;
                    }



                    // error CreateShader {{ infoLog = ERROR: 0:250: 'assign' :  l-value required "gl_FragCoord" (can't modify gl_FragCoord)
                    //				ERROR: 0:251: 'assign' :  l-value required "gl_FragCoord" (can't modify gl_FragCoord)
                    //}}
                    // error CreateShader {{ infoLog = ERROR: 0:253: '=' :  cannot convert from 'FragCoord mediump 4-component vector of float' to 'highp 2-component vector of float'

                    var mImagePassFooter = @"
void main( void )
{
	vec4 color = gl_FragColor;


color.a = 1.0;

	mainImage( color, gl_FragCoord.xy );
					

	gl_FragColor = color;
}
";
                    #endregion
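
                    // For reference, the smallest shader this footer can host defines mainImage
                    // with the standard shadertoy signature (a sketch, using uniforms declared
                    // in the header above):
                    //
                    //   void mainImage( out vec4 fragColor, in vec2 fragCoord )
                    //   {
                    //       vec2 uv = fragCoord.xy / iResolution.xy;
                    //       fragColor = vec4(uv, 0.5 + 0.5 * sin(iGlobalTime), 1.0);
                    //   }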


                    this.NewShader_Image = (fs) =>
                    {
                        #region NewShader_Image
                        //new IHTMLPre { "enter NewShader_Image" }.AttachToDocument();
                        var shaderCode = fs.ToString();

                        // pass-through vertex shader; all the work happens in the fragment shader
                        var vsSource = "attribute vec2 pos; void main() { gl_Position = vec4(pos.xy,0.0,1.0); }";

                        var fsSource = header + shaderCode + mImagePassFooter;

                        this.xCreateShader = CreateShader(gl, vsSource, fsSource, false);

                        #endregion

                        var vbo = new WebGLBuffer(gl);



                        #region calledby
                        //EffectPass.Paint_Image(effect.js:724)
                        //EffectPass.Paint(effect.js:1038)
                        //Effect.Paint(effect.js:1247)
                        //renderLoop2(pgWatch.js:404)
                        //ShaderToy.startRendering(pgWatch.js:420)
                        //watchInit(pgWatch.js:1386)
                        //onload(Xls3WS: 78)
                        #endregion
                        this.Paint_Image = (time, mouseOriX, mouseOriY, mousePosX, mousePosY, zoom) =>
                        {
                            var mProgram = xCreateShader.mProgram;


                            var xres = gl.canvas.width * zoom;
                            var yres = gl.canvas.height * zoom;

                            #region Paint_Image

                            //new IHTMLPre { "enter Paint_Image" }.AttachToDocument();

                            // this is enough to do pip to bottom left, no need to adjust vertex positions even?
                            gl.viewport(0, 0, (int)xres, (int)yres);

                            // useProgram: program not valid
                            gl.useProgram(mProgram);


                            if (this.ProgramSelected != null)
                                this.ProgramSelected(mProgram);



                            // uniform4fv
                            var uniform4fv_mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };

                            // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
                            //gl.getUniformLocation(mProgram, "fZoom").With(fZoom => gl.uniform1f(fZoom, zoom));


                            var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) gl.uniform1f(l2, time);
                            var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) gl.uniform3f(l3, xres, yres, 1.0f);
                            var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) gl.uniform4fv(l4, uniform4fv_mouse);

                            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\SimpleDigitalClock\SimpleDigitalClock\Application.cs
                            // uniform vec4      iDate;                 // (year, month, day, time in seconds)
                            // http://bytes.com/topic/c-sharp/answers/258829-time-elapsed-since-midnight-c
                            var now = DateTime.Now;
                            //var sinceMidnight = now - DateTime.Today;
                            var sinceMidnight = now - now.Date;

                            var uniform4fv_dates = new float[] { now.Year, now.Month, now.Day, (float)sinceMidnight.TotalSeconds };
                            var l7 = gl.getUniformLocation(mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, uniform4fv_dates);

                            //var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);

                            var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) gl.uniform1i(ich0, 0);
                            var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) gl.uniform1i(ich1, 1);
                            var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) gl.uniform1i(ich2, 2);
                            var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) gl.uniform1i(ich3, 3);


                            // what if there are other textures too?
                            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeWebGLFrameBuffer\ChromeWebGLFrameBuffer\Application.cs

                            //for (var i = 0; i < mInputs.Length; i++)
                            //{
                            //	var inp = mInputs[i];

                            //	gl.activeTexture((uint)(gl.TEXTURE0 + i));

                            //	if (inp == null)
                            //	{
                            //		gl.bindTexture(gl.TEXTURE_2D, null);
                            //	}
                            //}

                            var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
                            var l5 = gl.getUniformLocation(mProgram, "iChannelTime");
                            if (l5 != null) gl.uniform1fv(l5, times);

                            var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
                            var l8 = gl.getUniformLocation(mProgram, "iChannelResolution");
                            if (l8 != null) gl.uniform3fv(l8, resos);




                            // attribute location of the quad's vertex positions
                            var vec2pos = (uint)gl.getAttribLocation(mProgram, "pos");
                            //gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);
                            gl.bindBuffer(gl.ARRAY_BUFFER, vbo);


                            #region vertices
                            // full-screen quad in clip space (+y up), as two triangles
                            float left = -1.0f;
                            float bottom = -1.0f;
                            float right = 1.0f;
                            float top = 1.0f;

                            var fvertices =
                                new float[]
                                {
                                    // first triangle
                                    left, bottom,
                                    right, bottom,
                                    left, top,

                                    // second triangle
                                    right, bottom,
                                    right, top,
                                    left, top
                                };

                            //var vertices = new Float32Array(fvertices);
                            #endregion
                            //gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
                            gl.bufferData(gl.ARRAY_BUFFER, fvertices, gl.STATIC_DRAW);

                            gl.vertexAttribPointer(vec2pos, 2, gl.FLOAT, false, 0, 0);
                            gl.enableVertexAttribArray(vec2pos);

                            // GL ERROR :GL_INVALID_OPERATION : glDrawArrays: attempt to render with no buffer attached to enabled attribute 1
                            gl.drawArrays(gl.TRIANGLES, 0, 6);


                            // first frame is now visible
                            gl.disableVertexAttribArray(vec2pos);
                            gl.bindBuffer(gl.ARRAY_BUFFER, null);
                            #endregion

                            mFrame++;

                        };
                    };
                };

            }
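The delegates wired up by this constructor are consumed in a fixed order, as Example #1 shows (a sketch; zoom is the trailing parameter of the Paint_Image delegate above):

            pass.MakeHeader_Image();        // build the uniform header + GLSL footer
            pass.NewShader_Image(fs);       // compile vertex + header + shader + footer
            // then, once per frame:
            pass.Paint_Image(time, mouseOriX, mouseOriY, mousePosX, mousePosY, zoom);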
Example #3
            public Effect(
                AudioContext ac,
                WebGLRenderingContext gl,
                RefreshTexturThumbailDelegate callback,
                object obj,
                bool forceMuted,
                bool forcePaused)
            {
                //new IHTMLPre { "enter Effect" }.AttachToDocument();

                var ext = gl.getExtension("OES_standard_derivatives");
                var supportsDerivatives = (ext != null);

                //if (supportsDerivatives) gl.hint(ext.FRAGMENT_SHADER_DERIVATIVE_HINT_OES, gl.NICEST);

                var ext2 = gl.getExtension("OES_texture_float");
                this.mSupportTextureFloat = (ext2 != null);

                var precision = DetermineShaderPrecission(gl);

                //this.mGainNode = ac.createGain();
                //this.mGainNode.connect(ac.destination);

                this.mQuadVBO = createQuadVBO(gl);


                this.mPasses = new EffectPass[2];
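                // presumably one image pass and one sound pass, mirroring shadertoy's effect.js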

                for (var i = 0; i < 2; i++)
                {
                    this.mPasses[i] = new EffectPass(
                        ac,
                        gl,
                        precision,
                        supportsDerivatives,
                        callback,
                        obj,
                        forceMuted,
                        forcePaused,
                        this.mQuadVBO,
                        this.mGainNode
                        );
                }
            }
Example #4
        // asus has 2015 that works?



        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://www.sitepoint.com/using-fourier-transforms-web-audio-api/

            new { }.With(
                async delegate
                {
                    var start = new IHTMLButton { "connect" };
                    var stop = new IHTMLButton { "disconnect" };

                    var a = new AudioContext();
                    var o = a.createOscillator();

                    o.start(0);

                    o.frequency.value = 440;


                    o.type = OscillatorType.sawtooth;

                    o.frequency.valueInput = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 1, max = 2000 }.AttachToDocument();


                    new IHTMLLabel {
                        () =>
                            $"frequency: { o.frequency.value }Hz"
                            + $" type: { o.type }"
                    }.AttachToDocument();




                    //.onchange +=
                    //    eee =>
                    //    {
                    //        var i = ((IHTMLInput)eee.Element);

                    //        o.frequency.value = i.valueAsNumber;
                    //    };

                    new IHTMLHorizontalRule { }.AttachToDocument();

                    new IHTMLButton { nameof(OscillatorType.sawtooth) }.AttachToDocument().onclick += delegate { o.type = OscillatorType.sawtooth; };
                    new IHTMLButton { nameof(OscillatorType.sine) }.AttachToDocument().onclick += delegate { o.type = OscillatorType.sine; };
                    new IHTMLButton { nameof(OscillatorType.square) }.AttachToDocument().onclick += delegate { o.type = OscillatorType.square; };
                    new IHTMLButton { nameof(OscillatorType.triangle) }.AttachToDocument().onclick += delegate { o.type = OscillatorType.triangle; };

                    new IHTMLHorizontalRule { }.AttachToDocument();

                    //s.Add()

                    new IHTMLButton { "Beep()" }.AttachToDocument().onclick +=
                        async delegate
                        {
                            //Console.Beep(frequency: 400, duration: 300);

                            o.frequency.value = 400;

                            o.type = OscillatorType.square;


                            o.connect(o.context.destination);

                            await Task.Delay(300);

                            o.disconnect();
                        };

                    new IHTMLButton { "Console.Beep()" }.AttachToDocument().onclick +=
                       delegate
                      {
                          Console.Beep();

                          //Console.Beep(frequency: 400, duration: 300);


                      };

                    new IHTMLButton { () => $"Console.Beep({ o.frequency.value }Hz, 300)" }.AttachToDocument().onclick +=
                      delegate
                      {
                          Console.Beep(frequency: (int)o.frequency.value, duration: 300);
                      };

                    retry:


                    var ee = await start.AttachToDocument().async.onclick;
                    start.Orphanize();

                    o.connect(a.destination);

                    var e = await stop.AttachToDocument().async.onclick;
                    stop.Orphanize();

                    o.disconnect();


                    goto retry;
                }
            );


        }
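The Beep() handler above amounts to a reusable connect / delay / disconnect pattern (a sketch; BeepAsync is a hypothetical helper name, and OscillatorNode is assumed to be the type returned by createOscillator):

            static async Task BeepAsync(OscillatorNode o, int frequency = 400, int duration = 300)
            {
                o.frequency.value = frequency;
                o.type = OscillatorType.square;

                o.connect(o.context.destination);   // audible while connected
                await Task.Delay(duration);         // ring for the requested time
                o.disconnect();                     // silent again
            }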
Example #5
            // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
            public EffectPass(
                AudioContext wa = null,
                gl gl = null,

                string precission = null,
                bool supportDerivatives = false,
                RefreshTexturThumbailDelegate callback = null,
                object obj = null,
                bool forceMuted = false,
                bool forcePaused = false,


                // createQuadVBO
                // ARRAY_BUFFER
                WebGLBuffer quadVBO = null,
                GainNode outputGainNode = null
                )
            {
                //new IHTMLPre { "enter EffectPass" }.AttachToDocument();


                // frame counter; incremented at the end of each Paint_Image call
                var mFrame = 0;






                // http://math.hws.edu/eck/cs424/notes2013/19_GLSL.html
                // http://stackoverflow.com/questions/10079368/how-would-i-do-environment-reflection-in-webgl-without-using-a-library-like-thre
                this.MakeHeader_Image = delegate
                {
                    #region MakeHeader_Image
                    //new IHTMLPre { "enter MakeHeader_Image" }.AttachToDocument();


                    var header = precission;
                    var headerlength = 3;

                    if (supportDerivatives) { header += "#extension GL_OES_standard_derivatives : enable\n"; headerlength++; }

                    header += "uniform vec3      iResolution;\n" +
                              "uniform float     iGlobalTime;\n" +
                              "uniform float     iChannelTime[4];\n" +
                              "uniform vec4      iMouse;\n" +
                              "uniform vec4      iDate;\n" +
                              "uniform float     iSampleRate;\n" +
                              "uniform vec3      iChannelResolution[4];\n";

                    // not used by the hosted shader, but by our injected middle-layer code on the gpu.
                    // gpu code injection. first take.
                    //"uniform float     fZoom;\n";

                    headerlength += 7;

                    for (var i = 0; i < mInputs.Length; i++)
                    {
                        var inp = mInputs[i];

                        // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyVRCardboardGrid\ChromeShaderToyVRCardboardGrid\Application.cs

                        //if (inp != null && inp.mInfo.mType == "cubemap")
                        if (inp is samplerCube)
                        {
                            //new IHTMLPre { "add MakeHeader_Image samplerCube" }.AttachToDocument();

                            this.tex = new WebGLTexture(gl);

                            // http://stackoverflow.com/questions/10079368/how-would-i-do-environment-reflection-in-webgl-without-using-a-library-like-thre
                            // view-source:https://www.shadertoy.com/js/effect.js

                            // um can there be only one samplerCube?
                            gl.activeTexture(gl.TEXTURE0);
                            //gl.enable(gl.TEXTURE_CUBE_MAP);
                            gl.bindTexture(gl.TEXTURE_CUBE_MAP, tex);
                            //gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_WRAP_S, (int)gl.CLAMP_TO_EDGE);
                            //gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_WRAP_T, (int)gl.CLAMP_TO_EDGE);
                            gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MIN_FILTER, (int)gl.LINEAR);
                            gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MAG_FILTER, (int)gl.LINEAR);

                            //var cube0 = new IHTMLImage[] {
                            //        new HTML.Images.FromAssets.cube02_0(),
                            //        new HTML.Images.FromAssets.cube02_1(),
                            //        new HTML.Images.FromAssets.cube02_2(),
                            //        new HTML.Images.FromAssets.cube02_3(),
                            //        new HTML.Images.FromAssets.cube02_4(),
                            //        new HTML.Images.FromAssets.cube02_5()
                            //};

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            //cube0.WithEachIndex(
                            //    (pendingimg, index) =>
                            //    {
                            //        pendingimg.InvokeOnComplete(
                            //            img =>
                            //            {
                            //                gl.bindTexture(gl.TEXTURE_CUBE_MAP, tex);
                            //                //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
                            //                gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                            //                gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);

                            //                new IHTMLPre { "add MakeHeader_Image samplerCube TEXTURE_CUBE_MAP_POSITIVE_X + " + new { index } }.AttachToDocument();

                            //                if (index == 5)
                            //                {
                            //                    new IHTMLPre { "add MakeHeader_Image samplerCube activeTexture  " }.AttachToDocument();

                            //                    //  samplerCube iChannel0; = 0 = TEXTURE0
                            //                }

                            //                // https://code.google.com/p/opengles-book-samples/source/browse/trunk/WebGL/Chapter_9/Simple_TextureCubemap/Simple_TextureCubemap.html?r=5
                            //                // http://stackoverflow.com/questions/31665132/gl-invalid-operation-caused-by-samplercube
                            //                // http://forum.processing.org/two/discussion/7039/samplercube-and-ambient-reflection


                            //            }
                            //        );
                            //    }
                            //);


                            header += "uniform samplerCube iChannel" + i + ";\n";
                        }
                        else
                        {
                            //new IHTMLPre { "add MakeHeader_Image sampler2D" }.AttachToDocument();
                            header += "uniform sampler2D iChannel" + i + ";\n";
                        }

                        // don't need it?
                        headerlength++;
                    }



                    // error CreateShader {{ infoLog = ERROR: 0:250: 'assign' :  l-value required "gl_FragCoord" (can't modify gl_FragCoord)
                    //				ERROR: 0:251: 'assign' :  l-value required "gl_FragCoord" (can't modify gl_FragCoord)
                    //}}
                    // error CreateShader {{ infoLog = ERROR: 0:253: '=' :  cannot convert from 'FragCoord mediump 4-component vector of float' to 'highp 2-component vector of float'

                    var mImagePassFooter = @"
void main( void )
{
	vec4 color = gl_FragColor;


color.a = 1.0;

	mainImage( color, gl_FragCoord.xy );
					

	gl_FragColor = color;
}
";
                    #endregion


                    this.NewShader_Image = (fs) =>
                    {
                        #region NewShader_Image
                        //new IHTMLPre { "enter NewShader_Image" }.AttachToDocument();
                        var shaderCode = fs.ToString();

                        // pass-through vertex shader; all the work happens in the fragment shader
                        var vsSource = "attribute vec2 pos; void main() { gl_Position = vec4(pos.xy,0.0,1.0); }";

                        var fsSource = header + shaderCode + mImagePassFooter;

                        this.xCreateShader = CreateShader(gl, vsSource, fsSource, false);

                        #endregion

                        var vbo = new WebGLBuffer(gl);



                        #region calledby
                        //EffectPass.Paint_Image(effect.js:724)
                        //EffectPass.Paint(effect.js:1038)
                        //Effect.Paint(effect.js:1247)
                        //renderLoop2(pgWatch.js:404)
                        //ShaderToy.startRendering(pgWatch.js:420)
                        //watchInit(pgWatch.js:1386)
                        //onload(Xls3WS: 78)
                        #endregion
                        this.Paint_Image = (time, mouseOriX, mouseOriY, mousePosX, mousePosY, zoom) =>
                        {
                            var mProgram = xCreateShader.mProgram;


                            var xres = gl.canvas.width * zoom;
                            var yres = gl.canvas.height * zoom;

                            #region Paint_Image

                            //new IHTMLPre { "enter Paint_Image" }.AttachToDocument();

                            // this is enough to do pip to bottom left, no need to adjust vertex positions even?
                            gl.viewport(0, 0, (int)xres, (int)yres);

                            // useProgram: program not valid
                            gl.useProgram(mProgram);

                            // uniform4fv
                            var uniform4fv_mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };

                            // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
                            //gl.getUniformLocation(mProgram, "fZoom").With(fZoom => gl.uniform1f(fZoom, zoom));


                            var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) gl.uniform1f(l2, time);
                            var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) gl.uniform3f(l3, xres, yres, 1.0f);
                            var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) gl.uniform4fv(l4, uniform4fv_mouse);

                            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\SimpleDigitalClock\SimpleDigitalClock\Application.cs
                            // uniform vec4      iDate;                 // (year, month, day, time in seconds)
                            // http://bytes.com/topic/c-sharp/answers/258829-time-elapsed-since-midnight-c
                            var now = DateTime.Now;
                            //var sinceMidnight = now - DateTime.Today;
                            var sinceMidnight = now - now.Date;

                            var uniform4fv_dates = new float[] { now.Year, now.Month, now.Day, (float)sinceMidnight.TotalSeconds };
                            var l7 = gl.getUniformLocation(mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, uniform4fv_dates);

                            //var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);

                            //var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) gl.uniform1i(ich0, 0);
                            //var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) gl.uniform1i(ich1, 1);
                            //var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) gl.uniform1i(ich2, 2);
                            //var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) gl.uniform1i(ich3, 3);


                            // what if there are other textures too?
                            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeWebGLFrameBuffer\ChromeWebGLFrameBuffer\Application.cs

                            //for (var i = 0; i < mInputs.Length; i++)
                            //{
                            //	var inp = mInputs[i];

                            //	gl.activeTexture((uint)(gl.TEXTURE0 + i));

                            //	if (inp == null)
                            //	{
                            //		gl.bindTexture(gl.TEXTURE_2D, null);
                            //	}
                            //}

                            var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
                            var l5 = gl.getUniformLocation(mProgram, "iChannelTime");
                            if (l5 != null) gl.uniform1fv(l5, times);

                            var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
                            var l8 = gl.getUniformLocation(mProgram, "iChannelResolution");
                            if (l8 != null) gl.uniform3fv(l8, resos);




                            // attribute location of the quad's vertex positions
                            var vec2pos = (uint)gl.getAttribLocation(mProgram, "pos");
                            //gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);
                            gl.bindBuffer(gl.ARRAY_BUFFER, vbo);


                            #region vertices
                            // full-screen quad in clip space (+y up), as two triangles
                            float left = -1.0f;
                            float bottom = -1.0f;
                            float right = 1.0f;
                            float top = 1.0f;

                            var fvertices =
                                new float[]
                                {
                                    // first triangle
                                    left, bottom,
                                    right, bottom,
                                    left, top,

                                    // second triangle
                                    right, bottom,
                                    right, top,
                                    left, top
                                };

                            #endregion
                            gl.bufferData(gl.ARRAY_BUFFER, fvertices, gl.STATIC_DRAW);

                            gl.vertexAttribPointer(vec2pos, 2, gl.FLOAT, false, 0, 0);
                            gl.enableVertexAttribArray(vec2pos);


                            //var iChannel0 = gl.getUniformLocation(mProgram, "iChannel0");



                            // GL ERROR :GL_INVALID_OPERATION : glDrawArrays: attempt to render with no buffer attached to enabled attribute 1
                            gl.drawArrays(gl.TRIANGLES, 0, 6);


                            // first frame is now visible
                            gl.disableVertexAttribArray(vec2pos);
                            gl.bindBuffer(gl.ARRAY_BUFFER, null);
                            #endregion

                            mFrame++;

                        };
                    };
                };

            }
Example #6
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // how does this work on android?

            // https://www.shadertoy.com/view/MdfSRj#


            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    chrome.runtime.UpdateAvailable += delegate
                    {
                        new chrome.Notification(title: "UpdateAvailable");

                    };


                    Console.WriteLine("awaiting Launch");

                    //new chrome.Notification(title: "awaiting Launch");

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        //new chrome.Notification(title: "ChromeUDPSendAsync");

                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: null
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion

            Console.WriteLine("getting ready...?");

            new Shaders.ProgramFragmentShader().With(
                async vs =>
                {
                    Console.WriteLine("getting ready...");


                    Native.body.style.margin = "0px";
                    //Native.document.documentElement.style.overflow = IStyle.OverflowEnum.auto;

                    var mAudioContext = new AudioContext();
                    var gl = new WebGLRenderingContext(alpha: true);
                    var c = gl.canvas.AttachToDocument();

                    c.style.SetSize(460, 237);
                    c.width = 460;
                    c.height = 237;

                    var mEffect = new ChromeShaderToyColumns.Library.ShaderToy.Effect(
                        mAudioContext,
                        gl,
                        callback: delegate
                        {
                            new IHTMLPre { "at callback" }.AttachToDocument();

                        },
                        obj: null,
                        forceMuted: false,
                        forcePaused: false
                    );










                    #region CaptureMouse
                    var mMouseOriX = 0;
                    var mMouseOriY = 0;
                    var mMousePosX = 0;
                    var mMousePosY = 0;

                    c.onmousedown += async ev =>
                    {
                        mMouseOriX = ev.CursorX;
                        mMouseOriY = ev.CursorY;
                        mMousePosX = mMouseOriX;
                        mMousePosY = mMouseOriY;

                        // why isn't ev.Element typed as the canvas?
                        //ev.Element
                        //ev.CaptureMouse();

                        // pointer lock: hide the cursor; mousemove then reports relative
                        // movementX/movementY deltas (accumulated in onmousemove below)
                        ev.Element.requestPointerLock();
                        await ev.Element.async.onmouseup;
                        Native.document.exitPointerLock();

                        mMouseOriX = -Math.Abs(mMouseOriX);
                        mMouseOriY = -Math.Abs(mMouseOriY);
                    };

                    c.onmousemove += ev =>
                    {
                        if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
                        {
                            mMousePosX += ev.movementX;
                            mMousePosY += ev.movementY;
                        }
                    };

                    #endregion


                    mEffect.mPasses[0].mInputs[0] = new ChromeShaderToyColumns.Library.ShaderToy.samplerCube { };

                    mEffect.mPasses[0].MakeHeader_Image();
                    mEffect.mPasses[0].NewShader_Image(vs);

                    #region onresize
                    new { }.With(
                        async delegate
                        {
                            do
                            {
                                c.width = Native.window.Width;
                                //c.height = Native.window.Height / 2;
                                c.height = Native.window.Height;
                                c.style.SetSize(c.width, c.height);
                            }
                            while (await Native.window.async.onresize);
                        }
                    );
                    #endregion


                    Console.WriteLine("can you see any?");

                    var sw = Stopwatch.StartNew();
                    do
                    {
                        mEffect.mPasses[0].Paint_Image(
                            sw.ElapsedMilliseconds / 1000.0f,

                            mMouseOriX,
                            mMouseOriY,
                            mMousePosX,
                            mMousePosY
                        );


                        // flush queued GL commands to the GPU; the frame is presented when control returns to the browser
                        gl.flush();

                    }
                    while (await Native.window.async.onframe);
                }
            );
        }
Example #7
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/equirectangular
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150718/shadertoy
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150706
        // subst b: X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyColumns\ChromeShaderToyColumns\bin\Debug\staging\ChromeShaderToyColumns.Application\web


        public Application(IApp page)
		{
			#region += Launched chrome.app.window
			dynamic self = Native.self;
			dynamic self_chrome = self.chrome;
			object self_chrome_socket = self_chrome.socket;

			if (self_chrome_socket != null)
			{
				if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
				{
					Console.WriteLine("chrome.app.window.create, is that you?");

					// pass thru
				}
				else
				{
					// should jsc send a copresence udp message?
					chrome.runtime.UpdateAvailable += delegate
					{
						new chrome.Notification(title: "UpdateAvailable");

					};

					chrome.app.runtime.Launched += async delegate
					{
						// 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
						Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

						new chrome.Notification(title: "ChromeUDPSendAsync");

						var xappwindow = await chrome.app.window.create(
							   Native.document.location.pathname, options: null
						);

						//xappwindow.setAlwaysOnTop

						xappwindow.show();

						await xappwindow.contentWindow.async.onload;

						Console.WriteLine("chrome.app.window loaded!");
					};


					return;
				}
			}
			#endregion


			// view-source:https://www.shadertoy.com/view/Xls3WS
			// https://www.shadertoy.com/api

			// https://www.shadertoy.com/view/Xls3WS
			// https://www.shadertoy.com/js/cmRenderUtils.js
			// https://www.shadertoy.com/js/effect.js

			// what does it take to import those nice shaders into jsc world?

			// x:\jsc.svn\examples\javascript\webgl\webglchocolux\webglchocolux\application.cs
			// it looks there are no channels.
			// is it a vert or frag?
			//  fragColor = vec4( col, 1.0 );
			// must be a frag
			// <body onload="watchInit()" 


			//ChromeShaderToyColumns.Library.ShaderToy.AttachToDocument(
			//	new Shaders.ProgramFragmentShader()
			//);

			new { }.With(
				async delegate
				{
					Native.body.style.margin = "0px";
					(Native.body.style as dynamic).webkitUserSelect = "auto";

					var vs = new Shaders.ProgramFragmentShader();

					var mAudioContext = new AudioContext();
					var gl = new WebGLRenderingContext(alpha: true);
					var c = gl.canvas.AttachToDocument();

					c.style.SetSize(460, 237);
					c.width = 460;
					c.height = 237;

					var u = new UIKeepRendering
					{
						animate = true
					}.AttachToDocument();

					//new IHTMLPre { "init..." }.AttachToDocument();

					// function ShaderToy( parentElement, editorParent, passParent )
					// function buildInputsUI( me )

					//  this.mGLContext = createGlContext( this.mCanvas, false, true );
					//  {alpha: useAlpha, depth: false, antialias: false, stencil: true, premultipliedAlpha: false, preserveDrawingBuffer: usePreserveBuffer } 

					var mMouseOriX = 0;
					var mMouseOriY = 0;
					var mMousePosX = 0;
					var mMousePosY = 0;

					// 308
					//var mEffect = new Library.ShaderToy.Effect(
					//	mAudioContext,
					//	gl,

					//	callback: delegate
					//	{
					//		new IHTMLPre { "at callback" }.AttachToDocument();

					//	},
					//	obj: null,
					//	forceMuted: false,
					//	forcePaused: false
					//);


					////mEffect.mPasses[0].NewTexture
					//// EffectPass.prototype.NewTexture = function( wa, gl, slot, url )
					//// this.mPasses[j].Create( rpass.type, this.mAudioContext, this.mGLContext );
					//// EffectPass.prototype.MakeHeader_Image = function( precission, supportDerivatives )
					//mEffect.mPasses[0].MakeHeader_Image();

					//// EffectPass.prototype.NewShader = function( gl, shaderCode )
					//// EffectPass.prototype.NewShader_Image = function( gl, shaderCode )
					//mEffect.mPasses[0].NewShader_Image(vs);

					//// ShaderToy.prototype.resetTime = function()
					// Effect.prototype.ResetTime = function()

					// ShaderToy.prototype.startRendering = function()
					// Effect.prototype.Paint = function(time, mouseOriX, mouseOriY, mousePosX, mousePosY, isPaused)
					// EffectPass.prototype.Paint = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres, isPaused )
					// EffectPass.prototype.Paint_Image = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres )

					var pass = new Library.ShaderToy.EffectPass(
						mAudioContext,
						gl,
						precission: Library.ShaderToy.DetermineShaderPrecission(gl),
						supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
						callback: null,
						obj: null,
						forceMuted: false,
						forcePaused: false,
						//quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
						outputGainNode: null
					);
					pass.MakeHeader_Image();
					pass.NewShader_Image(vs);

					var sw = Stopwatch.StartNew();

					do
					{
						pass.Paint_Image(
							sw.ElapsedMilliseconds / 1000.0f,

							mMouseOriX,
							mMouseOriY,
							mMousePosX,
							mMousePosY
                            //,

							// gl_FragCoord
							// cannot be scaled, and can be referenced directly.
							// need another way to scale
							//zoom: 0.3f
						);

						// flush queued GL commands to the GPU; the frame is presented when control returns to the browser
						// need redux build..
						gl.flush();

						await u.animate.async.@checked;
					}
					while (await Native.window.async.onframe);

				}
			);
		}
Example #8
		/// <summary>
		/// This is a javascript application.
		/// </summary>
		/// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
		public Application(IApp page)
		{
			// https://www.shadertoy.com/view/Xds3zN

			// X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyColumns\ChromeShaderToyColumns\Application.cs
			// X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToySeascapeByTDM\ChromeShaderToySeascapeByTDM\Application.cs

			// https://www.shadertoy.com/view/Ms2SD1

			#region += Launched chrome.app.window
			dynamic self = Native.self;
			dynamic self_chrome = self.chrome;
			object self_chrome_socket = self_chrome.socket;

			if (self_chrome_socket != null)
			{
				if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
				{
					Console.WriteLine("chrome.app.window.create, is that you?");

					// pass thru
				}
				else
				{
					// should jsc send a copresence udp message?
					chrome.runtime.UpdateAvailable += delegate
					{
						new chrome.Notification(title: "UpdateAvailable");

					};

					chrome.app.runtime.Launched += async delegate
					{
						// 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
						Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

						new chrome.Notification(title: "ChromeUDPSendAsync");

						var xappwindow = await chrome.app.window.create(
							   Native.document.location.pathname, options: null
						);

						//xappwindow.setAlwaysOnTop

						xappwindow.show();

						await xappwindow.contentWindow.async.onload;

						Console.WriteLine("chrome.app.window loaded!");
					};


					return;
				}
			}
			#endregion

			new { }.With(
			async delegate
			{
				Native.body.style.margin = "0px";

				var vs = new Shaders.ProgramFragmentShader();

				var mAudioContext = new AudioContext();
				var gl = new WebGLRenderingContext(alpha: true);
				var c = gl.canvas.AttachToDocument();

				#region onresize
				new { }.With(
					async delegate
					{
						do
						{
							c.width = Native.window.Width;
							c.height = Native.window.Height;
							c.style.SetSize(c.width, c.height);
						}
						while (await Native.window.async.onresize);
					}
				);
				#endregion




				#region CaptureMouse
				var mMouseOriX = 0;
				var mMouseOriY = 0;
				var mMousePosX = 0;
				var mMousePosY = 0;

				c.onmousedown += ev =>
				{
					mMouseOriX = ev.CursorX;
					mMouseOriY = ev.CursorY;
					mMousePosX = mMouseOriX;
					mMousePosY = mMouseOriY;

					ev.CaptureMouse();
				};

				c.onmousemove += ev =>
				{
					if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
					{
						mMousePosX = ev.CursorX;
						mMousePosY = c.height - ev.CursorY;
					}
				};


				c.onmouseup += ev =>
				{
					mMouseOriX = -Math.Abs(mMouseOriX);
					mMouseOriY = -Math.Abs(mMouseOriY);
				};
				#endregion

				var mEffect = new ChromeShaderToyColumns.Library.ShaderToy.Effect(
					mAudioContext,
					gl,

					callback: delegate
					{
						new IHTMLPre { "at callback" }.AttachToDocument();

					},
					obj: null,
					forceMuted: false,
					forcePaused: false
				);

				mEffect.mPasses[0].MakeHeader_Image();
				mEffect.mPasses[0].NewShader_Image(vs);

				var sw = Stopwatch.StartNew();
				do
				{
					mEffect.mPasses[0].Paint_Image(
						sw.ElapsedMilliseconds / 1000.0f,

						mMouseOriX,
						mMouseOriY,
						mMousePosX,
						mMousePosY

					);

					// flush queued GL commands to the GPU; the frame is presented when control returns to the browser
					gl.flush();

				}
				while (await Native.window.async.onframe);

			}
		);
		}
Example #9
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151219/audio
            // http://www.smartjava.org/examples/webaudio/example2.html


            //var audioBuffer;
            //var sourceNode;
            //var splitter;
            //var analyser, analyser2;
            //var javascriptNode;

            // get the context from the canvas to draw on
            //var ctx = $("#canvas").get()[0].getContext("2d");


            new { }.With(
                async delegate
                {

                    var ctx = new CanvasRenderingContext2D { };

                    ctx.canvas.AttachToDocument();


                    // create a gradient for the fill. Note the strange
                    // offset, since the gradient is calculated based on
                    // the canvas, not the specific element we draw


                    var context = new AudioContext();


                    // setup a javascript node
                    var javascriptNode = context.createScriptProcessor(2048, 1, 1);
                    // connect to destination, else it isn't called
                    javascriptNode.connect(context.destination);


                    // set up an analyser
                    var analyser = context.createAnalyser();
                    analyser.smoothingTimeConstant = 0.3;
                    analyser.fftSize = 1024;

                    var analyser2 = context.createAnalyser();
                    analyser2.smoothingTimeConstant = 0.0;
                    analyser2.fftSize = 1024;
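                    // with fftSize = 1024, each analyser exposes frequencyBinCount = 512 bins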

                    // create a buffer source node
                    var sourceNode = context.createBufferSource();
                    var splitter = context.createChannelSplitter();

                    // connect the source to the analyser and the splitter
                    sourceNode.connect(splitter);

                    // connect one of the outputs from the splitter to
                    // the analyser
                    splitter.connect(analyser, 0, 0);
                    splitter.connect(analyser2, 1, 0);

                    // connect the splitter to the javascriptnode
                    // we use the javascript node to draw at a
                    // specific interval.
                    analyser.connect(javascriptNode);

                    //        splitter.connect(context.destination,0,0);
                    //        splitter.connect(context.destination,0,1);

                    // and connect to destination
                    sourceNode.connect(context.destination);
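
                    // resulting node graph, as wired above:
                    //   sourceNode -> splitter -> analyser  (output 0) -> javascriptNode -> destination
                    //                          -> analyser2 (output 1)
                    //   sourceNode -> destination (audible path)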
                    // load the specified sound


                    // jsc doesn't like audio assets on root?
                    //{ src_value0 = /RookConfirmCommandStefanWeDontLiveOnAPlanet.mp3 }
                    //13d4:02:01:1c RewriteToAssembly error: System.InvalidOperationException: Referenced asset not found in the project. Did you forget to include it? - /RookConfirmCommandStefanWeDontLiveOnAPlanet.mp3

                    var buffer = await new WebClient().DownloadDataTaskAsync(
                        //new RoosterAudioExample.HTML.Audio.FromAssets.rooster { }.src
                        new AARPMartialLawLoop.HTML.Audio.FromAssets.loop { }.src
                        //new HTML.Audio.FromAssets.RookConfirmCommandStefanWeDontLiveOnAPlanet { }.src
                    );



                    // await? decodeAudioData is callback-based; see the awaitable sketch after this example
                    context.decodeAudioData(new Uint8ClampedArray(buffer).buffer,
                        xbuffer =>
                        {
                            // when the audio is decoded play the sound
                            sourceNode.buffer = xbuffer;



                            var sw = Stopwatch.StartNew();

                            var log = new StringBuilder();



                            var xleft = new List<int> { };
                            // 302 elements over 11 sec is ~27 fps
                            var xaverage = 0;

                            sourceNode.onended = IFunction.Of(
                                delegate
                                {
                                    // { min = 0, max = 63, Count = 264 }
                                    // { min = 0, max = 60, Count = 264 }

                                    var min = xleft.Min();
                                    var max = xleft.Max();

                                    new IHTMLPre { new { min, max, xleft.Count } }.AttachToDocument();




                                    new IHTMLTextArea { value = log.ToString() }.AttachToDocument();
                                }
                            );

                            Func<Uint8Array, double> getAverageVolume = (array) =>
                            {
                                var values = 0;

                                var length = array.buffer.byteLength;

                                // sum all the frequency amplitudes
                                for (var i = 0u; i < length; i++)
                                {
                                    values += array[i];
                                }

                                // cast to double, otherwise this is integer division
                                return (double)values / length;
                            };


                            var aMilliseconds = 0L;
                            var asw = new Stopwatch();

                            // a 2048-sample ScriptProcessor fires roughly every 43-46 ms
                            // (sample-rate dependent), i.e. two to three frames at 60 Hz

                            // { xleft = 397, xaverage = 37, aMilliseconds = 40 }
                            new IHTMLPre { () => new { xleft = xleft.Count, xaverage, aMilliseconds } }.AttachToDocument();


                            // when the javascript node is called
                            // we use information from the analyzer node
                            // to draw the volume
                            javascriptNode.onaudioprocess = IFunction.Of(
                                delegate
                                {
                                    aMilliseconds = asw.ElapsedMilliseconds;


                                    // get the average for the first channel
                                    var array = new Uint8Array(new byte[analyser.frequencyBinCount]);
                                    // jsc could have all byte[] shadowed by bytebuffers for implicit conversions...
                                    analyser.getByteFrequencyData(array);
                                    xaverage = (int)getAverageVolume(array);

                                    xleft.Add(xaverage);


                                    // get the average for the second channel
                                    var array2 = new Uint8Array(new byte[analyser2.frequencyBinCount]);
                                    analyser2.getByteFrequencyData(array2);
                                    var average2 = (int)getAverageVolume(array2);

                                    // clear the current state
                                    ctx.clearRect(0, 0, 60, 130);

                                    // set the fill style
                                    ctx.fillStyle = "red";


                                    // create the meters
                                    ctx.fillRect(0, 130 - xaverage, 25, 130);
                                    ctx.fillRect(30, 130 - average2, 25, 130);

                                    asw.Restart();

                                    log.AppendLine("new { ms = " + sw.ElapsedMilliseconds + ", x = " + xaverage + "},");

                                }
                            );
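
                            // note: ScriptProcessorNode has since been deprecated in the
                            // Web Audio spec in favour of AudioWorklet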




                            sourceNode.start(0);
                            //sourceNode.loop = true;

                        }
                     );






                }
            );






        }
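
        // A minimal sketch (not part of the original example): the "// await ?"
        // note above can be addressed by wrapping the callback-based decodeAudioData
        // in a TaskCompletionSource. The helper name DecodeAudioDataAsync and the
        // AudioBuffer type name are assumptions taken from the Web Audio API, not
        // confirmed jsc binding names.
        static Task<AudioBuffer> DecodeAudioDataAsync(AudioContext context, byte[] buffer)
        {
            var tcs = new TaskCompletionSource<AudioBuffer>();

            context.decodeAudioData(
                new Uint8ClampedArray(buffer).buffer,
                decoded => tcs.SetResult(decoded)
            );

            return tcs.Task;
        }
        // usage: sourceNode.buffer = await DecodeAudioDataAsync(context, buffer);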
Exemplo n.º 10
0
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151016/azimuthal
        // works. SSL needs to be trusted though, otherwise Chrome abuses TCP connections..



        // once this actually works, we can perhaps start reviewing other shaders that also rely on cubemaps?
        // where we get to generate the cubemaps?
        // https://www.shadertoy.com/view/XsBSDR#

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/equirectangular
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150718/shadertoy
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150706

        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\EquirectangularToAzimuthal\EquirectangularToAzimuthal\bin\Debug\staging\EquirectangularToAzimuthal.Application\web
        // subst b: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\EquirectangularToAzimuthal\EquirectangularToAzimuthal\bin\Debug\staging\EquirectangularToAzimuthal.Application\web


        public Application(IApp page)
        {
            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    chrome.runtime.UpdateAvailable += delegate
                    {
                        new chrome.Notification(title: "UpdateAvailable");

                    };

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "ChromeUDPSendAsync");

                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: null
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion

            Native.body.style.backgroundColor = "yellow";

            // view-source:https://www.shadertoy.com/view/Xls3WS
            // https://www.shadertoy.com/api

            // https://www.shadertoy.com/view/Xls3WS
            // https://www.shadertoy.com/js/cmRenderUtils.js
            // https://www.shadertoy.com/js/effect.js

            // what does it take to import those nice shaders into the jsc world?

            // x:\jsc.svn\examples\javascript\webgl\webglchocolux\webglchocolux\application.cs
            // it looks like there are no channels.
            // is it a vert or frag?
            //  fragColor = vec4( col, 1.0 );
            // must be a frag
            // <body onload="watchInit()" 


            //EquirectangularToAzimuthal.Library.ShaderToy.AttachToDocument(
            //	new Shaders.ProgramFragmentShader()
            //);

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.


            new { }.With(
                async delegate
                {
                    Native.body.style.margin = "0px";
                    (Native.body.style as dynamic).webkitUserSelect = "auto";

                    var vs = new Shaders.ProgramFragmentShader();

                    var mAudioContext = new AudioContext();

                    var gl = new WebGLRenderingContext(alpha: true);

                    if (gl == null)
                    {
                        new IHTMLPre { "webgl disabled?" }.AttachToDocument();
                        return;

                    }

                    var c = gl.canvas.AttachToDocument();

                    //  3840x2160

                    //c.style.SetSize(3840, 2160);

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube

                    //c.width = 3840;
                    //c.height = 2160;


                    c.width = 512;
                    c.height = 512;

                    // this has the wrong aspect?
                    //c.width = 6466;
                    //c.height = 3232;

                    new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

                    //6466x3232

                    var uizoom = 400f / c.width;

                    c.style.transformOrigin = "0 0";
                    c.style.transform = $"scale({uizoom})";
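
                    // the canvas renders at the full 512x512; the CSS transform scales
                    // the on-screen preview down to ~400px wide (origin at top-left)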

                    c.style.position = IStyle.PositionEnum.absolute;

                    c.style.SetLocation(500, 8, c.width, c.height);

                    var u = new UIKeepRendering
                    {
                        //animate = true

                        // is chrome portscanning the server??
                        animate = false
                    }.AttachToDocument();

                    //new IHTMLPre { "init..." }.AttachToDocument();

                    // function ShaderToy( parentElement, editorParent, passParent )
                    // function buildInputsUI( me )

                    //  this.mGLContext = createGlContext( this.mCanvas, false, true );
                    //  {alpha: useAlpha, depth: false, antialias: false, stencil: true, premultipliedAlpha: false, preserveDrawingBuffer: usePreserveBuffer } 

                    var mMouseOriX = 0;
                    var mMouseOriY = 0;
                    var mMousePosX = 0;
                    var mMousePosY = 0;

                    // 308
                    //var mEffect = new Library.ShaderToy.Effect(
                    //	mAudioContext,
                    //	gl,

                    //	callback: delegate
                    //	{
                    //		new IHTMLPre { "at callback" }.AttachToDocument();

                    //	},
                    //	obj: null,
                    //	forceMuted: false,
                    //	forcePaused: false
                    //);


                    ////mEffect.mPasses[0].NewTexture
                    //// EffectPass.prototype.NewTexture = function( wa, gl, slot, url )
                    //// this.mPasses[j].Create( rpass.type, this.mAudioContext, this.mGLContext );
                    //// EffectPass.prototype.MakeHeader_Image = function( precission, supportDerivatives )
                    //mEffect.mPasses[0].MakeHeader_Image();

                    //// EffectPass.prototype.NewShader = function( gl, shaderCode )
                    //// EffectPass.prototype.NewShader_Image = function( gl, shaderCode )
                    //mEffect.mPasses[0].NewShader_Image(vs);

                    //// ShaderToy.prototype.resetTime = function()
                    // Effect.prototype.ResetTime = function()

                    // ShaderToy.prototype.startRendering = function()
                    // Effect.prototype.Paint = function(time, mouseOriX, mouseOriY, mousePosX, mousePosY, isPaused)
                    // EffectPass.prototype.Paint = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres, isPaused )
                    // EffectPass.prototype.Paint_Image = function( wa, gl, time, mouseOriX, mouseOriY, mousePosX, mousePosY, xres, yres )

                    var pass = new Library.ShaderToy.EffectPass(
                        mAudioContext,
                        gl,
                        precission: Library.ShaderToy.DetermineShaderPrecission(gl),
                        supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                        callback: null,
                        obj: null,
                        forceMuted: false,
                        forcePaused: false,
                        //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                        outputGainNode: null
                    );

                    // how shall we upload our textures?
                    // can we reference GLSL.samplerCube yet?
                    //pass.mInputs[0] = new samplerCube { };
                    //pass.mInputs[0] = new Library.ShaderToy.samplerCube { };

                    var xsampler2D = new Library.ShaderToy.sampler2D { };

                    pass.mInputs[0] = xsampler2D;

                    pass.MakeHeader_Image();
                    pass.NewShader_Image(vs);

                    //var all = new Textures2 { }.Images;
                    var all = new[] {
                        new EquirectangularToAzimuthal.HTML.Images.FromAssets._20151001T0000 { }
                    };

                    new { }.With(
                        async delegate
                        {

                            var i = 0;

                            while (true)
                            {
                                xsampler2D.upload(
                                    all[i % all.Length]
                               //new HTML.Images.FromAssets._20151016T0000 { }
                               );

                                i++;

                                await Task.Delay(1000);
                            }
                        }
                    );
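
                    // note: the all[] array holds a single frame here, so the 1 Hz upload
                    // loop re-sends the same image; add more frames to animate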



                    var sw = Stopwatch.StartNew();

                    var paintsw = Stopwatch.StartNew();


                    new IHTMLPre { () => new { paintsw.ElapsedMilliseconds } }.AttachToDocument();

                    do
                    {
                        await u.animate.async.@checked;

                        paintsw.Restart();

                        
                        pass.Paint_Image(
                            sw.ElapsedMilliseconds / 1000.0f,

                            mMouseOriX,
                            mMouseOriY,
                            mMousePosX,
                            mMousePosY
                        //,

                        // gl_FragCoord is referenced directly by the shader and
                        // cannot be scaled from here; scaling would need another
                        // approach, e.g. in the shader's own UV math
                        //zoom: 0.3f
                        );

                        paintsw.Stop();


                        // gl.flush pushes queued GL commands to the GPU without blocking
                        // need nonpartial code.
                        gl.flush();

                    }
                    while (await Native.window.async.onframe);

                }
            );
        }