// X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
/// <summary>
/// ShaderToy-style render pass (variant 1: sets up a cube-map texture for samplerCube
/// inputs; iChannel uniform binding is still commented out here). The constructor itself
/// only initializes a frame counter and assigns the MakeHeader_Image delegate — all GL
/// work is deferred until the caller invokes MakeHeader_Image, then NewShader_Image,
/// then Paint_Image; each delegate is created by the previous one, so they must be
/// called in that order.
/// </summary>
public EffectPass(
    AudioContext wa = null,
    gl gl = null,
    // [sic] "precission" — the shader precision preamble (e.g. "precision mediump float;")
    string precission = null,
    // true when the OES_standard_derivatives extension is available
    bool supportDerivatives = false,
    RefreshTexturThumbailDelegate callback = null,
    object obj = null,
    bool forceMuted = false,
    bool forcePaused = false,
    // createQuadVBO
    // ARRAY_BUFFER
    // NOTE(review): quadVBO is accepted but unused below — Paint_Image binds its own
    // per-shader vbo instead (the quadVBO bind is commented out). Confirm intended.
    WebGLBuffer quadVBO = null,
    GainNode outputGainNode = null
)
{
    //new IHTMLPre { "enter EffectPass" }.AttachToDocument();

    // used by?
    // Frame counter; captured and incremented by the Paint_Image closure below.
    var mFrame = 0;

    // http://math.hws.edu/eck/cs424/notes2013/19_GLSL.html
    // http://stackoverflow.com/questions/10079368/how-would-i-do-environment-reflection-in-webgl-without-using-a-library-like-thre

    // Builds the GLSL uniform preamble prepended to the hosted fragment shader, and
    // creates/configures cube-map texture state for any samplerCube input channel.
    this.MakeHeader_Image = delegate
    {
        #region MakeHeader_Image
        //new IHTMLPre { "enter MakeHeader_Image" }.AttachToDocument();

        var header = precission;
        // Running count of preamble lines (used to offset shader compiler error line numbers,
        // presumably — TODO confirm against CreateShader).
        var headerlength = 3;

        if (supportDerivatives)
        {
            header += "#extension GL_OES_standard_derivatives : enable\n";
            headerlength++;
        }

        // Standard ShaderToy uniforms exposed to the hosted shader.
        header +=
            "uniform vec3 iResolution;\n" +
            "uniform float iGlobalTime;\n" +
            "uniform float iChannelTime[4];\n" +
            "uniform vec4 iMouse;\n" +
            "uniform vec4 iDate;\n" +
            "uniform float iSampleRate;\n" +
            "uniform vec3 iChannelResolution[4];\n";

        // not to be used by the hosted shader, but by our code in the middle on the gpu.
        // gpu code injection. first take.
        //"uniform float fZoom;\n";

        headerlength += 7;

        // Declare one sampler uniform per declared input channel.
        for (var i = 0; i < mInputs.Length; i++)
        {
            var inp = mInputs[i];

            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyVRCardboardGrid\ChromeShaderToyVRCardboardGrid\Application.cs
            //if (inp != null && inp.mInfo.mType == "cubemap")
            if (inp is samplerCube)
            {
                //new IHTMLPre { "add MakeHeader_Image samplerCube" }.AttachToDocument();

                // Create and configure the cube-map texture now, on texture unit 0.
                this.tex = new WebGLTexture(gl);

                // http://stackoverflow.com/questions/10079368/how-would-i-do-environment-reflection-in-webgl-without-using-a-library-like-thre
                // view-source:https://www.shadertoy.com/js/effect.js

                // um can there be only one samplerCube?
                gl.activeTexture(gl.TEXTURE0);
                //gl.enable(gl.TEXTURE_CUBE_MAP);
                gl.bindTexture(gl.TEXTURE_CUBE_MAP, tex);

                //gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_WRAP_S, (int)gl.CLAMP_TO_EDGE);
                //gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_WRAP_T, (int)gl.CLAMP_TO_EDGE);
                gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MIN_FILTER, (int)gl.LINEAR);
                gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MAG_FILTER, (int)gl.LINEAR);

                // NOTE(review): a large commented-out sample lived here — loading six cube-face
                // images (humus_px/nx/py/ny/pz/nz assets) via
                // gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img)
                // with UNPACK_FLIP_Y_WEBGL disabled; TEXTURE_CUBE_MAP_POSITIVE_X = 34069 .. 34074.
                // References kept from the original:
                // https://code.google.com/p/opengles-book-samples/source/browse/trunk/WebGL/Chapter_9/Simple_TextureCubemap/Simple_TextureCubemap.html?r=5
                // http://stackoverflow.com/questions/31665132/gl-invalid-operation-caused-by-samplercube
                // http://forum.processing.org/two/discussion/7039/samplercube-and-ambient-reflection

                header += "uniform samplerCube iChannel" + i + ";\n";
            }
            else
            {
                //new IHTMLPre { "add MakeHeader_Image sampler2D" }.AttachToDocument();
                header += "uniform sampler2D iChannel" + i + ";\n";
            }

            // dont need it?
            headerlength++;
        }

        // error CreateShader {{ infoLog = ERROR: 0:250: 'assign' : l-value required "gl_FragCoord" (can't modify gl_FragCoord)
        // ERROR: 0:251: 'assign' : l - value required "gl_FragCoord"(can't modify gl_FragCoord)
        //}}
        // error CreateShader {{ infoLog = ERROR: 0:253: '=' : cannot convert from 'FragCoord mediump 4-component vector of float' to 'highp 2-component vector of float'

        // Footer adapting ShaderToy's mainImage(out vec4, in vec2) entry point to gl_FragColor;
        // alpha is forced to 1.0 before the call.
        var mImagePassFooter = @"
void main( void )
{
    vec4 color = gl_FragColor;
    color.a = 1.0;
    mainImage( color, gl_FragCoord.xy );
    gl_FragColor = color;
}
";
        #endregion

        // Compiles header + hosted shader + footer into a program and creates Paint_Image.
        this.NewShader_Image = (fs) =>
        {
            #region NewShader_Image
            //new IHTMLPre { "enter NewShader_Image" }.AttachToDocument();

            var shaderCode = fs.ToString();

            // is that it?
            var vsSource = "attribute vec2 pos; void main() { gl_Position = vec4(pos.xy,0.0,1.0); }";
            var fsSource = header + shaderCode + mImagePassFooter;

            this.xCreateShader = CreateShader(gl, vsSource, fsSource, false);
            #endregion

            // Per-shader vertex buffer; refilled every frame by Paint_Image below.
            var vbo = new WebGLBuffer(gl);

            #region calledby
            //EffectPass.Paint_Image(effect.js:724)
            //EffectPass.Paint(effect.js:1038)
            //Effect.Paint(effect.js:1247)
            //renderLoop2(pgWatch.js:404)
            //ShaderToy.startRendering(pgWatch.js:420)
            //watchInit(pgWatch.js:1386)
            //onload(Xls3WS: 78)
            #endregion

            // Renders one frame: uploads the ShaderToy uniforms and draws a two-triangle
            // full-clip-space quad. zoom scales the viewport (picture-in-picture).
            this.Paint_Image = (time, mouseOriX, mouseOriY, mousePosX, mousePosY, zoom) =>
            {
                var mProgram = xCreateShader.mProgram;

                var xres = gl.canvas.width * zoom;
                var yres = gl.canvas.height * zoom;

                #region Paint_Image
                //new IHTMLPre { "enter Paint_Image" }.AttachToDocument();

                // this is enough to do pip to bottom left, no need to adjust vertex positions even?
                gl.viewport(0, 0, (int)xres, (int)yres);

                // useProgram: program not valid
                gl.useProgram(mProgram);

                // uniform4fv
                // iMouse layout: current position in xy, press origin in zw.
                var uniform4fv_mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };

                // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
                //gl.getUniformLocation(mProgram, "fZoom").With(fZoom => gl.uniform1f(fZoom, zoom));

                // Every lookup is null-checked: the GLSL compiler drops unused uniforms.
                var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) gl.uniform1f(l2, time);
                var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) gl.uniform3f(l3, xres, yres, 1.0f);
                var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) gl.uniform4fv(l4, uniform4fv_mouse);

                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\SimpleDigitalClock\SimpleDigitalClock\Application.cs
                // uniform vec4 iDate; // (year, month, day, time in seconds)
                // http://bytes.com/topic/c-sharp/answers/258829-time-elapsed-since-midnight-c
                // Local wall-clock time on purpose — iDate is a user-facing shader input.
                var now = DateTime.Now;
                //var sinceMidnight = now - DateTime.Today;
                var sinceMidnight = now - now.Date;

                var uniform4fv_dates = new float[] {
                    now.Year,
                    now.Month,
                    now.Day,
                    (float)sinceMidnight.TotalSeconds
                };
                var l7 = gl.getUniformLocation(mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, uniform4fv_dates);

                //var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);

                // NOTE(review): iChannel0..3 sampler bindings are disabled in this variant.
                //var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) gl.uniform1i(ich0, 0);
                //var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) gl.uniform1i(ich1, 1);
                //var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) gl.uniform1i(ich2, 2);
                //var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) gl.uniform1i(ich3, 3);

                // what if there are other textures too?
                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeWebGLFrameBuffer\ChromeWebGLFrameBuffer\Application.cs
                //for (var i = 0; i < mInputs.Length; i++)
                //{
                //    var inp = mInputs[i];
                //    gl.activeTexture((uint)(gl.TEXTURE0 + i));
                //    if (inp == null)
                //    {
                //        gl.bindTexture(gl.TEXTURE_2D, null);
                //    }
                //}

                // Channel time / resolution are zero placeholders for now.
                var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
                var l5 = gl.getUniformLocation(mProgram, "iChannelTime"); if (l5 != null) gl.uniform1fv(l5, times);

                var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
                var l8 = gl.getUniformLocation(mProgram, "iChannelResolution"); if (l8 != null) gl.uniform3fv(l8, resos);

                // using ?
                var vec2pos = (uint)gl.getAttribLocation(mProgram, "pos");

                //gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);
                gl.bindBuffer(gl.ARRAY_BUFFER, vbo);

                #region vertices
                float left = -1.0f;

                // y reversed?
                float bottom = -1.0f;
                float right = 1.0f;
                float top = 1.0f;

                // Two triangles covering the full clip-space quad.
                var fvertices = new float[] {
                    // left top
                    left, bottom,

                    // right top
                    //right, -1.0f,
                    right, bottom,

                    // left bottom
                    left, top,

                    // right top
                    //right, -1.0f,
                    right, bottom,

                    // right bottom
                    //right, 1.0f,
                    right, top,

                    // left bottom
                    left,top
                };
                #endregion

                // NOTE(review): the same static quad is re-uploaded every frame with
                // STATIC_DRAW — uploading once at creation (or using quadVBO) would suffice.
                gl.bufferData(gl.ARRAY_BUFFER, fvertices, gl.STATIC_DRAW);

                gl.vertexAttribPointer(vec2pos, 2, gl.FLOAT, false, 0, 0);
                gl.enableVertexAttribArray(vec2pos);

                //var iChannel0 = gl.getUniformLocation(mProgram, "iChannel0");

                // GL ERROR :GL_INVALID_OPERATION : glDrawArrays: attempt to render with no buffer attached to enabled attribute 1
                gl.drawArrays(gl.TRIANGLES, 0, 6);
                // first frame is now visible

                gl.disableVertexAttribArray(vec2pos);
                gl.bindBuffer(gl.ARRAY_BUFFER, null);
                #endregion

                mFrame++;
            };
        };
    };
}
// X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
/// <summary>
/// ShaderToy-style render pass (variant 2). Differences from variant 1: no cube-map
/// texture setup, the iChannel0..3 sampler uniforms are actually bound, and the
/// ProgramSelected callback is raised after useProgram so the host can inject extra
/// uniforms. Construction only assigns delegates; GL work happens when the caller
/// invokes MakeHeader_Image, then NewShader_Image, then Paint_Image (in that order).
/// </summary>
public EffectPass(
    AudioContext wa = null,
    gl gl = null,
    // [sic] "precission" — the shader precision preamble (e.g. "precision mediump float;")
    string precission = null,
    // true when the OES_standard_derivatives extension is available
    bool supportDerivatives = false,
    RefreshTexturThumbailDelegate callback = null,
    object obj = null,
    bool forceMuted = false,
    bool forcePaused = false,
    // createQuadVBO
    // ARRAY_BUFFER
    // NOTE(review): quadVBO is accepted but unused — Paint_Image binds its own vbo. Confirm intended.
    WebGLBuffer quadVBO = null,
    GainNode outputGainNode = null
)
{
    //new IHTMLPre { "enter EffectPass" }.AttachToDocument();

    // used by?
    // Frame counter; captured and incremented by the Paint_Image closure below.
    var mFrame = 0;

    // Builds the GLSL uniform preamble prepended to the hosted fragment shader.
    this.MakeHeader_Image = delegate
    {
        #region MakeHeader_Image
        //new IHTMLPre { "enter MakeHeader_Image" }.AttachToDocument();

        var header = precission;
        // Running count of preamble lines (presumably to offset shader error line numbers —
        // TODO confirm against CreateShader).
        var headerlength = 3;

        if (supportDerivatives)
        {
            header += "#extension GL_OES_standard_derivatives : enable\n";
            headerlength++;
        }

        // Standard ShaderToy uniforms exposed to the hosted shader.
        header +=
            "uniform vec3 iResolution;\n" +
            "uniform float iGlobalTime;\n" +
            "uniform float iChannelTime[4];\n" +
            "uniform vec4 iMouse;\n" +
            "uniform vec4 iDate;\n" +
            "uniform float iSampleRate;\n" +
            "uniform vec3 iChannelResolution[4];\n";

        // not to be used by the hosted shader, but by our code in the middle on the gpu.
        // gpu code injection. first take.
        //"uniform float fZoom;\n";

        headerlength += 7;

        // Declare one sampler uniform per declared input channel.
        for (var i = 0; i < mInputs.Length; i++)
        {
            var inp = mInputs[i];

            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyVRCardboardGrid\ChromeShaderToyVRCardboardGrid\Application.cs
            //if (inp != null && inp.mInfo.mType == "cubemap")
            if (inp is samplerCube)
            {
                // Diagnostic trace left active in this variant.
                new IHTMLPre { "add MakeHeader_Image samplerCube" }.AttachToDocument();

                header += "uniform samplerCube iChannel" + i + ";\n";
            }
            else
            {
                //new IHTMLPre { "add MakeHeader_Image sampler2D" }.AttachToDocument();
                header += "uniform sampler2D iChannel" + i + ";\n";
            }

            headerlength++;
        }

        // error CreateShader {{ infoLog = ERROR: 0:250: 'assign' : l-value required "gl_FragCoord" (can't modify gl_FragCoord)
        // ERROR: 0:251: 'assign' : l - value required "gl_FragCoord"(can't modify gl_FragCoord)
        //}}
        // error CreateShader {{ infoLog = ERROR: 0:253: '=' : cannot convert from 'FragCoord mediump 4-component vector of float' to 'highp 2-component vector of float'

        // Footer adapting ShaderToy's mainImage(out vec4, in vec2) entry point to gl_FragColor;
        // alpha is forced to 1.0 before the call.
        var mImagePassFooter = @"
void main( void )
{
    vec4 color = gl_FragColor;
    color.a = 1.0;
    mainImage( color, gl_FragCoord.xy );
    gl_FragColor = color;
}
";
        #endregion

        // Compiles header + hosted shader + footer into a program and creates Paint_Image.
        this.NewShader_Image = (fs) =>
        {
            #region NewShader_Image
            //new IHTMLPre { "enter NewShader_Image" }.AttachToDocument();

            var shaderCode = fs.ToString();

            // is that it?
            var vsSource = "attribute vec2 pos; void main() { gl_Position = vec4(pos.xy,0.0,1.0); }";
            var fsSource = header + shaderCode + mImagePassFooter;

            this.xCreateShader = CreateShader(gl, vsSource, fsSource, false);
            #endregion

            // Per-shader vertex buffer; refilled every frame by Paint_Image below.
            var vbo = new WebGLBuffer(gl);

            #region calledby
            //EffectPass.Paint_Image(effect.js:724)
            //EffectPass.Paint(effect.js:1038)
            //Effect.Paint(effect.js:1247)
            //renderLoop2(pgWatch.js:404)
            //ShaderToy.startRendering(pgWatch.js:420)
            //watchInit(pgWatch.js:1386)
            //onload(Xls3WS: 78)
            #endregion

            // Renders one frame: uploads the ShaderToy uniforms and draws a two-triangle
            // full-clip-space quad. zoom scales the viewport (picture-in-picture).
            this.Paint_Image = (time, mouseOriX, mouseOriY, mousePosX, mousePosY, zoom) =>
            {
                var mProgram = xCreateShader.mProgram;

                var xres = gl.canvas.width * zoom;
                var yres = gl.canvas.height * zoom;

                #region Paint_Image
                //new IHTMLPre { "enter Paint_Image" }.AttachToDocument();

                // this is enough to do pip to bottom left, no need to adjust vertex positions even?
                gl.viewport(0, 0, (int)xres, (int)yres);

                // useProgram: program not valid
                gl.useProgram(mProgram);

                // Host hook: lets the owner set additional program state after selection.
                if (this.ProgramSelected != null) this.ProgramSelected(mProgram);

                // uniform4fv
                // iMouse layout: current position in xy, press origin in zw.
                var uniform4fv_mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };

                // X:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\Application.cs
                //gl.getUniformLocation(mProgram, "fZoom").With(fZoom => gl.uniform1f(fZoom, zoom));

                // Every lookup is null-checked: the GLSL compiler drops unused uniforms.
                var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) gl.uniform1f(l2, time);
                var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) gl.uniform3f(l3, xres, yres, 1.0f);
                var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) gl.uniform4fv(l4, uniform4fv_mouse);

                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\synergy\SimpleDigitalClock\SimpleDigitalClock\Application.cs
                // uniform vec4 iDate; // (year, month, day, time in seconds)
                // http://bytes.com/topic/c-sharp/answers/258829-time-elapsed-since-midnight-c
                // Local wall-clock time on purpose — iDate is a user-facing shader input.
                var now = DateTime.Now;
                //var sinceMidnight = now - DateTime.Today;
                var sinceMidnight = now - now.Date;

                var uniform4fv_dates = new float[] {
                    now.Year,
                    now.Month,
                    now.Day,
                    (float)sinceMidnight.TotalSeconds
                };
                var l7 = gl.getUniformLocation(mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, uniform4fv_dates);

                //var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);

                // Bind samplers iChannelN to texture unit N (active in this variant).
                var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) gl.uniform1i(ich0, 0);
                var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) gl.uniform1i(ich1, 1);
                var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) gl.uniform1i(ich2, 2);
                var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) gl.uniform1i(ich3, 3);

                // what if there are other textures too?
                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeWebGLFrameBuffer\ChromeWebGLFrameBuffer\Application.cs
                //for (var i = 0; i < mInputs.Length; i++)
                //{
                //    var inp = mInputs[i];
                //    gl.activeTexture((uint)(gl.TEXTURE0 + i));
                //    if (inp == null)
                //    {
                //        gl.bindTexture(gl.TEXTURE_2D, null);
                //    }
                //}

                // Channel time / resolution are zero placeholders for now.
                var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
                var l5 = gl.getUniformLocation(mProgram, "iChannelTime"); if (l5 != null) gl.uniform1fv(l5, times);

                var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
                var l8 = gl.getUniformLocation(mProgram, "iChannelResolution"); if (l8 != null) gl.uniform3fv(l8, resos);

                // using ?
                var vec2pos = (uint)gl.getAttribLocation(mProgram, "pos");

                //gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);
                gl.bindBuffer(gl.ARRAY_BUFFER, vbo);

                #region vertices
                float left = -1.0f;

                // y reversed?
                float bottom = -1.0f;
                float right = 1.0f;
                float top = 1.0f;

                // Two triangles covering the full clip-space quad.
                var fvertices = new float[] {
                    // left top
                    left, bottom,

                    // right top
                    //right, -1.0f,
                    right, bottom,

                    // left bottom
                    left, top,

                    // right top
                    //right, -1.0f,
                    right, bottom,

                    // right bottom
                    //right, 1.0f,
                    right, top,

                    // left bottom
                    left,top
                };

                //var vertices = new Float32Array(fvertices);
                #endregion

                // NOTE(review): the same static quad is re-uploaded every frame with
                // STATIC_DRAW — uploading once at creation (or using quadVBO) would suffice.
                //gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
                gl.bufferData(gl.ARRAY_BUFFER, fvertices, gl.STATIC_DRAW);

                gl.vertexAttribPointer(vec2pos, 2, gl.FLOAT, false, 0, 0);
                gl.enableVertexAttribArray(vec2pos);

                // GL ERROR :GL_INVALID_OPERATION : glDrawArrays: attempt to render with no buffer attached to enabled attribute 1
                gl.drawArrays(gl.TRIANGLES, 0, 6);
                // first frame is now visible

                gl.disableVertexAttribArray(vec2pos);
                gl.bindBuffer(gl.ARRAY_BUFFER, null);
                #endregion

                mFrame++;
            };
        };
    };
}
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150706
// subst b: r:\jsc.svn\examples\glsl\future\GLSLShaderToyPip\GLSLShaderToyPip\bin\Debug\staging\GLSLShaderToyPip.Application\web
/// <summary>
/// This is a javascript application. Sets up a WebGL canvas and renders two ShaderToy
/// passes per frame: a full-canvas gradient (pass1), then a quadratic-bezier shader
/// (pass0) into a viewport scaled by the mouse X position — a picture-in-picture demo.
/// </summary>
/// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
public Application(IApp page)
{
    // the idea of this example
    // is to look at how multiple shaders can be linked to work together.

    // we need two shaders
    // first we could run them as separate programs in pip mode
    // selected by the host/javascript
    // then repeat the same experiment, but have the shader do the pip in a single program
    // later shader code could be nugeted

    // lets have a copy of
    // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeShaderToyQuadraticBezierByMattdesl\ChromeShaderToyQuadraticBezierByMattdesl\Shaders\Program.frag
    // locally should we need to modify it..

    // can we change colors?
    // https://www.shadertoy.com/view/lsSGRz

    #region += Launched chrome.app.window
    // If running as a packaged chrome app background page, relaunch this document
    // inside an app window and return; the new window runs the code below instead.
    dynamic self = Native.self;
    dynamic self_chrome = self.chrome;
    object self_chrome_socket = self_chrome.socket;

    if (self_chrome_socket != null)
    {
        if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
        {
            Console.WriteLine("chrome.app.window.create, is that you?");

            // pass thru
        }
        else
        {
            // should jsc send a copresence udp message?
            chrome.runtime.UpdateAvailable += delegate
            {
                new chrome.Notification(title: "UpdateAvailable");
            };

            chrome.app.runtime.Launched += async delegate
            {
                // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                new chrome.Notification(title: "ChromeUDPSendAsync");

                var xappwindow = await chrome.app.window.create(
                    Native.document.location.pathname, options: null
                );

                //xappwindow.setAlwaysOnTop

                xappwindow.show();

                await xappwindow.contentWindow.async.onload;

                Console.WriteLine("chrome.app.window loaded!");
            };

            return;
        }
    }
    #endregion

    // Fire-and-forget async body (jsc pattern; see the JSC1000 notes below for why
    // this is not a plain async method).
    new { }.With(
        async delegate
        //02000047 <module>.SHA1111132814b0387cee18e0fe5efe63eb881cfd505@901285072
        //02000048 GLSLShaderToyPip.Application+<AttachToDocument>d__1+<MoveNext>06000020
        //script: error JSC1000:
        //error:
        //  statement cannot be a load instruction(or is it a bug?)
        //  [0x0000]
        //  ldarg.0    +1 -0
        //public static async void AttachToDocument()
        //public async void AttachToDocument()
        {
            Native.body.style.margin = "0px";
            (Native.body.style as dynamic).webkitUserSelect = "auto";

            var gl = new WebGLRenderingContext(alpha: true);

            #region GPU process was unable to boot
            // NOTE(review): presumably the jsc wrapper yields null when WebGL context
            // creation fails — confirm; a plain constructor could not return null.
            if (gl == null)
            {
                new IHTMLPre {
                    // https://code.google.com/p/chromium/issues/detail?id=294207
                    "Rats! WebGL hit a snag. \n WebGL: Unavailable.\n GPU process was unable to boot. \n restart chrome.",

                    // chrome sends us to about:blank?
                    //new IHTMLAnchor {
                    //    target = "_blank",
                    //    href = "about:gpu", innerText = "about:gpu",
                    //    // http://tirania.org/blog/archive/2009/Jul-27-1.html
                    //    //onclick += de
                    //}
                    //.With(a => { a.onclick += e => { e.preventDefault(); Native.window.open("about:gpu"); }; } )
                }.AttachToDocument();

                return;
            }
            #endregion

            var c = gl.canvas.AttachToDocument();

            #region oncontextlost
            // On context loss remove the dead canvas and surface a diagnostic message.
            gl.oncontextlost += e =>
            {
                //[12144:10496:0311 / 120850:ERROR: gpu_watchdog_thread.cc(314)] : The GPU process hung. Terminating after 10000 ms.
                // GpuProcessHostUIShim: The GPU process crashed!
                gl.canvas.Orphanize();

                new IHTMLPre {
                    // https://code.google.com/p/chromium/issues/detail?id=294207
                    @"Rats! WebGL hit a snag. oncontextlost. The GPU process hung. Terminating.
check chrome://gpu for log messages.
do we have a stack trace?
" + new { e.statusMessage },

                    // chrome sends us to about:blank?
                    //new IHTMLAnchor {
                    //    target = "_blank",
                    //    href = "about:gpu", innerText = "about:gpu",
                    //    // http://tirania.org/blog/archive/2009/Jul-27-1.html
                    //    //onclick += de
                    //}
                    //.With(a => { a.onclick += e => { e.preventDefault(); Native.window.open("about:gpu"); }; } )
                }.AttachToDocument();
            };
            #endregion

            #region onresize
            // Keep the canvas sized to the window width; height capped at 300px.
            new { }.With(
                async delegate
                {
                    do
                    {
                        c.width = Native.window.Width;
                        c.height = Math.Min(300, Native.window.Height);
                        c.style.SetSize(c.width, c.height);
                    }
                    while (await Native.window.async.onresize);
                }
            );
            #endregion

            #region CaptureMouse
            var mMouseOriX = 0;
            var mMouseOriY = 0;
            var mMousePosX = c.width / 2;
            var mMousePosY = 0;

            c.onmousedown += ev =>
            {
                mMouseOriX = ev.CursorX;
                mMouseOriY = ev.CursorY;

                mMousePosX = mMouseOriX;
                mMousePosY = mMouseOriY;

                ev.CaptureMouse();
            };

            c.onmousemove += ev =>
            {
                if (ev.MouseButton == IEvent.MouseButtonEnum.Left)
                {
                    // y flipped: GL's origin is bottom-left, DOM's is top-left.
                    mMousePosX = ev.CursorX;
                    mMousePosY = c.height - ev.CursorY;
                }
            };

            c.onmouseup += ev =>
            {
                // Negative origin presumably signals "button released" per the
                // ShaderToy iMouse convention — TODO confirm.
                mMouseOriX = -Math.Abs(mMouseOriX);
                mMouseOriY = -Math.Abs(mMouseOriY);
            };
            #endregion

            var quadVBO = ShaderToy.createQuadVBO(gl);

            // pass1: full-canvas gradient background.
            var pass1 = new ShaderToy.EffectPass(
                gl: gl,
                precission: ShaderToy.DetermineShaderPrecission(gl),
                supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                quadVBO: quadVBO
            );
            pass1.MakeHeader_Image();
            var frag1 = new GLSLShaderToyPip.Shaders.TheColorGradientFragmentShader();
            pass1.NewShader_Image(frag1);

            // pass0: bezier shader, drawn after pass1 into a smaller viewport (pip).
            var pass0 = new ShaderToy.EffectPass(
                gl: gl,
                precission: ShaderToy.DetermineShaderPrecission(gl),
                supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                quadVBO: quadVBO
            );
            pass0.MakeHeader_Image();
            var frag0 = new GLSLShaderToyPip.Shaders.ChromeShaderToyQuadraticBezierByMattdeslFragmentShader();
            //var frag = new GLSLShaderToyPip.Shaders.TheColorGradientFragmentShader();
            pass0.NewShader_Image(frag0);

            // Bail out (and drop the canvas) if the shader failed to compile/link.
            if (pass0.xCreateShader.mProgram == null)
            {
                gl.Orphanize();
                return;
            }

            // Alternate the page background cyan/yellow every 500ms.
            // NOTE(review): presumably a visual liveness indicator for debugging — confirm.
            new { }.With(
                async delegate
                {
                    do
                    {
                        Native.document.body.style.backgroundColor = "cyan";
                        await Task.Delay(500);

                        Native.document.body.style.backgroundColor = "yellow";
                        await Task.Delay(500);
                    }
                    while (await Native.window.async.onframe);
                }
            );

            // https://developer.mozilla.org/en/docs/Web/API/WebGLRenderingContext
            #region Paint_Image
            // NOTE(review): this local delegate is never invoked — the render loop below
            // calls pass1.Paint_Image / pass0.Paint_Image instead. Likely leftover from
            // before EffectPass owned its own Paint_Image; candidate for removal.
            Paint_ImageDelegate Paint_Image = (mProgram, time, mouseOriX, mouseOriY, mousePosX, mousePosY) =>
            {
                var viewportxres = gl.canvas.width;
                var viewportyres = gl.canvas.height;

                #region Paint_Image
                //new IHTMLPre { "enter Paint_Image" }.AttachToDocument();

                // http://www.html5rocks.com/en/tutorials/webgl/webgl_fundamentals/
                //gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
                gl.viewport(0, 0, viewportxres, viewportyres);

                // alpha to zero will only hide the pixel if blending is enabled.
                gl.useProgram(mProgram);

                // uniform4fv
                var mouse = new[] { mousePosX, mousePosY, mouseOriX, mouseOriY };

                // Every lookup is null-checked: the GLSL compiler drops unused uniforms.
                var l2 = gl.getUniformLocation(mProgram, "iGlobalTime"); if (l2 != null) { gl.uniform1f(l2, time); }
                var l3 = gl.getUniformLocation(mProgram, "iResolution"); if (l3 != null) { gl.uniform3f(l3, viewportxres, viewportyres, 1.0f); }
                var l4 = gl.getUniformLocation(mProgram, "iMouse"); if (l4 != null) { gl.uniform4fv(l4, mouse); }

                //var l7 = gl.getUniformLocation(this.mProgram, "iDate"); if (l7 != null) gl.uniform4fv(l7, dates);
                //var l9 = gl.getUniformLocation(this.mProgram, "iSampleRate"); if (l9 != null) gl.uniform1f(l9, this.mSampleRate);

                var ich0 = gl.getUniformLocation(mProgram, "iChannel0"); if (ich0 != null) { gl.uniform1i(ich0, 0); }
                var ich1 = gl.getUniformLocation(mProgram, "iChannel1"); if (ich1 != null) { gl.uniform1i(ich1, 1); }
                var ich2 = gl.getUniformLocation(mProgram, "iChannel2"); if (ich2 != null) { gl.uniform1i(ich2, 2); }
                var ich3 = gl.getUniformLocation(mProgram, "iChannel3"); if (ich3 != null) { gl.uniform1i(ich3, 3); }

                //for (var i = 0; i < mInputs.Length; i++)
                //{
                //    var inp = mInputs[i];
                //    gl.activeTexture((uint)(gl.TEXTURE0 + i));
                //    if (inp == null)
                //    {
                //        gl.bindTexture(gl.TEXTURE_2D, null);
                //    }
                //}

                var times = new[] { 0.0f, 0.0f, 0.0f, 0.0f };
                var l5 = gl.getUniformLocation(mProgram, "iChannelTime"); if (l5 != null) { gl.uniform1fv(l5, times); }

                var resos = new float[12] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
                var l8 = gl.getUniformLocation(mProgram, "iChannelResolution"); if (l8 != null) { gl.uniform3fv(l8, resos); }

                // using ?
                var l1 = (uint)gl.getAttribLocation(mProgram, "pos");

                // Unlike EffectPass.Paint_Image, this draws from the shared quadVBO.
                gl.bindBuffer(gl.ARRAY_BUFFER, quadVBO);

                gl.vertexAttribPointer(l1, 2, gl.FLOAT, false, 0, 0);
                gl.enableVertexAttribArray(l1);

                gl.drawArrays(gl.TRIANGLES, 0, 6);
                // first frame is now visible

                gl.disableVertexAttribArray(l1);
                #endregion

                //mFrame++;
            };
            #endregion

            var sw = Stopwatch.StartNew();

            // Render loop: gradient pass at full size, then the bezier pass with the
            // viewport scaled by mouse X (pip lands bottom-left via gl.viewport).
            do
            {
                pass1.Paint_Image(
                    sw.ElapsedMilliseconds / 1000.0f,
                    mMouseOriX,
                    mMouseOriY,
                    mMousePosX,
                    mMousePosY,
                    zoom: 1.0f
                );

                pass0.Paint_Image(
                    sw.ElapsedMilliseconds / 1000.0f,
                    mMouseOriX,
                    mMouseOriY,
                    mMousePosX,
                    mMousePosY,

                    //zoom: 0.5f
                    zoom: mMousePosX / (float)c.width
                );

                // what does it do?
                gl.flush();
            }
            while (await Native.window.async.onframe);
        }
    );
}