/// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // i think jsc should tell us to swap the hdd out and refresh the current installation

            // X:\jsc.svn\examples\javascript\canvas\CanvasFromBytes\CanvasFromBytes\Application.cs
            // X:\jsc.svn\examples\javascript\synergy\webgl\WebGLEarthByBjorn\WebGLEarthByBjorn\Application.cs
            // x:\jsc.svn\examples\javascript\async\colordisco\colordisco\application.cs


            Native.body.style.backgroundColor = "red";

            //new IHTMLCanvas(
            //new HTML.Images.FromAssets.galaxy_starfield().bytes

            // how can we create a new canvas and write the old pixels into it?
            new { }.With(
                async scope =>
                {
                    var i = new HTML.Images.FromAssets.galaxy_starfield();

                    var bytes = await i.async.bytes;

                    for (int ii = 0; ii < bytes.Length; ii += 4)
                    {
                        // the canvas pixel data is RGBA; copy the red channel into alpha...
                        bytes[ii + 3] = (byte)(bytes[ii + 0]);

                        // ...and turn the color itself white, so brightness becomes opacity.
                        bytes[ii + 0] = 0xff;
                        bytes[ii + 1] = 0xff;
                        bytes[ii + 2] = 0xff;
                    }


                    new IHTMLPre { bytes[0].ToString("x2") }.AttachToDocument();
                    new IHTMLPre { bytes[1].ToString("x2") }.AttachToDocument();
                    new IHTMLPre { bytes[2].ToString("x2") }.AttachToDocument();

                    // alpha.
                    new IHTMLPre { bytes[3].ToString("x2") }.AttachToDocument();

                    var c = new CanvasRenderingContext2D(i.width, i.height);

                    c.bytes = bytes;

                    c.canvas.AttachToDocument();


                }
            );


        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // X:\jsc.svn\examples\javascript\WebGL\WebGLSVGAnonymous\WebGLSVGAnonymous\Application.cs
            // X:\jsc.svn\examples\javascript\WebGL\WebGLVRCreativeLeadership\WebGLVRCreativeLeadership\Application.cs
            // X:\jsc.svn\examples\javascript\WebGL\WebGLSVGSprite\WebGLSVGSprite\Application.cs

            var l = new NotificationLayout().layout;

            l.AttachToDocument();


            new { }.With(
                async delegate
                {
                    var retry = new IHTMLButton { "retry" }.AttachToDocument();

                    do
                    {
                        new IHTMLHorizontalRule().AttachToDocument();

                        Task<ISVGSVGElement> n = l;

                        var svg = await n;

                        svg.AttachToDocument();

                        IHTMLImage i = svg;
                        i.AttachToDocument();

                        var c = new CanvasRenderingContext2D(svg.clientWidth, svg.clientHeight);

                        // http://www.w3schools.com/tags/canvas_fillstyle.asp
                        c.fillStyle = "red";

                        c.fillRect(0, 0, svg.clientWidth, svg.clientHeight);

                        // rather than overloading drawImage, let's use an implicit conversion operator, which is more exotic for the api
                        // at some point the canvas may implement html drawing natively, obsoleting this workaround
                        c.drawImage(i, 0, 0, svg.clientWidth, svg.clientHeight);

                        c.canvas.AttachToDocument();
                    }
                    while (await retry.async.onclick);

                }
            );
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // what about gif, svg, canvas and webgl?
            Native.document.icon = new fullbox();

            new IHTMLButton { "?" }.AttachToDocument().WhenClicked(
                 button =>
                 {
                     var div = new IHTMLDiv { "?" };

                     // 7x20
                     div.style.color = "red";
                     div.style.width = "16px";
                     div.style.height = "16px";

                     IHTMLImage i = div;

                     var c = new CanvasRenderingContext2D(16, 16);

                     c.drawImage(i, 0, 0, 16, 16);


                     Native.css.style.cursorImage = i;

                     // Uncaught SecurityError: Failed to execute 'toDataURL' on 'HTMLCanvasElement': Tainted canvases may not be exported.
                      // why won't this work? drawing the div (rasterized through an SVG foreignObject image) likely taints the canvas, so toDataURL is blocked.
                     Native.document.icon = c.canvas.toDataURL();


                 }
            );
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            var l = new NotificationLayout().layout;

            l.AttachToDocument();

            var div0 = new IHTMLDiv().AttachToDocument();

            new { }.With(
                async delegate
                {

                    do
                    {
                        div0.Clear();

                        new IHTMLHorizontalRule().AttachToDocument();

                        Task<ISVGSVGElement> n = l;

                        var svg = await n;

                        IHTMLImage i = svg;

                        var c = new CanvasRenderingContext2D(l.clientWidth, l.clientHeight);
                        c.drawImage(i, 0, 0, l.clientWidth, l.clientHeight);
                        c.canvas.AttachTo(div0);
                    }
                    while (await l.async.onmutation);

                }
            );
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server, we can open it up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360x83");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // GpuProcessHostUIShim: The GPU process crashed!

            var poke = new WebGLRenderingContext();

            if (poke == null)
            {
                new IHTMLPre { "GpuProcessHostUIShim: The GPU process crashed! restart process."
                }.AttachToDocument();
                return;
            }

            // are we in a RemoteApp ? software renderer?
            // this may hang the buggy rdp protocol...




            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048 * 1; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz render we may want to use float camera precision, not available for ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            //if (Environment.ProcessorCount < 8)
            //    //cubefacesize = 64; // 6 faces, ?

            //    // fast gif?
            //cubefacesize = 1024; // 6 faces, ?

            // not 8k..
            //cubefacesize = 512; // 6 faces, ?

            // a big cubeface may draw only half of itself?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            // scale each full-size cube face canvas down to a 128px preview in the UI
            var uizoom = 128f / cubefacesize;


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we make sure we don't fly beyond the moon too much?
            //var radius = 500;
            //var radius = 480;
            //var radius = -480;

            //var segments = 32;
            //var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            // var far = 0xffffff;

            // need a zoom effect
            // 5pixels to 33%


            // radius needs to be a bit bigger so we can't zoom through it
            // far image at this distance 
            var skyboxradius0 = 0 + 2048 * 4;

            var near = cubefacesize * 0.33;


            var skyboxradius = skyboxradius0 * 1.2;

            var far = skyboxradius * 2;
            //var near = cubefacesize * 0.5;
            //var near = cubefacesize * 0.4;
            //var near = cubefacesize * 0.25;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();



            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);

            // what if we reuse the skybox as is and skip rendering it?
            // that means we cannot rotate via sky, we have to rotate other elements in reverse.
            // can we have a checkbox to hide or render the skybox?
            var scenezooms = new THREE.Group();
            scenezooms.AttachTo(scene);




            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            //scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,

                    alpha = true,

                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg

            // cyan?
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0xfffff, 0);
            renderer0.setClearColor(0x0, 0);
            //renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] faces



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // what if we want more than 30sec video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();












            var xframeID = 0;



            new { }.With(

                async delegate
                {

                next:

                    //Console.WriteLine("enter vsync0ambient");
                    Native.document.title = new { xframeID }.ToString();


                    vsync0ambient = new TaskCompletionSource<object>();

                    await vsync0ambient.Task;

                    await Task.Delay(1000 / 15);
                    //await Task.Delay(1000);
                    //Console.WriteLine("await vsync0ambient 5");
                    //await Task.Delay(5000);

                    xframeID++;

                    goto next;
                }

            );
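
            // vsync0ambient is a one-shot handshake: the pacing loop above parks on a fresh
            // TaskCompletionSource each iteration, and the render loop is expected to complete
            // it once a frame has actually been presented. A minimal sketch of that producer
            // side (an assumption about the render loop elsewhere in this file, not wired up here):
            Action signalAmbientFrame = delegate
            {
                var ambientwaiter = vsync0ambient;

                if (ambientwaiter != null)
                    if (!ambientwaiter.Task.IsCompleted)
                        ambientwaiter.SetResult(null);
            };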














            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerazMIN = 0 - 2048 * 4;

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0 - 2048 * 4, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = camerazMIN, max = 0, valueAsNumber = camerazMIN, title = "cameraz" }.AttachToDocument();
            // the zoom thing...


            new IHTMLHorizontalRule { }.AttachToDocument();

            var skyrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotup" }.AttachToDocument();
            var skyrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotright" }.AttachToDocument();

            new IHTMLPre { () => new { skyrotup = skyrotup.valueAsNumber, skyrotright = skyrotright.valueAsNumber } }.AttachToDocument();


            new IHTMLHorizontalRule { }.AttachToDocument();

            // were we able to test for it?
            //var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = -12, title = "up" }.AttachToDocument();
            var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "up" }.AttachToDocument();
            var zoomrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "right" }.AttachToDocument();

            new IHTMLPre { () => new { 
                

                // on 0 zoom we should rely on the original skybox?
                cameraz = cameraz.valueAsNumber, 
                
                zoomrotup = zoomrotup.valueAsNumber, zoomrotright = zoomrotright.valueAsNumber } }.AttachToDocument();



            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };
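
            // applycameraoffset is a multicast Action: each cube-face camera and dynamic texture
            // below registers its own update into it via +=. The render loop is then expected to
            // invoke applycameraoffset() once per frame, so slider changes (camerax/cameray/cameraz
            // plus the fcamera* render-server offsets) reach all six cameras in the same frame.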


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            //canvasNY.canvas.style.transform = $"scale({uizoom})";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            //canvasPY.canvas.style.transform = $"scale({uizoom})";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            //canvasNX.canvas.style.transform = $"scale({uizoom})";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";


            // front??
            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            //canvasPX.canvas.style.transform = $"scale({uizoom})";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            //canvasNZ.canvas.style.transform = $"scale({uizoom})";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            //canvasPZ.canvas.style.transform = $"scale({uizoom})";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
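
            // The six preview canvases are laid out as a cross, each scaled down by uizoom:
            //   row 0:            PY
            //   row 1:   PX  PZ  NX  NZ
            //   row 2:            NY
            // (PY and NY sit above and below the PZ column.)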




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region gl4K spherical
            var gl4K = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c4k = gl4K.canvas.AttachToDocument();


            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            // when can we go up?
            c4k.width = 3840;
            c4k.height = 2160;

            // https://www.youtube.com/watch?v=sLprVF6d7Ug
            // 8K is 7680 4320

            // https://www.youtube.com/watch?v=RNdHaeBhT9Q
            // 8K is 7680 3840

            //c.width = 7680;
            ////c.height = 3840;
            //c.height = 4320;



            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c4k.width, c4k.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            // scale the full-size panorama canvas down to a 480px-wide on-screen preview
            var suizoom = 480f / c4k.width;

            c4k.style.backgroundColor = "yellow";
            c4k.style.transformOrigin = "0 0";
            //c.style.transform = $"scale({suizoom})";
            c4k.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c4k.style.position = IStyle.PositionEnum.absolute;

            c4k.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120
                );


            // until we figure out how to fix the shader, we can try to fake it?
            // will allow at least a nice static 8K image?
            // S6 did a 6546x3272 image. 5k?

            // 1.77
            var c8k = new CanvasRenderingContext2D(3840 * 2, 2160 * 2);

            //var c8k = new CanvasRenderingContext2D(5120, 2880);

            // 5120 x 2880 pixel

            // 8k canvas wont load in chrome?
            c8k.canvas.AttachToDocument();


            c8k.canvas.style.backgroundColor = "cyan";
            c8k.canvas.style.transformOrigin = "0% 0%";
            c8k.canvas.style.transform = "scale(" + (suizoom / 2) + ")";

            c8k.canvas.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 120,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120 + 320
                );



            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl4K,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl4K),
                       supportDerivatives: gl4K.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
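
            // The fragment shader set up above does the cube-to-equirectangular mapping on the GPU.
            // A minimal CPU-side sketch of that mapping (an assumption about what the shader does,
            // kept here only as documentation): for an output pixel (u, v) in [0..1] of the
            // 3840x2160 panorama, compute the view direction the cubemap would be sampled with.
            Func<double, double, THREE.Vector3> equirectangularDirection = (u, v) =>
            {
                var lon = (u - 0.5) * 2.0 * Math.PI;   // -PI .. +PI around the vertical axis
                var lat = (v - 0.5) * Math.PI;         // -PI/2 .. +PI/2 from bottom to top

                return new THREE.Vector3(
                    Math.Cos(lat) * Math.Sin(lon),
                    Math.Sin(lat),
                    Math.Cos(lat) * Math.Cos(lon)
                );
            };
            // e.g. equirectangularDirection(0.5, 0.5) points straight ahead along +Z.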



            // why is it flipped?

            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets._20150912_154522().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._20150912_154522recenter().AttachToDocument();


            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    var c8ksw = Stopwatch.StartNew();

                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    // not exporting to file system?
                    //var f0 = new IHTMLImage { src = gl4K.canvas.toDataURL() };
                    //var f0 = new IHTMLImage { src = c8k.canvas.toDataURL() };

                    // png would be 50mb?
                    var f0 = new IHTMLImage { src = c8k.canvas.toDataURL(quality: 0.9) };
                    // 22989ms { c8ksw = 00:00:12.12976 }
                    Console.WriteLine(new { c8ksw });

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl4K);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                "render 60hz 30sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                frameIDslider.valueAsNumber = -1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                //var filename = frameIDslider.valueAsNumber.ToString().PadLeft(4, '0') + ".png";
                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };


                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                {
                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    //await dir.WriteAllBytes(filename, gl4K);
                    await dir.WriteAllBytes(filename, c8k);
                }

                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;


                // in 30 sec can we have a zoom in and out?

                // so 15 sec at 60 fps needs to be -max z


                var a = Math.Abs(frameIDslider.valueAsNumber - (60 * 15));
                var aa = a / (60f * 15);

                // aa runs 1 -> 0 -> 1 across the 1800 frames, so cameraz sweeps from 0 to camerazMIN
                // by the 15 second midpoint and back to 0 at the end of the 30 second clip.
                //cameraz.valueAsNumber = (int)(camerazMIN * aa);
                cameraz.valueAsNumber = (int)(camerazMIN * (1.0 - aa));


                // 60hz 30sec = 1800 frames (matches frameIDslider.max)
                if (frameIDslider.valueAsNumber < 60 * 30)
                {
                    // Blob GC? either this helps, or it's the fact that we made a Blob static.
                    //await Task.Delay(11);
                    await Task.Delay(33);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync1renderman = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var p900toCubeSize = cubefacesize / 1080f;
            var p900toCubeSize = cubefacesize / 1920f;

            //p900toCubeSize *= 0.7f;

            // where is this magic number coming from??
            p900toCubeSize *= 0.65f;
            //p900toCubeSize *= 0.5f;

            // http://stackoverflow.com/questions/17648067/three-js-drawing-two-overlapping-transparent-spheres-and-hiding-intersection



            var farimage = new output00609();
            var nearimage = new output01085();

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                var tex0 = new THREE.Texture(farimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );

                //(floor2 as dynamic).renderDepth = 0.2;

                //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                // zoom in and get a 90 FOV closeup?
                floor2.position.set(-cubefacesize * 0.50 - skyboxradius0, 0, 0);
                //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }





            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                var tex0 = new THREE.Texture(nearimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                //var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * 0.1), (int)(1080 * 0.1), 8, 8);



                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                //(floor2 as dynamic).renderDepth = 0.3;

                // zoom in and get a 90 FOV closeup?
                floor2.position.set(-cubefacesize * 0.50, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }


            Action<IHTMLImage, double> AddFrame = (img, z) =>
            {
                {
                    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                    //var tex0 = new THREE.Texture(new moon());
                    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                    //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                    var tex0 = new THREE.Texture(img) { needsUpdate = true, minFilter = THREE.LinearFilter };
                    applycameraoffset += delegate { tex0.needsUpdate = true; };

                    //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                    var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                    var floor2 = new THREE.Mesh(planeGeometry0,
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                        new THREE.MeshPhongMaterial(
                            new
                            {
                                // black otherwise?
                                transparent = true,

                                map = tex0,


                                //ambient = 0x00ff00,
                                //color = 0x00ff00
                            })

                    );

                    //(floor2 as dynamic).renderDepth = 0.2;

                    //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                    // zoom in and get a 90 FOV closeup?
                    floor2.position.set(-cubefacesize * 0.50 - skyboxradius0 * z, 0, 0);
                    //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    //floor2.AttachTo(scene);
                    floor2.AttachTo(scenezooms);
                }

            };
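            // AddFrame puts each frame on its own plane; z in [0..1] offsets that plane by an extra
            // skyboxradius0 * z along -X, so frames added with a larger z sit deeper in the scene and
            // z = 0 lands right at -cubefacesize * 0.5.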

            new IHTMLButton { "load frames from disk " }.AttachToDocument().onclick += async e =>
            {
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360x83\Application.cs
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360azimuthal\Application.cs

                e.Element.disabled = true;

                // how do we load the files?
                var dir2 = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir2.createReader();

                var files2 = await dir2r.readFileEntries();

                var files2count = files2.Count();

                Console.WriteLine(new { files2count });

                // does this dir have the first and last image we already know of?
                // 55390ms { files2count = 4324 }

                var firstcandidate = files2.First();

                //Console.WriteLine(new { firstcandidate, farimage.src });
                // 19230ms { firstcandidate = [object FileEntry], src = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/output00609.png }

                // 10903ms { firstcandidate = output00001.png, farimage = output00609.png }

                var files2skip = files2.SkipWhile(firstcandidate1 => firstcandidate1.name != farimage.src.SkipUntilLastOrEmpty("/"));
                //Console.WriteLine(new { files2skip = files2skip.Count() });

                var files2take = files2skip.TakeWhile(firstcandidate1 => firstcandidate1.name != nearimage.src.SkipUntilLastOrEmpty("/"));

                //6228ms { files2count = 4324 }
                //view-source:54116 6377ms { files2take = 476 }

                // reverse?
                var files3 = files2take.ToArray();
                var files3count = files3.Count();


                //Console.WriteLine(new { files2take = files2take.Count() });

                //var step = 8;
                //var step = 1;//crashes
                //var step = 2;//crashes after load
                var step = 4;//crashes after load
                //var step = 3;//
                //var step = (int)(files3count * 0.05);
                //var step = (int)(files3count * 0.25);

                for (int i = step; i < files3count; i += step)
                {
                    //files3[i].

                    Console.WriteLine(new { i });

                    e.Element.innerText = new { step, i, files3count }.ToString();


                    //files3[i].file()
                    var ff = await files3[i].file();

                    //83fc21e4-a2da-408d-9831-571313ead641
                    //Refcount: 1
                    //Content Type: image/png
                    //Type: file
                    //Path: X:\p900\7\DCIM\100NIKON\DSCN0018\output00767.png
                    //Modification Time: Sunday, September 13, 2015 at 10:24:01 AM
                    //Length: 2,131,791

                    // are we running out of blobs?

                    var url = ff.ToObjectURL();
                    var img = new IHTMLImage(url);


                    await img.async.oncomplete;
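                    // sketch: once the image has loaded (and is handed to a texture below), the blob behind
                    // the object URL is no longer needed; revoking it avoids keeping thousands of blobs alive.
                    // assumption: revokeObjectURL is reachable via the dynamic window object in this binding.
                    //((dynamic)Native.window).URL.revokeObjectURL(url);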

                    //async ff =>
                    //{
                    //    var ffbytes = await ff.readAsBytes();

                    //var ffimage = (IHTMLImage)ffbytes;

                    var aa = (double)i / files3count;

                    AddFrame(img, 1.0 - aa);

                    //    }
                    //);

                    //files3[i].
                }

                e.Element.innerText = "done " + new { step, files3count }.ToString();

            };


            //{
            //    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //    //var tex0 = new THREE.Texture(new moon());
            //    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            //    var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };

            //    applycameraoffset += delegate { tex0.needsUpdate = true; };

            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(
            //            new
            //            {

            //                map = tex0,


            //                //ambient = 0x00ff00,
            //                //color = 0x00ff00
            //            })

            //    );
            //    floor2.position.set(cubefacesize * 0.55, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}





            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND


            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    //var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await frame0.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(frame0.width, frame0.height);

                    cc.bytes = bytes;

                    // does not do a thing?
                    //s.flipY = true;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                // both?
                                //side = THREE.BackSide,
                                transparent = true
                            });

                    // nice
                    //stars_material.opacity = 0.5;



                    // THREE.SphereGeometry = function ( radius, widthSegments, heightSegments, phiStart, phiLength, thetaStart, thetaLength ) {
                    // http://learningthreejs.com/blog/2011/10/05/performance-merging-geometry/

                    // how are we to construct geometry that has higher detail in one spot for zoom in
                    var skyboxsphere = new THREE.SphereGeometry(skyboxradius, 512, 512

                        // left to right
                        // 0 .. 45 deg

                        // center it?
                        //Math.PI - Math.PI / 4
                        //, Math.PI / 4,

                        //// up to down
                        //// 0 .. 22 deg
                        //0, Math.PI / 2
                    );
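
                    // a partial, higher-detail sphere sketch built from the hints above
                    // (assumption: this THREE binding exposes the full overload listed earlier):
                    //var skyboxpatch = new THREE.SphereGeometry(
                    //    skyboxradius, 512, 512,
                    //    Math.PI - Math.PI / 4, Math.PI / 4,   // phiStart, phiLength (left to right)
                    //    0, Math.PI / 2                        // thetaStart, thetaLength (up to down)
                    //);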


                    //
                    var stars = new THREE.Mesh(
                        // radius not used?
                        //new THREE.SphereGeometry(skyboxradius, 64, 64),
                        //new THREE.SphereGeometry(skyboxradius, 8, 8),

                        // we need to be able to zoom in!
                        //new THREE.SphereGeometry(skyboxradius, 256, 256),

                        // chrome will crash on laptop?
                        // chrome will crash on red!
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 512, 512),
                        //new THREE.SphereGeometry(skyboxradius, 600, 600),

                        // or perhaps we only need detailed geometry in a specific spot?
                        skyboxsphere,

                        stars_material
                    );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;

                    stars.scale.x = -1;


                    // http://stackoverflow.com/questions/31797871/three-js-alpha-on-entire-object
                    applycameraoffset += delegate
                    {
                        if (cameraz.valueAsNumber == 0)
                        {
                            // static 5k image should take over...
                            stars_material.opacity = 0.0;
                            return;
                        }


                        var a = (cameraz.valueAsNumber / (double)camerazMIN);

                        stars.rotation.set(0, 0, 0);

                        //    skyrotright
                        //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (skyrotup.valueAsNumber / 900.0));
                        stars.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (skyrotright.valueAsNumber / 900.0));



                        stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * ((a * 35.0 + skyrotup.valueAsNumber) / 900.0));


                        stars_material.opacity = (1.0 - a) * 0.7 + 0.3;
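                        // a runs 0..1 as cameraz approaches camerazMIN, so the opacity maps
                        // linearly from 1.0 (a = 0) down to 0.3 (a = 1) while zooming.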
                    };




                    // can we get our horizon recentered?
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.5 / 90.0));

                    applycameraoffset += delegate
                    {

                        scenezooms.rotation.set(0, 0, 0);
                        // keep skybox where it is

                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (zoomrotup.valueAsNumber / 900.0));
                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (zoomrotright.valueAsNumber / 900.0));

                    };


                    var hideskybox = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox", @checked = true }.AttachToDocument();

                    //Native.window.onframe += delegate
                    //{
                    //    //
                    //    stars.visible = !hideskybox.@checked;
                    //};

                    hideskybox.onchange += delegate
                    {
                        //
                        stars.visible = !hideskybox.@checked;
                    };
                    stars.visible = !hideskybox.@checked;



                    scene.add(stars);
                }
           );
            #endregion


            //var NYonly = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "NY only" }.AttachToDocument();
            var PXonly = new IHTMLInput
            {
                type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox,
                @checked = true,
                title = "PX only"
            }.AttachToDocument();


            //new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
            //       dae =>
            {



                //dae.position.y = -80;

                //dae.AttachTo(sceneg);
                //scene.add(dae);
                //oo.Add(dae);




                // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                Native.window.onframe +=
                    e =>
                    {
                        // Z:\jsc.svn\examples\javascript\test\TestDelegateIfIfReturn\Application.cs

                        // let render man know..

                        var flag1 = vsync1renderman != null;
                        // nonroslyn!!
                        if (flag1)
                        // this if block is not detected?
                        {
                            // what's going on with if clauses?
                            nop();

                            // wtf???
                            var flag0 = vsync1renderman.Task.IsCompleted;
                            if (flag0)
                                return;
                        }
                        if (vsync0ambient != null)
                            if (vsync0ambient.Task.IsCompleted)
                                return;

                        // 38045ms Native.window.onframe { vsync1renderman = , vsync0ambient = [object Object] }

                        //Console.WriteLine("Native.window.onframe " + new { vsync1renderman, vsync0ambient });

                        //return;

                        //if (pause) return;
                        //if (pause.@checked)
                        //    return;


                        // can we float out of frame?
                        // haha. a bit too flickery.
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                        //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                        //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                        //sphere.rotation.y += speed;
                        //clouds.rotation.y += speed;

                        // manual rebuild?
                        // red compiler notifies laptop chrome of pending update
                        // app reloads

                        applycameraoffset();
                        renderer0.clear();
                        //rendererPY.clear();

                        //cameraPX.aspect = canvasPX.aspect;
                        //cameraPX.updateProjectionMatrix();

                        // um what does this do?
                        //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                        // mousewheel allows the camera to move closer
                        // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                        //this.targetPX.x += 1;
                        //this.targetNX.x -= 1;

                        //this.targetPY.y += 1;
                        //this.targetNY.y -= 1;

                        //this.targetPZ.z += 1;
                        //this.targetNZ.z -= 1;

                        // how does the 360 or shadertoy want our cubemaps?


                        // and then rotate right?

                        // how can we render cubemap?

                        if (cameraz.valueAsNumber == 0)
                            renderer0.setClearColor(0x0, 0);
                        else
                            renderer0.setClearColor(0x0, 1);

                        new[] {
                            canvasPX, canvasNX,
                            canvasPY, canvasNY,
                            canvasPZ, canvasNZ
                        }.WithEach(cc =>
                            {
                                cc.clearRect(0, 0, cubefacesize, cubefacesize);
                            }
                        );

                        //gl.clear()

                        if (PXonly.@checked)
                        {
                            var cameraPXsw = Stopwatch.StartNew();

                            renderer0.render(scene, cameraPX);

                            // 35207ms { cameraPXsw = 00:00:00.88 }
                            //75505ms { cameraPXsw = 00:00:00.61 }
                            //Console.WriteLine(new { cameraPXsw });

                            // clear if transparent?
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            //return;

                        }
                        else
                        {

                            #region x
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            // the floor?

                            // render only this one?
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            #region Paint_Image
                            new[] {
                                canvasPX, canvasNX,
                                canvasPY, canvasNY,
                                canvasPZ, canvasNZ
                            }.WithEachIndex(
                                (img, index) =>
                                {
                                    gl4K.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                    //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                    gl4K.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                    // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                    // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                    //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                    //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                    gl4K.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);
                                }
                            );
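
                            // note: the canvas order above (PX, NX, PY, NY, PZ, NZ) must match the
                            // consecutive cube map face constants (34069..34074 listed earlier), since the
                            // upload target is computed as TEXTURE_CUBE_MAP_POSITIVE_X + index.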


                            // http://stackoverflow.com/questions/11544608/how-to-clear-a-rectangle-area-in-webgl

                            if (cameraz.valueAsNumber == 0)
                                gl4K.clearColor(0, 0, 0, 0);
                            else
                                gl4K.clearColor(0, 0, 0, 1);

                            gl4K.clear(gl.COLOR_BUFFER_BIT);

                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                            pass.Paint_Image(
                                0,
                                0,
                                0,
                                0,
                                0
                                //,
                                // gl_FragCoord cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                            );

                            //paintsw.Stop();


                            // what does it do?
                            gl4K.flush();
                            #endregion


                        }

                        // let render man know..
                        if (vsync1renderman != null)
                            if (!vsync1renderman.Task.IsCompleted)
                                vsync1renderman.SetResult(null);

                        if (vsync0ambient != null)
                            if (!vsync0ambient.Task.IsCompleted)
                                vsync0ambient.SetResult(null);
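
                        // frame fully rendered and uploaded; release whoever awaits these
                        // vsync TaskCompletionSources (the render-man and ambient capture loops).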
                    };


            }
            //);





            Console.WriteLine("do you see it?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://www.brucelawson.co.uk/2012/specifying-which-camera-in-getusermedia/
            // https://groups.google.com/forum/#!topic/discuss-webrtc/xLBdsLz_Mbw
            // http://stackoverflow.com/questions/15322622/using-multiple-usb-cameras-with-web-rtc

            // don't we have async available?
            Native.window.navigator.getUserMedia(
                stream =>
                {
                    // getUserMedia error { code = , message = , name = DevicesNotFoundError }


                    var v = new IHTMLVideo { src = stream.ToObjectURL() };

                    v.AttachToDocument();

                    v.play();

                    new IHTMLButton { innerText = "record frame" }.AttachToDocument().onclick +=
                        delegate
                        {
                            // Uncaught IndexSizeError: Index or size was negative, or greater than the allowed value. 
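                            // the IndexSizeError above usually means the video has no dimensions yet;
                            // a minimal guard (sketch):
                            //if (v.clientWidth == 0 || v.clientHeight == 0)
                            //    return;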
                            var c = new CanvasRenderingContext2D(v.clientWidth, v.clientHeight);

                            c.drawImage(
                                v, 0, 0, c.canvas.width, c.canvas.height
                            );

                            var bytes = c.bytes;

                            c.canvas.AttachToDocument();
                        };

                    new IHTMLButton { innerText = "record gif" }.AttachToDocument().WhenClicked(

                       async btn =>
                       {
                           btn.disabled = true;


                           var frames = new List<byte[]>();
                           var x = v.clientWidth;
                           var y = v.clientHeight;

                           //new Timer(
                           //    async tt =>
                           //    {


                           btn.innerText = "rec " + new { frames.Count }.ToString();
                           //while (frames.Count < 60)

                           //while (frames.Count < 20)
                           while (frames.Count < 2)

                               // works with two frames.
                               // are we logging too much data?

                               // can't we hop to another thread with the bytes instead yet?
                               // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160108
                               // are we encoding frame bytes as base64?

                           {
                               await Task.Delay(1000 / 5);
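                               // capture at roughly 5 frames per second; the GIF encoder below
                               // plays them back at 10 fps (delay: 1000 / 10).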

                               btn.innerText = new { frames.Count }.ToString();

                               var bytes = v.bytes;

                               Console.WriteLine(new { bytes.Length });

                               // script: error JSC1000: No implementation found for this native method, please implement
                               // [System.Threading.Tasks.TaskFactory`1.
                               // StartNew(, System.Object, System.Threading.CancellationToken, System.Threading.Tasks.TaskCreationOptions, System.Threading.Tasks.TaskScheduler)]

                               frames.Add(bytes);

                           }


                           Console.WriteLine("encoding!");

                           var e = new Stopwatch();
                           e.Start();


                           // crashes the browser why???
                           var src = await new GIFEncoderWorker(
                               x,
                               y,
                                   delay: 1000 / 10,
                               frames: frames,
                               AtFrame:
                                async index =>
                                {
                                    btn.innerText = new { index, frames.Count }.ToString();

                                    if (index == frames.Count - 1)
                                    {
                                        btn.style.color = "blue";
                                        await Task.Delay(800);
                                        btn.style.color = "";

                                        btn.innerText = "record gif";

                                    }
                                }


                           );


                           Console.WriteLine("done! " + new { e.Elapsed });

                           new IHTMLImage { src = src }.AttachToDocument();


                           btn.disabled = false;


                           btn.title = new { e.Elapsed, src.Length }.ToString();

                       }
                       );
                }
            );

        }
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hznosky.png" "/sdcard/oculus/360photos/"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"



        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst a: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360HZ\Chrome360HZ\bin\Debug\staging\Chrome360HZ.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360HZ\Chrome360HZ\bin\Debug\staging\Chrome360HZ.Application\web

        // ColladaLoader: Empty or non-existing file (assets/Chrome360HZ/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "Chrome360HZ");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif

            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 2048; // 6 faces, ?
            const int cubefacesize = 512; // 6 faces, ?



            var uizoom = 0.05;

            var far = 0xfffff;

            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();

            new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();

            var pause = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "pause" }.AttachToDocument();


            pause.onchange += delegate
            {

                if (pause.@checked)
                    sw.Stop();
                else
                    sw.Start();


            };

            var oo = new List<THREE.Object3D>();

            #region scene
            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();

            var ambient = new THREE.AmbientLight(0x303030);
            scene.add(ambient);

            // should we fix jsc to do a more correct IDL?
            //var directionalLight = new THREE.DirectionalLight(0xffffff, 0.7);
            //directionalLight.position.set(0, 0, 1);
            //scene.add(directionalLight);

            #region light
            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            //var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
            var lightOffset = new THREE.Vector3(
                2000,
                700,

                // lower makes longer shadows 
                700.0
                );
            light.position.copy(lightOffset);
            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.1;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear = 10;
            xlight.shadowCameraFar = 10000;
            xlight.shadowBias = 0.00001;
            xlight.shadowCameraRight = 4000;
            xlight.shadowCameraLeft = -4000;
            xlight.shadowCameraTop = 4000;
            xlight.shadowCameraBottom = -4000;

            xlight.shadowCameraVisible = true;

            scene.add(light);
            #endregion




            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            renderer0.setClearColor(0xfffff, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap faces in the GUI
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);
            var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

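            // the six cube face previews below are laid out as a horizontal cross:
            // SetLocation(8 + cell * column, 8 + cell * row), where cell = uizoom * cubefacesize + 8.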
            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
            cameraNY.position.add(cameraoffset);

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
            cameraPY.position.add(cameraoffset);
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
            cameraNX.position.add(cameraoffset);
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
            cameraPX.position.add(cameraoffset);
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
            cameraNZ.position.add(cameraoffset);
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
            cameraPZ.position.add(cameraoffset);
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;

            #region onmousedown
            Native.body.onmousedown +=
                async e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                        return;

                    // movementX no longer works
                    old = new
                    {


                        e.CursorX,
                        e.CursorY
                    };


                    //e.CaptureMouse();
                    var release = e.Element.CaptureMouse();
                    await e.Element.async.onmouseup;

                    release();


                };
            #endregion



            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
            #region onmousemove
            Native.body.onmousemove +=
                e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                    {
                        Native.body.style.cursor = IStyle.CursorEnum.@default;
                        return;
                    }

                    e.preventDefault();
                    e.stopPropagation();


                    Native.body.style.cursor = IStyle.CursorEnum.move;

                    var pointerLock = canvas0 == Native.document.pointerLockElement;


                    //Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

                    if (e.MouseButton == IEvent.MouseButtonEnum.Left)
                    {

                        oo.WithEach(
                            x =>
                            {
                                x.rotation.y += 0.006 * (e.CursorX - old.CursorX);
                                x.rotation.x += 0.006 * (e.CursorY - old.CursorY);
                            }
                        );

                        old = new
                        {


                            e.CursorX,
                            e.CursorY
                        };



                    }

                };
            #endregion

            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;
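            // 480 / 3840 = 0.125: scales the 4K output canvas down to a 480px wide on-screen preview.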

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);


            var mesh = new THREE.Mesh(new THREE.SphereGeometry(far / 2, 50, 50),
           new THREE.MeshBasicMaterial(new
           {
               map = THREE.ImageUtils.loadTexture(
                   //new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().src
                   //new HTML.Images.FromAssets._4008650304_7f837ccbb7_b().src
                  frame0.src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                   )
           }));
            mesh.scale.x = -1;
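            // flipping the X scale turns the sphere inside out so the equirectangular
            // texture is visible from the camera placed at its center.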

            #region fixup rotation

            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
            mesh.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            #endregion


            scene.add(mesh);


            //new IHTMLButton { }

            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };

            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame


                var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                //var f0 = (IHTMLImage)gl.canvas;
                //var f0 = (IHTMLImage)gl.canvas;
                //var base64 = gl.canvas.toDataURL();


                //frame0.src = base64;
                frame0.src = f0.src;

                // 7MB!

                if (dir == null)
                    return;

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                dir.WriteAllBytes("0000.png", f0);
                // 3.7MB
                // 3840x2160

            };



            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"

            #region WebGLRah66Comanche
            // why isn't it being found?
            // "Z:\jsc.svn\examples\javascript\WebGL\collada\WebGLRah66Comanche\WebGLRah66Comanche\WebGLRah66Comanche.csproj"
            new global::WebGLRah66Comanche.Comanche(
            ).Source.Task.ContinueWithResult(
                dae =>
                {

                    //dae.position.y = -40;
                    //dae.position.z = 280;
                    scene.add(dae);
                    //oo.Add(dae);

                    // wont do it
                    //dae.castShadow = true;

                    dae.children[0].children[0].children.WithEach(x => x.castShadow = true);


                    // the rotors?
                    dae.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                    dae.scale.set(0.5, 0.5, 0.5);
                    dae.position.x = -900;
                    dae.position.z = +900;

                    // raise it up
                    dae.position.y = 400;

                    //var sw = Stopwatch.StartNew();

                    //Native.window.onframe += delegate
                    //{
                    //    //dae.children[0].children[0].children.Last().al
                    //    //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                    //    //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                    //    dae.children[0].children[0].children.Last().rotation.y = sw.ElapsedMilliseconds * 0.01;
                    //};
                }
            );
            #endregion



            #region tree
            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\WebGLGodRay.csproj"

            var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
            var tloader = new THREE.JSONLoader();

            // http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
            // https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
            //System.Runtime.CompilerServices.DynamicAttribute

            tloader.load(

                new WebGLGodRay.Models.tree().Content.src,

                new Action<THREE.Geometry>(
                xgeometry =>
                {

                    var treeMesh = new THREE.Mesh(xgeometry, materialScene);
                    treeMesh.position.set(0, -150, -150);
                    treeMesh.position.x = -900;
                    treeMesh.position.z = -900;

                    treeMesh.position.y = 25;

                    var tsc = 400;
                    treeMesh.scale.set(tsc, tsc, tsc);

                    treeMesh.matrixAutoUpdate = false;
                    treeMesh.updateMatrix();


                    treeMesh.AttachTo(scene);

                }
                )
                );
            #endregion

            #region create field

            // THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.

            // could we get some film grain?
            var planeGeometry = new THREE.CubeGeometry(512, 512, 1);
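            // a lower-memory alternative hinted at by the warning above (a sketch, assuming the
            // binding exposes the buffer geometry type):
            //var planeGeometry = new THREE.PlaneBufferGeometry(512, 512);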
            var plane = new THREE.Mesh(planeGeometry,
                    new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

                );
            //plane.castShadow = false;
            plane.receiveShadow = true;


            {

                var parent = new THREE.Object3D();
                parent.add(plane);
                parent.rotation.x = -Math.PI / 2;
                parent.scale.set(10, 10, 10);

                scene.add(parent);
            }

            var random = new Random();
            var meshArray = new List<THREE.Mesh>();
            var geometry = new THREE.CubeGeometry(1, 1, 1);
            //var sw = Stopwatch.StartNew();

            for (var i = 3; i < 9; i++)
            {

                //THREE.MeshPhongMaterial
                var ii = new THREE.Mesh(geometry,


                    new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

                    //new THREE.MeshLambertMaterial(
                    //new
                    //{
                    //    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
                    //    specular = 0xffaaaa,
                    //    ambient= 0x050505, 
                    //})

                    );
                ii.position.x = i % 7 * 200 - 2.5f;

                // raise it up
                ii.position.y = .5f * 100;
                ii.position.z = -1 * i * 100;
                ii.castShadow = true;
                ii.receiveShadow = true;
                //ii.scale.set(100, 100, 100 * i);
                ii.scale.set(100, 100 * i, 100);


                meshArray.Add(ii);

                scene.add(ii);

                if (i % 2 == 0)
                {
#if FWebGLHZBlendCharacter
                    #region SpeedBlendCharacter
					var _i = i;
					{ WebGLHZBlendCharacter.HTML.Pages.TexturesImages ref0; }

					var blendMesh = new THREE.SpeedBlendCharacter();
					blendMesh.load(
						new WebGLHZBlendCharacter.Models.marine_anims().Content.src,
						new Action(
							delegate
							{
								// buildScene
								//blendMesh.rotation.y = Math.PI * -135 / 180;
								blendMesh.castShadow = true;
								// we cannot scale down we want our shadows
								//blendMesh.scale.set(0.1, 0.1, 0.1);

								blendMesh.position.x = (_i + 2) % 7 * 200 - 2.5f;

								// raise it up
								//blendMesh.position.y = .5f * 100;
								blendMesh.position.z = -1 * _i * 100;


								var xtrue = true;
								// run
								blendMesh.setSpeed(1.0);

								// will in turn call THREE.AnimationHandler.play( this );
								//blendMesh.run.play();
                                // this won't help. bokeh does not see the animation, it seems.
								//blendMesh.run.update(1);

								blendMesh.showSkeleton(!xtrue);

								scene.add(blendMesh);


								Native.window.onframe +=
								 delegate
								 {

									 blendMesh.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;



									 ii.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;

								 };

							}
						)
					);
                    #endregion
#endif
                }

            }
            #endregion


            #region HZCannon
            // "Z:\jsc.svn\examples\javascript\WebGL\HeatZeekerRTSOrto\HeatZeekerRTSOrto\HeatZeekerRTSOrto.csproj"
            new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
                async cube =>
                {
                    // https://github.com/mrdoob/three.js/issues/1285
                    //cube.children.WithEach(c => c.castShadow = true);

                    //cube.traverse(
                    //    new Action<THREE.Object3D>(
                    //        child =>
                    //        {
                    //            // does it work? do we need it?
                    //            //if (child is THREE.Mesh)

                    //            child.castShadow = true;
                    //            //child.receiveShadow = true;

                    //        }
                    //    )
                    //);

                    // um, can edit-and-continue insert code going back in time?
                    cube.scale.x = 10.0;
                    cube.scale.y = 10.0;
                    cube.scale.z = 10.0;



                    //cube.castShadow = true;
                    //dae.receiveShadow = true;

                    //cube.position.x = -100;

                    ////cube.position.y = (cube.scale.y * 50) / 2;
                    //cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



                    // if I want to rotate, how do I do it?
                    //cube.rotation.z = random() + Math.PI;
                    //cube.rotation.x = random() + Math.PI;
                    var sw2 = Stopwatch.StartNew();



                    scene.add(cube);
                    //interactiveObjects.Add(cube);

                    // offset is wrong
                    //while (true)
                    //{
                    //    await Native.window.async.onframe;

                    //    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

                    //}
                }
            );
            #endregion


            #region HZCannon
            new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
                async cube =>
                {
                    // https://github.com/mrdoob/three.js/issues/1285
                    //cube.children.WithEach(c => c.castShadow = true);

                    //cube.traverse(
                    //    new Action<THREE.Object3D>(
                    //        child =>
                    //        {
                    //            // does it work? do we need it?
                    //            //if (child is THREE.Mesh)

                    //            child.castShadow = true;
                    //            //child.receiveShadow = true;

                    //        }
                    //    )
                    //);

                    // um, can edit-and-continue insert code going back in time?
                    cube.scale.x = 10.0;
                    cube.scale.y = 10.0;
                    cube.scale.z = 10.0;



                    //cube.castShadow = true;
                    //dae.receiveShadow = true;


                    // jsc, what about out-of-band code patching?
                    cube.position.z = 600;
                    cube.position.x = -900;
                    //cube.position.y = -400;

                    //cube.position.x = -100;
                    //cube.position.y = -400;

                    ////cube.position.y = (cube.scale.y * 50) / 2;
                    //cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



                    // if I want to rotate, how do I do it?
                    //cube.rotation.z = random() + Math.PI;
                    //cube.rotation.x = random() + Math.PI;
                    var sw2 = Stopwatch.StartNew();



                    scene.add(cube);
                    //interactiveObjects.Add(cube);

                    // offset is wrong
                    //while (true)
                    //{
                    //    await Native.window.async.onframe;

                    //    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

                    //}
                }
            );
            #endregion


            #region HZBunker
            new HeatZeekerRTSOrto.HZBunker().Source.Task.ContinueWithResult(
                     cube =>
                     {
                         // https://github.com/mrdoob/three.js/issues/1285
                         //cube.children.WithEach(c => c.castShadow = true);
                         cube.castShadow = true;

                         //cube.traverse(
                         //    new Action<THREE.Object3D>(
                         //        child =>
                         //        {
                         //            // does it work? do we need it?
                         //            //if (child is THREE.Mesh)
                         //            child.castShadow = true;
                         //            //child.receiveShadow = true;

                         //        }
                         //    )
                         //);

                         // um, can edit-and-continue insert code going back in time?
                         cube.scale.x = 10.0;
                         cube.scale.y = 10.0;
                         cube.scale.z = 10.0;

                         //cube.castShadow = true;
                         //dae.receiveShadow = true;

                         cube.position.x = -1000;
                         //cube.position.y = (cube.scale.y * 50) / 2;
                         cube.position.z = 0;

                         scene.add(cube);
                     }
                 );
            #endregion


            new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
                   dae =>
                   {
                       // 90deg
                       dae.rotation.x = -Math.Cos(Math.PI);

                       //dae.scale.x = 30;
                       //dae.scale.y = 30;
                       //dae.scale.z = 30;
                       dae.position.z = -(65 - 200);





                       var scale = 0.9;

                       // jsc, do we have ILObserver available yet?
                       dae.scale.x = scale;
                       dae.scale.y = scale;
                       dae.scale.z = scale;


                       #region onmousewheel
                       Native.body.onmousewheel +=
                           e =>
                           {
                               e.preventDefault();

                               //camera.position.z = 1.5;

                               // min/max clamp; should adjust the step speed too!
                               // max 4.0
                               // min 0.6
                               dae.position.z -= 10.0 * e.WheelDirection;

                               //camera.position.z = 400;
                               //dae.position.z = dae.position.z.Max(-200).Min(200);
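                               // a hedged sketch combining the commented clamp above with the speed note:
                               //var step = 10.0 * e.WheelDirection;
                               //dae.position.z = Math.Min(200, Math.Max(-200, dae.position.z - step));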

                               //Native.document.title = new { z }.ToString();

                           };
                       #endregion


                       //dae.position.y = -80;

                       scene.add(dae);
                       oo.Add(dae);




                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {
                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               dae.position.x = Math.Sin(sw.ElapsedMilliseconds * 0.0001) * 190.0;
                               dae.position.y = Math.Cos(sw.ElapsedMilliseconds * 0.0001) * 90.0;
                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads


                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?


                               #region x
                               // upside down?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
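                               // note: the "+ index" trick above relies on the six cube-face constants
                               // being consecutive (34069..34074, listed above), so the canvases must stay
                               // in +X, -X, +Y, -Y, +Z, -Z order.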


                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                   //,

                                // gl_FragCoord
                                   // cannot be scaled, and can be referenced directly.
                                   // need another way to scale
                                   //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // flush any queued GL commands to the driver
                               gl.flush();

                           };


                   }
               );


            #endregion



            Console.WriteLine("do you see it?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {

            //            will skip DefineVersionInfoResource
            //2b5c:02:01:1e RewriteToAssembly error: System.NotSupportedException: Type 'xchrome.BCLImplementation.System.Net.Sockets.__TcpListener' was not completed.

            // Uncaught ReferenceError: kvYlVNrcg0qBT40xR_bUt6Q is not defined 

            // it does work, yet chrome server is a slow asset server for now..

#if false
            #region AtFormCreated
            FormStyler.AtFormCreated =
                 s =>
                 {
                     s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

                     var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
                 };
            #endregion



            #region ChromeTCPServer
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                chrome.Notification.DefaultTitle = "Craft";
                chrome.Notification.DefaultIconUrl = new HTML.Images.FromAssets.Preview().src;

                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                    AppSource.Text,
                    AtFormCreated: FormStyler.AtFormCreated
                );

                return;
            }
            #endregion



#endif



            // Process com.abstractatech.gamification.craft (pid 1456) has died.
            // http://stackoverflow.com/questions/7316680/process-has-died

            // http://zproxy.wordpress.com/2012/11/13/dos-warcraft/
            // load the shadow land


            // why does this not work??
            page.body.style.cursorImage = new severed();
            page.Preview.style.cursorImage = new guantlet();
            page.Avatar.style.cursorImage = new guantlet();

            // http://www.effectgames.com/effect/article.psp.html/joe/Old_School_Color_Cycling_with_HTML5
            // http://www.effectgames.com/demos/canvascycle/
            // http://productforums.google.com/forum/#!topic/chrome/hgxRgmT0INY
            // http://www.boutell.com/newfaq/creating/midi.html
            //new com.abstractatech.gamification.craft.HTML.Audio.FromAssets.intro().AttachToHead().play();

            // http://www.javascripter.net/faq/sound/play.htm

            new HTML.Audio.FromAssets.IntroWarII { loop = true, volume = 0.5 }.play();

            //new IHTMLObject
            //{
            //    data =
            //        new com.abstractatech.gamification.craft.HTML.Audio.FromAssets.intro().src
            //}.AttachToHead();

            //            <object data="mysong.mid">
            //<param name="loop" value="10"/>
            //If you're seeing this, you don't have a MIDI player
            //on your computer.
            //</object>


            // IStyleSheet.Default.AddRule("body", "cursor: url('" + new cursor().src + "'), auto;", 0);
            // 

            // do we have to use MIDI.js?
            //new HTML.Audio.FromAssets.Warcraft1_TitleTheme().play();

            #region OrcSounds
            var orc = new Func<Action>(
                delegate
                {
                    //                    I/ChromiumHTTPDataSourceSupport(  124): Request failed with status 4 and os_error -102
                    //I/chromium(27962): [INFO:CONSOLE(40699)] "Uncaught InvalidStateError: An attempt was made to use an object that is not, or is no longer, usable.", source: http://192.168.43.7:18906/view-source (40699)

                    var nodes = new OrcSounds().AudioElements()

                        // will we run out of memory?
                        //.Take(2)

                        .ToArray();

                    nodes.WithEach(a => a.load());

                    return delegate
                    {
                        var r = new Random().Next();
                        var i = r % nodes.Length;

                        Console.WriteLine(new { i });

                        nodes[i].play();


                        var src = nodes[i].src;

                        nodes[i] = (IHTMLAudio)nodes[i].cloneNode(false);
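                        // presumably cloning gives a fresh element for the next trigger while the
                        // element that is currently playing is allowed to finish.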

                        //nodes[i].src = src;
                        //nodes[i].load();

                    };
                }
            )();
            #endregion


            page.Preview.onclick +=
                delegate
                {
                    orc();
                };

            // vb xlinq ..<img>


            #region arena
            // could run out of memory fast!
            var map = new Point(2048 * 3, 2048 * 2);

            var arena = new ArenaControl();

            arena.Layers.Canvas.style.backgroundColor = Color.FromGray(0);
            arena.SetLocation(Rectangle.Of(0, 0, Native.window.Width, Native.window.Height));
            arena.SetCanvasSize(map);

            arena.Control.AttachToDocument();


            page.header.AttachTo(
                arena.Layers.User
            );

            //arena.DrawTextToInfo("Craft", new Point(8, 8), Color.White);

            Native.window.onresize +=
                delegate
                {
                    arena.SetLocation(
                        Rectangle.Of(0, 0, Native.window.Width, Native.window.Height));

                    arena.SetCanvasPosition(
                        arena.CurrentCanvasPosition
                        );
                };
            #endregion

            //arena.CurrentCanvasPosition = 
            arena.SetCanvasViewCenter(
              new Point(
                  1048 + 2048,
                  1024 + 2048
              )
            );


            //{ X = -2645 } 
            //{ X = -2290 } 


            //Console.WriteLine(new { arena.CurrentCanvasPosition.X });
            Console.WriteLine(new { arena.GetCanvasViewCenter().X });

            new ScriptCoreLib.JavaScript.Runtime.Timer(
                delegate
                {
                    // two fingers?
                    if (arena.DragEngaged)
                        return;

                    if (arena.InSelectionMode)
                        return;



                    var pp = arena.GetCanvasViewCenter();

                    //Console.WriteLine(new { pp.X });

                    pp.X += 1;

                    //Console.WriteLine(new { pp.X });

                    arena.SetCanvasViewCenter(pp);
                }
            ).StartInterval(1000 / 5);
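            // i.e. pan the view one pixel to the right five times per second (200 ms interval).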

            var scalarx = 0;

            new WarcraftImages().ImageElements().Randomize()
                .Take(6)
                .WithEachIndex(
                (img, index) =>
                {
                    // we need 4x images. think retina displays.
                    Console.WriteLine(new { index });

                    img.InvokeOnComplete(
                        delegate
                        {
                            var context = new CanvasRenderingContext2D(img.width * 2, img.height * 2)
                            {
                                // http://phoboslab.org/log/html5
                                // not supported for IE
                                ImageSmoothingEnabled = false
                            };


                            context.drawImage(img, 0, 0, context.canvas.width, context.canvas.height);


                            var x = scalarx % map.X;
                            var y = (int)Math.Floor((float)scalarx / map.X) * context.canvas.height;
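                            // tiles are laid out left to right; once the running offset passes the map
                            // width, x wraps and y drops down by one tile row.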

                            //Console.WriteLine(new { index, x, y });

                            context.canvas.AttachTo(arena.Layers.Canvas).style.SetLocation(
                                x, y
                            );

                            scalarx += context.canvas.width;
                        }
                    );

                    //img.AttachToDocument();
                }
            );
        }
        // 20140526 roslyn friendly!
        // and broken again

        //        script: error JSC1000:
        //error:
        //  statement cannot be a load instruction(or is it a bug?)
        //  [0x000a]
        //        ldarg.0    +1 -0

        // assembly: V:\Abstractatech.JavaScript.Avatar.Application.exe
        // type: Abstractatech.JavaScript.Avatar.ApplicationImplementation+<MakeCamGrabber>d__1+<MoveNext>0600002d, Abstractatech.JavaScript.Avatar.Application, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
        // offset: 0x000a
        //  method:Int32<06ad> call.try(<MoveNext>0600002d, <MakeCamGrabber>d__1 ByRef, System.Runtime.CompilerServices.TaskAwaiter`1[ScriptCoreLib.JavaScript.DOM.HTML.IHTMLImage] ByRef, System.Runtime.CompilerServic

        public static async void MakeCamGrabber(
            IHTMLDiv c,
            bool sizeToWindow = false,
            Action<WebCamAvatarsSheet1Row> yield = null
            )
        {
            if (sizeToWindow)
            {
                #region onresize
                Native.window.With(
                    async window =>
                    {
                        while (true)
                        {
                            c.style.transformOrigin = "0% 0%";

                            var scale =

                                Native.window.Height / (double)(480 + 96);

                            if (Native.window.Height > Native.window.Width)
                                scale = Native.window.Width / (double)(640);


                            c.style.transform = "scale("
                                + scale
                                + ")";

                            var w = (int)(scale * (640));
                            var h = (int)(scale * (480 + 96));


                            c.style.width = w + "px";
                            c.style.height = h + "px";

                            c.style.SetLocation(
                                (Native.window.Width - w) / 2,
                                (Native.window.Height - h) / 2
                            );
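                            // worked example: on a 1920x1080 window, scale = 1080 / 576 = 1.875,
                            // so the 640x576 stage is shown at 1200x1080 and centered at (360, 0).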

                            await window.async.onresize;
                        }
                    }
                );
                #endregion

            }

            c.style.backgroundColor = "black";

            #region localStorageKeys

            // or webSQL?
            var localStorageKeys = new
            {

                img640x480 = new { img = "avatar", w = 640, h = 480 },
                img96gif = new { img = "avatar", w = 96, h = 96 },

                frames = new[] {
                            new { index= 0, img = "avatar", w = 96, h = 96 },
                            new { index= 1, img = "avatar", w = 96, h = 96 },
                            new { index= 2, img = "avatar", w = 96, h = 96 },
                            new { index= 3, img = "avatar", w = 96, h = 96 },
                        }
            };
            #endregion



            //c.css.children
            c.css.children.style.SetLocation(0, 0);

            c.style.position = IStyle.PositionEnum.relative;
            c.style.width = (640) + "px";
            c.style.height = (480 + 96) + "px";






            c.css.hover.style.cursor = IStyle.CursorEnum.pointer;

            #region empty
            var css = c.css.empty.before;

            css.style.textAlign = IStyle.TextAlignEnum.center;
            css.style.display = IStyle.DisplayEnum.block;
            css.style.width = (640) + "px";
            css.style.color = "white";
            css.style.paddingTop = 300 + "px";


            c.css.hover.empty.before.style.color = "yellow";
            #endregion




            var retry = 0;
            retry:
            retry++;

            Console.WriteLine(new { retry });


            css.contentText = "either drag a picture here -or- click here to use your webcam";

            var snapshot = new CanvasRenderingContext2D(640, 480);

            var frames = new List<IHTMLImage>();

            c.css[IHTMLElement.HTMLElementEnum.img][0].style.SetLocation(96 * 0, 480);
            c.css[IHTMLElement.HTMLElementEnum.img][1].style.SetLocation(96 * 1, 480);
            c.css[IHTMLElement.HTMLElementEnum.img][2].style.SetLocation(96 * 2, 480);
            c.css[IHTMLElement.HTMLElementEnum.img][3].style.SetLocation(96 * 3, 480);
            c.css[IHTMLElement.HTMLElementEnum.img][4].style.SetLocation(96 * 4, 480);
            c.css[IHTMLElement.HTMLElementEnum.img][5].style.SetLocation(96 * 5, 480);
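            // the six most recent 96x96 frame thumbnails sit in a strip below the 640x480 snapshot area.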

            var size = 400;

            #region newmask
            Action newmask = delegate
            {
                #region grid
                new IHTMLDiv
                {

                }.AttachTo(c).With(
                    async grid =>
                    {
                        grid.style.SetLocation(
                            (640 - size) / 2,
                            (480 - size) / 2,

                            size - 2,
                            size - 2
                        );

                        var s = Stopwatch.StartNew();

                        // X:\jsc.svn\examples\javascript\LINQ\LINQWebCamAvatars\LINQWebCamAvatars\Application.cs
                        // until orphanized
                        while (c.parentNode != null)
                        {
                            //await Native.window.requestAnimationFrameAsync;
                            await Native.window.async.onframe;

                            var a = (Math.Cos(s.ElapsedMilliseconds * 0.001) + 1) / 2.0;

                            grid.style.border = "1px dotted rgba(255,255,255, "
                                + (1.0 - a)
                                + ")";

                            //mask_css.style.Opacity = a;

                        }
                    }
                );
                #endregion

                #region mask
                var mask = new CanvasRenderingContext2D(640, 480 + 96);

                mask.canvas.style.zIndex = 100;

                //mask.drawImage(
                //    v, 0, 0,

                //    mask.canvas.width,
                //    mask.canvas.height
                //);

                mask.fillStyle = "rgba(0,0,0, 0.8)";
                mask.fillRect(
                       0, 0,

                       640,
                       480 + 96
                   );


                mask.clearRect(
                      (640 - size) / 2,
                        (480 - size) / 2,

                        size,
                        size
                );



                //var bytes = i.bytes;

                mask.canvas.AttachTo(c);
                #endregion


            };
            #endregion


            #region localStorage
            var base64 = Native.window.localStorage[localStorageKeys.img640x480];
            if (base64 != null)
            {
                var base64image = new IHTMLImage { src = base64 };

                await base64image;

                snapshot.drawImage(base64image, 0, 0, 640, 480);
                snapshot.canvas.AttachTo(c);


                for (int i = 0; i < 5; i++)
                {
                    var base64f = Native.window.localStorage[localStorageKeys.frames[
                        localStorageKeys.frames.Length - i - 1]];

                    if (base64f != null)
                    {
                        var newframe = new IHTMLImage { src = base64f };
                        newframe.AttachTo(c);
                        frames.Add(newframe);
                    }
                }



                newmask();

                var base64gif = Native.window.localStorage[localStorageKeys.img96gif];

                #region atgif
                Action<string> atgif =
                    gif =>
                    {
                        //Native.document.title = new { gif.Length }.ToString();

                        var newframe = new IHTMLImage { src = gif };

                        newframe.style.zIndex = 300;

                        newframe.AttachTo(c);
                        frames.Add(newframe);

                        //if (frames.Count > 5)
                        //    frames.Remove(frames[0].Orphanize());
                    };
                #endregion


                if (base64gif != null)
                    atgif(base64gif);
                else
                {
                    var bytes = frames.Select(x => x.bytes.Result).ToArray().AsEnumerable();

                    //bytes = bytes.Concat(bytes.Skip(1).Reverse().Skip(1)).ToArray().AsEnumerable();

                    // build it
                    new GIFEncoderWorker(
                         96,
                         96,
                             delay: 1000 / 10,
                         frames: bytes,
                         AtFrame:
                          async index =>
                          {
                              //Native.document.title = new { index }.ToString();
                          }


                     ).Task.ContinueWithResult(
                        gif =>
                        {
                            Native.window.localStorage[localStorageKeys.img96gif] = gif;


                            // report sizes. smaller is better if db
                            Console.WriteLine(
                                // { Avatar640x480 = 54843, Avatar96gif = 54734 } 
                                new
                                {
                                    Avatar640x480 = base64.Length,
                                    Avatar96gif = gif.Length
                                }
                            );


                            if (yield != null)
                                yield(
                                    new WebCamAvatarsSheet1Row
                                    {
                                        Avatar640x480 = base64,
                                        Avatar96frame1 = Native.window.localStorage[localStorageKeys.frames[0]],
                                        // do we want to report frames?
                                        Avatar96gif = gif
                                    }
                                );


                            atgif(gif);
                        }
                        );
                }
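                // flow recap: the cached thumbnail frames are handed to GIFEncoderWorker
                // (delay 1000 / 10, i.e. roughly 10 fps), and the resulting 96x96 GIF is stored
                // back into localStorage and reported via yield.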

            }
            #endregion

            Console.WriteLine("await c.async.onclick");
            await c.async.onclick;
            Console.WriteLine("await c.async.onclick done");

            c.Clear();

            css.content = "awaiting for video";




            var v = await Native.window.navigator.async.onvideo;


            v.AttachTo(c);
            v.play();


            var mask_css = c.css[IHTMLElement.HTMLElementEnum.canvas];



            newmask();

            var z96 = new CanvasRenderingContext2D(96, 96);

            z96.canvas.AttachTo(c);
            //z96.canvas.style.backgroundColor = "gray";
            z96.canvas.style.SetLocation(96 * 5, 480);

            z96.canvas.style.zIndex = 300;


            var ok = c.async.onclick;

            #region frames



            while (!ok.IsCompleted)
            {

                z96.drawImage(
                    image: v,
                    sx: (640 - size) / 2,
                    sy: (480 - size) / 2,

                    sw: size,
                    sh: size,
                    dx: 0,
                    dy: 0,
                    dw: 96,
                    dh: 96
                );
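                // crops the centered size-by-size square out of the live video and scales it
                // down into the 96x96 thumbnail.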

                var newframe = new IHTMLImage { src = z96.canvas.toDataURL() };
                newframe.AttachTo(c);
                frames.Add(newframe);

                if (frames.Count > 5)
                    frames.Remove(frames[0].Orphanize());


                await (1000 / 15);
            }
            #endregion

            snapshot.drawImage(v, 0, 0, 640, 480);

            #region localStorage

            // https://developer.mozilla.org/en/docs/Web/API/HTMLCanvasElement
            //Native.window.localStorage[localStorageKeys.img640x480] = 

            var firstTry = snapshot.canvas.toDataURL(

            // shall we use enum
            type: "image/jpeg"
            );
            if (firstTry.Length >= (1024 * 64))
            {
                Console.WriteLine("Reducing quality");
                firstTry = snapshot.canvas.toDataURL(

            // shall we use enum
            type: "image/jpeg",
            quality: 0.5
            );
            }
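            // i.e. if the first JPEG encode is 64 KB or larger, re-encode at quality 0.5 to keep
            // the stored data URL small.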

            // can we use SQL instead now?
            Native.window.localStorage[localStorageKeys.img640x480] = firstTry;



            frames.WithEachIndex(
                (k, index) =>
                {
                    Native.window.localStorage[localStorageKeys.frames[index]] = k.src;
                }
            );

            Native.window.localStorage.removeItem(localStorageKeys.img96gif);
            #endregion


            v.src = "";
            c.Clear();



            goto retry;
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150812/cssstereo



            // https://www.reddit.com/r/GearVR/comments/35g8w7/real_world_stereoscopic_360_panoramas/
            // https://www.reddit.com/r/oculus/comments/35gcn2/real_world_stereoscopic_panoramas_with_gear_vr/
            // http://stackoverflow.com/questions/9032050/canvas-mask-an-image-and-preserve-its-alpha-channel
            // https://3dwarehouse.sketchup.com/model.html?id=48bcce07b0baf689d9e6f00e848ea18
            // https://www.youtube.com/watch?v=OurzBO1cDto

            Native.body.style.margin = "0px";
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;
            Native.body.Clear();

            new IHTMLPre { "loading stereo... 2MB!" }.AttachToDocument();

            // if this is a chrome app
            // would GearVR 360 app be able to request current stereo cubemap?






            // Uncaught TypeError: Cannot read property '0' of undefined
            var keys = new int[0xffff];

            //c = a[0].keys[jAEABqtXvT6n4h6m6ZavdA(b)];
            //d = c[0];
            //c[0] = (((d + 1)));



            Native.document.body.onkeyup +=
             e =>
             {

                 var z = keys[e.KeyCode];

                 if (z > 0)
                     z++;
                 else
                     z = 1;

                 // count how many times each key has been released; the S-key count below cycles the render mode
                 keys[e.KeyCode] = z;

                 // 4333ms {{ KeyCode = 83, S = 83, z = NaN }}
                 Console.WriteLine(new { e.KeyCode, System.Windows.Forms.Keys.S, z });

             };


            // skybox cubemap
            var iii = new IHTMLImage[] {
                        new px(), new nx(),
                        new py(), new ny(),
                        new pz(), new nz()
                    };


            //var f12 = await new airplane().async.oncomplete;
            //var f12 = new airplane();



            Task.WhenAll(from x in iii select x.async.oncomplete).ContinueWithResult(
                ii =>



               // how can we do an alpha clear on the jpg?
               //new airplane().async.oncomplete.ContinueWithResult(
               //new airplane_leftwindows().async.oncomplete.ContinueWithResult(
               new airplane_mask().async.oncomplete.ContinueWithResult(
               f12_mask =>
               new airplane().async.oncomplete.ContinueWithResult(
               f12 =>
               {
                   //Func<int, string, IHTMLImage> xf = (i, globalCompositeOperation) =>
                   //{
                   //    // we do have a skybox example somewhere...
                   //    var f1 = new CanvasRenderingContext2D(w: f12.height, h: f12.height);

                   //    f1.drawImage(f12, i * f12.height, 0, sw: f12.height, sh: f12.height, dx: 0, dy: 0, dw: f12.height, dh: f12.height);

                   //    // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation
                   //    f1.globalCompositeOperation = globalCompositeOperation;

                   //    f1.drawImage(f12_mask, i * f12.height, 0, sw: f12.height, sh: f12.height, dx: 0, dy: 0, dw: f12.height, dh: f12.height);

                   //    return f1;
                   //};


                   //new[] {
                   //    "source-over",
                   //    "source-in",
                   //    "source-out",
                   //    "source-atop",
                   //    "destination-over",
                   //    "destination-in",
                   //    "destination-out",
                   //    "destination-atop",
                   //    "lighter",
                   //    "copy",
                   //    "xor",
                   //    "multiply",
                   //    "screen",
                   //    "overlay",
                   //    "darken",
                   //    "lighten",
                   //    "color-dodge",
                   //    "color-burn",
                   //    "hard-light",
                   //    "soft-light",
                   //    "difference",
                   //    "exclusion",
                   //    "hue",
                   //    "saturation",
                   //    "color",
                   //    "luminosity"
                   //}.WithEach(globalCompositeOperation =>
                   //    {

                   //        xf(4, globalCompositeOperation).AttachToDocument().title = globalCompositeOperation;
                   //    }
                   //);


                   Func<int, IHTMLImage> f = i =>
                   {
                       // we do have a skybox example somewhere...
                       var f1 = new CanvasRenderingContext2D(w: f12.height, h: f12.height);

                       f1.drawImage(f12, i * f12.height, 0, sw: f12.height, sh: f12.height, dx: 0, dy: 0, dw: f12.height, dh: f12.height);

                       // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/globalCompositeOperation
                       f1.globalCompositeOperation = "multiply";

                       f1.drawImage(f12_mask, i * f12.height, 0, sw: f12.height, sh: f12.height, dx: 0, dy: 0, dw: f12.height, dh: f12.height);

                       return f1;
                   };
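                   // f assumes the airplane image is a horizontal strip of square faces (each face is
                   // height x height): face i is cut at x = i * height, then the mask image is
                   // multiplied in so that fully black pixels can be discarded by the skybox fragment
                   // shader below. Faces 0..5 feed one eye's cubemap, 6..11 the other's.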



                   var skyScene0 = new THREE.Scene();
                   var skyScene1 = new THREE.Scene();
                   var skyScene2 = new THREE.Scene { };


                   #region  createSky
                   // gearvr has photos360 app



                   //var textureCube = THREE.ImageUtils.loadTextureCube(urls);


                   var vertexShader =
            @"
varying vec3 vWorldPosition;
            "
            //+ THREE.ShaderChunk["logdepthbuf_pars_vertex"]
            +
            @"
void main() {
vec4 worldPosition = modelMatrix * vec4( position, 1.0 ); 
vWorldPosition = worldPosition.xyz;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
"
    //+ THREE.ShaderChunk["logdepthbuf_vertex"]
    + @"
}
"

        ;
                   // https://www.opengl.org/sdk/docs/tutorials/ClockworkCoders/texturing.php

                   var fragmentShader =
            @"
uniform samplerCube tCube; 
uniform float tFlip; 
varying vec3 vWorldPosition;
                   "
    //+ THREE.ShaderChunk["logdepthbuf_pars_fragment"] 
    + @"
void main() 
{

vec4 c = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );
"

    // : gl.getShaderInfoLog() ERROR: 0:52: 'assign' :  cannot convert from 'const int' to 'highp float'

    //+ THREE.ShaderChunk["logdepthbuf_fragment"] 
    + @"

// _mask.png has the bits
if (c.r == 0.0) if (c.g == 0.0) if (c.b == 0.0) discard;

gl_FragColor = c;

}
"
               // vec4 textureCube(samplerCube s, vec3 coord [, float bias])
               // 
               ;



                   var skyMeshmaterial0 = new THREE.ShaderMaterial(new
                   {
                       fragmentShader = fragmentShader,
                       vertexShader = vertexShader,
                       uniforms = new
                       {
                           tCube = new
                           {
                               type = "t",
                               value = new THREE.CubeTexture(ii
                           )
                               {
                                   flipY = false,

                                   // !!
                                   needsUpdate = true
                               }
                           },
                           tFlip = new { type = "f", value = -1 }
                       },
                       depthWrite = false,
                       side = THREE.BackSide
                   });

                   var mesh0 = new THREE.Mesh(new THREE.BoxGeometry(10000, 10000, 10000), skyMeshmaterial0).AttachTo(skyScene0);



                   // stereo vs mono skybox
                   var skyMeshmaterial1 = new THREE.ShaderMaterial(new
                   {
                       fragmentShader = fragmentShader,
                       vertexShader = vertexShader,
                       uniforms = new
                       {
                           tCube = new
                           {
                               type = "t",
                               value = new THREE.CubeTexture(
                                 f(0), f(1),
                                f(2), f(3),
                                f(4), f(5)
                               )
                               {
                                   flipY = false,

                                   // !!
                                   needsUpdate = true
                               }
                           },
                           tFlip = new { type = "f", value = -1 }
                       },
                       depthWrite = false,
                       side = THREE.BackSide,

                   });

                   var mesh1 = new THREE.Mesh(new THREE.BoxGeometry(10000, 10000, 10000), skyMeshmaterial1).AttachTo(skyScene1);


                   var skyMeshmaterial2 = new THREE.ShaderMaterial(new
                   {
                       fragmentShader = fragmentShader,
                       vertexShader = vertexShader,
                       uniforms = new
                       {
                           tCube = new
                           {
                               type = "t",
                               value = new THREE.CubeTexture(
                                f(0 + 6), f(1 + 6),
                               f(2 + 6), f(3 + 6),
                               f(4 + 6), f(5 + 6)
                              )
                               {
                                   flipY = false,


                                   // !!
                                   needsUpdate = true
                               }
                           },
                           tFlip = new { type = "f", value = -1 }
                       },
                       depthWrite = false,
                       side = THREE.BackSide
                   });

                   var mesh2 = new THREE.Mesh(new THREE.BoxGeometry(10000, 10000, 10000), skyMeshmaterial2).AttachTo(skyScene2);
                   #endregion


                   Native.body.Clear();

                   var skyCamera = new THREE.PerspectiveCamera(90, Native.window.aspect, 1, 20000);
                   //var skyCamera = new THREE.PerspectiveCamera(45, Native.window.aspect, 1, 20000);

                   // not using css?
                   // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150812/cssstereo

                   var renderer = new THREE.WebGLRenderer(new { antialias = true, alpha = false })
                   {
                       autoClear = false,
                       shadowMapEnabled = true,
                       shadowMapType = THREE.PCFSoftShadowMap
                   };

                   renderer.setSize(Native.window.Width, Native.window.Height);
                   renderer.domElement.AttachToDocument();


                   #region onresize
                   new { }.With(
                        async delegate
                        {
                            do
                            {
                                skyCamera.aspect = Native.window.aspect;
                                skyCamera.updateProjectionMatrix();
                                renderer.setSize(Native.window.Width, Native.window.Height);

                            } while (await Native.window.async.onresize);
                        }
                    );
                   #endregion

                   Native.document.body.onmousewheel +=
                      e =>
                      {
                          skyCamera.fov -= e.WheelDirection * 5.0;
                          skyCamera.updateProjectionMatrix();
                      };



                   var target0 = new THREE.Vector3();

                   var lon = 90.0;
                   var lat = 0.0;
                   var phi = 0.0;
                   var theta = 0.0;





                   var drag = false;

                   var sw = Stopwatch.StartNew();

                   Native.window.onframe +=
                        delegate
                        {
                            var sinfps = 1 + (int)(((Math.Sin(sw.ElapsedMilliseconds * 0.0001) + 1.0) / 2.0) * 59);
                            Native.document.title = "" + new { sinfps };


                            //var eye = (sw.ElapsedMilliseconds / (200)) % 2;
                            //var eye = (sw.ElapsedMilliseconds / (1000 / 15)) % 2;
                            //var eye = (sw.ElapsedMilliseconds / (1000 / 30)) % 2;
                            //var eye = (sw.ElapsedMilliseconds / (1000 / 45)) % 2;
                            //var eye = (sw.ElapsedMilliseconds / (1000 / 60)) % 2;
                            var eye = (sw.ElapsedMilliseconds / (1000 / sinfps)) % 2;
                            //  if we are a multiprocess renderer, get the volatile eye id 
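                             // sinfps sweeps between 1 and 60 over time, so when stereo is enabled
                             // the left/right sky scenes alternate at a slowly varying rate.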


                             // doesn't work?
                            //skyMesh0.visible = eye == 0;

                            //mesh0.rotation = new THREE.Vector3(0, 0, sw.ElapsedMilliseconds);


                            if (Native.document.pointerLockElement == Native.document.body)
                                lon += 0.00;
                            else
                            {

                                lon += 0.01;
                            }

                            lat = Math.Max(-85, Math.Min(85, lat));


                            // this is wrong, but gets the idea across.
                            phi = THREE.Math.degToRad(90 - lat) + Math.Sin(sw.ElapsedMilliseconds * 0.001) * 0.1 - 0.3;
                            theta = THREE.Math.degToRad(lon);

                            target0.x = Math.Sin(phi) * Math.Cos(theta);
                            target0.y = Math.Cos(phi);
                            target0.z = Math.Sin(phi) * Math.Sin(theta);

                            skyCamera.lookAt(target0);


                            renderer.clear();

                            renderer.render(skyScene0, skyCamera);


                            phi = THREE.Math.degToRad(90 - lat);
                            theta = THREE.Math.degToRad(lon);

                            target0.x = Math.Sin(phi) * Math.Cos(theta);
                            target0.y = Math.Cos(phi);
                            target0.z = Math.Sin(phi) * Math.Sin(theta);
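                             // standard spherical-to-Cartesian: phi is the polar angle from +Y and
                             // theta the azimuth, giving the unit look direction
                             // (sin(phi)*cos(theta), cos(phi), sin(phi)*sin(theta)).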

                            skyCamera.lookAt(target0);

                            if ((keys[(int)System.Windows.Forms.Keys.S] % 3) == 0)
                            {

                                if (eye == 0)
                                    renderer.render(skyScene1, skyCamera);
                                else
                                    renderer.render(skyScene2, skyCamera);
                            }
                            else if ((keys[(int)System.Windows.Forms.Keys.S] % 3) == 1)
                            {
                                renderer.render(skyScene1, skyCamera);
                            }
                        };

                   #region ontouchmove
                   var touchX = 0;
                   var touchY = 0;

                   Native.document.body.ontouchstart +=
                                          e =>
                                          {
                                              e.preventDefault();

                                              var touch = e.touches[0];

                                              touchX = touch.screenX;
                                              touchY = touch.screenY;

                                          };


                   Native.document.body.ontouchmove +=
                     e =>
                     {
                         e.preventDefault();

                         var touch = e.touches[0];

                         lon -= (touch.screenX - touchX) * 0.1;
                         lat += (touch.screenY - touchY) * 0.1;

                         touchX = touch.screenX;
                         touchY = touch.screenY;

                     };
                   #endregion






                   #region camera rotation
                   Native.document.body.onmousemove +=
                      e =>
                      {
                          e.preventDefault();

                          if (Native.document.pointerLockElement == Native.document.body)
                          {
                              lon += e.movementX * 0.1;
                              lat -= e.movementY * 0.1;

                              //Console.WriteLine(new { lon, lat, e.movementX, e.movementY });
                          }
                      };


                   Native.document.body.onmouseup +=
                     e =>
                     {
                         drag = false;
                         e.preventDefault();
                     };

                   Native.document.body.onmousedown +=
                       e =>
                       {
                           //e.CaptureMouse();

                           drag = true;
                           e.preventDefault();
                           Native.document.body.requestPointerLock();

                       };


                   Native.document.body.ondblclick +=
                       e =>
                       {
                           e.preventDefault();

                           Console.WriteLine("requestPointerLock");
                       };

                   #endregion



               }
           )
           )
           );

        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // haha this is rather messed up on android.
            // perhaps the css3d runs out of memory?

            //var frame0 = Task.Delay(100);
            //var frame1 = Task.Delay(200);



            var sw = Stopwatch.StartNew();



            //var lon0 = 90.0;
            var lon0 = 0.0;
            var lon1 = 0.0;

            var lon = new sum(
                 () => lon0,
                 () => lon1
             );

            var lat0 = 0.0;
            var lat1 = 0.0;

            // or could we do it with byref or pointers?
            var lat = new sum(
                () => lat0,
                () => lat1
            );
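
            // lon/lat are composite values: the *0 components are driven by mouse/touch below,
            // while the *1 components are written by the deviceorientation handlers;
            // sum presumably evaluates the two getters together as lon0 + lon1 / lat0 + lat1.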

            var phi = 0.0;
            var theta = 0.0;



            var camera_rotation_z = 0.0;


            var drag = false;

            new { }.With(
                async delegate
                {

                    //var f12 = await new airplane().async.oncomplete;

                    // S6 likes 1024 more...
                    var f12 = await new airplane_1024().async.oncomplete;



                    Action<int> draweye = async (int eyeid) =>
                        {
                            // view-source:file:///X:/opensource/github/three.js/examples/css3d_panorama.html

                            //var camera = new THREE.PerspectiveCamera(75, Native.window.aspect, 1, 1000);

                            // wearality lenses?
                            //var camera = new THREE.PerspectiveCamera(90, Native.window.aspect, 1, 1000);
                            //var camera = new THREE.PerspectiveCamera(120, Native.window.aspect, 1, 1000);
                            //var camera = new THREE.PerspectiveCamera(96, Native.window.aspect, 1, 1000);
                            var camera = new THREE.PerspectiveCamera(90, Native.window.aspect, 1, 1000);
                            var scene = new THREE.Scene();

                            var renderer0 = new THREE.CSS3DRenderer();

                            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150621


                            // s6 webview won't know?
                            //var f12_height = 1536;
                            // or is s6 memory or size limited?
                            var f12_height = f12.height;

                            //var f12 = await new airplane_low().async.oncomplete;
                            //var f12 = await new airplane_1024().async.oncomplete;
                            //var f12 = await new airplane_729().async.oncomplete;
                            // stop flickering, dammit
                            //var f12 = await new airplane_400().async.oncomplete;



                            // ok we got this cool 12 frame stereo map.

                            // what happens if we just pass it?
                            // haha. we get all frames in one.
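
                            // the strip appears to be laid out as 12 square tiles of f12_height each:
                            // tiles 0..5 hold one eye's cube faces and tiles 6..11 the other eye's,
                            // which is why drawImage below crops at i * f12_height and (i + 6) * f12_height.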


                            //Func<int, var> f = i =>

                            #region f
                            Func<int, IHTMLCanvas> f = i =>
                            {
                                // we do have a skybox example somewhere...
                                var f1 = new CanvasRenderingContext2D(w: f12_height, h: f12_height);


                                // can we keep animating the stereo ?

                                // if we return the canvas it gets messed up. why?
                                // looks to be a bug?

                                //var stale = new IHTMLImage();

                                if (eyeid == 0)
                                {
                                    // GearVR would have both eyes!
                                    // laptop has to flip between eyes to give similar effect?
                                    // if this were a chrome app. could gearvr request the frames into the photos360 app?

                                    f1.drawImage(f12, i * f12_height, 0, sw: f12_height, sh: f12_height,
                                    dx: 0, dy: 0, dw: f12_height, dh: f12_height);

                                    // whenever we call drawImage ? callsite event monitoring?
                                    // this seems to be slow
                                    //stale.src = f1.canvas.toDataURL();

                                    // can we have a synchronized frame choreo?
                                    //await Task.Delay(1000 / 15);
                                }
                                else
                                {
                                    //await frame0;

                                    // update!

                                    f1.drawImage(f12, (i + 6) * f12_height, 0, sw: f12_height, sh: f12_height,
                                    dx: 0, dy: 0, dw: f12_height, dh: f12_height);

                                    //stale.src = f1.canvas.toDataURL();

                                    //await frame1;
                                    //await Task.Delay(1000 / 15);

                                }

                                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150812/cssstereo

                                return f1;
                            };
                            #endregion


                            //var f0 = new CanvasRenderingContext2D(w: f12.height, h: f12.height);
                            //f0.drawImage(f12, 0, 0, sw: f12.height, sh: f12.height,
                            //    dx: 0, dy: 0, dw: f12.height, dh: f12.height);

                            #region sides
                            var sides = new Func<side>[]
                    {
                () => new side
                {
                    CSS3DObject_element=  f(0),
                    position= new THREE.Vector3( -512, 0, 0 ),
                    rotation= new THREE.Vector3( 0, Math.PI / 2, 0 )
                },
                () => new side {
                    //img=  new humus_nx(),
                    CSS3DObject_element = f(1),

                    position= new THREE.Vector3( 512, 0, 0 ),
                    rotation= new THREE.Vector3( 0, -Math.PI / 2, 0 )
                },
                () => new side{
                    CSS3DObject_element=  f(2),
                    //img=  new humus_py(),
                    position= new THREE.Vector3( 0,  512, 0 ),
                    rotation= new THREE.Vector3( Math.PI / 2, 0, Math.PI )
                },
                () => new side{
                    //img=  new humus_ny(),
                    CSS3DObject_element=  f(3),
                    position= new THREE.Vector3( 0, -512, 0 ),
                    rotation= new THREE.Vector3( - Math.PI / 2, 0, Math.PI )
                },
                () => new side{
                    CSS3DObject_element=  f(4),
                    //img=  new humus_pz(),
                    position= new THREE.Vector3( 0, 0,  512 ),
                    rotation= new THREE.Vector3( 0, Math.PI, 0 )
                },
                () => new side{
                    CSS3DObject_element=  f(5),
                    //img=  new humus_nz(),
                    position= new THREE.Vector3( 0, 0, -512 ),
                    rotation= new THREE.Vector3( 0, 0, 0 )
                }
            };
                            #endregion

                            for (var i = 0; i < sides.Length; i++)
                            {
                                if (i == 1)
                                {
                                    var side = sides[i]();

                                    var element = side.CSS3DObject_element;

                                    element.style.SetSize(1026, 1026);
                                    //element.style.SetSize(256, 256);

                                    //element.width = 1026; // 2 pixels extra to close the gap.

                                    var xobject = new THREE.CSS3DObject(element);
                                    xobject.position.set(side.position.x, side.position.y, side.position.z);
                                    xobject.rotation.set(side.rotation.x, side.rotation.y, side.rotation.z);

                                    // and lets use max res?
                                    // since everything flickers, let's keep only the front CSS face
                                    scene.add(xobject);
                                }

                            }


                            //<div style="-webkit-transform-style: preserve-3d; width: 978px; height: 664px; -webkit-transform: translate3d(0px, 0px, 432.6708237832803px) matrix3d(0.34382355213165283, -0.024581052362918854, -0.938712477684021, 0, 0, -0.9996572732925415, 0.026176948100328445, 0, 0.9390342831611633, 0.00900025200098753, 0.34370577335357666, 0, 0, 0, 0, 0.9999999403953552) translate3d(489px, 332px, 0px);">
                            //        <img src="assets/CSSStereoCubeMap/posx.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, -512, 0, 0, 1);"><img src="assets/CSSStereoCubeMap/negx.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 512, 0, 0, 1);"><img src="assets/CSSStereoCubeMap/posy.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 512, 0, 1);"><img src="assets/CSSStereoCubeMap/negy.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, -512, 0, 1);"><img src="assets/CSSStereoCubeMap/posz.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 0, 512, 1);"><img src="assets/CSSStereoCubeMap/negz.jpg" width="1026" style="width: 1024px; height: 1024px; position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 0, -512, 1);"></div>
                            //<div style="-webkit-transform-style: preserve-3d; width: 978px; height: 664px; -webkit-transform: translate3d(0px, 0px, 432.6708237832803px) matrix3d(-0.4524347484111786, 0, 0.8917974829673767, 0, 0, -1, 0, 0, -0.8917974829673767, 0, -0.4524347484111786, 0, 0, 0, 0, 1) translate3d(489px, 332px, 0px);">
                            // <img width="1026" src="textures/cube/Bridge2/posx.jpg"                                             style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(0, 0, -1, 0, 0, -1, 0, 0, 1, 0, 0, 0, -512, 0, 0, 1);"><img width="1026" src="textures/cube/Bridge2/negx.jpg" style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, 0, 512, 0, 0, 1);"><img width="1026" src="textures/cube/Bridge2/posy.jpg" style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(-1, 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 0, 512, 0, 1);"><img width="1026" src="textures/cube/Bridge2/negy.jpg" style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(-1, 0, 0, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, -512, 0, 1);"><img width="1026" src="textures/cube/Bridge2/posz.jpg" style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(-1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 512, 1);"><img width="1026" src="textures/cube/Bridge2/negz.jpg" style="position: absolute; -webkit-transform-style: preserve-3d; -webkit-transform: translate3d(-50%, -50%, 0px) matrix3d(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 0, -512, 1);"></div> 


                            renderer0.domElement.AttachToDocument();

                            #region onresize
                            new { }.With(
                                async delegate
                                {
                                    do
                                    {

                                        //camera.aspect = Native.window.aspect;
                                        camera.aspect = Native.window.Width / 2.0 / Native.window.Height;
                                        camera.updateProjectionMatrix();

                                        //renderer0.setSize(Native.window.Width / 2, Native.window.Height);
                                        renderer0.setSize(Native.window.Width / 2, Native.window.Height);
                                        //renderer0.domElement.style.SetLocation(Native.window.Width / 2 * eyeid, 0);
                                        renderer0.domElement.style.SetLocation(Native.window.Width / 2 * (1 - eyeid), 0);
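                                        // side-by-side stereo: each eye renders into half the window,
                                        // and (1 - eyeid) places eye 1 on the left half and eye 0 on the right.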

                                    }
                                    while (await Native.window.async.onresize);
                                });
                            #endregion

                            var target = new THREE.Vector3();


                            Native.window.onframe +=
                                delegate
                                {
                                    //if (Native.document.pointerLockElement == Native.document.body)
                                    //    lon += 0.00;
                                    //else
                                    //    lon += 0.01;

                                    //lat = Math.Max(-85, Math.Min(85, lat));

                                    Native.document.title = new { lon, lat }.ToString();


                                    //phi = THREE.Math.degToRad(90 - lat);
                                    //theta = THREE.Math.degToRad(lon);

                                    //target.x = Math.Sin(phi) * Math.Cos(theta);
                                    //target.y = Math.Cos(phi);
                                    //target.z = Math.Sin(phi) * Math.Sin(theta);

                                    //camera.lookAt(target);


                                    phi = THREE.Math.degToRad(90 - lat);
                                    theta = THREE.Math.degToRad(lon);

                                    target.x = 500 * Math.Sin(phi) * Math.Cos(theta);
                                    target.y = 500 * Math.Cos(phi);
                                    target.z = 500 * Math.Sin(phi) * Math.Sin(theta);

                                    camera.lookAt(target);
                                    camera.rotation.z += camera_rotation_z;
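                                    // camera_rotation_z is fed by the deviceorientation handler below
                                    // (camera_rotation_z = e.beta * -0.01), adding a roll on top of lookAt.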


                                    renderer0.render(scene, camera);
                                };


                        };

                    draweye(1);
                    draweye(0);


                    var compassHeadingOffset = 0.0;
                    var compassHeadingInitialized = 0;
                    var compassHeadingInitializedsw = Stopwatch.StartNew();

                    #region compassHeading
                    // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeading\TestCompassHeading\Application.cs
                    Native.window.ondeviceorientation +=
                      dataValues =>
                      {
                          // Convert degrees to radians
                          var alphaRad = dataValues.alpha * (Math.PI / 180);
                          var betaRad = dataValues.beta * (Math.PI / 180);
                          var gammaRad = dataValues.gamma * (Math.PI / 180);

                          // Calculate equation components
                          var cA = Math.Cos(alphaRad);
                          var sA = Math.Sin(alphaRad);
                          var cB = Math.Cos(betaRad);
                          var sB = Math.Sin(betaRad);
                          var cG = Math.Cos(gammaRad);
                          var sG = Math.Sin(gammaRad);

                          // Calculate A, B, C rotation components
                          var rA = -cA * sG - sA * sB * cG;
                          var rB = -sA * sG + cA * sB * cG;
                          var rC = -cB * cG;

                          // Calculate compass heading
                          var compassHeading = Math.Atan(rA / rB);

                          // Convert from half unit circle to whole unit circle
                          if (rB < 0)
                          {
                              compassHeading += Math.PI;
                          }
                          else if (rA < 0)
                          {
                              compassHeading += 2 * Math.PI;
                          }

                          /*
                          Alternative calculation (replacing the Math.Atan quadrant correction above):

                            var compassHeading = Math.atan2(rA, rB);

                            if(rA < 0) {
                              compassHeading += 2 * Math.PI;
                            }
                          */

                          // Convert radians to degrees
                          compassHeading *= 180 / Math.PI;

                          // Compass heading can only be derived if returned values are 'absolute'

                          // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeadingWithReset\TestCompassHeadingWithReset\Application.cs
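                          // the first ~5 seconds of readings act as calibration: the latest heading becomes
                          // compassHeadingOffset, so lon1 ends up relative to where the device pointed at startup.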

                          //Native.document.body.innerText = new { compassHeading }.ToString();
                          //if (compassHeadingInitialized > 0)
                          if (compassHeadingInitializedsw.ElapsedMilliseconds > 5000)
                          {
                              lon1 = compassHeading - compassHeadingOffset;
                          }
                          else
                          {

                              compassHeadingOffset = compassHeading;
                              compassHeadingInitialized++;
                              //compassHeadingInitialized.
                          }

                      };
                    #endregion

                    #region gamma
                    Native.window.ondeviceorientation +=
                        //e => Native.body.innerText = new { e.alpha, e.beta, e.gamma }.ToString();
                        //e => lon = e.gamma;
                        e =>
                        {
                            lat1 = e.gamma;

                            // afterwards, servicing a running instance would be nice
                            // either by patching or just re-running the whole iteration in the background
                            //camera_rotation_z = e.beta * 0.02;
                            camera_rotation_z = e.beta * -0.01;
                        };
                    #endregion



                    #region camera rotation
                    var old = new { clientX = 0, clientY = 0 };

                    Native.document.body.ontouchstart +=
                        e =>
                        {
                            var n = new { e.touches[0].clientX, e.touches[0].clientY };
                            old = n;
                        };

                    Native.document.body.ontouchmove +=
                            e =>
                            {
                                var n = new { e.touches[0].clientX, e.touches[0].clientY };

                                e.preventDefault();

                                lon0 += (n.clientX - old.clientX) * 0.2;
                                lat0 -= (n.clientY - old.clientY) * 0.2;

                                old = n;
                            };


                    Native.document.body.onmousemove +=
                        e =>
                        {
                            e.preventDefault();

                            if (Native.document.pointerLockElement == Native.document.body)
                            {
                                lon0 += e.movementX * 0.1;
                                lat0 -= e.movementY * 0.1;

                                //Console.WriteLine(new { lon, lat, e.movementX, e.movementY });
                            }
                        };


                    Native.document.body.onmouseup +=
                      e =>
                      {
                          //drag = false;
                          e.preventDefault();
                      };

                    Native.document.body.onmousedown +=
                        e =>
                        {
                            //e.CaptureMouse();

                            //drag = true;
                            e.preventDefault();
                            Native.document.body.requestPointerLock();

                        };



                    #endregion



                }
            );

        }
        public Application(IDefault page = null)
        {
            var size = 500;

            var gl = new WebGLRenderingContext(antialias: true, preserveDrawingBuffer: true);

            this.canvas = gl.canvas.AttachToDocument();

            canvas.style.backgroundColor = "black";
            //canvas.style.backgroundColor = "blue";

            Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;
            canvas.style.SetLocation(0, 0, size, size);

            canvas.width = size;
            canvas.height = size;

            var gl_viewportWidth = size;
            var gl_viewportHeight = size;

            canvas.style.SetLocation(0, 0, gl_viewportWidth, gl_viewportHeight);

            #region IsDisposed
            var IsDisposed = false;

            this.Dispose = delegate
            {
                if (IsDisposed)
                    return;

                IsDisposed = true;

                canvas.Orphanize();
            };
            #endregion

            #region AtResize
            Action AtResize =
                delegate
                {
                    gl_viewportWidth = Native.window.Width;
                    gl_viewportHeight = Native.window.Height;

                    canvas.style.SetSize(gl_viewportWidth, gl_viewportHeight);

                    canvas.width = gl_viewportWidth;
                    canvas.height = gl_viewportHeight;
                };

            if (page != null)
                Native.window.onresize +=
                    e =>
                    {
                        AtResize();
                    };
            AtResize();
            #endregion


            #region requestFullscreen
            Native.Document.body.ondblclick +=
                delegate
                {
                    if (IsDisposed)
                        return;

                    // http://tutorialzine.com/2012/02/enhance-your-website-fullscreen-api/

                    Native.Document.body.requestFullscreen();


                };
            #endregion





            #region initShaders
            var shaderProgram = gl.createProgram(
                new GeometryVertexShader(),
                new GeometryFragmentShader()
            );


            gl.linkProgram(shaderProgram);
            gl.useProgram(shaderProgram);

            var shaderProgram_vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
            gl.enableVertexAttribArray((uint)shaderProgram_vertexPositionAttribute);

            var shaderProgram_textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
            gl.enableVertexAttribArray((uint)shaderProgram_textureCoordAttribute);

            var shaderProgram_pMatrixUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
            var shaderProgram_mvMatrixUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");

            // new in lesson 05
            var shaderProgram_samplerUniform = gl.getUniformLocation(shaderProgram, "uSampler");
            #endregion



            var mvMatrix = glMatrix.mat4.create();
            var mvMatrixStack = new Stack<Float32Array>();

            var pMatrix = glMatrix.mat4.create();

            #region new in lesson 03
            Action mvPushMatrix = delegate
            {
                var copy = glMatrix.mat4.create();
                glMatrix.mat4.set(mvMatrix, copy);
                mvMatrixStack.Push(copy);
            };

            Action mvPopMatrix = delegate
            {
                mvMatrix = mvMatrixStack.Pop();
            };
            #endregion
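
            // typical usage of the matrix stack (a sketch, not invoked here): push before applying a
            // per-object transform, draw, then pop to restore the shared model-view matrix:
            //   mvPushMatrix();
            //   glMatrix.mat4.translate(mvMatrix, new float[] { x, y, z });
            //   /* set uniforms and draw */
            //   mvPopMatrix();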


            #region setMatrixUniforms
            Action setMatrixUniforms =
                delegate
                {
                    gl.uniformMatrix4fv(shaderProgram_pMatrixUniform, false, pMatrix);
                    gl.uniformMatrix4fv(shaderProgram_mvMatrixUniform, false, mvMatrix);
                };
            #endregion




            #region init buffers


            #region cubeVertexPositionBuffer
            var cubeVertexPositionBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexPositionBuffer);
            var vertices = new[]{

                // Front face RED
                -1.0f, -1.0f,  1.0f,
                 1.0f, -1.0f,  1.0f,
                 1.0f,  1.0f,  1.0f,
                -1.0f,  1.0f,  1.0f,

                //// Back face YELLOW
                //-1.0f, -1.0f, -1.0f,
                //-1.0f,  1.0f, -1.0f,
                // 1.0f,  1.0f, -1.0f,
                // 1.0f, -1.0f, -1.0f,

                //// Top face GREEN
                //-1.0f,  1.0f, -1.0f,
                //-1.0f,  1.0f,  1.0f,
                // 1.0f,  1.0f,  1.0f,
                // 1.0f,  1.0f, -1.0f,

                //// Bottom face BEIGE
                //-1.0f, -1.0f, -1.0f,
                // 1.0f, -1.0f, -1.0f,
                // 1.0f, -1.0f,  1.0f,
                //-1.0f, -1.0f,  1.0f,

                //// Right face PURPLE
                // 1.0f, -1.0f, -1.0f,
                // 1.0f,  1.0f, -1.0f,
                // 1.0f,  1.0f,  1.0f,
                // 1.0f, -1.0f,  1.0f,

                //// Left face
                //-1.0f, -1.0f, -1.0f,
                //-1.0f, -1.0f,  1.0f,
                //-1.0f,  1.0f,  1.0f,
                //-1.0f,  1.0f, -1.0f
            };
            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);

            var cubeVertexPositionBuffer_itemSize = 3;
            //var cubeVertexPositionBuffer_numItems = 6 * 6;
            var cubeVertexPositionBuffer_numItems = 6 * 1;
            #endregion

            #region cubeVertexTextureCoordBuffer

            var cubeVertexTextureCoordBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
            var textureCoords = new float[]{
                  // Front face
                  0.0f, 0.0f,
                  1.0f, 0.0f,
                  1.0f, 1.0f,
                  0.0f, 1.0f,

                  //// Back face
                  //1.0f, 0.0f,
                  //1.0f, 1.0f,
                  //0.0f, 1.0f,
                  //0.0f, 0.0f,

                  //// Top face
                  //0.0f, 1.0f,
                  //0.0f, 0.0f,
                  //1.0f, 0.0f,
                  //1.0f, 1.0f,

                  //// Bottom face
                  //1.0f, 1.0f,
                  //0.0f, 1.0f,
                  //0.0f, 0.0f,
                  //1.0f, 0.0f,

                  //// Right face
                  //1.0f, 0.0f,
                  //1.0f, 1.0f,
                  //0.0f, 1.0f,
                  //0.0f, 0.0f,

                  //// Left face
                  //0.0f, 0.0f,
                  //1.0f, 0.0f,
                  //1.0f, 1.0f,
                  //0.0f, 1.0f,
            };
            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoords), gl.STATIC_DRAW);
            var cubeVertexTextureCoordBuffer_itemSize = 2;
            var cubeVertexTextureCoordBuffer_numItems = 24;

            var cubeVertexIndexBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVertexIndexBuffer);
            var cubeVertexIndices = new UInt16[]{
                0, 1, 2,      0, 2, 3,    // Front face
                4, 5, 6,      4, 6, 7,    // Back face
                8, 9, 10,     8, 10, 11,  // Top face
                12, 13, 14,   12, 14, 15, // Bottom face
                16, 17, 18,   16, 18, 19, // Right face
                20, 21, 22,   20, 22, 23  // Left face
            };

            gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeVertexIndices), gl.STATIC_DRAW);
            var cubeVertexIndexBuffer_itemSize = 1;
            var cubeVertexIndexBuffer_numItems = cubeVertexPositionBuffer_numItems;

            #endregion

            #endregion

            var tex1 = gl.createTexture();
            var tex1i = new WebGLSVGAnonymous.HTML.Images.FromAssets.Anonymous_LogosSingleNoWings();
            //var tex1i = new WebGLSVGAnonymous.HTML.Images.FromAssets.nehe();
            // WebGL: drawElements: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled. 
            tex1i.width = 1024 * 2;
            tex1i.height = 1024 * 2;




            // initTexture new in lesson 05
            var tex0 = gl.createTexture();
            var tex0i = new WebGLSVGAnonymous.HTML.Images.FromAssets.Anonymous_LogosSingleWings();
            //var tex0i = new WebGLSVGAnonymous.HTML.Images.FromAssets.nehe();
            // WebGL: drawElements: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled. 
            tex0i.width = 1024 * 2;
            tex0i.height = 1024 * 2;




            tex1i.InvokeOnComplete(
                delegate
                {
                    tex0i.InvokeOnComplete(
                        delegate
                        {
                            // this is a workaround
                            // chrome has a bug where svg textures are merged..
                            var tex1ii = new CanvasRenderingContext2D(1024 * 2, 1024 * 2);

                            tex1ii.drawImage(
                                tex1i, 0, 0, 1024 * 2, 1024 * 2);

                            {
                                gl.activeTexture(gl.TEXTURE1);
                                gl.bindTexture(gl.TEXTURE_2D, tex1);
                                gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
                                gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, tex1ii.canvas);
                                gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, (int)gl.NEAREST);
                                gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, (int)gl.NEAREST);
                                gl.generateMipmap(gl.TEXTURE_2D);

                                gl.bindTexture(gl.TEXTURE_2D, null);
                            }


                            {
                                gl.activeTexture(gl.TEXTURE0);
                                gl.bindTexture(gl.TEXTURE_2D, tex0);
                                gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
                                // http://msdn.microsoft.com/en-us/library/ie/dn302435(v=vs.85).aspx
                                gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, tex0i);
                                gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, (int)gl.NEAREST);
                                gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, (int)gl.NEAREST);
                                gl.generateMipmap(gl.TEXTURE_2D);
                                gl.bindTexture(gl.TEXTURE_2D, null);
                            }



                            //gl.clearColor(0.0f, 0.0f, 0.0f, 1.0f);
                            //gl.enable(gl.DEPTH_TEST);
                            gl.enable(gl.BLEND);
                            //gl.enable(gl.CULL_FACE);

                            // http://stackoverflow.com/questions/11521035/blending-with-html-background-in-webgl
                            gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
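                            // separate blend factors: color uses SRC_ALPHA / ONE_MINUS_SRC_ALPHA as usual,
                            // while destination alpha uses ONE / ONE_MINUS_SRC_ALPHA so the canvas keeps a
                            // meaningful alpha channel and composites over the HTML background behind it.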





                            var xRot = 0.0f;
                            var yRot = 0.0f;
                            var zRot = 0.0f;
                            var lastTime = 0L;
                            Action animate = delegate
                            {
                                var timeNow = new IDate().getTime();
                                if (lastTime != 0)
                                {
                                    var elapsed = timeNow - lastTime;

                                    //xRot += (9 * elapsed) / 1000.0f;
                                    yRot += (40 * elapsed) / 1000.0f;
                                    //zRot += (9 * elapsed) / 1000.0f;
                                }
                                lastTime = timeNow;
                            };

                            Func<float, float> degToRad = (degrees) =>
                            {
                                return degrees * (float)Math.PI / 180f;
                            };


                            var f = new Designer();

                            f.trackBar1.Value = -20;
                            f.trackBar2.Value = -10;


                            if (page != null)
                                f.Show();

                            Action<bool> drawScene = Anonymous_LogosSingleNoWings_Checked =>
                            {




                                glMatrix.mat4.perspective(45f, (float)gl_viewportWidth / (float)gl_viewportHeight, 0.1f, 100.0f, pMatrix);
                                glMatrix.mat4.identity(mvMatrix);

                                var u1 = f.trackBar1.Value * 0.1f;
                                glMatrix.mat4.translate(mvMatrix, new float[] { 0.0f, 0.0f, u1 });

                                //glMatrix.mat4.rotate(mvMatrix, degToRad(xRot), new[] { 1f, 0f, 0f });

                                if (Anonymous_LogosSingleNoWings_Checked)
                                    glMatrix.mat4.rotate(mvMatrix, degToRad(yRot), new[] { 0f, 1f, 0f });
                                else
                                    glMatrix.mat4.rotate(mvMatrix, degToRad(f.trackBar3.Value), new[] { 0f, 1f, 0f });

                                var u2 = f.trackBar2.Value * 0.1f;
                                glMatrix.mat4.translate(mvMatrix, new float[] { 0.0f, 0.0f, u2 });

                                Native.document.title = new { u1, u2 }.ToString();

                                //glMatrix.mat4.rotate(mvMatrix, degToRad(zRot), new[] { 0f, 0f, 1f });


                                gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexPositionBuffer);
                                gl.vertexAttribPointer((uint)shaderProgram_vertexPositionAttribute, cubeVertexPositionBuffer_itemSize, gl.FLOAT, false, 0, 0);

                                gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
                                gl.vertexAttribPointer((uint)shaderProgram_textureCoordAttribute, cubeVertexTextureCoordBuffer_itemSize, gl.FLOAT, false, 0, 0);



                                if (Anonymous_LogosSingleNoWings_Checked)
                                {
                                    gl.activeTexture(gl.TEXTURE0);

                                    gl.bindTexture(gl.TEXTURE_2D, tex0);
                                    gl.uniform1i(shaderProgram_samplerUniform, 0);
                                }
                                else
                                {
                                    gl.activeTexture(gl.TEXTURE0);

                                    gl.bindTexture(gl.TEXTURE_2D, tex1);
                                    gl.uniform1i(shaderProgram_samplerUniform, 0);
                                }



                                gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVertexIndexBuffer);
                                setMatrixUniforms();
                                gl.drawElements(gl.TRIANGLES, cubeVertexIndexBuffer_numItems, gl.UNSIGNED_SHORT, 0);

                                gl.bindTexture(gl.TEXTURE_2D, null);


                            };


                            var c = 0;


                            Native.window.onframe += delegate
                            {
                                c++;

                                if (page == null)
                                {
                                    gl_viewportWidth = canvas.clientWidth;
                                    gl_viewportHeight = canvas.clientHeight;

                                    canvas.style.SetLocation(0, 0, gl_viewportWidth, gl_viewportHeight);

                                    canvas.width = gl_viewportWidth;
                                    canvas.height = gl_viewportHeight;
                                }
                                gl.viewport(0, 0, gl_viewportWidth, gl_viewportHeight);
                                gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);


                                if (f.Anonymous_LogosSingleNoWings.Checked)
                                    drawScene(false);

                                if (f.Anonymous_LogosSingleWings.Checked)
                                    drawScene(true);

                                animate();

                            };



                        }
                    );
                }
            );

        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // what about service worker?

            Native.window.localStorage[new { index = 1 }].With(
                dataURL =>
                {
                    var i = new IHTMLImage { src = dataURL }.AttachToDocument();


                    new IHTMLBreak().AttachToDocument();

                    new IHTMLButton("clear localStorage").AttachToDocument().WhenClicked(
                        clear =>
                        {
                            Native.window.localStorage.removeItem(new { index = 1 });

                            i.Orphanize();

                            clear.Orphanize();
                        }
                    );

                }
            );


            new Preview().AttachToDocument().InvokeOnComplete(
                img =>
                {
                    // https://www.ibm.com/developerworks/community/blogs/bobleah/entry/html5_code_example_store_images_using_localstorage57?lang=en
                    // http://stackoverflow.com/questions/934012/get-image-data-in-javascript

                    //var canvas = new IHTMLCanvas
                    //{
                    //    width = img.width,
                    //    height = img.height
                    //};

                    //var context = (CanvasRenderingContext2D)canvas.getContext("2d");

                    var context = new CanvasRenderingContext2D();

                    context.canvas.width = img.width;
                    context.canvas.height = img.height;

                    context.drawImage(img, 0, 0, img.width, img.height);

                    // http://www.w3schools.com/tags/canvas_filltext.asp
                    context.fillStyle = "yellow";
                    context.font = "20px Verdana";
                    context.moveTo(0, 0);

                    // y means bottom
                    context.fillText("cache", 0, 20, img.width);


                    context.canvas.AttachToDocument();

                    var dataURL = context.canvas.toDataURL();

                    // data. ?
                    Native.window.localStorage[new { index = 1 }] = dataURL;
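
                    // note: toDataURL() yields a base64 PNG string, so large previews can hit the
                    // localStorage quota (typically on the order of a few MB per origin).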


                    new IHTMLPre { innerText = new { dataURL }.ToString() }.AttachToDocument();

                }
            );
        }
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe1.png"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe2.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globenight.png"



        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst /D b:
        // subst b: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360GlobeAnimation\Chrome360GlobeAnimation\bin\Debug\staging\Chrome360GlobeAnimation.Application\web
        // subst a: z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360GlobeAnimation\Chrome360GlobeAnimation\bin\Debug\staging\Chrome360GlobeAnimation.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360GlobeAnimation\Chrome360GlobeAnimation\bin\Debug\staging\Chrome360GlobeAnimation.Application\web
        // what if we want to do subst in another winstat or session?

        // ColladaLoader: Empty or non-existing file (assets/Chrome360GlobeAnimation/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "Chrome360GlobeAnimation");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying too far beyond the moon?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/Chrome360GlobeAnimation/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )
            int cubefacesize = 2048; // 6 faces, ?
                                     //int cubefacesize = 1024; // 6 faces, ?
                                     // "X:\vr\tape1\0000x2048.png"
                                     // for 60hz render we may want to use float camera precision, not available for ui.
                                     //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
                                     //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            //if (Environment.ProcessorCount < 8)
            //    //cubefacesize = 64; // 6 faces, ?
            //    cubefacesize = 1024; // 6 faces, ?

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;

            var far = 0xffffff;

            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();

            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            scene.add(new THREE.AmbientLight(0x333333));


            //384,400 km

            var distanceKM = 384400f;
            var earthdiameterKM = 12742f;
            var moondiameterKM = 3475f;
            var virtualDistance = radius / earthdiameterKM * distanceKM;
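            // scale real-world kilometres into scene units via the Earth sphere:
            // with radius = 480 and earthdiameterKM = 12742, the Moon ends up roughly
            // 480 / 12742 * 384400 ≈ 14,480 scene units away.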
            // 12,742 km

            // 2,159.2 miles (3,475 km).


            var light = new THREE.DirectionalLight(0xffffff, 1);
            // sun should be beyond moon
            //light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            scene.add(light);





            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);



            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go? 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "camerayHigh" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                  // left?
                  camerax + fcamerax,
                   // height?
                   //0,
                   //1600,
                   //1024,

                   // if the camera is in the center, would we need to move the scene?
                   // we have to move the camera. as we move the scene the lights are messed up
                   //2014,
                   cy + fcameray,

                   //1200
                   cameraz + fcameraz
                   // can we hover top of the map?
                   );
            };
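
            // note on the slider handoff above: cameray covers the near range and camerayHigh
            // extends it; once cameray sits at its max the high-range slider takes over (cy),
            // and grabbing camerayHigh snaps cameray to its max so the two never fight.
            // fcamerax/fcameray/fcameraz are the extra offsets driven programmatically by the
            // render-server loop below.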


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
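
            // each cube face gets its own 90deg FOV, aspect 1:1 perspective camera; six such
            // cameras (the PX/NX/PY/NY/PZ/NZ regions) tile the full sphere. applycameraoffset
            // re-copies the shared cameraoffset into every camera each frame, so the rig
            // translates as one unit while the per-face orientations stay axis-aligned.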

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;

            #region onmousedown
            Native.body.onmousedown +=
                async e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                        return;

                    // movementX no longer works
                    old = new
                    {


                        e.CursorX,
                        e.CursorY
                    };


                    //e.CaptureMouse();
                    var release = e.Element.CaptureMouse();
                    await e.Element.async.onmouseup;

                    release();


                };
            #endregion



            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
            #region onmousemove
            Native.body.onmousemove +=
                e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                    {
                        Native.body.style.cursor = IStyle.CursorEnum.@default;
                        return;
                    }

                    e.preventDefault();
                    e.stopPropagation();


                    Native.body.style.cursor = IStyle.CursorEnum.move;

                    var pointerLock = canvas0 == Native.document.pointerLockElement;


                    //Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

                    if (e.MouseButton == IEvent.MouseButtonEnum.Left)
                    {

                        oo.WithEach(
                            x =>
                            {
                                x.rotation.y += 0.006 * (e.CursorX - old.CursorX);
                                x.rotation.x += 0.006 * (e.CursorY - old.CursorY);
                            }
                        );

                        old = new
                        {


                            e.CursorX,
                            e.CursorY
                        };



                    }

                };
            #endregion

            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);
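
            // preview scale: suizoom = 480 / 3840 = 0.125, so the 3840x2160 equirectangular
            // canvas is shown as a 480x270 thumbnail, same footprint as frame0 below.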

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );
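
            // sketch of the intent here (inferred from the CubeToEquirectangular/ShaderToy names,
            // not verified against the library source): the EffectPass owns a fragment shader that
            // samples a cube map (pass.mInputs[0]) and writes the equirectangular projection into
            // the gl canvas above; the onframe loop below feeds it the six face canvases every frame.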

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);





            #region fixup rotation

            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);

            // dont need the fixup. unless we want to animate the sky rotate?
            //mesh.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            #endregion


            // hide the sky to see camera lines?
            //  can we show this as HUD on VR in webview?
            //skybox.visible = false;
            //scene.add(skybox);




            //new IHTMLButton { }

            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };

            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                //            A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion

            var vsync = default(TaskCompletionSource<object>);
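
            // vsync handshake: the "render 60hz 30sec" loop below parks on vsync.Task, and the
            // Native.window.onframe handler at the bottom calls vsync.SetResult(null) only after
            // a full cube render + equirectangular pass, so every exported frame corresponds to
            // a completed render rather than whatever happened to be on screen.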

            new IHTMLButton {
                "render 60hz 30sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                status = "rendering... vsync";

                var frameid = 0;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameid.ToString().PadLeft(4, '0') + ".png";
                status = "rendering... " + new { frameid, filename };


                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                    await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;


                beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));
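
                // orbit parameterization: with 60 * 30 / 2f = 900, the phase
                // PI * (frameid - 900) / 900 sweeps from -PI to +PI over the 1800 frames,
                // so fcamerax traces radius * cos(phase) while cameraz traces
                // cameraz.max * sin(phase) - one full elliptical camera orbit per 30s clip.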


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;

                if (Environment.ProcessorCount < 8)
                    frameid += 15;
                else
                    frameid++;

                // 60hz 30sec
                if (frameid < 60 * 30)
                {
                    // Blob GC? either this delay helps, or the fact that we made a Blob static. 
                    await Task.Delay(11);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };

            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"


            #region moonsphere
            var moonsphere = new THREE.Mesh(
                    new THREE.SphereGeometry(radius / earthdiameterKM * moondiameterKM, segments, segments),
                    new THREE.MeshPhongMaterial(
                            new
                            {
                                map = new THREE.Texture().With(

                                    async s =>
                                    {
                                        //0:75ms event: _2_no_clouds_4k_low view-source:36543
                                        //Application Cache Progress event (1 of 2) http://192.168.1.72:22248/view-source 192.168.1.72/:1
                                        //Application Cache Progress event (2 of 2)  192.168.1.72/:1
                                        //Application Cache Cached event 192.168.1.72/:1
                                        //1:1018ms event: _2_no_clouds_4k_low done view-source:36543
                                        //1:1019ms event: _2_no_clouds_4k view-source:36543
                                        //event.returnValue is deprecated. Please use the standard event.preventDefault() instead. view-source:2995
                                        //1:16445ms event: _2_no_clouds_4k done 

                                        // ~ tilde to open css editor?


                                        //await 20000;

                                        //Console.WriteLine("event: _2_no_clouds_4k");
                                        s.image = await new moon();
                                        s.needsUpdate = true;
                                        //Console.WriteLine("event: _2_no_clouds_4k done");
                                    }
                                ),


                                //bumpMap = THREE.ImageUtils.loadTexture(
                                //    new elev_bump_4k().src
                                ////new elev_bump_4k_low().src
                                //),


                                //// applies only to shaders to create the shadow
                                //bumpScale = 0.005,

                                //specularMap = new THREE.Texture().With(
                                //    async s =>
                                //    {

                                //        Console.WriteLine("event: water_4k");
                                //        s.image = await new water_4k().async.oncomplete;
                                //        s.needsUpdate = true;
                                //        Console.WriteLine("event: water_4k done");
                                //    }
                                //),


                                ////specular =    new THREE.Color("grey")								
                                //specular = new THREE.Color(0xa0a0a0)
                            })
                );
            #endregion
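
            // moon scale: radius / earthdiameterKM * moondiameterKM = radius * 3475 / 12742
            // ≈ 0.273 * radius, i.e. the real Moon/Earth diameter ratio carried into scene units.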


            //moonsphere.position.set(radius * 4, 0, 0);
            //moonsphere.position.set(0, 0, -radius * 4);

            scene.add(moonsphere);


            #region globesphere
            var globesphere = new THREE.Mesh(
                    new THREE.SphereGeometry(radius, segments, segments),
                    new THREE.MeshPhongMaterial(
                            new
                            {
                                map = new THREE.Texture().With(

                                    async s =>
                                    {
                                        //0:75ms event: _2_no_clouds_4k_low view-source:36543
                                        //Application Cache Progress event (1 of 2) http://192.168.1.72:22248/view-source 192.168.1.72/:1
                                        //Application Cache Progress event (2 of 2)  192.168.1.72/:1
                                        //Application Cache Cached event 192.168.1.72/:1
                                        //1:1018ms event: _2_no_clouds_4k_low done view-source:36543
                                        //1:1019ms event: _2_no_clouds_4k view-source:36543
                                        //event.returnValue is deprecated. Please use the standard event.preventDefault() instead. view-source:2995
                                        //1:16445ms event: _2_no_clouds_4k done 

                                        // ~ tilde to open css editor?


                                        //await 20000;

                                        //Console.WriteLine("event: _2_no_clouds_4k");
                                        s.image = await new _2_no_clouds_4k();
                                        s.needsUpdate = true;
                                        //Console.WriteLine("event: _2_no_clouds_4k done");
                                    }
                                ),


                                bumpMap = THREE.ImageUtils.loadTexture(
                                    new elev_bump_4k().src
                                //new elev_bump_4k_low().src
                                ),


                                // applies only to shaders to create the shadow
                                bumpScale = 0.005,

                                specularMap = new THREE.Texture().With(
                                    async s =>
                                    {

                                        Console.WriteLine("event: water_4k");
                                        s.image = await new water_4k().async.oncomplete;
                                        s.needsUpdate = true;
                                        Console.WriteLine("event: water_4k done");
                                    }
                                ),


                                //specular =    new THREE.Color("grey")								
                                specular = new THREE.Color(0xa0a0a0)
                            })
                );
            #endregion



            // http://stackoverflow.com/questions/12447734/three-js-updateing-texture-on-plane


            //globesphere.rotation.y = rotation;
            scene.add(globesphere);


            #region clouds
            var clouds = new THREE.Mesh(
                    new THREE.SphereGeometry(
                        //radius + 0.003,
                        radius + radius * 0.01,
                        segments, segments),
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            //map = THREE.ImageUtils.loadTexture(
                            //    //new fair_clouds_4k().src
                            //    new fair_clouds_4k_low().src
                            //    ),


                            map = new THREE.Texture().With(
                                async s =>
                                {


                                    Console.WriteLine("event: fair_clouds_4k");
                                    s.image = await new fair_clouds_4k().async.oncomplete;
                                    s.needsUpdate = true;
                                    Console.WriteLine("event: fair_clouds_4k done");
                                }
                            ),


                            transparent = true
                        })
                );
            //clouds.rotation.y = rotation;
            scene.add(clouds);
            #endregion



            //clouds.position.set(0, virtualDistance / -2, 0);
            //globesphere.position.set(0, virtualDistance / -2, 0);

            //moonsphere.position.set(0, virtualDistance / 2, 0);


            //clouds.position.set(virtualDistance / -2, 0, 0);
            //globesphere.position.set(virtualDistance / -2, 0, 0);

            //moonsphere.position.set(virtualDistance / 2, 0, 0);

            clouds.position.set(virtualDistance / 2, (int)(-radius * 1.06), 0);
            globesphere.position.set(virtualDistance / 2, (int)(-radius * 1.06), 0);

            // moon dark side away from earth?
            //moonsphere.position.set(virtualDistance / -2, -radius, 0);
            moonsphere.position.set(virtualDistance / -2, radius / earthdiameterKM * moondiameterKM, 0);
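
            // placement: earth + clouds sit at x = +virtualDistance / 2 and the moon at
            // x = -virtualDistance / 2, so their separation equals virtualDistance - the scaled
            // Earth-Moon distance computed earlier. the moon's y offset is its own scaled radius,
            // lifting it slightly above the x axis.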

            applycameraoffset += delegate
            {
                globesphere.rotation.y = Math.Sin((cameraz.max * 0.5 + cameraz.valueAsNumber) * 0.0005);
                clouds.rotation.y = Math.Sin((cameraz.max * 0.5 + cameraz.valueAsNumber) * 0.0006);
            };
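
            // the globe and cloud rotation are driven off the cameraz slider value, so the earth
            // appears to turn as the camera sweeps; clouds use 0.0006 vs 0.0005 so they drift
            // slowly relative to the surface.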

            //globesphere.position.set(0, radius / earthdiameterKM * distanceKM, 0);

            //clouds.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
            //globesphere.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);



            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    //var i = new HTML.Images.FromAssets.galaxy_starfield();
                    var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await i.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(i.width, i.height);

                    cc.bytes = bytes;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var stars = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.9, 64, 64),
                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    scene.add(stars);
                }
           );
            #endregion




            new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
                   dae =>
                   {
                       // 90deg
                       dae.rotation.x = -Math.Cos(Math.PI);

                       //dae.scale.x = 30;
                       //dae.scale.y = 30;
                       //dae.scale.z = 30;
                       dae.position.z = -(65 - 200);





                       var scale = 0.9;

                       // jsc, do we have ILObserver available yet?
                       dae.scale.x = scale;
                       dae.scale.y = scale;
                       dae.scale.z = scale;


                       #region onmousewheel
                       Native.body.onmousewheel +=
                           e =>
                           {
                               e.preventDefault();

                               //camera.position.z = 1.5;

                               // min max. shall adjust speed also!
                               // max 4.0
                               // min 0.6
                               dae.position.z -= 10.0 * e.WheelDirection;

                               //camera.position.z = 400;
                               //dae.position.z = dae.position.z.Max(-200).Min(200);

                               //Native.document.title = new { z }.ToString();

                           };
                       #endregion


                       //dae.position.y = -80;

                       //dae.AttachTo(sceneg);
                       //scene.add(dae);
                       //oo.Add(dae);




                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {
                               // let render man know..
                               if (vsync != null)
                                   if (vsync.Task.IsCompleted)
                                       return;


                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                               //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                               //sphere.rotation.y += speed;
                               //clouds.rotation.y += speed;

                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads

                               applycameraoffset();
                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?


                               #region x
                               // upside down?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
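
                               // cube upload: WebGL defines the cube-map face targets as consecutive
                               // enums (TEXTURE_CUBE_MAP_POSITIVE_X = 34069 .. NEGATIVE_Z = 34074, see
                               // the list above), so TEXTURE_CUBE_MAP_POSITIVE_X + index walks
                               // PX, NX, PY, NY, PZ, NZ in exactly the order the face canvases are
                               // listed - each 2D canvas becomes one face of pass.tex.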

                               // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                               // let render man know..
                               if (vsync != null)
                                   if (!vsync.Task.IsCompleted)
                                       vsync.SetResult(null);
                           };


                   }
               );





            Console.WriteLine("do you see it?");
        }
		/// <summary>
		/// This is a javascript application.
		/// </summary>
		/// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
		public Application(IApp page)
		{
			// http://www.doingbusiness.org/data/exploreeconomies/puerto-rico/starting-a-business
			// http://www.doingbusiness.org/data/exploretopics/starting-a-business

			// X:\jsc.svn\examples\javascript\svg\DEAGELForecast\DEAGELForecast\Application.cs
			// https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201410/20141028

			// http://www.thecommonsenseshow.com/2014/10/23/the-target-date-for-americas-depopulation-has-been-set/

			// would ScriptCoreLib be able to 
			// play navigator if we also
			// have the role of AppWindow or Android Widget Service?


			// if we are running in a SYSTEM account
			// the chrome no-sandbox only allows software renderer
			// where we get 1 frame per sec.


			// on older systems we may not get GL_OES_standard_derivatives 
			// http://stackoverflow.com/questions/16795278/disable-some-gl-extensions-for-debugging-three-js-app
			// 			( parameters.bumpMap || parameters.normalMap ) ? "#extension GL_OES_standard_derivatives : enable" : "",
			// or that system is just old as hell


			// http://stackoverflow.com/questions/16795278/disable-some-gl-extensions-for-debugging-three-js-app

			//http://thematicmapping.org/playground/webgl/earth/
			// https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2014/201402/20140222


			// Earth params
			var radius = 0.5;

			//var segments = 32;
			var segments = 128;
			var rotation = 6;

			var scene = new THREE.Scene();

			var camera = new THREE.PerspectiveCamera(45, Native.window.aspect, 0.01, 1000);
			camera.position.z = 1.5;

			var renderer = new THREE.WebGLRenderer(
				   new
				   {
					   // http://stackoverflow.com/questions/20495302/transparent-background-with-three-js
					   alpha = true,
					   preserveDrawingBuffer = true
				   }

				);
			renderer.setSize();
			//renderer.setClearColor(
			scene.add(new THREE.AmbientLight(0x333333));


			var light = new THREE.DirectionalLight(0xffffff, 1);
			light.position.set(5, 3, 5);
			scene.add(light);






			#region sphere
			var sphere = new THREE.Mesh(
					new THREE.SphereGeometry(radius, segments, segments),
					new THREE.MeshPhongMaterial(
							new
							{
								map = new THREE.Texture().With(

									async s =>
									{
										//0:75ms event: _2_no_clouds_4k_low view-source:36543
										//Application Cache Progress event (1 of 2) http://192.168.1.72:22248/view-source 192.168.1.72/:1
										//Application Cache Progress event (2 of 2)  192.168.1.72/:1
										//Application Cache Cached event 192.168.1.72/:1
										//1:1018ms event: _2_no_clouds_4k_low done view-source:36543
										//1:1019ms event: _2_no_clouds_4k view-source:36543
										//event.returnValue is deprecated. Please use the standard event.preventDefault() instead. view-source:2995
										//1:16445ms event: _2_no_clouds_4k done 

										// ~ tilde to open css editor?




										Console.WriteLine("event: _2_no_clouds_4k_low");
										//s.image = await new _2_no_clouds_4k_low();
										s.image = await new _2_no_clouds_DEAGEL_4k_low().async.oncomplete;
										// http://www.deagel.com/
										//s.image = await new _2_no_clouds_DEAGEL_CIMSS_4k_low();
										//s.image = new _2_no_clouds_4k_low();
										//await s.image;

										s.needsUpdate = true;
										Console.WriteLine("event: _2_no_clouds_4k_low done");

                                        //await 20000;

                                        //Console.WriteLine("event: _2_no_clouds_4k");
                                        s.image = await new _2_no_clouds_4k();
                                        s.needsUpdate = true;
                                        //Console.WriteLine("event: _2_no_clouds_4k done");
                                    }
								),


								bumpMap = THREE.ImageUtils.loadTexture(
									new elev_bump_4k().src
								//new elev_bump_4k_low().src
								),


								// applies only to shaders to create the shadow
								bumpScale = 0.005,

								specularMap = new THREE.Texture().With(
									async s =>
									{
										Console.WriteLine("event: water_4k_low");
										s.image = await new water_4k_low().async.oncomplete;
										s.needsUpdate = true;
										Console.WriteLine("event: water_4k_low done");

										await Task.Delay(20000);

										Console.WriteLine("event: water_4k");
										s.image = await new water_4k().async.oncomplete;
										s.needsUpdate = true;
										Console.WriteLine("event: water_4k done");
									}
								),


								//specular =    new THREE.Color("grey")								
								specular = new THREE.Color(0xa0a0a0)
							})
				);
			#endregion

			// http://stackoverflow.com/questions/12447734/three-js-updateing-texture-on-plane


			sphere.rotation.y = rotation;
			scene.add(sphere);


			#region clouds
			var clouds = new THREE.Mesh(
					new THREE.SphereGeometry(
						//radius + 0.003,
						radius + 0.006,
						segments, segments),
					new THREE.MeshPhongMaterial(
						new
						{
							//map = THREE.ImageUtils.loadTexture(
							//    //new fair_clouds_4k().src
							//    new fair_clouds_4k_low().src
							//    ),


							map = new THREE.Texture().With(
								async s =>
								{
									Console.WriteLine("event: fair_clouds_4k_low");
									s.image = await new fair_clouds_4k_low().async.oncomplete;
									s.needsUpdate = true;
									Console.WriteLine("event: fair_clouds_4k_low done");

                                    await Task.Delay(20000);

									Console.WriteLine("event: fair_clouds_4k");
									s.image = await new fair_clouds_4k().async.oncomplete;
									s.needsUpdate = true;
									Console.WriteLine("event: fair_clouds_4k done");
								}
							),


							transparent = true
						})
				);
			clouds.rotation.y = rotation;
			scene.add(clouds);
			#endregion



			// X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
			// X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
			// hidden for alpha AppWindows
			//#if FBACKGROUND

			#region galaxy_starfield
			new THREE.Texture().With(
				async s =>
				{
					var i = new HTML.Images.FromAssets.galaxy_starfield();

					var bytes = await i.async.bytes;

					for (int ii = 0; ii < bytes.Length; ii += 4)
					{

						bytes[ii + 3] = (byte)(bytes[ii + 0]);

						bytes[ii + 0] = 0xff;
						bytes[ii + 1] = 0xff;
						bytes[ii + 2] = 0xff;
					}

					var c = new CanvasRenderingContext2D(i.width, i.height);

					c.bytes = bytes;

					s.image = c;
					s.needsUpdate = true;

					var stars_material = new THREE.MeshBasicMaterial(
							new
							{
								//map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
								map = s,
								side = THREE.BackSide,
								transparent = true
							});


					var stars = new THREE.Mesh(
							new THREE.SphereGeometry(90, 64, 64),
						   stars_material
						);

					// http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
					//(stars_material as dynamic).opacity = 0.5;


					scene.add(stars);
				}
		   );
			#endregion



			//#endif


			//var controls = new THREE.TrackballControls(camera);
			//Native.document.body.style.margin = "0";
			//Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;

			this.canvas = (IHTMLCanvas)renderer.domElement;

			//renderer.domElement.AttachToDocument();
			this.canvas.AttachToDocument();
			this.canvas.style.SetLocation(0, 0);

			// jsc, what pointers do we have in store?
			this.canvas.css.style.cursor = IStyle.CursorEnum.pointer;
			this.canvas.css.active.style.cursor = IStyle.CursorEnum.move;


			var
				old = new
				{
					sphere = new
					{
						sphere.rotation.x,
						sphere.rotation.y

					},
					clouds = new
					{
						clouds.rotation.x,
						clouds.rotation.y,
					},


					CursorX = 0,
					CursorY = 0
				};




			var z = camera.position.z;


			#region sfx
			var sfx = new WebGLEarthByBjorn.HTML.Audio.FromAssets.SatelliteBeep_Sputnik1
			{
				autobuffer = true,

				// this aint working
				//loop = true 

			};

			sfx.play();
			#endregion


			//sfx.AttachToHead();


			// http://soundfxnow.com/sound-fx/sputnik-satellite-beeping/

			#region onmousewheel
			this.canvas.onmousewheel +=
				e =>
				{
					//camera.position.z = 1.5;

					// min max. shall adjust speed also!
					// max 4.0
					// min 0.6
					z -= 0.1 * e.WheelDirection;

					z = z.Max(0.6).Min(4.5);

					//Native.document.title = new { camera.position.z }.ToString();

				};
			#endregion


			#region onmousedown
			this.canvas.onmousedown +=
				e =>
				{
					var pointerLock = this.canvas == Native.document.pointerLockElement;

					if (e.MouseButton == IEvent.MouseButtonEnum.Middle)
					{
						// F11 ?
						//this.canvas.requestFullscreen();
						this.canvas.requestPointerLock();
					}
					else
					{
						// movementX no longer works
						old = new
						{
							sphere = new
							{
								sphere.rotation.x,
								sphere.rotation.y

							},
							clouds = new
							{
								clouds.rotation.x,
								clouds.rotation.y,
							},


							e.CursorX,
							e.CursorY
						};

						if (pointerLock)
						{
							// skip
						}
						else
						{
							e.CaptureMouse();
						}
					}

				};
			#endregion


			canvas.css.active.style.cursor = IStyle.CursorEnum.move;


			// X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
			#region onmousemove
			this.canvas.onmousemove +=
				e =>
				{
					var pointerLock = this.canvas == Native.document.pointerLockElement;


					//Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

					if (e.MouseButton == IEvent.MouseButtonEnum.Left)
					{
						if (pointerLock)
						{
							sphere.rotation.x += 0.01 * e.movementY;
							sphere.rotation.y += 0.01 * e.movementX;

							clouds.rotation.x += 0.01 * e.movementY;
							clouds.rotation.y += 0.01 * e.movementX;
						}
						else
						{
							sphere.rotation.x = old.sphere.x + 0.01 * (e.CursorY - old.CursorY);
							sphere.rotation.y = old.sphere.y + 0.01 * (e.CursorX - old.CursorX);

							clouds.rotation.x = old.clouds.x + 0.01 * (e.CursorY - old.CursorY);
							clouds.rotation.y = old.clouds.y + 0.01 * (e.CursorX - old.CursorX);
						}


						//    Native.document.title = new { e.movementX, e.movementY }.ToString();

					}

				};
			#endregion


			// could we 
			#region onframe
			Native.window.onframe +=
				e =>
				{
					if (this.canvas.parentNode == null)
						return;

					camera.aspect = canvas.clientWidth / (double)canvas.clientHeight;
					camera.updateProjectionMatrix();

					camera.position.z += (z - camera.position.z) * e.delay.ElapsedMilliseconds / 200;

					// autorotation should pause while under mouse drag?
					// the larger the view the slower the rotation shall be
					var speed = 0.0001 * e.delay.ElapsedMilliseconds +
						0.007
						* 96.0 / canvas.clientHeight
						* 1.0 / camera.position.z;
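
					// camera.position.z eases toward the mousewheel target z (simple exponential
					// smoothing scaled by frame delay), and the autorotation speed above scales
					// with the frame delay for frame-rate independence, then inversely with both
					// canvas height and camera distance so enlarged or zoomed-in views spin slower.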

					//Native.document.title = new { s = 96.0 / canvas.clientHeight }.ToString();
					//Native.document.title = new { speed }.ToString();


					//controls.update();
					sphere.rotation.y += speed;
					clouds.rotation.y += speed;


					renderer.render(scene, camera);
				};
			#endregion


			Native.window.onresize +=
				delegate
			{


				//if (canvas.parentNode == Native.document.body)

				// are we embedded?
				if (page != null)
					renderer.setSize();
			};

			//var ze = new ZeProperties();

			//ze.Show();
			//ze.treeView1.Nodes.Clear();

			//ze.Add(() => renderer);
			////ze.Add(() => controls);
			//ze.Add(() => scene);
			//ze.Left = 0;
			//new IStyle(this.canvas.css.before)
			//{
			//    content = "'do a middle click to maximize the earth dashboard'",

			//    left = "1em",
			//    bottom = "1em",

			//    color = "white",

			//    position = IStyle.PositionEnum.absolute
			//};
		}
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //#region ChromeTCPServer
            //dynamic self = Native.self;
            //dynamic self_chrome = self.chrome;
            //object self_chrome_socket = self_chrome.socket;

            //if (self_chrome_socket != null)
            //{
            //    //chrome.Notification.DefaultIconUrl = new HTML.Images.FromAssets.Preview().src;
            //    chrome.Notification.DefaultTitle = "WebGLToAnimatedGIFExperiment";


            //    ChromeTCPServer.TheServerWithStyledForm.Invoke(
            //        AppSource.Text
            //    );

            //    return;
            //}
            //#endregion




            var ani6 = new WebGLEarthByBjorn.Application(null);

            ani6.canvas.AttachTo(page.e1);
            ani6.canvas.style.SetSize(96, 96);
            ani6.canvas.style.position = IStyle.PositionEnum.relative;



            var ani5 = new WebGLSVGAnonymous.Application(null);

            ani5.canvas.AttachTo(page.e1);

            // gif needs a bg?
            //ani4.canvas.style.backgroundColor = "yellow";

            ani5.canvas.style.SetSize(96, 96);
            ani5.canvas.style.position = IStyle.PositionEnum.relative;

            var ani4 = new WebGLColladaExperiment.Application(null);

            ani4.canvas.AttachTo(page.e1);

            // gif needs a bg?
            //ani4.canvas.style.backgroundColor = "yellow";

            ani4.canvas.style.SetSize(96, 96);
            ani4.canvas.style.position = IStyle.PositionEnum.relative;


            var ani3 = new WebGLTetrahedron.Application();

            ani3.gl.canvas.AttachTo(page.e1);

            // : ScriptComponent
            var ani2 = new WebGLEscherDrosteEffect.Application();

            ani2.gl.canvas.AttachTo(page.e1);





            var gl = new WebGLRenderingContext(alpha: false, preserveDrawingBuffer: true);
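            // preserveDrawingBuffer keeps the rendered frame readable after compositing, so the
            // 2D drawImage/getImageData capture below can still grab pixels from this WebGL canvas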

            // replace placeholder
            gl.canvas.id = page.canvas.id;
            page.canvas = gl.canvas;

            page.canvas.width = 96;
            page.canvas.height = 96;

            var s = new SpiralSurface(this);

            this.onsurface(gl);



            this.onresize(page.canvas.width, page.canvas.height);

            var st = new Stopwatch();
            st.Start();

            Native.window.onframe += delegate
            {
                s.ucolor_1 = (float)Math.Sin(st.ElapsedMilliseconds * 0.001) * 0.5f + 0.5f;

                this.onframe();
            };






            // jsc should link that js file once we reference it. for now it's manual


            #region activate
            Action<IHTMLCanvas> activate =
                xcanvas =>
                {
                    var context = new { canvas = xcanvas };

                    context.canvas.style.border = "2px solid yellow";
                    context.canvas.style.cursor = IStyle.CursorEnum.pointer;

                    context.canvas.onclick +=
                         delegate
                         {
                             var c0 = new CanvasRenderingContext2D(96, 96);
                             c0.canvas.AttachToDocument();

                             var frames = new List<byte[]>();

                             // view-source:36524
                             //{ ToBase64String_while_timeout = 00:00:00.504, i = 911952, length = 1454096 } view-source:36524


                             //var framecount = 240;
                             //var delay = 1000 / 60;

                             var framecount = 16;
                             var delay = 1000 / 15;
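                             // 16 frames at ~67 ms each -> the encoded GIF loops roughly once per second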

                             new ScriptCoreLib.JavaScript.Runtime.Timer(
                                 async t =>
                                 {
                                     if (t.Counter == framecount)
                                     {
                                         Console.WriteLine("GIFEncoderWorker!");


                                         var src = await new GIFEncoderWorker(
                                                 96,
                                                 96,
                                                  delay: delay,
                                                 frames: frames
                                         );

                                         Console.WriteLine("done!");

                                         new IHTMLImage { src = src }.AttachToDocument();

                                         return;
                                     }

                                     if (t.Counter >= framecount)
                                     {
                                         c0.bytes = frames[t.Counter % frames.Count];

                                         return;
                                     }

                                     #region force redraw all
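                                     // advance the animation parameter and force a redraw so each captured frame differs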
                                     s.ucolor_2 = t.Counter / 32.0f;

                                     // force redraw
                                     this.onframe();
                                     #endregion


                                     if (!t.IsAlive)
                                         return;

                                     c0.drawImage(context.canvas, 0, 0, 96, 96);
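                                     // drawImage copies the source canvas into this 2D canvas; getImageData below returns its raw RGBA bytes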

                                     frames.Add(c0.getImageData().data);



                                 }
                              ).StartInterval(1000 / 60);

                         };
                };
            #endregion


            activate(gl.canvas);
            activate(ani2.gl.canvas);
            activate(ani3.gl.canvas);
            activate(ani4.canvas);
            activate(ani5.canvas);
            activate(ani6.canvas);





        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // https://developers.google.com/web/fundamentals/input/touch/touchevents/

            // https://googlesamples.github.io/web-fundamentals/samples/input/touch/touch-demo-1.html

            // https://code.google.com/p/chromium/issues/detail?id=516050
            // http://stackoverflow.com/questions/17886073/do-chrome-and-firefox-support-pointer-event-pressure-tilt-touch-etc-any-so
            // https://mobiforge.com/design-development/html5-pointer-events-api-combining-touch-mouse-and-pen

            // http://arstechnica.com/information-technology/2015/03/chromium-team-reverses-course-will-adopt-ies-merged-mouse-touch-apis/
            // http://www.infoq.com/news/2015/04/google-pointer-events

            // http://www.theverge.com/2015/3/25/8291893/google-chrome-pointer-events-support
            // http://www.zdnet.com/article/google-will-implement-pointer-events-api-in-chrome-after-all/

            // http://www.w3.org/TR/pointerevents/

            // http://question.ikende.com/question/32303938343332303831

            // https://www.chromestatus.com/features/4504699138998272

            // https://msdn.microsoft.com/en-us/library/hh771911(v=vs.85).aspx

            var xPointerEvent = (Native.window as dynamic).PointerEvent;
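            // window.PointerEvent is only defined when the browser implements the Pointer Events API
            // (see the blink flag below)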

            // --enable-blink-features=PointerEvent
            new IHTMLPre { new { xPointerEvent } }.AttachToDocument();

            //  dom.w3c_pointer_events.enabled
            //  dom.w3c_touch_events.enabled
            // https://hacks.mozilla.org/2015/08/pointer-events-now-in-firefox-nightly/
            new IHTMLPre { "firefox nightly seems to do pen tilt?" }.AttachToDocument();
            // works with IE too.
            // yet only if we had tilt and pressure
            // does the XT even have them natively?

            var onpointerover = new IHTMLPre { "onpointerover ?" }.AttachToDocument();
            var onpointerdown = new IHTMLPre { "onpointerdown ?" }.AttachToDocument();
            var onpointermove = new IHTMLPre { "onpointermove ?" }.AttachToDocument();

            var c = new CanvasRenderingContext2D(800, 200);

            c.canvas.style.border = "1px solid blue";
            c.canvas.AttachToDocument();
            c.canvas.style.SetLocation(0, 0);

            Native.document.body.style.marginTop = "200px";

            c.beginPath();
            c.moveTo(0, 0);

            // what if the XT does not have tilt data available?
            // X:\opensource\unmonitored\WintabDN\FormTestApp\TestForm.cs

            c.canvas.onpointerover += e =>
            {
                // got a stylus dell xt?
                // make sure ip is set to dhcp to get into wifi
                // make sure onenote still works...

                e.stopPropagation();
                e.preventDefault();

                onpointerover.innerText = "onpointerover " + new { e.pointerType, e.tiltX, e.tiltY, e.pressure, e.movementX };

            };

            c.canvas.onpointerdown += e =>
            {
                // got a stylus dell xt?
                // make sure ip is set to dhcp to get into wifi
                // make sure onenote still works...

                e.stopPropagation();
                e.preventDefault();

                onpointerdown.innerText = "onpointerdown " + new { e.pointerType, e.tiltX, e.tiltY, e.pressure, e.movementX };

            };

            //Native.document.body.onpointermove += e =>
            c.canvas.onpointermove += e =>
            {
                // got a stylus dell xt?
                // make sure ip is set to dhcp to get into wifi
                // make sure onenote still works...
                e.stopPropagation();
                e.preventDefault();


                onpointermove.innerText = "onpointermove " + new { e.pointerType, e.tiltX, e.tiltY, e.pressure, e.movementX };


                //Native.document.body.ScrollToBottom();


                if (e.pressure > 0)
                    c.strokeStyle = "red";
                else
                    c.strokeStyle = "blue";
                c.lineTo(e.CursorX, e.CursorY);
                //c.lineTo(e.OffsetX, e.OffsetY);
                //c.lineTo(e.movementX, e.movementY);
                c.stroke();
                // https://web.archive.org/web/20150423093435/http://www.n-trig.com/wintab-compatible-applications/
                // https://web.archive.org/web/20150318140957/http://www.n-trig.com/wintab-2/
                // https://web.archive.org/web/20150316135525/http://www.wacomeng.com/windows/index.html


            };

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001

            // https://msdn.microsoft.com/en-us/library/windows/hardware/jj151564(v=vs.85).aspx

        }
        public Application(IApp page)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151101

            // first lets do some chrome extension magic.

            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;



            #region self_chrome_tabs extension
            object self_chrome_tabs = self_chrome.tabs;
            if (self_chrome_tabs != null)
            {
                Console.WriteLine("running as extension " + new { typeof(Application).Assembly.GetName().Name });
                //  verified.
                // running as extension {{ Name = ChromeHybridCaptureAE.Application }}

                new { }.With(
                     async delegate
                     {
                         app = (await chrome.management.getAll()).FirstOrDefault(item => item.name.StartsWith(typeof(Application).Assembly.GetName().Name));

                         if (app == null)
                         {
                             Console.WriteLine("running as extension. app not found.");
                             return;
                         }

                         // running as extension {{ shortName = ChromeHybridCaptureAE.Application.exe, launchType = OPEN_AS_WINDOW }}
                         Console.WriteLine("running as extension " + new { app.shortName, app.launchType });

                         chrome.runtime.connect(app.id).With(
                            async (chrome.Port port) =>
                            {
                                //Console.WriteLine("extension chrome.runtime.connect done " + new { port.name, port.sender.id });
                                //Console.WriteLine("extension chrome.runtime.connect done " + new { port.name, port.sender });
                                Console.WriteLine("extension chrome.runtime.connect done, click launch");



                                #region extension: HopToChromeApp.VirtualOnCompleted
                                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp
                                HopToChromeApp.VirtualOnCompleted = async (that, continuation) =>
                                {
                                    // state 0 ? or state -1 ?
                                    Console.WriteLine("extension HopToChromeApp enter ");

                                    TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);

                                    // now send the jump instruction... will it make it?
                                    port.postMessage(r.shadowstate);
                                };
                                #endregion



                                // is async better than event += here?
                                var m = default(chrome.PortMessageEvent);
                                while (m = await port.async.onmessage)
                                {
                                    var message = m.data;

                                    // verified.

                                    #region IAsyncStateMachine
                                    // casting from anonymous object.
                                    var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                                    // 12961ms extension  port.onMessage {{ state = null, TypeName = null }}
                                    // or constructor id?
                                    Console.WriteLine("extension  port.onMessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });


                                    // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                                    //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                                    //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                                    //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                                    #region xAsyncStateMachineType
                                    var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                                        xAsyncStateMachineTypeCandidate =>
                                        {
                                            // safety check 1

                                            //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                            var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                            if (xisIAsyncStateMachine)
                                            {
                                                //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                                return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                            }

                                            return false;
                                        }
                                    );
                                    #endregion

                                    Console.WriteLine(new { xAsyncStateMachineType });

                                    var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                                    var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                                    var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                                    #region 1__state
                                    xAsyncStateMachineType.GetFields(
                                        System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                                        ).WithEach(
                                        AsyncStateMachineSourceField =>
                                        {

                                            //Console.WriteLine(new { AsyncStateMachineSourceField });

                                            if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                                            {
                                                AsyncStateMachineSourceField.SetValue(
                                                    NewStateMachineI,
                                                    xShadowIAsyncStateMachine.state
                                                );
                                            }

                                            var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                               f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                                           );

                                            if (xStringField != null)
                                            {
                                                // once we are to go back to client. we need to reverse it?

                                                AsyncStateMachineSourceField.SetValue(
                                                    NewStateMachineI,
                                                    xStringField.value
                                                 );
                                                // next xml?
                                                // before that, let's send our strings back with the new state!
                                                // what about exceptions?
                                            }
                                        }
                                    );
                                    #endregion

                                    NewStateMachineI.MoveNext();
                                    #endregion

                                }
                            }
                          );
                     }
                );

                return;
            }
            #endregion

            object self_chrome_socket = self_chrome.socket;
            if (self_chrome_socket != null)
            {
                #region running as appwindow
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("running as appwindow");


                    MessagePort appwindow_to_app2 = null;

                    // called by? 619  app:HopToChromeAppWindow
                    #region  appwindow Native.window.onmessage
                    Native.window.onmessage += e =>
                    {
                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow

                        // appwindow Native.window.onmessage {{ data = app to appwindow! }}

                        var message = e.data;

                        //Console.WriteLine("appwindow Native.window.onmessage " + new { e.data });


                        // extension  port.onMessage {{ message = from app hello to extension }}
                        //var expando_isstring = ScriptCoreLib.JavaScript.Runtime.Expando.Of(message).IsString;

                        // look app sent a message to extension
                        //Console.WriteLine("app  port.onMessage " + new { message });

                        {
                            // DataCloneError: Failed to execute 'postMessage' on 'Window': Port at index 0 is already neutered.

                            if (e.ports != null)
                                foreach (var port in e.ports)
                                {
                                    Console.WriteLine("appwindow    Native.window.onmessage " + new { port });

                                    appwindow_to_app2 = port;
                                }

                        }

                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp




                        // casting from anonymous object.
                        var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                        // or constructor id?
                        Console.WriteLine("appwindow     Native.window.onmessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });

                        // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                        //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                        #region xAsyncStateMachineType
                        var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                            xAsyncStateMachineTypeCandidate =>
                            {
                                // safety check 1

                                //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                if (xisIAsyncStateMachine)
                                {
                                    //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                    return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                }

                                return false;
                            }
                        );
                        #endregion


                        var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                        var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                        var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                        #region 1__state
                        xAsyncStateMachineType.GetFields(
                          System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                          ).WithEach(
                           AsyncStateMachineSourceField =>
                           {

                               //Console.WriteLine(new { AsyncStateMachineSourceField });

                               if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                               {
                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xShadowIAsyncStateMachine.state
                                    );
                               }

                               // X:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                               // field names/ tokens need to be encrypted like typeinfo.

                               // some do manual restore
                               // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                               // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                               // or, are we supposed to initialize a string value here?

                               var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                   f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                               );

                               if (xStringField != null)
                               {
                                   // once we are to go back to client. we need to reverse it?

                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xStringField.value
                                    );
                                   // next xml?
                                   // before that, let's send our strings back with the new state!
                                   // what about exceptions?
                               }
                           }
                      );
                        #endregion

                        NewStateMachineI.MoveNext();

                    };
                    #endregion


                    #region appwindow:HopToChromeApp
                    HopToChromeApp.VirtualOnCompleted = async (that, continuation) =>
                    {
                        // do we have the port to send back our portal warp?

                        // state 0 ? or state -1 ?
                        Console.WriteLine("appwindow HopToChromeApp  enter " + new { appwindow_to_app2 });
                        // appwindow HopToChromeApp  enter {{ appwindow_to_app2 = null }}

                        //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                        //// async dont like ref?
                        TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);
                        // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}
                        //appwindow_to_app.postMessage("do HopToChromeAppWindow " + new { r.shadowstate.TypeName, r.shadowstate.state });
                        // now send the jump instruction... will it make it?
                        appwindow_to_app2.postMessage(r.shadowstate);
                    };
                    #endregion

                    return;
                }
                #endregion


                // running as app {{ Name = ChromeHybridCaptureAE.Application }} now reenable extension..
                Console.WriteLine("running as app " + new { typeof(Application).Assembly.GetName().Name } + " now reenable extension..");



                #region app:appwindow_to_app
                Action<object> appwindow_to_app = data =>
                {
                    var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)data;

                    Console.WriteLine("app appwindow_to_app " + new { xShadowIAsyncStateMachine.TypeName });

                    #region xAsyncStateMachineType
                    var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                        xAsyncStateMachineTypeCandidate =>
                        {
                            // safety check 1

                            //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                            var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                            if (xisIAsyncStateMachine)
                            {
                                //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                            }

                            return false;
                        }
                    );
                    #endregion

                    Console.WriteLine("app appwindow_to_app " + new { xAsyncStateMachineType });

                    var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                    var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                    var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                    #region 1__state
                    xAsyncStateMachineType.GetFields(
                      System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                      ).WithEach(
                       AsyncStateMachineSourceField =>
                       {

                           //Console.WriteLine(new { AsyncStateMachineSourceField });

                           if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                           {
                               AsyncStateMachineSourceField.SetValue(
                                   NewStateMachineI,
                                   xShadowIAsyncStateMachine.state
                                );
                           }

                           // X:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                           // field names/ tokens need to be encrypted like typeinfo.

                           // some do manual restore
                           // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                           // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                           // or, are we supposed to initialize a string value here?

                           var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                               f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                           );

                           if (xStringField != null)
                           {
                               // once we are to go back to client. we need to reverse it?

                               AsyncStateMachineSourceField.SetValue(
                                   NewStateMachineI,
                                   xStringField.value
                                );
                               // next xml?
                               // before that, let's send our strings back with the new state!
                               // what about exceptions?
                           }
                       }
                  );
                    #endregion

                    NewStateMachineI.MoveNext();
                };
                #endregion

                var c = new MessageChannel();
                var cneutered = c;
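                // port1 stays on the app side and dispatches incoming state machines to appwindow_to_app;
                // port2 will be transferred to the app window with the first postMessage below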
                c.port1.onmessage += e =>
                {
                    Console.WriteLine("app HopToChromeAppWindow MessageChannel onmessage " + new { e.data });

                    appwindow_to_app(e.data);
                };

                c.port1.start();
                c.port2.start();

                #region app:HopToChromeAppWindow
                HopToChromeAppWindow.VirtualOnCompleted = async (that, continuation) =>
                {
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150824/webgliframebuffer

                    // state 0 ? or state -1 ?
                    Console.WriteLine("app HopToChromeAppWindow  enter ");

                    #region outputWindow
                    if (that.window == null)
                    {
                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow


                        if (outputWindow == null)
                        {
                            // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                            outputWindow = await chrome.app.window.create(
                                                   Native.document.location.pathname,

                                                   // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                                                   // this actually works, but we won't see the console in the app log..
                                                   options: new { hidden = true, alwaysOnTop = true }
                                            );

                            ////xappwindow.setAlwaysOnTop

                            // or can we stay hidden?
                            //that.window.show();

                            await outputWindow.contentWindow.async.onload;
                        }
                        // reuse the window...
                        that.window = outputWindow;
                    }
                    #endregion


                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                    // async doesn't like ref?
                    TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);
                    // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}



                    // Z:\jsc.svn\core\ScriptCoreLib\JavaScript\DOM\IWindow.postMessage.cs
                    // how do we use this thing?



                    //                    15ms appwindow    Native.window.onmessage: {{ ports = [object MessagePort] }}
                    //2015-08-22 20:50:18.019 view-source:53702 17ms appwindow    Native.window.onmessage: {{ port = [object MessagePort] }}
                    //that.window.contentWindow.postMessage("do HopToChromeAppWindow " + new { r.shadowstate.TypeName, r.shadowstate.state }, transfer: c.port2);

                    // now send the jump instruction... will it make it?
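                    // a MessagePort can only be transferred once; after the first postMessage the local
                    // reference is neutered, so later hops post the state without a transfer list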
                    if (cneutered != null)
                    {
                        that.window.contentWindow.postMessage(r.shadowstate, transfer: cneutered.port2);
                        cneutered = null;
                    }
                    else
                    {
                        that.window.contentWindow.postMessage(r.shadowstate);

                    }
                };
                #endregion

                #region ConnectExternal
                chrome.runtime.ConnectExternal += async port =>
                {
                    // app chrome.runtime.ConnectExternal {{ id = jadmeogmbokffpkdfeiemjplohfgkidd }} now click launch!
                    Console.WriteLine("app chrome.runtime.ConnectExternal " + new { port.sender.id } + " now click launch!");

                    new chrome.Notification(
                        title: typeof(Application).Assembly.GetName().Name,
                        message: "ready. click launch"
                    ).Clicked += delegate
                    {
                        // https://developer.chrome.com/apps/app_runtime

                        // management_api
                    };

                    // can we have async thing here?


                    #region HopToChromeExtension
                    HopToChromeExtension.VirtualOnCompleted = async (that, continuation) =>
                    {
                        // state 0 ? or state -1 ?
                        Console.WriteLine("app HopToChromeExtension enter ");

                        // where is it defined?
                        // z:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\ShadowIAsyncStateMachine.cs

                        // async doesn't like ref?
                        TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);


                        // now send the jump instruction... will it make it?
                        port.postMessage(r.shadowstate);


                        // how would we know to continue from current continuation?
                        // or are we fine to rebuild the scope if we jump back?
                    };
                    #endregion



                    var m = default(chrome.PortMessageEvent);
                    while (m = await port.async.onmessage)
                    {
                        //var m = await port.async.onmessage;

                        var message = m.data;

                        #region IAsyncStateMachine
                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp
                        // casting from anonymous object.
                        var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                        // or constructor id?
                        Console.WriteLine("app  port.onMessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });

                        // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                        //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                        #region xAsyncStateMachineType
                        var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                            xAsyncStateMachineTypeCandidate =>
                            {
                                // safety check 1

                                //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                if (xisIAsyncStateMachine)
                                {
                                    //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                    return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                }

                                return false;
                            }
                        );
                        #endregion


                        var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                        var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                        var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                        #region 1__state
                        xAsyncStateMachineType.GetFields(
                          System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                          ).WithEach(
                           AsyncStateMachineSourceField =>
                           {

                               //Console.WriteLine(new { AsyncStateMachineSourceField });

                               if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                               {
                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xShadowIAsyncStateMachine.state
                                    );
                               }


                               // z:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                               // field names/ tokens need to be encrypted like typeinfo.

                               // some do manual restore
                               // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                               // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                               // or, are we supposed to initialize a string value here?

                               var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                   f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                               );

                               if (xStringField != null)
                               {
                                   // once we are to go back to client. we need to reverse it?

                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xStringField.value
                                   );
                                   // next xml?
                                   // before that, let's send our strings back with the new state!
                                   // what about exceptions?
                               }
                           }
                      );
                        #endregion

                        NewStateMachineI.MoveNext();
                        #endregion
                    }
                };
                #endregion




                // nuget chrome
                chrome.app.runtime.Launched += async delegate
                {
                    { fixup: await Task.CompletedTask; }



                    Console.WriteLine("app Launched, will hop to extension");
                    // verified.


                    await default(HopToChromeExtension);

                    // https://developer.chrome.com/extensions/management
                    // as an extension we can now inspect our app?
                    Console.WriteLine("hop from app to extension " + new { app.name });
                    // hop from app to extension {{ name = ChromeHybridCaptureAE.Application.exe }}


                    // what else can we do as an extension?

                    // let's hop back?

                    await default(HopToChromeApp);

                    Console.WriteLine("hop from extension to app ");

                    // ok, hopping works. let's do some UI now.
                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                    await default(HopToChromeAppWindow);


                    Native.document.documentElement.style.overflow = IStyle.OverflowEnum.auto;
                    //Native.body.style.overflow = IStyle.OverflowEnum.auto;
                    Native.body.Clear();
                    (Native.body.style as dynamic).webkitUserSelect = "text";

                    chrome.app.window.current().show();

                    // now what?

                    new IHTMLPre { "now what?" }.AttachToDocument();

                    // let's have some UI to do a fullscreen tab capture?
                    // http://earth.nullschool.net/#2015/10/31/1500Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169
                    // http://earth.nullschool.net/#2015/10/31/2100Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169

                    // TypeError: Cannot set property 'uri' of null
                    // jsc state machine hop doesn't like scopes yet. static it is.
                    uri = new IHTMLInput { value = "http://earth.nullschool.net/#2015/10/31/2100Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169" }.AttachToDocument();
                    uri.style.width = "100%";

                    Console.WriteLine("appwindow: " + new { uri });

                    new IHTMLPre { () => new { index } }.AttachToDocument();

                    frameID = new IHTMLInput
                    {
                        type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range,
                        min = 0,
                        //max = 3600,
                        max = 240,
                        valueAsNumber = 0
                    }.AttachToDocument().With(
                        async i =>
                        {

                            do
                            {
                                // http://earth.nullschool.net/#2015/10/01/0000Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169
                                index = (int)i.valueAsNumber;

                                var hh = 3 * index;
                                var dd = 1 + Math.Floor(hh / 24.0);

                                uri.value =
                                    "http://earth.nullschool.net/#2015/10/"
                                    + dd.ToString().PadLeft(2, '0')
                                    + "/"
                                    + (hh % 24).ToString().PadLeft(2, '0')
                                    + "00Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169";

                            }
                            while (await i.async.onchange);
                        }
                    );
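                    // worked example of the slider math above (not from the original):
                    // index = 10 -> hh = 3 * 10 = 30 -> dd = 1 + floor(30 / 24) = 2, hh % 24 = 6
                    // -> "http://earth.nullschool.net/#2015/10/02/0600Z/wind/..."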

                    go = new IHTMLButton { "go" }.AttachToDocument();

                    Console.WriteLine("appwindow: " + new { go });

                    go.onclick += async delegate
                    {
                        //Error CS0158  The label 'fixup' shadows another label by the same name in a contained scope ChromeHybridCaptureAE   Z:\jsc.svn\examples\javascript\chrome\hybrid\ChromeHybridCaptureAE\Application.cs   791
                        { fixup: await Task.CompletedTask; }

                        Native.body.style.backgroundColor = "yellow";

                        var scope_uri = uri.value;

                        #region  await default(HopToChromeExtension)
                        await default(HopToChromeApp);

                        Console.WriteLine("hop from appwindow to app! " + new { scope_uri });
                        // verify


                        await default(HopToChromeExtension);
                        #endregion


                        Console.WriteLine("hop from app to extension! " + new { scope_uri });

                        //  hop from app to extension! {{ scope_uri = http://earth.nullschool.net/#2015/10/31/2100Z/wind/surface/level/anim=off/overlay=temp/azimuthal_equidistant=24.64,98.15,169 }}

                        var tabwindow = await chrome.windows.create(new { state = "fullscreen", url = scope_uri });

                        // um. unless we hop into it we won't know when it's ready?
                        //await Task.Delay(7000);
                        await Task.Delay(5000);

                        // Error: Invalid value for argument 2. Property 'format': Value must be one of: [jpeg, png]. at validate 
                        var captureVisibleTab = await tabwindow.id.captureVisibleTab(options: new { format = "jpeg" });

                        Console.WriteLine("extension captureVisibleTab " + new { captureVisibleTab.Length });

                        await tabwindow.id.remove();


                        #region await default(HopToChromeAppWindow)
                        await default(HopToChromeApp);
                        Console.WriteLine("app " + new { captureVisibleTab.Length });
                        await default(HopToChromeAppWindow);
                        Console.WriteLine("appwindow " + new { captureVisibleTab.Length });
                        #endregion

                        // appwindow {{ Length = 272711 }}

                        var icaptureVisibleTabFull = await new IHTMLImage { src = captureVisibleTab }.async.oncomplete;

                        // {{ width = 1920, height = 1080 }}
                        new IHTMLPre { new { icaptureVisibleTabFull.width, icaptureVisibleTabFull.height } };
                        //.AttachToDocument();


                        var focus = new CanvasRenderingContext2D(icaptureVisibleTabFull.height, icaptureVisibleTabFull.height);
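                        // 9-argument drawImage: crop the centered height x height square out of the
                        // full-width capture and copy it 1:1 into the square canvas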
                        focus.drawImage(icaptureVisibleTabFull, (icaptureVisibleTabFull.width - icaptureVisibleTabFull.height) / 2, 0, icaptureVisibleTabFull.height, icaptureVisibleTabFull.height, 0, 0, icaptureVisibleTabFull.height, icaptureVisibleTabFull.height);
                        var icaptureVisibleTab = new IHTMLImage { src = focus.canvas.toDataURL() }.AttachToDocument();


                        icaptureVisibleTab.style.width = "100%";
                        icaptureVisibleTab.AttachToDocument();

                        await new IHTMLButton { "save " }.AttachToDocument().async.onclick;

                        if (dir == null)
                            dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                        var file = index.ToString().PadLeft(5, '0') + ".jpg";

                        await dir.WriteAllBytes(file, focus);

                        //frameID.valueAsNumber++;

                        await new IHTMLButton { "automate " + new { frameID = frameID.valueAsNumber } }.AttachToDocument().async.onclick;

                        //new IHTMLPre { uri.value }.AttachToDocument();

                        next:

                        frameID.valueAsNumber++;
                        await Native.window.async.onframe;
                        await Native.window.async.onframe;

                        //new IHTMLPre { uri.value }.AttachToDocument();

                        // go?

                        // TypeError: Cannot read property 'id' of null
                        scope_uri = uri.value;

                        #region  await default(HopToChromeExtension)
                        await default(HopToChromeApp);

                        Console.WriteLine("hop from appwindow to app! " + new { scope_uri });
                        // verify


                        await default(HopToChromeExtension);
                        #endregion
                        var tabwindow2 = await chrome.windows.create(new { state = "fullscreen", url = scope_uri });
                        await Task.Delay(5000);
                        var captureVisibleTab2 = await tabwindow2.id.captureVisibleTab(options: new { format = "jpeg" });
                        await tabwindow2.id.remove();
                        #region await default(HopToChromeAppWindow)
                        await default(HopToChromeApp);
                        Console.WriteLine("app " + new { captureVisibleTab.Length });
                        await default(HopToChromeAppWindow);
                        Console.WriteLine("appwindow " + new { captureVisibleTab.Length });
                        #endregion
                        var icaptureVisibleTabFull2 = await new IHTMLImage { src = captureVisibleTab2 }.async.oncomplete;
                        var focus2 = new CanvasRenderingContext2D(icaptureVisibleTabFull2.height, icaptureVisibleTabFull2.height);
                        focus2.drawImage(icaptureVisibleTabFull2, (icaptureVisibleTabFull2.width - icaptureVisibleTabFull2.height) / 2, 0, icaptureVisibleTabFull2.height, icaptureVisibleTabFull2.height, 0, 0, icaptureVisibleTabFull2.height, icaptureVisibleTabFull2.height);
                        var icaptureVisibleTab2 = new IHTMLImage { src = focus2.canvas.toDataURL() };
                        var file2 = index.ToString().PadLeft(5, '0') + ".jpg";
                        await dir.WriteAllBytes(file2, focus2);

                        // done?
                        goto next;

                    };


                    await default(HopToChromeApp);

                    Console.WriteLine("hop from appwindow to app!");

                };


                return;
            }

            Console.WriteLine("running as content?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712
            new { }.With(
                async delegate
                {
                    Native.body.Clear();

                    // would it be easy to do head tracking via webcam for VR?
                    // the app would run on android, yet
                    // two satellites could spawn on two laptops to track the head.

                    // would we be able to thread hop between camera devices and android?

                    // http://shopap.lenovo.com/hk/en/laptops/lenovo/u-series/u330p/
                    // The U330p's integrated 720p HD webcam

                    Console.WriteLine("awaiting onvideo...");
                    var v = await Native.window.navigator.async.onvideo;
                    Console.WriteLine("awaiting onvideo... done");

                    // 93ms getUserMedia error { code = , err = [object NavigatorUserMediaError] }

                    v.AttachToDocument();

                    v.play();

                    // what do we see at this point?

                    // first, could we detect greenscreen without having one?

                    // assuming the camera is static, we could remove the pixels that never seem to move (see the commented sketch near the rgba_bytes loop below)

                    // a shader program consuming the video would be able to apply the effects a lot faster;
                    // doing it in the UI thread will slow it down.

                    //					videoHeight: 480
                    //videoWidth: 640

                    // 
                    new IHTMLPre {

                        new { v.videoWidth, v.videoHeight }
                    }.AttachToDocument();
                    // do we know the size of the cam?
                    // {{ videoWidth = 0, videoHeight = 0 }}

                    var sw = Stopwatch.StartNew();

                    //Error CS4004  Cannot await in an unsafe context TestGetUserMedia    X:\jsc.svn\examples\javascript\async\test\TestGetUserMedia\TestGetUserMedia\Application.cs  45
                    // https://social.msdn.microsoft.com/Forums/en-US/29a3ca5b-c783-4197-af08-7b3c83585e99/minor-compiler-message-unsafe-async?forum=async


                    while (v.videoWidth == 0)
                        await Native.window.async.onframe;

                    new IHTMLPre {
                        new { v.videoWidth, v.videoHeight, sw.ElapsedMilliseconds, Environment.ProcessorCount }
                    }.AttachToDocument();

                    // {{ videoWidth = 640, videoHeight = 480, ElapsedMilliseconds = 793, ProcessorCount = 4 }}
                    // {{ videoWidth = 1280, videoHeight = 720, ElapsedMilliseconds = 368, ProcessorCount = 4 }}


                    var frame0 = new CanvasRenderingContext2D(
                        v.videoWidth, v.videoHeight
                    );

                    frame0.canvas.AttachToDocument();

                    var frame0sw = Stopwatch.StartNew();
                    var frame0c = 0;

                    // battery/full speed
                    // {{ frame0c = 1752, ElapsedMilliseconds = 66 }}

                    // 
                    new IHTMLPre {
                                () => new { frame0c, frame0sw.ElapsedMilliseconds, fps = 1000 / frame0sw.ElapsedMilliseconds  }
                            }.AttachToDocument();


                    // jsc, when can we start using semaphores?
                    // this could also trigger state sync
                    //var xx = new System.Threading.SemaphoreSlim(1);

                    var slider = new IHTMLInput
                    {
                        type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range,
                        max = 4 * v.videoWidth * v.videoHeight,
                        valueAsNumber = 2 * v.videoWidth * v.videoHeight
                    }.AttachToDocument();
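
                    // the slider decides how many of the frame's RGBA bytes are touched in the per-frame
                    // loop below: max covers the whole frame (4 bytes per pixel), the default covers half of it.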

                    new { }.With(
                        async delegate
                        {
                            // could we hop into a worker thread and await the bytes to render?

                            // this is essentially a shader

                            // switch to worker here
                            // at runtime we should know, which fields in this state are in use

                            do
                            {
                                frame0c++;
                                frame0sw = Stopwatch.StartNew();


                                frame0.drawImage(
                                    v,
                                    0, 0, v.videoWidth, v.videoHeight);


                                // could we do thread hopping here to multicore-process the data without shaders?
                                // RGB,
                                // would we have each core work on 8 bits, a single color?

                                // X:\jsc.svn\examples\javascript\canvas\CanvasFromBytes\CanvasFromBytes\Application.cs 
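                                // a rough, hypothetical sketch of that idea using plain TPL tasks
                                // (whether jsc's worker hopping maps onto this is an open question):
                                // slice the frame into ProcessorCount stripes and let each worker
                                // touch only its own disjoint range of the rgba byte array.
                                //
                                //var stripes = Environment.ProcessorCount;
                                //var stripeLength = rgba_bytes.Length / stripes;
                                //
                                //await Task.WhenAll(
                                //    Enumerable.Range(0, stripes).Select(core => Task.Run(
                                //        delegate
                                //        {
                                //            var start = core * stripeLength;
                                //
                                //            for (int p = start; p < start + stripeLength; p += 4)
                                //            {
                                //                // each worker owns a disjoint range, so no locking is needed
                                //                rgba_bytes[p + 2] = 0;
                                //            }
                                //        }
                                //    ))
                                //);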


                                var rgba_bytes = frame0.bytes;


                                //var rgba_pixels = (rgba[])rgba_bytes;
                                //Error CS0030  Cannot convert type 'byte[]' to 'TestGetUserMedia.rgba[]'   TestGetUserMedia X:\jsc.svn\examples\javascript\async\test\TestGetUserMedia\TestGetUserMedia\Application.cs  98

#if FPOINTERS
					unsafe
					{
						//Error CS0030  Cannot convert type 'byte[]' to 'TestGetUserMedia.rgba*'    TestGetUserMedia X:\jsc.svn\examples\javascript\async\test\TestGetUserMedia\TestGetUserMedia\Application.cs  109
						//var rgba_pixels = (rgba*)rgba_bytes;
						//Error CS0030  Cannot convert type 'byte[]' to 'void*' TestGetUserMedia X:\jsc.svn\examples\javascript\async\test\TestGetUserMedia\TestGetUserMedia\Application.cs  110

						fixed (byte* rgba_ptr = rgba_bytes)
						{
							// script: error JSC1000: running a newer compiler? opcode unsupported - [0x0035] sizeof     +1 -0

							// how would a shader do it?
							var rgba_pixels = (rgba*)rgba_ptr;

							// looks legit

							// how does it compile for js?

							for (int x = 0; x < v.videoWidth; x++)
								for (int y = 0; y < v.videoHeight; y++)
								{
									rgba_pixels[x + y * v.videoWidth].b = 0;
									rgba_pixels[x + y * v.videoWidth].g = 0;

								}

						}

					}
#endif

                                // make it all blue
                                // glsl. u8vec4

                                // lets deal only with first half of bytes
                                //for (int x = 0; x < rgba_bytes.Length / 2; x += 4)
                                //for (int x = 0; x < rgba_bytes.Length; x += 4)
                                for (int x = 0; x < slider.valueAsNumber; x += 4)
                                {
                                    //// red
                                    //rgba_bytes[x + 0] = 0;
                                    //rgba_bytes[x + 1] = (byte)(1 - rgba_bytes[x + 1]);
                                    //// blue
                                    //rgba_bytes[x + 2] = 0;


                                    // green becomes the inverted average of the original RGB;
                                    // compute it before clearing red, otherwise the already-zeroed
                                    // red channel skews the average
                                    rgba_bytes[x + 1] = (byte)(
                                        (3 * 255 - rgba_bytes[x + 0] - rgba_bytes[x + 1] - rgba_bytes[x + 2])
                                        / 3
                                    );

                                    // red
                                    rgba_bytes[x + 0] = 0;

                                    // blue
                                    rgba_bytes[x + 2] = 0;
                                }

                                frame0.bytes = rgba_bytes;


                            } while (await Native.window.async.onframe);
                        }
                    );



                    // {{ videoWidth = 640, videoHeight = 480, ElapsedMilliseconds = 109 }}

                    await Native.window.async.onblur;

                    // stream is not stopped yet?
                    //v.Orphanize();

                    Native.body.style.backgroundColor = "yellow";
                }
            );
        }
        public Application(IApp page)
        {
            // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeTabsExperiment\ChromeTabsExperiment\Application.cs
            Console.WriteLine("enter ChromeHybridCapture Application");


            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
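
            // probing the chrome API surface tells us which context loaded this code:
            // chrome.tabs is only handed to the extension, chrome.socket only to the packaged app,
            // and a plain content page gets neither.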



            #region self_chrome_tabs extension
            object self_chrome_tabs = self_chrome.tabs;
            if (self_chrome_tabs != null)
            {
                // running as extension {{ FullName = <Namespace>.Application }}

                //Console.WriteLine("running as extension " + new { typeof(Application).FullName });
                //Console.WriteLine("running as extension " + new { typeof(Application).Assembly.FullName });

                // can we find and connect to app?
                Console.WriteLine("running as extension " + new { typeof(Application).Assembly.GetName().Name });

                //70ms chrome.management.getAll
                //2015-08-22 13:41:33.591 view-source:53670 98ms chrome.management.getAll {{ Length = 28 }}
                //2015-08-22 13:41:33.594 view-source:53670 101ms ExtensionInfo {{ id = aemlnmcokphbneegoefdckonejmknohh, name = ChromeHybridCapture }}
                //2015-08-22 13:41:33.597 view-source:53670 104ms ExtensionInfo {{ id = apdfllckaahabafndbhieahigkjlhalf, name = Google Drive }}
                //2015-08-22 13:41:33.599 view-source:53670 106ms ExtensionInfo {{ id = blpcfgokakmgnkcojhhkbfbldkacnbeo, name = YouTube }}
                //2015-08-22 13:41:33.602 view-source:53670 109ms ExtensionInfo {{ id = cgnjcccfcjhdnbfgjgllglbhfcgndmea, name = WebGLHZBlendCharacter }}
                //2015-08-22 13:41:33.604 view-source:53670 111ms ExtensionInfo {{ id = coobgpohoikkiipiblmjeljniedjpjpf, name = Google Search }}
                //2015-08-22 13:41:33.608 view-source:53670 114ms ExtensionInfo {{ id = fkgibadjpabiongmgoeomdbcefhabmah, name = ChromeCaptureToFile.Application.exe }}
                //2015-08-22 13:41:33.610 view-source:53670 117ms ExtensionInfo {{ id = ghbmnnjooekpmoecnnnilnnbdlolhkhi, name = Google Docs Offline }}
                //2015-08-22 13:41:33.612 view-source:53670 119ms ExtensionInfo {{ id = haebnnbpedcbhciplfhjjkbafijpncjl, name = TinEye Reverse Image Search }}
                //2015-08-22 13:41:33.614 view-source:53670 121ms ExtensionInfo {{ id = lchcahaldakdnjlkchkgncecgpcnabgo, name = Heat Zeeker }}
                //2015-08-22 13:41:33.616 view-source:53670 123ms ExtensionInfo {{ id = nhkcfbkpodjkallcfebgihcoglfaniep, name = freenode irc }}
                //2015-08-22 13:41:33.619 view-source:53670 126ms ExtensionInfo {{ id = ogmpedngmnolclkmlpcdgmfonlagkejp, name = Private Joe: Urban Warfare }}
                //2015-08-22 13:41:33.621 view-source:53670 128ms ExtensionInfo {{ id = pcklgpcdddecpmkiinpkhehanbijjepn, name = idea-remixer }}
                //2015-08-22 13:41:33.624 view-source:53670 131ms ExtensionInfo {{ id = pjkljhegncpnkpknbcohdijeoejaedia, name = Gmail }}
                //2015-08-22 13:41:33.626 view-source:53670 133ms ExtensionInfo {{ id = plgmlhohecdddhbmmkncjdmlhcmaachm, name = draw.io (Legacy) }}
                //2015-08-22 13:41:33.629 view-source:53670 136ms ExtensionInfo {{ id = aapbdbdomjkkjkaonfhkkikfgjllcleb, name = Google Translate }}
                //2015-08-22 13:41:33.631 view-source:53670 138ms ExtensionInfo {{ id = bcfddoencoiedfjgepnlhcpfikgaogdg, name = QR-Code Tag Extension }}
                //2015-08-22 13:41:33.633 view-source:53670 139ms ExtensionInfo {{ id = coblegoildgpecccijneplifmeghcgip, name = Web Cache }}
                //2015-08-22 13:41:33.635 view-source:53670 142ms ExtensionInfo {{ id = ganlifbpkcplnldliibcbegplfmcfigp, name = Collusion for Chrome }}
                //2015-08-22 13:41:33.638 view-source:53670 145ms ExtensionInfo {{ id = gighmmpiobklfepjocnamgkkbiglidom, name = AdBlock }}
                //2015-08-22 13:41:33.640 view-source:53670 147ms ExtensionInfo {{ id = iiabebggdceojiejhopnopmbkgandhha, name = Operation Heat Zeeker }}
                //2015-08-22 13:41:33.642 view-source:53670 149ms ExtensionInfo {{ id = jkgfnfnagdnjicmonpfhhdnkdjgjdamo, name = Avalon Spider Solitaire }}
                //2015-08-22 13:41:33.644 view-source:53670 151ms ExtensionInfo {{ id = kdifgkljkjhpflhalpkhehlldfakggdi, name = my.jsc-solutions.net }}
                //2015-08-22 13:41:33.646 view-source:53670 153ms ExtensionInfo {{ id = lmjegmlicamnimmfhcmpkclmigmmcbeh, name = Application Launcher for Drive (by Google) }}
                //2015-08-22 13:41:33.648 view-source:53670 155ms ExtensionInfo {{ id = mmfbcljfglbokpmkimbfghdkjmjhdgbg, name = Text }}
                //2015-08-22 13:41:33.650 view-source:53670 157ms ExtensionInfo {{ id = molncoemjfmpgdkbdlbjmhlcgniigdnf, name = Project Naptha }}
                //2015-08-22 13:41:33.652 view-source:53670 159ms ExtensionInfo {{ id = ogkcjmbhnfmlnielkjhedpcjomeaghda, name = WebGL Inspector }}
                //2015-08-22 13:41:33.653 view-source:53670 160ms ExtensionInfo {{ id = pkngagjebplcgimojegcakmnlggmcjlc, name = LBA Redux }}
                //2015-08-22 13:41:33.657 view-source:53670 164ms ExtensionInfo {{ id = ppmibgfeefcglejjlpeihfdimbkfbbnm, name = Change HTTP Request Header }}

                Action<string> __ChromeCaptureToFile_Application_sendMessage = delegate { };

                new { }.With(
                    async delegate
                    {
                        //  TypeError: chrome.management.getAll is not a function

                        Console.WriteLine("chrome.management.getAll");
                        var extensions = await chrome.management.getAll();

                        Console.WriteLine("chrome.management.getAll " + new { extensions.Length });
                        // https://developer.chrome.com/extensions/management#type-ExtensionInfo

                        //view-source:53670 69ms chrome.management.getAll
                        //2015-08-22 13:34:13.514 view-source:53670 89ms chrome.management.getAll {{ Length = 28 }}
                        //2015-08-22 13:34:13.518 view-source:53670 93ms ExtensionInfo {{ item = [object Object] }}

                        foreach (var item in extensions)
                        {
                            //Console.WriteLine("ExtensionInfo " + new { item });
                            //Console.WriteLine("ExtensionInfo " + new { item.id, item.name });

                            //2015-08-22 13:41:33.608 view-source:53670 114ms ExtensionInfo {{ id = fkgibadjpabiongmgoeomdbcefhabmah, name = ChromeCaptureToFile.Application.exe }}

                            // typeof(self) ?
                            if (item.name.StartsWith(typeof(Application).Assembly.GetName().Name))
                            {
                                var __item = item;

                                // will we also know when it reloads? do we have to reconnect then?

                                __ChromeCaptureToFile_Application_sendMessage = message =>
                                {
                                    Console.WriteLine("extension chrome.runtime.sendMessage " + new { message, __item.id, __item.name });

                                    chrome.runtime.sendMessage(item.id, message, null);
                                };

                                chrome.runtime.sendMessage(item.id, "extension to app!", null);


                                // extension chrome.runtime.connect done {{ name = , sender = null }}
                                chrome.runtime.connect(item.id).With(
                                    port =>
                                    {
                                        //Console.WriteLine("extension chrome.runtime.connect done " + new { port.name, port.sender.id });
                                        //Console.WriteLine("extension chrome.runtime.connect done " + new { port.name, port.sender });
                                        Console.WriteLine("extension chrome.runtime.connect done");




                                        #region extension: HopToChromeApp.VirtualOnCompleted
                                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp
                                        HopToChromeApp.VirtualOnCompleted = async (that, continuation) =>
                                       {
                                           // state 0 ? or state -1 ?
                                           Console.WriteLine("extension HopToChromeApp VirtualOnCompleted enter ");

                                           TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);

                                           // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}
                                           port.postMessage("do HopToChromeApp " + new { r.shadowstate.TypeName, r.shadowstate.state });


                                           // now send the jump instruction... will it make it?
                                           port.postMessage(r.shadowstate);
                                       };
                                        #endregion
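
                                        // in short: VirtualOnCompleted captures the pending continuation as a
                                        // ShadowIAsyncStateMachine (TypeName, state and any string fields), posts it
                                        // over the port, and the receiving side rebuilds an uninitialized state
                                        // machine of that type, restores the fields and calls MoveNext()
                                        // (see the port.onMessage listener below).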




                                        // is the app now able to send extension messages?

                                        port.onMessage.addListener(
                                            new Action<object>(
                                                (message) =>
                                                {
                                                    // 4847ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}



                                                    // extension  port.onMessage {{ message = from app hello to extension }}
                                                    // extension  port.onMessage {{ message = [object Object] }}

                                                    // look app sent a message to extension

                                                    // 191ms extension  port.onMessage {{ message = from app hello to extension, is_string = false, equals_typeofstring = false }}

                                                    // 219ms extension  port.onMessage {{ message = from app hello to extension, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                                                    //var expando_isstring = ScriptCoreLib.JavaScript.Runtime.Expando.Of(message).IsString;

                                                    // roslyn? wtf
                                                    //var is_string = message is string;
                                                    //var equals_typeofstring = message.GetType() == typeof(string);

                                                    //Console.WriteLine("extension  port.onMessage " + new { expando_isstring, is_string, equals_typeofstring });

                                                    // extension  port.onMessage {{ state = null, TypeName = null }}
                                                    if (message is string)
                                                    {
                                                        Console.WriteLine("extension  port.onMessage: " + message);

                                                        // Z:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\ShadowIAsyncStateMachine.cs
                                                        return;
                                                    }


                                                    #region IAsyncStateMachine
                                                    // casting from anonymous object.
                                                    var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                                                    // or constructor id?
                                                    Console.WriteLine("extension  port.onMessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });

                                                    // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                                                    //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                                                    //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                                                    //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                                                    #region xAsyncStateMachineType
                                                    var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                                                        xAsyncStateMachineTypeCandidate =>
                                                        {
                                                            // safety check 1

                                                            //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                                            var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                                            if (xisIAsyncStateMachine)
                                                            {
                                                                //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                                                return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                                            }

                                                            return false;
                                                        }
                                                    );
                                                    #endregion


                                                    var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                                                    var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                                                    var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                                                    #region 1__state
                                                    xAsyncStateMachineType.GetFields(
                                                      System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                                                      ).WithEach(
                                                       AsyncStateMachineSourceField =>
                                                       {

                                                           //Console.WriteLine(new { AsyncStateMachineSourceField });

                                                           if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                                                           {
                                                               AsyncStateMachineSourceField.SetValue(
                                                                   NewStateMachineI,
                                                                   xShadowIAsyncStateMachine.state
                                                                );
                                                           }


                                                       }
                                                  );
                                                    #endregion

                                                    NewStateMachineI.MoveNext();
                                                    #endregion

                                                }
                                            )
                                        );



                                        //chrome.tabs.Created += async tab =>
                                        //{
                                        //    port.postMessage("chrome.tabs.Created " + new { tab });

                                        //};

                                        //chrome.tabs.Updated += async (tabId, x, tab) =>
                                        //{
                                        //    //  Updated {{ i = 0, x = null, tab = null }}

                                        //    port.postMessage("chrome.tabs.Updated  " + new { tabId, x, tab });


                                        //};
                                    }
                                );
                            }
                        }

                    }
                    );

                //chrome.tabs.Created += async tab =>
                //{
                //    Console.WriteLine("chrome.tabs.Created " + new { tab });

                //};

                //chrome.tabs.Updated += async (tabId, x, tab) =>
                //{
                //    //  Updated {{ i = 0, x = null, tab = null }}

                //    Console.WriteLine("chrome.tabs.Updated  " + new { tabId, x, tab });


                //};



                return;
            }
            #endregion


            #region self_chrome_socket app
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("appwindow chrome.app.window.create, is that you?");

                    MessagePort appwindow_to_app = null;

                    // called by? 619  app:HopToChromeAppWindow
                    #region  appwindow Native.window.onmessage
                    Native.window.onmessage += e =>
                    {
                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow

                        // appwindow Native.window.onmessage {{ data = app to appwindow! }}

                        var message = e.data;

                        //Console.WriteLine("appwindow Native.window.onmessage " + new { e.data });


                        // extension  port.onMessage {{ message = from app hello to extension }}
                        //var expando_isstring = ScriptCoreLib.JavaScript.Runtime.Expando.Of(message).IsString;

                        // look app sent a message to extension
                        //Console.WriteLine("app  port.onMessage " + new { message });

                        if (message is string)
                        {
                            Console.WriteLine("appwindow    Native.window.onmessage: " + message);
                            //Console.WriteLine("appwindow    Native.window.onmessage: " + new { e.ports });

                            if (e.ports != null)
                                foreach (var port in e.ports)
                                {
                                    Console.WriteLine("appwindow    Native.window.onmessage " + new { port });

                                    appwindow_to_app = port;
                                }

                            //e.po

                            return;
                        }

                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp




                        // casting from anonymous object.
                        var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                        // or constructor id?
                        Console.WriteLine("appwindow     Native.window.onmessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });

                        // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                        //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                        //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                        #region xAsyncStateMachineType
                        var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                            xAsyncStateMachineTypeCandidate =>
                            {
                                // safety check 1

                                //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                if (xisIAsyncStateMachine)
                                {
                                    //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                    return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                }

                                return false;
                            }
                        );
                        #endregion


                        var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                        var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                        var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                        #region 1__state
                        xAsyncStateMachineType.GetFields(
                          System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                          ).WithEach(
                           AsyncStateMachineSourceField =>
                           {

                               //Console.WriteLine(new { AsyncStateMachineSourceField });

                               if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                               {
                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xShadowIAsyncStateMachine.state
                                    );
                               }

                               // X:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                               // field names/ tokens need to be encrypted like typeinfo.

                               // some do manual restore
                               // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                               // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                               // or, are we supposed to initialize a string value here?

                               var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                   f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                               );

                               if (xStringField != null)
                               {
                                   // once we are to go back to client. we need to reverse it?

                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xStringField.value
                                    );
                                   // next xml?
                                   // before lets send our strings back with the new state!
                                   // what about exceptions?
                               }
                           }
                      );
                        #endregion

                        NewStateMachineI.MoveNext();

                    };
                    #endregion


                    #region appwindow:HopToChromeApp
                    HopToChromeApp.VirtualOnCompleted = async (that, continuation) =>
                    {
                        // do we have the port to send back our portal warp?

                        // state 0 ? or state -1 ?
                        Console.WriteLine("appwindow HopToChromeApp VirtualOnCompleted enter " + new { appwindow_to_app });

                        //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                        //// async doesn't like ref?
                        TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);
                        // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}
                        appwindow_to_app.postMessage("do HopToChromeAppWindow " + new { r.shadowstate.TypeName, r.shadowstate.state });
                        // now send the jump instruction... will it make it?
                        appwindow_to_app.postMessage(r.shadowstate);
                    };
                    #endregion


                }
                else
                {
                    //Console.WriteLine("running as app");

                    // running as app {{ FullName = ChromeHybridCapture.Application, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null }}
                    //Console.WriteLine("running as app " + new { typeof(Application).Assembly.FullName });

                    // running as app {{ Name = ChromeHybridCapture.Application }}
                    Console.WriteLine("running as app " + new { typeof(Application).Assembly.GetName().Name } + " now reenable extension..");


                    #region app:appwindow_to_app
                    Action<object> appwindow_to_app = data =>
                    {
                        var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)data;

                        Console.WriteLine("app appwindow_to_app " + new { xShadowIAsyncStateMachine.TypeName });

                        #region xAsyncStateMachineType
                        var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                            xAsyncStateMachineTypeCandidate =>
                            {
                                // safety check 1

                                //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                if (xisIAsyncStateMachine)
                                {
                                    //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                    return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                }

                                return false;
                            }
                        );
                        #endregion

                        Console.WriteLine("app appwindow_to_app " + new { xAsyncStateMachineType });

                        var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                        var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                        var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                        #region 1__state
                        xAsyncStateMachineType.GetFields(
                          System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                          ).WithEach(
                           AsyncStateMachineSourceField =>
                           {

                               //Console.WriteLine(new { AsyncStateMachineSourceField });

                               if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                               {
                                   AsyncStateMachineSourceField.SetValue(
                                       NewStateMachineI,
                                       xShadowIAsyncStateMachine.state
                                    );
                               }

                               // X:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                               // field names/ tokens need to be encrypted like typeinfo.

                               // some do manual restore
                               // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                               // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                               // or, are we supposed to initialize a string value here?

                               var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                       f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                                   );

                               if (xStringField != null)
                               {
                                   // once we are to go back to client. we need to reverse it?

                                   AsyncStateMachineSourceField.SetValue(
                                           NewStateMachineI,
                                           xStringField.value
                                        );
                                   // next xml?
                                   // before lets send our strings back with the new state!
                                   // what about exceptions?
                               }
                           }
                      );
                        #endregion

                        NewStateMachineI.MoveNext();
                    };
                    #endregion


                    #region app:HopToChromeAppWindow
                    HopToChromeAppWindow.VirtualOnCompleted = async (that, continuation) =>
                    {
                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150824/webgliframebuffer

                        // state 0 ? or state -1 ?
                        Console.WriteLine("app HopToChromeAppWindow VirtualOnCompleted enter ");

                        #region outputWindow
                        if (that.window == null)
                        {
                            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow

                            if (outputWindow == null)
                            {
                                // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                                outputWindow = await chrome.app.window.create(
                                                       Native.document.location.pathname,

                                                       // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                                                       // this actually works, but we won't see the console in the app log..
                                                       options: new { hidden = true, alwaysOnTop = true }
                                                );

                                ////xappwindow.setAlwaysOnTop

                                // or can we stay hidden?
                                //that.window.show();

                                await outputWindow.contentWindow.async.onload;
                            }
                            // reuse the window...
                            that.window = outputWindow;
                        }
                        #endregion


                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                        // async doesn't like ref?
                        TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);
                        // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}



                        // Z:\jsc.svn\core\ScriptCoreLib\JavaScript\DOM\IWindow.postMessage.cs
                        // how do we use this thing?
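                        // the pattern: keep port1 on this side and listen on it,
                        // transfer port2 to the app window via postMessage; whatever the
                        // window posts back on port2 then arrives at port1.onmessage here.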
                        var c = new MessageChannel();

                        c.port1.onmessage += e =>
                        {
                            Console.WriteLine("app HopToChromeAppWindow MessageChannel onmessage " + new { e.data });

                            appwindow_to_app(e.data);
                        };

                        c.port1.start();
                        c.port2.start();


                        //                    15ms appwindow    Native.window.onmessage: {{ ports = [object MessagePort] }}
                        //2015-08-22 20:50:18.019 view-source:53702 17ms appwindow    Native.window.onmessage: {{ port = [object MessagePort] }}
                        that.window.contentWindow.postMessage("do HopToChromeAppWindow " + new { r.shadowstate.TypeName, r.shadowstate.state }, transfer: c.port2);

                        // now send the jump instruction... will it make it?
                        that.window.contentWindow.postMessage(r.shadowstate);
                    };
                    #endregion





                    #region app:ConnectExternal
                    chrome.runtime.ConnectExternal += port =>
                    {
                        // app chrome.runtime.ConnectExternal {{ name = , id = jadmeogmbokffpkdfeiemjplohfgkidd }}

                        //Console.WriteLine("app chrome.runtime.ConnectExternal " + new { port.name, port.sender.id });
                        Console.WriteLine("app chrome.runtime.ConnectExternal " + new { port.sender.id } + " now click launch!");

                        new chrome.Notification(title: "ChromeHybridCapture", message: "service connected. click launch").Clicked += delegate
                        {
                            // https://developer.chrome.com/apps/app_runtime

                            // management_api
                        };

                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hybrid

                        // should we now be able to hop to our tab?
                        // what about if we are in an app window?

                        port.onMessage.addListener(
                            new Action<object>(
                                (message) =>
                                {
                                    // extension  port.onMessage {{ message = from app hello to extension }}
                                    var expando_isstring = ScriptCoreLib.JavaScript.Runtime.Expando.Of(message).IsString;

                                    // look app sent a message to extension
                                    //Console.WriteLine("app  port.onMessage " + new { message });

                                    if (expando_isstring)
                                    {
                                        Console.WriteLine("app  port.onMessage: " + message);
                                        return;
                                    }


                                    #region IAsyncStateMachine
                                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp
                                    // casting from anonymous object.
                                    var xShadowIAsyncStateMachine = (TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine)message;

                                    // or constructor id?
                                    Console.WriteLine("app  port.onMessage " + new { xShadowIAsyncStateMachine.state, xShadowIAsyncStateMachine.TypeName });

                                    // 12468ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }}, expando_isstring = true, is_string = false, equals_typeofstring = false }}
                                    //2015-08-22 15:49:45.729 view-source:53670 12471ms extension  port.onMessage {{ message = do HopToChromeExtension {{ TypeName = <Namespace>.___ctor_b__4_9_d, state = 0 }} }}
                                    //2015-08-22 15:49:45.733 view-source:53670 12475ms extension  port.onMessage {{ message = [object Object], expando_isstring = false, is_string = false, equals_typeofstring = false }}
                                    //2015-08-22 15:49:45.737 view-source:53670 12479ms extension  port.onMessage {{ state = 0, TypeName = <Namespace>.___ctor_b__4_9_d }}


                                    #region xAsyncStateMachineType
                                    var xAsyncStateMachineType = typeof(Application).Assembly.GetTypes().FirstOrDefault(
                                        xAsyncStateMachineTypeCandidate =>
                                        {
                                            // safety check 1

                                            //Console.WriteLine(new { sw.ElapsedMilliseconds, item.FullName });

                                            var xisIAsyncStateMachine = typeof(IAsyncStateMachine).IsAssignableFrom(xAsyncStateMachineTypeCandidate);
                                            if (xisIAsyncStateMachine)
                                            {
                                                //Console.WriteLine(new { item.FullName, isIAsyncStateMachine });

                                                return xAsyncStateMachineTypeCandidate.FullName == xShadowIAsyncStateMachine.TypeName;
                                            }

                                            return false;
                                        }
                                    );
                                    #endregion


                                    var NewStateMachine = FormatterServices.GetUninitializedObject(xAsyncStateMachineType);
                                    var isIAsyncStateMachine = NewStateMachine is IAsyncStateMachine;

                                    var NewStateMachineI = (IAsyncStateMachine)NewStateMachine;

                                    #region 1__state
                                    xAsyncStateMachineType.GetFields(
                                      System.Reflection.BindingFlags.Public | System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance
                                      ).WithEach(
                                       AsyncStateMachineSourceField =>
                                       {

                                           //Console.WriteLine(new { AsyncStateMachineSourceField });

                                           if (AsyncStateMachineSourceField.Name.EndsWith("1__state"))
                                           {
                                               AsyncStateMachineSourceField.SetValue(
                                                   NewStateMachineI,
                                                   xShadowIAsyncStateMachine.state
                                                );
                                           }


                                           // z:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\CLRWebServiceInvoke.cs
                                           // field names/ tokens need to be encrypted like typeinfo.

                                           // some do manual restore
                                           // X:\jsc.svn\examples\javascript\chrome\extensions\ChromeExtensionHopToTabThenIFrame\ChromeExtensionHopToTabThenIFrame\Application.cs
                                           // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                                           // or, are we supposed to initialize a string value here?

                                           var xStringField = TestSwitchToServiceContextAsync.ArrayListExtensions.AsEnumerable(xShadowIAsyncStateMachine.StringFields).FirstOrDefault(
                                               f => DecoratedString(f.FieldName) == DecoratedString(AsyncStateMachineSourceField.Name)
                                           );

                                           if (xStringField != null)
                                           {
                                               // once we are to go back to client. we need to reverse it?

                                               AsyncStateMachineSourceField.SetValue(
                                                   NewStateMachineI,
                                                   xStringField.value
                                                );
                                               // next xml?
                                               // before lets send our strings back with the new state!
                                               // what about exceptions?
                                           }
                                       }
                                  );
                                    #endregion

                                    NewStateMachineI.MoveNext();
                                    #endregion
                                }
                            )
                        );



                        //port.postMessage(
                        //    new
                        //    {
                        //        text = "from app hello to extension"
                        //    }
                        //);

                        port.postMessage("from app hello to extension, click launch?");

                        // enable
                        //await default(HopToChromeExtension);

                        #region HopToChromeExtension
                        HopToChromeExtension.VirtualOnCompleted = async (that, continuation) =>
                        {
                            // state 0 ? or state -1 ?
                            Console.WriteLine("app HopToChromeExtension VirtualOnCompleted enter ");

                            // where is it defined?
                            // z:\jsc.svn\examples\javascript\async\Test\TestSwitchToServiceContextAsync\TestSwitchToServiceContextAsync\ShadowIAsyncStateMachine.cs

                            // async doesn't like ref?
                            TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableContinuation r = TestSwitchToServiceContextAsync.ShadowIAsyncStateMachine.ResumeableFromContinuation(continuation);

                            // 29035ms extension  port.onMessage {{ message = do HopToChromeExtension }}
                            port.postMessage("do HopToChromeExtension " + new { r.shadowstate.TypeName, r.shadowstate.state });


                            // now send the jump instruction... will it make it?
                            port.postMessage(r.shadowstate);


                            // how would we know to continue from current continuation?
                            // or are we fine to rebuild the scope if we jump back?
                        };
                        #endregion




                    };
                    #endregion

                    chrome.runtime.MessageExternal += (message, sender, sendResponse) =>
                    {
                        // was the extension able to pass us a message?

                        //Console.WriteLine("chrome.runtime.MessageExternal " + new { message, sender, sendResponse });
                        Console.WriteLine("app chrome.runtime.MessageExternal " + new { message });

                        // app chrome.runtime.MessageExternal {{ message = extension to app! }}

                        // remember the connection to enable hop to extension?
                    };

                    //Action AtLaunch = delegate { };

                    #region  Launched
                    // can the extension launch us too?
                        // either the user launches us with a click, or we get launched from the extension?
                    chrome.app.runtime.Launched += async delegate
                    {
                        // state 0 ? or state -1 ?

                        Console.WriteLine("app chrome.app.runtime.Launched before delay");

                        await Task.Delay(1);

                        nexttake:
                        //Console.WriteLine("nexttake " + new { index });
                        Console.WriteLine("nexttake");

                        // using IDisposable ?
                        await default(HopToChromeExtension);
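
                        // awaiting the default(HopToChromeExtension) marker suspends this state machine;
                        // HopToChromeExtension.VirtualOnCompleted ships its TypeName/state over the port,
                        // and the extension side resumes this same method from here via MoveNext().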

                        #region HopToChromeExtension
                        // now this would be cool if it worked?
                        Console.WriteLine("app to extension chrome.app.runtime.Launched, only state was sent over?");

                        // can we do our thing and jump back with the capture now?

                        // lets create a tab for us to jump into..
                        //var tab = await chrome.tabs.create(new { url = "http://example.com" });

                        // https://developer.chrome.com/extensions/activeTab

                        // implicit/auto context hop?
                        var tabsw = Stopwatch.StartNew();

                        // which window station?
                        var tabwindow = await chrome.windows.create(new { state = "fullscreen", url = "http://www.flightradar24.com/59.15,23.86/9" });
                        //var tabwindow = await chrome.windows.create(new { state = "fullscreen", url = "http://example.com" });

                        //var tab = await chrome.tabs.create(new { url = "http://www.flightradar24.com/59.05,24.14/8" });
                        // would we need to hop into the tab to inspect data?
                        Console.WriteLine("extension chrome.tabs.create done. about to capture... " + new { tabsw.ElapsedMilliseconds });


                        // how do we know a tab is loaded?
                        // content takes a while to load, doesn't it...
                        await Task.Delay(7000);

                        // Error: Invocation of form tabs.captureVisibleTab(object, null, function) doesn't match definition tabs.captureVisibleTab(optional integer windowId, optional object options, function callback)

                        //var captureVisibleTab = await tab.windowId.captureVisibleTab(null);

                        // Unchecked runtime.lastError while running tabs.captureVisibleTab: Failed to capture tab: unknown error

                        retry_captureVisibleTab:
                        var captureVisibleTab = await tabwindow.id.captureVisibleTab(options: new { format = "png" });


                        if (captureVisibleTab == null)
                        {
                            await Task.Delay(500);
                            Console.WriteLine("extension chrome.tabs.create done. about to capture... error");
                            goto retry_captureVisibleTab;
                        }

                        // extension captureVisibleTab {{ Length = 47743 }}
                        Console.WriteLine("extension captureVisibleTab " + new { captureVisibleTab.Length });

                        await Task.Delay(500);

                        // or just unload the window?
                        //await tab.id.remove();
                        await tabwindow.id.remove();


                        Console.WriteLine("extension to app chrome.tabs.create removed, jump back?");
                        #endregion

                        await default(HopToChromeApp);

                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeapp
                        Console.WriteLine("extension to app chrome.tabs.create removed, jump back done! did the strings make it?");
                        if (captureVisibleTab == null)
                            Console.WriteLine("app chrome.tabs.create removed, jump back done! did the strings make it? no");
                        else
                            Console.WriteLine("app chrome.tabs.create removed, jump back done! did the strings make it? yes " + new { captureVisibleTab.Length });



                        Console.WriteLine("app chrome.fileSystem.chooseEntry");

                        // not available in background?
                        // TypeError: Cannot read property 'chooseEntry' of undefined
                        // Unchecked runtime.lastError while running fileSystem.chooseEntry: Invalid calling page. This function can't be called from a background page.
                        //var dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                        // can we jump to extension to open our tab?



                        //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150822/hoptochromeappwindow
                        // defined at?
                        await default(HopToChromeAppWindow);

                        #region HopToChromeAppWindow
                        if (captureVisibleTab == null)
                            Console.WriteLine("appwindow chrome.tabs.create removed, jump back done! did the strings make it? no");
                        else
                            Console.WriteLine("appwindow chrome.tabs.create removed, jump back done! did the strings make it? yes " + new { captureVisibleTab.Length });


                        // 182ms appwindow chrome.fileSystem.chooseEntry
                        Console.WriteLine("appwindow chrome.fileSystem.chooseEntry");

                        // Unchecked runtime.lastError while running fileSystem.chooseEntry: User cancelled

                        Native.body.style.overflow = IStyle.OverflowEnum.auto;
                        Native.body.Clear();

                        chrome.app.window.current().show();

                        var icaptureVisibleTabFull = await new IHTMLImage { src = captureVisibleTab }.async.oncomplete;
                        ;
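                        // captureVisibleTab yields a data:image/png;base64 URI, so it can be assigned to
                        // an image src directly; awaiting oncomplete guarantees width/height are populated
                        // before the crop below.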

                        // 200, 80
                        // 1600, 880

                        // 1920x1080
                        var focusw = icaptureVisibleTabFull.width - 320;
                        var focush = icaptureVisibleTabFull.height - 200;

                        new IHTMLPre { new { focusw, focush } }.AttachToDocument();

                        var focus = new CanvasRenderingContext2D(focusw, focush);

                        focus.drawImage(icaptureVisibleTabFull, 200, 80, focusw, focush, 0, 0, focusw, focush);
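                        // drawImage(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight):
                        // the call above copies the region starting at (200, 80) of the full screenshot
                        // into the new canvas at its origin, without scaling.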



                        // this is a thumbnail
                        var icaptureVisibleTab = new IHTMLImage { src = captureVisibleTab }.AttachToDocument();

                        new IStyle(icaptureVisibleTab)
                        {
                            width = "120px",
                            border = "1px dashed blue"
                        };



                        Native.document.documentElement.style.overflow = IStyle.OverflowEnum.auto;

                        if (dir == null)
                        {
                            await new IHTMLButton { "openDirectory" }.AttachToDocument().async.onclick;

                            dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                        }

                        var file = index.ToString().PadLeft(5, '0') + ".png";
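                        // PadLeft keeps the captured frames sortable by name, e.g. index 7 -> "00007.png".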

                        new IHTMLPre { "WriteAllBytes... " + new { file } }.AttachToDocument();

                        //await dir.WriteAllBytes("0001.png", icaptureVisibleTabFull);

                        // chrome://blob-internals/
                        // those blobs won't go away...
                        // GC kicks in at 36 it seems.
                        await dir.WriteAllBytes(file, focus);
                        focus = null;

                        //                        [Window Title]
                        //Location is not available

                        //[Content]
                        //R:\ is not accessible.

                        //Logon failure: unknown user name or bad password.


                        //[OK]

                        //                        ---------------------------
                        //                        Restoring Network Connections
                        //---------------------------
                        //An error occurred while reconnecting R:
                        //                            to
                        //\\192.168.1.12\x$
                        //Microsoft Windows Network: The local device name is already in use.


                        //This connection has not been restored.
                        //---------------------------
                        //OK
                        //-------------------------- -


                        new IHTMLPre { "WriteAllBytes... done " + new { file } }.AttachToDocument();

                        index++;
                        #endregion




                        //Console.WriteLine("app chrome.app.window content loaded!");


                        //Console.WriteLine("app chrome.app.runtime.Launched ready to exit");
                        //await Task.Delay(3000);

                        //// won't work?
                        //w.close();

                        ////1343ms app chrome.runtime.MessageExternal {{ message = extension to app! }}
                        ////2015-08-22 15:18:44.738 view-source:53670 1357ms app chrome.runtime.ConnectExternal {{ id = jadmeogmbokffpkdfeiemjplohfgkidd }}
                        ////2015-08-22 15:18:52.314 view-source:53670 8933ms app chrome.app.runtime.Launched
                        ////2015-08-22 15:18:52.342 view-source:53670 8961ms app chrome.app.runtime.Launched exit
                        ////2015-08-22 15:18:52.348 view-source:53670 8967ms app  port.onMessage {{ message = chrome.tabs.Created {{ tab = [object Object] }} }}
                        ////2015-08-22 15:18:52.652 view-source:53670 9271ms app  port.onMessage {{ message = chrome.tabs.Updated  {{ tabId = 419, x = [object Object], tab = [object Object] }} }}
                        ////2015-08-22 15:18:52.690 view-source:53670 9308ms app  port.onMessage {{ message = chrome.tabs.Updated  {{ tabId = 419, x = [object Object], tab = [object Object] }} }}

                        //Console.WriteLine("app chrome.app.runtime.Launched exit");


                        // or tcp?
                        //await new IHTMLButton { "next take" }.AttachToDocument().async.onclick;


                        // var countdown = Stopwatch.StartNew();

                        //Error CS1929  'int' does not contain a definition for 'GetAwaiter' and the best extension method overload 'IXMLHttpRequestAsyncExtensions.GetAwaiter(IXMLHttpRequest)' requires a receiver of type 'IXMLHttpRequest'  ChromeHybridCapture Z:\jsc.svn\examples\javascript\chrome\hybrid\ChromeHybridCapture\ChromeHybridCapture\Application.cs 1141    IntelliSense

                        // script: error JSC1000: No implementation found for this native method, please implement [static System.TimeSpan.op_Subtraction(System.TimeSpan, System.TimeSpan)]

                        // TypeError: Cannot set property 'countdown' of null

                        //  Cannot set property 'sw' of null
                        countdown = Stopwatch.StartNew();

                        new IHTMLPre { () => "close to abort... " + countdown.ElapsedMilliseconds }.AttachToDocument();

                        // GC?
                        await Task.Delay(2000);

                        //await Task.WhenAny(
                        //      new IHTMLButton {
                        //          "click next take or wait "
                        //      }.AttachToDocument().async.onclick,
                        //      Task.Delay(1000)
                        //  );


                        //await
                        //  new IHTMLButton {
                        //          "click next take "
                        //  }.AttachToDocument().async.onclick;


                        new IHTMLPre { "preparing..." }.AttachToDocument();

                        Console.WriteLine("appwindow: appwindow to app");

                        await default(HopToChromeApp);

                        Console.WriteLine("app: appwindow to app");

                        goto nexttake;



                    };
                    #endregion


                    return;
                }
            }
            #endregion


            // running as regular web page?

            Console.WriteLine("running as content?");

            // were we loaded into chrome.app.window?


            //new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            // {
            //     var dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

            // };

        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            FormStyler.AtFormCreated = FormStylerLikeFloat.LikeFloat;

            // http://html5doctor.com/drag-and-drop-to-server/

            #region ondrop
            Native.document.body.ondragover +=
                evt =>
                {
                    evt.stopPropagation();
                    evt.preventDefault();
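                    // calling preventDefault during dragover is what marks the body as a valid drop
                    // target; without it the browser never raises the drop event handled below.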

                    evt.dataTransfer.dropEffect = "copy"; // Explicitly show this is a copy.


                    page.Header.style.color = JSColor.Green;


                    var types = evt.dataTransfer.types == null ? 0 : evt.dataTransfer.types.Length;

                    if (evt.dataTransfer.types != null)
                        foreach (var type in evt.dataTransfer.types.AsEnumerable())
                        {
                            Console.WriteLine(
                                new { type }
                                );
                        }

                    var items = evt.dataTransfer.items == null ? 0u : evt.dataTransfer.items.length;
                    var files = evt.dataTransfer.files == null ? 0u : evt.dataTransfer.files.length;



                    Console.WriteLine("ondragover: " +
                        new
                        {

                            types,
                            items,
                            files
                        }
                    );
                };


            Native.document.body.ondragleave +=
                delegate
                {
                    page.Header.style.color = JSColor.None;
                };

            #region DetectCanvasFromBytesExperiment
            Action<Form, WebBrowser, string, string, long> DetectCanvasFromBytesExperiment =
                (ff, web, ContentValue, src, ContentBytesLength) =>
                {
                    web.Navigated +=
                           async delegate
                           {
                               if (ContentValue != "png.png")
                                   return;

                               // X:\jsc.svn\examples\javascript\canvas\CanvasFromBytes\CanvasFromBytes\Application.cs
                               //Console.WriteLine("interesting, is it one of ours? " + new { ContentValue });
                               ff.Text = "interesting, is it one of ours? " + new { ContentValue };

                               var csrci = new IHTMLImage { src = src };

                               //await csrci.async.onlo
                               await csrci;

                               if (csrci.width != csrci.height)
                                   return;


                               var w = csrci.width;


                               // do the reverse
                               var z = new CanvasRenderingContext2D(w, w);
                               z.drawImage(csrci, 0, 0, w, w);
                               //z.canvas.AttachToDocument();

                               // what are the bytes?

                               var zbytes = z.bytes;

                               ff.Text = "will decode " + new { zbytes.Length, ContentBytesLength }.ToString();

                               #region decode
                               var decodebytes = await Task.Factory.StartNew(
                                   new { zbytes },
                                   scope =>
                                   {
                                       // Native.Console.WriteLine += ?

                                       // { Length = 2053956, u4 = 3c68746d } 

                                       Console.WriteLine(
                                        new
                                        {
                                            scope.zbytes.Length,

                                            u4 = new[] { 
                                            scope.zbytes[0 * 4 + 0],
                                            scope.zbytes[0 * 4 +1],
                                            scope.zbytes[0 * 4 +2],
                                            scope.zbytes[0 * 4 +3]
                                        }.ToHexString()
                                        }
                                       );


                                       // Uncaught Error: InvalidOperationException: { MethodToken = dAAABv_a4OTKgLfLC20SaJA } function is not available at { href =

                                       var wwbytes = new byte[scope.zbytes.Length / 4];
                                       var wi = 0;

                                       for (int i = 0; i < scope.zbytes.Length; i += 4)
                                       {
                                            // that would be the red channel
                                           //wwbytes[wi] = scope.zbytes[i];

                                           // bet we need alpha
                                           wwbytes[wi] = scope.zbytes[i + 3];
                                           wi++;
                                       }
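                                        // canvas pixel data is RGBA, 4 bytes per pixel; the payload byte was
                                        // stored in the alpha channel, so offset i + 3 recovers one byte per pixel.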



                                       return wwbytes;
                                   }
                               );
                               #endregion


                               var html = Encoding.UTF8.GetString(decodebytes);

                               ff.Text = "decoded " + new { html.Length, ContentBytesLength }.ToString();

                               //Console.WriteLine(new { html });


                               // um hide old data.
                               web.Hide();

                               var xweb = new WebBrowser { Dock = DockStyle.Fill };
                               xweb.AttachTo(ff);
                               xweb.DocumentText = html;
                               ff.Text = "!decoded " + new { html.Length, ContentBytesLength }.ToString();
                           };

                };
            #endregion


            Native.document.body.ondrop +=
                evt =>
                {
                    //if (evt.dataTransfer == null)
                    //    return;

                    var types = evt.dataTransfer.types == null ? 0 : evt.dataTransfer.types.Length;

                    var items = evt.dataTransfer.items == null ? 0u : evt.dataTransfer.items.length;
                    var files = evt.dataTransfer.files == null ? 0u : evt.dataTransfer.files.length;


                    Console.WriteLine("ondrop: " +
                        new
                        {

                            types,
                            items,
                            files
                        }
                    );


                    page.Header.style.color = JSColor.None;


                    //var xfiles = evt.dataTransfer.files.AsEnumerable().Concat(
                    //    from x in evt.dataTransfer.items.AsEnumerable()
                    //    let f = x.getAsFile()
                    //    where f != null
                    //    select f
                    //);

                    #region DataTable
                    if (evt.dataTransfer.items != null)
                    {
                        // X:\jsc.svn\examples\javascript\DragDataTableIntoCSVFile\DragDataTableIntoCSVFile\Application.cs

                        evt.dataTransfer.items.AsEnumerable().Where(
                            x =>

                                x.type.ToLower() ==

                                // let jsc type system sort it out?
                                // how much reflection does jsc give us nowadays?
                                typeof(DataTable).Name.ToLower()

                        ).WithEach(
                            async x =>
                            {
                                // http://www.whatwg.org/specs/web-apps/current-work/multipage/dnd.html#dfnReturnLink-0
                                var DataTable_xml = await x.getAsString();

                                var DataTable = StringConversionsForDataTable.ConvertFromString(DataTable_xml);

                                var ff = new Form { Text = new { typeof(DataTable).Name }.ToString() };

                                var g = new DataGridView { DataSource = DataTable, Dock = DockStyle.Fill };

                                ff.Controls.Add(g);


                                ff.Show();

                            }
                        );
                    }
                    #endregion

                    #region files
                    evt.dataTransfer.files.AsEnumerable().WithEachIndex(
                        (f, index) =>
                        {
                            Console.WriteLine(
                                new
                                {

                                    f.name,
                                    f.size,
                                    f.lastModifiedDate
                                }
                            );

                            var ff = new Form();
                            ff.PopupInsteadOfClosing(HandleFormClosing: false);



                            ff.Text = new { f.type, f.name, f.size }.ToString();


                            ff.Show();

                            ff.MoveTo(
                                evt.CursorX + 32 * index,
                                evt.CursorY + 24 * index
                            );

                            var fc = ff.GetHTMLTargetContainer();

                            fc.title = ff.Text;

                            #region image
                            var i = default(IHTMLImage);

                            if (f.type.StartsWith("image/"))
                            {
                                // um would we have a timing issue here?
                                f.ToDataURLAsync(
                                    src =>
                                    {
                                        i = new IHTMLImage { src = src }.AttachTo(fc);
                                        i.style.width = "100%";

                                        i.InvokeOnComplete(
                                            delegate
                                            {

                                                ff.ClientSize = new System.Drawing.Size(
                                                    // keep it reasonable!
                                                    i.width.Min(600),
                                                    i.height.Min(400)
                                                );

                                            }
                                        );
                                    }
                                );
                            }
                            #endregion

                            // http://html5doctor.com/drag-and-drop-to-server/

#if FUTURE
                            service.XUpload(f, delegate { });
#endif


                            var d = new FormData();

                            d.append("foo", f, f.name);

                            var xhr = new IXMLHttpRequest();

                            xhr.open(ScriptCoreLib.Shared.HTTPMethodEnum.POST, "/upload");

                            #region InvokeOnComplete
                            xhr.InvokeOnComplete(
                                delegate
                                {
                                    Console.WriteLine("upload complete!");

                                    SystemSounds.Beep.Play();

                                    //Console.Beep();
                                    XElement.Parse(xhr.responseText).Elements("ContentKey").WithEach(
                                        ContentKey =>
                                        {
                                            var __ContentKey = (Table1_ContentKey)int.Parse(ContentKey.Value);

                                            var web = new WebBrowser { Dock = DockStyle.Fill };

                                            web.Hide();
                                            web.AttachTo(ff);


                                            var src = "/io/" + ContentKey.Value;

                                            if (i == null)
                                            {
                                                web.Show();
                                            }
                                            else
                                            {
                                                web.Navigated +=
                                                    delegate
                                                    {
                                                        i.Orphanize();
                                                        web.Show();
                                                    };
                                            }


                                            // "X:\jsc.svn\examples\javascript\canvas\CanvasFromBytes\png.png"
                                            DetectCanvasFromBytesExperiment(
                                                ff,
                                                web,
                                                 f.name,
                                                src,
                                               (long)f.size
                                            );

                                            web.Navigate(src);


                                            //if (i != null)
                                            //{
                                            //    i.src = src;
                                            //}

                                            __ContentKey.SetLeft(ff.Left);
                                            __ContentKey.SetTop(ff.Top);

                                            ff.LocationChanged +=
                                                delegate
                                                {
                                                    __ContentKey.SetLeft(ff.Left);
                                                    __ContentKey.SetTop(ff.Top);
                                                };

                                            ff.SizeChanged +=
                                                delegate
                                                {
                                                    __ContentKey.SetWidth(ff.Width);
                                                    __ContentKey.SetHeight(ff.Height);
                                                };

                                            ff.FormClosing +=
                                                delegate
                                                {
                                                    __ContentKey
                                                        .Delete();
                                                };


                                            #region onmousewheel
                                            ff.GetHTMLTarget().With(
                                                ffh =>
                                                {
                                                    dynamic ffhs = ffh.style;
                                                    // http://css-infos.net/property/-webkit-transition
                                                    //ffhs.webkitTransition = "webkitTransform 0.3s linear";

                                                    ffh.onmousewheel +=
                                                        e =>
                                                        {
                                                            e.preventDefault();
                                                            e.stopPropagation();


                                                            if (e.WheelDirection > 0)
                                                            {
                                                                ff.Width = (int)(ff.Width * 1.1);
                                                                ff.Height = (int)(ff.Height * 1.1);
                                                            }
                                                            else
                                                            {
                                                                ff.Width = (int)(ff.Width * 0.9);
                                                                ff.Height = (int)(ff.Height * 0.9);
                                                            }

                                                        };

                                                }
                                            );
                                            #endregion

                                        }
                                    );
                                }
                            );
                            #endregion


                            //------WebKitFormBoundaryDmGHAZzeMBbcD5mu
                            //Content-Disposition: form-data; name="foo"; filename="FlashHeatZeeker.UnitPedControl.ApplicationSprite.swf"
                            //Content-Type: application/x-shockwave-flash


                            //------WebKitFormBoundaryDmGHAZzeMBbcD5mu--

                            Console.WriteLine("before upload...");
                            xhr.send(d);
                        }
                    );
                    #endregion


                    // let's disable other handlers
                    //evt.dataTransfer = null;

                    evt.stopPropagation();
                    evt.stopImmediatePropagation();

                    evt.preventDefault();
                };
            #endregion

            #region restore
            {
                var index = 0;

                default(Table1_ContentKey).WithEach(
                    (__ContentKey, ContentBytesLength, ContentValue, Left, Top, Width, Height) =>
                    {

                        var ff = new Form();
                        ff.PopupInsteadOfClosing(HandleFormClosing: false);


                        ff.Text = new { __ContentKey, ContentValue, ContentBytesLength }.ToString();


                        ff.Show();

                        if (Left > 0)
                            ff.MoveTo(
                                Left,
                                Top
                            );
                        else

                            ff.MoveBy(
                                32 * index,
                                24 * index
                            );

                        index++;

                        #region onmousewheel
                        ff.GetHTMLTarget().With(
                            ffh =>
                            {
                                dynamic ffhs = ffh.style;
                                // http://css-infos.net/property/-webkit-transition
                                //ffhs.webkitTransition = "webkitTransform 0.3s linear";

                                ffh.onmousewheel +=
                                    e =>
                                    {
                                        e.preventDefault();
                                        e.stopPropagation();

                                        if (e.WheelDirection > 0)
                                        {
                                            ff.Width = (int)(ff.Width * 1.1);
                                            ff.Height = (int)(ff.Height * 1.1);
                                        }
                                        else
                                        {
                                            ff.Width = (int)(ff.Width * 0.9);
                                            ff.Height = (int)(ff.Height * 0.9);
                                        }

                                    };

                            }
                        );
                        #endregion



                        //var fc = ff.GetHTMLTargetContainer();
                        var src = "/io/" + __ContentKey;

                        //var i = new IHTMLImage { src = src }.AttachTo(fc);
                        //i.style.width = "100%";

                        var web = new WebBrowser { Dock = DockStyle.Fill };

                        web.AttachTo(ff);

                        //script: error JSC1000: No implementation found for this native method, please implement [System.Windows.Forms.WebBrowser.add_DocumentCompleted(System.Windows.Forms.WebBrowserDocumentCompletedEventHandler)]
                        //script: warning JSC1000: Did you reference ScriptCoreLib via IAssemblyReferenceToken?
                        //script: error JSC1000: error at DropFileIntoSQLite.Application+<>c__DisplayClass2e.<.ctor>b__15,
                        // assembly: V:\DropFileIntoSQLite.Application.exe
                        // type: DropFileIntoSQLite.Application+<>c__DisplayClass2e, DropFileIntoSQLite.Application, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null

                        //web.DocumentCompleted +=
                        //    delegate
                        //    {



                        //};

                        DetectCanvasFromBytesExperiment(
                            ff,
                            web,
                            ContentValue,
                            src,
                            ContentBytesLength
                        );


                        web.Navigate(src);

                        if (Width > 0)
                            ff.SizeTo(
                               Width,
                               Height
                           );
                        //else
                        //    i.InvokeOnComplete(
                        //        delegate
                        //        {


                        //            ff.ClientSize = new System.Drawing.Size(i.width, i.height);

                        //        }
                        //    );

                        ff.LocationChanged +=
                            delegate
                            {
                                __ContentKey.SetLeft(ff.Left);
                                __ContentKey.SetTop(ff.Top);
                            };

                        ff.SizeChanged +=
                            delegate
                            {
                                __ContentKey.SetWidth(ff.Width);
                                __ContentKey.SetHeight(ff.Height);
                            };

                        ff.FormClosing +=
                            delegate
                            {
                                __ContentKey.Delete();
                            };
                    }
                );
            }
            #endregion



            // can we write about this?
            #region bookmark launcher
            var href = @"javascript:
((function(h,i)
{ 
    

    var a=-1, 
    b='onreadystatechange', 
    c=document.getElementsByTagName('HEAD')[0], 
    d, 
    e, 
    f, 
    g=c.childNodes, 
    d; 

    e=document.createElement('base');   
    e.href='%%';
    c.appendChild(e);     

    d = function () 
    {  
        next: while (1)  
        {   
            a++;     
            if (a ==h.length)   
            {   
                i();    
                return;   
            }     

            /*
            for (f=0;f<g.length;f++)   
            {   
                var v =g[f];    
                var w =h[a];       

                if (v.nodeName =='SCRIPT')    
                    if (v.src ==w || v.src.substr(v.src.length - w.length - 1,w.length + 1) =='/' + w)    
                        continue next;   
            } */      
            e=document.createElement('SCRIPT');   
            e.src='%%' + h[a];     
            e[b in e?b:'onload']=  function()    
            {       
                var f=e.readyState;    
                if(f==null||f=='loaded'||f=='complete')     
                    d();    
            };  
 
        c.appendChild(e);     
        return;  
    } 
}; 
d();

}

)(['view-source'],function(){}))".Replace("%%", Native.Document.location + "");
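            // The bookmarklet above appends a <base> element pointing at this page, then loads each
            // script name in the h array sequentially, waiting for onload/onreadystatechange before
            // moving on, and finally invokes the i() callback; "%%" is replaced with the current
            // document location before the link is handed out via dragstart below.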

            page.Header.draggable = true;
            page.Header.ondragstart +=
                e =>
                {
                    e.dataTransfer.setData("text/uri-list", href);
                };


            IStyleSheet.Default["#Header:hover"].style.color = "red";
            IStyleSheet.Default["#Header:hover"].style.cursor = IStyle.CursorEnum.pointer;

            #endregion


            new About().Show();
        }
        // don't we have our own encoder now?



        // X:\jsc.svn\core\ScriptCoreLib\Shared\BCLImplementation\System\Tuple.cs

        //no implementation for System.Tuple b6efdcc2-6386-375e-84aa-6732b6518b3f
        //script: error JSC1000: No implementation found for this native method, please implement [static System.Tuple.Create(System.IProgress`1[[System.Int32, mscorlib, Version=4.0.0.0, Culture=ne
        //script: warning JSC1000: Did you reference ScriptCoreLib via IAssemblyReferenceToken?
        //script: error JSC1000: error at GIFEncoderWorker..ctor,
        // assembly: U:\jsgif.Application.exe
        // type: GIFEncoderWorker, jsgif.Application, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
        // offset: 0x0049
        //  method:Void .ctor(Int32, Int32, Int32, Int32, System.Collections.Generic.IEnumerable`1[System.Byte[]], System.Action`1[System.Int32])
        //*** Compiler cannot continue... press enter to quit.


        // based on http://antimatter15.com/wp/2010/07/javascript-to-animated-gif/

        //script: error JSC1000: No implementation found for this native method, please implement [static System.Tuple.Create(System.IProgress`1[[System.Int32,
        //script: warning JSC1000: Did you reference ScriptCoreLib via IAssemblyReferenceToken?
        //script: error JSC1000: error at GIFEncoderWorker..ctor,
        //assembly: V:\jsgif.Application.exe
        //type: GIFEncoderWorker, jsgif.Application, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
        //offset: 0x0049
        // method:Void .ctor(Int32, Int32, Int32, Int32, System.Collections.Generic.IEnumerable`1[System.Byte[]], System.Action`1[System.Int32])
        //*** Compiler cannot continue... press enter to quit.



        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IDefault page)
        {
            // https://bugzilla.mozilla.org/show_bug.cgi?id=709490
            // http://stackoverflow.com/questions/7844886/using-webgl-from-inside-a-web-worker-is-it-possible-how

            const int x = 640;
            const int y = 480;
            // do we need an automatic decomposer?

            var context = new CanvasRenderingContext2D(x, y);

            // needs to be in dom? no
            var canvas = context.canvas;


            context.fillStyle = "rgb(255,255,255)";
            context.fillRect(0, 0, canvas.width, canvas.height);

			// rebuild redux?
            var my_gradient = context.createLinearGradient(0, 0, 16, 0);
            my_gradient.addColorStop(0, "blue");
            my_gradient.addColorStop(1, "white");
            //context.fillStyle = my_gradient; //"rgb(255,255,255)";  
            context.fillStyle = "rgb(255,255,255)";  
            context.fillRect(0, 0, 16, canvas.height); //GIF can't do transparent so do white

            Action yield = delegate { };

            var r = new Random();

            Func<byte> random = r.NextByte;


            //encoder.addFrame(context);


            // 4 : 3sec
            // 8 : 0.00:00:06 
            // 16 : 0.00:00:18 

            var frames = new List<byte[]>();

            // what's the difference? should jsc switch to the typed array yet?
            //var frames = new List<byte[]>();

            for (int i = 0; i < 16; i++)
            {
                context.fillStyle = "rgb(" + random() + "," + random() + "," + random() + ")";
                context.fillRect(
                    random() / 4,
                    random() / 4,
                    32 + random(),
                    32 + random()
                );

                var data = context.getImageData().data;

                //Uint8Array
                // { data = [object Uint8ClampedArray] } 
                Console.WriteLine(new { data });

                frames.Add(data);


            }
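            // each frame keeps the full RGBA buffer: 640 * 480 * 4 = 1,228,800 bytes (~1.2 MB),
            // so 16 frames hold roughly 20 MB of pixel data until encoding is done.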

            yield += delegate
            {

            };

            new IHTMLButton { innerText = "encode!" }.AttachToDocument().WhenClicked(
                delegate
                {
                    var e = new Stopwatch();
                    e.Start();


                    Console.WriteLine("new encoder");

                    var encoder = new GIFEncoder();
                    encoder.setSize(x, y);
                    encoder.setRepeat(0); //auto-loop
                    encoder.setDelay(1000 / 15);
                    encoder.start();
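                    // assuming the antimatter15 jsgif semantics referenced above: setDelay takes
                    // milliseconds per frame (1000 / 15 ~ 66 ms, roughly 15 fps) and setRepeat(0)
                    // makes the finished GIF loop forever.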

                    foreach (var data in frames)
                    {
                        Console.WriteLine("addFrame");
                        encoder.addFrame((Uint8ClampedArray)(object)data, true);
                    }

                    Console.WriteLine("finish");

                    encoder.finish();

                    var bytes = Encoding.ASCII.GetBytes(encoder.stream().getData());
                    var src = "data:image/gif;base64," + Convert.ToBase64String(bytes);

                    Console.WriteLine(e.Elapsed);

                    new IHTMLImage { src = src }.AttachToDocument();
                }
            );

            new IHTMLButton { innerText = "encode via worker" }.AttachToDocument().WhenClicked(
                 delegate
                 {
                     var e = new Stopwatch();
                     e.Start();

                     var w = new Worker(
                         scope =>
                         {
                             Console.WriteLine("new encoder");

                             var encoder = new GIFEncoder();
                             encoder.setSize(x, y);
                             encoder.setRepeat(0); //auto-loop
                             encoder.setDelay(1000 / 15);
                             encoder.start();

                             scope.onmessage +=
                                 ee =>
                                 {
                                     dynamic xdata = ee.data;
                                     string message = xdata.message;

                                     //Console.WriteLine(new { message });

                                     if (message == "addFrame")
                                     {
                                         Console.WriteLine("addFrame");

                                         // http://stackoverflow.com/questions/8776751/web-worker-dealing-with-imagedata-working-with-firefox-but-not-chrome
                                         //byte[] data = xdata.data;
                                         Uint8ClampedArray data = xdata.data;
                                         //data[
                                         // { data = [object Uint8ClampedArray] } 
                                         //Console.WriteLine(new { data });

                                         encoder.addFrame(data, true);


                                     }

                                     if (message == "finish")
                                     {
                                         Console.WriteLine("finish");

                                         encoder.finish();

                                         var bytes = Encoding.ASCII.GetBytes(encoder.stream().getData());
                                         var src = "data:image/gif;base64," + Convert.ToBase64String(bytes);

                                         ee.ports.WithEach(port => port.postMessage(src));

                                     }
                                 };
                         }
                     );
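                     // message protocol between this page and the worker, as used below:
                     //   { message = "addFrame", data = Uint8ClampedArray } -> worker encodes one frame
                     //   { message = "finish" }                             -> worker replies on the
                     //                                                         MessagePort with a data: URI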





                     foreach (var data in frames)
                     {

                         dynamic xdata = new object();

                         xdata.message = "addFrame";
                         xdata.data = data;

                         // Error	4	Cannot convert lambda expression to type 'ScriptCoreLib.JavaScript.DOM.MessagePort[]' because it is not a delegate type	X:\jsc.svn\examples\javascript\synergy\jsgif\jsgif\Application.cs	171	38	jsgif


                         w.postMessage(
                             (object)xdata,
                             (MessageEvent ee) =>
                             {
                                 // ?
                                 Console.WriteLine("addFrame done");
                             }
                         );
                     }

                     w.postMessage(
                        new { message = "finish" },
                        (MessageEvent ee) =>
                        {
                            Console.WriteLine("finish done");

                            var src = (string)ee.data;

                            Console.WriteLine(e.Elapsed);
                            new IHTMLImage { src = src }.AttachToDocument();

                            w.terminate();
                        }
                     );



                     //var bytes = Encoding.ASCII.GetBytes(encoder.stream().getData());
                     //var src = "data:image/gif;base64," + Convert.ToBase64String(bytes);


                     //new IHTMLImage { src = src }.AttachToDocument();
                 }
             );

            new IHTMLButton { innerText = "encode via GIFEncoderAsync. works!" }.AttachToDocument().WhenClicked(
                btn =>
                {
                    btn.disabled = true;
                    var e = new Stopwatch();
                    e.Start();

                    var w = new GIFEncoderAsync(
                        width: x,
                        height: y,
                        delay: 1000 / 10
                    );

                    //http://www.w3schools.com/tags/tryit.asp?filename=tryhtml5_meter

                    var innerText = btn.innerText;

                    btn.Clear();

                    dynamic meter = new IHTMLElement("meter").AttachTo(btn);

                    meter.max = frames.Count;
                    var value = 0;

                    foreach (var data in frames)
                    {
                        w.addFrame((Uint8ClampedArray)(object)data,
                            delegate
                            {
                                Console.WriteLine("addFrame done");

                                value++;

                                meter.value = value;
                            }
                        );
                    }

                    w.finish(
                        src =>
                        {
                            Console.WriteLine("finish done");


                            Console.WriteLine(e.Elapsed);
                            new IHTMLImage { src = src }.AttachToDocument();

                            btn.innerText = innerText;

                            btn.disabled = false;
                            btn.title = new { e.Elapsed }.ToString();

                        }
                    );


                }
            );


            new IHTMLButton { innerText = "encode via GIFEncoderWorker. crashes the system? why?" }.AttachToDocument().WhenClicked(
                async btn =>
                {
                    Console.WriteLine("encoding!");

                    btn.disabled = true;
                    var e = new Stopwatch();
                    e.Start();

                    var src = await new GIFEncoderWorker(
                          x,
                          y,
                           delay: 1000 / 10,
                          frames: frames,

                          AtFrame:
                            index =>
                            {
                                btn.innerText = new { index, frames.Count }.ToString();

                            }
                      );



                    Console.WriteLine("done!");
                    Console.WriteLine(e.Elapsed);

                    new IHTMLImage { src = src }.AttachToDocument();


                    btn.disabled = false;
                    btn.title = new { e.Elapsed }.ToString();

                }
            );

        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server, we can open it up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360stereomidnightsun");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz rendering we may want to use float camera precision, not available for the ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            if (Environment.ProcessorCount < 8)
                //cubefacesize = 64; // 6 faces, ?

                // fast gif?
                cubefacesize = 1024; // 6 faces, ?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
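            // with cubefacesize = 1024 this gives uizoom = 128 / 1024 = 0.125, so each cube-face
            // canvas is previewed at 128px while rendering at full resolution.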


            Native.css.style.backgroundColor = "darkcyan";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we make sure we don't fly too far beyond the moon?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360stereomidnightsun/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as an android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0x0, 1);
            renderer0.setClearColor(0x0, 0);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap faces in the GUI
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have a linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original view origin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // what if we want more than a 30sec video? a 2min animation? more frames to render? 2GB of disk?





            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;
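
            // worked out: maxframes = maxlengthseconds * maxfps = 120 * 60 = 7200 frames,
            // so the frameIDslider below runs from 0 to 7200.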

            var frameIDanimation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "frameIDanimation", @checked = false }.AttachToDocument();

            // what if we want more than a 30sec video? a 2min animation? more frames to render? 2GB of disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 0, title = "frameIDslider" }.AttachToDocument();
            //var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();




            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            camerax.css.after.contentText = "x: ";
            new IHTMLBreak { }.AttachToDocument();

            //camerax.style.borderLeft = "1em solid red";

            // up. what's the highest a rocket can go, 120km?
            //new IHTMLHorizontalRule { }.AttachToDocument();

            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            cameray.css.after.contentText = "y: ";
            new IHTMLBreak { }.AttachToDocument();

            // we won't be going to orbit
            //new IHTMLBreak { }.AttachToDocument();
            //var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            //new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -2048 / 2, max = 0 + 2048 / 2, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            cameraz.css.after.contentText = "z: ";

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;


            new IHTMLHorizontalRule { }.AttachToDocument();





            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo

            // not used for this example though...
            var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -180, max = 180, valueAsNumber = 0, title = "itemRotation" }.AttachToDocument();
            var spriteOffset = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 11, valueAsNumber = 0, title = "spriteOffset" }.AttachToDocument();
            //var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -90, max = 90, valueAsNumber = 33, title = "itemRotation" }.AttachToDocument();

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            //camerayHigh.onmousedown += delegate
            //{
            //    //if (camerayHigh.valueAsNumber > camerayHigh.min)
            //    cameray.valueAsNumber = cameray.max;
            //};


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;



                // we won't be going to orbit

                //if (cameray.valueAsNumber < cameray.max)
                //    camerayHigh.valueAsNumber = camerayHigh.min;

                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                //    cameray.valueAsNumber = cameray.max;

                //if (cameray.valueAsNumber == cameray.max)
                //    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };
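
            // note: the effective camera offset is the sum of the manual sliders (camerax/y/z)
            // and the scripted values (fcamerax/y/z) that the render loop animates,
            // e.g. fcamerax is set to -xIPD / +xIPD for the two eyes in drawStereoFrame below
            // while the sliders stay untouched.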


            #region y
            // need to rotate 90deg?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));

            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // let's have the item twice the cube face size, and offset -0.5 to recenter.
            // this won't work as we are cloning the buffer for now!
            //var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize * 2);
            var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPXitem.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasPXitem.canvas.title = "item";
            canvasPXitem.canvas.AttachToDocument();
            canvasPXitem.canvas.style.transformOrigin = "0 0";
            canvasPXitem.canvas.style.transform = "scale(" + uizoom + ")";
            canvasPXitem.canvas.style.border = "1px solid yellow";
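
            // layout note: every preview canvas is positioned on a (uizoom * cubefacesize + 8)
            // pixel grid offset by cubecameraoffsetx; with PZ/NZ added further below, the middle
            // row ends up as PX, PZ, NX, NZ (columns 0..3), PY sits above PZ, NY below it,
            // and this item canvas lands at column 0, row 2.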

            //canvasPXitem.fillText("hello", 1, 1, cubefacesize);

            //canvasPXitem.drawImage(
            //    //new IHTMLPre { "hello" }
            //    new IHTMLDiv { "hello world. can we draw html into 360 VR yet?" }, 0, 0, cubefacesize, cubefacesize
            //);







            // http://www.w3schools.com/tags/canvas_fillstyle.asp
            canvasPXitem.fillStyle = "red";

            // too big?
            //canvasPXitem.fillRect(
            //    x: cubefacesize / 3,
            //    y: cubefacesize / 4,
            //    w: cubefacesize / 3,
            //    h: cubefacesize / 2
            //);



            // canvasPXitem.fillRect(
            //    x: (cubefacesize - cubefacesize / 6) / 2,
            //    y: (cubefacesize - cubefacesize / 3) / 2,

            //    w: cubefacesize / 6,
            //    h: cubefacesize / 3
            //);



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
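
            // cube face cameras as wired above (the x/z look directions appear swapped relative
            // to the usual axis names, presumably because the whole scene was rotated -PI/2
            // about Y earlier):
            //  PX looks at (0, 0, -1)    NX looks at (0, 0, 1)
            //  PY looks at (0, 1, 0)     NY looks at (0, -1, 0)
            //  PZ looks at (-1, 0, 0)    NZ looks at (1, 0, 0)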




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube

            // bots cannot get a bigger mp4 from yt, and vrideo renders 2k on gearvr.
            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // aspect: 3840 / 2160 = 1.7777...

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            // aspect: 7580 / 3840 = 1.9739...

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
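
            // Rough, illustrative sketch (not wired into the render path): how a pixel of the
            // 2:1 equirectangular output could map to a cube map lookup direction. The real
            // mapping lives in Shaders.ProgramFragmentShader above, so the exact longitude /
            // latitude convention here is an assumption, not that shader's actual code.
            Func<double, double, double[]> equirectangularDirection = (u, v) =>
            {
                // u, v in [0..1] across the output; longitude spans 2*PI, latitude spans PI
                var lon = (u - 0.5) * 2.0 * Math.PI;
                var lat = (v - 0.5) * Math.PI;

                // unit direction that a samplerCube lookup would use for this pixel
                return new[]
                {
                    Math.Cos(lat) * Math.Sin(lon),
                    Math.Sin(lat),
                    Math.Cos(lat) * Math.Cos(lon)
                };
            };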




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);



            var frame2 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();

            frame2.style.height = "270px";
            frame2.style.width = "480px";
            frame2.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 * 2 + 16 * 2, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // quoting the abstract:
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion

            var vsync = default(TaskCompletionSource<object>);


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //var tex0 = new THREE.Texture(new moon());
            //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            var texPXitem = new THREE.Texture(

                //shader1canvas

                canvasPXitem.canvas

                ) { needsUpdate = true };


            var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            var floor2 = new THREE.Mesh(planeGeometry0,
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                new THREE.MeshPhongMaterial(
                    new
                    {

                        map = texPXitem,

                        transparent = true,
                        alphaTest = 0.5

                        //ambient = 0x00ff00,
                        //color = 0x00ff00
                    })

            );
            //floor2.position.set(0, 0, -cubefacesize  * 0.55);

            floor2.AttachTo(scene);

            applycameraoffset += delegate
            {
                texPXitem.needsUpdate = true;

                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.33, 0, 0);
                // floor2.position.set(-cubefacesize * 0.25, 0, 0);

                //floor2.position.set(-cubefacesize * 0.225, 0, 0);
                floor2.position.set(-cubefacesize * 0.23, 0, 0);

                // too close!
                //floor2.position.set(-cubefacesize * 0.20, 0, 0);

                floor2.rotation.set(0, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2 + radians(itemRotation.valueAsNumber));
            };
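
            // so on every frame the sprite plane is re-centered 0.23 * cubefacesize along -X
            // (a hand-tuned distance, see the "too close!" note above) and rotated
            // PI/2 + itemRotation about Y so it keeps facing the PX camera while the slider spins it.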






            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            new IHTMLBreak { }.AttachToDocument();

            var iskybox2 = new HTML.Images.FromAssets._2massAllskyGAMMA();
            var iskybox1 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs();

            var hideskybox1 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox1", @checked = true }.AttachToDocument();
            var hideskybox2 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox2", @checked = false }.AttachToDocument();

            #region drawStereoFrame
            Func<CanvasRenderingContext2D, Task> drawStereoFrame = async canvasTB =>
            {
                //var xIPD = 4.0;
                var xIPD = 6.0;

                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203/x360stereomidnightsun

                // fake skybox?
                canvasTB.fillStyle = "darkcyan";
                canvasTB.fillRect(0, 0, c.width, c.height);

                //canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                //canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // 12 frames in total. let's add them all
                // can we add a secondary stereo frame? at 45deg?

                var offsetrotation = 360 / 12;

                hideskybox1.@checked = true;
                hideskybox2.@checked = false;

                // mono bg!
                floor2.visible = false;
                fcamerax = 0;
                await Native.window.async.onframe;
                await Native.window.async.onframe;

                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // bring the stereo sprite back; the skyboxes get hidden below so only the sprite renders
                floor2.visible = true;

                //await Native.window.async.onframe;


                // we need our stereo item frame thanks. no bg.
                hideskybox1.@checked = true;
                hideskybox2.@checked = true;

                await Native.window.async.onframe;






                //fcamerax = -xIPD;
                //await Native.window.async.onframe;
                //var stereoT = new IHTMLImage { src = gl.canvas.toDataURL() };

                //fcamerax = +xIPD;

                //await Native.window.async.onframe;
                //var stereoB = new IHTMLImage { src = gl.canvas.toDataURL() };

                ////await Native.window.async.onframe;
                //await stereoB.async.oncomplete;


                // we now have a stereo sprite.
                // can we rotate it on top of the background?


                // 8K fulldome is a resolution of 8192×8192 
                // 8K UHD is a resolution of 7680 × 4320 (33.2 megapixels) 
                // 8192×4320
                // Digital video formats with resolutions of 4K (3840×2160) and 8K (7680×4320)


                // WebGL: CONTEXT_LOST_WEBGL: loseContext: context lost ?
                for (int stereoframei = 0; stereoframei < 12; stereoframei++)
                {
                    spriteOffset.valueAsNumber = stereoframei;

                    Console.WriteLine(new { stereoframei });
                    double ioffsetdeg = offsetrotation * stereoframei;


                    ioffsetdeg += (degrees(frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2));


                    // follow the moon?
                    //stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                    //    frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                    //);


                    var ipxoffset = (int)Math.Floor(c.width * ioffsetdeg / 360);

                    ipxoffset = ipxoffset % c.width;

                    fcamerax = -xIPD;
                    await Native.window.async.onframe;
                    var stereoT = gl.canvas;
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, ipxoffset, 0, c.width, c.height / 2);
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, -c.width + ipxoffset, 0, c.width, c.height / 2);

                    fcamerax = +xIPD;
                    await Native.window.async.onframe;
                    var stereoB = gl.canvas;
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, ipxoffset, c.height / 2, c.width, c.height / 2);
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, -c.width + ipxoffset, c.height / 2, c.width, c.height / 2);
                }


                //var canvasTB8K = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                //canvasTB8K.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB8K.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);

                // https://www.reddit.com/r/GearVR/comments/2vrfyu/id_suggest_makers_of_360_videos_make_them_the/
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                // can we actually watch stereo _TB images on gearVR?

            };
            #endregion
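
            // worked example of the wrap-around above: offsetrotation = 360 / 12 = 30deg, so for
            // stereoframei = 2 (with frameIDslider at 0) ioffsetdeg = 60 and
            // ipxoffset = floor(3840 * 60 / 360) = 640 px; drawing the same eye once at ipxoffset
            // and once at ipxoffset - c.width lets the strip wrap across the left/right seam of
            // the equirectangular frame.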

            #region stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151112
            new IHTMLButton { "make me a stereo TB image" }.AttachToDocument().With(
                async e =>
                {
                    // http://www.vrideo.com/watch/ALdE7mm
                    // https://www.youtube.com/watch?v=S3iTPxMIlCI

                    var onclick = e.async.onclick;

                    while (await onclick)
                    {




                        // keep it 4K, as hardware and youtube are not ready for 60fps 8K!
                        var canvasTB = new CanvasRenderingContext2D(c.width, c.height);

                        // wait for the stereo frame to finish before reading the canvas back
                        await drawStereoFrame(canvasTB);

                        // gearVR will get a black screen
                        // 
                        //frame2.src = canvasTB8K.canvas.toDataURL();
                        frame2.src = canvasTB.canvas.toDataURL();


                        onclick = e.async.onclick;

                        //while (!onclick.IsCompleted)
                        //{
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f0.src;
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f1.src;
                        //}
                    }
                }
            );
            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                //"render 60hz 30sec"
                //$"render {maxfps}hz {maxlengthseconds}sec"
                "render " + new {maxfps} + "hz " + new {maxlengthseconds} + "sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;

                //var canvasTB = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                var canvasTB = new CanvasRenderingContext2D(c.width, c.height);


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                //frameIDanimation.@checked = true;
                frameIDslider.valueAsNumber = -1;

                // allow the animation values to sink in
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;



                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };

                await drawStereoFrame(canvasTB);

                //var xIPD = 4.0;


                //// left eye
                //fcamerax = -xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };


                //// right eye
                //fcamerax = +xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f1 = new IHTMLImage { src = gl.canvas.toDataURL() };
                //await f1.async.oncomplete;



                //canvasTB.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);


                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir == null)
                {
                    frame2.src = canvasTB.canvas.toDataURL();

                    await Task.Delay(500);
                }
                else
                    await dir.WriteAllBytes(filename, canvasTB);
                //await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 60;
                else
                    frameIDslider.valueAsNumber++;




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this delay helps, or the fact that we made the Blob static does.
                    await Task.Delay(11);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion
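
            // The render loop above only writes numbered stills (00000.jpg, 00001.jpg, ...).
            // Assembling them into an actual 60fps video is assumed to happen offline with an
            // external encoder, e.g. something along the lines of:
            //   ffmpeg -framerate 60 -i %05d.jpg -c:v libx264 -pix_fmt yuv420p out.mp4
            // (illustrative command only; not part of this application)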



            new { }.With(
                async delegate
                {

                    var tex1 = new the_midnight_sun_by_isilmetriel { };

                    await tex1.async.oncomplete;

                    // the first one is 124, while the others are 123?
                    var tex1w = 123;
                    //var tex1w = 120;
                    var tex1h = 626;

                    //canvasPXitem.drawImage(
                    //           tex1, 2, 2, tex1w, tex1h, 0, 0, tex1w, tex1h
                    //       );


                    // how long until jsc can upstream small updates to code?



                    //// canvasPXitem.drawImage(
                    ////     (IHTMLCanvas)renderer0.domElement,



                    ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    sw: cubefacesize / 6,
                    ////    sh: cubefacesize / 3,

                    ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    dw: cubefacesize / 6,
                    ////    dh: cubefacesize / 3
                    ////);





                    await iskybox2.async.oncomplete;

                    var bytes1 = await iskybox1.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(iskybox1.width, iskybox1.height);

                    cc.bytes = bytes1;

                    //s.image = cc;
                    //s.needsUpdate = true;

                    var skybox1_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = new THREE.Texture { image = cc, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var skybox1 = new THREE.Mesh(
                        //new THREE.SphereGeometry(far * 0.92, 64, 64),
                        //new THREE.SphereGeometry(far * 0.80, 64, 64),

                            // still z-fighting
                        //new THREE.SphereGeometry(far * 0.50, 64, 64),

                            // the other option is to have a single bg and blend on that. this is just a rotation visualization.
                            new THREE.SphereGeometry(far * 0.30, 64, 64),
                           skybox1_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    hideskybox1.onchange += delegate
                    {
                        skybox1.visible = !hideskybox1.@checked;
                    };
                    skybox1.visible = !hideskybox1.@checked;


                    scene.add(skybox1);




                    applycameraoffset += delegate
                    {
                        if (frameIDanimation.@checked)
                        {
                            itemRotation.valueAsNumber = (frameIDslider.valueAsNumber / 2) % 360 - 180;

                            hideskybox1.@checked = (frameIDslider.valueAsNumber / 2 + 180) % 720 < 360;
                        }
                    };


                    // target bg
                    var skybox2 = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.95, 64, 64),
                           new THREE.MeshBasicMaterial(
                            new
                            {
                                map = new THREE.Texture { image = iskybox2, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            })
                    );


                    skybox2.AttachTo(scene).With(
                        stars =>
                        {
                            applycameraoffset += delegate
                              {

                                  skybox2.visible = !hideskybox2.@checked;



                                  skybox1.rotation.set(0, 0, 0);
                                  // spin
                                  skybox1.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                     frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                 );

                                  // reset
                                  stars.rotation.set(0, 0, 0);

                                  // slow rotate in place
                                  stars.rotateOnAxis(new THREE.Vector3(1, 0, 0),
                                      frameIDslider.valueAsNumber / 3600.0 * Math.PI * 2
                                  );

                                  // follow the moon?
                                  stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                      frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                  );
                              };
                        }

                    );

                    // can we get our stereo sprite this way?
                    // do we get clean tiles with transparency?
                    // on x83 on frame0 we were able to hide skybox. how?
                    //hideskybox2.onchange += delegate
                    //{
                    //    skybox2.visible = !hideskybox2.@checked;
                    //};
                    //skybox2.visible = !hideskybox2.@checked;



                    Console.WriteLine("skybox added");






                    //dae.position.y = -80;

                    //dae.AttachTo(sceneg);
                    //scene.add(dae);
                    //oo.Add(dae);




                    // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                    // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                    Native.window.onframe +=
                        e =>
                        {
                            // let render man know..
                            if (vsync != null)
                                if (vsync.Task.IsCompleted)
                                    return;


                            //if (pause) return;
                            //if (pause.@checked)
                            //    return;


                            // can we float out of frame?
                            // haha. a bit too flickery.
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                            //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                            //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                            //sphere.rotation.y += speed;
                            //clouds.rotation.y += speed;

                            // manual rebuild?
                            // red compiler notifies laptop chrome of pending update
                            // app reloads

                            applycameraoffset();
                            renderer0.clear();





                            // spriteOffset
                            canvasPXitem.drawImage(
                                       tex1,

                                       //2px black border!
                                //((tex1w + 4) * spriteOffset.valueAsNumber) + 2,
                                       ((tex1w + 2) * spriteOffset.valueAsNumber) + 3,

                                       2, tex1w, tex1h,

                                       // dest
                                       (cubefacesize - tex1w) / 2,
                                       (cubefacesize - tex1h) / 2,

                                       tex1w, tex1h
                                   );



                            //rendererPY.clear();

                            //cameraPX.aspect = canvasPX.aspect;
                            //cameraPX.updateProjectionMatrix();

                            // um what does this do?
                            //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                            // mousewheel allows the camera to move closer
                            // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                            //this.targetPX.x += 1;
                            //this.targetNX.x -= 1;

                            //this.targetPY.y += 1;
                            //this.targetNY.y -= 1;

                            //this.targetPZ.z += 1;
                            //this.targetNZ.z -= 1;

                            // how does the 360 or shadertoy want our cubemaps?


                            // and then rotate right?

                            // how can we render cubemap?


                            // hide everything else


                            // inversion effect?
                            //// if (hideskybox1.@checked)
                            ////     skybox1.visible = true;
                            //// else
                            ////     skybox1.visible = false;

                            //// floor2.visible = false;
                            //// renderer0.render(scene, cameraPX);
                            //// //canvasPXitem.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);


                            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                            // can we draw from that special image?


                            //canvasPXitem.drawImage(
                            //    tex1, 2, 2, 124, 630 - 4, 0, 0, 124, 626
                            //);

                            //// canvasPXitem.drawImage(
                            ////     (IHTMLCanvas)renderer0.domElement,



                            ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    sw: cubefacesize / 6,
                            ////    sh: cubefacesize / 3,

                            ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    dw: cubefacesize / 6,
                            ////    dh: cubefacesize / 3
                            ////);


                            //// skybox1.visible = !hideskybox1.@checked;
                            //// floor2.visible = true;


                            #region x
                            canvasPX.clearRect(0, 0, cubefacesize, cubefacesize);
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNX.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z

                            canvasPZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            canvasPY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            canvasNY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                            }.WithEachIndex(
                             (img, index) =>
                             {
                                 gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                 //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                 gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                 // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                 // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                 gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                             }
                          );
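
                            // face order: index 0..5 maps to TEXTURE_CUBE_MAP_POSITIVE_X + index,
                            // i.e. +X, -X, +Y, -Y, +Z, -Z - which is exactly the order of the
                            // canvas array above (PX, NX, PY, NY, PZ, NZ).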


                            //if (cameraz.valueAsNumber == 0)
                            gl.clearColor(0, 0, 0, 0);
                            //else
                            //gl4K.clearColor(0, 0, 0, 1);

                            gl.clear(gl.COLOR_BUFFER_BIT);


                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                            pass.Paint_Image(
                               0,

                               0,
                               0,
                               0,
                               0
                                //,

                          // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                          );

                            //paintsw.Stop();


                            // what does it do?
                            gl.flush();

                            // let render man know..
                            if (vsync != null)
                                if (!vsync.Task.IsCompleted)
                                    vsync.SetResult(null);
                        };
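                        // the vsync TaskCompletionSource above is the frame-ready signal: whoever wants the
                        // finished equirectangular frame awaits it and then resets it. a minimal sketch of
                        // that consumer side (assumption - kept as a comment so this render loop is unchanged):
                        //
                        //  vsync = new TaskCompletionSource<object>();
                        //  await vsync.Task;                    // onframe calls SetResult once the faces are uploaded
                        //  var frame = gl.canvas.toDataURL();   // grab the rendered 360 frame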





                    Console.WriteLine("do you see it?");

                }
           );




        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst a: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularCameraExperiment\ChromeEquirectangularCameraExperiment\bin\Debug\staging\ChromeEquirectangularCameraExperiment.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularCameraExperiment\ChromeEquirectangularCameraExperiment\bin\Debug\staging\ChromeEquirectangularCameraExperiment.Application\web

        // ColladaLoader: Empty or non-existing file (assets/ChromeEquirectangularCameraExperiment/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "ChromeEquirectangularCameraExperiment");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();

            new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();

            var pause = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "pause" }.AttachToDocument();


            pause.onchange += delegate
            {

                if (pause.@checked)
                    sw.Stop();
                else
                    sw.Start();


            };

            var oo = new List<THREE.Object3D>();

            #region scene
            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();

            var ambient = new THREE.AmbientLight(0x303030);
            scene.add(ambient);

            // should we fix jsc to do a more correct IDL?
            var directionalLight = new THREE.DirectionalLight(0xffffff, 0.7);
            directionalLight.position.set(0, 0, 1);
            scene.add(directionalLight);



            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 

            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            const int size = 1024; // 6 faces, ?

            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            renderer0.setClearColor(0xffffff, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(size, size);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa

            var uizoom = 0.1;

            var far = 0xffff;

            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(size, size);
            canvasNY.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 1, 8 + (int)(uizoom * size + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            canvasNY.canvas.style.transform = $"scale({uizoom})";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(size, size);
            canvasPY.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 1, 8 + (int)(uizoom * size + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = $"scale({uizoom})";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(size, size);
            canvasNX.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 2, 8 + (int)(uizoom * size + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = $"scale({uizoom})";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(size, size);
            canvasPX.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 0, 8 + (int)(uizoom * size + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = $"scale({uizoom})";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(size, size);
            canvasNZ.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 3, 8 + (int)(uizoom * size + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = $"scale({uizoom})";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(size, size);
            canvasPZ.canvas.style.SetLocation(8 + (int)(uizoom * size + 8) * 1, 8 + (int)(uizoom * size + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = $"scale({uizoom})";
            #endregion
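            // the SetLocation math above lays the six face previews out on a padded grid of
            // (uizoom * size + 8) pixel cells: PX, PZ, NX, NZ across the middle row, with PY above
            // and NY below the PZ tile.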




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;

            #region onmousedown
            Native.body.onmousedown +=
                async e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                        return;

                    // movementX no longer works
                    old = new
                    {


                        e.CursorX,
                        e.CursorY
                    };


                    //e.CaptureMouse();
                    var release = e.Element.CaptureMouse();
                    await e.Element.async.onmouseup;

                    release();


                };
            #endregion



            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
            #region onmousemove
            Native.body.onmousemove +=
                e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                    {
                        Native.body.style.cursor = IStyle.CursorEnum.@default;
                        return;
                    }

                    e.preventDefault();
                    e.stopPropagation();


                    Native.body.style.cursor = IStyle.CursorEnum.move;

                    var pointerLock = canvas0 == Native.document.pointerLockElement;


                    //Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

                    if (e.MouseButton == IEvent.MouseButtonEnum.Left)
                    {

                        oo.WithEach(
                            x =>
                            {
                                x.rotation.y += 0.006 * (e.CursorX - old.CursorX);
                                x.rotation.x += 0.006 * (e.CursorY - old.CursorY);
                            }
                        );

                        old = new
                        {


                            e.CursorX,
                            e.CursorY
                        };



                    }

                };
            #endregion

            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = $"scale({suizoom})";
            c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * size + 8) * 0, 8 + (int)(uizoom * size + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
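            // a minimal sketch (assumption - not the actual shader source) of the cube-to-equirectangular
            // mapping the fragment shader is expected to perform for every pixel of the 3840x2160 canvas:
            //
            //  // u, v in [0..1] across the output canvas
            //  var lon = (u - 0.5) * 2.0 * Math.PI;   // -PI .. +PI around the horizon
            //  var lat = (0.5 - v) * Math.PI;          // +PI/2 at the top, -PI/2 at the bottom
            //  var dir = new THREE.Vector3(
            //      Math.Cos(lat) * Math.Sin(lon),
            //      Math.Sin(lat),
            //      Math.Cos(lat) * Math.Cos(lon)
            //  );
            //  // sampling the bound cubemap (pass.mInputs[0]) along dir gives that pixel's color.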




            //var xor = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * size + 8) * 0 + 480 + 16, 8 + (int)(uizoom * size + 8) * 3);


            var mesh = new THREE.Mesh(new THREE.SphereGeometry(0xFFF, 50, 50),
           new THREE.MeshBasicMaterial(new
           {
               map = THREE.ImageUtils.loadTexture(
                  //new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().src
                  //new HTML.Images.FromAssets._4008650304_7f837ccbb7_b().src
                  frame0.src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                   )
           }));
            mesh.scale.x = -1;

            #region fixup rotation

            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
            mesh.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            #endregion


            scene.add(mesh);



            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of "being there". That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                var base64 = gl.canvas.toDataURL();

                frame0.src = base64;
                // 7MB!

            };
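            // toDataURL() with no arguments produces a PNG, which is why the grabbed 3840x2160 frame is ~7MB.
            // a smaller lossy grab, assuming the toDataURL(type, quality) overload is exposed by the canvas
            // wrapper (sketch, kept commented out):
            //
            //  frame0.src = gl.canvas.toDataURL("image/jpeg", 0.8);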

            new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
                   dae =>
                   {
                       // 90deg
                       dae.rotation.x = -Math.Cos(Math.PI);

                       //dae.scale.x = 30;
                       //dae.scale.y = 30;
                       //dae.scale.z = 30;
                       dae.position.z = -(65 - 200);





                       var scale = 0.9;

                       // jsc, do we have ILObserver available yet?
                       dae.scale.x = scale;
                       dae.scale.y = scale;
                       dae.scale.z = scale;


                       #region onmousewheel
                       Native.body.onmousewheel +=
                           e =>
                           {
                               e.preventDefault();

                               //camera.position.z = 1.5;

                               // min max. shall adjust speed also!
                               // max 4.0
                               // min 0.6
                               dae.position.z -= 10.0 * e.WheelDirection;

                               //camera.position.z = 400;
                               //dae.position.z = dae.position.z.Max(-200).Min(200);

                               //Native.document.title = new { z }.ToString();

                           };
                       #endregion


                       //dae.position.y = -80;

                       scene.add(dae);
                       oo.Add(dae);




                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {
                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               dae.position.x = Math.Sin(sw.ElapsedMilliseconds * 0.0001) * 190.0;
                               dae.position.y = Math.Cos(sw.ElapsedMilliseconds * 0.0001) * 90.0;
                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads


                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                                // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?


                               #region x
                               // upside down?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, size, size);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );


                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                           };


                   }
               );


            #endregion



            Console.WriteLine("do you see it?");
        }
        public Application(IApp page)
        {
            // how does this work?
            // run it to see what we have?
            // seems to work. but laptop is freezing up????

            new forest5().With(
                async img =>
                {

                    await img;

                    var st0 = new Stopwatch();
                    st0.Start();
                    var c0 = new CanvasRenderingContext2D(
                         img.width,
                         img.height
                     );


                    c0.drawImage(img, 0, 0, img.width, img.height);
                    var imageData = c0.getImageData(0, 0, img.width, img.height);

                    var AnimatedColors = new List<int> {
                        0x3C4C8C,
                        0x2C407C,
                        0x2C3C84,
                        0x20306C
                    };



                    #region f
                    Func<int[], Task<CanvasRenderingContext2D>> f =
                         AnimatedColorsTo =>
                         {

                             var c1 = new CanvasRenderingContext2D(
                                  img.width,
                                  img.height
                              );



                             var imageData1 = c1.getImageData(0, 0, img.width, img.height);

                             // This loop visits every pixel in the image and remaps the animated palette colors.
                             for (var j = 0; j < imageData.width; j++)
                             {
                                 for (var ji = 0; ji < imageData.height; ji++)
                                 {
                                     var index = (uint)((ji * 4) * imageData.width + (j * 4));
                                     var red = imageData.data[index];
                                     var green = imageData.data[index + 1];
                                     var blue = imageData.data[index + 2];
                                     var alpha = imageData.data[index + 3];
                                     var average = (byte)((red + green + blue) / 3);

                                     var u8 = (red << 16) + (green << 8) + blue;
                                     var u8i = AnimatedColors.IndexOf(u8);

                                     if (u8i >= 0)
                                     {
                                         u8 = AnimatedColorsTo[u8i];

                                         red = (byte)((u8 >> 16) & 0xff);
                                         green = (byte)((u8 >> 8) & 0xff);
                                         blue = (byte)((u8 >> 0) & 0xff);
                                     }
                                     else
                                     {
                                         alpha = 0x00;
                                     }


                                     // 3C4C8C
                                     // 2C407C
                                     // 20306C

                                     imageData1.data[index] = red;
                                     imageData1.data[index + 1] = green;
                                     imageData1.data[index + 2] = blue;

                                     //imageData.data[index + 1] = average;
                                     //imageData.data[index + 2] = average;

                                     imageData1.data[index + 3] = alpha;
                                 }
                             }

                             // overwrite original image
                             c1.putImageData(imageData1, 0, 0, 0, 0, img.width, img.height);

                             var xx = new TaskCompletionSource<CanvasRenderingContext2D>();
                             xx.SetResult(c1);
                             return xx.Task;

                         };
                    #endregion
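                    // note: f resolves its TaskCompletionSource before returning, so the .Result calls
                    // below complete synchronously and will not block or deadlock the UI thread.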

                    var st = new Stopwatch();
                    st.Start();

                    //{ ElapsedMilliseconds = 4795 } 

                    var a1 = new[] {
                        AnimatedColors[1],
                        AnimatedColors[2],
                        AnimatedColors[3],
                        AnimatedColors[0],
                    };
                    var o1 = f(a1).Result;


                    var a2 = new[] {
                        AnimatedColors[2],
                        AnimatedColors[3],
                        AnimatedColors[0],
                        AnimatedColors[1],
                    };
                    var o2 = f(a2).Result;

                    var a3 = new[] {
                        AnimatedColors[3],
                        AnimatedColors[0],
                        AnimatedColors[1],
                        AnimatedColors[2],
                    };
                    var o3 = f(a3).Result;

                    Console.WriteLine(new { st.ElapsedMilliseconds });

                    c0.canvas.AttachToDocument().style.SetLocation(0, 0);
                    o1.canvas.AttachToDocument().style.SetLocation(0, 0);
                    o2.canvas.AttachToDocument().style.SetLocation(0, 0);
                    o3.canvas.AttachToDocument().style.SetLocation(0, 0);

                    new Timer(
                        t =>
                        {
                            if (t.Counter % AnimatedColors.Count == 0)
                            {
                                o1.canvas.Hide();
                                o2.canvas.Hide();
                                o3.canvas.Hide();
                                return;
                            }

                            if (t.Counter % AnimatedColors.Count == 1)
                            {
                                o1.canvas.Show();
                                o2.canvas.Hide();
                                o3.canvas.Hide();
                                return;
                            }

                            if (t.Counter % AnimatedColors.Count == 2)
                            {
                                o1.canvas.Hide();
                                o2.canvas.Show();
                                o3.canvas.Hide();
                                return;
                            }

                            if (t.Counter % AnimatedColors.Count == 3)
                            {
                                o1.canvas.Hide();
                                o2.canvas.Hide();
                                o3.canvas.Show();
                                return;
                            }
                        }
                    ).StartInterval(1000 / 5);
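                    // 1000 / 5 = 200ms per tick, so the four states (original frame plus the three
                    // recolored overlays) cycle at roughly 5fps.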

                    // { ElapsedMilliseconds = 1843 } 
                    Console.WriteLine("all: " + new { st.ElapsedMilliseconds });
                }
            );
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {

            // X:\jsc.svn\examples\javascript\WebCamToGIFAnimation\WebCamToGIFAnimation\Application.cs

            control.nfc.onnfc +=
                nfcid =>
                {
                    page.nfcid.innerText = new { nfcid }.ToString();
                };

            Native.window.navigator.getUserMedia(
               stream =>
               {

                   var v = new IHTMLVideo { src = stream.ToObjectURL() };

                   v.AttachToDocument();

                   v.play();



                   control.nfc.onnfc +=
                        nfcid =>
                        {
                            var c = new CanvasRenderingContext2D(v.clientWidth, v.clientHeight);

                            c.drawImage(
                                v, 0, 0, c.canvas.width, c.canvas.height
                            );

                            c.fillText(
                                new { nfcid }.ToString()
                                , 4, 32, 400
                            );

                            var bytes = c.bytes;

                            c.canvas.AttachToDocument();
                        };

                   new IHTMLButton { innerText = "record frame" }.AttachToDocument().onclick +=
                       delegate
                       {
                           var c = new CanvasRenderingContext2D(v.clientWidth, v.clientHeight);

                           c.drawImage(
                               v, 0, 0, c.canvas.width, c.canvas.height
                           );

                           c.fillText(
                              new { xnfcid = "?", page.nfcid }.ToString()
                              , 4, 32, 400
                          );

                           var bytes = c.bytes;

                           c.canvas.AttachToDocument();
                       };
               }
           );

        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150912/x360mountains




        //  ls sdcard/oculus/360photos/
        // "X:\vr\0000.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\0000.png" /sdcard/oculus/360photos/
        // 2649 KB/s (1085134 bytes in 0.400s)

        // "X:\vr\tr.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\tr.png" /sdcard/oculus/360photos/


        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\code.png" /sdcard/oculus/360photos/
        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\cone2.png" /sdcard/oculus/360photos/
        // "X:\vr\code.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150906/roomscanningeffectbyrosme

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150816/iss

        // https://www.youtube.com/watch?v=UWiq-qgedws
        // https://www.youtube.com/watch?v=TwRSOEG-Gx4
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe1.png"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe2.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globenight.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0000.png" "/sdcard/oculus/360photos/tape360iss.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0230.png" "/sdcard/oculus/360photos/tape360iss0230.png"

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\sh1\0000.png" "/sdcard/oculus/360photos/sh1.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push     "R:\vr\tape360columns\0000.png" "/sdcard/oculus/360photos/tape360columns.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\edge.png" "/sdcard/oculus/360photos/tape360columns.png"
        // 4041 KB/s (3248448 bytes in 0.785s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push      "X:\vr\terrain.png" "/sdcard/oculus/360photos/"

        // could we udp our 360 image from webgl to vr yet?

        // "R:\vr\tape360iss\0230.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst /D b:
        // subst b: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // subst a: z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // what if we want to do subst in another winstat or session?

        // ColladaLoader: Empty or non-existing file (assets/x360mountains/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360mountains");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            //var vs0 = new TraceConeWithCRTByKlk.Shaders.Program360FragmentShader();
            //var vs0 = new FaceEdgeVertexByPaniq.Shaders.Program360FragmentShader();
            var vs0 = new ChromeShaderToyMountainsByHoskins.Shaders.Program360FragmentShader();


            // onframe needs sync points so the GC can run!
            var vsync = default(TaskCompletionSource<object>);
            Func<bool> vsyncReady = delegate
            {

                if (vsync != null)
                    if (vsync.Task.IsCompleted)
                        return true;


                return false;
            };
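            // a minimal sketch of the intended pacing, mirroring the SetResult call in the render
            // loop further below (assumption - the frame-writer side is not shown here):
            //
            //  // writer:  vsync = new TaskCompletionSource<object>(); await vsync.Task; // then save the frame
            //  // render:  if (vsyncReady()) return;   // last frame not yet consumed, skip this onframe
            //  //          ... render cube faces ...
            //  //          if (vsync != null && !vsync.Task.IsCompleted) vsync.SetResult(null);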



            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?

            // not responding...
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048; // 6 faces, ?
            //int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
                                                //int cubefacesize = 1024; // 6 faces, ?
                                                // "X:\vr\tape1\0000x2048.png"
                                                // for 60hz render we may want to use float camera precision, not available for ui.
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            // force laptop into preview. when can we have a button for it?
            //if (Environment.ProcessorCount < 8)
            //    cubefacesize = 64; // 6 faces, ?

            // fast gif?
            //cubefacesize = 128; // 6 faces, ?
            //cubefacesize = 512; // 6 faces, ?
            //    [GroupMarkerNotSet(crbug.com / 242999)!:247F0809]
            //RENDER WARNING: texture bound to texture unit 0 is not renderable.It maybe non-power-of-2 and have incompatible texture filtering.

            // can we keep fast fps yet highp?

            // can we choose this on runtime? designtime wants fast fps, yet for end product we want highdef on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
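            // i.e. each face preview stays 128px on screen no matter what resolution the faces render at.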


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";





            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we have not fly beyond moon too much?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360mountains/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    //alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] face order



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );
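            // note: this is just the initial offset; the applycameraoffset delegate further down
            // rebuilds this vector every frame from the camerax/cameray/cameraz sliders plus the
            // fcamerax/fcameray/fcameraz render-server deltas, roughly:
            //cameraoffset = new THREE.Vector3(1.0 * (camerax + fcamerax), 1.0 * (cameray + fcameray), 1.0 * (cameraz + fcameraz));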

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);













            var bottomRotate100 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -314, max = 314, valueAsNumber = 0, title = "bottomRotate" }.AttachToDocument();


            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;

            // what if we want more than a 30sec video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 137, title = "frameIDslider" }.AttachToDocument();
            frameIDslider.onchange += delegate { frameIDslider.title = "frameIDslider " + frameIDslider.valueAsNumber; };
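            // rough budget, assuming every frame is comparable to the ~3.7MB 3840x2160 png measured further below:
            // maxframes = maxlengthseconds * maxfps = 120 * 60 = 7200 frames
            // 7200 frames * ~3.7MB ≈ 26GB, so the "2gb disk?" guess above is optimistic for a full run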




            //var vs0 = new ChromeShaderToyRelentlessBySrtuss.Shaders.ProgramFragmentShader();
            //var vs0 = new TraceConeWithCRTByKlk.Shaders.ProgramFragmentShader();








            // left
            IHTMLCanvas shader0canvasPZ = null;

            // locCameraTargetOffset to look left?
            #region shader0canvasPZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shader shell
                  // whose ToString emits the GLSL source for the gpu
                  // and if we were to initialize its uniforms (see the typed wrapper below)



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader0canvasPZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // left?
                      //forward=normalize(float3(0.0 , 0.0 ,1.0));
                  };


                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);
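                      // the slider counts frames, so dividing by 60 turns it into shader time in seconds;
                      // e.g. the default valueAsNumber of 137 becomes about 137 / 60 ≈ 2.28s passed to Paint_Image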

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };



              }
          );
            #endregion
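            // for reference, the uCameraTargetOffset values set in this and the following face regions:
            //   left   / shader0canvasPZ: ( 0, 0, -1)
            //   front  / shader1canvasPX: ( 1, 0,  0)
            //   back   / shader1canvasNX: (-1, 0,  0)
            //   right  / shader2canvasNZ: ( 0, 0,  1)
            //   bottom / shader2canvasNY: ( 0, 1, -0.0001)
            //   top    / shader2canvasPY: ( 0,-1,  0.0001)
            // the tiny z epsilon on the up/down faces presumably keeps the look direction from being
            // exactly parallel to the shader's up vector, which would make its view basis degenerate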





            // front
            IHTMLCanvas shader1canvasPX = null;

            #region shader1canvasPX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasPX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // off by 45deg?

                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // fixup
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 0.0f);


                      // front
                      //forward=normalize(float3(1.0 , 0.0 ,0.0));
                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion



            // back
            IHTMLCanvas shader1canvasNX = null;

            #region shader1canvasNX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasNX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);

                      // back
                      //forward=normalize(float3(-1.0 , 0.0 ,0.0));

                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            // right
            IHTMLCanvas shader2canvasNZ = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasNZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shader shell
                  // whose ToString emits the GLSL source for the gpu
                  // and if we were to initialize its uniforms (see the typed wrapper below)



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);

                      // right
                      //forward=normalize(float3(0.0 , 0.0 ,-1.0));

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;
                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion












            // bottom
            IHTMLCanvas shader2canvasNY = null;

            // locCameraTargetOffset to look bottom?
            #region shader2canvasNY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shader shell
                  // whose ToString emits the GLSL source for the gpu
                  // and if we were to initialize its uniforms (see the typed wrapper below)



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, -1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion











            // top
            IHTMLCanvas shader2canvasPY = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasPY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shader shell
                  // whose ToString emits the GLSL source for the gpu
                  // and if we were to initialize its uniforms (see the typed wrapper below)



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasPY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, 1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };

                  Native.window.onframe += delegate
                  {
                      //d = a[0].CS___8__locals1.vsync != null;
                      //e = a[0].CS___8__locals1.vsync.kAcABp_b1ITCbIktNs3el5Q().dgQABqwxMjO1zVAJb5WXKA();


                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames is 30sec at 60fps (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "camerayHigh" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                  // left?
                  1.0 * (camerax + fcamerax),
                   // height?
                   //0,
                   //1600,
                   //1024,

                   // if the camera is in the center, would we need to move the scene?
                   // we have to move the camera. as we move the scene the lights are messed up
                   //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                   // can we hover top of the map?
                   );
            };
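            // the two y sliders hand over to each other:
            // cameray covers -2048*4..+2048*4 = -8192..+8192, camerayHigh continues from 8192 up to 1024*256 = 262144;
            // once cameray sits at its max, cy above switches to the high-range slider instead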


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = $"scale({uizoom})";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = $"scale({uizoom})";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = $"scale({uizoom})";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = $"scale({uizoom})";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = $"scale({uizoom})";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = $"scale({uizoom})";
            #endregion
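            // the six camera/canvas blocks above repeat one pattern (fov 90 with aspect 1, so each face spans
            // exactly 90deg and the six faces tile the full sphere; recentre, lookAt, add cameraoffset);
            // a possible refactor sketch, not wired in and with hypothetical names:
            //Func<THREE.Vector3, string, int, int, THREE.PerspectiveCamera> createFaceCamera = (lookat, title, column, row) =>
            //{
            //    var cam = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //    applycameraoffset += delegate
            //    {
            //        cam.position.copy(new THREE.Vector3(0, 0, 0));
            //        cam.lookAt(lookat);
            //        cam.position.add(cameraoffset);
            //    };
            //    var faceCanvas = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            //    faceCanvas.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * column, 8 + (int)(uizoom * cubefacesize + 8) * row);
            //    faceCanvas.canvas.title = title;
            //    faceCanvas.canvas.AttachToDocument();
            //    faceCanvas.canvas.style.transformOrigin = "0 0";
            //    faceCanvas.canvas.style.transform = $"scale({uizoom})";
            //    return cam;
            //};
            //var cameraPX2 = createFaceCamera(new THREE.Vector3(0, 0, -1), "PX", 0, 1);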




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // aspect 3840/2160 = 1.777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            // aspect 7580/3840 = 1.973958333333333

            //7580
            //    3840

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = $"scale({suizoom})";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);
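            // preview scale: suizoom = 480 / 3840 = 0.125, so the 3840x2160 equirectangular canvas is shown
            // at 480x270, the same footprint as the frame0 thumbnail placed next to it below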

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
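            // the fragment shader above presumably does the standard equirectangular lookup; a sketch of the
            // math only (not the actual shader source):
            //   for each output pixel (u, v) in 0..1:
            //     lon = (u - 0.5) * 2 * PI
            //     lat = (0.5 - v) * PI
            //     dir = (cos(lat) * sin(lon), sin(lat), cos(lat) * cos(lon))
            //     color = textureCube(cubemap, dir)
            // which is why six 90deg faces uploaded into pass.mInputs[0] are enough to cover the whole sphere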




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);
            int files2count = 0;

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir.createReader();

                var files2 = await dir2r.readFileEntries();

                files2count = files2.Count();

                if (files2count > 0)
                {
                    new IHTMLPre { new { files2count } }.AttachToDocument();

                }
            };
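            // assumption: for chrome.fileSystem.chooseEntry with type "openDirectory" the packaged app manifest
            // needs the fileSystem permission with "directory" (plus "write" for the png export below), roughly:
            //   "permissions": [ { "fileSystem": [ "write", "directory" ] } ]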
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion
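            // capture note: reading the canvas back after the frame (toDataURL / dir.WriteAllBytes(.., gl) above)
            // only works because both WebGL contexts were created with preserveDrawingBuffer = true; without it
            // the drawing buffer may already be cleared by the time the click handler reads it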


            #region render 60hz 30sec
            new IHTMLButton {
                $"render {maxfps}hz {maxlengthseconds}sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;


                //frameIDslider.valueAsNumber = -1;
                frameIDslider.valueAsNumber = files2count - 1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".png";
                status = "rendering... " + new { filename };


                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                    await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;
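                // design-mode stepping: below cubefacesizeMAX we skip ahead 15 frames per capture,
                // i.e. 15 / 60 = 0.25s of animation per saved png, a quick low-res preview of the whole timeline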




                beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this helps or it's the fact that we made a Blob static.
                    //await Task.Delay(11);
                    await Task.Delay(33);
                    // gc at 260 happened twice?
                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasPX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                floor2.AttachTo(scene);
            }



            // left?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader0canvasPZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0xff0000,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }





            // right?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, -cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }


            // back?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasNX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                floor2.position.set(cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);


                floor2.AttachTo(scene);
            }









            // bottom?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, -cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI + bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }



            // top?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasPY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }
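
            // the six planes above sit at ±cubefacesize/2 around the origin, each textured
            // from one of the live shader canvases, so the cube face cameras capture the
            // shader output on every side of the scene.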




            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await i.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(i.width, i.height);

                    cc.bytes = bytes;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                side = THREE.BackSide,
                                transparent = true
                            });
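                    // THREE.BackSide because the camera sits inside the starfield sphere
                    // and needs to see its inner surface; transparent = true enables alpha blending for the texture.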


                    var stars = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.9, 64, 64),
                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    scene.add(stars);
                }
           );
            #endregion




            new { }.With(
                   delegate
                   {



                       //dae.position.y = -80;

                       //dae.AttachTo(sceneg);
                       //scene.add(dae);
                       //oo.Add(dae);


                       //var rdysw = Stopwatch.StartNew();

                       //Console.WriteLine()

                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
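                       // per frame: render the scene once for each cube face camera, copy each
                       // render into its face canvas, upload the six canvases as the cube map,
                       // run the shader pass and then signal the capture loop via vsync.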
                       Native.window.onframe +=
                           e =>
                           {



                               // let render man know..
                               if (vsyncReady())
                                   return;


                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                               //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                               //sphere.rotation.y += speed;
                               //clouds.rotation.y += speed;

                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads

                               applycameraoffset();
                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?



                               #region x
                               // upside down?
                               // are we ready?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};
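
                               // the canvas array below is ordered to match the WebGL cube map targets
                               // (+X, -X, +Y, -Y, +Z, -Z); each entry is uploaded to
                               // TEXTURE_CUBE_MAP_POSITIVE_X + index.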

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );

                               // could do dynamic resolution: fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                               // let render man know..
                               if (vsync != null)
                                   if (!vsync.Task.IsCompleted)
                                       vsync.SetResult(null);
                           };


                   }
               );





            Console.WriteLine("do you see it?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // x:\jsc.svn\examples\javascript\chrome\apps\chromeudpsendasync\chromeudpsendasync\application.cs
            // reload on idle?
            // edit and continue over udp?

            // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2015/201503/20150306/udp

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    chrome.runtime.UpdateAvailable += delegate
                    {
                        new chrome.Notification(title: "UpdateAvailable");

                    };

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "ChromeUDPSendAsync");

                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: null
                        );

                        //xappwindow.setAlwaysOnTop
                        xappwindow.setAlwaysOnTop(true);

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion

            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeNetworkInterfaces\ChromeNetworkInterfaces\Application.cs

            //{{ Length = 4 }}
            //{{ prefixLength = 64, name = {D7020941-742E-4570-93B2-C0372D3D870F}, address = fe80::88c0:f0a:9ccf:cba0 }}
            //{{ prefixLength = 24, name = {D7020941-742E-4570-93B2-C0372D3D870F}, address = 192.168.43.28 }}
            //{{ prefixLength = 64, name = {A8657A4E-8BFA-41CC-87BE-6847E33E1A81}, address = 2001:0:9d38:6abd:20a6:2815:3f57:d4e3 }}
            //{{ prefixLength = 64, name = {A8657A4E-8BFA-41CC-87BE-6847E33E1A81}, address = fe80::20a6:2815:3f57:d4e3 }}


            var c = new CanvasRenderingContext2D(800, 400);

            c.canvas.style.border = "1px solid blue";
            c.canvas.AttachToDocument();
            c.canvas.style.SetLocation(0, 0);

            Native.document.body.style.marginTop = "400px";

            new IHTMLButton { "clear" }.AttachToDocument().onclick += delegate
            {
                c.clearRect(0, 0, 800, 400);
            };

            Action begin = delegate
            {
                c.beginPath();
                c.moveTo(0, 0);
            };

            // ok, this app needs to run as a chrome app.
            new { }.With(
                async delegate
                {
                    (Native.document.body.style as dynamic).webkitUserSelect = "auto";
                    Native.document.documentElement.style.overflow = IStyle.OverflowEnum.auto;


                    new IHTMLButton { "update pending... update available. click to reload.." }.AttachToDocument().onclick += delegate
                    {
                        // can we get a udp signal from the compiler when the app is out of date, when the update is pending?
                        chrome.runtime.reload();
                    };

                    var n = await chrome.socket.getNetworkList();

                    // Z:\jsc.svn\examples\javascript\chrome\hybrid\HybridHopToUDPChromeApp\Application.cs
                    var n24 = n.Where(x => x.prefixLength == 24).ToArray();

                    n24.WithEach(
                        async nic =>
                        {
                            // wifi? lan?
                            var status = new IHTMLPre { new { nic.name, nic.address } }.AttachToDocument();
                            var buffer = new IHTMLPre { }.AttachToDocument();

                            // Z:\jsc.svn\examples\javascript\chrome\hybrid\HybridHopToUDPChromeApp\Application.cs
                            //var uu = new UdpClient(40014);
                            var uu = new UdpClient(40094);

                            //args.mouse = "awaiting vertexTransform at " + nic + " :40014";

                            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                            // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                            //uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                            uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"));
                            while (true)
                            {
                                var ux = await uu.ReceiveAsync(); // did we jump to ui thread?
                                                                  //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                                                                  //args.vertexTransform = x.Buffer;


                                buffer.innerText = new { ux.Buffer.Length }.ToString();

                                // can we also get some floats?

                                // https://www.khronos.org/registry/typedarray/specs/latest/

                                // view the raw datagram bytes as 32-bit floats
                                float[] f = new Float32Array(new Uint8ClampedArray(ux.Buffer).buffer);

                                // pen x, y, pressure
                                if (f.Length >= 3)
                                {
                                    for (int i = 0; i < 3; i++)
                                    {
                                        var fi = f[i];

                                        new IHTMLDiv { new { i, fi } }.AttachTo(buffer);

                                    }

                                    var x = f[0];
                                    var y = f[1];
                                    var pressure = f[2];


                                    begin();

                                    // map pressure 0..255 onto a stroke width of 1..8 px
                                    c.lineWidth = 1 + (pressure / 255.0 * 7);

                                    if (pressure > 0)
                                        c.strokeStyle = "blue";
                                    else
                                        c.strokeStyle = "rgba(0,0,255,0.25)";

                                    // scale the incoming pen coordinates down by 10 and flip y into the 800x400 canvas
                                    c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));

                                    //c.lineTo(e.OffsetX, e.OffsetY);
                                    //c.lineTo(e.movementX, e.movementY);
                                    c.stroke();

                                    begin = delegate
                                    {
                                        c.beginPath();
                                        c.moveTo((int)(x * 0.1), 400 - (int)(y * 0.1));
                                    };

                                }
                                //new Float32Array()
                            }
                        }
                    );

                }
             );


        }
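
        // The pen receiver above expects each datagram to carry three little-endian 32-bit floats
        // (x, y, pressure), i.e. a 12-byte payload sent to multicast group 239.1.2.3 on port 40094.
        // A minimal sender sketch under those assumptions; the method name and the use of the
        // standard .NET UdpClient are hypothetical and not part of the original example
        // (assumes usings for System, System.Net.Sockets and System.Threading.Tasks).
        static async Task SendPenSampleAsync(float x, float y, float pressure)
        {
            // pack the three floats into a 12-byte datagram
            var payload = new byte[3 * sizeof(float)];
            Buffer.BlockCopy(new[] { x, y, pressure }, 0, payload, 0, payload.Length);

            using (var u = new UdpClient())
            {
                // same multicast group and port the receiver joined
                await u.SendAsync(payload, payload.Length, "239.1.2.3", 40094);
            }
        }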
        public Application(IDefault page)
        {
            var MandelbrotAsync = new MandelbrotAsync();


            var context = new CanvasRenderingContext2D(MandelbrotProvider.DefaultWidth, MandelbrotProvider.DefaultHeight);
            var canvas = context.canvas.AttachToDocument();

            var fpsc = new Stopwatch();
            fpsc.Start();

            var fps = 0;
            Native.window.onframe +=
                delegate
                {
                    if (fpsc.ElapsedMilliseconds < 1000)
                    {
                        fps++;
                    }
                    else
                    {
                        Native.document.title = new { fps }.ToString();
                        fps = 0;
                        fpsc.Restart();
                    }

                    if (MandelbrotAsync.frame == null)
                        return;

                    // copy the most recent asynchronously computed frame into the visible canvas
                    context.bytes = MandelbrotAsync.frame;
                };



        }
        public __Bitmap(int width, int height)
        {
            this.Width = width;
            this.Height = height;

            this.InternalCanvas = new IHTMLCanvas
            {
                width = width,
                height = height
            };

            this.InternalContext = (CanvasRenderingContext2D)this.InternalCanvas.getContext("2d");
        }