Example 1
        private void MakeClothMesh(THREE.Texture t)
        {
            THREE.Texture clothTexture = t;

            clothTexture.wrapS = THREE.WrapType.MirroredRepeatWrapping;
            clothTexture.wrapT = THREE.WrapType.RepeatWrapping;

            clothTexture.repeat.y   = -1;
            clothTexture.anisotropy = 16;


            THREE.MeshPhongMaterial clothMaterial = new THREE.MeshPhongMaterial();

            clothMaterial.specular = new THREE.Color().setHex(0x030303);
            //clothMaterial.color = new THREE.Color(1, 0.4, 0);
            clothMaterial.map                = clothTexture;
            clothMaterial.side               = THREE.SideType.DoubleSide;
            clothMaterial.alphaTest          = 0.5;
            clothMaterial.wireframe          = false;
            clothMaterial.wireframeLinewidth = 2;

            // cloth mesh
            THREE.Mesh clothMesh = new THREE.Mesh(cloth.Geometry, clothMaterial);
            clothMesh.position.set(0, 0, 0);
            clothMesh.castShadow = true;


            THREE.ShaderMaterial shaderMat = null;

            // note: this options object is prepared here but never passed to the ShaderMaterial below
            THREE.ShaderMaterialOptions o = new THREE.ShaderMaterialOptions()
            {
                texture = new THREE.Uniform()
                {
                    type = "t", value = clothTexture
                }
            };

            shaderMat = new THREE.ShaderMaterial();

            shaderMat.side = THREE.SideType.DoubleSide;

            string vertexShader   = Shaders.vertex;
            string fragmentShader = Shaders.fragment;

            shaderMat.vertexShader   = vertexShader;
            shaderMat.fragmentShader = fragmentShader;

            clothMesh.customDepthMaterial = shaderMat;
            scene.add(clothMesh);
        }
Example 2
        private void MakeGroundPlane(THREE.Texture t)
        {
            THREE.Texture groundTexture = t;
            groundTexture.wrapS = THREE.WrapType.RepeatWrapping;
            groundTexture.wrapT = THREE.WrapType.RepeatWrapping;
            groundTexture.repeat.set(25, 25);
            groundTexture.anisotropy = 16;

            var groundMaterial = new THREE.MeshPhongMaterial();

            groundMaterial.color    = new THREE.Color().setHex(0xffffff);
            groundMaterial.specular = new THREE.Color().setHex(0x111111);
            groundMaterial.map      = groundTexture;

            THREE.Mesh planeMesh = new THREE.Mesh(new THREE.PlaneBufferGeometry(20000, 20000), groundMaterial);
            planeMesh.position.y    = -250;
            planeMesh.rotation.x    = -Math.PI / 2;
            planeMesh.receiveShadow = true;
            scene.add(planeMesh);
        }
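        // usage sketch (an assumption, not part of the original sample): both helpers expect an
        // already-loaded THREE.Texture plus the class-level cloth and scene fields, so a caller
        // would look roughly like this, with LoadTextureSomehow standing in for whatever texture
        // loader this ScriptCoreLib build exposes:
        //
        //   var clothTexture  = LoadTextureSomehow("assets/x360mountains/cloth.png");   // hypothetical helper + asset path
        //   var groundTexture = LoadTextureSomehow("assets/x360mountains/ground.png");  // hypothetical helper + asset path
        //   MakeClothMesh(clothTexture);
        //   MakeGroundPlane(groundTexture);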
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150912/x360mountains




        //  ls sdcard/oculus/360photos/
        // "X:\vr\0000.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\0000.png" /sdcard/oculus/360photos/
        // 2649 KB/s (1085134 bytes in 0.400s)

        // "X:\vr\tr.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\tr.png" /sdcard/oculus/360photos/


        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\code.png" /sdcard/oculus/360photos/
        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\cone2.png" /sdcard/oculus/360photos/
        // "X:\vr\code.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150906/roomscanningeffectbyrosme

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150816/iss

        // https://www.youtube.com/watch?v=UWiq-qgedws
        // https://www.youtube.com/watch?v=TwRSOEG-Gx4
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection has been used in map creation since it was invented around 100 A.D. by Marinus of Tyre.

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe1.png"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe2.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globenight.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0000.png" "/sdcard/oculus/360photos/tape360iss.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0230.png" "/sdcard/oculus/360photos/tape360iss0230.png"

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\sh1\0000.png" "/sdcard/oculus/360photos/sh1.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push     "R:\vr\tape360columns\0000.png" "/sdcard/oculus/360photos/tape360columns.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\edge.png" "/sdcard/oculus/360photos/tape360columns.png"
        // 4041 KB/s (3248448 bytes in 0.785s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push      "X:\vr\terrain.png" "/sdcard/oculus/360photos/"

        // could we udp our 360 image from webgl to vr yet?

        // "R:\vr\tape360iss\0230.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst /D b:
        // subst b: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // subst a: z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // what if we want to do subst in another window station or session?

        // ColladaLoader: Empty or non-existing file (assets/x360mountains/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360mountains");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            //var vs0 = new TraceConeWithCRTByKlk.Shaders.Program360FragmentShader();
            //var vs0 = new FaceEdgeVertexByPaniq.Shaders.Program360FragmentShader();
            var vs0 = new ChromeShaderToyMountainsByHoskins.Shaders.Program360FragmentShader();


            // onframe needs syncs to enable GC!
            var vsync = default(TaskCompletionSource<object>);
            Func<bool> vsyncReady = delegate
            {

                if (vsync != null)
                    if (vsync.Task.IsCompleted)
                        return true;


                return false;
            };
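
            // note: vsyncReady() is true only once a vsync TaskCompletionSource exists and has been
            // completed; every per-face onframe handler below returns early in that case, which is
            // presumably how the capture code later in this constructor pauses shader painting.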



            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?

            // not responding...
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048; // 6 faces, ?
            //int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
                                                //int cubefacesize = 1024; // 6 faces, ?
                                                // "X:\vr\tape1\0000x2048.png"
                                                // for 60hz render we may want to use float camera precision, not available for ui.
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            // force laptop into preview. when can we have a button for it?
            //if (Environment.ProcessorCount < 8)
            //    cubefacesize = 64; // 6 faces, ?

            // fast gif?
            //cubefacesize = 128; // 6 faces, ?
            //cubefacesize = 512; // 6 faces, ?
            //    [GroupMarkerNotSet(crbug.com / 242999)!:247F0809]
            //RENDER WARNING: texture bound to texture unit 0 is not renderable.It maybe non-power-of-2 and have incompatible texture filtering.

            // can we keep fast fps yet highp?

            // can we choose this on runtime? designtime wants fast fps, yet for end product we want highdef on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
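            // scale factor so each full-resolution cube-face canvas previews at 128px in the UI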
            var uizoom = 128f / cubefacesize;


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";





            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying beyond the moon too much?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360mountains/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    //alpha = true,
                    preserveDrawingBuffer = true
                }
            );
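
            // preserveDrawingBuffer keeps the rendered pixels readable after compositing,
            // so the face canvases can later be read back (e.g. via toDataURL) for capture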

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);













            var bottomRotate100 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -314, max = 314, valueAsNumber = 0, title = "bottomRotate" }.AttachToDocument();


            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;

            // what if we want more than 30sec video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 137, title = "frameIDslider" }.AttachToDocument();
            frameIDslider.onchange += delegate { frameIDslider.title = "frameIDslider " + frameIDslider.valueAsNumber; };
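
            // the slider holds a frame index; each shader pass below converts it to seconds
            // (frameIDslider * 1 / 60.0f) and hands that to Paint_Image as the shader time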




            //var vs0 = new ChromeShaderToyRelentlessBySrtuss.Shaders.ProgramFragmentShader();
            //var vs0 = new TraceConeWithCRTByKlk.Shaders.ProgramFragmentShader();








            // left
            IHTMLCanvas shader0canvasPZ = null;

            // locCameraTargetOffset to look left?
            #region shader0canvasPZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which tostrings to the glsl code for gpu
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader0canvasPZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // left?
                      //forward=normalize(float3(0.0 , 0.0 ,1.0));
                  };


                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };



              }
          );
            #endregion





            // front
            IHTMLCanvas shader1canvasPX = null;

            #region shader1canvasPX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasPX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // off by 45deg__

                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // fixup
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 0.0f);


                      // front
                      //forward=normalize(float3(1.0 , 0.0 ,0.0));
                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion



            // back
            IHTMLCanvas shader1canvasNX = null;

            #region shader1canvasNX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasNX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);

                      // back
                      //forward=normalize(float3(-1.0 , 0.0 ,0.0));

                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            // right
            IHTMLCanvas shader2canvasNZ = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasNZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which tostrings to the glsl code for gpu
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);

                      // right
                      //forward=normalize(float3(0.0 , 0.0 ,-1.0));

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;
                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion












            // bottom
            IHTMLCanvas shader2canvasNY = null;

            // locCameraTargetOffset to look bottom?
            #region shader2canvasNY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which tostrings to the glsl code for gpu
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, -1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion











            // top
            IHTMLCanvas shader2canvasPY = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasPY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which tostrings to the glsl code for gpu
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasPY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, 1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };

                  Native.window.onframe += delegate
                  {
                      //d = a[0].CS___8__locals1.vsync != null;
                      //e = a[0].CS___8__locals1.vsync.kAcABp_b1ITCbIktNs3el5Q().dgQABqwxMjO1zVAJb5WXKA();


                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();

            // for render server
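            // programmatic offsets that applycameraoffset below adds on top of the slider values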
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                  // left?
                  1.0 * (camerax + fcamerax),
                   // height?
                   //0,
                   //1600,
                   //1024,

                   // if the camera is in the center, would we need to move the scene?
                   // we have to move the camera. as we move the scene the lights are messed up
                   //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                   // can we hover top of the map?
                   );
            };
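
            // each cube-face camera below appends its own re-orientation step to this multicast
            // Action, so invoking applycameraoffset() repositions every registered camera at once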


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = $"scale({uizoom})";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = $"scale({uizoom})";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = $"scale({uizoom})";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = $"scale({uizoom})";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = $"scale({uizoom})";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = $"scale({uizoom})";
            #endregion
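
            // The face canvases end up in a horizontal cross on the page,
            // at (column, row) in units of (uizoom * cubefacesize + 8), offset by cubecameraoffsetx:
            //
            //            PY (1,0)
            //   PX (0,1) PZ (1,1) NX (2,1) NZ (3,1)
            //            NY (1,2)
            //
            // and the matching cameras look at:
            //   PX -> (0,0,-1)   NX -> (0,0,1)
            //   PY -> (0,1,0)    NY -> (0,-1,0)
            //   PZ -> (-1,0,0)   NZ -> (1,0,0)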




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = $"scale({suizoom})";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
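
            // The fragment shader is expected to turn each pixel of the 3840x2160 output
            // into a direction and sample the cubemap uploaded below.
            // A minimal sketch of that equirectangular mapping (an assumption - the actual
            // GLSL lives in Shaders.ProgramFragmentShader):
            //
            //   // uv in [0..1] across the output frame
            //   var lon = (uv_x - 0.5) * 2.0 * Math.PI;   // -PI .. +PI
            //   var lat = (uv_y - 0.5) * Math.PI;         // -PI/2 .. +PI/2
            //   var dir = new THREE.Vector3(
            //       Math.Cos(lat) * Math.Sin(lon),
            //       Math.Sin(lat),
            //       Math.Cos(lat) * Math.Cos(lon));
            //   // color = textureCube(cubemap, dir);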




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);
            int files2count = 0;

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir.createReader();

                var files2 = await dir2r.readFileEntries();

                files2count = files2.Count();

                if (files2count > 0)
                {
                    new IHTMLPre { new { files2count } }.AttachToDocument();

                }
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to, what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                $"render {maxfps}hz {maxlengthseconds}sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;


                //frameIDslider.valueAsNumber = -1;
                frameIDslider.valueAsNumber = files2count - 1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".png";
                status = "rendering... " + new { filename };


                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                    await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode vs render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;




                beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this delay helps, or the fact that we made Blob static does.
                    //await Task.Delay(11);
                    await Task.Delay(33);
                    // gc at 260 happened twice?
                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion
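
            // Hand-shake between the capture loop above and the onframe handler further down:
            //
            //   capture loop                          Native.window.onframe
            //   ------------                          ---------------------
            //   vsync = new TaskCompletionSource();
            //   await vsync.Task;              <----  renders all six faces, repaints the
            //                                         equirectangular canvas, gl.flush(),
            //                                         then vsync.SetResult(null)
            //   dir.WriteAllBytes(filename, gl);
            //   advance frameIDslider, repeat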


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasPX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                floor2.AttachTo(scene);
            }



            // left?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader0canvasPZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0xff0000,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }





            // right?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, -cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }


            // back?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasNX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                floor2.position.set(cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);


                floor2.AttachTo(scene);
            }









            // bottom?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, -cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI + bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }



            // top?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasPY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }
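
            // Together the six textured planes above form an inside-out cube around the
            // origin: each face shows one of the shader canvases (shader1canvasPX,
            // shader0canvasPZ, shader2canvasNZ, shader1canvasNX, shader2canvasNY,
            // shader2canvasPY), so the cube cameras can film that content into the cubemap.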




            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await i.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(i.width, i.height);

                    cc.bytes = bytes;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var stars = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.9, 64, 64),
                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    scene.add(stars);
                }
           );
            #endregion




            new { }.With(
                   delegate
                   {



                       //dae.position.y = -80;

                       //dae.AttachTo(sceneg);
                       //scene.add(dae);
                       //oo.Add(dae);


                       //var rdysw = Stopwatch.StartNew();

                       //Console.WriteLine()

                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {



                               // let render man know..
                               if (vsyncReady())
                                   return;


                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                               //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                               //sphere.rotation.y += speed;
                               //clouds.rotation.y += speed;

                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads

                               applycameraoffset();
                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?



                               #region x
                               // upside down?
                               // are we ready?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
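
                               // the array order PX, NX, PY, NY, PZ, NZ matches the GL constants
                               // TEXTURE_CUBE_MAP_POSITIVE_X .. TEXTURE_CUBE_MAP_NEGATIVE_Z
                               // (34069..34074), so TEXTURE_CUBE_MAP_POSITIVE_X + index hits the
                               // intended face for each canvas.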

                               // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                               // let render man know..
                               if (vsync != null)
                                   if (!vsync.Task.IsCompleted)
                                       vsync.SetResult(null);
                           };


                   }
               );





            Console.WriteLine("do you see it?");
        }
Example n. 4
0
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150812/cssstereo

        //0001 02000178 ScriptCoreLib::ScriptCoreLib.Shared.BCLImplementation.System.Security.Cryptography.__MD5CryptoServiceProvider
        //script: error JSC1000: Java : Opcode not implemented: stind.i1 at ScriptCoreLib.Shared.BCLImplementation.System.Security.Cryptography.__MD5CryptoServiceProviderByMahmood.CreatePaddedBuffer
        //internal compiler error at method

        //       script: error JSC1000: Java :
        //BCL needs another method, please define it.
        //Cannot call type without script attribute :
        //System.Threading.Monitor for Void Enter(System.Object, Boolean ByRef) used at
        //WebGLVRCreativeLeadership.Activities.ApplicationWebServiceActivity+<>c__DisplayClass24.<CreateServer>b__29 at offset 0018.
        //If the use of this method is intended, an implementation should be provided with the attribute[Script(Implements = typeof(...)] set.You may have mistyped it.

        //		I/chromium(13596): [INFO: CONSOLE(97050)] "THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( undefined )", source: http://192.168.1.228:23993/view-source (97050)
        //I/chromium(13596): [INFO: CONSOLE(97050)] "THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( undefined )", source: http://192.168.1.228:23993/view-source (97050)

        public Application(com.abstractatech.apps.vr.HTML.Pages.IApp page)
        {
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self               = Native.self;
            dynamic self_chrome        = self.chrome;
            object  self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                ChromeTCPServer.TheServerWithAppWindow.Invoke(com.abstractatech.apps.vr.HTML.Pages.AppSource.Text);

                return;
            }
            #endregion

            // https://play.google.com/store/apps/details?id=com.abstractatech.vr
            // could we display LAN UDP notifications too. like
            // which youtube video is playing?

            Native.body.Clear();
            Native.body.style.margin          = "0px";
            Native.body.style.backgroundColor = "black";

            // https://vronecontest.zeiss.com/index.php?controller=ideas&view=show&id=652

            //          hResolution: 1920,
            //vResolution: 1080,

            // "X:\jsc.svn\examples\javascript\synergy\webgl\WebGLEquirectangularPanorama\WebGLEquirectangularPanorama.sln"

            // http://oculusstreetview.eu.pn/?lat=44.301987&lng=9.211561999999958&q=3&s=false&heading=0
            // https://github.com/troffmo5/OculusStreetView

            // http://stackoverflow.com/questions/23817633/threejs-using-a-sprite-with-the-oculusrifteffect
            // http://laht.info/dk2-parameters-for-three-oculusrifteffect-js/

            // http://stemkoski.github.io/Three.js/Sprites.html
            // http://stemkoski.github.io/Three.js/Texture-Animation.html
            // http://blog.thematicmapping.org/2013/10/terrain-visualization-with-threejs-and.html

            // http://mrdoob.github.io/three.js/examples/webgl_panorama_equirectangular.html

            var window = Native.window;

            var fov = 70.0;

            var camera = new THREE.PerspectiveCamera(fov,
                                                     window.aspect, 1, 1100);
            var target = new THREE.Vector3(0, 0, 0);

            //(camera as dynamic).target = target;

            var scene = new THREE.Scene();
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xafafaf));

            // http://www.html5canvastutorials.com/three/html5-canvas-webgl-ambient-lighting-with-three-js/

            // http://stackoverflow.com/questions/14717135/three-js-ambient-light-unexpected-effect

            scene.add(new THREE.AmbientLight(0x2f2f2f));


            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            #region light
            var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            //var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
            var lightOffset = new THREE.Vector3(
                2000,
                700,

                // lower makes longer shadows
                700.0
                );
            light.position.copy(lightOffset);
            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth  = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.3;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear   = 10;
            xlight.shadowCameraFar    = 10000;
            xlight.shadowBias         = 0.00001;
            xlight.shadowCameraRight  = 4000;
            xlight.shadowCameraLeft   = -4000;
            xlight.shadowCameraTop    = 4000;
            xlight.shadowCameraBottom = -4000;

            xlight.shadowCameraVisible = true;

            scene.add(light);
            #endregion

            var mesh = new THREE.Mesh(new THREE.SphereGeometry(500, 60, 40),
                                      new THREE.MeshBasicMaterial(new
            {
                map = THREE.ImageUtils.loadTexture(
                    new _2294472375_24a3b8ef46_o().src
                    //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                    //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                    )
            }));
            mesh.scale.x = -1;
            scene.add(mesh);
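
            // scale.x = -1 turns the sphere inside out so the equirectangular texture is
            // visible from the camera sitting at the center of the sphere.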

            #region sprite2
            var crateTexture = THREE.ImageUtils.loadTexture(
                new ChromeCreativeLeadership.HTML.Images.FromAssets.Mockup().src

                );

            var crateMaterial = new THREE.SpriteMaterial(
                new
            {
                map = crateTexture,
                useScreenCoordinates = false,
                //color = 0xff0000
                color = 0xffffff
            }
                );



            var sprite2 = new THREE.Sprite(crateMaterial);

            //floor
            //sprite2.position.set(0, -200, 0);

            // left
            //sprite2.position.set(200, 50, 0);

            sprite2.position.set(0, 0, 200);

            //sprite2.position.set(-100, 0, 0);
            sprite2.scale.set(
                _2294472375_24a3b8ef46_o.ImageDefaultWidth * 0.08,
                _2294472375_24a3b8ef46_o.ImageDefaultHeight * 0.08,
                //64, 64,
                1.0);                 // imageWidth, imageHeight
            scene.add(sprite2);
            #endregion


            #region ColladaAudiA4
            new ColladaAudiA4().Source.Task.ContinueWithResult(
                dae =>
            {
                // 90deg
                //dae.rotation.x = -Math.Cos(Math.PI);

                //dae.scale.x = 30;
                //dae.scale.y = 30;
                //dae.scale.z = 30;
                //dae.position.z = 65;

                // right
                dae.position.z = -20;
                dae.position.x = -100;
                dae.position.y = -90;


                // jsc, do we have ILObserver available yet?
                dae.scale.x = 1.0;
                dae.scale.y = 1.0;
                dae.scale.z = 1.0;

                //dae.position.y = -80;

                scene.add(dae);
                //oo.Add(dae);
            }
                );
            #endregion



            #region nexus7
            new nexus7().Source.Task.ContinueWithResult(
                dae =>
            {
                // 90deg
                dae.rotation.x = -Math.Cos(Math.PI);

                //dae.scale.x = 30;
                //dae.scale.y = 30;
                //dae.scale.z = 30;
                //dae.position.z = 65;

                // left
                //dae.position.x = 200;

                dae.position.z = -100;
                dae.position.y = -50;

                //dae.position.z = 100;



                // jsc, do we have ILObserver available yet?
                dae.scale.x = 13.5;
                dae.scale.y = 13.5;
                dae.scale.z = 13.5;

                //dae.position.y = -80;

                scene.add(dae);
                //oo.Add(dae);
            }
                );
            #endregion



            //          // DK2
            //          hResolution: 1920,
            //vResolution: 1080,

            var renderer = new THREE.WebGLRenderer();
            renderer.setSize(1920, 1080);


            // broken?
            var distortionK       = new double[] { 1.0, 0.22, 0.24, 0.0 };
            var chromaAbParameter = new double[] { 0.996, -0.004, 1.014, 0.0 };

            var HMD = new OculusRiftEffectOptions
            {
                hResolution = window.Width,
                vResolution = window.Height,

                hScreenSize            = 0.12576,
                vScreenSize            = 0.07074,
                interpupillaryDistance = 0.0635,
                lensSeparationDistance = 0.0635,
                eyeToScreenDistance    = 0.041,

                //  j.distortionK = [0, 1.875, -71.68, 1.595, -3.218644E+26, 1.615, 0, 0];
                //distortionK = new double[] { 1.0, 0.22, 0.24, 0.0 },
                distortionK = distortionK,

                // j.chromaAbParameter = [1.609382E+22, 1.874, -5.189695E+11, -0.939, 4.463059E-29, 1.87675, 0, 0];
                //chromaAbParameter = new double[] { 0.996, -0.004, 1.014, 0.0 }
                chromaAbParameter = chromaAbParameter
            };
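
            // How OculusRiftEffect.js usually consumes these values (a sketch, not verified
            // against the exact three.js revision bundled here): the warp shader scales the
            // per-eye radius r with the distortion polynomial
            //
            //   r' = r * (k0 + k1*r^2 + k2*r^4 + k3*r^6)      // k = distortionK
            //
            // and chromaAbParameter applies a per-channel correction, roughly
            //
            //   red  *= (c0 + c1*r^2)
            //   blue *= (c2 + c3*r^2)                          // c = chromaAbParameter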

            var effect = new THREE.OculusRiftEffect(
                renderer, new
            {
                worldScale = 100,

                //HMD
            }
                );

            effect.setSize(1920, 1080);


            renderer.domElement.AttachToDocument();

            //renderer.domElement.style.position = IStyle.PositionEnum.absolute;
            renderer.domElement.style.SetLocation(0, 0);

            Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;

            // x:\jsc.svn\examples\javascript\synergy\comanchebysiorki\comanchebysiorki\application.cs


            #region onresize
            new { }.With(
                async delegate
            {
                retry:

                var s = (double)Native.window.Width / 1920.0;


                Native.document.body.style.transform       = "scale(" + s + ")";
                Native.document.body.style.transformOrigin = "0% 0%";

                await Native.window.async.onresize;
                goto retry;
            }
                );
            #endregion



            //#region onresize
            //Native.window.onresize +=
            //    delegate
            //    {
            //        camera.aspect = Native.window.aspect;
            //        camera.updateProjectionMatrix();

            //        renderer.setSize(window.Width, window.Height);
            //        effect.setSize(window.Width, window.Height);
            //    };
            //#endregion


            //Native.document.body.onmousewheel +=
            //    e =>
            //    {
            //        fov -= e.WheelDirection * 5.0;
            //        camera.projectionMatrix.makePerspective(fov,
            //            (double)window.Width / window.Height, 1, 1100);
            //    };

            var lon0 = -45.0;
            var lon1 = 0.0;

            var lon = new sum(
                () => lon0,
                () => lon1
                );

            var lat0 = 0.0;
            var lat1 = 0.0;

            // or could we do it with byref or pointers?
            var lat = new sum(
                () => lat0,
                () => lat1
                );

            var phi   = 0.0;
            var theta = 0.0;

            var camera_rotation_z = 0.0;
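
            // lon/lat presumably drive the camera target the same way the stock
            // webgl_panorama_equirectangular example does (a sketch - the actual update
            // loop is further down, not shown here):
            //
            //   phi   = THREE.Math.degToRad(90 - lat);
            //   theta = THREE.Math.degToRad(lon);
            //   target.x = 500 * Math.Sin(phi) * Math.Cos(theta);
            //   target.y = 500 * Math.Cos(phi);
            //   target.z = 500 * Math.Sin(phi) * Math.Sin(theta);
            //   camera.lookAt(target);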


            var labove = new NotificationLayout();

            #region sprite2 above
            {
                labove.Message.innerText       = "VR CREATIVE LEADERSHIP";
                labove.layout.style.background = "";

                new { }.With(
                    async delegate
                {
                    retry:

                    // make it blink. gaze cursor is on it?


                    labove.box.style.background = "rgba(255,255,0,0.7)";
                    labove.box.setAttribute("invoke", "onmutation1");
                    await Task.Delay(1500);

                    // is mutation observer noticing it?
                    labove.box.style.background = "rgba(255,255,255,0.7)";
                    labove.box.setAttribute("invoke", "onmutation2");
                    await Task.Delay(1500);


                    goto retry;
                }
                    );



                var c = labove.AsCanvas();

                var xcrateTexture = new THREE.Texture(c);

                // like video texture
                Native.window.onframe += delegate { xcrateTexture.needsUpdate = true; };

                var xcrateMaterial = new THREE.SpriteMaterial(
                    new
                {
                    map = xcrateTexture,
                    useScreenCoordinates = false,
                    //color = 0xff0000
                    color = 0xffffff
                }
                    );



                var xsprite2 = new THREE.Sprite(xcrateMaterial);

                //floor
                //sprite2.position.set(0, -200, 0);

                // left
                xsprite2.position.set(200, 0, 0);

                //sprite2.position.set(0, 0, 200);

                //sprite2.position.set(-100, 0, 0);
                xsprite2.scale.set(
                    c.width * 0.5,
                    c.height * 0.5,
                    //64, 64,
                    1.0);                     // imageWidth, imageHeight
                scene.add(xsprite2);
            }
            #endregion



            //var lineTo = new List<THREE.Vector3>();
            var others = new
            {
                ui     = default(NotificationLayout),
                canvas = default(IHTMLCanvas),

                map = default(THREE.Texture),

                sprite = default(THREE.Sprite)
            }.ToEmptyList();

            #region add
            Action <NotificationLayout> add = ui =>
            {
                ui.layout.style.background = "";

                var canvas = ui.AsCanvas();
                var index  = others.Count;

                //ui.Message  += new { index };
                //ui.Message.innerText += new { index };

                //lbelow0.Message = new { THREE.REVISION }.ToString();

                // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( undefined )

                var map = new THREE.Texture(canvas);

                map.minFilter = THREE.LinearFilter;

                // like video texture

                var xcrateMaterial = new THREE.SpriteMaterial(
                    new
                {
                    map,
                    useScreenCoordinates = false,
                    //color = 0xff0000
                    color = 0xffffff
                }
                    );



                var sprite = new THREE.Sprite(xcrateMaterial);

                //floor
                //sprite2.position.set(0, -200, 0);

                // left middle
                //sprite2.position.set(200, 0, 0);
                //sprite.position.set(250, -50, 50);
                //lineTo.Add(sprite.position);



                //sprite2.position.set(0, 0, 200);

                //sprite2.position.set(-100, 0, 0);
                sprite.scale.set(
                    canvas.width * 0.5,
                    canvas.height * 0.5,
                    //64, 64,
                    1.0);                     // imageWidth, imageHeight
                scene.add(sprite);

                others.Add(
                    new
                {
                    ui,
                    canvas,
                    map,
                    sprite
                }
                    );

                var sw = Stopwatch.StartNew();

                Native.window.onframe += delegate
                {
                    // can we get some of the crazy c++ template bitmapbuffer code from the past?
                    map.needsUpdate = true;

                    var offset = Math.PI * 2 * ((double)(index + 1) / others.Count);
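
                    // each sprite orbits a circle of radius 120 in the y/z plane at x = 300;
                    // offset spreads the sprites evenly around that circle and the stopwatch
                    // drives a very slow rotation.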

                    sprite.position.x = 300;

                    sprite.position.z = Math.Sin(sw.ElapsedMilliseconds * 0.00001 + offset) * 120;
                    sprite.position.y = Math.Cos(sw.ElapsedMilliseconds * 0.00001 + offset) * 120;
                };
            };
            #endregion


            #region NotificationLayout
            add(
                new NotificationLayout
            {
                // keep attributes around?
                // like we do with XElement sync?
                Icon = new _0_10122014_ECAF4B1F638429B44F4B142C2A4F38EE().With(
                    x =>
                {
                    x.style.width  = "96px";
                    x.style.height = "96px";
                }
                    ),
                Message = "Advanced Mechanics by Portugal Design Lab"
            }
                );

            add(
                new NotificationLayout
            {
                // keep attributes around?
                // like we do with XElement sync?
                Icon = new _42_08122014_D2639E0AAA3E54E5F4568760AEE605EE().With(
                    x =>
                {
                    x.style.width  = "96px";
                    x.style.height = "96px";
                }
                    ),
                Message = "Face Value by mshariful"
            }
                );



            add(
                new NotificationLayout
            {
                // keep attributes around?
                // like we do with XElement sync?
                Icon = new _371_28122014_2045510F2F7974319A9F7E9F4B39DF07().With(
                    x =>
                {
                    x.style.width  = "96px";
                    x.style.height = "96px";
                }
                    ),
                Message = "Mind Wall. by Sumit Goski"
            }
                );
            #endregion



            #region lines
            {
                var geometry = new THREE.Geometry
                {
                    // how can we keep streaming vertex data points to the gpu?
                    vertices =

                        others.SelectMany(
                            lineTo =>
                            new[]
                    {
                        new THREE.Vector3(200, 0, 0),
                        lineTo.sprite.position
                    }
                            ).ToArray()

                        // https://github.com/mrdoob/three.js/wiki/Updates
                        // http://stackoverflow.com/questions/17842521/adding-geometry-to-a-three-js-mesh-after-render
                        //verticesNeedUpdate = true
                };

                Native.window.onframe += delegate { geometry.verticesNeedUpdate = true; };

                var o = new THREE.Line(geometry, new THREE.LineDashedMaterial(
                                           new { color = 0x00aaff, dashSize = 1, gapSize = 0.5, linewidth = 1 }
                                           ), THREE.LineStrip);

                //objects.Add(o);
                scene.add(o);
            }
            #endregion

            Native.window.onframe +=
                ee =>
            {
                // look we are having html in 3D in VR!



                //if (Native.document.pointerLockElement == Native.document.body)
                //    lon += 0.00;
                //else
                //    lon += 0.01;

                //lat = Math.Max(-85, Math.Min(85, lat));

                //Native.document.title = new { lon, lat }.ToString();
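
                // standard spherical-to-cartesian mapping: phi is the polar angle measured
                // from +Y (90 - lat), theta the azimuth (lon); the look-at target sits on a
                // sphere of radius 500 around the origin:
                //   x = r * sin(phi) * cos(theta)
                //   y = r * cos(phi)
                //   z = r * sin(phi) * sin(theta)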


                phi   = THREE.Math.degToRad(90 - lat);
                theta = THREE.Math.degToRad(lon);

                target.x = 500 * Math.Sin(phi) * Math.Cos(theta);
                target.y = 500 * Math.Cos(phi);
                target.z = 500 * Math.Sin(phi) * Math.Sin(theta);

                camera.lookAt(target);
                camera.rotation.z += camera_rotation_z;

                //renderer.render(scene, camera);
                effect.render(scene, camera);
            };



            // http://blog.thematicmapping.org/2013/10/terrain-visualization-with-threejs-and.html

            // http://stackoverflow.com/questions/13278087/determine-vertical-direction-of-a-touchmove



            var compassHeadingOffset      = 0.0;
            var compassHeadingInitialized = 0;

            #region compassHeading
            // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeading\TestCompassHeading\Application.cs
            Native.window.ondeviceorientation +=
                dataValues =>
            {
                // Convert degrees to radians
                var alphaRad = dataValues.alpha * (Math.PI / 180);
                var betaRad  = dataValues.beta * (Math.PI / 180);
                var gammaRad = dataValues.gamma * (Math.PI / 180);

                // Calculate equation components
                var cA = Math.Cos(alphaRad);
                var sA = Math.Sin(alphaRad);
                var cB = Math.Cos(betaRad);
                var sB = Math.Sin(betaRad);
                var cG = Math.Cos(gammaRad);
                var sG = Math.Sin(gammaRad);

                // Calculate A, B, C rotation components
                var rA = -cA * sG - sA * sB * cG;
                var rB = -sA * sG + cA * sB * cG;
                var rC = -cB * cG;
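
                // rA and rB are, roughly, the horizontal (east/north) components of the rotated
                // device axis expressed in the earth frame, so atan(rA / rB) plus the quadrant
                // fix below gives the compass azimuth (reading of the W3C worked example; not
                // re-derived here).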

                // Calculate compass heading
                var compassHeading = Math.Atan(rA / rB);

                // Convert from half unit circle to whole unit circle
                if (rB < 0)
                {
                    compassHeading += Math.PI;
                }
                else if (rA < 0)
                {
                    compassHeading += 2 * Math.PI;
                }

                /*
                 * Alternative calculation (replacing lines 99-107 above):
                 *
                 * var compassHeading = Math.atan2(rA, rB);
                 *
                 * if(rA < 0) {
                 * compassHeading += 2 * Math.PI;
                 * }
                 */

                // Convert radians to degrees
                compassHeading *= 180 / Math.PI;

                // Compass heading can only be derived if returned values are 'absolute'

                // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeadingWithReset\TestCompassHeadingWithReset\Application.cs

                //Native.document.body.innerText = new { compassHeading }.ToString();
                if (compassHeadingInitialized > 0)
                {
                    lon1 = compassHeading - compassHeadingOffset;
                }
                else
                {
                    compassHeadingOffset = compassHeading;
                    compassHeadingInitialized++;
                }
            };
            #endregion

            #region gamma
            Native.window.ondeviceorientation +=
                //e => Native.body.innerText = new { e.alpha, e.beta, e.gamma }.ToString();
                //e => lon = e.gamma;
                e =>
            {
                lat1 = e.gamma;

                // being able to service a running instance would be nice,
                // either by patching or just re-running the whole iteration in the background
                camera_rotation_z = e.beta * 0.02;
            };
            #endregion



            #region camera rotation
            var old = new { clientX = 0, clientY = 0 };

            Native.document.body.ontouchstart +=
                e =>
            {
                var n = new { e.touches[0].clientX, e.touches[0].clientY };
                old = n;
            };

            Native.document.body.ontouchmove +=
                e =>
            {
                var n = new { e.touches[0].clientX, e.touches[0].clientY };

                e.preventDefault();

                lon0 += (n.clientX - old.clientX) * 0.2;
                lat0 -= (n.clientY - old.clientY) * 0.2;

                old = n;
            };


            Native.document.body.onmousemove +=
                e =>
            {
                e.preventDefault();

                if (Native.document.pointerLockElement == Native.document.body)
                {
                    lon0 += e.movementX * 0.1;
                    lat0 -= e.movementY * 0.1;

                    //Console.WriteLine(new { lon, lat, e.movementX, e.movementY });
                }
            };


            Native.document.body.onmouseup +=
                e =>
            {
                //drag = false;
                e.preventDefault();
            };

            Native.document.body.onmousedown +=
                e =>
            {
                //e.CaptureMouse();

                //drag = true;
                e.preventDefault();
                Native.document.body.requestPointerLock();
            };


            Native.document.body.ondblclick +=
                e =>
            {
                e.preventDefault();

                Console.WriteLine("requestPointerLock");
            };

            #endregion

            Native.body.onmousewheel +=
                e =>
            {
                camera_rotation_z += 0.1 * e.WheelDirection;
            };
            // https://developer.android.com/training/system-ui/immersive.html
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server, we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360stereomidnightsun");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz render we may want to use float camera precision, not available for ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            if (Environment.ProcessorCount < 8)
                //cubefacesize = 64; // 6 faces, ?

                // fast gif?
                cubefacesize = 1024; // 6 faces, ?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high definition on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
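
            // uizoom scales each cube-face preview canvas so it shows at roughly 128px
            // on screen regardless of the cubefacesize actually being rendered.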


            Native.css.style.backgroundColor = "darkcyan";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying too far beyond the moon?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360stereomidnightsun/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0x0, 1);
            renderer0.setClearColor(0x0, 0);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, -X, +Y, -Y, +Z, -Z] face order



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // what if we want more than 30sec video? 2min animation? more frames to render? 2gb disk?





            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;
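
            // e.g. 120 seconds at 60 fps gives 7200 frames, which also becomes the
            // upper bound of the frameIDslider below.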

            var frameIDanimation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "frameIDanimation", @checked = false }.AttachToDocument();

            // what if we want more than 30sec video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 0, title = "frameIDslider" }.AttachToDocument();
            //var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();




            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            camerax.css.after.contentText = "x: ";
            new IHTMLBreak { }.AttachToDocument();

            //camerax.style.borderLeft = "1em solid red";

            // up. what's the highest a rocket can go, 120km?
            //new IHTMLHorizontalRule { }.AttachToDocument();

            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            cameray.css.after.contentText = "y: ";
            new IHTMLBreak { }.AttachToDocument();

            // we won't be going to orbit
            //new IHTMLBreak { }.AttachToDocument();
            //var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            //new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -2048 / 2, max = 0 + 2048 / 2, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            cameraz.css.after.contentText = "z: ";

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;


            new IHTMLHorizontalRule { }.AttachToDocument();





            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo

            // not used for this example tho...
            var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -180, max = 180, valueAsNumber = 0, title = "itemRotation" }.AttachToDocument();
            var spriteOffset = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 11, valueAsNumber = 0, title = "spriteOffset" }.AttachToDocument();
            //var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -90, max = 90, valueAsNumber = 33, title = "itemRotation" }.AttachToDocument();

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            //camerayHigh.onmousedown += delegate
            //{
            //    //if (camerayHigh.valueAsNumber > camerayHigh.min)
            //    cameray.valueAsNumber = cameray.max;
            //};


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;



                // we won't be going to orbit

                //if (cameray.valueAsNumber < cameray.max)
                //    camerayHigh.valueAsNumber = camerayHigh.min;

                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                //    cameray.valueAsNumber = cameray.max;

                //if (cameray.valueAsNumber == cameray.max)
                //    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200c
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
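            // preview canvases are laid out on a grid: each cell is (uizoom * cubefacesize + 8) px,
            // columns offset by cubecameraoffsetx; NY sits at column 1, row 2.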
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));

            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // let's have the item at twice the cube face size, and offset -0.5 to recenter.
            // this won't work as we are cloning the buffer for now!
            //var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize * 2);
            var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPXitem.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasPXitem.canvas.title = "item";
            canvasPXitem.canvas.AttachToDocument();
            canvasPXitem.canvas.style.transformOrigin = "0 0";
            canvasPXitem.canvas.style.transform = "scale(" + uizoom + ")";
            canvasPXitem.canvas.style.border = "1px solid yellow";

            //canvasPXitem.fillText("hello", 1, 1, cubefacesize);

            //canvasPXitem.drawImage(
            //    //new IHTMLPre { "hello" }
            //    new IHTMLDiv { "hello world. can we draw html into 360 VR yet?" }, 0, 0, cubefacesize, cubefacesize
            //);







            // http://www.w3schools.com/tags/canvas_fillstyle.asp
            canvasPXitem.fillStyle = "red";

            // too big?
            //canvasPXitem.fillRect(
            //    x: cubefacesize / 3,
            //    y: cubefacesize / 4,
            //    w: cubefacesize / 3,
            //    h: cubefacesize / 2
            //);



            // canvasPXitem.fillRect(
            //    x: (cubefacesize - cubefacesize / 6) / 2,
            //    y: (cubefacesize - cubefacesize / 3) / 2,

            //    w: cubefacesize / 6,
            //    h: cubefacesize / 3
            //);



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube

            // bots cannot get a bigger mp4 from yt, and vrideo renders 2k on gearvr.
            c.width = 3840;
            c.height = 2160;
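
            // 3840x2160 is 4K UHD; the top/bottom stereo pass below packs each eye
            // into one half of this height.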


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;
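
            // scale the full-size equirectangular canvas down to a ~480px-wide on-screen
            // preview; rendering still happens at the full width above.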

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);



            var frame2 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();

            frame2.style.height = "270px";
            frame2.style.width = "480px";
            frame2.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 * 2 + 16 * 2, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of "being there". That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion

            var vsync = default(TaskCompletionSource<object>);


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //var tex0 = new THREE.Texture(new moon());
            //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            var texPXitem = new THREE.Texture(

                //shader1canvas

                canvasPXitem.canvas

                ) { needsUpdate = true };
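
            // the 2D "item" canvas is used as a texture and refreshed every frame via
            // needsUpdate in applycameraoffset below - effectively a video texture.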


            var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            var floor2 = new THREE.Mesh(planeGeometry0,
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                new THREE.MeshPhongMaterial(
                    new
                    {

                        map = texPXitem,

                        transparent = true,
                        alphaTest = 0.5

                        //ambient = 0x00ff00,
                        //color = 0x00ff00
                    })

            );
            //floor2.position.set(0, 0, -cubefacesize  * 0.55);

            floor2.AttachTo(scene);

            applycameraoffset += delegate
            {
                texPXitem.needsUpdate = true;

                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.33, 0, 0);
                // floor2.position.set(-cubefacesize * 0.25, 0, 0);

                //floor2.position.set(-cubefacesize * 0.225, 0, 0);
                floor2.position.set(-cubefacesize * 0.23, 0, 0);

                // too close!
                //floor2.position.set(-cubefacesize * 0.20, 0, 0);

                floor2.rotation.set(0, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2 + radians(itemRotation.valueAsNumber));
            };






            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            new IHTMLBreak { }.AttachToDocument();

            var iskybox2 = new HTML.Images.FromAssets._2massAllskyGAMMA();
            var iskybox1 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs();

            var hideskybox1 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox1", @checked = true }.AttachToDocument();
            var hideskybox2 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox2", @checked = false }.AttachToDocument();

            #region drawStereoFrame
            Func<CanvasRenderingContext2D, Task> drawStereoFrame = async canvasTB =>
            {
                //var xIPD = 4.0;
                var xIPD = 6.0;
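
                // xIPD acts as half the stereo eye separation in scene units: the camera is
                // shifted by -xIPD for the top image and +xIPD for the bottom image in the
                // capture loop below (assuming the usual left-on-top TB convention).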

                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203/x360stereomidnightsun

                // fake skybox?
                canvasTB.fillStyle = "darkcyan";
                canvasTB.fillRect(0, 0, c.width, c.height);

                //canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                //canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // 12 frames in total. let's add them all
                // can we add a secondary stereo frame ? at 45deg?

                var offsetrotation = 360 / 12;
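
                // integer division: 360 / 12 = 30 degrees per slice, so the 12 captures
                // in the loop below sweep the full panorama.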

                hideskybox1.@checked = true;
                hideskybox2.@checked = false;

                // mono bg!
                floor2.visible = false;
                fcamerax = 0;
                await Native.window.async.onframe;
                await Native.window.async.onframe;

                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // keep only bg. hide stereo sprite
                floor2.visible = true;

                //await Native.window.async.onframe;


                // we need our stereo item frame thanks. no bg.
                hideskybox1.@checked = true;
                hideskybox2.@checked = true;

                await Native.window.async.onframe;






                //fcamerax = -xIPD;
                //await Native.window.async.onframe;
                //var stereoT = new IHTMLImage { src = gl.canvas.toDataURL() };

                //fcamerax = +xIPD;

                //await Native.window.async.onframe;
                //var stereoB = new IHTMLImage { src = gl.canvas.toDataURL() };

                ////await Native.window.async.onframe;
                //await stereoB.async.oncomplete;


                // we now have a stereo sprite.
                // can we rotate it on top of the background?


                // 8K fulldome is a resolution of 8192×8192 
                // 8K UHD is a resolution of 7680 × 4320 (33.2 megapixels) 
                // 8192×4320
                // Digital video formats with resolutions of 4K (3840×2160) and 8K (7680×4320)


                // WebGL: CONTEXT_LOST_WEBGL: loseContext: context lost ?
                for (int stereoframei = 0; stereoframei < 12; stereoframei++)
                {
                    spriteOffset.valueAsNumber = stereoframei;

                    Console.WriteLine(new { stereoframei });
                    double ioffsetdeg = offsetrotation * stereoframei;


                    ioffsetdeg += (degrees(frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2));


                    // follow the moon?
                    //stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                    //    frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                    //);


                    var ipxoffset = (int)Math.Floor(c.width * ioffsetdeg / 360);

                    ipxoffset = ipxoffset % c.width;
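
                    // ioffsetdeg maps to a horizontal pixel offset in the equirectangular frame
                    // (360 degrees == c.width px); the second drawImage call, shifted by -c.width,
                    // fills in the wraparound.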

                    fcamerax = -xIPD;
                    await Native.window.async.onframe;
                    var stereoT = gl.canvas;
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, ipxoffset, 0, c.width, c.height / 2);
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, -c.width + ipxoffset, 0, c.width, c.height / 2);

                    fcamerax = +xIPD;
                    await Native.window.async.onframe;
                    var stereoB = gl.canvas;
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, ipxoffset, c.height / 2, c.width, c.height / 2);
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, -c.width + ipxoffset, c.height / 2, c.width, c.height / 2);
                }


                //var canvasTB8K = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                //canvasTB8K.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB8K.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);

                // https://www.reddit.com/r/GearVR/comments/2vrfyu/id_suggest_makers_of_360_videos_make_them_the/
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                // can we actually watch stereo _TB images on gearVR?

            };
            #endregion

            #region stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151112
            new IHTMLButton { "make me a stero TB image " }.AttachToDocument().With(
                async e =>
                {
                    // http://www.vrideo.com/watch/ALdE7mm
                    // https://www.youtube.com/watch?v=S3iTPxMIlCI

                    var onclick = e.async.onclick;

                    while (await onclick)
                    {




                        // keep it 4K, as hw, yt is not ready for 60fps 8K!
                        var canvasTB = new CanvasRenderingContext2D(c.width, c.height);

                        // await the async draw so the canvas is complete before it is read back below
                        await drawStereoFrame(canvasTB);

                        // gearVR will get a black screen
                        // 
                        //frame2.src = canvasTB8K.canvas.toDataURL();
                        frame2.src = canvasTB.canvas.toDataURL();


                        onclick = e.async.onclick;

                        //while (!onclick.IsCompleted)
                        //{
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f0.src;
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f1.src;
                        //}
                    }
                }
            );
            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                //"render 60hz 30sec"
                //$"render {maxfps}hz {maxlengthseconds}sec"
                "render " + new {maxfps} + "hz " + new {maxlengthseconds} + "sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;

                //var canvasTB = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                var canvasTB = new CanvasRenderingContext2D(c.width, c.height);


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                //frameIDanimation.@checked = true;
                frameIDslider.valueAsNumber = -1;

                // allow the animation values to sink in
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;



                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };

                await drawStereoFrame(canvasTB);

                //var xIPD = 4.0;


                //// left eye
                //fcamerax = -xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };


                //// right eye
                //fcamerax = +xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f1 = new IHTMLImage { src = gl.canvas.toDataURL() };
                //await f1.async.oncomplete;



                //canvasTB.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);


                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir == null)
                {
                    frame2.src = canvasTB.canvas.toDataURL();

                    await Task.Delay(500);
                }
                else
                    await dir.WriteAllBytes(filename, canvasTB);
                //await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 60;
                else
                    frameIDslider.valueAsNumber++;
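                // in design mode (reduced cubefacesize) the slider skips 60 frames per capture,
                // i.e. one preview image per second of the final 60hz footage;
                // at full cubefacesizeMAX every frame is captured.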




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this delay helps, or the fact that we made the Blob static.
                    await Task.Delay(11);

                    goto await_nextframe;
                }
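                // the goto labels above implement a plain capture loop; a minimal sketch of
                // the same control flow without goto (same names as above, illustration only;
                // initial slider value, filename padding and the dir == null fallback behave
                // as in the code above):
                //
                //  while (frameIDslider.valueAsNumber < maxframes)
                //  {
                //      await Task.Delay(11);
                //      await drawStereoFrame(canvasTB);
                //      await dir.WriteAllBytes(frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg", canvasTB);
                //      frameIDslider.valueAsNumber += (cubefacesize < cubefacesizeMAX) ? 60 : 1;
                //  }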

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion



            new { }.With(
                async delegate
                {

                    var tex1 = new the_midnight_sun_by_isilmetriel { };

                    await tex1.async.oncomplete;

                    // the first one is 124 wide, while the others are 123?
                    var tex1w = 123;
                    //var tex1w = 120;
                    var tex1h = 626;

                    //canvasPXitem.drawImage(
                    //           tex1, 2, 2, tex1w, tex1h, 0, 0, tex1w, tex1h
                    //       );


                    // how long until jsc can upstream small updates to code?



                    //// canvasPXitem.drawImage(
                    ////     (IHTMLCanvas)renderer0.domElement,



                    ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    sw: cubefacesize / 6,
                    ////    sh: cubefacesize / 3,

                    ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    dw: cubefacesize / 6,
                    ////    dh: cubefacesize / 3
                    ////);





                    await iskybox2.async.oncomplete;

                    var bytes1 = await iskybox1.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(iskybox1.width, iskybox1.height);

                    cc.bytes = bytes1;

                    //s.image = cc;
                    //s.needsUpdate = true;

                    var skybox1_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = new THREE.Texture { image = cc, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var skybox1 = new THREE.Mesh(
                        //new THREE.SphereGeometry(far * 0.92, 64, 64),
                        //new THREE.SphereGeometry(far * 0.80, 64, 64),

                            // still zfighting
                        //new THREE.SphereGeometry(far * 0.50, 64, 64),

                            // the other option is to have a single bg and blend on that. this is just a rotation visualization.
                            new THREE.SphereGeometry(far * 0.30, 64, 64),
                           skybox1_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    hideskybox1.onchange += delegate
                    {
                        skybox1.visible = !hideskybox1.@checked;
                    };
                    skybox1.visible = !hideskybox1.@checked;


                    scene.add(skybox1);




                    applycameraoffset += delegate
                    {
                        if (frameIDanimation.@checked)
                        {
                            itemRotation.valueAsNumber = (frameIDslider.valueAsNumber / 2) % 360 - 180;

                            hideskybox1.@checked = (frameIDslider.valueAsNumber / 2 + 180) % 720 < 360;
                        }
                    };
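                    // with frameIDanimation enabled the frame ID drives the shot:
                    // itemRotation advances half a degree per frame (a full -180..180 sweep
                    // every 720 frames, i.e. 12s at 60hz), while hideskybox1 starts checked
                    // and then toggles roughly every 720 frames, so the two skyboxes alternate.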


                    // target bg
                    var skybox2 = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.95, 64, 64),
                           new THREE.MeshBasicMaterial(
                            new
                            {
                                map = new THREE.Texture { image = iskybox2, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            })
                    );


                    skybox2.AttachTo(scene).With(
                        stars =>
                        {
                            applycameraoffset += delegate
                              {

                                  skybox2.visible = !hideskybox2.@checked;



                                  skybox1.rotation.set(0, 0, 0);
                                  // spin
                                  skybox1.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                     frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                 );

                                  // reset
                                  stars.rotation.set(0, 0, 0);

                                  // slow rotate in place
                                  stars.rotateOnAxis(new THREE.Vector3(1, 0, 0),
                                      frameIDslider.valueAsNumber / 3600.0 * Math.PI * 2
                                  );

                                  // follow the moon?
                                  stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                      frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                  );
                              };
                        }

                    );
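                    // rotation rates implied by the expressions above (at 60hz):
                    // skybox1 yaws one full turn every 60 * 60 / 5 = 720 frames (12s),
                    // stars tilt around X once every 3600 frames (60s),
                    // and also get the same 720-frame yaw so the backdrop follows the moon pass.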

                    // can we get our stereo sprite this way?
                    // do we get clean tiles with transparency?
                    // on x83 on frame0 we were able to hide skybox. how?
                    //hideskybox2.onchange += delegate
                    //{
                    //    skybox2.visible = !hideskybox2.@checked;
                    //};
                    //skybox2.visible = !hideskybox2.@checked;



                    Console.WriteLine("skybox added");






                    //dae.position.y = -80;

                    //dae.AttachTo(sceneg);
                    //scene.add(dae);
                    //oo.Add(dae);




                    // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                    // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                    Native.window.onframe +=
                        e =>
                        {
                            // let render man know..
                            if (vsync != null)
                                if (vsync.Task.IsCompleted)
                                    return;


                            //if (pause) return;
                            //if (pause.@checked)
                            //    return;


                            // can we float out of frame?
                            // haha. a bit too flickery.
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                            //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                            //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                            //sphere.rotation.y += speed;
                            //clouds.rotation.y += speed;

                            // manual rebuild?
                            // red compiler notifies laptop chrome of pending update
                            // app reloads

                            applycameraoffset();
                            renderer0.clear();





                            // spriteOffset
                            canvasPXitem.drawImage(
                                       tex1,

                                       //2px black border!
                                //((tex1w + 4) * spriteOffset.valueAsNumber) + 2,
                                       ((tex1w + 2) * spriteOffset.valueAsNumber) + 3,

                                       2, tex1w, tex1h,

                                       // dest
                                       (cubefacesize - tex1w) / 2,
                                       (cubefacesize - tex1h) / 2,

                                       tex1w, tex1h
                                   );



                            //rendererPY.clear();

                            //cameraPX.aspect = canvasPX.aspect;
                            //cameraPX.updateProjectionMatrix();

                            // um what does this do?
                            //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                            // mousewheel allows the camera to move closer
                            // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                            //this.targetPX.x += 1;
                            //this.targetNX.x -= 1;

                            //this.targetPY.y += 1;
                            //this.targetNY.y -= 1;

                            //this.targetPZ.z += 1;
                            //this.targetNZ.z -= 1;

                            // how does the 360 or shadertoy want our cubemaps?


                            // and then rotate right?

                            // how can we render cubemap?


                            // hide everything else


                            // inversion effect?
                            //// if (hideskybox1.@checked)
                            ////     skybox1.visible = true;
                            //// else
                            ////     skybox1.visible = false;

                            //// floor2.visible = false;
                            //// renderer0.render(scene, cameraPX);
                            //// //canvasPXitem.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);


                            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                            // can we draw from that special image?


                            //canvasPXitem.drawImage(
                            //    tex1, 2, 2, 124, 630 - 4, 0, 0, 124, 626
                            //);

                            //// canvasPXitem.drawImage(
                            ////     (IHTMLCanvas)renderer0.domElement,



                            ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    sw: cubefacesize / 6,
                            ////    sh: cubefacesize / 3,

                            ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    dw: cubefacesize / 6,
                            ////    dh: cubefacesize / 3
                            ////);


                            //// skybox1.visible = !hideskybox1.@checked;
                            //// floor2.visible = true;


                            #region x
                            canvasPX.clearRect(0, 0, cubefacesize, cubefacesize);
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNX.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z

                            canvasPZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            canvasPY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            canvasNY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                            }.WithEachIndex(
                             (img, index) =>
                             {
                                 gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                 //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                 gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                 // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                 // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                 gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                             }
                          );
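                            // the face upload above relies on the WebGL cube-map constants being
                            // consecutive (TEXTURE_CUBE_MAP_POSITIVE_X = 34069 .. NEGATIVE_Z = 34074),
                            // so the canvas order PX, NX, PY, NY, PZ, NZ maps index 0..5 onto the
                            // matching face via TEXTURE_CUBE_MAP_POSITIVE_X + index.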


                            //if (cameraz.valueAsNumber == 0)
                            gl.clearColor(0, 0, 0, 0);
                            //else
                            //gl4K.clearColor(0, 0, 0, 1);

                            gl.clear(gl.COLOR_BUFFER_BIT);


                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                            pass.Paint_Image(
                               0,

                               0,
                               0,
                               0,
                               0
                                //,

                          // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                          );

                            //paintsw.Stop();


                            // what does it do?
                            gl.flush();

                            // let render man know..
                            if (vsync != null)
                                if (!vsync.Task.IsCompleted)
                                    vsync.SetResult(null);
                        };





                    Console.WriteLine("do you see it?");

                }
           );




        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150824

            // http://stackoverflow.com/questions/14103986/canvas-and-spritematerial

            // X:\jsc.svn\examples\javascript\WebGL\WebGLSVGAnonymous\WebGLSVGAnonymous\Application.cs
            // X:\jsc.svn\examples\javascript\WebGL\WebGLVRCreativeLeadership\WebGLVRCreativeLeadership\Application.cs
            // X:\jsc.svn\examples\javascript\WebGL\WebGLSVGSprite\WebGLSVGSprite\Application.cs

            //var l = new NotificationLayout().layout;

            //l.AttachToDocument();

            // : INodeConvertible<IHTMLElement>
            //var c = (IHTMLCanvas)l.layout;
            //var c = (IHTMLCanvas)l;

            // look we have a databound 2D image!

            // make it implicit operator for assetslibrary?

            //l.style



            //c.AttachToDocument();



            // https://play.google.com/store/apps/details?id=com.abstractatech.vr
            // could we display LAN UDP notifications too. like
            // which youtube video is playing?

            Native.body.Clear();
            Native.body.style.margin = "0px";
            Native.body.style.backgroundColor = "black";

            // https://vronecontest.zeiss.com/index.php?controller=ideas&view=show&id=652

            //          hResolution: 1920,
            //vResolution: 1080,

            // "X:\jsc.svn\examples\javascript\synergy\webgl\WebGLEquirectangularPanorama\WebGLEquirectangularPanorama.sln"

            // http://oculusstreetview.eu.pn/?lat=44.301987&lng=9.211561999999958&q=3&s=false&heading=0
            // https://github.com/troffmo5/OculusStreetView

            // http://stackoverflow.com/questions/23817633/threejs-using-a-sprite-with-the-oculusrifteffect
            // http://laht.info/dk2-parameters-for-three-oculusrifteffect-js/

            // http://stemkoski.github.io/Three.js/Sprites.html
            // http://stemkoski.github.io/Three.js/Texture-Animation.html
            // http://blog.thematicmapping.org/2013/10/terrain-visualization-with-threejs-and.html

            // http://mrdoob.github.io/three.js/examples/webgl_panorama_equirectangular.html

            var window = Native.window;

            var fov = 70.0;

            var camera = new THREE.PerspectiveCamera(fov,
                1920.0 / 1080.0,
                //window.aspect,
                1, 1100);
            var target = new THREE.Vector3(0, 0, 0);

            //(camera as dynamic).target = target;

            var scene = new THREE.Scene();
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xafafaf));

            // http://www.html5canvastutorials.com/three/html5-canvas-webgl-ambient-lighting-with-three-js/

            // http://stackoverflow.com/questions/14717135/three-js-ambient-light-unexpected-effect

            scene.add(new THREE.AmbientLight(0x2f2f2f));


            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            #region light
            var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            //var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
            var lightOffset = new THREE.Vector3(
                2000,
                700,

                // lower makes longer shadows 
                700.0
                );
            light.position.copy(lightOffset);
            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.3;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear = 10;
            xlight.shadowCameraFar = 10000;
            xlight.shadowBias = 0.00001;
            xlight.shadowCameraRight = 4000;
            xlight.shadowCameraLeft = -4000;
            xlight.shadowCameraTop = 4000;
            xlight.shadowCameraBottom = -4000;

            xlight.shadowCameraVisible = true;

            scene.add(light);
            #endregion

            var mesh = new THREE.Mesh(new THREE.SphereGeometry(500, 60, 40),
                new THREE.MeshBasicMaterial(new
                {
                    map = THREE.ImageUtils.loadTexture(
                        new _2294472375_24a3b8ef46_o().src
                        //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                        //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                        )
                }));
            mesh.scale.x = -1;
            scene.add(mesh);

            var labove = new NotificationLayout();

            #region sprite2 above
            {
                labove.Message.innerText = "VR CREATIVE LEADERSHIP";
                labove.layout.style.background = "";

                new { }.With(
                    async delegate
                    {
                        retry:

                        // make it blink. gaze cursor is on it?


                        labove.box.style.background = "rgba(255,255,0,0.7)";
                        labove.box.setAttribute("invoke", "onmutation1");
                        await Task.Delay(1500);

                        // is mutation observer noticing it?
                        labove.box.style.background = "rgba(255,255,255,0.7)";
                        labove.box.setAttribute("invoke", "onmutation2");
                        await Task.Delay(1500);


                        goto retry;
                    }
                );



                var c = labove.AsCanvas();

                var xcrateTexture = new THREE.Texture(c);

                // like video texture
                Native.window.onframe += delegate { xcrateTexture.needsUpdate = true; };

                var xcrateMaterial = new THREE.SpriteMaterial(
                    new
                    {
                        map = xcrateTexture,
                        useScreenCoordinates = false,
                        //color = 0xff0000
                        color = 0xffffff
                    }
            );



                var xsprite2 = new THREE.Sprite(xcrateMaterial);

                //floor
                //sprite2.position.set(0, -200, 0);

                // left
                xsprite2.position.set(200, 0, 0);

                //sprite2.position.set(0, 0, 200);

                //sprite2.position.set(-100, 0, 0);
                xsprite2.scale.set(
                   c.width * 0.5,
                   c.height * 0.5,
                    //64, 64,
                    1.0); // imageWidth, imageHeight
                scene.add(xsprite2);
            }
            #endregion




            //var lineTo = new List<THREE.Vector3>();
            var others = new
            {
                ui = default(NotificationLayout),
                canvas = default(IHTMLCanvas),

                map = default(THREE.Texture),

                sprite = default(THREE.Sprite)

            }.ToEmptyList();

            #region add
            Action<NotificationLayout> add = ui =>
            {
                ui.layout.style.background = "";

                var canvas = ui.AsCanvas();
                var index = others.Count;

                //ui.Message  += new { index };
                //ui.Message.innerText += new { index };

                //lbelow0.Message = new { THREE.REVISION }.ToString();

                // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( undefined )

                var map = new THREE.Texture(canvas);

                map.minFilter = THREE.LinearFilter;
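                // a canvas-backed texture is usually not power-of-two, so forcing
                // THREE.LinearFilter here avoids mipmap generation and silences the
                // NPOT warning quoted above.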

                // like video texture

                var xcrateMaterial = new THREE.SpriteMaterial(
                    new
                    {
                        map,
                        useScreenCoordinates = false,
                        //color = 0xff0000
                        color = 0xffffff
                    }
                );



                var sprite = new THREE.Sprite(xcrateMaterial);

                //floor
                //sprite2.position.set(0, -200, 0);

                // left middle
                //sprite2.position.set(200, 0, 0);
                //sprite.position.set(250, -50, 50);
                //lineTo.Add(sprite.position);




                //sprite2.position.set(0, 0, 200);

                //sprite2.position.set(-100, 0, 0);
                sprite.scale.set(
                   canvas.width * 0.5,
                   canvas.height * 0.5,
                    //64, 64,
                    1.0); // imageWidth, imageHeight
                scene.add(sprite);

                others.Add(
                    new
                    {
                        ui,
                        canvas,
                        map,
                        sprite
                    }
                );

                var sw = Stopwatch.StartNew();

                Native.window.onframe += delegate
                {
                    // can we get some of the crazy c++ template bitmapbuffer code from the past?
                    map.needsUpdate = true;

                    var offset = Math.PI * 2 * ((double)(index + 1) / others.Count);

                    sprite.position.x = 300;

                    sprite.position.z = Math.Sin(sw.ElapsedMilliseconds * 0.00001 + offset) * 120;
                    sprite.position.y = Math.Cos(sw.ElapsedMilliseconds * 0.00001 + offset) * 120;
                };
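                // orbit math: offset spreads the sprites evenly around the circle
                // (recomputed against others.Count each frame, so spacing settles once all
                // sprites are added); 0.00001 rad/ms means one full orbit takes about
                // 2 * PI / 0.00001 ms, roughly 10.5 minutes, on a 120-unit circle at x = 300.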

            };
            #endregion


            add(
                new NotificationLayout
                {
                    // keep attributes around?
                    // like we do with XElement sync?
                    Icon = new HTML.Images.FromAssets._0_10122014_ECAF4B1F638429B44F4B142C2A4F38EE().With(
                        x =>
                        {
                            x.style.width = "96px";
                            x.style.height = "96px";
                        }
                    ),
                    Message = "Advanced Mechanics by Portugal Design Lab"
                }
              );

            add(
                   new NotificationLayout
                   {
                       // keep attributes around?
                       // like we do with XElement sync?
                       Icon = new HTML.Images.FromAssets._42_08122014_D2639E0AAA3E54E5F4568760AEE605EE().With(
                           x =>
                           {
                               x.style.width = "96px";
                               x.style.height = "96px";
                           }
                       ),
                       Message = "Face Value by mshariful"
                   }
                 );



            add(
                   new NotificationLayout
                   {
                       // keep attributes around?
                       // like we do with XElement sync?
                       Icon = new HTML.Images.FromAssets._371_28122014_2045510F2F7974319A9F7E9F4B39DF07().With(
                           x =>
                           {
                               x.style.width = "96px";
                               x.style.height = "96px";
                           }
                       ),
                       Message = "Mind Wall. by Sumit Goski"
                   }
                 );


            #region lines
            {
                var geometry = new THREE.Geometry
                {
                    // how can we keep streaming vertices data points to gpu?
                    vertices =

                        others.SelectMany(
                            lineTo =>
                                new[]
                                {
                                    new THREE.Vector3(200, 0, 0),
                                    lineTo.sprite.position
                                }
                        ).ToArray()

                    // https://github.com/mrdoob/three.js/wiki/Updates
                    // http://stackoverflow.com/questions/17842521/adding-geometry-to-a-three-js-mesh-after-render
                    //verticesNeedUpdate = true
                };

                Native.window.onframe += delegate { geometry.verticesNeedUpdate = true; };

                var o = new THREE.Line(geometry, new THREE.LineDashedMaterial(
                    new { color = 0x00aaff, dashSize = 1, gapSize = 0.5, linewidth = 1 }
                    ), THREE.LineStrip);

                //objects.Add(o);
                scene.add(o);
            }
            #endregion


            //          // DK2
            //          hResolution: 1920,
            //vResolution: 1080,

            var renderer = new THREE.WebGLRenderer();
            renderer.setSize(1920, 1080);

            #region HMD
            // broken?
            var distortionK = new double[] { 1.0, 0.22, 0.24, 0.0 };
            var chromaAbParameter = new double[] { 0.996, -0.004, 1.014, 0.0 };

            var HMD = new OculusRiftEffectOptions
            {
                hResolution = window.Width,
                vResolution = window.Height,

                hScreenSize = 0.12576,
                vScreenSize = 0.07074,
                interpupillaryDistance = 0.0635,
                lensSeparationDistance = 0.0635,
                eyeToScreenDistance = 0.041,

                //  j.distortionK = [0, 1.875, -71.68, 1.595, -3.218644E+26, 1.615, 0, 0];
                //distortionK = new double[] { 1.0, 0.22, 0.24, 0.0 },
                distortionK = distortionK,

                // j.chromaAbParameter = [1.609382E+22, 1.874, -5.189695E+11, -0.939, 4.463059E-29, 1.87675, 0, 0];
                //chromaAbParameter = new double[] { 0.996, -0.004, 1.014, 0.0 }
                chromaAbParameter = chromaAbParameter

            };
            #endregion


            //var effect = new THREE.OculusRiftEffect(
            //    renderer, new
            //    {
            //        worldScale = 100,

            //        //HMD
            //    }
            //    );

            //effect.setSize(1920, 1080);


            renderer.domElement.AttachToDocument();

            //renderer.domElement.style.position = IStyle.PositionEnum.absolute;
            renderer.domElement.style.SetLocation(0, 0);

            Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;

            // x:\jsc.svn\examples\javascript\synergy\comanchebysiorki\comanchebysiorki\application.cs

            new { }.With(
                     async delegate
                     {
                         retry:

                         var s = (double)Native.window.Width / 1920.0;


                         Native.document.body.style.transform = "scale(" + s + ")";
                         Native.document.body.style.transformOrigin = "0% 0%";

                         await Native.window.async.onresize;
                         goto retry;
                     }
                   );
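            // the scale factor tracks the window: s = Width / 1920, so e.g. a 960px-wide
            // window (an assumed example) gives s = 0.5 and the fixed 1920x1080 renderer
            // output is shrunk to fit; the loop re-applies it on every resize.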

            //#region onresize
            //Native.window.onresize +=
            //    delegate
            //    {
            //        camera.aspect = Native.window.aspect;
            //        camera.updateProjectionMatrix();

            //        renderer.setSize(window.Width, window.Height);
            //        effect.setSize(window.Width, window.Height);
            //    };
            //#endregion


            //Native.document.body.onmousewheel +=
            //    e =>
            //    {
            //        fov -= e.WheelDirection * 5.0;
            //        camera.projectionMatrix.makePerspective(fov,
            //            (double)window.Width / window.Height, 1, 1100);
            //    };

            var lon0 = -45.0;
            var lon1 = 0.0;

            var lon = new sum(
                 () => lon0,
                 () => lon1
             );

            var lat0 = 0.0;
            var lat1 = 0.0;

            // or could we do it with byref or pointers?
            var lat = new sum(
                () => lat0,
                () => lat1
            );
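            // `sum` lets two independent inputs (touch/mouse and device orientation)
            // contribute to one angle. the actual type is defined elsewhere in this
            // project; a minimal hypothetical sketch of what it could look like:
            //
            //   class sum
            //   {
            //       readonly Func<double>[] parts;                  // hypothetical helper
            //       public sum(params Func<double>[] parts) { this.parts = parts; }
            //       public static implicit operator double(sum s)
            //       {
            //           var total = 0.0;
            //           foreach (var p in s.parts) total += p();    // evaluated lazily on read
            //           return total;
            //       }
            //   }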

            var phi = 0.0;
            var theta = 0.0;

            //var controls = new THREE.OrbitControls(camera);

            var camera_rotation_z = 0.0;

            //Native.document.onmousemove +=
            //  e =>
            //  {

            //      l.Message = new { e.CursorX, e.CursorY }.ToString();
            //  };


            Native.window.onframe +=
                        ee =>
                        {
                            //labove.Message = new
                            //{
                            //    lon,
                            //    lat,
                            //    ee.counter
                            //}.ToString();

                            //if (Native.document.pointerLockElement == Native.document.body)
                            //    lon += 0.00;
                            //else
                            //    lon += 0.01;

                            //lat = Math.Max(-85, Math.Min(85, lat));

                            //Native.document.title = new { lon, lat }.ToString();


                            phi = THREE.Math.degToRad(90 - lat);
                            theta = THREE.Math.degToRad(lon);

                            target.x = 500 * Math.Sin(phi) * Math.Cos(theta);
                            target.y = 500 * Math.Cos(phi);
                            target.z = 500 * Math.Sin(phi) * Math.Sin(theta);


                            //controls.update();
                            //camera.position = controls.center.clone();


                            camera.lookAt(target);
                            camera.rotation.z += camera_rotation_z;

                            renderer.render(scene, camera);

                            //effect.render(scene, camera);
                        };



            // http://blog.thematicmapping.org/2013/10/terrain-visualization-with-threejs-and.html

            // http://stackoverflow.com/questions/13278087/determine-vertical-direction-of-a-touchmove



            var compassHeadingOffset = 0.0;
            var compassHeadingInitialized = 0;

            #region compassHeading
            // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeading\TestCompassHeading\Application.cs
            Native.window.ondeviceorientation +=
                          dataValues =>
                          {
                              // Convert degrees to radians
                              var alphaRad = dataValues.alpha * (Math.PI / 180);
                              var betaRad = dataValues.beta * (Math.PI / 180);
                              var gammaRad = dataValues.gamma * (Math.PI / 180);

                              // Calculate equation components
                              var cA = Math.Cos(alphaRad);
                              var sA = Math.Sin(alphaRad);
                              var cB = Math.Cos(betaRad);
                              var sB = Math.Sin(betaRad);
                              var cG = Math.Cos(gammaRad);
                              var sG = Math.Sin(gammaRad);

                              // Calculate A, B, C rotation components
                              var rA = -cA * sG - sA * sB * cG;
                              var rB = -sA * sG + cA * sB * cG;
                              var rC = -cB * cG;

                              // Calculate compass heading
                              var compassHeading = Math.Atan(rA / rB);

                              // Convert from half unit circle to whole unit circle
                              if (rB < 0)
                              {
                                  compassHeading += Math.PI;
                              }
                              else if (rA < 0)
                              {
                                  compassHeading += 2 * Math.PI;
                              }

                              /*
                               Alternative calculation (replacing the Math.Atan call and the quadrant adjustments above):

                                var compassHeading = Math.atan2(rA, rB);

                                if(rA < 0) {
                                  compassHeading += 2 * Math.PI;
                                }
                              */

                              // Convert radians to degrees
                              compassHeading *= 180 / Math.PI;

                              // Compass heading can only be derived if returned values are 'absolute'

                              // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeadingWithReset\TestCompassHeadingWithReset\Application.cs

                              //Native.document.body.innerText = new { compassHeading }.ToString();
                              if (compassHeadingInitialized > 0)
                              {
                                  lon1 = compassHeading - compassHeadingOffset;
                              }
                              else
                              {
                                  compassHeadingOffset = compassHeading;
                                  compassHeadingInitialized++;
                              }

                          };
            #endregion

            #region gamma
            Native.window.ondeviceorientation +=
                //e => Native.body.innerText = new { e.alpha, e.beta, e.gamma }.ToString();
                //e => lon = e.gamma;
                e =>
                {
                    lat1 = e.gamma;

                    // servicing a running instance would be nice,
                    // either by patching or just re-running the whole iteration in the background
                    camera_rotation_z = e.beta * 0.02;
                };
            #endregion



            #region camera rotation
            var old = new { clientX = 0, clientY = 0 };

            Native.document.body.ontouchstart +=
                            e =>
                            {
                                var n = new { e.touches[0].clientX, e.touches[0].clientY };
                                old = n;
                            };

            Native.document.body.ontouchmove +=
                    e =>
                    {
                        var n = new { e.touches[0].clientX, e.touches[0].clientY };

                        e.preventDefault();

                        lon0 += (n.clientX - old.clientX) * 0.2;
                        lat0 -= (n.clientY - old.clientY) * 0.2;

                        old = n;
                    };


            Native.document.body.onmousemove +=
                e =>
                {
                    e.preventDefault();

                    if (Native.document.pointerLockElement == Native.document.body)
                    {
                        lon0 += e.movementX * 0.1;
                        lat0 -= e.movementY * 0.1;

                        //Console.WriteLine(new { lon, lat, e.movementX, e.movementY });
                    }
                };


            Native.document.body.onmouseup +=
              e =>
              {
                  //drag = false;
                  e.preventDefault();
              };

            Native.document.body.onmousedown +=
                e =>
                {
                    //e.CaptureMouse();

                    //drag = true;
                    e.preventDefault();
                    Native.document.body.requestPointerLock();

                };


            Native.document.body.ondblclick +=
                e =>
                {
                    e.preventDefault();

                    Console.WriteLine("requestPointerLock");
                };

            #endregion

            Native.body.onmousewheel +=
                e =>
                {

                    camera_rotation_z += 0.1 * e.WheelDirection;

                };
            // https://developer.android.com/training/system-ui/immersive.html




            //var ze = new ZeProperties();

            //ze.Show();
            //ze.treeView1.Nodes.Clear();

            //ze.Add(() => renderer);
            ////ze.Add(() => controls);
            //ze.Add(() => scene);
            //ze.Left = 0;
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server, we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360x83");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // GpuProcessHostUIShim: The GPU process crashed!

            var poke = new WebGLRenderingContext();

            if (poke == null)
            {
                new IHTMLPre { "GpuProcessHostUIShim: The GPU process crashed! restart process."
                }.AttachToDocument();
                return;
            }

            // are we in a RemoteApp ? software renderer?
            // this may hang the buggy rdp protocol...




            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048 * 1; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz render we may want to use float camera precision, not available for ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            //if (Environment.ProcessorCount < 8)
            //    //cubefacesize = 64; // 6 faces, ?

            //    // fast gif?
            //cubefacesize = 1024; // 6 faces, ?

            // not 8k..
            //cubefacesize = 512; // 6 faces, ?

            // a big cubeface may draw only half of itself?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
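            // e.g. at cubefacesize = 2048 this gives uizoom = 128 / 2048 = 0.0625,
            // presumably used to shrink the per-face preview canvases to 128px.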


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying beyond the moon too much?
            //var radius = 500;
            //var radius = 480;
            //var radius = -480;

            //var segments = 32;
            //var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            // var far = 0xffffff;

            // need a zoom effect
            // 5pixels to 33%


            // radius needs to be a bit bigger so we can't zoom through it
            // far image at this distance 
            var skyboxradius0 = 0 + 2048 * 4;

            var near = cubefacesize * 0.33;


            var skyboxradius = skyboxradius0 * 1.2;

            var far = skyboxradius * 2;
            //var near = cubefacesize * 0.5;
            //var near = cubefacesize * 0.4;
            //var near = cubefacesize * 0.25;
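            // with cubefacesizeMAX = 2048 this works out to skyboxradius0 = 8192,
            // skyboxradius = 9830.4, far = 19660.8 and near = 2048 * 0.33 = 675.84,
            // keeping the skybox comfortably inside the far plane.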

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();



            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as an android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code, would the chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);

            // what if we reuse the skybox as is and skip rendering it?
            // that means we cannot rotate via sky, we have to rotate other elements in reverse.
            // can we have a checkbox to hide or render the skybox?
            var scenezooms = new THREE.Group();
            scenezooms.AttachTo(scene);




            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            //scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
            // yet for head tracking we shall rotate the camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,

                    alpha = true,

                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg

            // cyan?
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0xfffff, 0);
            renderer0.setClearColor(0x0, 0);
            //renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face in the GUI
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have a linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // what if we want more than a 30sec video? a 2min animation? more frames to render? 2gb of disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();












            var xframeID = 0;



            new { }.With(

                async delegate
                {

                next:

                    //Console.WriteLine("enter vsync0ambient");
                    Native.document.title = new { xframeID }.ToString();


                    vsync0ambient = new TaskCompletionSource<object>();

                    await vsync0ambient.Task;

                    await Task.Delay(1000 / 15);
                    //await Task.Delay(1000);
                    //Console.WriteLine("await vsync0ambient 5");
                    //await Task.Delay(5000);

                    xframeID++;

                    goto next;
                }

            );
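            // note: the loop above is the ambient frame pump - each pass creates a fresh vsync0ambient
            // TaskCompletionSource, awaits it (presumably completed by the render handler once a frame
            // has been drawn), throttles to roughly 15fps via Task.Delay(1000 / 15), bumps xframeID
            // (mirrored into document.title) and starts over.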














            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerazMIN = 0 - 2048 * 4;

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0 - 2048 * 4, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = camerazMIN, max = 0, valueAsNumber = camerazMIN, title = "cameraz" }.AttachToDocument();
            // the zoom thing..


            new IHTMLHorizontalRule { }.AttachToDocument();

            var skyrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotup" }.AttachToDocument();
            var skyrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotright" }.AttachToDocument();

            new IHTMLPre { () => new { skyrotup = skyrotup.valueAsNumber, skyrotright = skyrotright.valueAsNumber } }.AttachToDocument();


            new IHTMLHorizontalRule { }.AttachToDocument();

            // were we able to test for it?
            //var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = -12, title = "up" }.AttachToDocument();
            var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "up" }.AttachToDocument();
            var zoomrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "right" }.AttachToDocument();

            new IHTMLPre { () => new { 
                

                // on 0 zoom we should rely on the original skybox?
                cameraz = cameraz.valueAsNumber, 
                
                zoomrotup = zoomrotup.valueAsNumber, zoomrotright = zoomrotright.valueAsNumber } }.AttachToDocument();



            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };
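            // applycameraoffset is re-run before every render: it reads the UI sliders
            // (camerax / cameray / camerayHigh / cameraz) plus the render-farm offsets
            // (fcamerax / fcameray / fcameraz) and rebuilds cameraoffset; the per-face delegates
            // added below then copy that offset into all six cube cameras so they stay co-located.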


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            //canvasNY.canvas.style.transform = $"scale({uizoom})";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            //canvasPY.canvas.style.transform = $"scale({uizoom})";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            //canvasNX.canvas.style.transform = $"scale({uizoom})";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";


            // front??
            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            //canvasPX.canvas.style.transform = $"scale({uizoom})";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            //canvasNZ.canvas.style.transform = $"scale({uizoom})";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            //canvasPZ.canvas.style.transform = $"scale({uizoom})";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
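            // note (assumption): the lookAt targets above are permuted relative to the face names
            // (PX looks down -Z, NX down +Z, NZ down +X, PZ down -X) - presumably to compensate for the
            // scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2) applied earlier and for the
            // cubemap face order the equirectangular shader expects.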




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region gl4K spherical
            var gl4K = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c4k = gl4K.canvas.AttachToDocument();


            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            // when can we go up?
            c4k.width = 3840;
            c4k.height = 2160;

            // https://www.youtube.com/watch?v=sLprVF6d7Ug
            // 8K is 7680 4320

            // https://www.youtube.com/watch?v=RNdHaeBhT9Q
            // 8K is 7680 3840

            //c.width = 7680;
            ////c.height = 3840;
            //c.height = 4320;



            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c4k.width, c4k.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c4k.width;

            c4k.style.backgroundColor = "yellow";
            c4k.style.transformOrigin = "0 0";
            //c.style.transform = $"scale({suizoom})";
            c4k.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c4k.style.position = IStyle.PositionEnum.absolute;

            c4k.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120
                );
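            // preview scale for the 4K equirectangular canvas: suizoom = 480f / 3840 = 0.125,
            // so the 3840x2160 output shows up as a 480x270 thumbnail in the page.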


            // until we figure out how to fix the shader, we can try to fake it?
            // will allow at least a nice static 8K image?
            // S6 did a 6546x3272 image. 5k?

            // 1.77
            var c8k = new CanvasRenderingContext2D(3840 * 2, 2160 * 2);

            //var c8k = new CanvasRenderingContext2D(5120, 2880);

            // 5120 x 2880 pixel

            // 8k canvas won't load in chrome?
            c8k.canvas.AttachToDocument();


            c8k.canvas.style.backgroundColor = "cyan";
            c8k.canvas.style.transformOrigin = "0% 0%";
            c8k.canvas.style.transform = "scale(" + (suizoom / 2) + ")";

            c8k.canvas.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 120,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120 + 320
                );



            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl4K,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl4K),
                       supportDerivatives: gl4K.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
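            // note (assumption): the EffectPass above renders a fullscreen quad into gl4K and its
            // fragment shader (Shaders.ProgramFragmentShader) does the actual cube-to-equirectangular
            // projection - for every output pixel (u, v) in [0..1] it would form
            //   longitude = (u - 0.5) * 2 * PI, latitude = (v - 0.5) * PI
            //   dir = (cos(lat) * sin(lon), sin(lat), cos(lat) * cos(lon))
            // and sample the cubemap bound through pass.mInputs[0] with that direction.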



            // why is it flipped?

            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets._20150912_154522().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._20150912_154522recenter().AttachToDocument();


            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                //            A method of creating synthetic stereoscopic panoramic images that can be implemented
                //in most rendering packages has been presented. If single panoramic pairs can be created
                //then stereoscopic panoramic movies are equally possible giving rise to the prospect of
                //movies where the viewer can interact with, at least with regard to what they choose to look
                //at. These images can be projected so as to engage the two features of the human visual
                //system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                //imagery that contains parallax information as captured from two horizontally separated eye
                //positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                //how the two panoramic images should be created in rendering packages are provided, in
                //particular, how to precisely configure the virtual cameras and control the distance to zero
                //parallax.

                // grab a frame

                if (dir == null)
                {
                    var c8ksw = Stopwatch.StartNew();

                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    // not exporting to file system?
                    //var f0 = new IHTMLImage { src = gl4K.canvas.toDataURL() };
                    //var f0 = new IHTMLImage { src = c8k.canvas.toDataURL() };

                    // png would be 50mb?
                    var f0 = new IHTMLImage { src = c8k.canvas.toDataURL(quality: 0.9) };
                    // 22989ms { c8ksw = 00:00:12.12976 }
                    Console.WriteLine(new { c8ksw });

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl4K);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                "render 60hz 30sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                frameIDslider.valueAsNumber = -1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                //var filename = frameIDslider.valueAsNumber.ToString().PadLeft(4, '0') + ".png";
                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };


                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                {
                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    //await dir.WriteAllBytes(filename, gl4K);
                    await dir.WriteAllBytes(filename, c8k);
                }

                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;


                // in 30 sec can we have a zoom in and out?

                // so 15 sec at 60 fps needs to be -max z


                var a = Math.Abs(frameIDslider.valueAsNumber - (60 * 15));
                var aa = a / (60f * 15);

                //cameraz.valueAsNumber = (int)(camerazMIN * aa);
                cameraz.valueAsNumber = (int)(camerazMIN * (1.0 - aa));
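                // worked out: aa is a triangle wave over the 1800-frame (30sec @ 60fps) clip -
                //   frame 0:    aa = 1.0 -> cameraz = 0            (no zoom, skybox/photo only)
                //   frame 900:  aa = 0.0 -> cameraz = camerazMIN   (fully zoomed in)
                //   frame 1800: aa = 1.0 -> cameraz = 0            (zoomed back out)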


                // 60hz 30sec
                if (frameIDslider.valueAsNumber < 60 * 30)
                {
                    // Blob GC? either this helps, or the fact that we made a Blob static.
                    //await Task.Delay(11);
                    await Task.Delay(33);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync1renderman = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var p900toCubeSize = cubefacesize / 1080f;
            var p900toCubeSize = cubefacesize / 1920f;

            //p900toCubeSize *= 0.7f;

            // where is this magic number coming from??
            p900toCubeSize *= 0.65f;
            //p900toCubeSize *= 0.5f;
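            // best guess at the scaling: the P900 footage is 1920px wide, so cubefacesize / 1920f maps one
            // video pixel onto one cube-face pixel; the extra 0.65 factor looks like an empirical fit so the
            // photo plane lines up with the skybox panorama (at cubefacesize = 2048 the plane ends up at
            // roughly 1331 x 749 world units).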

            // http://stackoverflow.com/questions/17648067/three-js-drawing-two-overlapping-transparent-spheres-and-hiding-intersection



            var farimage = new output00609();
            var nearimage = new output01085();

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                var tex0 = new THREE.Texture(farimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );

                //(floor2 as dynamic).renderDepth = 0.2;

                //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                // zoom in and get a 90FOV closeup?
                floor2.position.set(-cubefacesize * 0.50 - skyboxradius0, 0, 0);
                //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }





            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                var tex0 = new THREE.Texture(nearimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                //var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * 0.1), (int)(1080 * 0.1), 8, 8);



                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                //(floor2 as dynamic).renderDepth = 0.3;

                // zoom in and get a 90FOV closeup?
                floor2.position.set(-cubefacesize * 0.50, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }


            Action<IHTMLImage, double> AddFrame = (img, z) =>
            {
                {
                    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                    //var tex0 = new THREE.Texture(new moon());
                    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                    //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                    var tex0 = new THREE.Texture(img) { needsUpdate = true, minFilter = THREE.LinearFilter };
                    applycameraoffset += delegate { tex0.needsUpdate = true; };

                    //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                    var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                    var floor2 = new THREE.Mesh(planeGeometry0,
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                        new THREE.MeshPhongMaterial(
                            new
                            {
                                // black otherwise?
                                transparent = true,

                                map = tex0,


                                //ambient = 0x00ff00,
                                //color = 0x00ff00
                            })

                    );

                    //(floor2 as dynamic).renderDepth = 0.2;

                    //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                    // zoom in and get a 90FOV closeup?
                    floor2.position.set(-cubefacesize * 0.50 - skyboxradius0 * z, 0, 0);
                    //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    //floor2.AttachTo(scene);
                    floor2.AttachTo(scenezooms);
                }

            };
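            // note: AddFrame places each loaded video frame on the same axis as the two anchor planes above -
            // z = 1.0 lands at the far plane (next to farimage), z = 0.0 at the near plane (nearimage) - so
            // the frames loaded from disk form a stack between the two that the camera travels through while
            // cameraz animates.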

            new IHTMLButton { "load frames from disk " }.AttachToDocument().onclick += async e =>
            {
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360x83\Application.cs
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360azimuthal\Application.cs

                e.Element.disabled = true;

                // how do we load the files?
                var dir2 = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir2.createReader();

                var files2 = await dir2r.readFileEntries();

                var files2count = files2.Count();

                Console.WriteLine(new { files2count });

                // does this dir have the first and last image we already know of?
                // 55390ms { files2count = 4324 }

                var firstcandidate = files2.First();

                //Console.WriteLine(new { firstcandidate, farimage.src });
                // 19230ms { firstcandidate = [object FileEntry], src = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/output00609.png }

                // 10903ms { firstcandidate = output00001.png, farimage = output00609.png }

                var files2skip = files2.SkipWhile(firstcandidate1 => firstcandidate1.name != farimage.src.SkipUntilLastOrEmpty("/"));
                //Console.WriteLine(new { files2skip = files2skip.Count() });

                var files2take = files2skip.TakeWhile(firstcandidate1 => firstcandidate1.name != nearimage.src.SkipUntilLastOrEmpty("/"));

                //6228ms { files2count = 4324 }
                //view-source:54116 6377ms { files2take = 476 }

                // reverse?
                var files3 = files2take.ToArray();
                var files3count = files3.Count();


                //Console.WriteLine(new { files2take = files2take.Count() });

                //var step = 8;
                //var step = 1;//crashes
                //var step = 2;//crashes after load
                var step = 4;//crashes after load
                //var step = 3;//
                //var step = (int)(files3count * 0.05);
                //var step = (int)(files3count * 0.25);

                for (int i = step; i < files3count; i += step)
                {
                    //files3[i].

                    Console.WriteLine(new { i });

                    e.Element.innerText = new { step, i, files3count }.ToString();


                    //files3[i].file()
                    var ff = await files3[i].file();

                    //83fc21e4-a2da-408d-9831-571313ead641
                    //Refcount: 1
                    //Content Type: image/png
                    //Type: file
                    //Path: X:\p900\7\DCIM\100NIKON\DSCN0018\output00767.png
                    //Modification Time: Sunday, September 13, 2015 at 10:24:01 AM
                    //Length: 2,131,791

                    // are we running out of blobs?

                    var url = ff.ToObjectURL();
                    var img = new IHTMLImage(url);


                    await img.async.oncomplete;

                    //async ff =>
                    //{
                    //    var ffbytes = await ff.readAsBytes();

                    //var ffimage = (IHTMLImage)ffbytes;

                    var aa = (double)i / files3count;

                    AddFrame(img, 1.0 - aa);

                    //    }
                    //);

                    //files3[i].
                }

                e.Element.innerText = "done " + new { step, files3count }.ToString();

            };


            //{
            //    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //    //var tex0 = new THREE.Texture(new moon());
            //    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            //    var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };

            //    applycameraoffset += delegate { tex0.needsUpdate = true; };

            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(
            //            new
            //            {

            //                map = tex0,


            //                //ambient = 0x00ff00,
            //                //color = 0x00ff00
            //            })

            //    );
            //    floor2.position.set(cubefacesize * 0.55, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}





            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND


            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    //var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await frame0.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(frame0.width, frame0.height);

                    cc.bytes = bytes;

                    // does not do a thing?
                    //s.flipY = true;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                // both?
                                //side = THREE.BackSide,
                                transparent = true
                            });

                    // nice
                    //stars_material.opacity = 0.5;



                    // THREE.SphereGeometry = function ( radius, widthSegments, heightSegments, phiStart, phiLength, thetaStart, thetaLength ) {
                    // http://learningthreejs.com/blog/2011/10/05/performance-merging-geometry/

                    // how do we construct geometry that has higher detail in one spot for zooming in?
                    var skyboxsphere = new THREE.SphereGeometry(skyboxradius, 512, 512

                    // left to right
                        // 0 .. 45 deg

                    // center it?
                        //Math.PI - Math.PI / 4
                        //, Math.PI / 4,



                    //// up to down
                        //// 0 .. 22 deg
                        //0, Math.PI / 2
                    );


                    //
                    var stars = new THREE.Mesh(
                        // radius not used?
                        //new THREE.SphereGeometry(skyboxradius, 64, 64),
                        //new THREE.SphereGeometry(skyboxradius, 8, 8),

                            // we need to be able to zoom in!
                        //new THREE.SphereGeometry(skyboxradius, 256, 256),

                            // chrome will crash on laptop?
                        // chrome will crash on red!
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 512, 512),
                        //new THREE.SphereGeometry(skyboxradius, 600, 600),

                            // or perhaps we need detailed geometry only in a specific spot?
                            skyboxsphere,

                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;

                    stars.scale.x = -1;


                    // http://stackoverflow.com/questions/31797871/three-js-alpha-on-entire-object
                    applycameraoffset += delegate
                    {
                        if (cameraz.valueAsNumber == 0)
                        {
                            // static 5k image should take over...
                            stars_material.opacity = 0.0;
                            return;
                        }


                        var a = (cameraz.valueAsNumber / (double)camerazMIN);

                        stars.rotation.set(0, 0, 0);

                        //    skyrotright
                        //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (skyrotup.valueAsNumber / 900.0));
                        stars.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (skyrotright.valueAsNumber / 900.0));



                        stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * ((a * 35.0 + skyrotup.valueAsNumber) / 900.0));


                        stars_material.opacity = (1.0 - a) * 0.7 + 0.3;
                    };
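                    // in the delegate above a = cameraz / camerazMIN runs from ~0 (no zoom) to 1 (full zoom):
                    // the skybox opacity fades from ~1.0 down to 0.3, and it gets an extra roll of up to
                    // (PI / 2) * (35 / 900) ~= 3.5 degrees as the zoom progresses; at exactly cameraz == 0 the
                    // skybox is hidden (opacity 0) so the static photo can take over.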




                    // can we get our horizon recentered?
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.5 / 90.0));

                    applycameraoffset += delegate
                    {

                        scenezooms.rotation.set(0, 0, 0);
                        // keep skybox where it is

                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (zoomrotup.valueAsNumber / 900.0));
                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (zoomrotright.valueAsNumber / 900.0));

                    };
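                    // slider-to-angle mapping for the zoom target: the sliders range -450..450 and are divided
                    // by 900, so zoomrotup / zoomrotright give at most (PI / 2) * 0.5 = 45 degrees of fine
                    // aim adjustment in either direction.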


                    var hideskybox = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox", @checked = true }.AttachToDocument();

                    //Native.window.onframe += delegate
                    //{
                    //    //
                    //    stars.visible = !hideskybox.@checked;
                    //};

                    hideskybox.onchange += delegate
                    {
                        //
                        stars.visible = !hideskybox.@checked;
                    };
                    stars.visible = !hideskybox.@checked;



                    scene.add(stars);
                }
           );
            #endregion


            //var NYonly = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "NY only" }.AttachToDocument();
            var PXonly = new IHTMLInput
            {
                type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox,
                @checked = true,
                title = "PX only"
            }.AttachToDocument();


            //new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
            //       dae =>
            {



                //dae.position.y = -80;

                //dae.AttachTo(sceneg);
                //scene.add(dae);
                //oo.Add(dae);




                // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                Native.window.onframe +=
                    e =>
                    {
                        // Z:\jsc.svn\examples\javascript\test\TestDelegateIfIfReturn\Application.cs

                        // let render man know..

                        var flag1 = vsync1renderman != null;
                        // nonroslyn!!
                        if (flag1)
                        // this if block is not detected?
                        {
                            // what's going on with if clauses?
                            nop();

                            // wtf???
                            var flag0 = vsync1renderman.Task.IsCompleted;
                            if (flag0)
                                return;
                        }
                        if (vsync0ambient != null)
                            if (vsync0ambient.Task.IsCompleted)
                                return;

                        // 38045ms Native.window.onframe { vsync1renderman = , vsync0ambient = [object Object] }

                        //Console.WriteLine("Native.window.onframe " + new { vsync1renderman, vsync0ambient });

                        //return;

                        //if (pause) return;
                        //if (pause.@checked)
                        //    return;


                        // can we float out of frame?
                        // haha. a bit too flickery.
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                        //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                        //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                        //sphere.rotation.y += speed;
                        //clouds.rotation.y += speed;

                        // manual rebuild?
                        // red compiler notifies laptop chrome of pending update
                        // app reloads

                        applycameraoffset();
                        renderer0.clear();
                        //rendererPY.clear();

                        //cameraPX.aspect = canvasPX.aspect;
                        //cameraPX.updateProjectionMatrix();

                        // smooth approach of the camera towards z, scaled by frame time (disabled for now):
                        //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                        // the mouse wheel allows the camera to move closer
                        // once we see the frame in VR, can we UDP-sync the VR tracking back to the laptop?


                        //this.targetPX.x += 1;
                        //this.targetNX.x -= 1;

                        //this.targetPY.y += 1;
                        //this.targetNY.y -= 1;

                        //this.targetPZ.z += 1;
                        //this.targetNZ.z -= 1;

                        // how does the 360 or shadertoy want our cubemaps?


                        // and then rotate right?

                        // how can we render cubemap?

                        if (cameraz.valueAsNumber == 0)
                            renderer0.setClearColor(0x0, 0);
                        else
                            renderer0.setClearColor(0x0, 1);

                        new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                        }.WithEach(cc =>
                            {
                                cc.clearRect(0, 0, cubefacesize, cubefacesize);
                            }
                        );

                        //gl.clear()

                        if (PXonly.@checked)
                        {
                            var cameraPXsw = Stopwatch.StartNew();

                            renderer0.render(scene, cameraPX);

                            // 35207ms { cameraPXsw = 00:00:00.88 }
                            //75505ms { cameraPXsw = 00:00:00.61 }
                            //Console.WriteLine(new { cameraPXsw });

                            // clear if transparent?
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            //return;

                        }
                        else
                        {

                            #region x
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            // the floor?

                            // render only this one?
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            #region Paint_Image
                            new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                        }.WithEachIndex(
                         (img, index) =>
                         {
                             gl4K.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                             //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                             gl4K.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                             // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                             // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                             //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                             //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
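
                             // note: UNPACK_FLIP_Y_WEBGL applies to uploads from canvas/image sources like the
                             // one below; passing true instead would flip each face vertically at upload time,
                             // which is one way to correct an upside-down cube face.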

                             gl4K.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                         }
                      );


                            // http://stackoverflow.com/questions/11544608/how-to-clear-a-rectangle-area-in-webgl

                            if (cameraz.valueAsNumber == 0)
                                gl4K.clearColor(0, 0, 0, 0);
                            else
                                gl4K.clearColor(0, 0, 0, 1);

                            gl4K.clear(gl.COLOR_BUFFER_BIT);

                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not the full 360
                            pass.Paint_Image(
                                0,
                                0,
                                0,
                                0,
                                0
                                //,
                                // gl_FragCoord cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                            );

                            //paintsw.Stop();


                            // flush the queued GL commands so the cube face uploads are submitted before this frame ends
                            gl4K.flush();
                            #endregion


                        }

                        // let render man know..
                        if (vsync1renderman != null)
                            if (!vsync1renderman.Task.IsCompleted)
                                vsync1renderman.SetResult(null);

                        if (vsync0ambient != null)
                            if (!vsync0ambient.Task.IsCompleted)
                                vsync0ambient.SetResult(null);
                    };


            }
            //);





            Console.WriteLine("do you see it?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page = null)
        {
            InitializeComponent();

            // http://johnstejskal.com/wp/super-hero-city-my-3d-webgl-game-using-three-js/
            // http://www.johnstejskal.com/dev/super-hero-city/

            //DiagnosticsConsole.ApplicationContent.BindKeyboardToDiagnosticsConsole();


            // decent simcity comes to mind
            // http://www.mrdoob.com/lab/javascript/webgl/city/01/

            //var renderer = this.renderer.renderer;

            var renderer = new THREE.WebGLRenderer(
                new { antialias = false, alpha = false }
            );

            renderer.setClearColor(new THREE.Color(0xd8e7ff));

            renderer.setSize(Native.window);

            // INodeConvertible?
            renderer.domElement.AttachToDocument();
            renderer.domElement.style.SetLocation(0, 0);

            var camera = new THREE.PerspectiveCamera(40, Native.window.aspect, 1, 3000);
            camera.position.y = 80;

            var scene = new THREE.Scene();
            scene.fog = new THREE.FogExp2(0xd0e0f0, 0.0025);


            {
                var light = new THREE.HemisphereLight(0xfffff0, 0x101020, 1.25);
                light.position.set(0.75, 1, 0.25);
                scene.add(light);
            }



            var plane = new THREE.Mesh(
                new THREE.PlaneGeometry(2000, 2000),
                new THREE.MeshBasicMaterial(new { color = 0x101018 })
            );

            plane.rotation.x = -90 * Math.PI / 180;
            scene.add(plane);







            var building_geometry = new THREE.CubeGeometry(1, 1, 1);
            building_geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0, 0.5, 0));

            ((IArray<THREE.Face4>)(object)building_geometry.faces).splice(3, 1);

            ((IArray<object>)(object)building_geometry.faceVertexUvs[0]).splice(3, 1);

            building_geometry.faceVertexUvs[0][2][0].set(0, 0);
            building_geometry.faceVertexUvs[0][2][1].set(0, 0);
            building_geometry.faceVertexUvs[0][2][2].set(0, 0);

            // Uncaught TypeError: Cannot call method 'set' of undefined view-source:84609
            //building_geometry.faceVertexUvs[0][2][3].set(0, 0);
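
            // in the original mrdoob city demo the splice above removes the unseen bottom face and the
            // roof UVs are pinned to a single texel so the window texture only shows on the sides.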


            Func<double> random = new Random().NextDouble;


            var building = new THREE.Mesh(building_geometry);
            var city = new THREE.Geometry();

            {
                var light = new THREE.Color(0xffffff);
                var shadow = new THREE.Color(0x303050);

                #region city
                for (var i = 0; i < 20000; i++)
                {

                    var value = 1 - random() * random();
                    var color = new THREE.Color(0).setRGB(value + random() * 0.1, value, value + random() * 0.1);

                    var top = color.clone().multiply(light);
                    var bottom = color.clone().multiply(shadow);

                    building.position.x = Math.Floor(random() * 200 - 100) * 10;
                    building.position.z = Math.Floor(random() * 200 - 100) * 10;
                    building.rotation.y = random();

                    building.scale.z = random() * random() * random() * random() * 50 + 10;
                    building.scale.x = building.scale.z;

                    building.scale.y = (random() * random() * random() * building.scale.x) * 8 + 8;

                    var geometry = building.geometry;

                    var jl = geometry.faces.Length;

                    for (var j = 0; j < jl; j++)
                    {
                        if (j == 2)
                        {
                            geometry.faces[j].vertexColors = new[] { color, color, color, color };
                        }
                        else
                        {
                            geometry.faces[j].vertexColors = new[] { top, bottom, bottom, top };
                        }
                    }
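
                    // light top / shadowed bottom vertex colors give a cheap shading gradient;
                    // face 2 (the roof in this cube's face order) gets the flat building color instead.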

                    // three.js warning: "THREE.GeometryUtils: .merge() has been moved to Geometry.
                    // Use geometry.merge( geometry2, matrix, materialIndexOffset ) instead."
                    // the deprecated helper below still works for now; the replacement call needs the
                    // building's matrix, not just its geometry:
                    //city.merge(building.geometry);
                    THREE.GeometryUtils.merge(city, building);
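
                    // a minimal sketch of the replacement call named in the warning above
                    // (assuming Geometry.merge is exposed with the upstream three.js signature):
                    //building.updateMatrix();
                    //city.merge(building.geometry, building.matrix);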

                }
                #endregion

                #region generateTexture
                Func<IHTMLCanvas> generateTexture = delegate
                {


                    var context1 = new CanvasRenderingContext2D(32, 64);

                    context1.fillStyle = "#ffffff";
                    context1.fillRect(0, 0, 32, 64);

                    for (var y = 2; y < 64; y += 2)
                        for (var x = 0; x < 32; x += 2)
                        {

                            var value = Math.Floor(random() * 64);
                            context1.fillStyle = "rgb(" + value + "," + value + "," + value + ")";
                            context1.fillRect(x, y, 2, 1);

                        }


                    var context = new CanvasRenderingContext2D(512, 1024)
                    {
                        ImageSmoothingEnabled = false
                    };
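
                    // smoothing is disabled so the 2px-wide "window" pixels stay crisp when the
                    // 32x64 canvas is scaled up to 512x1024 below.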


                    context.drawImage(context1.canvas, 0, 0, context.canvas.width, context.canvas.height);

                    return context.canvas;

                };
                #endregion

                var texture = new THREE.Texture(generateTexture())
                {
                    anisotropy = renderer.getMaxAnisotropy(),
                    needsUpdate = true
                };

                var mesh = new THREE.Mesh(city, new THREE.MeshLambertMaterial(new { map = texture, vertexColors = THREE.VertexColors }));
                scene.add(mesh);
            }


            var controls = new THREE.FirstPersonControls(camera)
            {
                movementSpeed = 20,
                lookSpeed = 0.05,
                lookVertical = true
            };


            //var lastTime = Native.window.performance.now() / 1000;
            // note: the stopwatch has to be started, and FirstPersonControls expects the
            // per-frame delta in seconds, so it is restarted every frame below.
            var delta = Stopwatch.StartNew();

            Native.window.onframe +=
                delegate
                {
                    //var time = Native.window.performance.now() / 1000;

                    controls.update(delta.ElapsedMilliseconds / 1000.0);
                    delta = Stopwatch.StartNew();
                    renderer.render(scene, camera);

                    //lastTime = time;
                };
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {

           // SCENE
	         scene = new THREE.Scene();
	        // CAMERA
	        var SCREEN_WIDTH = Native.window.Width;
            var SCREEN_HEIGHT = Native.window.Height;
	        var VIEW_ANGLE = 45;
            var ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT;
            var NEAR = 0.1;
            var FAR = 20000;

	        // perspective cameras
	        perspectiveCamera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
	        perspectiveCamera.position.set(0,200,550);
	        perspectiveCamera.lookAt(scene.position);
	        scene.add(perspectiveCamera);

	        // orthographic cameras
	        mapCamera = new THREE.OrthographicCamera(
            Native.window.Width / -2,		// Left
            Native.window.Width / 2,		// Right
            Native.window.Height / 2,		// Top
            Native.window.Height / -2,	// Bottom
            -5000,            			// Near 
            10000 );           			// Far 
	        mapCamera.up = new THREE.Vector3(0,0,-1);
	        mapCamera.lookAt( new THREE.Vector3(0,-1,0) );
	        scene.add(mapCamera);

            renderer = new THREE.WebGLRenderer(
                   new
                   {
                       antialias = true
                   }
                );
            

	        // RENDERER
	        renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
            this.container = (IHTMLCanvas)renderer.domElement;
            this.container.AttachToDocument();
            this.container.style.SetLocation(0, 0);
	
	        // EVENTS
            //THREEx.WindowResize(renderer, mapCamera);
            //THREEx.FullScreen.bindKey({ charCode : 'm'.charCodeAt(0) });

	        // STATS
            //stats = new Stats();
            //stats.domElement.style.position = 'absolute';
            //stats.domElement.style.bottom = '0px';
            //stats.domElement.style.zIndex = 100;
            //container.appendChild( stats.domElement );
	        // LIGHT
	        var light = new THREE.PointLight(0xffffff);
	        light.position.set(0,250,0);
	        scene.add(light);
	        // FLOOR
            var img = new uvgrid01();
            var floorTexture = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await img;
                                    s.needsUpdate = true;
                                });
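            // the texture starts out empty; once the image download resolves, needsUpdate flags it for upload on the next render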
            floorTexture.wrapS = floorTexture.wrapT = THREE.RepeatWrapping; 
            floorTexture.repeat.set( 10, 10 );
	        var floorMaterial = new THREE.MeshBasicMaterial( new { map = floorTexture, side = THREE.DoubleSide } );
	        var floorGeometry = new THREE.PlaneGeometry(2000, 2000, 10, 10);
	        var floor = new THREE.Mesh(floorGeometry, floorMaterial);
	        floor.position.y = -0.5;
	        floor.rotation.x = Math.PI / 2;
	        scene.add(floor);
	
	        ////////////
	        // CUSTOM //
	        ////////////
	

    //        materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/xpos.png' ) }));
    //materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/xneg.png' ) }));
    //materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/ypos.png' ) }));
    //materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/yneg.png' ) }));
    //materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/zpos.png' ) }));
    //materialArray.push(new THREE.MeshBasicMaterial( { map: THREE.ImageUtils.loadTexture( 'images/zneg.png' ) }));
	        // create an array with six textures for cube
	        var materialArray = new THREE.MeshBasicMaterial[6];
	        materialArray[0] = (new THREE.MeshBasicMaterial( new { map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new xpos();
                                    s.needsUpdate = true;
                                }) 
            }));
	        materialArray[1] = (new THREE.MeshBasicMaterial( new {  map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new xneg();
                                    s.needsUpdate = true;
                                }) 
            }));
	        materialArray[2] = (new THREE.MeshBasicMaterial( new { map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new ypos();
                                    s.needsUpdate = true;
                                }) 
            }));
	        materialArray[3] = (new THREE.MeshBasicMaterial( new { map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new yneg();
                                    s.needsUpdate = true;
                                }) 
            }));
	        materialArray[4] = (new THREE.MeshBasicMaterial( new {map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new zpos();
                                    s.needsUpdate = true;
                                }) 
            }));
	        materialArray[5] = (new THREE.MeshBasicMaterial( new { 
            map = new THREE.Texture().With(
                                async s =>
                                {
                                    s.image = await new zneg();
                                    s.needsUpdate = true;
                                }) 
            }));

            //var MovingCubeMat = new THREE.MeshFaceMaterial(new { material = materialArray });

            var MovingCubeGeom = new THREE.CubeGeometry(50, 50, 50, 1, 1, 1);
            var MovingCube = new THREE.Mesh(MovingCubeGeom, new THREE.MeshBasicMaterial(new
                                            {
                                                color = new THREE.Color(0xFFFFFF)
                                            }));
            MovingCube.position.set(0, 25.1, 0);
            scene.add(MovingCube);	
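
            // a minimal sketch of wiring the six materials above onto the cube instead of the plain
            // white material (assuming the wrapper accepts the array directly, as upstream
            // three.js MeshFaceMaterial does; MovingCube6 is an illustrative name):
            //var MovingCubeMat = new THREE.MeshFaceMaterial(materialArray);
            //var MovingCube6 = new THREE.Mesh(MovingCubeGeom, MovingCubeMat);
            //MovingCube6.position.set(0, 25.1, 0);
            //scene.add(MovingCube6);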
	
	        // a little bit of scenery...

	        var ambientlight = new THREE.AmbientLight(0x111111);
	        scene.add( ambientlight );

            // torus knot
            var colorMaterial = new THREE.MeshLambertMaterial(new { color = new THREE.Color(0xff3333) });
            var shape = new THREE.Mesh(new THREE.TorusKnotGeometry(30, 6, 160, 10, 2, 5, 4), colorMaterial);
            shape.position.set(-200, 50, -200);
            scene.add(shape);
            // torus knot
            var colorMaterial2 = new THREE.MeshLambertMaterial(new { color = new THREE.Color(0x33ff33) });
            var shape2 = new THREE.Mesh(new THREE.TorusKnotGeometry(30, 6, 160, 10, 3, 2, 4), colorMaterial2);
            shape2.position.set(200, 50, -200);
            scene.add(shape2);
            // torus knot
            var colorMaterial3 = new THREE.MeshLambertMaterial(new { color = new THREE.Color(0xffff33) });
            var shape3 = new THREE.Mesh(new THREE.TorusKnotGeometry(30, 6, 160, 10, 4, 3, 4), colorMaterial3);
            shape3.position.set(200, 50, 200);
            scene.add(shape3);
            // torus knot
            var colorMaterial4 = new THREE.MeshLambertMaterial(new { color = new THREE.Color(0x3333ff) });
            var shape4 = new THREE.Mesh(new THREE.TorusKnotGeometry(30, 6, 160, 10, 3, 4, 4), colorMaterial4);
            shape4.position.set(-200, 50, 200);
            scene.add(shape4);

            renderer.setSize(Native.window.Width, Native.window.Height);
	        renderer.setClearColor( 0x000000, 1 );
	        renderer.autoClear = false;

            Native.window.onframe +=
                e =>
                {
                    perspectiveCamera.updateProjectionMatrix();

                    render();
                };
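
            // render() is defined elsewhere in this example; a minimal sketch of the usual
            // two-camera composite (assuming the wrapper exposes renderer.setViewport as
            // upstream three.js does; the 200px corner size is illustrative):
            //renderer.clear();
            //renderer.setViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
            //renderer.render(scene, perspectiveCamera);                  // main view
            //renderer.setViewport(10, SCREEN_HEIGHT - 210, 200, 200);    // minimap in a corner
            //renderer.render(scene, mapCamera);                          // top-down orthographic view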
            var isPushed = false;

            this.container.onkeydown +=
                     m =>
                     {
                         isPushed = true;

                         if (isPushed)
                         {
                             //var delta = clock.getDelta(); // seconds.
                             // without the clock delta these are fixed steps per keydown event, not per second
                             var moveDistance = 200 * 2;          // would be 200 units per second if scaled by delta
                             var rotateAngle = Math.PI / 2 * 2;   // would be pi/2 radians (90 degrees) per second if scaled by delta

                             // local transformations

                             // move forwards/backwards/left/right
                             if (m.KeyCode == 87)
                                 MovingCube.translateZ(-moveDistance);
                             if (m.KeyCode == 83)
                                 MovingCube.translateZ(moveDistance);
                             if (m.KeyCode == 81)
                                 MovingCube.translateX(-moveDistance);
                             if (m.KeyCode == 69)
                                 MovingCube.translateX(moveDistance);

                             //// rotate left/right/up/down
                             //var rotation_matrix = new THREE.Matrix4().identity();
                             //if (m.KeyCode == 65)
                             //    MovingCube.rotateOnAxis(new THREE.Vector3(0, 1, 0), rotateAngle);
                             //if (m.KeyCode == 68)
                             //    MovingCube.rotateOnAxis(new THREE.Vector3(0, 1, 0), -rotateAngle);

                             //if (m.KeyCode == 90)
                             //{
                             //    MovingCube.position.set(0, 25.1, 0);
                             //    MovingCube.rotation.set(0, 0, 0);
                             //}
                         }
             };

            this.container.onkeyup += e =>
            {
                isPushed = false;
            };
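
            // a sketch of frame-rate-independent movement (illustrative only; keyclock and the
            // 200 units/second speed are not part of the original code):
            //var keyclock = Stopwatch.StartNew();
            //this.container.onkeydown += m =>
            //{
            //    var seconds = keyclock.ElapsedMilliseconds / 1000.0;
            //    keyclock = Stopwatch.StartNew();
            //
            //    if (m.KeyCode == 87) MovingCube.translateZ(-200 * seconds);
            //    if (m.KeyCode == 83) MovingCube.translateZ(200 * seconds);
            //};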

            Native.window.onresize +=
                delegate
                {


                    //if (canvas.parentNode == Native.document.body)

                    // are we embedded?
                    if (page != null)
                        renderer.setSize();
                };

          
        }