// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151227/movingmusic

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // THREE.WebGLRenderer 72dev
            // nuget!
            // VS NuGet search is broken!
            Console.WriteLine(new { THREE.REVISION } + " black screen until everything loads!");


            // http://smus.com/ultrasonic-networking/
            // http://chromium.googlecode.com/svn/trunk/samples/audio/shiny-drum-machine.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/box2d-js/box2d-audio.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/simple.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/oscillator-fm2.html
            // http://borismus.github.io/spectrogram/
            // http://borismus.github.io/moving-music/
            // http://smus.com/spatial-audio-web-vr/
            // can we have 360 audio yet?

            // THREE.WebGLObjects: Converting... THREE.PointCloud THREE.BufferGeometry


            new { }.With(
                async delegate
                {
                    //HTML.Audio.FromAssets.

                    // does jsc assetslibrary do jpeg? or do we need jpg?

                    Native.document.body.Clear();
                    Native.document.body.style.margin = "0px";
                    Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;


                    //Error	1	Cannot convert anonymous method to type 'dynamic' because it is not a delegate type	Z:\jsc.svn\examples\javascript\audio\synergy\MovingMusicByBorismus\Application.cs	93	68	MovingMusicByBorismus



                    //                        Choreographer.prototype.getAudioFile = function(set, basename) {
                    //  var extension = Util.isMp3Supported() ? 'mp3' : 'ogg';
                    //  return 'snd/' + set + '/' + basename + '.' + extension;
                    //};



                    // Largely from http://learningthreejs.com/blog/2011/08/15/lets-do-a-sky/

#if VideoRenderer
                        window.VideoRenderer.prototype.addSkybox = IFunction.OfDelegate(
                           new Func<object>(
                               () =>
                               {
                                   // black screen??
                                   //var far = 0x9999;

                                   var far = 0x999;





                                   var sphere = new THREE.Mesh(
                                       new THREE.SphereGeometry(far, 20, 20),
                                       new THREE.MeshBasicMaterial(
                                           new
                                           {
                                               //20150608_165300.jpg
                                               //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.stolanuten().src)
                                               map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets._20150608_165300().src)
                                           }
                                       )
                                   );
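                                   // negative X scale flips the sphere inside out, so the
                                   // equirectangular texture is visible from inside the skybox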
                                   sphere.scale.x = -1;

                                   // won't help us.
                                   //sphere.material.opacity = 0.5;


                                   Console.WriteLine("addSkybox... " + new { window.video.scene });

                                   sphere.AttachTo((THREE.Scene)window.video.scene);

                                   return null;
                               }
                           )
                       );
#endif





#if Choreographer
                        //window.Choreographer.prototype.initVocal = (Action)
                        //window.Choreographer.prototype.initVocal = IFunction.OfDelegate(
                        //window.Choreographer.prototype.initVocal = IFunction.Of(
                        window.Choreographer.prototype.getAudioFile = IFunction.OfDelegate(
                            new Func<object, string, string>(
                                (object set, string basename) =>
                                {
                                    Console.WriteLine("window.Choreographer.prototype.getAudioFile " + new { basename });

                                    //var russian = new MovingTrack({
                                    //    src: this.getAudioFile(set, 'Russian'),
                                    //    color: 0x19414B,
                                    //  });

                                    // this.manager.addTrack(cats);


                                    //                                    view-source:54442 29925ms window.Choreographer.prototype.getAudioFile { basename = Cats }
                                    //2015-11-16 12:25:32.573 view-source:54442 29927ms window.Choreographer.prototype.getAudioFile { basename = Nimoy }
                                    //2015-11-16 12:25:32.575 view-source:54442 29929ms window.Choreographer.prototype.getAudioFile { basename = Roth }
                                    //2015-11-16 12:25:32.576 view-source:54442 29930ms window.Choreographer.prototype.getAudioFile { basename = Russian }

                                    //if (basename == "Cats") return new HTML.Audio.FromAssets.Sweet_Dreams_My_Love_by_Alexander_J_Turner { }.src;
                                    if (basename == "Cats") return null;

                                    //if (basename == "Cats") return new HTML.Audio.FromAssets.loop_GallinagoDelicata { }.src;
                                    //if (basename == "Nimoy") return new HTML.Audio.FromAssets.sand_run { }.src;
                                    ////if (basename == "Roth") return new HTML.Audio.FromAssets.snd_jeepengine_start { }.src;
                                    //if (basename == "Roth") return new HTML.Audio.FromAssets.heartbeat3 { }.src;

                                    if (basename == "Nimoy") return null;
                                    if (basename == "Roth") return null;

                                    // the green blob.
                                    //return new HTML.Audio.FromAssets.Russian { }.src;
                                    return new HTML.Audio.FromAssets.crickets { }.src;

                                    // yellow is str track man
                                }
                            )
                        );
#endif




                    // X:\opensource\github\moving-music\js\audio-renderer.js


                    // defined at 
                    #region X:\opensource\github\moving-music\js\main.js
                    //window.main();
                    //  start();

                    //  var set = Util.getParameterByName('set');
                    //var mode = Util.getParameterByName('mode');
                    // Create the world.
                    var choreographer = new Choreographer();
                    //manager = choreographer.manager;
                    ////choreographer.on('modechanged', onModeChanged);

                    //// Create a video renderer.
                    //video = new VideoRenderer({selector: 'body', overview: false});
                    //video.setManager(manager);
                    //video.addLight();
                    //video.addSkybox();

                    // Create the audio renderer.
                    var audio = new AudioRenderer();
                    //audio.setManager(manager);
                    //audio.on('ready', onAudioLoaded);



                    // After a little while, if we're not loaded yet, start updating progress.



                    #endregion






                }
            );
        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150912/x360mountains




        //  ls sdcard/oculus/360photos/
        // "X:\vr\0000.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\0000.png" /sdcard/oculus/360photos/
        // 2649 KB/s (1085134 bytes in 0.400s)

        // "X:\vr\tr.png"
        // R:\util\android-sdk-windows\platform-tools\adb.exe push "X:\vr\tr.png" /sdcard/oculus/360photos/


        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\code.png" /sdcard/oculus/360photos/
        // R:\util\android-sdk-windows\platform-tools\adb.exe push  "X:\vr\cone2.png" /sdcard/oculus/360photos/
        // "X:\vr\code.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150906/roomscanningeffectbyrosme

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150816/iss

        // https://www.youtube.com/watch?v=UWiq-qgedws
        // https://www.youtube.com/watch?v=TwRSOEG-Gx4
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe1.png"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globe2.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "X:\vr\tape360globe1\0000.png" "/sdcard/oculus/360photos/tape360globenight.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0000.png" "/sdcard/oculus/360photos/tape360iss.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push   "R:\vr\tape360iss\0230.png" "/sdcard/oculus/360photos/tape360iss0230.png"

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\sh1\0000.png" "/sdcard/oculus/360photos/sh1.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push     "R:\vr\tape360columns\0000.png" "/sdcard/oculus/360photos/tape360columns.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push    "X:\vr\edge.png" "/sdcard/oculus/360photos/tape360columns.png"
        // 4041 KB/s (3248448 bytes in 0.785s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push      "X:\vr\terrain.png" "/sdcard/oculus/360photos/"

        // could we UDP our 360 image from WebGL to VR yet?

        // "R:\vr\tape360iss\0230.png"

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near Matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst /D b:
        // subst b: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // subst a: z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\x360mountains\x360mountains\bin\Debug\staging\x360mountains.Application\web
        // what if we want to do subst in another window station (winsta) or session?

        // ColladaLoader: Empty or non-existing file (assets/x360mountains/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server, we can open it up on Android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
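                    // an opener or an enclosing parent frame means this document was (most likely)
                    // opened via chrome.app.window.create, so fall through and run the app itself
                    // instead of acting as the launcher below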
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360mountains");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            //var vs0 = new TraceConeWithCRTByKlk.Shaders.Program360FragmentShader();
            //var vs0 = new FaceEdgeVertexByPaniq.Shaders.Program360FragmentShader();
            var vs0 = new ChromeShaderToyMountainsByHoskins.Shaders.Program360FragmentShader();


            // onframe needs syncs to enable GC!
            var vsync = default(TaskCompletionSource<object>);
            Func<bool> vsyncReady = delegate
            {

                if (vsync != null)
                    if (vsync.Task.IsCompleted)
                        return true;


                return false;
            };
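
            // a minimal sketch (assumption; the capture/export loop that drives vsync is not in this section):
            //
            //   vsync = new TaskCompletionSource<object>(); // pending: the shader canvases keep painting
            //   // ... capture the current cube faces, then ...
            //   vsync.SetResult(null);                       // completed: vsyncReady() returns true, onframe handlers skip work
            //   vsync = null;                                // cleared: painting resumes for the next frame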



            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?

            // not responding...
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048; // 6 faces, ?
            //int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
                                                //int cubefacesize = 1024; // 6 faces, ?
                                                // "X:\vr\tape1\0000x2048.png"
                                                // for 60hz render we may want to use float camera precision, not available for ui.
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
                                                //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            // force laptop into preview. when can we have a button for it?
            //if (Environment.ProcessorCount < 8)
            //    cubefacesize = 64; // 6 faces, ?

            // fast gif?
            //cubefacesize = 128; // 6 faces, ?
            //cubefacesize = 512; // 6 faces, ?
            //    [GroupMarkerNotSet(crbug.com / 242999)!:247F0809]
            //RENDER WARNING: texture bound to texture unit 0 is not renderable.It maybe non-power-of-2 and have incompatible texture filtering.

            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
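
            // e.g. cubefacesizeMAX 2048 gives uizoom 128f / 2048 = 0.0625;
            // the preview canvases below are simply styled down to 128px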


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";





            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying too far beyond the moon?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360mountains/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as an android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    //alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, -X, +Y, -Y, +Z, -Z] face order
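
            // each shader canvas below renders one cube face by pointing the fragment shader's
            // uCameraTargetOffset along that face (values as set in the ProgramSelected handlers):
            //   PZ (left)   -> ( 0,  0, -1)
            //   PX (front)  -> ( 1,  0,  0)
            //   NX (back)   -> (-1,  0,  0)
            //   NZ (right)  -> ( 0,  0,  1)
            //   NY (bottom) -> ( 0,  1, -0.0001)
            //   PY (top)    -> ( 0, -1,  0.0001)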



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation from the center of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover over the top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);













            var bottomRotate100 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -314, max = 314, valueAsNumber = 0, title = "bottomRotate" }.AttachToDocument();


            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;

            // what if we want more than 30sec of video? a 2min animation? more frames to render? 2gb of disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 137, title = "frameIDslider" }.AttachToDocument();
            frameIDslider.onchange += delegate { frameIDslider.title = "frameIDslider " + frameIDslider.valueAsNumber; };
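
            // the onframe handlers below turn the slider's frame index into shader time in seconds
            // at 60fps: fps60 = frameIDslider * (1 / 60.0f), e.g. frame 137 -> ~2.28s,
            // and maxframes (120s * 60) maps back to 120 seconds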




            //var vs0 = new ChromeShaderToyRelentlessBySrtuss.Shaders.ProgramFragmentShader();
            //var vs0 = new TraceConeWithCRTByKlk.Shaders.ProgramFragmentShader();








            // left
            IHTMLCanvas shader0canvasPZ = null;

            // locCameraTargetOffset to look left?
            #region shader0canvasPZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which ToString()s to the GLSL code for the GPU
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader0canvasPZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // left?
                      //forward=normalize(float3(0.0 , 0.0 ,1.0));
                  };


                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };



              }
          );
            #endregion





            // front
            IHTMLCanvas shader1canvasPX = null;

            #region shader1canvasPX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasPX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // off by 45deg__

                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);

                      // fixup
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 0.0f);


                      // front
                      //forward=normalize(float3(1.0 , 0.0 ,0.0));
                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion



            // back
            IHTMLCanvas shader1canvasNX = null;

            #region shader1canvasNX
            new { }.With(
              async delegate
              {
                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();

                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader1canvasNX = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  c0.style.SetLocation(720, 8);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);


                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0.0f);

                      // back
                      //forward=normalize(float3(-1.0 , 0.0 ,0.0));

                  };

                  var sw0 = Stopwatch.StartNew();

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            // right
            IHTMLCanvas shader2canvasNZ = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasNZ
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which ToString()s to the GLSL code for the GPU
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNZ = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 1.0f, 0, 1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, -1.0f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.0f, 0, 1.0f);

                      // right
                      //forward=normalize(float3(0.0 , 0.0 ,-1.0));

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;
                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion












            // bottom
            IHTMLCanvas shader2canvasNY = null;

            // locCameraTargetOffset to look bottom?
            #region shader2canvasNY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which ToString()s to the GLSL code for the GPU
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasNY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, -1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
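
                      // assumption: the tiny Z component keeps the look direction from being exactly
                      // (anti)parallel to the shader's up axis, which would make the camera basis degenerate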
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };

                  Native.window.onframe += delegate
                  {
                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 is 30sec is 30 000
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion











            // top
            IHTMLCanvas shader2canvasPY = null;

            // locCameraTargetOffset to look right?
            #region shader2canvasPY
            new { }.With(
              async delegate
              {
                  //return;

                  Native.body.style.margin = "0px";
                  (Native.body.style as dynamic).webkitUserSelect = "auto";

                  // https://sites.google.com/a/jsc-solutions.net/work/x3
                  //var vs0 = new ChromeShaderToyColumns.Shaders.ProgramFragmentShader();
                  //var vs0 = new x2001SpaceStationByOtavio.Shaders.ProgramFragmentShader();
                  //var vs0 = new Xor3DAlienLandByXor.Shaders.ProgramFragmentShader();
                  //var vs0 = new RoomScanningEffectByRosme.Shaders.ProgramFragmentShader();
                  // now we have an empty shell
                  // which ToString()s to the GLSL code for the GPU
                  // and if we were to initialize 



                  // enable intellisense
                  //var vs0i = (RoomScanningEffectByRosme.Shaders.__ProgramFragmentUniforms)(object)vs0;


                  // script: error JSC1000: No implementation found for this native method, please implement [static ScriptCoreLib.GLSL.Shader.vec3(System.Single, System.Single, System.Single)]

                  //     b.__this._vs0i_5__2.uCameraTargetOffset = new ctor$aQ8ABjj5gzW_aEh4Cmq2oMg(1, 0, 0);

                  // 270ms ReferenceError: ctor$aQ8ABjj5gzW_aEh4Cmq2oMg is not defined

                  // wishful thinking eh
                  //vec3 uCameraTargetOffset = vec3(0.0f, 0.0f, -1.0f);
                  //vs0i.uCameraTargetOffset = new ScriptCoreLib.GLSL.vec3(1.0f, 0.0f, 0.0f);
                  // this would mean the program was selected and uniform was uploaded to gpu




                  var gl0 = new WebGLRenderingContext(alpha: true);
                  shader2canvasPY = gl0.canvas;

                  var c0 = gl0.canvas.AttachToDocument();

                  //c0.style.SetSize(460, 237);
                  //c0.width = 460;
                  //c0.height = 237;

                  //c0.style.SetSize((int)uizoom * 3, (int)uizoom * 3);
                  c0.style.SetSize(128, 128);
                  c0.width = cubefacesize;
                  c0.height = cubefacesize;

                  //c0.style.SetLocation(720, 8);
                  c0.style.SetLocation(800, 360);

                  var mMouseOriX = 0;
                  var mMouseOriY = 0;
                  var mMousePosX = 0;
                  var mMousePosY = 0;


                  var pass0 = new ChromeShaderToyColumns.Library.ShaderToy.EffectPass(
                    null,
                    gl0,
                    precission: ChromeShaderToyColumns.Library.ShaderToy.DetermineShaderPrecission(gl0),
                    supportDerivatives: gl0.getExtension("OES_standard_derivatives") != null,
                    callback: null,
                    obj: null,
                    forceMuted: false,
                    forcePaused: false,
                    //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                    outputGainNode: null
                );
                  pass0.MakeHeader_Image();
                  pass0.NewShader_Image(vs0);

                  var sw0 = Stopwatch.StartNew();

                  pass0.ProgramSelected += mProgram =>
                  {
                      // ldflda?

                      // 45deg off??


                      // front
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 0, -1.0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, .1f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0f);

                      // left
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1.0f, 0, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -.0001f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, -1f, -1, 0);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0);

                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.01f, -1, 0.01f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0.001f, 1, 0f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, 1, -0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, -0.0001f);
                      var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, 0, -1, 0.0001f);
                      //var l3 = gl0.getUniformLocation(mProgram, "uCameraTargetOffset"); if (l3 != null) gl0.uniform3f(l3, .0001f, -1, 0);

                  };
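
                  // note: gl.uniform3f only writes into the program that is currently bound, so the upload has to
                  // happen after the EffectPass has selected its program - hence hooking ProgramSelected here
                  // instead of calling getUniformLocation/uniform3f straight after NewShader_Image.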

                  Native.window.onframe += delegate
                  {
                      //d = a[0].CS___8__locals1.vsync != null;
                      //e = a[0].CS___8__locals1.vsync.kAcABp_b1ITCbIktNs3el5Q().dgQABqwxMjO1zVAJb5WXKA();


                      // let render man know..
                      if (vsyncReady())
                          return;

                      // 1800 frames at 60 fps is 30 sec (30 000 ms)
                      // frameIDslider?

                      //var fps60 = frameIDslider * 1000 / 60.0f;
                      var fps60 = frameIDslider * (1 / 60.0f);
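
                      // worked example: frameIDslider = 1800 -> 1800 * (1 / 60.0f) = 30.0, i.e. 30 sec of shader time;
                      // the commented-out line above produced the same instant in milliseconds (1800 * 1000 / 60 = 30 000).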

                      pass0.Paint_Image(
                        fps60,

                        mMouseOriX,
                        mMouseOriY,
                        mMousePosX,
                        mMousePosY
                    //,

                    // gl_FragCoord
                    // cannot be scaled, and can be referenced directly.
                    // need another way to scale
                    //zoom: 0.3f
                    );

                      // what does it do?
                      // need redux build..
                      gl0.flush();

                      //await u.animate.async.@checked;
                  };

              }
          );
            #endregion






            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go? 120 km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                  // left?
                  1.0 * (camerax + fcamerax),
                   // height?
                   //0,
                   //1600,
                   //1024,

                   // if the camera is in the center, would we need to move the scene?
                   // we have to move the camera. as we move the scene the lights are messed up
                   //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                   // can we hover top of the map?
                   );
            };
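
            // how the two y sliders hand off (as read from the code above):
            // - while cameray is below its max, camerayHigh is forced back to its min, so the low range wins;
            // - once cameray sits at its max, camerayHigh takes over and supplies cy;
            // the effective offset is then (camerax + fcamerax, cy + fcameray, cameraz + fcameraz),
            // where the f* values are driven by the render loop rather than the UI sliders.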


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = $"scale({uizoom})";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = $"scale({uizoom})";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = $"scale({uizoom})";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = $"scale({uizoom})";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = $"scale({uizoom})";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = $"scale({uizoom})";
            #endregion
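
            // the six 90deg FOV, aspect 1 cameras above share a single origin and together cover the full sphere,
            // one camera per cube face (PX/NX/PY/NY/PZ/NZ). Their preview canvases sit on a grid whose cell size is
            // (uizoom * cubefacesize + 8) pixels, e.g. for NX (column 2, row 1):
            //
            //   x = cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2;   // column
            //   y = 8                 + (int)(uizoom * cubefacesize + 8) * 1;   // row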




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 3840 / 2160 = 1.7777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            // 7580 / 3840 = 1.973958333333333

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = $"scale({suizoom})";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);
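
            // preview scale: suizoom = 480f / 3840 = 0.125, so the full 3840x2160 equirectangular canvas is shown
            // as a 480x270 thumbnail while toDataURL / WriteAllBytes below still capture the full resolution.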

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
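
            // How the cube-to-equirectangular pass is assumed to work (sketch only, not used below; the real mapping
            // lives in Shaders.ProgramFragmentShader, which is not shown here): each output pixel (u, v) of the
            // panorama is read as a longitude/latitude pair, turned into a direction, and that direction samples the
            // cubemap bound via pass.mInputs[0].
            Func<double, double, THREE.Vector3> equirectangularToDirection = (u, v) =>
            {
                var lon = (u - 0.5) * 2.0 * Math.PI;   // -PI .. +PI around the vertical axis
                var lat = (0.5 - v) * Math.PI;         // +PI/2 at the top of the image, -PI/2 at the bottom

                return new THREE.Vector3(
                    Math.Cos(lat) * Math.Sin(lon),
                    Math.Sin(lat),
                    Math.Cos(lat) * Math.Cos(lon)
                );
            };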




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);
            int files2count = 0;

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir.createReader();

                var files2 = await dir2r.readFileEntries();

                files2count = files2.Count();

                if (files2count > 0)
                {
                    new IHTMLPre { new { files2count } }.AttachToDocument();

                }
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // "A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax."

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                $"render {maxfps}hz {maxlengthseconds}sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;


                //frameIDslider.valueAsNumber = -1;
                frameIDslider.valueAsNumber = files2count - 1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".png";
                status = "rendering... " + new { filename };


                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                    await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;




                beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this helps, or it's the fact that we made the Blob static.
                    //await Task.Delay(11);
                    await Task.Delay(33);
                    // gc at 260 happened twice?
                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion
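
            // capture handshake (as read from the code above and the onframe handler further down): the render
            // button sets vsync to a fresh TaskCompletionSource<object> and awaits vsync.Task; the onframe handler
            // renders the six faces, re-uploads the cubemap, runs the equirectangular pass and only then calls
            // vsync.SetResult(null); the loop wakes up, writes the frame via dir.WriteAllBytes(filename, gl),
            // advances frameIDslider (by 15 in low-res design mode, by 1 at full cubefacesizeMAX) and re-arms vsync.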


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasPX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                floor2.AttachTo(scene);
            }
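
            // note: each face plane wraps a 2D canvas (here shader1canvasPX) in a THREE.Texture; flagging
            // tex0.needsUpdate = true from applycameraoffset re-uploads the canvas contents to the GPU every frame,
            // so whatever the shader pass drew into that canvas shows up on the cube face.
            // The same pattern repeats for the other five faces below.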



            // left?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader0canvasPZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0xff0000,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }





            // right?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNZ) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                floor2.position.set(0, 0, -cubefacesize * 0.5);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);

                floor2.AttachTo(scene);
            }


            // back?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader1canvasNX) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                floor2.position.set(cubefacesize * 0.5, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);


                floor2.AttachTo(scene);
            }









            // bottom?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasNY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, -cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI + bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }



            // top?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                var tex0 = new THREE.Texture(shader2canvasPY) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {

                            map = tex0,


                            //ambient = 0x00ff00,

                            // can we color mark it?
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, -cubefacesize * 0.5, 0);
                //floor2.position.set(cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                floor2.position.set(0, cubefacesize * 0.5, 0);


                //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);

                applycameraoffset += delegate
                {
                    floor2.rotation.set(0, 0, 0);

                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    floor2.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI / 2);
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI );
                    //floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), Math.PI);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 0, 1), bottomRotate100 * 0.01f);

                };

                floor2.AttachTo(scene);
            }




            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await i.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(i.width, i.height);

                    cc.bytes = bytes;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var stars = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.9, 64, 64),
                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    scene.add(stars);
                }
           );
            #endregion




            new { }.With(
                   delegate
                   {



                       //dae.position.y = -80;

                       //dae.AttachTo(sceneg);
                       //scene.add(dae);
                       //oo.Add(dae);


                       //var rdysw = Stopwatch.StartNew();

                       //Console.WriteLine()

                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {



                               // let render man know..
                               if (vsyncReady())
                                   return;


                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                               //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                               //sphere.rotation.y += speed;
                               //clouds.rotation.y += speed;

                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads

                               applycameraoffset();
                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?



                               #region x
                               // upside down?
                               // are we ready?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
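
                               // face upload order: the cubemap targets are consecutive GL constants
                               // (TEXTURE_CUBE_MAP_POSITIVE_X = 34069 .. NEGATIVE_Z = 34074, listed above), so
                               // TEXTURE_CUBE_MAP_POSITIVE_X + index walks PX, NX, PY, NY, PZ, NZ - which is why the
                               // canvas array above must stay ordered canvasPX, canvasNX, canvasPY, canvasNY, canvasPZ, canvasNZ.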

                               // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                               // let render man know..
                               if (vsync != null)
                                   if (!vsync.Task.IsCompleted)
                                       vsync.SetResult(null);
                           };


                   }
               );





            Console.WriteLine("do you see it?");
        }
Example n. 3
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://stackoverflow.com/questions/29048161/how-to-export-a-three-js-scene-into-a-360-texture-for-photosphere

            Native.body.style.background = "black";

            Native.body.style.margin   = "0px";
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;
            Native.body.Clear();



            // https://github.com/turban/photosphere/blob/gh-pages/stolanuten.html

            var scene = new THREE.Scene();


            var renderer = new THREE.WebGLRenderer();

            renderer.setSize(Native.window.Width, Native.window.Height);
            // the thing you attach to dom
            renderer.domElement.AttachToDocument();


            // Z:\jsc.svn\examples\javascript\audio\synergy\MovingMusicByBorismus\Application.cs

            var sphere = new THREE.Mesh(
                new THREE.SphereGeometry(100, 20, 20),
                new THREE.MeshBasicMaterial(
                    new
                    {
                        //20150608_165300.jpg
                        //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.stolanuten().src)
                        map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets._20150608_165300().src)
                    }
                )
            );

            sphere.scale.x = -1;
            sphere.AttachTo(scene);

            var camera = new THREE.PerspectiveCamera(75, Native.window.aspect, 1, 1000);

            camera.position.x = 0.1;

            var controls = new THREE.OrbitControls(camera, renderer.domElement);



            Native.window.onframe +=
                delegate
            {
                controls.update();
                camera.position = controls.center.clone();

                renderer.render(scene, camera);
            };
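
            // note: copying controls.center into camera.position every frame keeps the camera pinned at the orbit
            // centre, so OrbitControls effectively contributes rotation only - which is what you want when looking
            // around from inside the inverted (scale.x = -1) panorama sphere.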



            Native.window.onresize +=
                delegate
            {
                camera.aspect = Native.window.aspect;
                camera.updateProjectionMatrix();

                renderer.setSize(Native.window.Width, Native.window.Height);
            };

            // http://www.visualstudio.com/en-us/news/vs2015-vs
        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chromeequirectangularcameraexperiment

        // "x:\util\android-sdk-windows\platform-tools\adb.exe" install -r "r:\jsc.svn\examples\javascript\WebGL\WebGLVRHZTeaser\WebGLVRHZTeaser\bin\Debug\staging\WebGLVRHZTeaser.ApplicationWebService\staging.apk\staging\apk\bin\WebGLVRHZTeaser.Activities-release.apk"

        //I/Web Console(25108): 0ms NewInstanceConstructor restore fields.. at http://10.144.157.179:23222/view-source:50800
        //I/Web Console(25108): THREE.WebGLRenderer at http://10.144.157.179:23222/view-source:90370
        //E/Web Console(25108): Error creating WebGL context. at http://10.144.157.179:23222/view-source:90581
        //E/Web Console(25108): Uncaught TypeError: Cannot read property 'getShaderPrecisionFormat' of null at http://10.144.157.179:23222/view-source:90585

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
		{

#if false
			#region += Launched chrome.app.window
			// X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
			dynamic self = Native.self;
			dynamic self_chrome = self.chrome;
			object self_chrome_socket = self_chrome.socket;

			if (self_chrome_socket != null)
			{

				chrome.Notification.DefaultTitle = "HZ";
				chrome.Notification.DefaultIconUrl = new HTML.Images.FromAssets.Preview().src;

				ChromeTCPServer.TheServerWithAppWindow.Invoke(AppSource.Text);

				return;
			}
			#endregion
#endif

			// https://code.google.com/p/chromium/issues/detail?id=483890

			Native.document.body.style.margin = "0px";
			Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;
			Native.body.style.backgroundColor = "black";
			Native.document.body.Clear();

			// what to do if webgl is not supported?


			double SCREEN_WIDTH = Native.window.Width;
			double SCREEN_HEIGHT = Native.window.Height;

			#region scene
			var scene = new THREE.Scene();
			var clock = new THREE.Clock();

			var sceneRenderTarget = new THREE.Scene();
			var cameraOrtho = new THREE.OrthographicCamera(
				(int)SCREEN_WIDTH / -2,
				(int)SCREEN_WIDTH / 2,
				(int)SCREEN_HEIGHT / 2,
				(int)SCREEN_HEIGHT / -2,
				-100000,
				100000
			);

			cameraOrtho.position.z = 100;
			sceneRenderTarget.add(cameraOrtho);



			var camera = new THREE.PerspectiveCamera(

				//40,
				20,
				//10,

				Native.window.aspect, 2,

				// how far out do we want to zoom?
				200000
				//9000
				);
			camera.position.set(-1200, 800, 1200);
			var target = new THREE.Vector3(0, 0, 0);

			scene.add(camera);
			//scene.add(new THREE.AmbientLight(0x212121));

			//var spotLight = new THREE.SpotLight(0xffffff, 1.15);
			//spotLight.position.set(500, 2000, 0);
			//spotLight.castShadow = true;
			//scene.add(spotLight);

			//var pointLight = new THREE.PointLight(0xff4400, 1.5);
			//pointLight.position.set(0, 0, 0);
			//scene.add(pointLight);


			//scene.add(new THREE.AmbientLight(0xaaaaaa));
			scene.add(new THREE.AmbientLight(0x101030));
			#endregion


			#region light
			//var light = new THREE.DirectionalLight(0xffffff, 1.0);
			var light = new THREE.DirectionalLight(0xffffff, 2.5);
			//var light = new THREE.DirectionalLight(0xffffff, 2.5);
			//var light = new THREE.DirectionalLight(0xffffff, 1.5);
			//var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
			var lightOffset = new THREE.Vector3(
				2000,
				700,

				// lower makes longer shadows 
				700.0
				);
			light.position.copy(lightOffset);
			light.castShadow = true;

			var xlight = light as dynamic;
			xlight.shadowMapWidth = 4096;
			xlight.shadowMapHeight = 2048;

			xlight.shadowDarkness = 0.1;
			//xlight.shadowDarkness = 0.5;

			xlight.shadowCameraNear = 10;
			xlight.shadowCameraFar = 10000;
			xlight.shadowBias = 0.00001;
			xlight.shadowCameraRight = 4000;
			xlight.shadowCameraLeft = -4000;
			xlight.shadowCameraTop = 4000;
			xlight.shadowCameraBottom = -4000;

			xlight.shadowCameraVisible = true;

			scene.add(light);
			#endregion
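
			// shadow setup note: the directional light's shadow camera is an orthographic box spanning
			// [-4000 .. 4000] in x/y with near/far of 10 .. 10000, rendered into a 4096x2048 shadow map;
			// shadowCameraVisible = true draws that frustum as a debug helper.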



			var renderer = new THREE.WebGLRenderer(
				new
				{

					// http://stackoverflow.com/questions/20495302/transparent-background-with-three-js
					alpha = true,
					preserveDrawingBuffer = true,
					antialias = true
				}

				);
			renderer.setSize(1920, 1080);
			//renderer.setSize(2560, 1440);
			renderer.domElement.AttachToDocument();
			renderer.shadowMapEnabled = true;
			renderer.shadowMapType = THREE.PCFSoftShadowMap;



			var renderTarget = new THREE.WebGLRenderTarget(
				   Native.window.Width, Native.window.Height,
				   new
				   {
					   minFilter = THREE.LinearFilter,
					   magFilter = THREE.LinearFilter,
					   format = THREE.RGBAFormat,
					   stencilBuffer = false
				   }
			   );

			//var composer = new THREE.EffectComposer(renderer, renderTarget);
			//var renderModel = new THREE.RenderPass(scene, camera);
			//composer.addPass(renderModel);

			//#region vblur
			//var hblur = new THREE.ShaderPass(THREE.HorizontalTiltShiftShader);
			//var vblur = new THREE.ShaderPass(THREE.VerticalTiltShiftShader);

			////var bluriness = 6.0;
			//var bluriness = 4.0;

			//// Show Details	Severity	Code	Description	Project	File	Line
			////Error CS0656  Missing compiler required member 'Microsoft.CSharp.RuntimeBinder.CSharpArgumentInfo.Create' WebGLTiltShift Application.cs  183

			//(hblur.uniforms as dynamic).h.value = bluriness / Native.window.Width;
			//(vblur.uniforms as dynamic).v.value = bluriness / Native.window.Height;

			//(hblur.uniforms as dynamic).r.value = 0.5;
			//(vblur.uniforms as dynamic).r.value = 0.5;
			////vblur.renderToScreen = true;

			//composer.addPass(hblur);
			//composer.addPass(vblur);
			//#endregion

			// Uncaught TypeError: renderer.setSize is not a function
			// Uncaught TypeError: renderer.getClearColor is not a function

			var effect = new THREE.OculusRiftEffect(
				renderer,

				// how to get the vblur into oculus effect?

				//renderModel,
				//composer,
				//renderTarget,
				new
				{
					worldScale = 100,

					//HMD
				}
				);

			effect.setSize(1920, 1080);
            //effect.setSize(2560, 1440);

            #region WebGLRah66Comanche
            // why isn't it being found?
            // "Z:\jsc.svn\examples\javascript\WebGL\collada\WebGLRah66Comanche\WebGLRah66Comanche\WebGLRah66Comanche.csproj"
            new global::WebGLRah66Comanche.Comanche(
			).Source.Task.ContinueWithResult(
				dae =>
				{

					//dae.position.y = -40;
					//dae.position.z = 280;
					scene.add(dae);
					//oo.Add(dae);

					// wont do it
					//dae.castShadow = true;

					dae.children[0].children[0].children.WithEach(x => x.castShadow = true);


					// the rotors?
					dae.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);
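					// a minimal sketch (not wired up, assuming Object3D exposes children/castShadow
					// in this binding as used above): instead of indexing specific child levels,
					// a recursive traversal could flag every descendant for shadows.
					Action<THREE.Object3D> setCastShadow = null;
					setCastShadow = xo =>
					{
						xo.castShadow = true;
						xo.children.WithEach(setCastShadow);
					};
					//setCastShadow(dae);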


					dae.scale.set(0.5, 0.5, 0.5);
					dae.position.x = -900;
					dae.position.z = +900;

					// raise it up
					dae.position.y = 400;

					//var sw = Stopwatch.StartNew();

					//Native.window.onframe += delegate
					//{
					//    //dae.children[0].children[0].children.Last().al
					//    //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
					//    //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
					//    dae.children[0].children[0].children.Last().rotation.y = sw.ElapsedMilliseconds * 0.01;
					//};
				}
			);
			#endregion



			#region tree
			// X:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\Application.cs

			var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
			var tloader = new THREE.JSONLoader();

			// http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
			// https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
			//System.Runtime.CompilerServices.DynamicAttribute

			tloader.load(

				new WebGLGodRay.Models.tree().Content.src,

				new Action<THREE.Geometry>(
				xgeometry =>
				{

					var treeMesh = new THREE.Mesh(xgeometry, materialScene);
					treeMesh.position.set(0, -150, -150);
					treeMesh.position.x = -900;
					treeMesh.position.z = -900;

					treeMesh.position.y = 25;

					var tsc = 400;
					treeMesh.scale.set(tsc, tsc, tsc);

					treeMesh.matrixAutoUpdate = false;
					treeMesh.updateMatrix();


					treeMesh.AttachTo(scene);

				}
				)
				);
			#endregion

			#region create field

			// THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.

			// could we get some film grain?
			var planeGeometry = new THREE.CubeGeometry(512, 512, 1);
			var plane = new THREE.Mesh(planeGeometry,
					new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

				);
			//plane.castShadow = false;
			plane.receiveShadow = true;


			{

				var parent = new THREE.Object3D();
				parent.add(plane);
				parent.rotation.x = -Math.PI / 2;
				parent.scale.set(10, 10, 10);

				scene.add(parent);
			}

			var random = new Random();
			var meshArray = new List<THREE.Mesh>();
			var geometry = new THREE.CubeGeometry(1, 1, 1);
			var sw = Stopwatch.StartNew();

			for (var i = 3; i < 9; i++)
			{

				//THREE.MeshPhongMaterial
				var ii = new THREE.Mesh(geometry,


					new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

					//new THREE.MeshLambertMaterial(
					//new
					//{
					//    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
					//    specular = 0xffaaaa,
					//    ambient= 0x050505, 
					//})

					);
				ii.position.x = i % 7 * 200 - 2.5f;

				// raise it up
				ii.position.y = .5f * 100;
				ii.position.z = -1 * i * 100;
				ii.castShadow = true;
				ii.receiveShadow = true;
				//ii.scale.set(100, 100, 100 * i);
				ii.scale.set(100, 100 * i, 100);


				meshArray.Add(ii);

				scene.add(ii);

				if (i % 2 == 0)
				{
#if FWebGLHZBlendCharacter
					#region SpeedBlendCharacter
					var _i = i;
					{ WebGLHZBlendCharacter.HTML.Pages.TexturesImages ref0; }

					var blendMesh = new THREE.SpeedBlendCharacter();
					blendMesh.load(
						new WebGLHZBlendCharacter.Models.marine_anims().Content.src,
						new Action(
							delegate
							{
								// buildScene
								//blendMesh.rotation.y = Math.PI * -135 / 180;
								blendMesh.castShadow = true;
								// we cannot scale down; we want to keep our shadows
								//blendMesh.scale.set(0.1, 0.1, 0.1);

								blendMesh.position.x = (_i + 2) % 7 * 200 - 2.5f;

								// raise it up
								//blendMesh.position.y = .5f * 100;
								blendMesh.position.z = -1 * _i * 100;


								var xtrue = true;
								// run
								blendMesh.setSpeed(1.0);

								// will in turn call THREE.AnimationHandler.play( this );
								//blendMesh.run.play();
								// this wont help. bokeh does not see the animation it seems.
								//blendMesh.run.update(1);

								blendMesh.showSkeleton(!xtrue);

								scene.add(blendMesh);


								Native.window.onframe +=
								 delegate
								 {

									 blendMesh.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;



									 ii.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;

								 };

							}
						)
					);
					#endregion
#endif
				}

			}
			#endregion


			#region HZCannon
			new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
				async cube =>
				{
					// https://github.com/mrdoob/three.js/issues/1285
					//cube.children.WithEach(c => c.castShadow = true);

					//cube.traverse(
					//    new Action<THREE.Object3D>(
					//        child =>
					//        {
					//            // does it work? do we need it?
					//            //if (child is THREE.Mesh)

					//            child.castShadow = true;
					//            //child.receiveShadow = true;

					//        }
					//    )
					//);

					// um can edit and continue insert code going back in time?
					cube.scale.x = 10.0;
					cube.scale.y = 10.0;
					cube.scale.z = 10.0;



					//cube.castShadow = true;
					//dae.receiveShadow = true;

					//cube.position.x = -100;

					////cube.position.y = (cube.scale.y * 50) / 2;
					//cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



					// if i want to rotate, how do I do it?
					//cube.rotation.z = random() + Math.PI;
					//cube.rotation.x = random() + Math.PI;
					var sw2 = Stopwatch.StartNew();



					scene.add(cube);
					//interactiveObjects.Add(cube);

					// offset is wrong
					//while (true)
					//{
					//    await Native.window.async.onframe;

					//    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

					//}
				}
			);
			#endregion


			#region HZCannon
			new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
				async cube =>
				{
					// https://github.com/mrdoob/three.js/issues/1285
					//cube.children.WithEach(c => c.castShadow = true);

					//cube.traverse(
					//    new Action<THREE.Object3D>(
					//        child =>
					//        {
					//            // does it work? do we need it?
					//            //if (child is THREE.Mesh)

					//            child.castShadow = true;
					//            //child.receiveShadow = true;

					//        }
					//    )
					//);

					// um can edit and continue insert code going back in time?
					cube.scale.x = 10.0;
					cube.scale.y = 10.0;
					cube.scale.z = 10.0;



					//cube.castShadow = true;
					//dae.receiveShadow = true;


					// jsc, what about out-of-band code patching?
					cube.position.z = 600;
					cube.position.x = -900;
					//cube.position.y = -400;

					//cube.position.x = -100;
					//cube.position.y = -400;

					////cube.position.y = (cube.scale.y * 50) / 2;
					//cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



					// if i want to rotate, how do I do it?
					//cube.rotation.z = random() + Math.PI;
					//cube.rotation.x = random() + Math.PI;
					var sw2 = Stopwatch.StartNew();



					scene.add(cube);
					//interactiveObjects.Add(cube);

					// offset is wrong
					//while (true)
					//{
					//    await Native.window.async.onframe;

					//    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

					//}
				}
			);
			#endregion


			#region HZBunker
			new HeatZeekerRTSOrto.HZBunker().Source.Task.ContinueWithResult(
					 cube =>
					 {
						 // https://github.com/mrdoob/three.js/issues/1285
						 //cube.children.WithEach(c => c.castShadow = true);
						 cube.castShadow = true;

						 //cube.traverse(
						 //    new Action<THREE.Object3D>(
						 //        child =>
						 //        {
						 //            // does it work? do we need it?
						 //            //if (child is THREE.Mesh)
						 //            child.castShadow = true;
						 //            //child.receiveShadow = true;

						 //        }
						 //    )
						 //);

						 // um can edit and continue insert code going back in time?
						 cube.scale.x = 10.0;
						 cube.scale.y = 10.0;
						 cube.scale.z = 10.0;

						 //cube.castShadow = true;
						 //dae.receiveShadow = true;

						 cube.position.x = -1000;
						 //cube.position.y = (cube.scale.y * 50) / 2;
						 cube.position.z = 0;

						 scene.add(cube);
					 }
				 );
			#endregion






			var lon0 = -45.0;
			var lon1 = 0.0;

			var lon = new sum(
				 () => lon0,
				 () => lon1
			 );

			var lat0 = 0.0;
			var lat1 = 0.0;

			// or could we do it with byref or pointers?
			var lat = new sum(
				() => lat0,
				() => lat1
			);

			var phi = 0.0;
			var theta = 0.0;

			//var controls = new THREE.OrbitControls(camera);
			var camera_rotation_z = 0.0;

			Native.window.onframe +=
				delegate
				{
					////var delta = clock.getDelta();

					//controls.update();



					var scale = 1.0;
					var delta = clock.getDelta();
					var stepSize = delta * scale;

					if (stepSize > 0)
					{
						//characterController.update(stepSize, scale);
						//gui.setSpeed(blendMesh.speed);

						THREE.AnimationHandler.update(stepSize);
					}

					//camera.position = controls.center.clone();

					//if (Native.document.pointerLockElement == Native.document.body)
					//    lon += 0.00;
					//else
					//    lon += 0.01;

					//var lat2 = Math.Max(-85, Math.Min(85, lat));

					//Native.document.title = new { lon, lat }.ToString();
					//Native.document.title = new { lon0 }.ToString();


					phi = THREE.Math.degToRad(90 - lat);
					theta = THREE.Math.degToRad(lon);

					target.x = camera.position.x + (500 * Math.Sin(phi) * Math.Cos(theta));
					target.y = camera.position.y + (500 * Math.Cos(phi));
					target.z = camera.position.z + (500 * Math.Sin(phi) * Math.Sin(theta));


					//controls.update();
					//camera.position = controls.center.clone();

					// camera beta tilt?

					camera.lookAt(target);
					camera.rotation.z += camera_rotation_z;

					//composer.render(0.1);
					//renderer.render(scene, camera);
					effect.render(scene, camera);
				};
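			// the look-at math above is plain spherical-to-Cartesian conversion with a fixed
			// radius of 500; a stand-alone sketch of the same mapping (unused, for reference only):
			Func<double, double, THREE.Vector3> latLonToOffset = (xlat, xlon) =>
			{
				var xphi = THREE.Math.degToRad(90 - xlat);
				var xtheta = THREE.Math.degToRad(xlon);

				return new THREE.Vector3(
					500 * Math.Sin(xphi) * Math.Cos(xtheta),
					500 * Math.Cos(xphi),
					500 * Math.Sin(xphi) * Math.Sin(xtheta)
				);
			};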


			new { }.With(
					 async delegate
					 {
						 retry:

                         //var s = (double)Native.window.Width / 1920.0;
                         //var s = (double)Native.window.Height / Native.screen.height;
                         //var s = (double)Native.window.Height / 1440;
                         var s = (double)Native.window.Height / 1080;


						 Native.document.body.style.transform = "scale(" + s + ")";
						 Native.document.body.style.transformOrigin = "0% 0%";

						 await Native.window.async.onresize;
						 goto retry;
					 }
				   );
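			// the loop above keeps the fixed 1920x1080 render scaled to the window height:
			// s = Height / 1080, so e.g. a 1440px tall window gives s = 1440 / 1080 = 1.33,
			// reapplied on every resize via the retry label + await onresize.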

			// gamma -0 .. -90

			var compassHeadingOffset = 0.0;
			var compassHeadingInitialized = 0;

			#region compassHeading
			// X:\jsc.svn\examples\javascript\android\Test\TestCompassHeading\TestCompassHeading\Application.cs
			Native.window.ondeviceorientation +=
			  dataValues =>
			  {
				  // Convert degrees to radians
				  var alphaRad = dataValues.alpha * (Math.PI / 180);
				  var betaRad = dataValues.beta * (Math.PI / 180);
				  var gammaRad = dataValues.gamma * (Math.PI / 180);

				  // Calculate equation components
				  var cA = Math.Cos(alphaRad);
				  var sA = Math.Sin(alphaRad);
				  var cB = Math.Cos(betaRad);
				  var sB = Math.Sin(betaRad);
				  var cG = Math.Cos(gammaRad);
				  var sG = Math.Sin(gammaRad);

				  // Calculate A, B, C rotation components
				  var rA = -cA * sG - sA * sB * cG;
				  var rB = -sA * sG + cA * sB * cG;
				  var rC = -cB * cG;

				  // Calculate compass heading
				  var compassHeading = Math.Atan(rA / rB);

				  // Convert from half unit circle to whole unit circle
				  if (rB < 0)
				  {
					  compassHeading += Math.PI;
				  }
				  else if (rA < 0)
				  {
					  compassHeading += 2 * Math.PI;
				  }

				  /*
                  Alternative calculation (replacing the quadrant fix-up above):

                    var compassHeading = Math.atan2(rA, rB);

                    if(rA < 0) {
                      compassHeading += 2 * Math.PI;
                    }
                  */

				  // Convert radians to degrees
				  compassHeading *= 180 / Math.PI;

				  // Compass heading can only be derived if returned values are 'absolute'

				  // X:\jsc.svn\examples\javascript\android\Test\TestCompassHeadingWithReset\TestCompassHeadingWithReset\Application.cs

				  //Native.document.body.innerText = new { compassHeading }.ToString();
				  if (compassHeadingInitialized > 0)
				  {
					  lon1 = compassHeading - compassHeadingOffset;
				  }
				  else
				  {
					  compassHeadingOffset = compassHeading;
					  compassHeadingInitialized++;
				  }

			  };
			#endregion

			#region gamma
			Native.window.ondeviceorientation +=
				//e => Native.body.innerText = new { e.alpha, e.beta, e.gamma }.ToString();
				//e => lon = e.gamma;
				e =>
				{
					lat1 = e.gamma;

					// being able to service a running instance would be nice,
					// either by patching or just re-running the whole iteration in the background
					camera_rotation_z = e.beta * 0.02;
				};
			#endregion



			#region camera rotation
			var old = new { clientX = 0, clientY = 0 };

			Native.document.body.ontouchstart +=
				e =>
				{
					var n = new { e.touches[0].clientX, e.touches[0].clientY };
					old = n;
				};

			Native.document.body.ontouchmove +=
					e =>
					{
						var n = new { e.touches[0].clientX, e.touches[0].clientY };

						e.preventDefault();

						lon0 += (n.clientX - old.clientX) * 0.2;
						lat0 -= (n.clientY - old.clientY) * 0.2;

						old = n;
					};


			Native.document.body.onmousemove +=
				e =>
				{
					e.preventDefault();

					if (Native.document.pointerLockElement == Native.document.body)
					{
						lon0 += e.movementX * 0.1;
						lat0 -= e.movementY * 0.1;

						//Console.WriteLine(new { lon, lat, e.movementX, e.movementY });
					}
				};


			Native.document.body.onmouseup +=
			  e =>
			  {
				  //drag = false;
				  e.preventDefault();
			  };

			Native.document.body.onmousedown +=
				e =>
				{
					//e.CaptureMouse();

					//drag = true;
					e.preventDefault();
					Native.document.body.requestPointerLock();

				};


			Native.document.body.ondblclick +=
				e =>
				{
					e.preventDefault();

					Console.WriteLine("requestPointerLock");
				};

			#endregion
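			// so: touch and pointer-locked mouse movement drive lon0/lat0, the compass drives
			// lon1 and device gamma drives lat1, while beta rolls the camera at 0.02 rad per
			// degree via camera_rotation_z; lon/lat themselves are the sum() of both parts.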

			Native.body.onmousewheel +=
				e =>
				{

					camera_rotation_z += 0.1 * e.WheelDirection;

				};
		}
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360stereomidnightsun");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 1024; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz render we may want to use float camera precision, not available for ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            if (Environment.ProcessorCount < 8)
                //cubefacesize = 64; // 6 faces, ?

                // fast gif?
                cubefacesize = 1024; // 6 faces, ?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;


            Native.css.style.backgroundColor = "darkcyan";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we have not fly beyond moon too much?
            //var radius = 500;
            var radius = 480;

            //var segments = 32;
            var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360stereomidnightsun/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            var far = 0xffffff;

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0x0, 1);
            renderer0.setClearColor(0x0, 0);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we aniamte it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation fromcenter of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // whatif we want more than 30sec video? 2min animation? more frames to render? 2gb disk?





            var maxfps = 60;
            //var maxlengthseconds = 60;
            var maxlengthseconds = 120;

            var maxframes = maxlengthseconds * maxfps;

            var frameIDanimation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "frameIDanimation", @checked = false }.AttachToDocument();

            // whatif we want more than 30sec video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = maxframes, valueAsNumber = 0, title = "frameIDslider" }.AttachToDocument();
            //var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();




            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            camerax.css.after.contentText = "x: ";
            new IHTMLBreak { }.AttachToDocument();

            //camerax.style.borderLeft = "1em solid red";

            // up. what's the highest a rocket can go, 120km?
            //new IHTMLHorizontalRule { }.AttachToDocument();

            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            cameray.css.after.contentText = "y: ";
            new IHTMLBreak { }.AttachToDocument();

            // we wont be going to orbit
            //new IHTMLBreak { }.AttachToDocument();
            //var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            //new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -2048 / 2, max = 0 + 2048 / 2, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -32, max = 32, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            cameraz.css.after.contentText = "z: ";

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;


            new IHTMLHorizontalRule { }.AttachToDocument();





            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo

            // not used for this example tho...
            var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -180, max = 180, valueAsNumber = 0, title = "itemRotation" }.AttachToDocument();
            var spriteOffset = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 11, valueAsNumber = 0, title = "spriteOffset" }.AttachToDocument();
            //var itemRotation = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -90, max = 90, valueAsNumber = 33, title = "itemRotation" }.AttachToDocument();

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            //camerayHigh.onmousedown += delegate
            //{
            //    //if (camerayHigh.valueAsNumber > camerayHigh.min)
            //    cameray.valueAsNumber = cameray.max;
            //};


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;



                // we wont be going to orbit

                //if (cameray.valueAsNumber < cameray.max)
                //    camerayHigh.valueAsNumber = camerayHigh.min;

                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                //    cameray.valueAsNumber = cameray.max;

                //if (cameray.valueAsNumber == cameray.max)
                //    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200c
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };
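            // applycameraoffset acts as a multicast Action: each cube-face camera block below
            // appends its own handler with +=, so a single invocation recenters every face
            // camera at the origin, reapplies its lookAt and then adds the shared cameraoffset
            // rebuilt here from the camerax/cameray/cameraz sliders plus the fcamera* values.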


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));

            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // lets have the item twice the cube item size. and offset -0.5 to recenter.
            // this wont work as we are cloning the buffer for now!
            //var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize * 2);
            var canvasPXitem = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPXitem.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasPXitem.canvas.title = "item";
            canvasPXitem.canvas.AttachToDocument();
            canvasPXitem.canvas.style.transformOrigin = "0 0";
            canvasPXitem.canvas.style.transform = "scale(" + uizoom + ")";
            canvasPXitem.canvas.style.border = "1px solid yellow";

            //canvasPXitem.fillText("hello", 1, 1, cubefacesize);

            //canvasPXitem.drawImage(
            //    //new IHTMLPre { "hello" }
            //    new IHTMLDiv { "hello world. can we draw html into 360 VR yet?" }, 0, 0, cubefacesize, cubefacesize
            //);







            // http://www.w3schools.com/tags/canvas_fillstyle.asp
            canvasPXitem.fillStyle = "red";

            // too big?
            //canvasPXitem.fillRect(
            //    x: cubefacesize / 3,
            //    y: cubefacesize / 4,
            //    w: cubefacesize / 3,
            //    h: cubefacesize / 2
            //);



            // canvasPXitem.fillRect(
            //    x: (cubefacesize - cubefacesize / 6) / 2,
            //    y: (cubefacesize - cubefacesize / 3) / 2,

            //    w: cubefacesize / 6,
            //    h: cubefacesize / 3
            //);



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
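            // at this point there are six 90-degree FOV, aspect 1.0 cameras (PX/NX/PY/NY/PZ/NZ),
            // each paired with its own cubefacesize x cubefacesize 2D canvas preview; together
            // they cover the full sphere around cameraoffset as the faces of a cube map.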




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube

            // bots cannot get a bigger mp4 from yt, and vrideo renders 2k on gearvr.
            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // wont work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
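            // presumably the ShaderToy-style pass above samples those six faces via the
            // samplerCube input and writes a 3840x2160 equirectangular frame: each output pixel
            // maps to a longitude/latitude pair, which gives a direction vector to look up in
            // the cube map (the usual cube-to-equirectangular projection); the exact math lives
            // in Shaders.ProgramFragmentShader.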




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);



            var frame2 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();

            frame2.style.height = "270px";
            frame2.style.width = "480px";
            frame2.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 * 2 + 16 * 2, 8 + (int)(uizoom * cubefacesize + 8) * 3);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                //            A method of creating synthetic stereoscopic panoramic images that can be implemented
                //in most rendering packages has been presented. If single panoramic pairs can be created
                //then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                //movies where the viewer can interact with, at least with regard to what they choose to look
                //at. These images can be projected so as to engage the two features of the human visual
                //system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                //imagery that contains parallax information as captured from two horizontally separated eye
                //positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                //how the two panoramic images should be created in rendering packages are provided, in
                //particular, how to precisely configure the virtual cameras and control the distance to zero
                //parallax.

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion

            var vsync = default(TaskCompletionSource<object>);


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //var tex0 = new THREE.Texture(new moon());
            //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            var texPXitem = new THREE.Texture(

                //shader1canvas

                canvasPXitem.canvas

                ) { needsUpdate = true };


            var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            var floor2 = new THREE.Mesh(planeGeometry0,
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                new THREE.MeshPhongMaterial(
                    new
                    {

                        map = texPXitem,

                        transparent = true,
                        alphaTest = 0.5

                        //ambient = 0x00ff00,
                        //color = 0x00ff00
                    })

            );
            //floor2.position.set(0, 0, -cubefacesize  * 0.55);

            floor2.AttachTo(scene);

            applycameraoffset += delegate
            {
                texPXitem.needsUpdate = true;

                //floor2.position.set(-cubefacesize * 0.5, 0, 0);
                //floor2.position.set(-cubefacesize * 0.33, 0, 0);
                // floor2.position.set(-cubefacesize * 0.25, 0, 0);

                //floor2.position.set(-cubefacesize * 0.225, 0, 0);
                floor2.position.set(-cubefacesize * 0.23, 0, 0);

                // too close!
                //floor2.position.set(-cubefacesize * 0.20, 0, 0);

                floor2.rotation.set(0, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2 + radians(itemRotation.valueAsNumber));
            };






            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND

            new IHTMLBreak { }.AttachToDocument();

            var iskybox2 = new HTML.Images.FromAssets._2massAllskyGAMMA();
            var iskybox1 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs();

            var hideskybox1 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox1", @checked = true }.AttachToDocument();
            var hideskybox2 = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox2", @checked = false }.AttachToDocument();

            #region drawStereoFrame
            Func<CanvasRenderingContext2D, Task> drawStereoFrame = async canvasTB =>
            {
                //var xIPD = 4.0;
                var xIPD = 6.0;

                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203/x360stereomidnightsun

                // fake skybox?
                canvasTB.fillStyle = "darkcyan";
                canvasTB.fillRect(0, 0, c.width, c.height);

                //canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                //canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // 12 frames in total. lets add em all
                // can we add a secondary stereo frame ? at 45deg?

                var offsetrotation = 360 / 12;

                hideskybox1.@checked = true;
                hideskybox2.@checked = false;

                // mono bg!
                floor2.visible = false;
                fcamerax = 0;
                await Native.window.async.onframe;
                await Native.window.async.onframe;

                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, 0, c.width, c.height / 2);
                canvasTB.drawImage(gl.canvas, 0, 0, c.width, c.height, 0, c.height / 2, c.width, c.height / 2);

                // keep only bg. hide stereo sprite
                floor2.visible = true;

                //await Native.window.async.onframe;


                // we need our stereo item frame thanks. no bg.
                hideskybox1.@checked = true;
                hideskybox2.@checked = true;

                await Native.window.async.onframe;






                //fcamerax = -xIPD;
                //await Native.window.async.onframe;
                //var stereoT = new IHTMLImage { src = gl.canvas.toDataURL() };

                //fcamerax = +xIPD;

                //await Native.window.async.onframe;
                //var stereoB = new IHTMLImage { src = gl.canvas.toDataURL() };

                ////await Native.window.async.onframe;
                //await stereoB.async.oncomplete;


                // we now have a stereo sprite.
                // can we rotate it on top of the background?


                // 8K fulldome is a resolution of 8192×8192 
                // 8K UHD is a resolution of 7680 × 4320 (33.2 megapixels) 
                // 8192×4320 t
                // Digital video formats with resolutions of 4K (3840×2160) and 8K (7680×4320)


                // WebGL: CONTEXT_LOST_WEBGL: loseContext: context lost ?
                for (int stereoframei = 0; stereoframei < 12; stereoframei++)
                {
                    spriteOffset.valueAsNumber = stereoframei;

                    Console.WriteLine(new { stereoframei });
                    double ioffsetdeg = offsetrotation * stereoframei;


                    ioffsetdeg += (degrees(frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2));


                    // follow the moon?
                    //stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                    //    frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                    //);


                    var ipxoffset = (int)Math.Floor(c.width * ioffsetdeg / 360);

                    ipxoffset = ipxoffset % c.width;

                    fcamerax = -xIPD;
                    await Native.window.async.onframe;
                    var stereoT = gl.canvas;
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, ipxoffset, 0, c.width, c.height / 2);
                    canvasTB.drawImage(stereoT, 0, 0, c.width, c.height, -c.width + ipxoffset, 0, c.width, c.height / 2);

                    fcamerax = +xIPD;
                    await Native.window.async.onframe;
                    var stereoB = gl.canvas;
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, ipxoffset, c.height / 2, c.width, c.height / 2);
                    canvasTB.drawImage(stereoB, 0, 0, c.width, c.height, -c.width + ipxoffset, c.height / 2, c.width, c.height / 2);
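
                    // the second drawImage at (-c.width + ipxoffset) wraps whatever runs off the right
                    // edge back onto the left edge, so the 360 seam stays continuous.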
                }


                //var canvasTB8K = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                //canvasTB8K.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB8K.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);

                // https://www.reddit.com/r/GearVR/comments/2vrfyu/id_suggest_makers_of_360_videos_make_them_the/
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                // can we actually watch stereo _TB images on gearVR?

            };
            #endregion

            #region stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151114/stereo
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151112
            new IHTMLButton { "make me a stero TB image " }.AttachToDocument().With(
                async e =>
                {
                    // http://www.vrideo.com/watch/ALdE7mm
                    // https://www.youtube.com/watch?v=S3iTPxMIlCI

                    var onclick = e.async.onclick;

                    while (await onclick)
                    {




                        // keep it 4K; hardware and YouTube are not ready for 60fps 8K yet!
                        var canvasTB = new CanvasRenderingContext2D(c.width, c.height);

                        await drawStereoFrame(canvasTB);

                        // gearVR will get a black screen
                        // 
                        //frame2.src = canvasTB8K.canvas.toDataURL();
                        frame2.src = canvasTB.canvas.toDataURL();


                        onclick = e.async.onclick;

                        //while (!onclick.IsCompleted)
                        //{
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f0.src;
                        //    await Task.Delay(1000 / 30);
                        //    frame0.src = f1.src;
                        //}
                    }
                }
            );
            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                //"render 60hz 30sec"
                //$"render {maxfps}hz {maxlengthseconds}sec"
                "render " + new {maxfps} + "hz " + new {maxlengthseconds} + "sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;

                //var canvasTB = new CanvasRenderingContext2D(c.width * 2, c.height * 2);
                var canvasTB = new CanvasRenderingContext2D(c.width, c.height);


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                //frameIDanimation.@checked = true;
                frameIDslider.valueAsNumber = -1;

                // allow the animation values to sink in
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;



                goto beforeframe;


                // parallax offset?

                await_nextframe:


                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };

                await drawStereoFrame(canvasTB);

                //var xIPD = 4.0;


                //// left eye
                //fcamerax = -xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };


                //// right eye
                //fcamerax = +xIPD;
                //vsync = new TaskCompletionSource<object>();
                //await vsync.Task;
                //var f1 = new IHTMLImage { src = gl.canvas.toDataURL() };
                //await f1.async.oncomplete;



                //canvasTB.drawImage(f0, 0, 0, c.width, c.height, 0, 0, c.width * 2, c.height);
                //canvasTB.drawImage(f1, 0, 0, c.width, c.height, 0, c.height, c.width * 2, c.height);


                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir == null)
                {
                    frame2.src = canvasTB.canvas.toDataURL();

                    await Task.Delay(500);
                }
                else
                    await dir.WriteAllBytes(filename, canvasTB);
                //await dir.WriteAllBytes(filename, gl);
                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode vs render mode:
                // at a reduced cubefacesize (preview) skip ahead 60 frames per pass; at full resolution advance one frame at a time.
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 60;
                else
                    frameIDslider.valueAsNumber++;




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;



                // 60hz 30sec
                if (frameIDslider.valueAsNumber < maxframes)
                {
                    // Blob GC? either this delay helps, or the fact that we made the Blob static does.
                    await Task.Delay(11);

                    goto await_nextframe;
                }
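
                // control flow note (added): the label pair above is just a render loop -
                // start at beforeframe, and while frameIDslider.valueAsNumber < maxframes
                // jump back to await_nextframe to draw and save one more stereo TB frame.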

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion



            new { }.With(
                async delegate
                {

                    var tex1 = new the_midnight_sun_by_isilmetriel { };

                    await tex1.async.oncomplete;

                    // the first one is 124, while the others are 123?
                    var tex1w = 123;
                    //var tex1w = 120;
                    var tex1h = 626;

                    //canvasPXitem.drawImage(
                    //           tex1, 2, 2, tex1w, tex1h, 0, 0, tex1w, tex1h
                    //       );


                    // how long until jsc can upstream small updates to code?



                    //// canvasPXitem.drawImage(
                    ////     (IHTMLCanvas)renderer0.domElement,



                    ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    sw: cubefacesize / 6,
                    ////    sh: cubefacesize / 3,

                    ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                    ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                    ////    dw: cubefacesize / 6,
                    ////    dh: cubefacesize / 3
                    ////);





                    await iskybox2.async.oncomplete;

                    var bytes1 = await iskybox1.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(iskybox1.width, iskybox1.height);

                    cc.bytes = bytes1;

                    //s.image = cc;
                    //s.needsUpdate = true;

                    var skybox1_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = new THREE.Texture { image = cc, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            });


                    var skybox1 = new THREE.Mesh(
                        //new THREE.SphereGeometry(far * 0.92, 64, 64),
                        //new THREE.SphereGeometry(far * 0.80, 64, 64),

                            // still zfighting
                        //new THREE.SphereGeometry(far * 0.50, 64, 64),

                            // the other option is to have a single bg and blend on that. this is just a rotation visualization.
                            new THREE.SphereGeometry(far * 0.30, 64, 64),
                           skybox1_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;


                    hideskybox1.onchange += delegate
                    {
                        skybox1.visible = !hideskybox1.@checked;
                    };
                    skybox1.visible = !hideskybox1.@checked;


                    scene.add(skybox1);




                    applycameraoffset += delegate
                    {
                        if (frameIDanimation.@checked)
                        {
                            itemRotation.valueAsNumber = (frameIDslider.valueAsNumber / 2) % 360 - 180;

                            hideskybox1.@checked = (frameIDslider.valueAsNumber / 2 + 180) % 720 < 360;
                        }
                    };
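
                    // animation mapping note (added, assuming the slider advances one unit per rendered frame):
                    // itemRotation sweeps -180..180 over 720 frames (12s at 60 fps),
                    // and hideskybox1 toggles with a 1440 frame period, spending half of it checked.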


                    // target bg
                    var skybox2 = new THREE.Mesh(
                            new THREE.SphereGeometry(far * 0.95, 64, 64),
                           new THREE.MeshBasicMaterial(
                            new
                            {
                                map = new THREE.Texture { image = iskybox2, needsUpdate = true },
                                side = THREE.BackSide,
                                transparent = true
                            })
                    );


                    skybox2.AttachTo(scene).With(
                        stars =>
                        {
                            applycameraoffset += delegate
                              {

                                  skybox2.visible = !hideskybox2.@checked;



                                  skybox1.rotation.set(0, 0, 0);
                                  // spin
                                  skybox1.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                     frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                 );

                                  // reset
                                  stars.rotation.set(0, 0, 0);

                                  // slow rotate in place
                                  stars.rotateOnAxis(new THREE.Vector3(1, 0, 0),
                                      frameIDslider.valueAsNumber / 3600.0 * Math.PI * 2
                                  );

                                  // follow the moon?
                                  stars.rotateOnAxis(new THREE.Vector3(0, -1, 0),
                                      frameIDslider.valueAsNumber / (60 * 60 / 5.0) * Math.PI * 2
                                  );
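
                                  // rotation rate note (added): 60 * 60 / 5.0 = 720, so skybox1 and the
                                  // follow-the-moon spin complete one revolution every 720 frames (12s at 60 fps),
                                  // while the x-axis drift at /3600.0 takes 3600 frames (60s) per revolution.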
                              };
                        }

                    );

                    // can we get our stereo sprite this way?
                    // do we get clean tiles with transparency?
                    // on x83 on frame0 we were able to hide skybox. how?
                    //hideskybox2.onchange += delegate
                    //{
                    //    skybox2.visible = !hideskybox2.@checked;
                    //};
                    //skybox2.visible = !hideskybox2.@checked;



                    Console.WriteLine("skybox added");






                    //dae.position.y = -80;

                    //dae.AttachTo(sceneg);
                    //scene.add(dae);
                    //oo.Add(dae);




                    // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                    // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                    Native.window.onframe +=
                        e =>
                        {
                            // let render man know..
                            if (vsync != null)
                                if (vsync.Task.IsCompleted)
                                    return;


                            //if (pause) return;
                            //if (pause.@checked)
                            //    return;


                            // can we float out of frame?
                            // haha. a bit too flickery.
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                            //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                            //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                            //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                            //sphere.rotation.y += speed;
                            //clouds.rotation.y += speed;

                            // manual rebuild?
                            // red compiler notifies laptop chrome of pending update
                            // app reloads

                            applycameraoffset();
                            renderer0.clear();





                            // spriteOffset
                            canvasPXitem.drawImage(
                                       tex1,

                                       //2px black border!
                                //((tex1w + 4) * spriteOffset.valueAsNumber) + 2,
                                       ((tex1w + 2) * spriteOffset.valueAsNumber) + 3,

                                       2, tex1w, tex1h,

                                       // dest
                                       (cubefacesize - tex1w) / 2,
                                       (cubefacesize - tex1h) / 2,

                                       tex1w, tex1h
                                   );



                            //rendererPY.clear();

                            //cameraPX.aspect = canvasPX.aspect;
                            //cameraPX.updateProjectionMatrix();

                            // um what does this do?
                            //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                            // mousewheel allows the camera to move closer
                            // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                            //this.targetPX.x += 1;
                            //this.targetNX.x -= 1;

                            //this.targetPY.y += 1;
                            //this.targetNY.y -= 1;

                            //this.targetPZ.z += 1;
                            //this.targetNZ.z -= 1;

                            // how does the 360 or shadertoy want our cubemaps?


                            // and then rotate right?

                            // how can we render cubemap?


                            // hide everything else


                            // inversion effect?
                            //// if (hideskybox1.@checked)
                            ////     skybox1.visible = true;
                            //// else
                            ////     skybox1.visible = false;

                            //// floor2.visible = false;
                            //// renderer0.render(scene, cameraPX);
                            //// //canvasPXitem.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);


                            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151203
                            // can we draw from that special image?


                            //canvasPXitem.drawImage(
                            //    tex1, 2, 2, 124, 630 - 4, 0, 0, 124, 626
                            //);

                            //// canvasPXitem.drawImage(
                            ////     (IHTMLCanvas)renderer0.domElement,



                            ////    sx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    sy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    sw: cubefacesize / 6,
                            ////    sh: cubefacesize / 3,

                            ////    dx: (cubefacesize - cubefacesize / 6) / 2,
                            ////    dy: (cubefacesize - cubefacesize / 3) / 2,

                            ////    dw: cubefacesize / 6,
                            ////    dh: cubefacesize / 3
                            ////);


                            //// skybox1.visible = !hideskybox1.@checked;
                            //// floor2.visible = true;


                            #region x
                            canvasPX.clearRect(0, 0, cubefacesize, cubefacesize);
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNX.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z

                            canvasPZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            canvasNZ.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            canvasPY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            canvasNY.clearRect(0, 0, cubefacesize, cubefacesize);
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                            }.WithEachIndex(
                             (img, index) =>
                             {
                                 gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                 //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                 gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                 // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                 // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                 //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                 gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                             }
                          );
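
                            // face order note (added): the canvases are passed as [+X, -X, +Y, -Y, +Z, -Z],
                            // and the GL cube map face enums are consecutive (34069..34074),
                            // so TEXTURE_CUBE_MAP_POSITIVE_X + index selects the matching face for each upload.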


                            //if (cameraz.valueAsNumber == 0)
                            gl.clearColor(0, 0, 0, 0);
                            //else
                            //gl4K.clearColor(0, 0, 0, 1);

                            gl.clear(gl.COLOR_BUFFER_BIT);


                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                            pass.Paint_Image(
                               0,

                               0,
                               0,
                               0,
                               0
                                //,

                          // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                          );

                            //paintsw.Stop();


                            // what does it do?
                            gl.flush();

                            // let render man know..
                            if (vsync != null)
                                if (!vsync.Task.IsCompleted)
                                    vsync.SetResult(null);
                        };





                    Console.WriteLine("do you see it?");

                }
           );




        }
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hznosky.png" "/sdcard/oculus/360photos/"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"



        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near Matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst a: s:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360HZ\Chrome360HZ\bin\Debug\staging\Chrome360HZ.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360HZ\Chrome360HZ\bin\Debug\staging\Chrome360HZ.Application\web

        // ColladaLoader: Empty or non-existing file (assets/Chrome360HZ/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "Chrome360HZ");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif

            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 2048; // 6 faces, ?
            const int cubefacesize = 512; // 6 faces, ?



            var uizoom = 0.05;

            var far = 0xfffff;

            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();

            new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();

            var pause = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "pause" }.AttachToDocument();


            pause.onchange += delegate
            {

                if (pause.@checked)
                    sw.Stop();
                else
                    sw.Start();


            };

            var oo = new List<THREE.Object3D>();

            #region scene
            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();

            var ambient = new THREE.AmbientLight(0x303030);
            scene.add(ambient);

            // should we fix jsc to do a more correct IDL?
            //var directionalLight = new THREE.DirectionalLight(0xffffff, 0.7);
            //directionalLight.position.set(0, 0, 1);
            //scene.add(directionalLight);

            #region light
            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            //var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
            var lightOffset = new THREE.Vector3(
                2000,
                700,

                // lower makes longer shadows 
                700.0
                );
            light.position.copy(lightOffset);
            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.1;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear = 10;
            xlight.shadowCameraFar = 10000;
            xlight.shadowBias = 0.00001;
            xlight.shadowCameraRight = 4000;
            xlight.shadowCameraLeft = -4000;
            xlight.shadowCameraTop = 4000;
            xlight.shadowCameraBottom = -4000;

            xlight.shadowCameraVisible = true;

            scene.add(light);
            #endregion




            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );
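
            // note (added): preserveDrawingBuffer keeps the rendered frame readable after compositing,
            // which is what lets the cube face canvases drawImage from renderer0.domElement
            // (and toDataURL work) every frame instead of reading a cleared buffer.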

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            renderer0.setClearColor(0xffffff, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);
            var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
            cameraNY.position.add(cameraoffset);

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
            cameraPY.position.add(cameraoffset);
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
            cameraNX.position.add(cameraoffset);
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
            cameraPX.position.add(cameraoffset);
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
            cameraNZ.position.add(cameraoffset);
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
            cameraPZ.position.add(cameraoffset);
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
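
            // preview layout note (added): each face canvas is a full cubefacesize (512px) render scaled
            // down by uizoom = 0.05 (~26px tiles); the SetLocation grid puts PX, PZ, NX, NZ on the middle
            // row, with PY above and NY below the PZ column.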




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;

            #region onmousedown
            Native.body.onmousedown +=
                async e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                        return;

                    // movementX no longer works
                    old = new
                    {


                        e.CursorX,
                        e.CursorY
                    };


                    //e.CaptureMouse();
                    var release = e.Element.CaptureMouse();
                    await e.Element.async.onmouseup;

                    release();


                };
            #endregion



            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
            #region onmousemove
            Native.body.onmousemove +=
                e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                    {
                        Native.body.style.cursor = IStyle.CursorEnum.@default;
                        return;
                    }

                    e.preventDefault();
                    e.stopPropagation();


                    Native.body.style.cursor = IStyle.CursorEnum.move;

                    var pointerLock = canvas0 == Native.document.pointerLockElement;


                    //Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

                    if (e.MouseButton == IEvent.MouseButtonEnum.Left)
                    {

                        oo.WithEach(
                            x =>
                            {
                                x.rotation.y += 0.006 * (e.CursorX - old.CursorX);
                                x.rotation.x += 0.006 * (e.CursorY - old.CursorY);
                            }
                        );

                        old = new
                        {


                            e.CursorX,
                            e.CursorY
                        };



                    }

                };
            #endregion

            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1.777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1.973958333333333

            //7580
            //    3840

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = "scale(" + suizoom + ")";
            c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);
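
            // preview scale note (added): suizoom = 480 / 3840 = 0.125, so the 3840x2160 equirectangular
            // canvas shows up as a 480x270 thumbnail, the same footprint as the frame0 image placed next to it.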

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion




            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);


            var mesh = new THREE.Mesh(new THREE.SphereGeometry(far / 2, 50, 50),
           new THREE.MeshBasicMaterial(new
           {
               map = THREE.ImageUtils.loadTexture(
                   //new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().src
                   //new HTML.Images.FromAssets._4008650304_7f837ccbb7_b().src
                  frame0.src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                   )
           }));
            mesh.scale.x = -1;

            #region fixup rotation

            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);
            mesh.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            #endregion


            scene.add(mesh);


            //new IHTMLButton { }

            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };

            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                //            A method of creating synthetic stereoscopic panoramic images that can be implemented
                //in most rendering packages has been presented. If single panoramic pairs can be created
                //then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                //movies where the viewer can interact with, at least with regard to what they choose to look
                //at. These images can be projected so as to engage the two features of the human visual
                //system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                //imagery that contains parallax information as captured from two horizontally separated eye
                //positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                //how the two panoramic images should be created in rendering packages are provided, in
                //particular, how to precisely configure the virtual cameras and control the distance to zero
                //parallax.

                // grab a frame


                var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                //var f0 = (IHTMLImage)gl.canvas;
                //var f0 = (IHTMLImage)gl.canvas;
                //var base64 = gl.canvas.toDataURL();


                //frame0.src = base64;
                frame0.src = f0.src;

                // 7MB!

                if (dir == null)
                    return;

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                dir.WriteAllBytes("0000.png", f0);
                // 3.7MB
                // 3840x2160

            };



            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"

            #region WebGLRah66Comanche
            // why isn't it being found?
            // "Z:\jsc.svn\examples\javascript\WebGL\collada\WebGLRah66Comanche\WebGLRah66Comanche\WebGLRah66Comanche.csproj"
            new global::WebGLRah66Comanche.Comanche(
            ).Source.Task.ContinueWithResult(
                dae =>
                {

                    //dae.position.y = -40;
                    //dae.position.z = 280;
                    scene.add(dae);
                    //oo.Add(dae);

                    // won't do it
                    //dae.castShadow = true;

                    dae.children[0].children[0].children.WithEach(x => x.castShadow = true);


                    // the rotors?
                    dae.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                    dae.scale.set(0.5, 0.5, 0.5);
                    dae.position.x = -900;
                    dae.position.z = +900;

                    // raise it up
                    dae.position.y = 400;

                    //var sw = Stopwatch.StartNew();

                    //Native.window.onframe += delegate
                    //{
                    //    //dae.children[0].children[0].children.Last().al
                    //    //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                    //    //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                    //    dae.children[0].children[0].children.Last().rotation.y = sw.ElapsedMilliseconds * 0.01;
                    //};
                }
            );
            #endregion



            #region tree
            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\WebGLGodRay.csproj"

            var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
            var tloader = new THREE.JSONLoader();

            // http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
            // https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
            //System.Runtime.CompilerServices.DynamicAttribute

            tloader.load(

                new WebGLGodRay.Models.tree().Content.src,

                new Action<THREE.Geometry>(
                xgeometry =>
                {

                    var treeMesh = new THREE.Mesh(xgeometry, materialScene);
                    treeMesh.position.set(0, -150, -150);
                    treeMesh.position.x = -900;
                    treeMesh.position.z = -900;

                    treeMesh.position.y = 25;

                    var tsc = 400;
                    treeMesh.scale.set(tsc, tsc, tsc);

                    treeMesh.matrixAutoUpdate = false;
                    treeMesh.updateMatrix();


                    treeMesh.AttachTo(scene);

                }
                )
                );
            #endregion

            #region create field

            // THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.

            // could we get some film grain?
            var planeGeometry = new THREE.CubeGeometry(512, 512, 1);
            var plane = new THREE.Mesh(planeGeometry,
                    new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

                );
            //plane.castShadow = false;
            plane.receiveShadow = true;


            {

                var parent = new THREE.Object3D();
                parent.add(plane);
                parent.rotation.x = -Math.PI / 2;
                parent.scale.set(10, 10, 10);

                scene.add(parent);
            }

            var random = new Random();
            var meshArray = new List<THREE.Mesh>();
            var geometry = new THREE.CubeGeometry(1, 1, 1);
            //var sw = Stopwatch.StartNew();

            for (var i = 3; i < 9; i++)
            {

                //THREE.MeshPhongMaterial
                var ii = new THREE.Mesh(geometry,


                    new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

                    //new THREE.MeshLambertMaterial(
                    //new
                    //{
                    //    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
                    //    specular = 0xffaaaa,
                    //    ambient= 0x050505, 
                    //})

                    );
                ii.position.x = i % 7 * 200 - 2.5f;

                // raise it up
                ii.position.y = .5f * 100;
                ii.position.z = -1 * i * 100;
                ii.castShadow = true;
                ii.receiveShadow = true;
                //ii.scale.set(100, 100, 100 * i);
                ii.scale.set(100, 100 * i, 100);


                meshArray.Add(ii);

                scene.add(ii);

                if (i % 2 == 0)
                {
#if FWebGLHZBlendCharacter
                    #region SpeedBlendCharacter
					var _i = i;
					{ WebGLHZBlendCharacter.HTML.Pages.TexturesImages ref0; }

					var blendMesh = new THREE.SpeedBlendCharacter();
					blendMesh.load(
						new WebGLHZBlendCharacter.Models.marine_anims().Content.src,
						new Action(
							delegate
							{
								// buildScene
								//blendMesh.rotation.y = Math.PI * -135 / 180;
								blendMesh.castShadow = true;
								// we cannot scale down we want our shadows
								//blendMesh.scale.set(0.1, 0.1, 0.1);

								blendMesh.position.x = (_i + 2) % 7 * 200 - 2.5f;

								// raise it up
								//blendMesh.position.y = .5f * 100;
								blendMesh.position.z = -1 * _i * 100;


								var xtrue = true;
								// run
								blendMesh.setSpeed(1.0);

								// will in turn call THREE.AnimationHandler.play( this );
								//blendMesh.run.play();
								// this won't help. bokeh does not see the animation, it seems.
								//blendMesh.run.update(1);

								blendMesh.showSkeleton(!xtrue);

								scene.add(blendMesh);


								Native.window.onframe +=
								 delegate
								 {

									 blendMesh.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;



									 ii.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;

								 };

							}
						)
					);
                    #endregion
#endif
                }

            }
            #endregion
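            // placement arithmetic for the loop above, worked for i = 3:
            //   position.x = 3 % 7 * 200 - 2.5 = 597.5
            //   position.y = 0.5 * 100         = 50
            //   position.z = -1 * 3 * 100      = -300
            //   scale      = (100, 300, 100)   => a 100-wide pillar 300 units tall
            // so i = 3..8 produces pillars of increasing height placed further along -Z
            // (the x coordinate wraps around via the % 7).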


            #region HZCannon
            // "Z:\jsc.svn\examples\javascript\WebGL\HeatZeekerRTSOrto\HeatZeekerRTSOrto\HeatZeekerRTSOrto.csproj"
            new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
                async cube =>
                {
                    // https://github.com/mrdoob/three.js/issues/1285
                    //cube.children.WithEach(c => c.castShadow = true);

                    //cube.traverse(
                    //    new Action<THREE.Object3D>(
                    //        child =>
                    //        {
                    //            // does it work? do we need it?
                    //            //if (child is THREE.Mesh)

                    //            child.castShadow = true;
                    //            //child.receiveShadow = true;

                    //        }
                    //    )
                    //);

                    // um, can edit-and-continue insert code going back in time?
                    cube.scale.x = 10.0;
                    cube.scale.y = 10.0;
                    cube.scale.z = 10.0;



                    //cube.castShadow = true;
                    //dae.receiveShadow = true;

                    //cube.position.x = -100;

                    ////cube.position.y = (cube.scale.y * 50) / 2;
                    //cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



                    // if i want to rotate, how do I do it?
                    //cube.rotation.z = random() + Math.PI;
                    //cube.rotation.x = random() + Math.PI;
                    var sw2 = Stopwatch.StartNew();



                    scene.add(cube);
                    //interactiveObjects.Add(cube);

                    // offset is wrong
                    //while (true)
                    //{
                    //    await Native.window.async.onframe;

                    //    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

                    //}
                }
            );
            #endregion


            #region HZCannon
            new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
                async cube =>
                {
                    // https://github.com/mrdoob/three.js/issues/1285
                    //cube.children.WithEach(c => c.castShadow = true);

                    //cube.traverse(
                    //    new Action<THREE.Object3D>(
                    //        child =>
                    //        {
                    //            // does it work? do we need it?
                    //            //if (child is THREE.Mesh)

                    //            child.castShadow = true;
                    //            //child.receiveShadow = true;

                    //        }
                    //    )
                    //);

                    // um, can edit-and-continue insert code going back in time?
                    cube.scale.x = 10.0;
                    cube.scale.y = 10.0;
                    cube.scale.z = 10.0;



                    //cube.castShadow = true;
                    //dae.receiveShadow = true;


                    // jsc, what about out-of-band code patching?
                    cube.position.z = 600;
                    cube.position.x = -900;
                    //cube.position.y = -400;

                    //cube.position.x = -100;
                    //cube.position.y = -400;

                    ////cube.position.y = (cube.scale.y * 50) / 2;
                    //cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



                    // if i want to rotate, how do I do it?
                    //cube.rotation.z = random() + Math.PI;
                    //cube.rotation.x = random() + Math.PI;
                    var sw2 = Stopwatch.StartNew();



                    scene.add(cube);
                    //interactiveObjects.Add(cube);

                    // offset is wrong
                    //while (true)
                    //{
                    //    await Native.window.async.onframe;

                    //    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

                    //}
                }
            );
            #endregion


            #region HZBunker
            new HeatZeekerRTSOrto.HZBunker().Source.Task.ContinueWithResult(
                     cube =>
                     {
                         // https://github.com/mrdoob/three.js/issues/1285
                         //cube.children.WithEach(c => c.castShadow = true);
                         cube.castShadow = true;

                         //cube.traverse(
                         //    new Action<THREE.Object3D>(
                         //        child =>
                         //        {
                         //            // does it work? do we need it?
                         //            //if (child is THREE.Mesh)
                         //            child.castShadow = true;
                         //            //child.receiveShadow = true;

                         //        }
                         //    )
                         //);

                         // um, can edit-and-continue insert code going back in time?
                         cube.scale.x = 10.0;
                         cube.scale.y = 10.0;
                         cube.scale.z = 10.0;

                         //cube.castShadow = true;
                         //dae.receiveShadow = true;

                         cube.position.x = -1000;
                         //cube.position.y = (cube.scale.y * 50) / 2;
                         cube.position.z = 0;

                         scene.add(cube);
                     }
                 );
            #endregion


            new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
                   dae =>
                   {
                       // 90deg
                       dae.rotation.x = -Math.Cos(Math.PI);

                       //dae.scale.x = 30;
                       //dae.scale.y = 30;
                       //dae.scale.z = 30;
                       dae.position.z = -(65 - 200);





                       var scale = 0.9;

                       // jsc, do we have ILObserver available yet?
                       dae.scale.x = scale;
                       dae.scale.y = scale;
                       dae.scale.z = scale;


                       #region onmousewheel
                       Native.body.onmousewheel +=
                           e =>
                           {
                               e.preventDefault();

                               //camera.position.z = 1.5;

                               // min/max clamp; should adjust the speed as well!
                               // max 4.0
                               // min 0.6
                               dae.position.z -= 10.0 * e.WheelDirection;

                               //camera.position.z = 400;
                               //dae.position.z = dae.position.z.Max(-200).Min(200);

                               //Native.document.title = new { z }.ToString();

                           };
                       #endregion
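                       // a minimal clamp sketch for the wheel handler above (an assumption, mirroring
                       // the commented-out .Max().Min() line; the 200 unit bounds are illustrative only):
                       //   dae.position.z = Math.Max(-200.0, Math.Min(200.0, dae.position.z));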


                       //dae.position.y = -80;

                       scene.add(dae);
                       oo.Add(dae);




                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {
                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               dae.position.x = Math.Sin(sw.ElapsedMilliseconds * 0.0001) * 190.0;
                               dae.position.y = Math.Cos(sw.ElapsedMilliseconds * 0.0001) * 90.0;
                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads


                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?


                               #region x
                               // upside down?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
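                               // face upload order assumed above: index 0..5 maps to
                               // TEXTURE_CUBE_MAP_POSITIVE_X (34069), NEGATIVE_X (34070),
                               // POSITIVE_Y (34071), NEGATIVE_Y (34072),
                               // POSITIVE_Z (34073), NEGATIVE_Z (34074),
                               // which is why the canvases are listed as PX, NX, PY, NY, PZ, NZ.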


                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                   //,

                                // gl_FragCoord
                                   // cannot be scaled, and can be referenced directly.
                                   // need another way to scale
                                   //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                           };


                   }
               );


            #endregion



            Console.WriteLine("do you see it?");
        }
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://stackoverflow.com/questions/29048161/how-to-export-a-three-js-scene-into-a-360-texture-for-photosphere

            Native.body.style.background = "black";

            Native.body.style.margin = "0px";
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;
            Native.body.Clear();



            // https://github.com/turban/photosphere/blob/gh-pages/stolanuten.html

            var scene = new THREE.Scene();


            var renderer = new THREE.WebGLRenderer();
            renderer.setSize(Native.window.Width, Native.window.Height);
            // the thing you attach to dom
            renderer.domElement.AttachToDocument();


            // Z:\jsc.svn\examples\javascript\audio\synergy\MovingMusicByBorismus\Application.cs

            var sphere = new THREE.Mesh(
                new THREE.SphereGeometry(100, 20, 20),
                new THREE.MeshBasicMaterial(
                    new
                    {
                        //20150608_165300.jpg
                        //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.stolanuten().src)
                        map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets._20150608_165300().src)
                    }
                )
            );
            sphere.scale.x = -1;
            sphere.AttachTo(scene);
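            // sphere.scale.x = -1 mirrors the sphere so its faces point inward;
            // the camera sits at the center and sees the equirectangular photo
            // mapped onto the inside of the sphere (a basic photosphere viewer).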

            var camera = new THREE.PerspectiveCamera(75, Native.window.aspect, 1, 1000);
            camera.position.x = 0.1;

            var controls = new THREE.OrbitControls(camera, renderer.domElement);






            Native.window.onframe +=
                delegate
                {
                    controls.update();
                    camera.position = controls.center.clone();

                    renderer.render(scene, camera);


                };



            Native.window.onresize +=
              delegate
              {
                  camera.aspect = Native.window.aspect;
                  camera.updateProjectionMatrix();

                  renderer.setSize(Native.window.Width, Native.window.Height);

              };

            // http://www.visualstudio.com/en-us/news/vs2015-vs
        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151227/x360stereomoon

        // chrome cannot copy url to clipboard! 

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {

            // THREE.WebGLRenderer 72dev
            Console.WriteLine(new { THREE.REVISION } + " black screen until everything loads!");


            // http://smus.com/ultrasonic-networking/
            // http://chromium.googlecode.com/svn/trunk/samples/audio/shiny-drum-machine.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/box2d-js/box2d-audio.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/simple.html
            // http://chromium.googlecode.com/svn/trunk/samples/audio/oscillator-fm2.html
            // http://borismus.github.io/spectrogram/
            // http://borismus.github.io/moving-music/
            // http://smus.com/spatial-audio-web-vr/
            // can we have 360 audio yet?

            // THREE.WebGLObjects: Converting... THREE.PointCloud THREE.BufferGeometry


            new { }.With(
                async delegate
                {
                    //HTML.Audio.FromAssets.

                    // does the jsc assets library do jpeg? or do we need jpg?

                    Native.document.body.Clear();
                    Native.document.body.style.margin = "0px";
                    Native.document.body.style.overflow = IStyle.OverflowEnum.hidden;


                    var w = Stopwatch.StartNew();
                    Console.WriteLine("awaiting for main()...");

                    dynamic window = Native.window;


                    // X:\opensource\github\moving-music\js\audio-renderer.js
                    // why do we need this?
                    window.forest_impulse_response = new HTML.Audio.FromAssets.forest_impulse_response { }.src;
                    window.VideoRenderer_particle = new HTML.Images.FromAssets.particle { }.src;

                    // X:\opensource\github\moving-music\js\video-renderer.js
                    //var cubemapurls = new[] {
                    //    // 'posx.jpeg', 'negx.jpeg', 'posy.jpeg', 'negy.jpeg', 'posz.jpeg', 'negz.jpeg']

                    //    new HTML.Images.FromAssets.px { }.src,
                    //    new HTML.Images.FromAssets.nx { }.src,
                    //    new HTML.Images.FromAssets.py { }.src,
                    //    new HTML.Images.FromAssets.ny { }.src,
                    //    new HTML.Images.FromAssets.pz { }.src,
                    //    new HTML.Images.FromAssets.nz { }.src,
                    //};

                    //window.cubemapurls = cubemapurls;

                    new HTML.Pages.References { }.AttachToDocument();

                    while (window.main == null)
                    {
                        await Task.Delay(1500);
                        Console.WriteLine("awaiting for main()... " + new { w.ElapsedMilliseconds });
                    }
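
                    // a minimal reusable sketch of the poll-until-defined pattern used above;
                    // the helper name waitForGlobal is hypothetical (not part of this sample) and
                    // assumes only Task.Delay and the dynamic window lookup already in scope.
                    Func<Func<object>, Task> waitForGlobal = async probe =>
                    {
                        while (probe() == null)
                            await Task.Delay(1500);
                    };
                    // usage: await waitForGlobal(() => window.main);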


                    Console.WriteLine("awaiting for main()... " + new { window.main });



                    // escape statemachine stack
                    Native.window.requestAnimationFrame += delegate
                    {
                        //Error	1	Cannot convert anonymous method to type 'dynamic' because it is not a delegate type	Z:\jsc.svn\examples\javascript\audio\synergy\MovingMusicByBorismus\Application.cs	93	68	MovingMusicByBorismus



                        //                        Choreographer.prototype.getAudioFile = function(set, basename) {
                        //  var extension = Util.isMp3Supported() ? 'mp3' : 'ogg';
                        //  return 'snd/' + set + '/' + basename + '.' + extension;
                        //};



                        // Largely from http://learningthreejs.com/blog/2011/08/15/lets-do-a-sky/

                        window.VideoRenderer.prototype.addSkybox = IFunction.OfDelegate(
                           new Func<object>(
                               () =>
                               {
                                   // black screen??
                                   //var far = 0x9999;

                                   var far = 0x999;





                                   var sphere = new THREE.Mesh(
                                       new THREE.SphereGeometry(far, 20, 20),
                                       new THREE.MeshBasicMaterial(
                                           new
                                           {
                                               //20150608_165300.jpg
                                               //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.stolanuten().src)
                                               map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets._20150608_165300().src)
                                           }
                                       )
                                   );
                                   sphere.scale.x = -1;

                                   // won't help us.
                                   //sphere.material.opacity = 0.5;


                                   Console.WriteLine("addSkybox... " + new { window.video.scene });

                                   sphere.AttachTo((THREE.Scene)window.video.scene);

                                   return null;
                               }
                           )
                       );




                        //window.Choreographer.prototype.initVocal = (Action)
                        //window.Choreographer.prototype.initVocal = IFunction.OfDelegate(
                        //window.Choreographer.prototype.initVocal = IFunction.Of(
                        window.Choreographer.prototype.getAudioFile = IFunction.OfDelegate(
                            new Func<object, string, string>(
                                (object set, string basename) =>
                                {
                                    Console.WriteLine("window.Choreographer.prototype.getAudioFile " + new { basename });

                                    //var russian = new MovingTrack({
                                    //    src: this.getAudioFile(set, 'Russian'),
                                    //    color: 0x19414B,
                                    //  });

                                    // this.manager.addTrack(cats);


                                    //                                    view-source:54442 29925ms window.Choreographer.prototype.getAudioFile { basename = Cats }
                                    //2015-11-16 12:25:32.573 view-source:54442 29927ms window.Choreographer.prototype.getAudioFile { basename = Nimoy }
                                    //2015-11-16 12:25:32.575 view-source:54442 29929ms window.Choreographer.prototype.getAudioFile { basename = Roth }
                                    //2015-11-16 12:25:32.576 view-source:54442 29930ms window.Choreographer.prototype.getAudioFile { basename = Russian }

                                    if (basename == "Cats") return new HTML.Audio.FromAssets.Sweet_Dreams_My_Love_by_Alexander_J_Turner { }.src;
                                    //if (basename == "Cats") return new HTML.Audio.FromAssets.loop_GallinagoDelicata { }.src;
                                    //if (basename == "Nimoy") return new HTML.Audio.FromAssets.sand_run { }.src;
                                    ////if (basename == "Roth") return new HTML.Audio.FromAssets.snd_jeepengine_start { }.src;
                                    //if (basename == "Roth") return new HTML.Audio.FromAssets.heartbeat3 { }.src;

                                    if (basename == "Nimoy") return null;
                                    if (basename == "Roth") return null;

                                    // the green blob.
                                    //return new HTML.Audio.FromAssets.Russian { }.src;
                                    return new HTML.Audio.FromAssets.crickets { }.src;

                                    // yellow is str track man
                                }
                            )
                        );

                        // X:\opensource\github\moving-music\js\audio-renderer.js


                        window.main();
                    };


                    // can't reset the system. need to rewrite it.
                    #region ondropfile
                    var f = await Native.document.documentElement.async.ondropfile;
                    IHTMLAudio a = f;
                    await a.async.onloadeddata;

                    // Uncaught TypeError: track.update is not a function
                    window.isLoaded = false;
                    Console.WriteLine(new { f.name, f.size, window.manager.trackCount, window.isLoaded });

                    // :4822/view-source:54442 39610ms { name = Sweet Dreams My Love by Alexander_J_Turner.mp3, size = 29790804, trackCount = 4 }

                    //for (int i = 0; i < window.manager.trackCount; i++)


                    foreach (var id in Expando.InternalGetMemberNames((object)window.manager.tracks))
                    {
                        Console.WriteLine(new { id });

                        //window.manager.tracs[item].setAmplitude(0);

                        // https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/createPanner
                        PannerNode panner = window.audio.panners[id];

                        // Failed to set the 'buffer' property on 'AudioBufferSourceNode': The provided value is not of type 'AudioBuffer'.
                        panner.disconnect();

                        THREE.Object3D t = window.video.trackObjects[id];

                        t.parent.remove(t);
                    }




                    window.manager.trackCount = 0;
                    //window.manager.tracks = new { };
                    window.manager.tracks = new object();

                    // vsync
                    await Native.window.async.onframe;

                    var MovingTrack = new IFunction("url", "return  new MovingTrack({ src: url, color: 0x19414B});");




                    dynamic track = MovingTrack.apply(null, a.src);


                    Console.WriteLine("addTrack");
                    window.manager.addTrack(track);



                    Console.WriteLine("loadTrack_");
                    window.audio.loadTrack_(track.id);


                    Func<bool> isready = () => window.audio.ready[track.id];

                    var ready = new TaskCompletionSource<object> { };

                    new { }.With(
                        async delegate
                        {
                            while (!ready.Task.IsCompleted)
                            {
                                await Task.Delay(300);

                                bool xready = isready();

                                Console.WriteLine(new { track.id, xready });

                                if (xready)
                                    ready.SetResult(null);
                            }
                        }
                    );

                    await ready.Task;
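
                    // the loop above just bridges the polled window.audio.ready flag into an awaitable:
                    // once isready() reports true, the TaskCompletionSource completes and execution resumes here.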


                    Console.WriteLine("start");
                    window.audio.start();

                    window.isLoaded = true;

                    //view-source:54481 28681ms { id = b674a511-0580-7000-24ba-02bca7b09197, xready = true }
                    //2015-11-16 17:59:14.044 view-source:54481 28686ms start
                    //2015-11-16 17:59:14.051 view-source:54481 28693ms addPointCloud

                    {
                        Console.WriteLine("addPointCloud " + new { window.choreographer, window.isLoaded });

                        var t = window.video.addPointCloud(new { color = track.color });
                        window.video.trackObjects[track.id] = t;
                    }

                    Console.WriteLine("setMode " + new { window.choreographer.mode_ });
                    window.choreographer.setMode(window.choreographer.mode_);

                    #endregion




                }
            );

            // 
            // window.addEventListener('DOMContentLoaded', main);

        }
        // Example 9
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://threejs.org/examples/#webgl_postprocessing_godrays
            // view-source:file:///X:/opensource/github/three.js/examples/webgl_postprocessing_godrays.html

            Native.body.style.margin   = "0px";
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;
            Native.body.Clear();


            var sunPosition         = new THREE.Vector3(0, 1000, -1000);
            var screenSpacePosition = new THREE.Vector3();

            var mouseX = 0;
            var mouseY = 0;

            var windowHalfX = Native.window.Width / 2;
            var windowHalfY = Native.window.Height / 2;

            //var postprocessing = { enabled : true };

            var orbitRadius = 200;

            var bgColor  = 0x000511;
            var sunColor = 0xffee00;


            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151112
            var camera = new THREE.PerspectiveCamera(70, Native.window.aspect, 1, 3000);

            camera.position.z = 200;

            var scene = new THREE.Scene();

            //

            var materialDepth = new THREE.MeshDepthMaterial(new { });


            #region tree
            // X:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\Application.cs

            var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
            var loader        = new THREE.JSONLoader();

            // http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
            // https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
            //System.Runtime.CompilerServices.DynamicAttribute

            loader.load(

                new Models.tree().Content.src,

                new Action <THREE.Geometry>(
                    xgeometry =>
            {
                var treeMesh = new THREE.Mesh(xgeometry, materialScene);
                treeMesh.position.set(0, -150, -150);

                var tsc = 400;
                treeMesh.scale.set(tsc, tsc, tsc);

                treeMesh.matrixAutoUpdate = false;
                treeMesh.updateMatrix();


                treeMesh.AttachTo(scene);
            }
                    )
                );
            #endregion

            // sphere

            var geo = new THREE.SphereGeometry(1, 20, 10);

            var sphereMesh = new THREE.Mesh(geo, materialScene);

            var sc = 20;
            sphereMesh.scale.set(sc, sc, sc);

            scene.add(sphereMesh);

            var renderer = new THREE.WebGLRenderer(new { antialias = false });

            renderer.setClearColor(bgColor);
            //renderer.setPixelRatio(window.devicePixelRatio);
            renderer.setSize(Native.window.Width, Native.window.Height);
            renderer.domElement.AttachToDocument();


            renderer.autoClear   = false;
            renderer.sortObjects = false;


            var postprocessing_scene = new THREE.Scene();

            var postprocessing_camera = new THREE.OrthographicCamera(Native.window.Width / -2, Native.window.Width / 2, Native.window.Height / 2, Native.window.Height / -2, -10000, 10000);
            postprocessing_camera.position.z = 100;

            postprocessing_scene.add(postprocessing_camera);

            var pars = new { minFilter = THREE.LinearFilter, magFilter = THREE.LinearFilter, format = THREE.RGBFormat };

            var postprocessing_rtTextureColors = new THREE.WebGLRenderTarget(Native.window.Width, Native.window.Height, pars);

            // Switching the depth formats to luminance from rgb doesn't seem to work. I didn't
            // investigate further for now.
            // pars.format = THREE.LuminanceFormat;

            // I would have this quarter size and use it as one of the ping-pong render
            // targets but the aliasing causes some temporal flickering

            var postprocessing_rtTextureDepth = new THREE.WebGLRenderTarget(Native.window.Width, Native.window.Height, pars);

            // Aggressive downsize god-ray ping-pong render targets to minimize cost

            var w = Native.window.Width / 4;
            var h = Native.window.Height / 4;
            var postprocessing_rtTextureGodRays1 = new THREE.WebGLRenderTarget(w, h, pars);
            var postprocessing_rtTextureGodRays2 = new THREE.WebGLRenderTarget(w, h, pars);
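            // worked numbers for the downsized targets above: at a 1920x1080 window the two
            // god-ray ping-pong targets are 480x270, i.e. 1/16 of the full-resolution pixel count,
            // which is what keeps the three generate passes below cheap.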

            // god-ray shaders

            // X:\jsc.svn\market\synergy\THREE\THREE\opensource\gihtub\three.js\build\THREE.ShaderGodRays.idl
            // these are special <script src="js/ShaderGodRays.js"></script>
            var godraysGenShader = THREE.ShaderGodRays["godrays_generate"] as dynamic;
            var postprocessing_godrayGenUniforms       = THREE.UniformsUtils.clone(godraysGenShader.uniforms);
            var postprocessing_materialGodraysGenerate = new THREE.ShaderMaterial(new
            {
                uniforms       = postprocessing_godrayGenUniforms,
                vertexShader   = godraysGenShader.vertexShader,
                fragmentShader = godraysGenShader.fragmentShader
            });

            var godraysCombineShader = THREE.ShaderGodRays["godrays_combine"] as dynamic;
            var postprocessing_godrayCombineUniforms  = THREE.UniformsUtils.clone(godraysCombineShader.uniforms);
            var postprocessing_materialGodraysCombine = new THREE.ShaderMaterial(new
            {
                uniforms       = postprocessing_godrayCombineUniforms,
                vertexShader   = godraysCombineShader.vertexShader,
                fragmentShader = godraysCombineShader.fragmentShader
            });

            var godraysFakeSunShader = THREE.ShaderGodRays["godrays_fake_sun"] as dynamic;
            var postprocessing_godraysFakeSunUniforms = THREE.UniformsUtils.clone(godraysFakeSunShader.uniforms);
            var postprocessing_materialGodraysFakeSun = new THREE.ShaderMaterial(new
            {
                uniforms       = postprocessing_godraysFakeSunUniforms,
                vertexShader   = godraysFakeSunShader.vertexShader,
                fragmentShader = godraysFakeSunShader.fragmentShader
            });

            postprocessing_godraysFakeSunUniforms.bgColor.value.setHex(bgColor);
            postprocessing_godraysFakeSunUniforms.sunColor.value.setHex(sunColor);

            postprocessing_godrayCombineUniforms.fGodRayIntensity.value = 0.75;

            var postprocessing_quad = new THREE.Mesh(
                new THREE.PlaneBufferGeometry(Native.window.Width, Native.window.Height),
                postprocessing_materialGodraysGenerate
                );
            postprocessing_quad.position.z = -9900;
            postprocessing_scene.add(postprocessing_quad);
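            // the quad spans the full window and sits at z = -9900, safely inside the
            // orthographic camera's -10000..10000 range, so each post-processing pass is simply
            // "render this one screen-aligned quad with the current override shader material".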


            #region create field

            // THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.
            var planeGeometry = new THREE.PlaneGeometry(1000, 1000);
            //var planeMaterial = new THREE.MeshLambertMaterial(
            //    new
            //    {
            //        //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.dirt_tx().src),
            //        color = 0xA26D41
            //        //color = 0xff0000
            //    }
            //);

            //planeMaterial.map.repeat.x = 300;
            //planeMaterial.map.repeat.y = 300;
            //planeMaterial.map.wrapS = THREE.RepeatWrapping;
            //planeMaterial.map.wrapT = THREE.RepeatWrapping;
            var plane = new THREE.Mesh(planeGeometry,
                                       new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

                                       );
            plane.castShadow    = false;
            plane.receiveShadow = true;


            {
                var parent = new THREE.Object3D();
                parent.add(plane);

                parent.position.y = -.5f * 100;

                parent.rotation.x = -Math.PI / 2;
                parent.scale.set(100, 100, 100);

                //scene.add(parent);
            }

            var random    = new Random();
            var meshArray = new List <THREE.Mesh>();
            var geometry  = new THREE.CubeGeometry(1, 1, 1);

            for (var i = 1; i < 100; i++)
            {
                //THREE.MeshPhongMaterial
                var ii = new THREE.Mesh(geometry,


                                        new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

                                        //new THREE.MeshLambertMaterial(
                                        //new
                                        //{
                                        //    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
                                        //    specular = 0xffaaaa,
                                        //    ambient= 0x050505,
                                        //})

                                        );
                ii.position.x = i % 2 * 500 - 2.5f;

                // raise it up
                ii.position.y    = .5f * 100;
                ii.position.z    = -1 * i * 400;
                ii.castShadow    = true;
                ii.receiveShadow = true;
                //ii.scale.set(100, 100, 100 * i);
                ii.scale.set(100, 100 * i, 100);


                meshArray.Add(ii);

                scene.add(ii);
            }
            #endregion



            #region Comanche
            new Comanche().Source.Task.ContinueWithResult(
                Comanche =>
            {
                Comanche.position.y = 200;

                //dae.position.z = 280;

                Comanche.AttachTo(scene);

                //scene.add(dae);
                //oo.Add(Comanche);

                // wont do it
                //dae.castShadow = true;

                // http://stackoverflow.com/questions/15492857/any-way-to-get-a-bounding-box-from-a-three-js-object3d
                //var helper = new THREE.BoundingBoxHelper(dae, 0xff0000);
                //helper.update();
                //// If you want a visible bounding box
                //scene.add(helper);

                Comanche.children[0].children[0].children.WithEach(x => x.castShadow = true);


                // the rotors?
                Comanche.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                Comanche.scale.set(0.5, 0.5, 0.5);
                //helper.scale.set(0.5, 0.5, 0.5);

                var s2w = Stopwatch.StartNew();

                Native.window.onframe += delegate
                {
                    //dae.children[0].children[0].children.Last().al
                    //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                    //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                    //rotation.y = sw.ElapsedMilliseconds * 0.01;

                    Comanche.children[0].children[0].children.Last().rotation.y = s2w.ElapsedMilliseconds * 0.001;

                    //dae.children[0].children[0].children.Last().app
                };
            }
                );
            #endregion



            var sw = Stopwatch.StartNew();

            var controls = new THREE.OrbitControls(camera, renderer.domElement);

            // Show Details	Severity	Code	Description	Project	File	Line
            //Error CS0229  Ambiguity between 'THREE.Math' and 'Math'   WebGLGodRay Application.cs  238

            Native.window.onframe +=
                delegate
            {
                //var time = IDate.now() / 4000;
                var time = sw.ElapsedMilliseconds / 4000.0;

                sphereMesh.position.x = orbitRadius * Math.Cos(time);
                sphereMesh.position.z = orbitRadius * Math.Sin(time) - 100;
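
                // orbit arithmetic for the sun sphere above: time advances by 1 every 4000 ms,
                // so one full cos/sin revolution (2 * PI radians) takes roughly 25 seconds,
                // tracing a circle of radius orbitRadius = 200 centred at z = -100.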



                //controls.center.copy(blendMesh.position);
                //controls.center.y += radius * 2.0;
                controls.update();

                //var camOffset = camera.position.clone().sub(controls.center);
                //camOffset.normalize().multiplyScalar(750);
                camera.position = controls.center.clone();


                //camera.position.x += (mouseX - camera.position.x) * 0.036;
                //camera.position.y += (-(mouseY) - camera.position.y) * 0.036;

                //camera.lookAt(scene.position);


                // Find the screenspace position of the sun

                screenSpacePosition.copy(sunPosition).project(camera);

                screenSpacePosition.x = (screenSpacePosition.x + 1) / 2;
                screenSpacePosition.y = (screenSpacePosition.y + 1) / 2;
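
                // project() leaves the sun in normalized device coordinates (-1..1);
                // (x + 1) / 2 remaps that to 0..1 texture space for the shaders (a sun dead
                // ahead at NDC 0 becomes 0.5), and further below it is multiplied by
                // Width/Height to get the pixel position used for the scissor rectangle.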

                // Give it to the god-ray and sun shaders

                postprocessing_godrayGenUniforms["vSunPositionScreenSpace"].value.x = screenSpacePosition.x;
                postprocessing_godrayGenUniforms["vSunPositionScreenSpace"].value.y = screenSpacePosition.y;

                postprocessing_godraysFakeSunUniforms["vSunPositionScreenSpace"].value.x = screenSpacePosition.x;
                postprocessing_godraysFakeSunUniforms["vSunPositionScreenSpace"].value.y = screenSpacePosition.y;

                // -- Draw sky and sun --

                // Clear colors and depths, will clear to sky color

                renderer.clearTarget(postprocessing_rtTextureColors, true, true, false);

                // Sun render. Runs a shader that gives a brightness based on the screen
                // space distance to the sun. Not very efficient, so I make a scissor
                // rectangle around the sun's position to avoid rendering surrounding pixels.

                var sunsqH = 0.74 * Native.window.Height;     // 0.74 depends on extent of sun from shader
                var sunsqW = 0.74 * Native.window.Height;     // both depend on height because sun is aspect-corrected

                screenSpacePosition.x *= Native.window.Width;
                screenSpacePosition.y *= Native.window.Height;

                renderer.setScissor(screenSpacePosition.x - sunsqW / 2, screenSpacePosition.y - sunsqH / 2, sunsqW, sunsqH);
                renderer.enableScissorTest(true);
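
                // the scissor above limits the fake-sun shader to a 0.74 * Height square centred
                // on the sun's pixel position, instead of shading every pixel on screen.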

                postprocessing_godraysFakeSunUniforms["fAspect"].value = Native.window.aspect;

                postprocessing_scene.overrideMaterial = postprocessing_materialGodraysFakeSun;
                renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureColors);

                renderer.enableScissorTest(false);

                // -- Draw scene objects --

                // Colors

                scene.overrideMaterial = null;
                renderer.render(scene, camera, postprocessing_rtTextureColors);

                // Depth

                scene.overrideMaterial = materialDepth;
                renderer.render(scene, camera, postprocessing_rtTextureDepth, true);

                // -- Render god-rays --

                // Maximum length of god-rays (in texture space [0,1]X[0,1])

                var filterLen = 1.0;

                // Samples taken by filter

                var TAPS_PER_PASS = 6.0;

                // Pass order could equivalently be 3,2,1 (instead of 1,2,3), which
                // would start with a small filter support and grow to large. However,
                // the large-to-small order produces less objectionable aliasing artifacts that
                // appear as a glimmer along the length of the beams.

                // pass 1 - render into first ping-pong target

                var pass    = 1.0;
                var stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                postprocessing_godrayGenUniforms["tInput"].value    = postprocessing_rtTextureDepth;

                postprocessing_scene.overrideMaterial = postprocessing_materialGodraysGenerate;

                renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays2);

                // pass 2 - render into second ping-pong target

                pass    = 2.0;
                stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                postprocessing_godrayGenUniforms["tInput"].value    = postprocessing_rtTextureGodRays2;

                renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays1);

                // pass 3 - 1st RT

                pass    = 3.0;
                stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                postprocessing_godrayGenUniforms["tInput"].value    = postprocessing_rtTextureGodRays1;

                renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays2);
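
                // step lengths for the three passes above, with filterLen = 1 and TAPS_PER_PASS = 6:
                //   pass 1: 6^-1 = 0.1667, pass 2: 6^-2 = 0.0278, pass 3: 6^-3 = 0.0046
                // so three 6-tap passes approximate a single 6^3 = 216-tap filter along the ray length.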

                // final pass - composite god-rays onto colors

                postprocessing_godrayCombineUniforms["tColors"].value  = postprocessing_rtTextureColors;
                postprocessing_godrayCombineUniforms["tGodRays"].value = postprocessing_rtTextureGodRays2;

                postprocessing_scene.overrideMaterial = postprocessing_materialGodraysCombine;

                renderer.render(postprocessing_scene, postprocessing_camera);
                postprocessing_scene.overrideMaterial = null;
            };



            new { }.With(
                async delegate
            {
                //while (true)
                do
                {
                    camera.aspect = Native.window.aspect;
                    camera.updateProjectionMatrix();
                    renderer.setSize(Native.window.Width, Native.window.Height);

                    // convert to bool?
                } while (await Native.window.async.onresize);
                //} while (await Native.window.async.onresize != null);
            }
                );

            //var ze = new ZeProperties();

            //ze.Show();

            //ze.treeView1.Nodes.Clear();

            //ze.Add(() => renderer);
            //ze.Add(() => controls);
            //ze.Add(() => scene);
            //ze.Left = 0;
        }
        // Example 10
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;

            // https://3dwarehouse.sketchup.com/model.html?id=e78dca4863e8572d86ea4fa6bd93bc43
            // https://3dwarehouse.sketchup.com/model.html?id=38d1045b8de1cf12b08e958a32ef3184

            var oo = new List<THREE.Object3D>();

            #region scene
            var window = Native.window;



            // scene

            var scene = new THREE.Scene();

            //var ambient = new THREE.AmbientLight(0x101030);
            //// addTrace?
            //scene.add(ambient);

            // should jsc package c# source code along here for code lense like peeking?
            new THREE.AmbientLight(0x101030).AttachTo(scene);

            var lightOffset = new THREE.Vector3(0, 1000, 1000.0);

            // why is idl showing 110?
            var light = new THREE.DirectionalLight(0xffffff, 1.0);
            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            light.position.copy(lightOffset);

            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.3;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear = 10;
            xlight.shadowCameraFar = 10000;
            xlight.shadowBias = 0.00001;
            xlight.shadowCameraRight = 4000;
            xlight.shadowCameraLeft = -4000;
            xlight.shadowCameraTop = 4000;
            xlight.shadowCameraBottom = -4000;

            xlight.shadowCameraVisible = true;
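
            // the "as dynamic" cast above appears to be there only to reach shadow-camera fields
            // the typed binding does not expose; the values describe an 8000x8000 orthographic
            // shadow frustum (left/right/top/bottom +-4000, near 10, far 10000) rendered into
            // a 4096x2048 shadow map.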

            light.AttachTo(scene);





            {

                var planeGeometry = new THREE.CubeGeometry(512, 512, 1);

                var plane = new THREE.Mesh(
                    planeGeometry,
                    material: new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                );

                plane.receiveShadow = true;


                var ZeFloor = new THREE.Object3D();
                plane.AttachTo(ZeFloor);

                ZeFloor.rotation.x = -Math.PI / 2;
                ZeFloor.scale.set(10, 10, 10);

                ZeFloor.AttachTo(scene);
            }




            var renderer = new THREE.WebGLRenderer();
            renderer.setSize(window.Width, window.Height);

            renderer.domElement.AttachToDocument();
            renderer.domElement.style.SetLocation(0, 0);

            renderer.shadowMapEnabled = true;
            renderer.shadowMapType = THREE.PCFSoftShadowMap;

            //var mouseX = 0;
            //var mouseY = 0;
            //var st = new Stopwatch();
            //st.Start();


            //Native.window.document.onmousemove +=
            //    e =>
            //    {
            //        mouseX = e.CursorX - Native.window.Width / 2;
            //        mouseY = e.CursorY - Native.window.Height / 2;
            //    };

            var camera = new THREE.PerspectiveCamera(
                //40,
                20,
                //10,

                Native.window.aspect, 2,

                // how far out do we want to zoom?
                200000
                //9000
                );
            camera.position.set(-1200, 800, -3200);

            camera.AttachTo(scene);

            var controls = new THREE.OrbitControls(camera, renderer.domElement);

            Native.window.onframe +=
                delegate
                {

                    //oo.WithEach(
                    //    x =>
                    //        x.rotation.y = (st.ElapsedMilliseconds + mouseX * 100) * 0.00001
                    //);


                    //camera.position.x += (mouseX - camera.position.x) * .05;
                    //camera.position.y += (-mouseY - camera.position.y) * .05;

                    //camera.lookAt(scene.position);

                    controls.update();
                    camera.position = controls.center.clone();

                    renderer.render(scene, camera);
                };

            Native.window.onresize +=
                delegate
                {
                    camera.aspect = window.aspect;
                    camera.updateProjectionMatrix();

                    renderer.setSize(window.Width, window.Height);

                };
            #endregion

            #region Comanche
            new Comanche().Source.Task.ContinueWithResult(
                Comanche =>
                {

                    Comanche.position.y = 200;

                    //dae.position.z = 280;

                    Comanche.AttachTo(scene);

                    //scene.add(dae);
                    oo.Add(Comanche);

                    // wont do it
                    //dae.castShadow = true;

                    // http://stackoverflow.com/questions/15492857/any-way-to-get-a-bounding-box-from-a-three-js-object3d
                    //var helper = new THREE.BoundingBoxHelper(dae, 0xff0000);
                    //helper.update();
                    //// If you want a visible bounding box
                    //scene.add(helper);

                    Comanche.children[0].children[0].children.WithEach(x => x.castShadow = true);


                    // the rotors?
                    Comanche.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                    Comanche.scale.set(0.5, 0.5, 0.5);
                    //helper.scale.set(0.5, 0.5, 0.5);

                    var sw = Stopwatch.StartNew();

                    Native.window.onframe += delegate
                    {
                        //dae.children[0].children[0].children.Last().al
                        //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                        //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                        //rotation.y = sw.ElapsedMilliseconds * 0.01;

                        Comanche.children[0].children[0].children.Last().rotation.y = sw.ElapsedMilliseconds * 0.001;

                        //dae.children[0].children[0].children.Last().app
                    };
                }
            );
            #endregion


            //#region ZeProperties
            //var ze = new ZeProperties();

            //ze.Show();
            //ze.treeView1.Nodes.Clear();

            //ze.Add(() => renderer);
            //ze.Add(() => controls);
            //ze.Add(() => scene);
            //ze.Left = 0;
            //#endregion

            //f.treeView1.Nodes.Add("controls : " + typeof(THREE.OrbitControls)).Tag = controls;


        }
Example n. 11
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            // http://threejs.org/examples/#webgl_postprocessing_godrays
            // view-source:file:///X:/opensource/github/three.js/examples/webgl_postprocessing_godrays.html

            Native.body.style.margin = "0px";
            Native.body.style.overflow = IStyle.OverflowEnum.hidden;
            Native.body.Clear();


            var sunPosition = new THREE.Vector3(0, 1000, -1000);
            var screenSpacePosition = new THREE.Vector3();

            var mouseX = 0;
            var mouseY = 0;

            var windowHalfX = Native.window.Width / 2;
            var windowHalfY = Native.window.Height / 2;

            //var postprocessing = { enabled : true };

            var orbitRadius = 200;

            var bgColor = 0x000511;
            var sunColor = 0xffee00;


            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151112
            var camera = new THREE.PerspectiveCamera(70, Native.window.aspect, 1, 3000);
            camera.position.z = 200;

            var scene = new THREE.Scene();

            //

            var materialDepth = new THREE.MeshDepthMaterial(new { });


            #region tree
            // X:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\Application.cs

            var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
            var loader = new THREE.JSONLoader();

            // http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
            // https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
            //System.Runtime.CompilerServices.DynamicAttribute

            loader.load(

                new Models.tree().Content.src,

                new Action<THREE.Geometry>(
                xgeometry =>
                {

                    var treeMesh = new THREE.Mesh(xgeometry, materialScene);
                    treeMesh.position.set(0, -150, -150);

                    var tsc = 400;
                    treeMesh.scale.set(tsc, tsc, tsc);

                    treeMesh.matrixAutoUpdate = false;
                    treeMesh.updateMatrix();


                    treeMesh.AttachTo(scene);

                }
                )
                );
            #endregion

            // sphere

            var geo = new THREE.SphereGeometry(1, 20, 10);

            var sphereMesh = new THREE.Mesh(geo, materialScene);

            var sc = 20;
            sphereMesh.scale.set(sc, sc, sc);

            scene.add(sphereMesh);

            var renderer = new THREE.WebGLRenderer(new { antialias = false });

            renderer.setClearColor(bgColor);
            //renderer.setPixelRatio(window.devicePixelRatio);
            renderer.setSize(Native.window.Width, Native.window.Height);
            renderer.domElement.AttachToDocument();


            renderer.autoClear = false;
            renderer.sortObjects = false;


            var postprocessing_scene = new THREE.Scene();

            var postprocessing_camera = new THREE.OrthographicCamera(Native.window.Width / -2, Native.window.Width / 2, Native.window.Height / 2, Native.window.Height / -2, -10000, 10000);
            postprocessing_camera.position.z = 100;

            postprocessing_scene.add(postprocessing_camera);

            var pars = new { minFilter = THREE.LinearFilter, magFilter = THREE.LinearFilter, format = THREE.RGBFormat };

            var postprocessing_rtTextureColors = new THREE.WebGLRenderTarget(Native.window.Width, Native.window.Height, pars);

            // Switching the depth formats to luminance from rgb doesn't seem to work. I didn't
            // investigate further for now.
            // pars.format = THREE.LuminanceFormat;

            // I would have this quarter size and use it as one of the ping-pong render
            // targets but the aliasing causes some temporal flickering

            var postprocessing_rtTextureDepth = new THREE.WebGLRenderTarget(Native.window.Width, Native.window.Height, pars);

            // Aggressive downsize god-ray ping-pong render targets to minimize cost

            var w = Native.window.Width / 4;
            var h = Native.window.Height / 4;
            var postprocessing_rtTextureGodRays1 = new THREE.WebGLRenderTarget(w, h, pars);
            var postprocessing_rtTextureGodRays2 = new THREE.WebGLRenderTarget(w, h, pars);
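            // arithmetic note: quarter width x quarter height means each ping-pong target
            // holds only 1/16 of the full-resolution pixels, which keeps the three god-ray
            // blur passes further down cheap even on a full-screen canvas.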

            // god-ray shaders

            // X:\jsc.svn\market\synergy\THREE\THREE\opensource\gihtub\three.js\build\THREE.ShaderGodRays.idl
            // these are special <script src="js/ShaderGodRays.js"></script>
            var godraysGenShader = THREE.ShaderGodRays["godrays_generate"] as dynamic;
            var postprocessing_godrayGenUniforms = THREE.UniformsUtils.clone(godraysGenShader.uniforms);
            var postprocessing_materialGodraysGenerate = new THREE.ShaderMaterial(new
            {

                uniforms = postprocessing_godrayGenUniforms,
                vertexShader = godraysGenShader.vertexShader,
                fragmentShader = godraysGenShader.fragmentShader

            });

            var godraysCombineShader = THREE.ShaderGodRays["godrays_combine"] as dynamic;
            var postprocessing_godrayCombineUniforms = THREE.UniformsUtils.clone(godraysCombineShader.uniforms);
            var postprocessing_materialGodraysCombine = new THREE.ShaderMaterial(new
            {

                uniforms = postprocessing_godrayCombineUniforms,
                vertexShader = godraysCombineShader.vertexShader,
                fragmentShader = godraysCombineShader.fragmentShader

            });

            var godraysFakeSunShader = THREE.ShaderGodRays["godrays_fake_sun"] as dynamic;
            var postprocessing_godraysFakeSunUniforms = THREE.UniformsUtils.clone(godraysFakeSunShader.uniforms);
            var postprocessing_materialGodraysFakeSun = new THREE.ShaderMaterial(new
            {

                uniforms = postprocessing_godraysFakeSunUniforms,
                vertexShader = godraysFakeSunShader.vertexShader,
                fragmentShader = godraysFakeSunShader.fragmentShader

            });

            postprocessing_godraysFakeSunUniforms.bgColor.value.setHex(bgColor);
            postprocessing_godraysFakeSunUniforms.sunColor.value.setHex(sunColor);

            postprocessing_godrayCombineUniforms.fGodRayIntensity.value = 0.75;

            var postprocessing_quad = new THREE.Mesh(
                new THREE.PlaneBufferGeometry(Native.window.Width, Native.window.Height),
                postprocessing_materialGodraysGenerate
            );
            postprocessing_quad.position.z = -9900;
            postprocessing_scene.add(postprocessing_quad);


            #region create field

            // THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.
            var planeGeometry = new THREE.PlaneGeometry(1000, 1000);
            //var planeMaterial = new THREE.MeshLambertMaterial(
            //    new
            //    {
            //        //map = THREE.ImageUtils.loadTexture(new HTML.Images.FromAssets.dirt_tx().src),
            //        color = 0xA26D41
            //        //color = 0xff0000
            //    }
            //);

            //planeMaterial.map.repeat.x = 300;
            //planeMaterial.map.repeat.y = 300;
            //planeMaterial.map.wrapS = THREE.RepeatWrapping;
            //planeMaterial.map.wrapT = THREE.RepeatWrapping;
            var plane = new THREE.Mesh(planeGeometry,
                    new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

                );
            plane.castShadow = false;
            plane.receiveShadow = true;


            {

                var parent = new THREE.Object3D();
                parent.add(plane);

                parent.position.y = -.5f * 100;

                parent.rotation.x = -Math.PI / 2;
                parent.scale.set(100, 100, 100);

                //scene.add(parent);
            }

            var random = new Random();
            var meshArray = new List<THREE.Mesh>();
            var geometry = new THREE.CubeGeometry(1, 1, 1);

            for (var i = 1; i < 100; i++)
            {

                //THREE.MeshPhongMaterial
                var ii = new THREE.Mesh(geometry,


                    new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

                    //new THREE.MeshLambertMaterial(
                    //new
                    //{
                    //    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
                    //    specular = 0xffaaaa,
                    //    ambient= 0x050505, 
                    //})

                    );
                ii.position.x = i % 2 * 500 - 2.5f;

                // raise it up
                ii.position.y = .5f * 100;
                ii.position.z = -1 * i * 400;
                ii.castShadow = true;
                ii.receiveShadow = true;
                //ii.scale.set(100, 100, 100 * i);
                ii.scale.set(100, 100 * i, 100);


                meshArray.Add(ii);

                scene.add(ii);

            }
            #endregion




            #region Comanche
            new Comanche().Source.Task.ContinueWithResult(
                Comanche =>
                {

                    Comanche.position.y = 200;

                    //dae.position.z = 280;

                    Comanche.AttachTo(scene);

                    //scene.add(dae);
                    //oo.Add(Comanche);

                    // won't do it
                    //dae.castShadow = true;

                    // http://stackoverflow.com/questions/15492857/any-way-to-get-a-bounding-box-from-a-three-js-object3d
                    //var helper = new THREE.BoundingBoxHelper(dae, 0xff0000);
                    //helper.update();
                    //// If you want a visible bounding box
                    //scene.add(helper);

                    Comanche.children[0].children[0].children.WithEach(x => x.castShadow = true);


                    // the rotors?
                    Comanche.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                    Comanche.scale.set(0.5, 0.5, 0.5);
                    //helper.scale.set(0.5, 0.5, 0.5);

                    var s2w = Stopwatch.StartNew();

                    Native.window.onframe += delegate
                    {
                        //dae.children[0].children[0].children.Last().al
                        //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                        //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                        //rotation.y = sw.ElapsedMilliseconds * 0.01;

                        Comanche.children[0].children[0].children.Last().rotation.y = s2w.ElapsedMilliseconds * 0.001;

                        //dae.children[0].children[0].children.Last().app
                    };
                }
            );
            #endregion




            var sw = Stopwatch.StartNew();

            var controls = new THREE.OrbitControls(camera, renderer.domElement);

            // Error CS0229: Ambiguity between 'THREE.Math' and 'Math' (WebGLGodRay, Application.cs, line 238)

            Native.window.onframe +=
                delegate
                {
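                    // per-frame god-ray postprocessing done below:
                    //   1. animate the orbiting sun sphere and update the orbit controls / camera
                    //   2. project the sun into screen space and feed it to the god-ray and fake-sun uniforms
                    //   3. render the fake sun (scissored around the sun), then scene colors and scene depth
                    //      into offscreen render targets
                    //   4. run three god-ray generation passes, ping-ponging between the two quarter-size
                    //      targets with a shrinking step size
                    //   5. composite colors + god-rays to the screen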
                    //var time = IDate.now() / 4000;
                    var time = sw.ElapsedMilliseconds / 4000.0;

                    sphereMesh.position.x = orbitRadius * Math.Cos(time);
                    sphereMesh.position.z = orbitRadius * Math.Sin(time) - 100;



                    //controls.center.copy(blendMesh.position);
                    //controls.center.y += radius * 2.0;
                    controls.update();

                    //var camOffset = camera.position.clone().sub(controls.center);
                    //camOffset.normalize().multiplyScalar(750);
                    camera.position = controls.center.clone();


                    //camera.position.x += (mouseX - camera.position.x) * 0.036;
                    //camera.position.y += (-(mouseY) - camera.position.y) * 0.036;

                    //camera.lookAt(scene.position);


                    // Find the screenspace position of the sun

                    screenSpacePosition.copy(sunPosition).project(camera);

                    screenSpacePosition.x = (screenSpacePosition.x + 1) / 2;
                    screenSpacePosition.y = (screenSpacePosition.y + 1) / 2;

                    // Give it to the god-ray and sun shaders

                    postprocessing_godrayGenUniforms["vSunPositionScreenSpace"].value.x = screenSpacePosition.x;
                    postprocessing_godrayGenUniforms["vSunPositionScreenSpace"].value.y = screenSpacePosition.y;

                    postprocessing_godraysFakeSunUniforms["vSunPositionScreenSpace"].value.x = screenSpacePosition.x;
                    postprocessing_godraysFakeSunUniforms["vSunPositionScreenSpace"].value.y = screenSpacePosition.y;

                    // -- Draw sky and sun --

                    // Clear colors and depths, will clear to sky color

                    renderer.clearTarget(postprocessing_rtTextureColors, true, true, false);

                    // Sun render. Runs a shader that gives a brightness based on the screen
                    // space distance to the sun. Not very efficient, so I make a scissor
                    // rectangle around the sun's position to avoid rendering surrounding pixels.

                    var sunsqH = 0.74 * Native.window.Height; // 0.74 depends on extent of sun from shader
                    var sunsqW = 0.74 * Native.window.Height; // both depend on height because sun is aspect-corrected

                    screenSpacePosition.x *= Native.window.Width;
                    screenSpacePosition.y *= Native.window.Height;

                    renderer.setScissor(screenSpacePosition.x - sunsqW / 2, screenSpacePosition.y - sunsqH / 2, sunsqW, sunsqH);
                    renderer.enableScissorTest(true);

                    postprocessing_godraysFakeSunUniforms["fAspect"].value = Native.window.aspect;

                    postprocessing_scene.overrideMaterial = postprocessing_materialGodraysFakeSun;
                    renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureColors);

                    renderer.enableScissorTest(false);

                    // -- Draw scene objects --

                    // Colors

                    scene.overrideMaterial = null;
                    renderer.render(scene, camera, postprocessing_rtTextureColors);

                    // Depth

                    scene.overrideMaterial = materialDepth;
                    renderer.render(scene, camera, postprocessing_rtTextureDepth, true);

                    // -- Render god-rays --

                    // Maximum length of god-rays (in texture space [0,1]X[0,1])

                    var filterLen = 1.0;

                    // Samples taken by filter

                    var TAPS_PER_PASS = 6.0;

                    // Pass order could equivalently be 3,2,1 (instead of 1,2,3), which
                    // would start with a small filter support and grow to large. however
                    // the large-to-small order produces less objectionable aliasing artifacts that
                    // appear as a glimmer along the length of the beams
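                    // worked example: with filterLen = 1.0 and TAPS_PER_PASS = 6, the step lengths
                    // below come out as 6^-1 ~= 0.167, 6^-2 ~= 0.028 and 6^-3 ~= 0.0046 in texture
                    // space, i.e. each pass samples a 6x finer neighbourhood than the previous one.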

                    // pass 1 - render into first ping-pong target

                    var pass = 1.0;
                    var stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                    postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                    postprocessing_godrayGenUniforms["tInput"].value = postprocessing_rtTextureDepth;

                    postprocessing_scene.overrideMaterial = postprocessing_materialGodraysGenerate;

                    renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays2);

                    // pass 2 - render into second ping-pong target

                    pass = 2.0;
                    stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                    postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                    postprocessing_godrayGenUniforms["tInput"].value = postprocessing_rtTextureGodRays2;

                    renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays1);

                    // pass 3 - 1st RT

                    pass = 3.0;
                    stepLen = filterLen * Math.Pow(TAPS_PER_PASS, -pass);

                    postprocessing_godrayGenUniforms["fStepSize"].value = stepLen;
                    postprocessing_godrayGenUniforms["tInput"].value = postprocessing_rtTextureGodRays1;

                    renderer.render(postprocessing_scene, postprocessing_camera, postprocessing_rtTextureGodRays2);

                    // final pass - composite god-rays onto colors

                    postprocessing_godrayCombineUniforms["tColors"].value = postprocessing_rtTextureColors;
                    postprocessing_godrayCombineUniforms["tGodRays"].value = postprocessing_rtTextureGodRays2;

                    postprocessing_scene.overrideMaterial = postprocessing_materialGodraysCombine;

                    renderer.render(postprocessing_scene, postprocessing_camera);
                    postprocessing_scene.overrideMaterial = null;


                };



            new { }.With(
                async delegate
                {
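                    // resize pattern: run the body once right away, then run it again after
                    // every onresize event awaited at the bottom of the do/while.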
                    //while (true)
                    do
                    {
                        camera.aspect = Native.window.aspect;
                        camera.updateProjectionMatrix();
                        renderer.setSize(Native.window.Width, Native.window.Height);

                        // convert to bool?
                    } while (await Native.window.async.onresize);
                    //} while (await Native.window.async.onresize != null);

                }
            );

            //var ze = new ZeProperties();

            //ze.Show();

            //ze.treeView1.Nodes.Clear();

            //ze.Add(() => renderer);
            //ze.Add(() => controls);
            //ze.Add(() => scene);
            //ze.Left = 0;


        }
Example n. 12
        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
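                    // opener is set, or we are not the top-level window: this instance was opened
                    // via chrome.app.window.create below, so fall through and run the app UI
                    // instead of spawning another window.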
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "x360x83");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif


            // GpuProcessHostUIShim: The GPU process crashed!

            var poke = new WebGLRenderingContext();

            if (poke == null)
            {
                new IHTMLPre { "GpuProcessHostUIShim: The GPU process crashed! restart process."
                }.AttachToDocument();
                return;
            }

            // are we in a RemoteApp ? software renderer?
            // this may hang the buggy rdp protocol...




            // crash
            //int cubefacesizeMAX = 2048 * 2; // 6 faces, ?
            int cubefacesizeMAX = 2048 * 1; // 6 faces, ?
            int cubefacesize = cubefacesizeMAX; // 6 faces, ?
            //int cubefacesize = 1024; // 6 faces, ?
            // "X:\vr\tape1\0000x2048.png"
            // for 60hz render we may want to use float camera precision, not available for ui.
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
            //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            //if (Environment.ProcessorCount < 8)
            //    //cubefacesize = 64; // 6 faces, ?

            //    // fast gif?
            //cubefacesize = 1024; // 6 faces, ?

            // not 8k..
            //cubefacesize = 512; // 6 faces, ?

            // a big cube face may draw only half of itself?


            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
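            // e.g. with cubefacesize = 2048 this gives uizoom = 128 / 2048 = 0.0625,
            // so each 2048px cube face canvas below is previewed at 128px.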


            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            IHTMLCanvas shader1canvas = null;




            //return;

            // Earth params
            //var radius = 0.5;
            //var radius = 1024;
            //var radius = 2048;
            //var radius = 512;
            //var radius = 256;
            //var radius = 400;

            // can we avoid flying too far beyond the moon?
            //var radius = 500;
            //var radius = 480;
            //var radius = -480;

            //var segments = 32;
            //var segments = 128 * 2;
            //var rotation = 6;


            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )


            // var far = 0xffffff;

            // need a zoom effect
            // 5pixels to 33%


            // radius needs to be a bit bigger so we can't zoom through it
            // far image at this distance 
            var skyboxradius0 = 0 + 2048 * 4;

            var near = cubefacesize * 0.33;


            var skyboxradius = skyboxradius0 * 1.2;

            var far = skyboxradius * 2;
            //var near = cubefacesize * 0.5;
            //var near = cubefacesize * 0.4;
            //var near = cubefacesize * 0.25;
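            // with the current values: skyboxradius0 = 2048 * 4 = 8192, skyboxradius ~= 9830,
            // far ~= 19661 and near = 0.33 * 2048 ~= 676, so the skybox stays inside the
            // frustum of every cube face camera.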

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();



            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();



            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);

            // what if we reuse the skybox as-is and skip rendering it?
            // that means we cannot rotate via sky, we have to rotate other elements in reverse.
            // can we have a checkbox to hide or render the skybox?
            var scenezooms = new THREE.Group();
            scenezooms.AttachTo(scene);




            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            //scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            //scene.add(new THREE.AmbientLight(0x333333));
            //scene.add(new THREE.AmbientLight(0xffffff));
            //scene.add(new THREE.AmbientLight(0xaaaaaa));
            //scene.add(new THREE.AmbientLight(0xcccccc));
            //scene.add(new THREE.AmbientLight(0xeeeeee));
            scene.add(new THREE.AmbientLight(0xffffff));




            //var light = new THREE.DirectionalLight(0xffffff, 1);
            //// sun should be beyond moon
            ////light.position.set(-5 * virtualDistance, -3 * virtualDistance, -5 * virtualDistance);
            ////light.position.set(-15 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);

            //// where shall the light source be to see half planet?
            //light.position.set(-1 * virtualDistance, -1 * virtualDistance, -15 * virtualDistance);
            //scene.add(light);



            //var lightX = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightX" }.AttachToDocument();
            //var lightY = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightY" }.AttachToDocument();
            //var lightZ = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -60, max = 60, valueAsNumber = 0, title = "lightZ" }.AttachToDocument();

            //new IHTMLHorizontalRule { }.AttachToDocument();

            // what does WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    //antialias = true,

                    alpha = true,

                    preserveDrawingBuffer = true
                }
            );

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg

            // cyan?
            //renderer0.setClearColor(0xfffff, 1);
            //renderer0.setClearColor(0xfffff, 0);
            renderer0.setClearColor(0x0, 0);
            //renderer0.setClearColor(0x0, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] face order



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation fromcenter of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);

            // what if we want more than 30sec of video? 2min animation? more frames to render? 2gb disk?
            var frameIDslider = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0, max = 1800, valueAsNumber = 1800 / 2, title = "frameIDslider" }.AttachToDocument();












            var xframeID = 0;



            new { }.With(

                async delegate
                {

                next:

                    //Console.WriteLine("enter vsync0ambient");
                    Native.document.title = new { xframeID }.ToString();


                    vsync0ambient = new TaskCompletionSource<object>();

                    await vsync0ambient.Task;

                    await Task.Delay(1000 / 15);
                    //await Task.Delay(1000);
                    //Console.WriteLine("await vsync0ambient 5");
                    //await Task.Delay(5000);

                    xframeID++;

                    goto next;
                }

            );
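            // frame pacing: each iteration above waits for vsync0ambient to be completed
            // (presumably by the render loop further down), then throttles to roughly
            // 15 fps (1000 / 15 ms delay) before advancing xframeID.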














            new IHTMLHorizontalRule { }.AttachToDocument();

            var camerazMIN = 0 - 2048 * 4;

            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 2048 * 4, valueAsNumber = 0, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();
            //var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048 * 4, max = 0 + 2048 * 4, valueAsNumber = 0 - 2048 * 4, title = "cameraz" }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = camerazMIN, max = 0, valueAsNumber = camerazMIN, title = "cameraz" }.AttachToDocument();
            // the zoom thing..


            new IHTMLHorizontalRule { }.AttachToDocument();

            var skyrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotup" }.AttachToDocument();
            var skyrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "skyrotright" }.AttachToDocument();

            new IHTMLPre { () => new { skyrotup = skyrotup.valueAsNumber, skyrotright = skyrotright.valueAsNumber } }.AttachToDocument();


            new IHTMLHorizontalRule { }.AttachToDocument();

            // were we able to test for it?
            //var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = -12, title = "up" }.AttachToDocument();
            var zoomrotup = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "up" }.AttachToDocument();
            var zoomrotright = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = -450, max = 450, valueAsNumber = 0, title = "right" }.AttachToDocument();

            new IHTMLPre { () => new { 
                

                // on 0 zoom we should rely on the original skybox?
                cameraz = cameraz.valueAsNumber, 
                
                zoomrotup = zoomrotup.valueAsNumber, zoomrotright = zoomrotright.valueAsNumber } }.AttachToDocument();



            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                    // left?
                  1.0 * (camerax + fcamerax),
                    // height?
                    //0,
                    //1600,
                    //1024,

                   // if the camera is in the center, would we need to move the scene?
                    // we have to move the camera. as we move the scene the lights are messed up
                    //2014,
                   1.0 * (cy + fcameray),

                 //1200
                 1.0 * (cameraz + fcameraz)
                    // can we hover top of the map?
                   );
            };


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            // roslyn!
            //canvasNY.canvas.style.transform = $"scale({uizoom})";
            canvasNY.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            //canvasPY.canvas.style.transform = $"scale({uizoom})";
            canvasPY.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            //canvasNX.canvas.style.transform = $"scale({uizoom})";
            canvasNX.canvas.style.transform = "scale(" + uizoom + ")";


            // front??
            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            //canvasPX.canvas.style.transform = $"scale({uizoom})";
            canvasPX.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            //canvasNZ.canvas.style.transform = $"scale({uizoom})";
            canvasNZ.canvas.style.transform = "scale(" + uizoom + ")";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: near, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            //canvasPZ.canvas.style.transform = $"scale({uizoom})";
            canvasPZ.canvas.style.transform = "scale(" + uizoom + ")";
            #endregion
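            // the six preview canvases above are laid out as a horizontal cross,
            // derived from the SetLocation column/row offsets:
            //
            //            [PY]
            //   [PX] [PZ] [NX] [NZ]
            //            [NY]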




            // c++ alias locals would be nice..
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;




            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs


            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region gl4K spherical
            var gl4K = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c4k = gl4K.canvas.AttachToDocument();


            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            // when can we go up?
            c4k.width = 3840;
            c4k.height = 2160;

            // https://www.youtube.com/watch?v=sLprVF6d7Ug
            // 8K is 7680 4320

            // https://www.youtube.com/watch?v=RNdHaeBhT9Q
            // 8K is 7680 3840

            //c.width = 7680;
            ////c.height = 3840;
            //c.height = 4320;



            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // 1,777777777777778

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            //1,973958333333333

            //7580
            //    3840

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c4k.width, c4k.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c4k.width;

            c4k.style.backgroundColor = "yellow";
            c4k.style.transformOrigin = "0 0";
            //c.style.transform = $"scale({suizoom})";
            c4k.style.transform = "scale(" + suizoom + ")";
            //c.style.backgroundColor = "yellow";
            c4k.style.position = IStyle.PositionEnum.absolute;

            c4k.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120
                );
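            // e.g. 480 / 3840 = 0.125, so the 3840x2160 equirectangular canvas is previewed
            // at 480x270, the same footprint as the frame0 image attached below.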


            // until we figure out how to fix the shader, we can try to fake it?
            // will allow at least a nice static 8K image?
            // S6 did a 6546x3272 image. 5k?

            // 1.77
            var c8k = new CanvasRenderingContext2D(3840 * 2, 2160 * 2);

            //var c8k = new CanvasRenderingContext2D(5120, 2880);

            // 5120 x 2880 pixel

            // 8k canvas won't load in chrome?
            c8k.canvas.AttachToDocument();


            c8k.canvas.style.backgroundColor = "cyan";
            c8k.canvas.style.transformOrigin = "0% 0%";
            c8k.canvas.style.transform = "scale(" + (suizoom / 2) + ")";

            c8k.canvas.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 120,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120 + 320
                );



            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl4K,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl4K),
                       supportDerivatives: gl4K.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
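            // rough sketch (not the actual shader source) of the standard cube-to-equirectangular
            // lookup such a fragment shader typically performs for every output pixel (u, v in [0..1]):
            //
            //   lon   = (u - 0.5) * 2 * PI
            //   lat   = (0.5 - v) * PI
            //   dir   = (cos(lat) * sin(lon), sin(lat), cos(lat) * cos(lon))
            //   color = textureCube(cubemap, dir)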



            // why is it flipped?

            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            var frame0 = new HTML.Images.FromAssets._20150912_154522().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._20150912_154522recenter().AttachToDocument();


            //var frame0 = new HTML.Images.FromAssets.galaxy_starfield150FOV().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16,
                8 + (int)(uizoom * cubefacesize + 8) * 3 + 120);




            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };
            frame0.style.cursor = IStyle.CursorEnum.pointer;
            frame0.title = "save frame";


            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // "A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of “being there”. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax."

                // grab a frame

                if (dir == null)
                {
                    var c8ksw = Stopwatch.StartNew();

                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    // not exporting to file system?
                    //var f0 = new IHTMLImage { src = gl4K.canvas.toDataURL() };
                    //var f0 = new IHTMLImage { src = c8k.canvas.toDataURL() };

                    // png would be 50mb?
                    var f0 = new IHTMLImage { src = c8k.canvas.toDataURL(quality: 0.9) };
                    // 22989ms { c8ksw = 00:00:12.12976 }
                    Console.WriteLine(new { c8ksw });

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                //                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl4K);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion


            #region render 60hz 30sec
            new IHTMLButton {
                "render 60hz 30sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                status = "rendering... vsync";

                //var frameid = 0;
                frameIDslider.valueAsNumber = -1;

                goto beforeframe;


                // parallax offset?

                await_nextframe:


                //var filename = frameIDslider.valueAsNumber.ToString().PadLeft(4, '0') + ".png";
                var filename = frameIDslider.valueAsNumber.ToString().PadLeft(5, '0') + ".jpg";
                status = "rendering... " + new { filename };


                vsync1renderman = new TaskCompletionSource<object>();
                await vsync1renderman.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                // https://code.google.com/p/chromium/issues/detail?id=404301
                if (dir != null)
                {
                    c8k.drawImage(frame0, 0, 0, c8k.canvas.width, c8k.canvas.height);
                    c8k.drawImage(gl4K, 0, 0, c8k.canvas.width, c8k.canvas.height);


                    //await dir.WriteAllBytes(filename, gl4K);
                    await dir.WriteAllBytes(filename, c8k);
                }

                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;





                // design mode v render mode
                if (cubefacesize < cubefacesizeMAX)
                    frameIDslider.valueAsNumber += 15;
                else
                    frameIDslider.valueAsNumber++;
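                // at preview resolution (cubefacesize < cubefacesizeMAX) only every 15th frame is
                // rendered for a quick design-time pass; at full resolution every frame is written.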




            beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4x2 =x8 
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                // fcamerax = radius * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // some shaders need to know where the camera is looking from. can we tell them?

                //fcamerax = 2.2 * Math.Sin(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));
                //fcameraz = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));


                //// up
                //fcameray = 4.4 * Math.Cos(Math.PI * (frameIDslider.valueAsNumber - (60 * 30 / 2f)) / (60 * 30 / 2f));

                // cameraz.valueAsNumber = (int)(cameraz.max * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f)));


                // up
                //fcameray = 128 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;


                // in 30 sec can we have a zoom in and out?

                // so 15 sec at 60 fps needs to be -max z


                var a = Math.Abs(frameIDslider.valueAsNumber - (60 * 15));
                var aa = a / (60f * 15);

                //cameraz.valueAsNumber = (int)(camerazMIN * aa);
                cameraz.valueAsNumber = (int)(camerazMIN * (1.0 - aa));
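                // worked through: a = |frame - 900|, aa = a / 900, cameraz = camerazMIN * (1 - aa)
                //   frame 0    -> aa = 1.0 -> cameraz = 0            (zoomed out)
                //   frame 900  -> aa = 0.0 -> cameraz = camerazMIN   (closest, at 15 sec)
                //   frame 1800 -> aa = 1.0 -> cameraz = 0            (back out)
                // i.e. a single 30 sec triangle-wave zoom in and out at 60hz.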


                // 60hz 30sec
                if (frameIDslider.valueAsNumber < 60 * 30)
                {
                    // Blob GC? either this delay helps, or the fact that we made a Blob static.
                    //await Task.Delay(11);
                    await Task.Delay(33);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid = frameIDslider.valueAsNumber, total.ElapsedMilliseconds };
                vsync1renderman = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
            #endregion
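            // The handshake that drives the loop above, in miniature (both halves exist in this file):
            //   consumer (render loop): vsync1renderman = new TaskCompletionSource<object>(); await vsync1renderman.Task;
            //   producer (onframe):     if (vsync1renderman != null && !vsync1renderman.Task.IsCompleted) vsync1renderman.SetResult(null);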


            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"






            // asus will hang
            // https://3dwarehouse.sketchup.com/model.html?id=fb7a0448d940e575edc01389f336fb0a
            // can we get one frame into vr?

            // cube: mesh to cast shadows



            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000 })

            //    );
            //    floor2.position.set(0, 0, -cubefacesize / 2);
            //    floor2.AttachTo(scene);
            //}
            //{
            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(new { ambient = 0x0000ff, color = 0x0000ff })

            //    );
            //    floor2.position.set(-cubefacesize / 2, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}

            //var p900toCubeSize = cubefacesize / 1080f;
            var p900toCubeSize = cubefacesize / 1920f;

            //p900toCubeSize *= 0.7f;

            // where is this magic number coming from??
            p900toCubeSize *= 0.65f;
            //p900toCubeSize *= 0.5f;
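            // one candidate explanation: at a viewing distance of cubefacesize / 2 a 90deg cube face
            // spans exactly cubefacesize, so cubefacesize / 1920 alone would stretch the 1920px frame
            // across a whole face; the extra 0.65 shrinks it to ~65% of a face, i.e. roughly
            // 2 * atan(0.65) ~ 66deg of horizontal field - presumably to approximate the camera's real FOV.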

            // http://stackoverflow.com/questions/17648067/three-js-drawing-two-overlapping-transparent-spheres-and-hiding-intersection



            var farimage = new output00609();
            var nearimage = new output01085();

            // front?
            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                var tex0 = new THREE.Texture(farimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );

                //(floor2 as dynamic).renderDepth = 0.2;

                //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                // zoom in and get a 90 FOV closeup?
                floor2.position.set(-cubefacesize * 0.50 - skyboxradius0, 0, 0);
                //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }





            {
                //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                //var tex0 = new THREE.Texture(new moon());
                //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                var tex0 = new THREE.Texture(nearimage) { needsUpdate = true, minFilter = THREE.LinearFilter };
                //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };

                applycameraoffset += delegate { tex0.needsUpdate = true; };

                //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                //var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * 0.1), (int)(1080 * 0.1), 8, 8);



                var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                var floor2 = new THREE.Mesh(planeGeometry0,
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                    //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                    new THREE.MeshPhongMaterial(
                        new
                        {
                            // black otherwise?
                            transparent = true,

                            map = tex0,


                            //ambient = 0x00ff00,
                            //color = 0x00ff00
                        })

                );
                //floor2.position.set(0, 0, -cubefacesize  * 0.55);
                //(floor2 as dynamic).renderDepth = 0.3;

                // zoom in and get a 90 FOV closeup?
                floor2.position.set(-cubefacesize * 0.50, 0, 0);
                floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                //floor2.AttachTo(scene);
                floor2.AttachTo(scenezooms);
            }


            Action<IHTMLImage, double> AddFrame = (img, z) =>
            {
                {
                    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
                    //var tex0 = new THREE.Texture(new moon());
                    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };


                    //var tex0 = new THREE.Texture(new output01027()) { needsUpdate = true };
                    //var tex0 = new THREE.Texture(new output00630()) { needsUpdate = true };
                    var tex0 = new THREE.Texture(img) { needsUpdate = true, minFilter = THREE.LinearFilter };
                    applycameraoffset += delegate { tex0.needsUpdate = true; };

                    //var planeGeometry0 = new THREE.PlaneGeometry(1920, 1080, 8, 8);
                    var planeGeometry0 = new THREE.PlaneGeometry((int)(1920 * p900toCubeSize), (int)(1080 * p900toCubeSize), 8, 8);
                    var floor2 = new THREE.Mesh(planeGeometry0,
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
                        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
                        new THREE.MeshPhongMaterial(
                            new
                            {
                                // black otherwise?
                                transparent = true,

                                map = tex0,


                                //ambient = 0x00ff00,
                                //color = 0x00ff00
                            })

                    );

                    //(floor2 as dynamic).renderDepth = 0.2;

                    //floor2.position.set(0, 0, -cubefacesize  * 0.55);

                    // zoom in and get a 90 FOV closeup?
                    floor2.position.set(-cubefacesize * 0.50 - skyboxradius0 * z, 0, 0);
                    //floor2.position.set(-skyboxradius0 - 128, 0, 0);
                    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), Math.PI / 2);
                    //floor2.AttachTo(scene);
                    floor2.AttachTo(scenezooms);
                }

            };
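            // note: the two hand-written plane blocks above are the z = 1.0 (farimage) and z = 0.0 (nearimage)
            // cases of this AddFrame helper; the loader below fills in the depths in between.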

            new IHTMLButton { "load frames from disk " }.AttachToDocument().onclick += async e =>
            {
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360x83\Application.cs
                // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\360\x360azimuthal\Application.cs

                e.Element.disabled = true;

                // how do we load the files?
                var dir2 = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });

                var dir2r = dir2.createReader();

                var files2 = await dir2r.readFileEntries();

                var files2count = files2.Count();

                Console.WriteLine(new { files2count });

                // does this dir have the first and last image we already know of?
                // 55390ms { files2count = 4324 }

                var firstcandidate = files2.First();

                //Console.WriteLine(new { firstcandidate, farimage.src });
                // 19230ms { firstcandidate = [object FileEntry], src = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/x360x83/output00609.png }

                // 10903ms { firstcandidate = output00001.png, farimage = output00609.png }

                var files2skip = files2.SkipWhile(firstcandidate1 => firstcandidate1.name != farimage.src.SkipUntilLastOrEmpty("/"));
                //Console.WriteLine(new { files2skip = files2skip.Count() });

                var files2take = files2skip.TakeWhile(firstcandidate1 => firstcandidate1.name != nearimage.src.SkipUntilLastOrEmpty("/"));

                //6228ms { files2count = 4324 }
                //view-source:54116 6377ms { files2take = 476 }

                // reverse?
                var files3 = files2take.ToArray();
                var files3count = files3.Count();


                //Console.WriteLine(new { files2take = files2take.Count() });

                //var step = 8;
                //var step = 1;//crashes
                //var step = 2;//crashes after load
                var step = 4;//crashes after load
                //var step = 3;//
                //var step = (int)(files3count * 0.05);
                //var step = (int)(files3count * 0.25);
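                // with the ~476 frames counted above and step = 4, roughly 119 planes get created,
                // each placed at depth z = 1 - i / files3count between the far and near anchor planes.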

                for (int i = step; i < files3count; i += step)
                {
                    //files3[i].

                    Console.WriteLine(new { i });

                    e.Element.innerText = new { step, i, files3count }.ToString();


                    //files3[i].file()
                    var ff = await files3[i].file();

                    //83fc21e4-a2da-408d-9831-571313ead641
                    //Refcount: 1
                    //Content Type: image/png
                    //Type: file
                    //Path: X:\p900\7\DCIM\100NIKON\DSCN0018\output00767.png
                    //Modification Time: Sunday, September 13, 2015 at 10:24:01 AM
                    //Length: 2,131,791

                    // are we running out of blobs?

                    var url = ff.ToObjectURL();
                    var img = new IHTMLImage(url);


                    await img.async.oncomplete;
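                    // re "are we running out of blobs?" above: each ToObjectURL() keeps the file's Blob
                    // alive until its URL is revoked. The usual fix, as a sketch only (assumes the binding
                    // can reach the standard URL.revokeObjectURL, which is not verified here):
                    //
                    //     ((dynamic)Native.window).URL.revokeObjectURL(url);   // once img has decoded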

                    //async ff =>
                    //{
                    //    var ffbytes = await ff.readAsBytes();

                    //var ffimage = (IHTMLImage)ffbytes;

                    var aa = (double)i / files3count;

                    AddFrame(img, 1.0 - aa);

                    //    }
                    //);

                    //files3[i].
                }

                e.Element.innerText = "done " + new { step, files3count }.ToString();

            };


            //{
            //    //var tex0 = new THREE.Texture { image = new moon(), needsUpdate = true };
            //    //var tex0 = new THREE.Texture(new moon());
            //    //var tex0 = new THREE.Texture(new moon()) { needsUpdate = true };
            //    var tex0 = new THREE.Texture(shader1canvas) { needsUpdate = true };

            //    applycameraoffset += delegate { tex0.needsUpdate = true; };

            //    var planeGeometry0 = new THREE.PlaneGeometry(cubefacesize, cubefacesize, 8, 8);
            //    var floor2 = new THREE.Mesh(planeGeometry0,
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xff0000, specular = 0xA26D41, shininess = 1 })
            //        //new THREE.MeshPhongMaterial(new { ambient = 0xff0000, color = 0xff0000, specular = 0xff0000 })
            //        new THREE.MeshPhongMaterial(
            //            new
            //            {

            //                map = tex0,


            //                //ambient = 0x00ff00,
            //                //color = 0x00ff00
            //            })

            //    );
            //    floor2.position.set(cubefacesize * 0.55, 0, 0);
            //    floor2.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);

            //    floor2.AttachTo(scene);
            //}





            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeEarth\ChromeEarth\Application.cs
            // X:\jsc.svn\examples\javascript\canvas\ConvertBlackToAlpha\ConvertBlackToAlpha\Application.cs
            // hidden for alpha AppWindows
            //#if FBACKGROUND


            #region galaxy_starfield
            new THREE.Texture().With(
                async s =>
                {
                    //var i = new HTML.Images.FromAssets.galaxy_starfield();
                    //var i = new HTML.Images.FromAssets.galaxy_starfield150FOV();

                    var bytes = await frame0.async.bytes;

                    //for (int ii = 0; ii < bytes.Length; ii += 4)
                    //{

                    //    bytes[ii + 3] = (byte)(bytes[ii + 0]);

                    //    bytes[ii + 0] = 0xff;
                    //    bytes[ii + 1] = 0xff;
                    //    bytes[ii + 2] = 0xff;
                    //}

                    var cc = new CanvasRenderingContext2D(frame0.width, frame0.height);

                    cc.bytes = bytes;

                    // does not do a thing?
                    //s.flipY = true;

                    s.image = cc;
                    s.needsUpdate = true;

                    var stars_material = new THREE.MeshBasicMaterial(
                            new
                            {
                                //map = THREE.ImageUtils.loadTexture(new galaxy_starfield().src),
                                map = s,
                                // both?
                                //side = THREE.BackSide,
                                transparent = true
                            });

                    // nice
                    //stars_material.opacity = 0.5;



                    // THREE.SphereGeometry = function ( radius, widthSegments, heightSegments, phiStart, phiLength, thetaStart, thetaLength ) {
                    // http://learningthreejs.com/blog/2011/10/05/performance-merging-geometry/

                    // how are we to construct geometry that has higher detail in one spot for zoom in
                    var skyboxsphere = new THREE.SphereGeometry(skyboxradius, 512, 512

                    // left to right
                        // 0 .. 45 deg

                    // center it?
                        //Math.PI - Math.PI / 4
                        //, Math.PI / 4,



                    //// up to down
                        //// 0 .. 22 deg
                        //0, Math.PI / 2
                    );


                    //
                    var stars = new THREE.Mesh(
                        // radius not used?
                        //new THREE.SphereGeometry(skyboxradius, 64, 64),
                        //new THREE.SphereGeometry(skyboxradius, 8, 8),

                            // we need to be able to zoom in!
                        //new THREE.SphereGeometry(skyboxradius, 256, 256),

                            // chrome will crash on laptop?
                        // chrome will crash on red!
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 1024, 1024),
                        //new THREE.SphereGeometry(skyboxradius, 512, 512),
                        //new THREE.SphereGeometry(skyboxradius, 600, 600),

                            // or perhaps we only need detailed geometry in a specific spot?
                            skyboxsphere,

                           stars_material
                        );

                    // http://stackoverflow.com/questions/8502150/three-js-how-can-i-dynamically-change-objects-opacity
                    //(stars_material as dynamic).opacity = 0.5;

                    stars.scale.x = -1;


                    // http://stackoverflow.com/questions/31797871/three-js-alpha-on-entire-object
                    applycameraoffset += delegate
                    {
                        if (cameraz.valueAsNumber == 0)
                        {
                            // static 5k image should take over...
                            stars_material.opacity = 0.0;
                            return;
                        }


                        var a = (cameraz.valueAsNumber / (double)camerazMIN);

                        stars.rotation.set(0, 0, 0);

                        //    skyrotright
                        //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (skyrotup.valueAsNumber / 900.0));
                        stars.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (skyrotright.valueAsNumber / 900.0));



                        stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * ((a * 35.0 + skyrotup.valueAsNumber) / 900.0));


                        stars_material.opacity = (1.0 - a) * 0.7 + 0.3;
                    };
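                    // with a = cameraz / camerazMIN: barely zoomed (a near 0) gives opacity ~1.0,
                    // fully zoomed to camerazMIN (a = 1) gives 0.3; the early return above forces 0.0
                    // exactly at cameraz == 0 so the static 5k frame takes over.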




                    // can we get our horizon recentered?
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.3 / 90.0));
                    //stars.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (1.5 / 90.0));

                    applycameraoffset += delegate
                    {

                        scenezooms.rotation.set(0, 0, 0);
                        // keep skybox where it is

                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 0, 1), (Math.PI / 2) * (zoomrotup.valueAsNumber / 900.0));
                        scenezooms.rotateOnAxis(new THREE.Vector3(0, 1, 0), (Math.PI / 2) * (zoomrotright.valueAsNumber / 900.0));

                    };
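                    // slider units here are tenths of a degree: (Math.PI / 2) * (value / 900.0)
                    // maps a slider value of 900 to a 90deg rotation.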


                    var hideskybox = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "hide skybox", @checked = true }.AttachToDocument();

                    //Native.window.onframe += delegate
                    //{
                    //    //
                    //    stars.visible = !hideskybox.@checked;
                    //};

                    hideskybox.onchange += delegate
                    {
                        //
                        stars.visible = !hideskybox.@checked;
                    };
                    stars.visible = !hideskybox.@checked;



                    scene.add(stars);
                }
           );
            #endregion


            //var NYonly = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox, title = "NY only" }.AttachToDocument();
            var PXonly = new IHTMLInput
            {
                type = ScriptCoreLib.Shared.HTMLInputTypeEnum.checkbox,
                @checked = true,
                title = "PX only"
            }.AttachToDocument();


            //new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
            //       dae =>
            {



                //dae.position.y = -80;

                //dae.AttachTo(sceneg);
                //scene.add(dae);
                //oo.Add(dae);




                // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                Native.window.onframe +=
                    e =>
                    {
                        // Z:\jsc.svn\examples\javascript\test\TestDelegateIfIfReturn\Application.cs

                        // let render man know..

                        var flag1 = vsync1renderman != null;
                        // nonroslyn!!
                        if (flag1)
                        // this if block is not detected?
                        {
                            // what's going on with if clauses?
                            nop();

                            // wtf???
                            var flag0 = vsync1renderman.Task.IsCompleted;
                            if (flag0)
                                return;
                        }
                        if (vsync0ambient != null)
                            if (vsync0ambient.Task.IsCompleted)
                                return;

                        // 38045ms Native.window.onframe { vsync1renderman = , vsync0ambient = [object Object] }

                        //Console.WriteLine("Native.window.onframe " + new { vsync1renderman, vsync0ambient });

                        //return;

                        //if (pause) return;
                        //if (pause.@checked)
                        //    return;


                        // can we float out of frame?
                        // haha. a bit too flickery.
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                        //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                        //globesphere.position.y = Math.Sin(fcamerax * 0.001) * 90.0;
                        //clouds.position.y = Math.Cos(fcamerax * 0.001) * 90.0;

                        //sphere.rotation.y += speed;
                        //clouds.rotation.y += speed;

                        // manual rebuild?
                        // red compiler notifies laptop chrome of pending update
                        // app reloads

                        applycameraoffset();
                        renderer0.clear();
                        //rendererPY.clear();

                        //cameraPX.aspect = canvasPX.aspect;
                        //cameraPX.updateProjectionMatrix();

                        // um what does this do?
                        //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                        // mousewheel allows the camera to move closer
                        // once we see the frame in vr, can we udp sync vr tracking back to laptop?


                        //this.targetPX.x += 1;
                        //this.targetNX.x -= 1;

                        //this.targetPY.y += 1;
                        //this.targetNY.y -= 1;

                        //this.targetPZ.z += 1;
                        //this.targetNZ.z -= 1;

                        // how does the 360 or shadertoy want our cubemaps?


                        // and then rotate right?

                        // how can we render cubemap?

                        if (cameraz.valueAsNumber == 0)
                            renderer0.setClearColor(0x0, 0);
                        else
                            renderer0.setClearColor(0x0, 1);

                        new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                        }.WithEach(cc =>
                            {
                                cc.clearRect(0, 0, cubefacesize, cubefacesize);
                            }
                        );

                        //gl.clear()

                        if (PXonly.@checked)
                        {
                            var cameraPXsw = Stopwatch.StartNew();

                            renderer0.render(scene, cameraPX);

                            // 35207ms { cameraPXsw = 00:00:00.88 }
                            //75505ms { cameraPXsw = 00:00:00.61 }
                            //Console.WriteLine(new { cameraPXsw });

                            // clear if transparent?
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            //return;

                        }
                        else
                        {

                            #region x
                            // upside down?
                            renderer0.render(scene, cameraPX);
                            canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNX);
                            canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion

                            #region z
                            renderer0.render(scene, cameraPZ);
                            canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                            renderer0.render(scene, cameraNZ);
                            canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            #endregion



                            #region y
                            renderer0.render(scene, cameraPY);

                            //canvasPY.save();
                            //canvasPY.translate(0, size);
                            //canvasPY.rotate((float)(-Math.PI / 2));
                            canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasPY.restore();


                            // the floor?

                            // render only this one?
                            renderer0.render(scene, cameraNY);
                            //canvasNY.save();
                            //canvasNY.translate(size, 0);
                            //canvasNY.rotate((float)(Math.PI / 2));
                            canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                            //canvasNY.restore();
                            // ?
                            #endregion


                            //renderer0.render(scene, cameraPX);


                            //rendererPY.render(scene, cameraPY);

                            // at this point we should be able to render the sphere texture

                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                            //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                            //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                            //var cube0 = new IHTMLImage[] {
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                            //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                            //};

                            #region Paint_Image
                            new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                        }.WithEachIndex(
                         (img, index) =>
                         {
                             gl4K.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                             //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                             gl4K.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                             // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                             // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                             //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                             //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                             gl4K.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                         }
                      );
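                            // face order note: TEXTURE_CUBE_MAP_POSITIVE_X (34069) through NEGATIVE_Z (34074)
                            // are consecutive, so POSITIVE_X + index walks +X, -X, +Y, -Y, +Z, -Z in step
                            // with the canvasPX..canvasNZ array order above.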


                            // http://stackoverflow.com/questions/11544608/how-to-clear-a-rectangle-area-in-webgl

                            if (cameraz.valueAsNumber == 0)
                                gl4K.clearColor(0, 0, 0, 0);
                            else
                                gl4K.clearColor(0, 0, 0, 1);

                            gl4K.clear(gl.COLOR_BUFFER_BIT);

                            // could do dynamic resolution - fog of war or fog of FOV, where up to a 150deg field of vision is encouraged, not 360
                            pass.Paint_Image(
                           0,

                           0,
                           0,
                           0,
                           0
                                //,

                      // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                      );

                            //paintsw.Stop();


                            // what does it do?
                            gl4K.flush();
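                            // flush() asks the GL driver to start executing the queued commands without
                            // blocking (unlike finish()); it makes sure the cubemap upload and the
                            // Paint_Image pass are submitted before this frame's canvases are reused.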
                            #endregion


                        }

                        // let render man know..
                        if (vsync1renderman != null)
                            if (!vsync1renderman.Task.IsCompleted)
                                vsync1renderman.SetResult(null);

                        if (vsync0ambient != null)
                            if (!vsync0ambient.Task.IsCompleted)
                                vsync0ambient.SetResult(null);
                    };


            }
            //);





            Console.WriteLine("do you see it?");
        }
        // http://youtu.be/Lo1IU8UAutE
        // 60hz 2160 4K!

        // The equirectangular projection was used in map creation since it was invented around 100 A.D. by Marinus of Tyre. 

        //        C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hzsky.png" "/sdcard/oculus/360photos/"
        //1533 KB/s(3865902 bytes in 2.461s)

        //C:\Users\Arvo> "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hznosky.png" "/sdcard/oculus/360photos/"
        //1556 KB/s(2714294 bytes in 1.703s)

        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\hz2048c3840x2160.png" "/sdcard/oculus/360photos/"

        // "X:\vr\tape360hzlights\0000.png"
        //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "r:\vr\tape360hzlights\0000.png" "/sdcard/oculus/360photos/hzlights.png"
        // 1421 KB/s (2516443 bytes in 1.729s)



        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809/chrome360hz

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150809

        // neither the eye nor the display will be able to do any stereo
        // until tech is near matrix capability. 2019?

        // cubemap can be used for all long range scenes
        // http://www.imdb.com/title/tt0112111/?ref_=nv_sr_1


        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150808/cubemapcamera
        // subst /D a:
        // subst a:  s:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360LightAnimation\Chrome360LightAnimation\bin\Debug\staging\Chrome360LightAnimation.Application\web
        // subst a: z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360LightAnimation\Chrome360LightAnimation\bin\Debug\staging\Chrome360LightAnimation.Application\web
        // Z:\jsc.svn\examples\javascript\chrome\apps\WebGL\Chrome360LightAnimation\Chrome360LightAnimation\bin\Debug\staging\Chrome360LightAnimation.Application\web
        // what if we want to do subst in another winstat or session?

        // ColladaLoader: Empty or non-existing file (assets/Chrome360LightAnimation/S6Edge.dae)

        /// <summary>
        /// This is a javascript application.
        /// </summary>
        /// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
        public Application(IApp page)
        {
            //FormStyler.AtFormCreated =
            //s =>
            //{
            //    s.Context.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;

            //    //var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDrag().AttachTo(s.Context.GetHTMLTarget());
            //    var x = new ChromeTCPServerWithFrameNone.HTML.Pages.AppWindowDragWithShadow().AttachTo(s.Context.GetHTMLTarget());



            //    s.Context.GetHTMLTarget().style.backgroundColor = "#efefef";
            //    //s.Context.GetHTMLTarget().style.backgroundColor = "#A26D41";

            //};

#if AsWEBSERVER
            #region += Launched chrome.app.window
            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeTCPServerAppWindow\ChromeTCPServerAppWindow\Application.cs
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                // if we run as a server. we can open up on android.

                //chrome.Notification.DefaultTitle = "Nexus7";
                //chrome.Notification.DefaultIconUrl = new x128().src;
                ChromeTCPServer.TheServerWithStyledForm.Invoke(
                     AppSource.Text
                //, AtFormCreated: FormStyler.AtFormCreated

                //AtFormConstructor:
                //    f =>
                //    {
                //        //arg[0] is typeof System.Int32
                //        //script: error JSC1000: No implementation found for this native method, please implement [static System.Drawing.Color.FromArgb(System.Int32)]

                //        // X:\jsc.svn\examples\javascript\forms\Test\TestFromArgb\TestFromArgb\ApplicationControl.cs

                //        f.BackColor = System.Drawing.Color.FromArgb(0xA26D41);
                //    }
                );
                return;
            }
            #endregion
#else

            #region += Launched chrome.app.window
            dynamic self = Native.self;
            dynamic self_chrome = self.chrome;
            object self_chrome_socket = self_chrome.socket;

            if (self_chrome_socket != null)
            {
                if (!(Native.window.opener == null && Native.window.parent == Native.window.self))
                {
                    Console.WriteLine("chrome.app.window.create, is that you?");

                    // pass thru
                }
                else
                {
                    // should jsc send a copresence udp message?
                    //chrome.runtime.UpdateAvailable += delegate
                    //{
                    //    new chrome.Notification(title: "UpdateAvailable");

                    //};

                    chrome.app.runtime.Launched += async delegate
                    {
                        // 0:12094ms chrome.app.window.create {{ href = chrome-extension://aemlnmcokphbneegoefdckonejmknohh/_generated_background_page.html }}
                        Console.WriteLine("chrome.app.window.create " + new { Native.document.location.href });

                        new chrome.Notification(title: "Chrome360LightAnimation");

                        // https://developer.chrome.com/apps/app_window#type-CreateWindowOptions
                        var xappwindow = await chrome.app.window.create(
                               Native.document.location.pathname, options: new
                               {
                                   alwaysOnTop = true,
                                   visibleOnAllWorkspaces = true
                               }
                        );

                        //xappwindow.setAlwaysOnTop

                        xappwindow.show();

                        await xappwindow.contentWindow.async.onload;

                        Console.WriteLine("chrome.app.window loaded!");
                    };


                    return;
                }
            }
            #endregion


#endif



            //const int size = 128;
            //const int size = 256; // 6 faces, 12KB
            //const int size = 512; // 6 faces, ?

            // WebGL: drawArrays: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering or is not 'texture complete'. Or the texture is Float or Half Float type with linear filtering while OES_float_linear or OES_half_float_linear extension is not enabled.

            //const int size = 720; // 6 faces, ?
            //const int size = 1024; // 6 faces, ?
            //const int cubefacesize = 1024; // 6 faces, ?

            // THREE.WebGLRenderer: Texture is not power of two. Texture.minFilter is set to THREE.LinearFilter or THREE.NearestFilter. ( chrome-extension://aemlnmcokphbneegoefdckonejmknohh/assets/Chrome360LightAnimation/anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs.jpg )
            int cubefacesize = 2048; // 6 faces, ?
                                     // "X:\vr\tape1\0000x2048.png"
                                     // for 60hz render we may want to use float camera precision, not available for the ui.
                                     //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x2048.png" "/sdcard/oculus/360photos/"
                                     //  "x:\util\android-sdk-windows\platform-tools\adb.exe" push "X:\vr\tape1\0000x128.png" "/sdcard/oculus/360photos/"

            //if (Environment.ProcessorCount < 8)
            //    //cubefacesize = 64; // 6 faces, ?
            //    cubefacesize = 128; // 6 faces, ?

            new IHTMLPre { new { Environment.ProcessorCount, cubefacesize } }.AttachToDocument();

            // can we keep fast fps yet highp?

            // can we choose this at runtime? design time wants fast fps, yet for the end product we want high-def on our render farm?
            //const int cubefacesize = 128; // 6 faces, ?

            //var cubecameraoffsetx = 256;
            var cubecameraoffsetx = 400;


            //var uizoom = 0.1;
            //var uizoom = cubefacesize / 128f;
            var uizoom = 128f / cubefacesize;
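            // e.g. cubefacesize = 2048 gives uizoom = 0.0625, so each 2048px face previews as a 128px thumbnail.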

            var far = 0xffffff;

            Native.css.style.backgroundColor = "blue";
            Native.css.style.overflow = IStyle.OverflowEnum.hidden;

            Native.body.Clear();
            (Native.body.style as dynamic).webkitUserSelect = "text";

            //new IHTMLPre { "can we stream it into VR, shadertoy, youtube 360, youtube stereo yet?" }.AttachToDocument();


            var sw = Stopwatch.StartNew();



            var oo = new List<THREE.Object3D>();

            var window = Native.window;


            // what about physics and that portal rendering?

            // if we are running as a chrome web server, we may also be opened as android ndk webview app
            //var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: window.aspect, near: 1, far: 2000);
            // once we update source
            // save the source
            // manually recompile 
            //cameraPX.position.z = 400;

            //// the camera should be close enough for the object to float off the FOV of PX
            //cameraPX.position.z = 200;

            // scene
            // can we make the 3D object orbit around us ?
            // and
            // stream it to vr?
            var scene = new THREE.Scene();

            // since our cube camera is somewhat a fixed thing
            // would it be easier to move mountains to come to us?
            // once we change code would chrome app be able to let VR know that a new view is available?
            var sceneg = new THREE.Group();
            sceneg.AttachTo(scene);


            // fly up?
            //sceneg.translateZ(-1024);
            // rotate the world, as the skybox then matches what we have on filesystem
            scene.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);
            // yet for headtracking we shall rotate camera


            //sceneg.position.set(0, 0, -1024);
            //sceneg.position.set(0, -1024, 0);

            var ambient = new THREE.AmbientLight(0x303030).AttachTo(sceneg);
            //scene.add(ambient);

            // should we fix jsc to do a more correct IDL?
            //var directionalLight = new THREE.DirectionalLight(0xffffff, 0.7);
            //directionalLight.position.set(0, 0, 1);
            //scene.add(directionalLight);

            #region light
            //var light = new THREE.DirectionalLight(0xffffff, 1.0);
            var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 2.5);
            //var light = new THREE.DirectionalLight(0xffffff, 1.5);
            //var lightOffset = new THREE.Vector3(0, 1000, 2500.0);
            var lightOffset = new THREE.Vector3(
                2000,
                700,

                // lower makes longer shadows 
                700.0
                );
            light.position.copy(lightOffset);
            light.castShadow = true;

            var xlight = light as dynamic;
            xlight.shadowMapWidth = 4096;
            xlight.shadowMapHeight = 2048;

            xlight.shadowDarkness = 0.1;
            //xlight.shadowDarkness = 0.5;

            xlight.shadowCameraNear = 10;
            xlight.shadowCameraFar = 10000;
            xlight.shadowBias = 0.00001;
            xlight.shadowCameraRight = 4000;
            xlight.shadowCameraLeft = -4000;
            xlight.shadowCameraTop = 4000;
            xlight.shadowCameraBottom = -4000;

            // wont show if we add skybox?
            xlight.shadowCameraVisible = true;

            //scene.add(light);
            //light.AttachTo(sceneg);
            #endregion
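            // the dynamic cast above works around shadow properties missing from the typed binding (see the
            // IDL note above); the flat shadowMapWidth / shadowCamera* members belong to the older three.js
            // shadow API, before these settings moved onto light.shadow.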




            // whats WebGLRenderTargetCube do?

            // WebGLRenderer preserveDrawingBuffer 



            var renderer0 = new THREE.WebGLRenderer(

                new
                {
                    antialias = true,
                    alpha = true,
                    preserveDrawingBuffer = true
                }
            );


            // must enable shadows on the renderer 
            renderer0.shadowMapEnabled = true;

            // https://github.com/mrdoob/three.js/issues/3836

            // the construct. white bg
            //renderer0.setClearColor(0xfffff, 1);

            //renderer.setSize(window.Width, window.Height);
            renderer0.setSize(cubefacesize, cubefacesize);

            //renderer0.domElement.AttachToDocument();
            //rendererPX.domElement.style.SetLocation(0, 0);
            //renderer0.domElement.style.SetLocation(4, 4);


            // top

            // http://stackoverflow.com/questions/27612524/can-multiple-webglrenderers-render-the-same-scene


            // need a place to show the cubemap face to GUI 
            // how does the stereo OTOY do it?
            // https://www.opengl.org/wiki/Sampler_(GLSL)

            // http://www.richardssoftware.net/Home/Post/25

            // [+X, –X, +Y, –Y, +Z, –Z] fa



            // move up
            //camera.position.set(-1200, 800, 1200);
            //var cameraoffset = new THREE.Vector3(0, 15, 0);

            // can we animate it?
            //var cameraoffset = new THREE.Vector3(0, 800, 1200);
            // can we have linear animation fromcenter of the map to the edge and back?
            // then do the flat earth sun orbit?
            var cameraoffset = new THREE.Vector3(
                // left?
                -512,
                // height?
                //0,
                //1600,
                //1024,

                // if the camera is in the center, would we need to move the scene?
                // we have to move the camera. as we move the scene the lights are messed up
                //2014,
                1024,

                //1200
                0
                // can we hover top of the map?
                );

            // original vieworigin
            //var cameraoffset = new THREE.Vector3(-1200, 800, 1200);



            var camerax = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048, max = 0 + 2048, valueAsNumber = -512, title = "camerax" }.AttachToDocument();
            // up. what's the highest a rocket can go, 120km?
            new IHTMLHorizontalRule { }.AttachToDocument();


            // how high is the bunker?
            var cameray = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 64, max = 2048, valueAsNumber = 1024, title = "cameray" }.AttachToDocument();
            new IHTMLBreak { }.AttachToDocument();
            var camerayHigh = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = cameray.max, max = 1024 * 256, valueAsNumber = cameray.max, title = "cameray" }.AttachToDocument();
            new IHTMLHorizontalRule { }.AttachToDocument();
            var cameraz = new IHTMLInput { type = ScriptCoreLib.Shared.HTMLInputTypeEnum.range, min = 0 - 2048, max = 0 + 2048, valueAsNumber = 0, title = "cameraz" }.AttachToDocument();

            // for render server
            var fcamerax = 0.0;
            var fcameray = 0.0;
            var fcameraz = 0.0;

            //while (await camerax.async.onchange)

            //cameray.onchange += delegate
            //{
            //    if (cameray.valueAsNumber < cameray.max)
            //        camerayHigh.valueAsNumber = camerayHigh.min;
            //};

            camerayHigh.onmousedown += delegate
            {
                //if (camerayHigh.valueAsNumber > camerayHigh.min)
                cameray.valueAsNumber = cameray.max;
            };


            Action applycameraoffset = delegate
            {
                // make sure UI and gpu sync up

                var cy = cameray;

                if (cameray.valueAsNumber < cameray.max)
                    camerayHigh.valueAsNumber = camerayHigh.min;

                if (camerayHigh.valueAsNumber > camerayHigh.min)
                    cameray.valueAsNumber = cameray.max;

                if (cameray.valueAsNumber == cameray.max)
                    cy = camerayHigh;



                cameraoffset = new THREE.Vector3(
                  // left?
                  camerax + fcamerax,
                   // height?
                   //0,
                   //1600,
                   //1024,

                   // if the camera is in the center, would we need to move the scene?
                   // we have to move the camera. as we move the scene the lights are messed up
                   //2014,
                   cy + fcameray,

                   //1200
                   cameraz + fcameraz
                   // can we hover top of the map?
                   );
            };
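            // coarse/fine handoff between the two height sliders: while cameray is below its max (2048)
            // camerayHigh stays pinned at its min; once cameray saturates, camerayHigh (up to 1024 * 256 = 262144)
            // becomes the effective height via cy.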


            #region y
            // need to rotate90?
            var cameraNY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNY.lookAt(new THREE.Vector3(0, -1, 0));
                cameraNY.position.add(cameraoffset);
            };

            //cameraNY.lookAt(new THREE.Vector3(0, 1, 0));
            var canvasNY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 2);
            canvasNY.canvas.title = "NY";
            canvasNY.canvas.AttachToDocument();
            canvasNY.canvas.style.transformOrigin = "0 0";
            canvasNY.canvas.style.transform = $"scale({uizoom})";

            var cameraPY = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPY.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPY.lookAt(new THREE.Vector3(0, 1, 0));
                cameraPY.position.add(cameraoffset);
            };
            //cameraPY.lookAt(new THREE.Vector3(0, -1, 0));
            var canvasPY = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPY.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 0);
            canvasPY.canvas.title = "PY";
            canvasPY.canvas.AttachToDocument();
            canvasPY.canvas.style.transformOrigin = "0 0";
            canvasPY.canvas.style.transform = $"scale({uizoom})";
            #endregion

            // transpose xz?

            #region x
            var cameraNX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraNX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNX.lookAt(new THREE.Vector3(0, 0, 1));
                cameraNX.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(0, 0, -1));
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNX.lookAt(new THREE.Vector3(1, 0, 0));
            var canvasNX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 2, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNX.canvas.title = "NX";
            canvasNX.canvas.AttachToDocument();
            canvasNX.canvas.style.transformOrigin = "0 0";
            canvasNX.canvas.style.transform = $"scale({uizoom})";

            var cameraPX = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            applycameraoffset += delegate
            {
                cameraPX.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPX.lookAt(new THREE.Vector3(0, 0, -1));
                cameraPX.position.add(cameraoffset);
            };
            //cameraPX.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPX.lookAt(new THREE.Vector3(1, 0, 0));
            //cameraPX.lookAt(new THREE.Vector3(-1, 0, 0));
            var canvasPX = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPX.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPX.canvas.title = "PX";
            canvasPX.canvas.AttachToDocument();
            canvasPX.canvas.style.transformOrigin = "0 0";
            canvasPX.canvas.style.transform = $"scale({uizoom})";
            #endregion



            #region z
            var cameraNZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, -1));
            applycameraoffset += delegate
            {
                cameraNZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraNZ.lookAt(new THREE.Vector3(1, 0, 0));
                cameraNZ.position.add(cameraoffset);
            };
            //cameraNX.lookAt(new THREE.Vector3(-1, 0, 0));
            //cameraNZ.lookAt(new THREE.Vector3(0, 0, 1));
            var canvasNZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasNZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 3, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasNZ.canvas.title = "NZ";
            canvasNZ.canvas.AttachToDocument();
            canvasNZ.canvas.style.transformOrigin = "0 0";
            canvasNZ.canvas.style.transform = $"scale({uizoom})";

            var cameraPZ = new THREE.PerspectiveCamera(fov: 90, aspect: 1.0, near: 1, far: far);
            //cameraPZ.lookAt(new THREE.Vector3(1, 0, 0));
            applycameraoffset += delegate
            {
                cameraPZ.position.copy(new THREE.Vector3(0, 0, 0));
                cameraPZ.lookAt(new THREE.Vector3(-1, 0, 0));
                cameraPZ.position.add(cameraoffset);
            };
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, 1));
            //cameraPZ.lookAt(new THREE.Vector3(0, 0, -1));
            var canvasPZ = new CanvasRenderingContext2D(cubefacesize, cubefacesize);
            canvasPZ.canvas.style.SetLocation(cubecameraoffsetx + (int)(uizoom * cubefacesize + 8) * 1, 8 + (int)(uizoom * cubefacesize + 8) * 1);
            canvasPZ.canvas.title = "PZ";
            canvasPZ.canvas.AttachToDocument();
            canvasPZ.canvas.style.transformOrigin = "0 0";
            canvasPZ.canvas.style.transform = $"scale({uizoom})";
            #endregion
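            // recap of the rig: six THREE.PerspectiveCamera instances with fov: 90 and aspect: 1.0
            // cover the cube faces PX/NX/PY/NY/PZ/NZ. each face renders into its own square canvas,
            // scaled down by uizoom and positioned via SetLocation as an unfolded cubemap cross so
            // the faces can be eyeballed while debugging. the lookAt directions are swapped/negated
            // relative to the "obvious" face axes (see the commented-out alternatives above),
            // presumably tuned by trial and error so the faces land the way the equirectangular
            // shader further below expects them.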




            // C++-style local aliases would be nice...
            var canvas0 = (IHTMLCanvas)renderer0.domElement;


            var old = new
            {



                CursorX = 0,
                CursorY = 0
            };


            var st = new Stopwatch();
            st.Start();

            //canvas0.css.active.style.cursor = IStyle.CursorEnum.move;

            #region onmousedown
            Native.body.onmousedown +=
                async e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                        return;

                    // movementX no longer works
                    old = new
                    {


                        e.CursorX,
                        e.CursorY
                    };


                    //e.CaptureMouse();
                    var release = e.Element.CaptureMouse();
                    await e.Element.async.onmouseup;

                    release();


                };
            #endregion
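            // mouse capture pattern: on mousedown over a canvas we remember the cursor position,
            // CaptureMouse() returns a release delegate while routing further mouse events to the
            // element, and the awaited onmouseup releases the capture again - so a drag keeps
            // working even if the cursor leaves the canvas mid-drag.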



            // X:\jsc.svn\examples\javascript\Test\TestMouseMovement\TestMouseMovement\Application.cs
            #region onmousemove
            Native.body.onmousemove +=
                e =>
                {
                    if (e.Element.nodeName.ToLower() != "canvas")
                    {
                        Native.body.style.cursor = IStyle.CursorEnum.@default;
                        return;
                    }

                    e.preventDefault();
                    e.stopPropagation();


                    Native.body.style.cursor = IStyle.CursorEnum.move;

                    var pointerLock = canvas0 == Native.document.pointerLockElement;


                    //Console.WriteLine(new { e.MouseButton, pointerLock, e.movementX });

                    if (e.MouseButton == IEvent.MouseButtonEnum.Left)
                    {

                        oo.WithEach(
                            x =>
                            {
                                x.rotation.y += 0.006 * (e.CursorX - old.CursorX);
                                x.rotation.x += 0.006 * (e.CursorY - old.CursorY);
                            }
                        );

                        old = new
                        {


                            e.CursorX,
                            e.CursorY
                        };



                    }

                };
            #endregion
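            // drag-to-rotate: 0.006 radians per pixel of cursor travel since the previous event,
            // applied to every object in oo. e.g. a 100px horizontal drag gives
            // 100 * 0.006 = 0.6 rad (about 34 degrees) of yaw.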

            // THREE.WebGLProgram: gl.getProgramInfoLog() C:\fakepath(78,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll
            // THREE.WebGLProgram: gl.getProgramInfoLog() (79,3-98): warning X3557: loop only executes for 1 iteration(s), forcing loop to unroll

            // http://www.roadtovr.com/youtube-confirms-stereo-3d-360-video-support-coming-soon/
            // https://www.youtube.com/watch?v=D-Wl9jAB45Q



            #region spherical
            var gl = new WebGLRenderingContext(alpha: true, preserveDrawingBuffer: true);
            var c = gl.canvas.AttachToDocument();

            //  3840x2160

            //c.style.SetSize(3840, 2160);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150722/360-youtube


            c.width = 3840;
            c.height = 2160;


            //c.width = 3840 * 2;
            //c.height = 2160 * 2;


            //c.width = 3840;
            //c.height = 2160;
            // aspect ratio ≈ 1.7778 (16:9)

            // https://www.youtube.com/watch?v=fTfJwzRsE-w
            //c.width = 7580;
            //c.height = 3840;
            // aspect ratio ≈ 1.9740 (not 16:9)

            // won't work
            //c.width = 8192;
            //c.height = 4096;


            // this has the wrong aspect?
            //c.width = 6466;
            //c.height = 3232;

            new IHTMLPre { new { c.width, c.height } }.AttachToDocument();

            //6466x3232

            //var suizoom = 720f / c.height;
            //var suizoom = 360f / c.height;
            var suizoom = 480f / c.width;

            c.style.transformOrigin = "0 0";
            c.style.transform = $"scale({suizoom})";
            //c.style.backgroundColor = "yellow";
            c.style.position = IStyle.PositionEnum.absolute;

            c.style.SetLocation(8 + (int)(uizoom * cubefacesize + 8) * 0, 8 + (int)(uizoom * cubefacesize + 8) * 3);
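            // preview scale: suizoom = 480 / 3840 = 0.125, so the full 3840x2160 equirectangular
            // canvas shows up as a 480x270 thumbnail below the cube-face canvases while the
            // full-resolution pixels stay available for capture.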

            var pass = new CubeToEquirectangular.Library.ShaderToy.EffectPass(
                       null,
                       gl,
                       precission: CubeToEquirectangular.Library.ShaderToy.DetermineShaderPrecission(gl),
                       supportDerivatives: gl.getExtension("OES_standard_derivatives") != null,
                       callback: null,
                       obj: null,
                       forceMuted: false,
                       forcePaused: false,
                       //quadVBO: Library.ShaderToy.createQuadVBO(gl, right: 0, top: 0),
                       outputGainNode: null
                   );

            // how shall we upload our textures?
            // can we reference GLSL.samplerCube yet?
            //pass.mInputs[0] = new samplerCube { };
            pass.mInputs[0] = new CubeToEquirectangular.Library.ShaderToy.samplerCube { };

            pass.MakeHeader_Image();
            var vs = new Shaders.ProgramFragmentShader();
            pass.NewShader_Image(vs);

            #endregion
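            // cube to equirectangular, in a nutshell (a sketch of what the fragment shader is
            // assumed to do; the actual GLSL lives in Shaders.ProgramFragmentShader):
            //
            //   for each output pixel (u, v) in [0..1] x [0..1]:
            //       longitude = (u - 0.5) * 2 * PI
            //       latitude  = (v - 0.5) * PI
            //       dir   = (cos(latitude) * sin(longitude), sin(latitude), cos(latitude) * cos(longitude))
            //       color = textureCube(cubemap, dir)
            //
            // which is why the only input the pass needs is the samplerCube that the render loop
            // below fills from the six face canvases.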




            var frame0 = new HTML.Images.FromAssets.galaxy_starfield().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.tiles_regrid().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets.anvil___spherical_hdri_panorama_skybox_by_macsix_d6vv4hs().AttachToDocument();
            //var xor = new HTML.Images.FromAssets.Orion360_test_image_8192x4096().AttachToDocument();
            //var xor = new HTML.Images.FromAssets._2_no_clouds_4k().AttachToDocument();
            //var frame0 = new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().AttachToDocument();


            // 270px
            //xor.style.height = "";
            frame0.style.height = "270px";
            frame0.style.width = "480px";
            frame0.style.SetLocation(
                8 + (int)(uizoom * cubefacesize + 8) * 0 + 480 + 16, 8 + (int)(uizoom * cubefacesize + 8) * 3);



            #region  skybox
            // what should the skybox do if we reach a high enough altitude?
            // fade into a space skybox?
            var skybox = new THREE.Mesh(new THREE.SphereGeometry(far * 0.9, 50, 50),
           new THREE.MeshBasicMaterial(new
           {
               map = THREE.ImageUtils.loadTexture(
                  //new HTML.Images.FromAssets._2294472375_24a3b8ef46_o().src
                  //new HTML.Images.FromAssets._4008650304_7f837ccbb7_b().src
                  frame0.src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20130616_222058().src
                   //new WebGLEquirectangularPanorama.HTML.Images.FromAssets.PANO_20121225_210448().src

                   )
           }));
            skybox.scale.x = -1;
            skybox.AttachTo(sceneg);
            #endregion
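            // skybox notes: the sphere radius is far * 0.9 so it stays inside the camera far plane,
            // and scale.x = -1 turns the sphere inside out so the equirectangular frame0 texture
            // is visible from within.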



            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), Math.PI / 2);
            //mesh.rotateOnAxis(new THREE.Vector3(1, 0, 0), -Math.PI / 2);

            // don't need the fixup unless we want to animate the sky rotation?
            //mesh.rotateOnAxis(new THREE.Vector3(0, 1, 0), -Math.PI / 2);


            // hide the sky to see camera lines?
            //  can we show this as HUD on VR in webview?
            //skybox.visible = false;
            //scene.add(skybox);




            //new IHTMLButton { }

            #region DirectoryEntry
            var dir = default(DirectoryEntry);

            new IHTMLButton { "openDirectory" }.AttachToDocument().onclick += async delegate
            {
                dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
            };

            frame0.onclick += delegate
            {
                // http://paulbourke.net/papers/vsmm2006/vsmm2006.pdf
                // "A method of creating synthetic stereoscopic panoramic images that can be implemented
                // in most rendering packages has been presented. If single panoramic pairs can be created
                // then stereoscopic panoramic movies are equally possible, giving rise to the prospect of
                // movies where the viewer can interact with, at least with regard to what they choose to look
                // at. These images can be projected so as to engage the two features of the human visual
                // system that assist in giving us a sense of immersion, the feeling of 'being there'. That is,
                // imagery that contains parallax information as captured from two horizontally separated eye
                // positions (stereopsis) and imagery that fills our peripheral vision. The details that define
                // how the two panoramic images should be created in rendering packages are provided, in
                // particular, how to precisely configure the virtual cameras and control the distance to zero
                // parallax."

                // grab a frame

                if (dir == null)
                {
                    // not exporting to file system?
                    var f0 = new IHTMLImage { src = gl.canvas.toDataURL() };

                    //var f0 = (IHTMLImage)gl.canvas;
                    //var f0 = (IHTMLImage)gl.canvas;
                    //var base64 = gl.canvas.toDataURL();


                    //frame0.src = base64;
                    frame0.src = f0.src;

                    // 7MB!

                    return;
                }

                // ---------------------------
                //IrfanView
                //---------------------------
                //Warning !
                //The file: "X:\vr\tape1\0001.jpg" is a PNG file with incorrect extension !
                //Rename ?
                //---------------------------
                //Yes   No   
                //---------------------------

                // haha this will render the thumbnail.
                //dir.WriteAllBytes("0000.png", frame0);

                //dir.WriteAllBytes("0000.png", gl.canvas);

                var glsw = Stopwatch.StartNew();
                dir.WriteAllBytes("0000.png", gl);

                new IHTMLPre { new { glsw.ElapsedMilliseconds } }.AttachToDocument();

                // {{ ElapsedMilliseconds = 1548 }}

                // 3.7MB
                // 3840x2160

            };

            #endregion
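            // two capture paths: with no directory chosen, the frame is round-tripped through
            // gl.canvas.toDataURL() (a base64 PNG, hence the ~7MB note above) and shown in frame0;
            // with a directory, the frame goes straight to disk as 0000.png via
            // dir.WriteAllBytes("0000.png", gl) - which is presumably why the context was created
            // with preserveDrawingBuffer: true, so the pixels are still readable after compositing.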

            var vsync = default(TaskCompletionSource<object>);

            new IHTMLButton {
                "render 60hz 30sec"
            }.AttachToDocument().onclick += async e =>
            {
                e.Element.disabled = true;


                var total = Stopwatch.StartNew();
                var status = "rendering... " + new { dir };

                new IHTMLPre { () => status }.AttachToDocument();

                if (dir == null)
                {
                    //dir = (DirectoryEntry)await chrome.fileSystem.chooseEntry(new { type = "openDirectory" });
                }

                total.Restart();



                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                status = "rendering... vsync";

                var frameid = -1;

                goto beforeframe;
                //fcamerax = -15.0;

                // parallax offset?

                await_nextframe:


                var filename = frameid.ToString().PadLeft(4, '0') + ".png";
                status = "rendering... " + new { frameid, filename };


                vsync = new TaskCompletionSource<object>();
                await vsync.Task;

                // frame0 has been rendered

                var swcapture = Stopwatch.StartNew();
                status = "WriteAllBytes... " + new { filename };
                //await Native.window.async.onframe;

                if (dir != null)
                    // https://code.google.com/p/chromium/issues/detail?id=404301
                    await dir.WriteAllBytes(filename, gl);

                //await dir.WriteAllBytes(filename, gl.canvas);

                status = "WriteAllBytes... done " + new { fcamerax, filename, swcapture.ElapsedMilliseconds };
                status = "rdy " + new { filename, fcamerax };
                //await Native.window.async.onframe;



                beforeframe:

                // speed? S6 slow motion?
                // this is really slow. if we do x4 * x2 = x8
                // https://www.youtube.com/watch?v=r76ULW16Ib8
                //fcamerax += 16 * (1.0 / 60.0);
                fcamerax = 128 * Math.Sin(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));
                fcameraz = 256 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));


                // up
                fcameray = 16 * Math.Cos(Math.PI * (frameid - (60 * 30 / 2f)) / (60 * 30 / 2f));
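                // camera path for the clip: frameid runs 0..1799 (60 Hz * 30 s), so the phase
                // PI * (frameid - 900) / 900 sweeps from -PI to +PI over the whole clip.
                // that traces one elliptical orbit in the XZ plane (128 by 256 units) plus a
                // +/-16 unit vertical bob, which applycameraoffset turns into an apparent
                // fly-around of the scene.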

                //fcamerax += (1.0 / 60.0);

                //fcamerax += (1.0 / 60.0) * 120;

                if (Environment.ProcessorCount < 8)
                    frameid += 30;
                else
                    frameid++;

                // 60hz 30sec
                if (frameid < 60 * 30)
                {
                    // Blob GC? either this delay helps, or it's the fact that we made the Blob static.
                    await Task.Delay(11);

                    goto await_nextframe;
                }

                total.Stop();
                status = "all done " + new { frameid, total.ElapsedMilliseconds };
                vsync = default(TaskCompletionSource<object>);
                // http://stackoverflow.com/questions/22899333/delete-javascript-blobs

                e.Element.disabled = false;
            };
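            // how the capture loop and the render loop stay in step: the button handler above
            // creates a fresh TaskCompletionSource and awaits it; Native.window.onframe (wired up
            // further below) renders one full cubemap + equirectangular frame, calls
            // vsync.SetResult(null), and then skips rendering while the completed task is still in
            // place. the goto beforeframe / await_nextframe pair is effectively a hand-rolled loop
            // over frameid that awaits exactly one rendered frame per iteration before writing it
            // to disk.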

            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLColladaExperiment\WebGLColladaExperiment\WebGLColladaExperiment.csproj"

            #region WebGLRah66Comanche
            // why isn't it being found?
            // "Z:\jsc.svn\examples\javascript\WebGL\collada\WebGLRah66Comanche\WebGLRah66Comanche\WebGLRah66Comanche.csproj"
            new global::WebGLRah66Comanche.Comanche(
            ).Source.Task.ContinueWithResult(
                dae =>
                {
                    dae.AttachTo(sceneg);
                    //dae.position.y = -40;
                    //dae.position.z = 280;
                    //scene.add(dae);
                    //oo.Add(dae);

                    // won't do it
                    //dae.castShadow = true;

                    dae.children[0].children[0].children.WithEach(x => x.castShadow = true);


                    // the rotors?
                    dae.children[0].children[0].children.Last().children.WithEach(x => x.castShadow = true);


                    dae.scale.set(0.5, 0.5, 0.5);
                    dae.position.x = -900;
                    dae.position.z = +900;

                    // raise it up
                    dae.position.y = 400;

                    //var sw = Stopwatch.StartNew();

                    //Native.window.onframe += delegate
                    //{
                    //    //dae.children[0].children[0].children.Last().al
                    //    //dae.children[0].children[0].children.Last().rotation.z = sw.ElapsedMilliseconds * 0.01;
                    //    //dae.children[0].children[0].children.Last().rotation.x = sw.ElapsedMilliseconds * 0.01;
                    //    dae.children[0].children[0].children.Last().rotation.y = sw.ElapsedMilliseconds * 0.01;
                    //};
                }
            );
            #endregion



            #region tree
            // "Z:\jsc.svn\examples\javascript\WebGL\WebGLGodRay\WebGLGodRay\WebGLGodRay.csproj"

            var materialScene = new THREE.MeshBasicMaterial(new { color = 0x000000, shading = THREE.FlatShading });
            var tloader = new THREE.JSONLoader();

            // http://stackoverflow.com/questions/16539736/do-not-use-system-runtime-compilerservices-dynamicattribute-use-the-dynamic
            // https://msdn.microsoft.com/en-us/library/system.runtime.compilerservices.dynamicattribute%28v=vs.110%29.aspx
            //System.Runtime.CompilerServices.DynamicAttribute

            tloader.load(

                new WebGLGodRay.Models.tree().Content.src,

                new Action<THREE.Geometry>(
                xgeometry =>
                {

                    var treeMesh = new THREE.Mesh(xgeometry, materialScene);
                    treeMesh.position.set(0, -150, -150);
                    treeMesh.position.x = -900;
                    treeMesh.position.z = -900;

                    treeMesh.position.y = 25;

                    var tsc = 400;
                    treeMesh.scale.set(tsc, tsc, tsc);

                    treeMesh.matrixAutoUpdate = false;
                    treeMesh.updateMatrix();


                    //treeMesh.AttachTo(scene);
                    treeMesh.AttachTo(sceneg);

                }
                )
                );
            #endregion


            // http://learningthreejs.com/blog/2013/09/16/how-to-make-the-earth-in-webgl/

            #region create floor

            // THREE.PlaneGeometry: Consider using THREE.PlaneBufferGeometry for lower memory footprint.
            // can we have our checkerboard?

            var floorColors = new[] {
                0xA26D41,
                0xA06040,
                0xAF6F4F,
                // marker to detect horizon
                0xAF0000,



                0xA26D41,
                0xA06040,
                0xAF6F4F,
                // marker to detect horizon
                0x006D00,



                0xA26D41,
                0xA06040,
                0xAF6F4F,
                // marker to detect horizon
                0x0000FF
            };


            // the human eye can see up to ~10 miles, then the horizon flattens out.
            var planeGeometry = new THREE.CubeGeometry(2048, 1, 2048);
            var planeGeometryMarkerH = new THREE.CubeGeometry(2048, 1, 2048 * 5);


            var planeGeometryMarkerV = new THREE.CubeGeometry(2048 * 5 * 4, 1, 2048 * 4);
            var planeGeometryV = new THREE.CubeGeometry(2048 * 4, 1, 2048 * 4);
            ////var floor0 = new THREE.Mesh(planeGeometry,
            ////        new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })

            ////    );
            //////plane.castShadow = false;
            ////floor0.receiveShadow = true;
            ////floor0.AttachTo(sceneg);

            ////var floor1 = new THREE.Mesh(planeGeometry,
            ////       //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
            ////       new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

            ////   );
            //////plane.castShadow = false;
            ////floor1.receiveShadow = true;
            ////floor1.position.set(2048, 0, 1024);
            ////floor1.AttachTo(sceneg);

            // can we see the horizon?
            for (int i = 0; i < 3 * 256; i++)
            {
                var planeGeometry0 = planeGeometry;

                if (i % 4 == 3)
                {
                    planeGeometry0 = planeGeometryMarkerH;

                    // for high altitude zoom level

                    var i4 = (i / 4);


                    var planeGeometryV0 = planeGeometryV;

                    if (i4 % 4 == 3)
                        planeGeometryV0 = planeGeometryMarkerV;

                    {
                        var floor2 = new THREE.Mesh(planeGeometryV0,
                            //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                            new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = floorColors[i4 % floorColors.Length], specular = 0xA26D41, shininess = 1 })

                        );
                        //plane.castShadow = false;
                        floor2.receiveShadow = true;
                        floor2.position.set(1024 * -i, 0, 2048 * i);
                        floor2.AttachTo(scene);
                    }


                    {
                        var floor2 = new THREE.Mesh(planeGeometryV0,
                            //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                            new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = floorColors[(i / 4) % floorColors.Length], specular = 0xA26D41, shininess = 1 })

                        );
                        //plane.castShadow = false;
                        floor2.receiveShadow = true;
                        floor2.position.set(-1024 * -i, 0, -2048 * i);
                        floor2.AttachTo(scene);
                    }
                }

                {
                    var floor2 = new THREE.Mesh(planeGeometry0,
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                        new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = floorColors[i % floorColors.Length], specular = 0xA26D41, shininess = 1 })

                    );
                    //plane.castShadow = false;
                    floor2.receiveShadow = true;
                    floor2.position.set(2048 * i, 0, 1024 * i);
                    floor2.AttachTo(scene);
                }

                // flipz
                {
                    var floor2 = new THREE.Mesh(planeGeometry0,
                        //new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = 0xA26D41, specular = 0xA26D41, shininess = 1 })
                        new THREE.MeshPhongMaterial(new { ambient = 0x101010, color = floorColors[i % floorColors.Length], specular = 0xA26D41, shininess = 1 })

                    );
                    //plane.castShadow = false;
                    floor2.receiveShadow = true;
                    floor2.position.set(2048 * -i, 0, 1024 * -i);
                    floor2.AttachTo(scene);
                }
            }

            #endregion
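            // floor layout recap: 2048-unit slabs march along a diagonal in both directions from
            // the origin, cycling through floorColors so adjacent slabs stay distinguishable.
            // every 4th color is a saturated marker (red/green/blue), and every 4th slab also
            // swaps in the elongated MarkerH/MarkerV geometry plus two extra wide slabs, so the
            // horizon line and rough orientation stay identifiable even in the downscaled
            // equirectangular preview.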



            // "shadow cameras" show the light source and direction

            // spotlight #1 -- yellow, dark shadow
            var spotlight = new THREE.SpotLight(0xffff00);
            spotlight.position.set(-60, 150, -30);
            //spotlight.shadowCameraVisible = true;
            spotlight.shadowDarkness = 0.95;
            spotlight.intensity = 2;
            // must enable shadow casting ability for the light
            spotlight.castShadow = true;
            scene.add(spotlight);

            // spotlight #2 -- red, light shadow
            var spotlight2 = new THREE.SpotLight(0xff0000);
            //var spotlight2 = new THREE.SpotLight(0xffff00);
            spotlight2.position.set(60, 150, -60);
            scene.add(spotlight2);
            //spotlight2.shadowCameraVisible = true;
            spotlight2.shadowDarkness = 0.70;
            spotlight2.intensity = 2;
            spotlight2.castShadow = true;


            // THREE.WebGLProgram: gl.getProgramInfoLog() Pixel shader sampler count exceeds MAX_TEXTURE_IMAGE_UNITS (16).
            for (int i = 0; i < 8; i++)
            {

                // spotlight #3 blue
                var spotlight3 = new THREE.SpotLight(0x0000ff);
                //var spotlight3 = new THREE.SpotLight(0x00ffff);
                spotlight3.position.set(150 * i, 80, -100);
                //spotlight3.shadowCameraVisible = true;
                spotlight3.shadowDarkness = 0.95;
                spotlight3.intensity = 2;
                spotlight3.castShadow = true;
                scene.add(spotlight3);
                // change the direction this spotlight is facing
                var lightTarget = new THREE.Object3D();
                lightTarget.position.set(150 * i, 10, -100);
                scene.add(lightTarget);
                spotlight3.target = lightTarget;



            }
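            // why the blue spotlight loop stops at 8: every shadow-casting light adds its own
            // shadow-map sampler to the generated shader, and the driver warning pasted above
            // reports the pixel shader exceeding MAX_TEXTURE_IMAGE_UNITS (16) - so the count is
            // presumably kept low enough to stay under that limit together with the other two
            // spotlights.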





            // cube: mesh to cast shadows
            #region castShadow
            var cubeGeometry = new THREE.CubeGeometry(50, 50, 50);
            var cubeMaterial = new THREE.MeshLambertMaterial(new { color = 0x888888 });

            var cube0 = new THREE.Mesh(cubeGeometry, cubeMaterial);
            //cube.position.set(0, 50, 0);
            cube0.position.set(0, 100, 0);
            // Note that the mesh is flagged to cast shadows
            cube0.castShadow = true;
            scene.add(cube0);

            // floor: mesh to receive shadows

            #endregion






            #region create walls

            var random = new Random();
            var meshArray = new List<THREE.Mesh>();
            var geometry = new THREE.CubeGeometry(1, 1, 1);
            //var sw = Stopwatch.StartNew();

            for (var i = 3; i < 9; i++)
            {

                //THREE.MeshPhongMaterial
                var ii = new THREE.Mesh(geometry,


                    new THREE.MeshPhongMaterial(new { ambient = 0x000000, color = 0xA06040, specular = 0xA26D41, shininess = 1 })

                    //new THREE.MeshLambertMaterial(
                    //new
                    //{
                    //    color = (Convert.ToInt32(0xffffff * random.NextDouble())),
                    //    specular = 0xffaaaa,
                    //    ambient= 0x050505, 
                    //})

                    );
                ii.position.x = i % 7 * 200 - 2.5f;

                // raise it up
                ii.position.y = .5f * 100 * i;
                ii.position.z = -1 * i * 100;

                ii.castShadow = true;
                ii.receiveShadow = true;
                //ii.scale.set(100, 100, 100 * i);
                ii.scale.set(100, 100 * i, 100);


                meshArray.Add(ii);

                //scene.add(ii);
                ii.AttachTo(sceneg);

                if (i % 2 == 0)
                {

                    // Z:\jsc.svn\examples\javascript\WebGL\WebGLHZBlendCharacter\WebGLHZBlendCharacter\Application.cs
#if FWebGLHZBlendCharacter
                    #region SpeedBlendCharacter
                    var _i = i;
                    { WebGLHZBlendCharacter.HTML.Pages.TexturesImages ref0; }

                    var blendMesh = new THREE.SpeedBlendCharacter();
                    blendMesh.load(
                        new WebGLHZBlendCharacter.Models.marine_anims().Content.src,
                        new Action(
                            delegate
                            {
                                // buildScene
                                //blendMesh.rotation.y = Math.PI * -135 / 180;
                                blendMesh.castShadow = true;
                                // we cannot scale down; we want our shadows
                                //blendMesh.scale.set(0.1, 0.1, 0.1);

                                blendMesh.position.x = (_i + 2) % 7 * 200 - 2.5f;

                                // raise it up
                                //blendMesh.position.y = .5f * 100;
                                blendMesh.position.z = -1 * _i * 100;


                                var xtrue = true;
                                // run
                                blendMesh.setSpeed(1.0);

                                // will in turn call THREE.AnimationHandler.play( this );
                                blendMesh.run.play();
                                // this won't help. the bokeh pass does not seem to see the animation.
                                blendMesh.run.update(1);

                                blendMesh.showSkeleton(!xtrue);

                                //scene.add(blendMesh);
                                blendMesh.AttachTo(sceneg);


                                //Native.window.onframe +=
                                // delegate
                                // {

                                //     blendMesh.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;



                                //     ii.rotation.y = Math.PI * 0.0002 * sw.ElapsedMilliseconds;

                                // };

                            }
                        )
                    );
                    #endregion
#endif
                }

            }
            #endregion


            // "Z:\jsc.svn\examples\javascript\WebGL\HeatZeekerRTSOrto\HeatZeekerRTSOrto\HeatZeekerRTSOrto.csproj"


            #region HZCannon
            new HeatZeekerRTSOrto.HZCannon().Source.Task.ContinueWithResult(
                async cube =>
                {
                    // https://github.com/mrdoob/three.js/issues/1285
                    //cube.children.WithEach(c => c.castShadow = true);

                    //cube.traverse(
                    //    new Action<THREE.Object3D>(
                    //        child =>
                    //        {
                    //            // does it work? do we need it?
                    //            //if (child is THREE.Mesh)

                    //            child.castShadow = true;
                    //            //child.receiveShadow = true;

                    //        }
                    //    )
                    //);

                    // um can edit and continue insert code going back in time?
                    cube.scale.x = 10.0;
                    cube.scale.y = 10.0;
                    cube.scale.z = 10.0;



                    //cube.castShadow = true;
                    //dae.receiveShadow = true;


                    // jsc, what about out-of-band code patching?
                    cube.position.z = 600;
                    cube.position.x = -900;
                    //cube.position.y = -400;

                    //cube.position.x = -100;
                    //cube.position.y = -400;

                    ////cube.position.y = (cube.scale.y * 50) / 2;
                    //cube.position.z = Math.Floor((random() * 1000 - 500) / 50) * 50 + 25;



                    // if I want to rotate, how do I do it?
                    //cube.rotation.z = random() + Math.PI;
                    //cube.rotation.x = random() + Math.PI;
                    var sw2 = Stopwatch.StartNew();



                    //scene.add(cube);
                    cube.AttachTo(sceneg);
                    //interactiveObjects.Add(cube);

                    // offset is wrong
                    //while (true)
                    //{
                    //    await Native.window.async.onframe;

                    //    cube.rotation.y = Math.PI * 0.0002 * sw2.ElapsedMilliseconds;

                    //}
                }
            );
            #endregion


            #region HZBunker
            new HeatZeekerRTSOrto.HZBunker().Source.Task.ContinueWithResult(
                     cube =>
                     {
                         // https://github.com/mrdoob/three.js/issues/1285
                         //cube.children.WithEach(c => c.castShadow = true);
                         cube.castShadow = true;

                         //cube.traverse(
                         //    new Action<THREE.Object3D>(
                         //        child =>
                         //        {
                         //            // does it work? do we need it?
                         //            //if (child is THREE.Mesh)
                         //            child.castShadow = true;
                         //            //child.receiveShadow = true;

                         //        }
                         //    )
                         //);

                         // um can edit and continue insert code going back in time?
                         cube.scale.x = 10.0;
                         cube.scale.y = 10.0;
                         cube.scale.z = 10.0;

                         //cube.castShadow = true;
                         //dae.receiveShadow = true;

                         cube.position.x = -1000;
                         //cube.position.y = (cube.scale.y * 50) / 2;
                         cube.position.z = 0;

                         cube.AttachTo(sceneg);
                         //scene.add(cube);
                     }
                 );
            #endregion


            new Models.ColladaS6Edge().Source.Task.ContinueWithResult(
                   dae =>
                   {
                       // 90deg? note: -Math.Cos(Math.PI) evaluates to 1.0, i.e. a tilt of 1 radian (~57.3°),
                       // not Math.PI / 2
                       dae.rotation.x = -Math.Cos(Math.PI);

                       //dae.scale.x = 30;
                       //dae.scale.y = 30;
                       //dae.scale.z = 30;
                       dae.position.z = -(65 - 200);





                       var scale = 0.9;

                       // jsc, do we have ILObserver available yet?
                       dae.scale.x = scale;
                       dae.scale.y = scale;
                       dae.scale.z = scale;


                       //#region onmousewheel
                       //Native.body.onmousewheel +=
                       //    e =>
                       //    {
                       //        e.preventDefault();

                       //        //camera.position.z = 1.5;

                       //        // min max. shall adjust speed also!
                       //        // max 4.0
                       //        // min 0.6
                       //        dae.position.z -= 10.0 * e.WheelDirection;

                       //        //camera.position.z = 400;
                       //        //dae.position.z = dae.position.z.Max(-200).Min(200);

                       //        //Native.document.title = new { z }.ToString();

                       //    };
                       //#endregion


                       ////dae.position.y = -80;

                       //dae.AttachTo(sceneg);
                       ////scene.add(dae);
                       //oo.Add(dae);




                       // view-source:http://threejs.org/examples/webgl_multiple_canvases_circle.html
                       // https://threejsdoc.appspot.com/doc/three.js/src.source/extras/cameras/CubeCamera.js.html
                       Native.window.onframe +=
                           e =>
                           {
                               // let render man know..
                               if (vsync != null)
                                   if (vsync.Task.IsCompleted)
                                       return;


                               //if (pause) return;
                               //if (pause.@checked)
                               //    return;


                               // can we float out of frame?
                               // haha. a bit too flickery.
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.01) * 50.0;
                               //dae.position.x = Math.Sin(e.delay.ElapsedMilliseconds * 0.001) * 190.0;
                               dae.position.x = Math.Sin(fcamerax * 0.001) * 190.0;
                               dae.position.y = Math.Cos(fcamerax * 0.001) * 90.0;
                               // manual rebuild?
                               // red compiler notifies laptop chrome of pending update
                               // app reloads

                               applycameraoffset();
                               renderer0.clear();
                               //rendererPY.clear();

                               //cameraPX.aspect = canvasPX.aspect;
                               //cameraPX.updateProjectionMatrix();

                               // um what does this do?
                               //cameraPX.position.z += (z - cameraPX.position.z) * e.delay.ElapsedMilliseconds / 200.0;
                               // mousewheel allows the camera to move closer
                               // once we see the frame in VR, can we UDP-sync the VR tracking back to the laptop?


                               //this.targetPX.x += 1;
                               //this.targetNX.x -= 1;

                               //this.targetPY.y += 1;
                               //this.targetNY.y -= 1;

                               //this.targetPZ.z += 1;
                               //this.targetNZ.z -= 1;

                               // how does the 360 or shadertoy want our cubemaps?


                               // and then rotate right?

                               // how can we render cubemap?


                               #region x
                               // upside down?
                               renderer0.render(scene, cameraPX);
                               canvasPX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNX);
                               canvasNX.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion

                               #region z
                               renderer0.render(scene, cameraPZ);
                               canvasPZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);

                               renderer0.render(scene, cameraNZ);
                               canvasNZ.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               #endregion



                               #region y
                               renderer0.render(scene, cameraPY);

                               //canvasPY.save();
                               //canvasPY.translate(0, size);
                               //canvasPY.rotate((float)(-Math.PI / 2));
                               canvasPY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasPY.restore();


                               renderer0.render(scene, cameraNY);
                               //canvasNY.save();
                               //canvasNY.translate(size, 0);
                               //canvasNY.rotate((float)(Math.PI / 2));
                               canvasNY.drawImage((IHTMLCanvas)renderer0.domElement, 0, 0, cubefacesize, cubefacesize);
                               //canvasNY.restore();
                               // ?
                               #endregion
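                               // single-renderer approach: instead of six WebGLRenderers, the one
                               // renderer0 is pointed at each face camera in turn and its domElement
                               // is copied into the matching face canvas via drawImage. the
                               // commented-out save/translate/rotate calls are leftovers from trying
                               // to fix face orientation on the 2D side; the current code appears to
                               // bake the orientation into the camera lookAt directions instead.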


                               //renderer0.render(scene, cameraPX);


                               //rendererPY.render(scene, cameraPY);

                               // at this point we should be able to render the sphere texture

                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_X = 34069;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_X = 34070;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Y = 34071;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Y = 34072;
                               //public const uint TEXTURE_CUBE_MAP_POSITIVE_Z = 34073;
                               //public const uint TEXTURE_CUBE_MAP_NEGATIVE_Z = 34074;


                               //var cube0 = new IHTMLImage[] {
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_px(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nx(),

                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_py(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_ny(),


                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_pz(),
                               //        new CSS3DPanoramaByHumus.HTML.Images.FromAssets.humus_nz()
                               //};

                               new[] {
                                   canvasPX, canvasNX,
                                   canvasPY, canvasNY,
                                   canvasPZ, canvasNZ
                               }.WithEachIndex(
                                   (img, index) =>
                                   {
                                       gl.bindTexture(gl.TEXTURE_CUBE_MAP, pass.tex);

                                       //gl.pixelStorei(gl.UNPACK_FLIP_X_WEBGL, false);
                                       gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);

                                       // http://stackoverflow.com/questions/15364517/pixelstoreigl-unpack-flip-y-webgl-true

                                       // https://msdn.microsoft.com/en-us/library/dn302429(v=vs.85).aspx
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 0);
                                       //gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);

                                       gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + (uint)index, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img.canvas);

                                   }
                                );
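                               // face upload: the array order PX, NX, PY, NY, PZ, NZ matches the
                               // WebGL cube-map targets, which are consecutive constants starting at
                               // TEXTURE_CUBE_MAP_POSITIVE_X (34069), so POSITIVE_X + index selects
                               // the right face. UNPACK_FLIP_Y_WEBGL is left false here; any needed
                               // flips are presumably already baked into the face camera orientations.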

                               // could do dynamic resolution - fog of war or fog of FOV - where up to a 150° field of vision is encouraged, not 360°
                               pass.Paint_Image(
                                     0,

                                     0,
                                     0,
                                     0,
                                     0
                                //,

                                // gl_FragCoord
                                // cannot be scaled, and can be referenced directly.
                                // need another way to scale
                                //zoom: 0.3f
                                );

                               //paintsw.Stop();


                               // what does it do?
                               gl.flush();

                               // let render man know..
                               if (vsync != null)
                                   if (!vsync.Task.IsCompleted)
                                       vsync.SetResult(null);
                           };


                   }
               );





            Console.WriteLine("do you see it?");
        }