protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            var mDraw = new DrawOnTop(this)
            {
                text = getLocalIpAddress() + ":1112"
            };

            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        }
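        // DrawOnTop is this repo's HUD overlay view. For orientation, a minimal
        // sketch of such a view, assuming the real class exposes the text/x/y/color
        // fields used below (text is a plain string in some of these examples and a
        // Func<string> in others; all names here are illustrative):
        //
        //class DrawOnTopSketch : android.view.View
        //{
        //    public Func<string> text = () => "";
        //    public int x = 100, y = 100;
        //    public int color = android.graphics.Color.GREEN;
        //
        //    public DrawOnTopSketch(android.content.Context context) : base(context) { }
        //
        //    protected override void onDraw(android.graphics.Canvas canvas)
        //    {
        //        var paint = new android.graphics.Paint();
        //        paint.setColor(color);
        //        paint.setTextSize(24);
        //        // redraw the latest HUD text on every postInvalidate()
        //        canvas.drawText(text(), x, y, paint);
        //        base.onDraw(canvas);
        //    }
        //}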
        protected override void onCreate(Bundle savedInstanceState)
        {
            Console.WriteLine("enter OVROculus360Photos ApplicationActivity onCreate");

            base.onCreate(savedInstanceState);



            Console.WriteLine("about to convince NDK what the first image should be...");
            // http://www.flightradar24.com/18.39,37.3/2

            // http://paulbourke.net/geometry/transformationprojection/

            // http://krpano.com/download/
            // http://unrealoldfriends.activeboard.com/t47250341/creating-background-using-spacescape/?page=1

            //Convert CUBE to SPHERE droplet


            //kcube2sphere 1.18.4 - 64bit (build 2015-04-23)
            //loading...
            //loading azi_l.jpg...
            //loading azi_f.jpg...
            //loading azi_r.jpg...
            //loading azi_b.jpg...
            //loading azi_u.jpg...
            //loading azi_d.jpg...
            //done.
            //making sphere azi_sphere.tif...
            //done.

            //Press any key to continue . . .



            //C:\Windows\system32> x:\util\android-sdk-windows\platform-tools\adb.exe push X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures  /sdcard/oculus/360Photos/
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_pz.jpg -> /sdcard/oculus/360Photos/azi_pz.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_py.jpg -> /sdcard/oculus/360Photos/azi_py.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_px.jpg -> /sdcard/oculus/360Photos/azi_px.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_nz.jpg -> /sdcard/oculus/360Photos/azi_nz.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_ny.jpg -> /sdcard/oculus/360Photos/azi_ny.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_nx.jpg -> /sdcard/oculus/360Photos/azi_nx.jpg
            //6 files pushed. 0 files skipped.
            //466 KB/s (969865 bytes in 2.030s)

            //C:\Windows\system32> x:\util\android-sdk-windows\platform-tools\adb.exe shell cp /sdcard/oculus/360Photos/humus.thm /sdcard/oculus/360Photos/azi.thm
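            // The copy lambda below does at runtime what the adb push above does
            // from the desktop: it copies a bundled asset into
            // /sdcard/oculus/360Photos/ so the native viewer has a first image.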

            Action <string, string> copy =
                (from, to) =>
            {
                try
                {
                    // http://gis.stackexchange.com/questions/92907/re-project-raster-image-from-mercator-to-equirectangular

                    // https://en.wikipedia.org/wiki/List_of_map_projections
                    // Web Mercator
                    // https://xkcd.com/977/
                    // mercator?
                    var value = this.getResources().getAssets().open(from);
                    var s     = new __NetworkStream {
                        InternalInputStream = value
                    };

                    // a single Stream.Read is not guaranteed to return the
                    // whole asset (the original one-shot read truncated
                    // anything larger than its fixed 4 MB buffer), so loop to EOF
                    var buffer = new byte[64 * 1024];

                    var m = new MemoryStream();

                    int len;
                    while ((len = s.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        m.Write(buffer, 0, len);
                    }

                    //s.CopyTo(m); // equivalent on .NET 4+

                    File.WriteAllBytes(to, m.ToArray());
                }
                catch (Exception ex)
                {
                    // swallow and continue; a failed copy only means the native
                    // viewer falls back to whatever is already on the sdcard
                    Console.WriteLine("about to convince NDK what the first image should be... fault " + new { from, to, ex.Message });
                }
            };

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150807/ovroculus360photosndk
            copy("2_no_clouds_4k.jpg", "/sdcard/oculus/360Photos/0.jpg");
            //copy("1.jpg", "/sdcard/oculus/360Photos/1.jpg");



            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150724/invaders
            //copy("celestial-joshua-trees-milky-way-in-saline-va.jpg", "/sdcard/oculus/360Photos/2.jpg");


            //Implementation not found for type import :
            //type: System.IO.DirectoryInfo
            //method: System.IO.FileInfo[] GetFiles()
            //Did you forget to add the [Script] attribute?
            //Please double check the signature!

            //Path.get

            var emptyFiles =
                from pf in new DirectoryInfo("/sdcard/oculus/360Photos/").GetFiles()
                where pf.Extension.ToLower() == ".jpg"
                where pf.Length == 0
                select pf;

            foreach (var emptyFile in emptyFiles.ToArray())
            {
                Console.WriteLine(new { emptyFile });

                emptyFile.Delete();
            }
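            // presumably a copy that faulted halfway leaves a zero-byte .jpg
            // behind; deleting those keeps the native viewer from picking up
            // an empty panorama on its next scan.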


            Console.WriteLine("about to convince NDK what the first image should be... done");


            var intent                = getIntent();
            var commandString         = com.oculus.vrappframework.VrActivity.getCommandStringFromIntent(intent);
            var fromPackageNameString = com.oculus.vrappframework.VrActivity.getPackageStringFromIntent(intent);
            var uriString             = com.oculus.vrappframework.VrActivity.getUriStringFromIntent(intent);

            // D/CrashAnrDetector( 3472):     #00 pc 00092ac0  /data/app/OVROculus360Photos.Activities-1/lib/arm/libmain.so (OVR::ovrMessageQueue::PostMessage(char const*, bool, bool)+8)


            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160103/oculus360photossdk
            this.appPtr = OVROculus360Photos.Activities.xMarshal.nativeSetAppInterface(
                this,
                fromPackageNameString,
                commandString,
                uriString
                );

            var args = new args
            {
            };

            var uploadLength   = 0L;
            var uploadPosition = 0L;

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                text = () => sw.ElapsedMilliseconds + "ms "
                       //+ "\n " + Path.GetFileName(args.filename)
                       + "\n " + args.filename

                       + "\n " + new
                {
                    upload = (int)(100 * (uploadPosition + 1) / (args.filesize + 1)) + "%",
                    uploadPosition,
                    args.filesize,

                    // can we capture pointer?

                    args.x,
                    args.y,
                    args.z,

                    //uploadLength
                }.ToString().Replace(",", ",\n")

                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
            };
            #endregion


            //Task.Run(

            #region sendTracking
            Action <IPAddress> sendTracking = nic =>
            {
                var port = new Random().Next(16000, 40000);

                //new IHTMLPre { "about to bind... " + new { port } }.AttachToDocument();

                // where is bind async?
                var socket = new UdpClient(
                    new IPEndPoint(nic, port)
                    );


                // who is on the other end?
                var nmessage = args.x + ":" + args.y + ":" + args.z + ":0:" + args.filename;

                var data = Encoding.UTF8.GetBytes(nmessage);      // encode the tracking message as a UTF-8 byte array


                //new IHTMLPre { "about to send... " + new { data.Length } }.AttachToDocument();

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeUDPNotification\ChromeUDPNotification\Application.cs
                //Console.WriteLine("about to Send");
                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
                socket.Send(
                    data,
                    data.Length,
                    hostname: "239.1.2.3",
                    port: 49834
                    );



                socket.Close();
            };
            #endregion
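            // For the other end of this multicast: a minimal desktop-side
            // receiver sketch, assuming the same LAN and the group/port used
            // above (runs inside an async method):
            //
            //var listener = new UdpClient(49834);
            //listener.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"));
            //while (true)
            //{
            //    var result = await listener.ReceiveAsync();
            //    // payload format: "x:y:z:0:filename"
            //    var parts = Encoding.UTF8.GetString(result.Buffer).Split(':');
            //    Console.WriteLine(new { x = parts[0], y = parts[1], z = parts[2], filename = parts[4] });
            //}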



            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient ctor
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient before this.Client
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient after this.Client { Client = ScriptCoreLibJava.BCLImplementation.System.Net.Sockets.__Socket@4f1c02b }
            //I/System.Console( 9109): 2395:1fb3 enter GetAllNetworkInterfaces
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient ctor

            string current = null;
            byte[] bytes   = null;

            new Thread(
                delegate()
            {
                // bg thread


                // back off for a second before the polling loop starts
                Thread.Sleep(1000);
                // await gear on

                while (true)
                {
                    // collect tracking from ndk
                    // broadcast to udp


                    //Thread.Sleep(1000 / 15);

                    //var a = new
                    //{
                    //    // for java do we also do the fields?
                    //    x = 0
                    //};

                    args.filename = OVROculus360Photos.Activities.xMarshal.stringFromJNI(args);

                    //E/AndroidRuntime( 7601): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'char[] java.lang.String.toCharArray()' on a null object reference
                    //E/AndroidRuntime( 7601):        at java.io.File.fixSlashes(File.java:185)
                    //E/AndroidRuntime( 7601):        at java.io.File.<init>(File.java:134)
                    //E/AndroidRuntime( 7601):        at ScriptCoreLibJava.BCLImplementation.System.IO.__File.Exists(__File.java:57)
                    //E/AndroidRuntime( 7601):        at OVROculus360PhotosHUD.Activities.ApplicationActivity___c__DisplayClass1d._onCreate_b__1b(ApplicationActivity___c__DisplayClass1d.java:95)



                    // uplink 144Mbps
                    // 18 MBps
                    #region udp broadcast
                    // overkill at 60hz
                    NetworkInterface.GetAllNetworkInterfaces().WithEach(
                        n =>
                    {
                        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                        // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                        var IPProperties    = n.GetIPProperties();
                        var PhysicalAddress = n.GetPhysicalAddress();



                        foreach (var ip in IPProperties.UnicastAddresses)
                        {
                            // ipv4
                            if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                            {
                                if (!IPAddress.IsLoopback(ip.Address))
                                {
                                    if (n.SupportsMulticast)
                                    {
                                        //fWASDC(ip.Address);
                                        //fParallax(ip.Address);
                                        //fvertexTransform(ip.Address);
                                        sendTracking(ip.Address);
                                    }
                                }
                            }
                        }
                    }
                        );



                    #endregion

                    if (args.filename != null)
                    {
                        if (File.Exists(args.filename))
                        {
                            if (current != args.filename)
                            {
                                current = args.filename;

                                var ff = new FileInfo(args.filename);

                                args.filesize = ff.Length;

                                // we are not on ui thread.
                                // HUD thread can freeze...
                                // mmap?
                                bytes = File.ReadAllBytes(args.filename);

                                // now broadcast in UDP-sized segments:
                                // 65507-byte datagrams at ~20 per second, so an
                                // 8MB panorama takes roughly 128 segments.

                                if (bytes.Length > 0)
                                {
                                    NetworkInterface.GetAllNetworkInterfaces().WithEach(
                                        n =>
                                    {
                                        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                                        // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                                        var IPProperties    = n.GetIPProperties();
                                        var PhysicalAddress = n.GetPhysicalAddress();



                                        foreach (var ip in IPProperties.UnicastAddresses)
                                        {
                                            // ipv4
                                            if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                                            {
                                                if (!IPAddress.IsLoopback(ip.Address))
                                                {
                                                    if (n.SupportsMulticast)
                                                    {
                                                        //fWASDC(ip.Address);
                                                        //fParallax(ip.Address);
                                                        //fvertexTransform(ip.Address);
                                                        //sendTracking(ip.Address);

                                                        var port = new Random().Next(16000, 40000);

                                                        //new IHTMLPre { "about to bind... " + new { port } }.AttachToDocument();

                                                        // where is bind async?
                                                        var socket = new UdpClient(
                                                            new IPEndPoint(ip.Address, port)
                                                            );


                                                        //// who is on the other end?
                                                        //var nmessage = args.x + ":" + args.y + ":" + args.z + ":0:" + args.filename;

                                                        //var data = Encoding.UTF8.GetBytes(nmessage);      //creates a variable b of type byte

                                                        // http://stackoverflow.com/questions/25841/maximum-buffer-length-for-sendto

                                                        new { }.With(
                                                            async delegate
                                                        {
                                                            // reached too far?
                                                            if (bytes.Length == 0)
                                                            {
                                                                return;
                                                            }

                                                            var current0 = current;

                                                            var r        = new MemoryStream(bytes);
                                                            uploadLength = r.Length;

                                                            var data = new byte[65507];

                                                            next:

                                                            if (current0 != current)
                                                            {
                                                                return;
                                                            }

                                                            var cc = r.Read(data, 0, data.Length);

                                                            uploadPosition = r.Position;

                                                            if (cc <= 0)
                                                            {
                                                                return;
                                                            }

                                                            //new IHTMLPre { "about to send... " + new { data.Length } }.AttachToDocument();

                                                            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeUDPNotification\ChromeUDPNotification\Application.cs
                                                            //Console.WriteLine("about to Send");
                                                            // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
                                                            await socket.SendAsync(
                                                                data,
                                                                cc,
                                                                hostname: "239.1.2.3",
                                                                port: 49000
                                                                );

                                                            //await Task.Delay(1000 / 15);
                                                            //await Task.Delay(1000 / 30);

                                                            // pacing at ~20 datagrams/sec showed no corruption
                                                            await Task.Delay(1000 / 20);

                                                            goto next;
                                                        }
                                                            );

                                                        //socket.Close();
                                                    }
                                                }
                                            }
                                        }
                                    }
                                        );
                                }
                            }
                        }
                    }
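                    // Receiver-side reassembly sketch for the segment stream
                    // above (assumption: in-order delivery on a quiet LAN; the
                    // protocol has no sequence header, so a lost or reordered
                    // packet corrupts the image until the next file change):
                    //
                    //var image = new MemoryStream();
                    //var listener = new UdpClient(49000);
                    //listener.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"));
                    //while (true)
                    //{
                    //    var segment = await listener.ReceiveAsync();
                    //    image.Write(segment.Buffer, 0, segment.Buffer.Length);
                    //    // the tail datagram is generally shorter than 65507 bytes
                    //    if (segment.Buffer.Length < 65507)
                    //    {
                    //        File.WriteAllBytes("0.jpg", image.ToArray());
                    //        image.SetLength(0);
                    //    }
                    //}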

                    if (uploadPosition < args.filesize)
                    {
                        mDraw.color = android.graphics.Color.YELLOW;
                    }
                    else
                    {
                        mDraw.color = android.graphics.Color.GREEN;
                    }

                    mDraw.postInvalidate();
                    Thread.Sleep(1000 / 30);
                    //Thread.Sleep(1000 / 2);
                    //Thread.Sleep(1000 / 15);
                }
            }
                ).Start();

            //this.ondispatchTouchEvent += @event =>
            //{

            //    int action = @event.getAction();
            //    float x = @event.getRawX();
            //    float y = @event.getRawY();
            //    //if (action == MotionEvent.ACTION_UP)
            //    {
            //        var halfx = 2560 / 2;
            //        var halfy = 1440 / 2;

            //        mDraw.x = (int)(500 + halfx - x);
            //        mDraw.y = (int)(600 + y - halfy);
            //        mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //        //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
            //    }

            //    // can we move hud around and record it to gif or mp4?

            //    return true;
            //};

            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs
            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));


            // "x:\util\android-sdk-windows\platform-tools\adb.exe" connect 192.168.1.126:5555
            // cmd /K x:\util\android-sdk-windows\platform-tools\adb.exe logcat -s "JniUtils"  "System.Console" "art"


            // E/JniUtils(14136): couldn't get isHybridApp, (Landroid/app/Activity;)Z

            //            I/Oculus360Photos( 9199): nativeSetAppInterface
            //I/App     ( 9199): VrAppInterface::SetActivity:
            //I/App     ( 9199): new AppLocal( 0xf51512b0 0xff8b6b80 0xeef69900 )
            //I/App     ( 9199): ----------------- AppLocal::AppLocal() -----------------
            //E/JniUtils( 9199): couldn't get getInternalCacheMemoryInBytes, (Landroid/app/Activity;)J

            //            I/JniUtils(26390): Using caller's JNIEnv
            //E/JniUtils(26390): couldn't get getInstalledPackagePath, (Ljava/lang/String;)Ljava/lang/String;

            //            I/System.Console( 3652): 0e44:0001 Searching installed packages for 'com.oculus.systemactivities'
            //I/JniUtils( 3652): ovr_GetCurrentPackageName() = OVROculus360PhotosHUD.Activities
            //I/JniUtils( 3652): ovr_GetPackageCodePath() = '/data/app/OVROculus360PhotosHUD.Activities-1/base.apk'
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //I/JniUtils( 3652): ovr_GetCurrentActivityName() = OVROculus360PhotosHUD.Activities.ApplicationActivity
            //I/JniUtils( 3652): ovr_GetCurrentPackageName() = OVROculus360PhotosHUD.Activities
            //E/JniUtils( 3652): couldn't get getLocalizedString, (Ljava/lang/String;)Ljava/lang/String;
            //I/JniUtils( 4380): ovr_GetCurrentActivityName() = com.oculus.home.HomeActivity

            // ffs
        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xCallback
            {
                onsurfaceCreated = holder =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceCreated(mNativeHandle, holder.getSurface());
                        mSurfaceHolder = holder;
                    }
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceChanged(mNativeHandle, holder.getSurface());
                        mSurfaceHolder = holder;
                    }
                },

                onsurfaceDestroyed = holder =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceDestroyed(mNativeHandle);
                        mSurfaceHolder = null;
                    }
                }
            };
            #endregion



            mView = new SurfaceView(this);
            this.setContentView(mView);



            //            E/AndroidRuntime(22718): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.view.ViewParent android.view.View.getParent()' on a null object reference
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addViewInner(ViewGroup.java:4216)
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addView(ViewGroup.java:4070)
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addView(ViewGroup.java:4046)
            //E/AndroidRuntime(22718):        at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:478)
            //E/AndroidRuntime(22718):        at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:459)
            //E/AndroidRuntime(22718):        at android.app.Activity.setContentView(Activity.java:2298)

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                text = () => sw.ElapsedMilliseconds + "ms !"
            };

            //Task.Run(

            new Thread(
                delegate()
            {
                // bg thread

                while (true)
                {
                    //Thread.Sleep(1000 / 15);
                    Thread.Sleep(1000 / 30);


                    mDraw.postInvalidate();
                }
            }
                ).Start();
            #endregion


            this.ondispatchTouchEvent = @event =>
            {
                if (mNativeHandle == 0)
                {
                    return;
                }

                int   action = @event.getAction();
                float x      = @event.getRawX();
                float y      = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    mDraw.x    = (int)(500 + halfx - x);
                    mDraw.y    = (int)(600 + y - halfy);
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                GLES3JNILib.onTouchEvent(mNativeHandle, action, x, y);

                // can we move hud around and record it to gif or mp4?
            };

            this.ondispatchKeyEvent = @event =>
            {
                if (mNativeHandle == 0)
                {
                    return(false);
                }

                int keyCode = @event.getKeyCode();
                int action  = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return(base.dispatchKeyEvent(@event));
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                GLES3JNILib.onKeyEvent(mNativeHandle, keyCode, action);

                return(true);
            };

            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs
            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

            mView.getHolder().addCallback(xCallback);

            // Force the screen to stay on, rather than letting it dim and shut off
            // while the user is watching a movie.

            // does this disable the face sensor?
            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            // Force screen brightness to stay at maximum
            //WindowManager_LayoutParams _params = getWindow().getAttributes();
            //_params.screenBrightness = 1.0f;
            //getWindow().setAttributes(_params);

            mNativeHandle = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

            // can we now overlay something on top of the surface?
        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            Console.WriteLine("enter OVRWindWheelActivity onCreate");

            // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/



            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xSurfaceHolder_Callback
            {
                onsurfaceCreated = holder =>
                {
                    Console.WriteLine("enter onsurfaceCreated " + new { appThread });
                    if (appThread == 0)
                    {
                        return;
                    }

                    // did we use it for float window?
                    //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);

                    GLES3JNILib.onSurfaceCreated(holder.getSurface());
                    xSurfaceHolder = holder;

                    //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (appThread == 0)
                    {
                        return;
                    }

                    GLES3JNILib.onSurfaceChanged(holder.getSurface());
                    xSurfaceHolder = holder;
                },

                onsurfaceDestroyed = holder =>
                {
                    //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset!
                    //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed

                    //Console.WriteLine("enter onsurfaceDestroyed");


                    if (appThread == 0)
                    {
                        return;
                    }


                    // I/DEBUG   ( 2079):     #01 pc 0000672f  /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
                    GLES3JNILib.onSurfaceDestroyed();
                    xSurfaceHolder = null;
                    //appThread = 0;

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
                }
            };
            #endregion

            // https://github.com/dalinaum/TextureViewDemo
            //  a TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
            // !! should we use TextureView instead?
            // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
            //this.xSurfaceView.setZOrderOnTop(true);    // necessary
            //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

            var ActivityPaused = true;



            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
            WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
            var         lo   = wifi.createMulticastLock("vrudp");
            lo.acquire();

            #region ReceiveAsync
            // https://www.youtube.com/watch?v=GpmKq_qg3Tk



            var HUDStylusList = new List <Action <android.graphics.Canvas> >();
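            // HUDStylusList is a bounded replay buffer: every received pen
            // segment is stored as a draw action and replayed on each frame;
            // the 1024 cap below lets old strokes fall off the canvas.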

            // http://uploadvr.com/vr-hmd-specs/

            Action <android.graphics.Canvas> HUDStylus = canvas =>
            {
                // video?
                // https://www.youtube.com/watch?v=JaTH_hoLDxc

                // so cool, we get to use the pen in VR!
                while (HUDStylusList.Count > 1024)
                {
                    HUDStylusList.RemoveAt(0);
                }


                foreach (var item in HUDStylusList)
                {
                    item(canvas);
                }
            };

            #region fUDPPressure
            Action <IPAddress> fUDPPressure = async nic =>
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
                var uu = new UdpClient(40094);

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
                //args.pre = "awaiting Parallax at " + nic + " :40094";

                var oldx = 0f;
                var oldy = 0f;

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                    // did we break async Continue ??
                    var ux = await uu.ReceiveAsync(); // did we jump to ui thread?


                    // discard input?
                    if (ActivityPaused)
                    {
                        continue;
                    }

                    // while we have the signal, turn on the torch.


                    var m = new BinaryReader(new MemoryStream(ux.Buffer));

                    var x0 = m.ReadSingle();

                    var x = 200 + x0 * 0.1f;

                    var y0 = m.ReadSingle();

                    var y = 1200 - y0 * 0.1f;

                    var pressure = m.ReadSingle();


                    new { x, y, oldx, oldy, pressure }.With(
                        segment =>
                    {
                        var paint = new android.graphics.Paint();

                        HUDStylusList.Add(
                            canvas =>
                        {
                            //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));

                            //c.lineWidth = 1 + (pressure / 255.0 * 7);
                            //
                            paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));

                            paint.setStyle(android.graphics.Paint.Style.STROKE);

                            if (pressure > 0)
                            {
                                paint.setColor(android.graphics.Color.YELLOW);
                            }
                            else
                            {
                                paint.setColor(android.graphics.Color.RED);
                            }

                            canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);

                            canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
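                            // the second drawLine repeats the stroke offset by
                            // half the 2560px panel width; presumably one copy
                            // per eye half of the stereo HUD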
                        }

                            );
                    }
                        );


                    oldx = x;
                    oldy = y;

                    args.pen = new { x, y, pressure }.ToString();

                    //Console.WriteLine(new { args.parallax });

                    //// or marshal memory?
                    //var xy = args.mouse.Split(':');


                    //args.mousey = int.Parse(xy[1]);

                    //// getchar?
                    //args.ad = int.Parse(xy[2]);
                    //args.ws = int.Parse(xy[3]);

                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                    //args.c = int.Parse(xy[4]);
                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                    //args.mousebutton = int.Parse(xy[5]);
                    //args.mousewheel = int.Parse(xy[6]);
                }
            };
            #endregion
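            // Matching sender sketch for fUDPPressure (assumption: a desktop
            // pen app multicasting to the 40094 listener above; the payload is
            // three little-endian singles: raw x, raw y, and a pressure that
            // the receiver scales by /255; rawX/rawY/rawPressure are hypothetical):
            //
            //var ms = new MemoryStream();
            //var w = new BinaryWriter(ms);
            //w.Write(rawX); w.Write(rawY); w.Write(rawPressure);
            //var payload = ms.ToArray();
            //new UdpClient().Send(payload, payload.Length, "239.1.2.3", 40094);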


            #region fParallax
            Action <IPAddress> fParallax = async nic =>
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
                var uu = new UdpClient(43834);

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
                args.parallax = "awaiting Parallax at " + nic + " :43834";

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                    // did we break async Continue ??
                    var x = await uu.ReceiveAsync(); // did we jump to ui thread?


                    // discard input?
                    if (ActivityPaused)
                    {
                        continue;
                    }

                    // while we have the signal, turn on the torch.

                    #region await webcam feed
                    if (nogc == null)
                    {
                        // partial ?
                        var camera = android.hardware.Camera.open();
                        android.hardware.Camera.Parameters p = camera.getParameters();
                        p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                        camera.setParameters(p);
                        camera.startPreview();

                        nogc = camera;
                    }
                    #endregion


                    //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                    args.parallax = Encoding.UTF8.GetString(x.Buffer);

                    var xy = args.parallax.Split(':');

                    //Console.WriteLine(new { args.parallax });

                    //// or marshal memory?
                    //var xy = args.mouse.Split(':');

                    args.px = float.Parse(xy[1]);
                    args.py = float.Parse(xy[2]);
                    args.pz = float.Parse(xy[3]);

                    //args.mousey = int.Parse(xy[1]);

                    //// getchar?
                    //args.ad = int.Parse(xy[2]);
                    //args.ws = int.Parse(xy[3]);

                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                    //args.c = int.Parse(xy[4]);
                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                    //args.mousebutton = int.Parse(xy[5]);
                    //args.mousewheel = int.Parse(xy[6]);
                }
            };
            #endregion
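            // Matching sender sketch for fParallax (assumption: the first ':'
            // token is a label, since only xy[1..3] are parsed above;
            // px/py/pz are hypothetical floats):
            //
            //var msg = "parallax:" + px + ":" + py + ":" + pz;
            //var data = Encoding.UTF8.GetBytes(msg);
            //new UdpClient().Send(data, data.Length, "239.1.2.3", 43834);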

            #region fWASDC
            var fWASDCport            = 41814;
            Action <IPAddress> fWASDC = async nic =>
            {
                var uu = new UdpClient(fWASDCport);

                args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    var x = await uu.ReceiveAsync();  // did we jump to ui thread?

                    //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                    args.mouse = Encoding.UTF8.GetString(x.Buffer);

                    // or marshal memory?
                    var xy = args.mouse.Split(':');

                    args.mousex = int.Parse(xy[0]);
                    args.mousey = int.Parse(xy[1]);

                    // getchar?
                    args.ad = int.Parse(xy[2]);
                    args.ws = int.Parse(xy[3]);

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                    args.c = int.Parse(xy[4]);
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                    args.mousebutton = int.Parse(xy[5]);
                    args.mousewheel  = int.Parse(xy[6]);
                }
            };
            #endregion
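            // Matching sender sketch for fWASDC; the seven ':'-separated int
            // fields parsed above are mousex:mousey:ad:ws:c:mousebutton:mousewheel
            // (the variable names below are illustrative):
            //
            //var msg = mousex + ":" + mousey + ":" + ad + ":" + ws + ":" + c + ":" + mousebutton + ":" + mousewheel;
            //var data = Encoding.UTF8.GetBytes(msg);
            //new UdpClient().Send(data, data.Length, "239.1.2.3", fWASDCport);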



            #region fvertexTransform
            // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
            Action <IPAddress> fvertexTransform = async nic =>
            {
                var uu = new UdpClient(40014);

                //args.mouse = "awaiting vertexTransform at " + nic + " :40014";

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    var x = await uu.ReceiveAsync(); // did we jump to ui thread?

                    //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                    args.vertexTransform = x.Buffer;
                }
            };
            #endregion


            NetworkInterface.GetAllNetworkInterfaces().WithEach(
                n =>
            {
                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                var IPProperties    = n.GetIPProperties();
                var PhysicalAddress = n.GetPhysicalAddress();



                foreach (var ip in IPProperties.UnicastAddresses)
                {
                    // ipv4
                    if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                    {
                        if (!IPAddress.IsLoopback(ip.Address))
                        {
                            if (n.SupportsMulticast)
                            {
                                fUDPPressure(ip.Address);
                                fWASDC(ip.Address);
                                fParallax(ip.Address);
                                fvertexTransform(ip.Address);
                            }
                        }
                    }
                }
            }
                );
            #endregion
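            // The IPv4/multicast NIC scan above is repeated verbatim for each
            // listener in this file; a small helper could fold it into one call
            // (a sketch, not part of the original sources):
            //
            //Action<Action<IPAddress>> WithMulticastIPv4 = f =>
            //    NetworkInterface.GetAllNetworkInterfaces().WithEach(n =>
            //    {
            //        foreach (var ip in n.GetIPProperties().UnicastAddresses)
            //            if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork
            //                && !IPAddress.IsLoopback(ip.Address)
            //                && n.SupportsMulticast)
            //                f(ip.Address);
            //    });
            //
            //WithMulticastIPv4(fUDPPressure);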


            var sw = Stopwatch.StartNew();

            //var args = new object();

            // can we draw on back?



            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                // (out) VrApi.vrapi_GetVersionString()
                text = () =>
                {
                    // can we listen to udp?
                    // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
                    // in VR, if the other service is running, it can display a VR notification

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
                    // lets run it, and see if we can see some vr notifications as we skip a video



                    //if (args.total_allocated_space > 48 * 1024 * 1024)
                    //    this.recreate();


                    return
                        (sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n"
                         + new { vertexTransform = args.vertexTransform.Length } + "\n"
                         + args.mouse + "\n"
                         + args.parallax + "\n"
                         + args.vertexTransform.Length + "bytes udp\n"
                         + new { args.pen } + "\n"
                         //+ new { args.mousex, args.mousey } + "\n"
                         + new
                    {
                        //args.mousex,

                        // left to right
                        //args.x,
                        //args.px,

                        args.px,
                        args.py,
                        args.pz,


                        // nod up +0.7 down -0.7
                        ox = args.tracking_HeadPose_Pose_Orientation_x,

                        // -0.7 right +0.7 left
                        oy = args.tracking_HeadPose_Pose_Orientation_y

                             // tilt right -0.7 tilt left + 0.7
                             //oz = args.tracking_HeadPose_Pose_Orientation_z

                             // ??
                             //ow = args.tracking_HeadPose_Pose_Orientation_w
                    }.ToString().Replace(",", "\n"));
                }
            };

            //Task.Run(


            Func <string> safemode = () =>
            {
                return
                    (sw.ElapsedMilliseconds + "ms \n"
                     + args.total_allocated_space + " bytes \n"
                     + "GC safe mode / malloc limit..");
            };



            //    canvas.drawText(text, x + 2560 / 2, y + i * 24, paint);
            mDraw.AtDraw = canvas =>
            {
                {
                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(16);
                    paint.setStyle(android.graphics.Paint.Style.STROKE);

                    paint.setColor(android.graphics.Color.RED);

                    canvas.drawLine(0, 0, 400, 400, paint);

                    canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint);


                    HUDStylus(canvas);
                }


                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay
                // can we visually store tracking intel, like TVs do?
                {
                    // https://code.google.com/p/android/issues/detail?id=4086

                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(0);
                    paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

                    // lets have left to right recorder as a color block

                    //// nod up +0.7 down -0.7
                    // cannot see it.
                    var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4);



                    // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e
                    // why wont our tracking correctly show?
                    //Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8"));

                    //paint.setColor(android.graphics.Color.YELLOW);
                    paint.setColor(
                        (int)(0xff000000 | rgb_left_to_right));


                    canvas.drawRect(16, 0, 32, 32, paint);
                }
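                // The mapping above is linear: orientation x in [-0.7, +0.7]
                // becomes 0..0xFFFFFF, so looking straight ahead (x = 0) gives
                // (0.7 / 1.4) * 0xFFFFFF = 0x7FFFFF, a mid-range color.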

                //       ox = args.tracking_HeadPose_Pose_Orientation_x,

                //       oy = args.tracking_HeadPose_Pose_Orientation_y

                {
                    // https://code.google.com/p/android/issues/detail?id=4086

                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(0);
                    paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
                    //paint.setColor(android.graphics.Color.RED);

                    // lets have left to right recorder as a color block

                    //       // -0.7 right +0.7 left
                    var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4);

                    //paint.setColor(android.graphics.Color.YELLOW);
                    paint.setColor(
                        (int)(0xff000000 | rgb_left_to_right));

                    canvas.drawRect(16 + 64, 0, 320, 32, paint);
                }
            };

            new Thread(
                delegate()
            {
                // bg thread

                while (true)
                {
                    //Thread.Sleep(1000 / 15);
                    //Thread.Sleep(1000 / 30);

                    // fullspeed



                    GLES3JNILib.stringFromJNI(args);

                    // http://developer.android.com/reference/android/graphics/Color.html
                    if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024)
                    {
                        mDraw.color = android.graphics.Color.RED;
                        mDraw.alpha = 255;

                        mDraw.text = safemode;
                        // goto secondary activity?
                    }
                    else if (args.mousebutton != 0)
                    {
                        // go a head. lean left or up
                        mDraw.color = android.graphics.Color.YELLOW;
                        mDraw.alpha = 255;
                    }
                    else
                    {
                        mDraw.color = android.graphics.Color.GREEN;

                        // not leaning in?
                        if (args.pz < 0)
                        {
                            mDraw.color = android.graphics.Color.WHITE;
                        }

                        var BaseStationEdgeX = Math.Abs(args.px) > 0.3;
                        var BaseStationEdgeY = Math.Abs(args.py) > 0.3;

                        if (BaseStationEdgeX ||
                            BaseStationEdgeY
                            )
                        {
                            // base station wont track ya for long..
                            // reorient?
                            // fade to black?
                            mDraw.color = android.graphics.Color.YELLOW;
                            mDraw.alpha = 255;
                        }
                    }

                    mDraw.postInvalidate();

                    Thread.Sleep(1000 / 60);

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity
                    //Thread.Sleep(1000 / 15);
                    //Thread.Sleep(1000 / 4);
                }
            }
                ).Start();
            #endregion

            #region ondispatchTouchEvent
            this.ondispatchTouchEvent = @event =>
            {
                if (appThread == 0)
                {
                    return;
                }

                int   action = @event.getAction();
                float x      = @event.getRawX();
                float y      = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    // touch sending int to offfset the cubes
                    this.args.x = (int)(halfx - x);
                    this.args.y = (int)(y - halfy);

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                GLES3JNILib.onTouchEvent(action, x, y);

                // can we move hud around and record it to gif or mp4?
            };
            #endregion

            #region ondispatchKeyEvent
            this.ondispatchKeyEvent = @event =>
            {
                if (appThread == 0)
                {
                    return(false);
                }

                int keyCode = @event.getKeyCode();
                int action  = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return(base.dispatchKeyEvent(@event));
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                GLES3JNILib.onKeyEvent(keyCode, action);

                return(true);
            };
            #endregion


            AtPause = delegate
            {
                ActivityPaused = true;
                GLES3JNILib.onPause();


                // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/

                if (nogc != null)
                {
                    var camera = nogc;
                    var p      = camera.getParameters();
                    p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
                    camera.setParameters(p);
                    camera.stopPreview();

                    camera.release();
                    nogc = null;
                }
            };

            AtResume = delegate
            {
                //Console.WriteLine("enter onResume");
                ActivityPaused = false;

                // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
                // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
                //this.xSurfaceView.onres

                // You must ensure that the drawing thread only touches the underlying Surface while it is valid

                this.xSurfaceView = new SurfaceView(this);

                this.setContentView(xSurfaceView);
                this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

                this.xSurfaceView.getHolder().addCallback(xCallback);

                GLES3JNILib.onResume();
            };

            // can we add a camera too?

            //  stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
            //this.setContentView(mDraw);
            //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));


            // sometimes system wants to try to black the screen it seems..
            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

            Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");
        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            Console.WriteLine("enter OVRVrCubeWorldSurfaceViewX onCreate");




            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xCallback
            {
                onsurfaceCreated = holder =>
                {
                    //Console.WriteLine("enter onsurfaceCreated " + new { appThread });
                    if (appThread == 0)
                        return;

                    appThread.onSurfaceCreated(holder.getSurface());
                    mSurfaceHolder = holder;

                    //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (appThread == 0)
                        return;

                    appThread.onSurfaceChanged(holder.getSurface());
                    mSurfaceHolder = holder;
                },

                onsurfaceDestroyed = holder =>
                {
                    if (appThread == 0)
                        return;

                    appThread.onSurfaceDestroyed();
                    mSurfaceHolder = null;
                }
            };
            #endregion




            mView = new SurfaceView(this);
            this.setContentView(mView);

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                // (out) VrApi.vrapi_GetVersionString()
                text = () => sw.ElapsedMilliseconds + "ms " + GLES3JNILib.stringFromJNI()
            };

            //Task.Run(

            new Thread(
                delegate()
                {
                    // bg thread

                    while (true)
                    {
                        //Thread.Sleep(1000 / 15);
                        Thread.Sleep(1000 / 30);


                        mDraw.postInvalidate();
                    }
                }
            ).Start();
            #endregion
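
            // note: postInvalidate() is the thread-safe variant of invalidate();
            // it may be called from this background thread and schedules the
            // redraw on the UI thread.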

            #region ondispatchTouchEvent
            this.ondispatchTouchEvent = @event =>
            {
                if (appThread == 0)
                    return;

                int action = @event.getAction();
                float x = @event.getRawX();
                float y = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                appThread.onTouchEvent(action, x, y);

                // can we move hud around and record it to gif or mp4?
            };
            #endregion

            #region ondispatchKeyEvent
            this.ondispatchKeyEvent = @event =>
            {
                if (appThread == 0)
                    return false;

                int keyCode = @event.getKeyCode();
                int action = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return base.dispatchKeyEvent(@event);
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4 == KeyEvent.KEYCODE_BACK
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                appThread.onKeyEvent(keyCode, action);

                return true;
            };
            #endregion


            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

            mView.getHolder().addCallback(xCallback);

            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

            Console.WriteLine("after OVRVrCubeWorldSurfaceViewX onCreate, attach the headset!");

        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            Console.WriteLine("enter OVRWindWheelActivity onCreate");

            // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/




            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xSurfaceHolder_Callback
            {
                onsurfaceCreated = holder =>
                {
                    Console.WriteLine("enter onsurfaceCreated " + new { appThread });
                    if (appThread == 0)
                        return;

                    // did we use it for float window?
                    //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);

                    GLES3JNILib.onSurfaceCreated(holder.getSurface());
                    xSurfaceHolder = holder;

                    //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (appThread == 0)
                        return;

                    GLES3JNILib.onSurfaceChanged(holder.getSurface());
                    xSurfaceHolder = holder;
                },

                onsurfaceDestroyed = holder =>
                {
                    //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset!
                    //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed

                    //Console.WriteLine("enter onsurfaceDestroyed");


                    if (appThread == 0)
                        return;


                    // I/DEBUG   ( 2079):     #01 pc 0000672f  /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
                    GLES3JNILib.onSurfaceDestroyed();
                    xSurfaceHolder = null;
                    //appThread = 0;

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
                }
            };
            #endregion

            // https://github.com/dalinaum/TextureViewDemo
            //  TextureView semi-translucent by calling myView.setAlpha(0.5f).
            // !! should we use TextureView instead?
            // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
            //this.xSurfaceView.setZOrderOnTop(true);    // necessary
            //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

            var ActivityPaused = true;





            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
            WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
            var lo = wifi.createMulticastLock("vrudp");
            lo.acquire();

            #region ReceiveAsync
            // https://www.youtube.com/watch?v=GpmKq_qg3Tk




            var HUDStylusList = new List<Action<android.graphics.Canvas>>();

            // http://uploadvr.com/vr-hmd-specs/

            Action<android.graphics.Canvas> HUDStylus = canvas =>
            {
                // video?
                // https://www.youtube.com/watch?v=JaTH_hoLDxc

                // so cool. we get to use the pen in VR!
                while (HUDStylusList.Count > 1024)
                    HUDStylusList.RemoveAt(0);


                foreach (var item in HUDStylusList)
                {
                    item(canvas);

                }
            };

            #region fUDPPressure
            Action<IPAddress> fUDPPressure = async nic =>
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
                var uu = new UdpClient(40094);

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
                //args.pre = "awaiting Parallax at " + nic + " :40094";

                var oldx = 0f;
                var oldy = 0f;

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                    // did we break the async continuation??
                    var ux = await uu.ReceiveAsync(); // did we jump to ui thread?


                    // discard input?
                    if (ActivityPaused)
                        continue;

                    // while we have the signal, turn on the torch.


                    var m = new BinaryReader(new MemoryStream(ux.Buffer));

                    var x0 = m.ReadSingle();

                    var x = 200 + x0 * 0.1f;

                    var y0 = m.ReadSingle();

                    var y = 1200 - y0 * 0.1f;

                    var pressure = m.ReadSingle();


                    new { x, y, oldx, oldy, pressure }.With(
                        segment =>
                        {
                            var paint = new android.graphics.Paint();

                            HUDStylusList.Add(
                                canvas =>
                                {
                                    //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));

                                    //c.lineWidth = 1 + (pressure / 255.0 * 7);
                                    // 
                                    paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));

                                    paint.setStyle(android.graphics.Paint.Style.STROKE);

                                    if (pressure > 0)
                                        paint.setColor(android.graphics.Color.YELLOW);
                                    else
                                        paint.setColor(android.graphics.Color.RED);

                                    canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);

                                    canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                                }

                            );
                        }
                    );


                    oldx = x;
                    oldy = y;

                    args.pen = new { x, y, pressure }.ToString();

                    //Console.WriteLine(new { args.parallax });

                    //// or marshal memory?
                    //var xy = args.mouse.Split(':');


                    //args.mousey = int.Parse(xy[1]);

                    //// getchar?
                    //args.ad = int.Parse(xy[2]);
                    //args.ws = int.Parse(xy[3]);

                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                    //args.c = int.Parse(xy[4]);
                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                    //args.mousebutton = int.Parse(xy[5]);
                    //args.mousewheel = int.Parse(xy[6]);
                }
            };
            #endregion
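
            // a minimal sender sketch for the pen packet above, assuming the same
            // layout (three little-endian singles: raw x, raw y, pressure) and the
            // multicast group 239.1.2.3:40094; values and names here are illustrative.
            //
            //  using (var pen = new UdpClient())
            //  {
            //      var ms = new MemoryStream();
            //      var w = new BinaryWriter(ms);
            //      w.Write(1000f);   // raw x, scaled by 0.1 on the receiver
            //      w.Write(2000f);   // raw y
            //      w.Write(128f);    // pressure, 0..255
            //
            //      var payload = ms.ToArray();
            //      pen.Send(payload, payload.Length, "239.1.2.3", 40094);
            //  }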


            #region fParallax
            Action<IPAddress> fParallax = async nic =>
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
                var uu = new UdpClient(43834);

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
                args.parallax = "awaiting Parallax at " + nic + " :43834";

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                    // did we break the async continuation??
                    var x = await uu.ReceiveAsync(); // did we jump to ui thread?


                    // discard input?
                    if (ActivityPaused)
                        continue;

                    // while we have the signal, turn on the torch.

                    #region await webcam feed
                    if (nogc == null)
                    {

                        // partial ?
                        var camera = android.hardware.Camera.open();
                        android.hardware.Camera.Parameters p = camera.getParameters();
                        p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                        camera.setParameters(p);
                        camera.startPreview();

                        nogc = camera;
                    }
                    #endregion


                    //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                    args.parallax = Encoding.UTF8.GetString(x.Buffer);

                    var xy = args.parallax.Split(':');

                    //Console.WriteLine(new { args.parallax });

                    //// or marshal memory?
                    //var xy = args.mouse.Split(':');

                    args.px = float.Parse(xy[1]);
                    args.py = float.Parse(xy[2]);
                    args.pz = float.Parse(xy[3]);

                    //args.mousey = int.Parse(xy[1]);

                    //// getchar?
                    //args.ad = int.Parse(xy[2]);
                    //args.ws = int.Parse(xy[3]);

                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                    //args.c = int.Parse(xy[4]);
                    //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                    //args.mousebutton = int.Parse(xy[5]);
                    //args.mousewheel = int.Parse(xy[6]);
                }
            };
            #endregion
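
            // a hedged sender sketch for the parallax packet above; the receiver
            // only parses fields 1..3 as floats, so field 0 is assumed to be a label.
            //
            //  using (var u = new UdpClient())
            //  {
            //      var msg = "parallax:" + px + ":" + py + ":" + pz;
            //      var data = Encoding.UTF8.GetBytes(msg);
            //      u.Send(data, data.Length, "239.1.2.3", 43834);
            //  }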

            #region fWASDC
            var fWASDCport = 41814;
            Action<IPAddress> fWASDC = async nic =>
             {
                 var uu = new UdpClient(fWASDCport);

                 args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;

                 // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                 // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                 uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                 while (true)
                 {
                     var x = await uu.ReceiveAsync(); // did we jump to ui thread?
                     //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                     args.mouse = Encoding.UTF8.GetString(x.Buffer);

                     // or marshal memory?
                     var xy = args.mouse.Split(':');

                     args.mousex = int.Parse(xy[0]);
                     args.mousey = int.Parse(xy[1]);

                     // getchar?
                     args.ad = int.Parse(xy[2]);
                     args.ws = int.Parse(xy[3]);

                     // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                     args.c = int.Parse(xy[4]);
                     // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                     args.mousebutton = int.Parse(xy[5]);
                     args.mousewheel = int.Parse(xy[6]);
                 }
             };
            #endregion
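
            // a hedged sender sketch for the mouse/WASDC packet above: seven
            // colon-separated ints, mousex:mousey:ad:ws:c:mousebutton:mousewheel.
            //
            //  using (var u = new UdpClient())
            //  {
            //      var msg = string.Join(":", new[] { mousex, mousey, ad, ws, c, mousebutton, mousewheel });
            //      var data = Encoding.UTF8.GetBytes(msg);
            //      u.Send(data, data.Length, "239.1.2.3", fWASDCport);
            //  }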



            #region fvertexTransform
            // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
            Action<IPAddress> fvertexTransform = async nic =>
            {
                var uu = new UdpClient(40014);

                //args.mouse = "awaiting vertexTransform at " + nic + " :40014";

                // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
                while (true)
                {
                    var x = await uu.ReceiveAsync(); // did we jump to ui thread?
                    //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                    args.vertexTransform = x.Buffer;


                }
            };
            #endregion
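
            // the receiver above just stores the raw datagram in args.vertexTransform,
            // so the layout is whatever the native side expects. assuming a packed
            // float[16] transform (per the OVRUDPMatrix example referenced above),
            // a sender sketch could look like:
            //
            //  using (var u = new UdpClient())
            //  {
            //      var matrix = new float[16]; // filled with the transform to apply
            //      var ms = new MemoryStream();
            //      var w = new BinaryWriter(ms);
            //      foreach (var e in matrix)
            //          w.Write(e);
            //      var data = ms.ToArray();
            //      u.Send(data, data.Length, "239.1.2.3", 40014);
            //  }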


            NetworkInterface.GetAllNetworkInterfaces().WithEach(
                n =>
                {
                    // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                    // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                    var IPProperties = n.GetIPProperties();
                    var PhysicalAddress = n.GetPhysicalAddress();



                    foreach (var ip in IPProperties.UnicastAddresses)
                    {
                        // ipv4
                        if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                        {
                            if (!IPAddress.IsLoopback(ip.Address))
                                if (n.SupportsMulticast)
                                {
                                    fUDPPressure(ip.Address);
                                    fWASDC(ip.Address);
                                    fParallax(ip.Address);
                                    fvertexTransform(ip.Address);
                                }
                        }
                    }




                }
            );
            #endregion
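
            // the interface walk above repeats twice more below; a hedged helper
            // (hypothetical name) factoring out the pattern:
            //
            //  static void ForEachMulticastIPv4(Action<IPAddress> use)
            //  {
            //      foreach (var n in NetworkInterface.GetAllNetworkInterfaces())
            //          foreach (var ip in n.GetIPProperties().UnicastAddresses)
            //              if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
            //                  if (!IPAddress.IsLoopback(ip.Address) && n.SupportsMulticast)
            //                      use(ip.Address);
            //  }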


            var sw = Stopwatch.StartNew();

            //var args = new object();

            // can we draw on back?




            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                // (out) VrApi.vrapi_GetVersionString()
                text = () =>
                {
                    // can we listen to udp?
                    // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
                    // in vr if the other service is running it can display vr notification

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
                    // let's run it and see if we get some VR notifications as we skip a video



                    //if (args.total_allocated_space > 48 * 1024 * 1024)
                    //    this.recreate();


                    return
                        sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n"
                        + new { vertexTransform = args.vertexTransform.Length } + "\n"
                        + args.mouse + "\n"
                        + args.parallax + "\n"
                        + args.vertexTransform.Length + "bytes udp\n"
                        + new { args.pen } + "\n"
                        //+ new { args.mousex, args.mousey } + "\n"
                        + new
                        {
                            //args.mousex,

                            // left to right
                            //args.x,
                            //args.px,

                            args.px,
                            args.py,
                            args.pz,


                            // nod up +0.7 down -0.7
                            ox = args.tracking_HeadPose_Pose_Orientation_x,

                            // -0.7 right +0.7 left
                            oy = args.tracking_HeadPose_Pose_Orientation_y

                            // tilt right -0.7 tilt left + 0.7
                            //oz = args.tracking_HeadPose_Pose_Orientation_z

                            // ??
                            //ow = args.tracking_HeadPose_Pose_Orientation_w
                        }.ToString().Replace(",", "\n");
                }
            };

            //Task.Run(


            Func<string> safemode = () =>
            {
                return
                    sw.ElapsedMilliseconds + "ms \n"
                        + args.total_allocated_space + " bytes \n"
                    + "GC safe mode / malloc limit..";
            };




            //    canvas.drawText(text, x + 2560 / 2, y + i * 24, paint);
            mDraw.AtDraw = canvas =>
            {


                {
                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(16);
                    paint.setStyle(android.graphics.Paint.Style.STROKE);

                    paint.setColor(android.graphics.Color.RED);

                    canvas.drawLine(0, 0, 400, 400, paint);

                    canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint);


                    HUDStylus(canvas);
                }


                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay
                // can we visually store tracking intel, like TVs do?
                {
                    // https://code.google.com/p/android/issues/detail?id=4086

                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(0);
                    paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

                    // let's have a left-to-right recorder as a color block

                    //// nod up +0.7 down -0.7
                    // cannot see it.
                    var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4);



                    // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e
                    // why won't our tracking show correctly?
                    //Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8"));

                    //paint.setColor(android.graphics.Color.YELLOW);
                    paint.setColor(
                        (int)(0xff000000 | rgb_left_to_right));


                    canvas.drawRect(16, 0, 32, 32, paint);
                }

                //       ox = args.tracking_HeadPose_Pose_Orientation_x,

                //       oy = args.tracking_HeadPose_Pose_Orientation_y

                {
                    // https://code.google.com/p/android/issues/detail?id=4086

                    var paint = new android.graphics.Paint();


                    paint.setStrokeWidth(0);
                    paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
                    //paint.setColor(android.graphics.Color.RED);

                    // let's have a left-to-right recorder as a color block

                    //       // -0.7 right +0.7 left
                    var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4);

                    //paint.setColor(android.graphics.Color.YELLOW);
                    paint.setColor(
                        (int)(0xff000000 | rgb_left_to_right));

                    canvas.drawRect(16 + 64, 0, 320, 32, paint);
                }
            };
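
            // the two blocks above map an orientation component in [-0.7 .. +0.7]
            // onto an opaque ARGB color; a hedged helper (hypothetical name) for
            // that mapping:
            //
            //  static int OrientationToArgb(float o)
            //  {
            //      var rgb = (int)(0xffffff * (o + 0.7) / 1.4);
            //      return (int)(0xff000000 | rgb);
            //  }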

            new Thread(
                delegate()
                {
                    // bg thread

                    while (true)
                    {
                        //Thread.Sleep(1000 / 15);
                        //Thread.Sleep(1000 / 30);

                        // fullspeed



                        GLES3JNILib.stringFromJNI(args);

                        // http://developer.android.com/reference/android/graphics/Color.html
                        if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024)
                        {
                            mDraw.color = android.graphics.Color.RED;
                            mDraw.alpha = 255;

                            mDraw.text = safemode;
                            // goto secondary activity?
                        }
                        else if (args.mousebutton != 0)
                        {
                            // go ahead: lean left or up
                            mDraw.color = android.graphics.Color.YELLOW;
                            mDraw.alpha = 255;
                        }
                        else
                        {
                            mDraw.color = android.graphics.Color.GREEN;

                            // not leaning in?
                            if (args.pz < 0)
                            {
                                mDraw.color = android.graphics.Color.WHITE;
                            }

                            var BaseStationEdgeX = Math.Abs(args.px) > 0.3;
                            var BaseStationEdgeY = Math.Abs(args.py) > 0.3;

                            if (BaseStationEdgeX
                                || BaseStationEdgeY
                                )
                            {
                                // the base station won't track you for long..
                                // reorient?
                                // fade to black?
                                mDraw.color = android.graphics.Color.YELLOW;
                                mDraw.alpha = 255;

                            }
                        }

                        mDraw.postInvalidate();

                        Thread.Sleep(1000 / 60);

                        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity
                        //Thread.Sleep(1000 / 15);
                        //Thread.Sleep(1000 / 4);
                    }
                }
            ).Start();
            #endregion

            #region ondispatchTouchEvent
            this.ondispatchTouchEvent = @event =>
            {
                if (appThread == 0)
                    return;

                int action = @event.getAction();
                float x = @event.getRawX();
                float y = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    // touch sends ints to offset the cubes
                    this.args.x = (int)(halfx - x);
                    this.args.y = (int)(y - halfy);

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                GLES3JNILib.onTouchEvent(action, x, y);

                // can we move hud around and record it to gif or mp4?
            };
            #endregion

            #region ondispatchKeyEvent
            this.ondispatchKeyEvent = @event =>
            {
                if (appThread == 0)
                    return false;

                int keyCode = @event.getKeyCode();
                int action = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return base.dispatchKeyEvent(@event);
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4 == KeyEvent.KEYCODE_BACK
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                GLES3JNILib.onKeyEvent(keyCode, action);

                return true;
            };
            #endregion


            AtPause = delegate
            {
                ActivityPaused = true;
                GLES3JNILib.onPause();


                // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/

                if (nogc != null)
                {
                    var camera = nogc;
                    var p = camera.getParameters();
                    p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
                    camera.setParameters(p);
                    camera.stopPreview();

                    camera.release();
                    nogc = null;
                }
            };

            AtResume = delegate
            {
                //Console.WriteLine("enter onResume");
                ActivityPaused = false;

                // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
                // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
                //this.xSurfaceView.onres

                // You must ensure that the drawing thread only touches the underlying Surface while it is valid

                this.xSurfaceView = new SurfaceView(this);

                this.setContentView(xSurfaceView);
                this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

                this.xSurfaceView.getHolder().addCallback(xCallback);

                GLES3JNILib.onResume();
            };

            // can we add a camera too?

            //  stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
            //this.setContentView(mDraw);
            //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));


            // the system sometimes tries to blank the screen; keep it on.
            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

            Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");

        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            Console.WriteLine("enter OVROculus360Photos ApplicationActivity onCreate");

            base.onCreate(savedInstanceState);




            Console.WriteLine("about to convince NDK what the first image should be...");
            // http://www.flightradar24.com/18.39,37.3/2

            // http://paulbourke.net/geometry/transformationprojection/

            // http://krpano.com/download/
            // http://unrealoldfriends.activeboard.com/t47250341/creating-background-using-spacescape/?page=1

            //Convert CUBE to SPHERE droplet


            //kcube2sphere 1.18.4 - 64bit (build 2015-04-23)
            //loading...
            //loading azi_l.jpg...
            //loading azi_f.jpg...
            //loading azi_r.jpg...
            //loading azi_b.jpg...
            //loading azi_u.jpg...
            //loading azi_d.jpg...
            //done.
            //making sphere azi_sphere.tif...
            //done.

            //Press any key to continue . . .










            //C:\Windows\system32> x:\util\android-sdk-windows\platform-tools\adb.exe push X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures  /sdcard/oculus/360Photos/
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_pz.jpg -> /sdcard/oculus/360Photos/azi_pz.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_py.jpg -> /sdcard/oculus/360Photos/azi_py.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_px.jpg -> /sdcard/oculus/360Photos/azi_px.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_nz.jpg -> /sdcard/oculus/360Photos/azi_nz.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_ny.jpg -> /sdcard/oculus/360Photos/azi_ny.jpg
            //push: X:\jsc.svn\examples\javascript\synergy\css\CSSAzimuthMapViz\CSSAzimuthMapViz\Textures/azi_nx.jpg -> /sdcard/oculus/360Photos/azi_nx.jpg
            //6 files pushed. 0 files skipped.
            //466 KB/s (969865 bytes in 2.030s)

            //C:\Windows\system32> x:\util\android-sdk-windows\platform-tools\adb.exe shell cp /sdcard/oculus/360Photos/humus.thm /sdcard/oculus/360Photos/azi.thm

            Action<string, string> copy =
                (from, to) =>
                {

                    try
                    {

                        // http://gis.stackexchange.com/questions/92907/re-project-raster-image-from-mercator-to-equirectangular

                        // https://en.wikipedia.org/wiki/List_of_map_projections
                        // Web Mercator
                        // https://xkcd.com/977/
                        // mercator?
                        var value = this.getResources().getAssets().open(from);
                        var s = new __NetworkStream { InternalInputStream = value };

                        // a single Read may return fewer bytes than requested, so
                        // loop to end of stream instead of sizing a one-shot buffer
                        // to the asset (was: new byte[4392914]).
                        var buffer = new byte[64 * 1024];
                        var m = new MemoryStream();

                        int len;
                        while ((len = s.Read(buffer, 0, buffer.Length)) > 0)
                            m.Write(buffer, 0, len);

                        File.WriteAllBytes(to, m.ToArray());

                    }
                    catch
                    {
                        Console.WriteLine("about to convince NDK what the first image should be... fault");

                    }

                };

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150807/ovroculus360photosndk
            copy("2_no_clouds_4k.jpg", "/sdcard/oculus/360Photos/0.jpg");
            //copy("1.jpg", "/sdcard/oculus/360Photos/1.jpg");



            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150724/invaders
            //copy("celestial-joshua-trees-milky-way-in-saline-va.jpg", "/sdcard/oculus/360Photos/2.jpg");


            //Implementation not found for type import :
            //type: System.IO.DirectoryInfo
            //method: System.IO.FileInfo[] GetFiles()
            //Did you forget to add the [Script] attribute?
            //Please double check the signature!

            //Path.get

            var emptyFiles =
                from pf in new DirectoryInfo("/sdcard/oculus/360Photos/").GetFiles()
                where pf.Extension.ToLower() == ".jpg"
                where pf.Length == 0
                select pf;

            foreach (var emptyFile in emptyFiles.ToArray())
            {
                Console.WriteLine(new { emptyFile });

                emptyFile.Delete();
            }


            Console.WriteLine("about to convince NDK what the first image should be... done");


            var intent = getIntent();
            var commandString = com.oculus.vrappframework.VrActivity.getCommandStringFromIntent(intent);
            var fromPackageNameString = com.oculus.vrappframework.VrActivity.getPackageStringFromIntent(intent);
            var uriString = com.oculus.vrappframework.VrActivity.getUriStringFromIntent(intent);

            // D/CrashAnrDetector( 3472):     #00 pc 00092ac0  /data/app/OVROculus360Photos.Activities-1/lib/arm/libmain.so (OVR::ovrMessageQueue::PostMessage(char const*, bool, bool)+8)


            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160103/oculus360photossdk
            this.appPtr = OVROculus360Photos.Activities.xMarshal.nativeSetAppInterface(
                this,
                fromPackageNameString,
                commandString,
                uriString
            );

            var args = new args
            {

            };

            var uploadLength = 0L;
            var uploadPosition = 0L;

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                text = () => sw.ElapsedMilliseconds + "ms "
                    //+ "\n " + Path.GetFileName(args.filename)
                    + "\n " + args.filename

                    + "\n " + new
                    {
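                        // note: the +1 on both terms avoids divide-by-zero before the first file is known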
                        upload = (int)(100 * (uploadPosition + 1) / (args.filesize + 1)) + "%",
                        uploadPosition,
                        args.filesize,

                        // can we capture pointer?

                        args.x,
                        args.y,
                        args.z,

                        //uploadLength
                    }.ToString().Replace(",", ",\n")

                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
            };
            #endregion


            //Task.Run(

            #region sendTracking
            Action<IPAddress> sendTracking = nic =>
            {
                var port = new Random().Next(16000, 40000);

                //new IHTMLPre { "about to bind... " + new { port } }.AttachToDocument();

                // where is bind async?
                var socket = new UdpClient(
                     new IPEndPoint(nic, port)
                    );


                // who is on the other end?
                var nmessage = args.x + ":" + args.y + ":" + args.z + ":0:" + args.filename;

                var data = Encoding.UTF8.GetBytes(nmessage); // encode the tracking message as UTF-8 bytes


                //new IHTMLPre { "about to send... " + new { data.Length } }.AttachToDocument();

                // X:\jsc.svn\examples\javascript\chrome\apps\ChromeUDPNotification\ChromeUDPNotification\Application.cs
                //Console.WriteLine("about to Send");
                // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
                socket.Send(
                     data,
                     data.Length,
                     hostname: "239.1.2.3",
                     port: 49834
                 );



                socket.Close();

            };
            #endregion
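
            // a hedged listener sketch for the tracking packet above
            // ("x:y:z:0:filename", UTF-8, multicast 239.1.2.3:49834); runs
            // inside an async method:
            //
            //  var uu = new UdpClient(49834);
            //  uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"));
            //  while (true)
            //  {
            //      var x = await uu.ReceiveAsync();
            //      var fields = Encoding.UTF8.GetString(x.Buffer).Split(':');
            //      // fields[0..2] = x, y, z; fields[3] = "0"; fields[4] = filename
            //  }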




            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient ctor
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient before this.Client
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient after this.Client { Client = ScriptCoreLibJava.BCLImplementation.System.Net.Sockets.__Socket@4f1c02b }
            //I/System.Console( 9109): 2395:1fb3 enter GetAllNetworkInterfaces
            //I/System.Console( 9109): 2395:1fb3 enter __UdpClient ctor

            string current = null;
            byte[] bytes = null;

            new Thread(
                delegate()
                {
                    // bg thread


                    // back off for 1 sec at startup
                    Thread.Sleep(1000);
                    // await the Gear VR being put on

                    while (true)
                    {
                        // collect tracking from ndk
                        // broadcast to udp


                        //Thread.Sleep(1000 / 15);

                        //var a = new
                        //{
                        //    // for java do we also do the fields?
                        //    x = 0
                        //};

                        args.filename = OVROculus360Photos.Activities.xMarshal.stringFromJNI(args);

                        //E/AndroidRuntime( 7601): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'char[] java.lang.String.toCharArray()' on a null object reference
                        //E/AndroidRuntime( 7601):        at java.io.File.fixSlashes(File.java:185)
                        //E/AndroidRuntime( 7601):        at java.io.File.<init>(File.java:134)
                        //E/AndroidRuntime( 7601):        at ScriptCoreLibJava.BCLImplementation.System.IO.__File.Exists(__File.java:57)
                        //E/AndroidRuntime( 7601):        at OVROculus360PhotosHUD.Activities.ApplicationActivity___c__DisplayClass1d._onCreate_b__1b(ApplicationActivity___c__DisplayClass1d.java:95)



                        // uplink 144Mbps
                        // 18 MBps
                        #region udp broadcast
                        // overkill at 60hz
                        NetworkInterface.GetAllNetworkInterfaces().WithEach(
                             n =>
                             {
                                 // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                                 // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                                 var IPProperties = n.GetIPProperties();
                                 var PhysicalAddress = n.GetPhysicalAddress();



                                 foreach (var ip in IPProperties.UnicastAddresses)
                                 {
                                     // ipv4
                                     if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                                     {
                                         if (!IPAddress.IsLoopback(ip.Address))
                                             if (n.SupportsMulticast)
                                             {
                                                 //fWASDC(ip.Address);
                                                 //fParallax(ip.Address);
                                                 //fvertexTransform(ip.Address);
                                                 sendTracking(ip.Address);
                                             }
                                     }
                                 }




                             }
                         );



                        #endregion

                        if (args.filename != null)
                            if (File.Exists(args.filename))
                            {
                                if (current != args.filename)
                                {
                                    current = args.filename;

                                    var ff = new FileInfo(args.filename);

                                    args.filesize = ff.Length;

                                    // we are not on ui thread.
                                    // HUD thread can freeze...
                                    // mmap?
                                    bytes = File.ReadAllBytes(args.filename);

                                    // now broadcast. at 500KBps in segments.
                                    // 8MB is 16 segments then.

                                    if (bytes.Length > 0)
                                        NetworkInterface.GetAllNetworkInterfaces().WithEach(
                                              n =>
                                              {
                                                  // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                                                  // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                                                  var IPProperties = n.GetIPProperties();
                                                  var PhysicalAddress = n.GetPhysicalAddress();



                                                  foreach (var ip in IPProperties.UnicastAddresses)
                                                  {
                                                      // ipv4
                                                      if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                                                      {
                                                          if (!IPAddress.IsLoopback(ip.Address))
                                                              if (n.SupportsMulticast)
                                                              {
                                                                  //fWASDC(ip.Address);
                                                                  //fParallax(ip.Address);
                                                                  //fvertexTransform(ip.Address);
                                                                  //sendTracking(ip.Address);

                                                                  var port = new Random().Next(16000, 40000);

                                                                  //new IHTMLPre { "about to bind... " + new { port } }.AttachToDocument();

                                                                  // where is bind async?
                                                                  var socket = new UdpClient(
                                                                       new IPEndPoint(ip.Address, port)
                                                                      );


                                                                  //// who is on the other end?
                                                                  //var nmessage = args.x + ":" + args.y + ":" + args.z + ":0:" + args.filename;

                                                                  //var data = Encoding.UTF8.GetBytes(nmessage);      //creates a variable b of type byte

                                                                  // http://stackoverflow.com/questions/25841/maximum-buffer-length-for-sendto

                                                                  new { }.With(
                                                                      async delegate
                                                                      {
                                                                          // reached too far?
                                                                          if (bytes.Length == 0)
                                                                              return;

                                                                          var current0 = current;

                                                                          var r = new MemoryStream(bytes);
                                                                          uploadLength = r.Length;

                                                                          var data = new byte[65507];

                                                                      next:

                                                                          if (current0 != current)
                                                                              return;

                                                                          var cc = r.Read(data, 0, data.Length);

                                                                          uploadPosition = r.Position;

                                                                          if (cc <= 0)
                                                                              return;

                                                                          //new IHTMLPre { "about to send... " + new { data.Length } }.AttachToDocument();

                                                                          // X:\jsc.svn\examples\javascript\chrome\apps\ChromeUDPNotification\ChromeUDPNotification\Application.cs
                                                                          //Console.WriteLine("about to Send");
                                                                          // X:\jsc.svn\examples\javascript\chrome\apps\WebGL\ChromeEquirectangularPanorama\ChromeEquirectangularPanorama\Application.cs
                                                                          await socket.SendAsync(
                                                                               data,
                                                                               cc,
                                                                               hostname: "239.1.2.3",
                                                                               port: 49000
                                                                           );

                                                                          //await Task.Delay(1000 / 15);
                                                                          //await Task.Delay(1000 / 30);

                                                                          // no corruption
                                                                          await Task.Delay(1000 / 20);

                                                                          goto next;

                                                                      }
                                                                  );

                                                                  //socket.Close();
                                                              }
                                                      }
                                                  }




                                              }
                                          );
                                }
                            }

                        if (uploadPosition < args.filesize)
                            mDraw.color = android.graphics.Color.YELLOW;
                        else
                            mDraw.color = android.graphics.Color.GREEN;

                        mDraw.postInvalidate();
                        Thread.Sleep(1000 / 30);
                        //Thread.Sleep(1000 / 2);
                        //Thread.Sleep(1000 / 15);
                    }
                }
            ).Start();
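
            // note: the broadcast above pushes raw 65507-byte segments at ~20 Hz with
            // no sequence numbers or framing, so any listener has to assume in-order,
            // lossless delivery on the local network. a minimal byte-counting listener
            // sketch (hypothetical), inside an async method:
            //
            //  var uu = new UdpClient(49000);
            //  uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"));
            //  var total = 0L;
            //  while (true)
            //  {
            //      var x = await uu.ReceiveAsync();
            //      total += x.Buffer.Length; // append to a MemoryStream to reassemble
            //  }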

            //this.ondispatchTouchEvent += @event =>
            //{

            //    int action = @event.getAction();
            //    float x = @event.getRawX();
            //    float y = @event.getRawY();
            //    //if (action == MotionEvent.ACTION_UP)
            //    {
            //        var halfx = 2560 / 2;
            //        var halfy = 1440 / 2;

            //        mDraw.x = (int)(500 + halfx - x);
            //        mDraw.y = (int)(600 + y - halfy);
            //        mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //        //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
            //    }

            //    // can we move hud around and record it to gif or mp4?

            //    return true;
            //};

            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs
            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));


            // "x:\util\android-sdk-windows\platform-tools\adb.exe" connect 192.168.1.126:5555
            // cmd /K x:\util\android-sdk-windows\platform-tools\adb.exe logcat -s "JniUtils"  "System.Console" "art"


            // E/JniUtils(14136): couldn't get isHybridApp, (Landroid/app/Activity;)Z

            //            I/Oculus360Photos( 9199): nativeSetAppInterface
            //I/App     ( 9199): VrAppInterface::SetActivity:
            //I/App     ( 9199): new AppLocal( 0xf51512b0 0xff8b6b80 0xeef69900 )
            //I/App     ( 9199): ----------------- AppLocal::AppLocal() -----------------
            //E/JniUtils( 9199): couldn't get getInternalCacheMemoryInBytes, (Landroid/app/Activity;)J

            //            I/JniUtils(26390): Using caller's JNIEnv
            //E/JniUtils(26390): couldn't get getInstalledPackagePath, (Ljava/lang/String;)Ljava/lang/String;

            //            I/System.Console( 3652): 0e44:0001 Searching installed packages for 'com.oculus.systemactivities'
            //I/JniUtils( 3652): ovr_GetCurrentPackageName() = OVROculus360PhotosHUD.Activities
            //I/JniUtils( 3652): ovr_GetPackageCodePath() = '/data/app/OVROculus360PhotosHUD.Activities-1/base.apk'
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //W/art     ( 3652): Attempt to remove local handle scope entry from IRT, ignoring
            //I/JniUtils( 3652): ovr_GetCurrentActivityName() = OVROculus360PhotosHUD.Activities.ApplicationActivity
            //I/JniUtils( 3652): ovr_GetCurrentPackageName() = OVROculus360PhotosHUD.Activities
            //E/JniUtils( 3652): couldn't get getLocalizedString, (Ljava/lang/String;)Ljava/lang/String;
            //I/JniUtils( 4380): ovr_GetCurrentActivityName() = com.oculus.home.HomeActivity

        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            Console.WriteLine("enter OVRMyCubeWorld onCreate");




            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xSurfaceHolder_Callback
            {
                onsurfaceCreated = holder =>
                {
                    Console.WriteLine("enter onsurfaceCreated " + new { appThread });
                    if (appThread == 0)
                        return;

                    // did we use it for float window?
                    //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);

                    GLES3JNILib.onSurfaceCreated(holder.getSurface());
                    xSurfaceHolder = holder;

                    //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (appThread == 0)
                        return;

                    GLES3JNILib.onSurfaceChanged(holder.getSurface());
                    xSurfaceHolder = holder;
                },

                onsurfaceDestroyed = holder =>
                {
                    //I/System.Console( 3549): 0ddd:0001 after OVRMyCubeWorld onCreate, attach the headset!
                    //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed

                    //Console.WriteLine("enter onsurfaceDestroyed");


                    if (appThread == 0)
                        return;


                    // I/DEBUG   ( 2079):     #01 pc 0000672f  /data/app/OVRMyCubeWorld.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
                    GLES3JNILib.onSurfaceDestroyed();
                    xSurfaceHolder = null;
                    //appThread = 0;

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
                }
            };
            #endregion
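
            // For reference, a minimal sketch of what a delegate-based xSurfaceHolder_Callback
            // adapter could look like (an assumption; the real helper lives elsewhere in the project):
            //
            //class xSurfaceHolder_Callback : SurfaceHolder_Callback
            //{
            //    public Action<SurfaceHolder> onsurfaceCreated;
            //    public Action<SurfaceHolder, int, int, int> onsurfaceChanged;
            //    public Action<SurfaceHolder> onsurfaceDestroyed;
            //
            //    public void surfaceCreated(SurfaceHolder holder)
            //    {
            //        if (onsurfaceCreated != null) onsurfaceCreated(holder);
            //    }
            //
            //    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
            //    {
            //        if (onsurfaceChanged != null) onsurfaceChanged(holder, format, width, height);
            //    }
            //
            //    public void surfaceDestroyed(SurfaceHolder holder)
            //    {
            //        if (onsurfaceDestroyed != null) onsurfaceDestroyed(holder);
            //    }
            //}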

            // https://github.com/dalinaum/TextureViewDemo
            //  TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
            // !! should we use TextureView instead?
            // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
            //this.xSurfaceView.setZOrderOnTop(true);    // necessary
            //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

            #region ReceiveAsync
            Action<IPAddress> f = async nic =>
             {
                 args.mouse = "awaiting at " + nic;

                 // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs
                 // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                 // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
                 var uu = new UdpClient(41814);
                 uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
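                 // 239.0.0.0/8 is the administratively scoped IPv4 multicast range (RFC 2365)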
                 while (true)
                 {
                     var x = await uu.ReceiveAsync(); // did we jump to ui thread?
                     //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
                     args.mouse = Encoding.UTF8.GetString(x.Buffer);

                     // or marshal memory?
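                      // datagram layout (inferred from the parsing below):
                      //   mousex:mousey:ad:ws:c:mousebutton:mousewheel, colon-separated ints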
                     var xy = args.mouse.Split(':');

                     args.mousex = int.Parse(xy[0]);
                     args.mousey = int.Parse(xy[1]);

                     // getchar?
                     args.ad = int.Parse(xy[2]);
                     args.ws = int.Parse(xy[3]);

                     // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                     args.c = int.Parse(xy[4]);
                     // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                     args.mousebutton = int.Parse(xy[5]);
                     args.mousewheel = int.Parse(xy[6]);
                 }
             };

            NetworkInterface.GetAllNetworkInterfaces().WithEach(
                n =>
                {
                    // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
                    // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs

                    var IPProperties = n.GetIPProperties();
                    var PhysicalAddress = n.GetPhysicalAddress();



                    foreach (var ip in IPProperties.UnicastAddresses)
                    {
                        // ipv4
                        if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                        {
                            if (!IPAddress.IsLoopback(ip.Address))
                                if (n.SupportsMulticast)
                                    f(ip.Address);
                        }
                    }




                }
            );
            #endregion
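
            // For reference, a minimal desktop-side sender sketch for the receive loop above
            // (an assumption, to illustrate the wire format; not part of this activity):
            //
            //using (var client = new UdpClient())
            //{
            //    // mousex:mousey:ad:ws:c:mousebutton:mousewheel
            //    var payload = Encoding.UTF8.GetBytes("100:200:0:0:0:1:0");
            //
            //    client.Send(payload, payload.Length,
            //        new IPEndPoint(IPAddress.Parse("239.1.2.3"), 41814));
            //}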


            var sw = Stopwatch.StartNew();

            //var args = new object();

            // can we draw on back?

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                // (out) VrApi.vrapi_GetVersionString()
                text = () =>
                {
                    // can we listen to udp?
                    // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
                    // in vr if the other service is running it can display vr notification

                    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
                    // let's run it and see if we get some vr notifications as we skip a video

                    GLES3JNILib.stringFromJNI(args);



                    return sw.ElapsedMilliseconds + "ms \n"
                        + args.mouse + "\n"
                        + new { args.mousex, args.mousey } + "\n"
                        + new
                        {
                            //args.mousex,

                            // left to right
                            args.x,


                            // nod up +0.7 down -0.7
                            //ox = args.tracking_HeadPose_Pose_Orientation_x 

                            // -0.7 right +0.7 left
                            oy = args.tracking_HeadPose_Pose_Orientation_y

                            // tilt right -0.7 tilt left + 0.7
                            //oz = args.tracking_HeadPose_Pose_Orientation_z

                            // ??
                            //ow = args.tracking_HeadPose_Pose_Orientation_w
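
                            // (the four values are the head pose quaternion components;
                            //  empirically x ~ pitch, y ~ yaw, z ~ roll for small angles)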
                        };
                }
            };

            //Task.Run(

            new Thread(
                delegate()
                {
                    // bg thread

                    while (true)
                    {
                        //Thread.Sleep(1000 / 15);
                        //Thread.Sleep(1000 / 30);

                        // fullspeed

                        Thread.Sleep(1000 / 60);
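                        // 1000 / 60 == 16 ms between frames, roughly one invalidate per 60 Hz refresh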


                        if (args.mousebutton == 0)
                        {
                            mDraw.color = android.graphics.Color.GREEN;
                            mDraw.alpha = 80;
                        }
                        else
                        {
                            mDraw.color = android.graphics.Color.YELLOW;
                            mDraw.alpha = 255;
                        }

                        mDraw.postInvalidate();
                    }
                }
            ).Start();
            #endregion

            #region ondispatchTouchEvent
            this.ondispatchTouchEvent = @event =>
            {
                if (appThread == 0)
                    return;

                int action = @event.getAction();
                float x = @event.getRawX();
                float y = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;
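
                    // assumes the 2560x1440 panel of the GearVR-era phones this targets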

                    // touch sends ints to offset the cubes
                    this.args.x = (int)(halfx - x);
                    this.args.y = (int)(y - halfy);

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                GLES3JNILib.onTouchEvent(action, x, y);

                // can we move hud around and record it to gif or mp4?
            };
            #endregion

            #region ondispatchKeyEvent
            this.ondispatchKeyEvent = @event =>
            {
                if (appThread == 0)
                    return false;

                int keyCode = @event.getKeyCode();
                int action = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return base.dispatchKeyEvent(@event);
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                GLES3JNILib.onKeyEvent(keyCode, action);

                return true;
            };
            #endregion



            AtResume = delegate
            {
                Console.WriteLine("enter onResume");


                // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
                // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
                //this.xSurfaceView.onres

                // You must ensure that the drawing thread only touches the underlying Surface while it is valid

                this.xSurfaceView = new SurfaceView(this);

                this.setContentView(xSurfaceView);
                this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

                this.xSurfaceView.getHolder().addCallback(xCallback);

                GLES3JNILib.onResume();
            };

            // can we add a camera too?

            //  stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
            //this.setContentView(mDraw);
            //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));


            // sometimes the system tries to blank the screen, it seems..
            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

            Console.WriteLine("after OVRMyCubeWorld onCreate, attach the headset!");

        }
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xCallback
            {
                onsurfaceCreated = holder =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceCreated(mNativeHandle, holder.getSurface());
                        mSurfaceHolder = holder;
                    }
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceChanged(mNativeHandle, holder.getSurface());
                        mSurfaceHolder = holder;
                    }
                },

                onsurfaceDestroyed = holder =>
                {
                    if (mNativeHandle != 0)
                    {
                        GLES3JNILib.onSurfaceDestroyed(mNativeHandle);
                        mSurfaceHolder = null;
                    }
                }
            };
            #endregion






            mView = new SurfaceView(this);
            this.setContentView(mView);



            //            E/AndroidRuntime(22718): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.view.ViewParent android.view.View.getParent()' on a null object reference
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addViewInner(ViewGroup.java:4216)
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addView(ViewGroup.java:4070)
            //E/AndroidRuntime(22718):        at android.view.ViewGroup.addView(ViewGroup.java:4046)
            //E/AndroidRuntime(22718):        at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:478)
            //E/AndroidRuntime(22718):        at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:459)
            //E/AndroidRuntime(22718):        at android.app.Activity.setContentView(Activity.java:2298)
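
            // i.e. addContentView needs an existing content view hierarchy; calling
            // setContentView(mView) first (above) avoids the NullPointerException logged here.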

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                text = () => sw.ElapsedMilliseconds + "ms !"
            };

            //Task.Run(

            new Thread(
                delegate()
                {
                    // bg thread

                    while (true)
                    {
                        //Thread.Sleep(1000 / 15);
                        Thread.Sleep(1000 / 30);


                        mDraw.postInvalidate();
                    }
                }
            ).Start();
            #endregion


            this.ondispatchTouchEvent = @event =>
            {
                if (mNativeHandle == 0)
                    return;

                int action = @event.getAction();
                float x = @event.getRawX();
                float y = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                GLES3JNILib.onTouchEvent(mNativeHandle, action, x, y);

                // can we move hud around and record it to gif or mp4?
            };

            this.ondispatchKeyEvent = @event =>
            {
                if (mNativeHandle == 0)
                    return false;

                int keyCode = @event.getKeyCode();
                int action = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return base.dispatchKeyEvent(@event);
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                GLES3JNILib.onKeyEvent(mNativeHandle, keyCode, action);

                return true;
            };

            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs
            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

            mView.getHolder().addCallback(xCallback);

            // Force the screen to stay on, rather than letting it dim and shut off
            // while the user is watching a movie.

            // does this disable the face sensor?
            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            // Force screen brightness to stay at maximum
            //WindowManager_LayoutParams _params = getWindow().getAttributes();
            //_params.screenBrightness = 1.0f;
            //getWindow().setAttributes(_params);

            mNativeHandle = com.oculus.gles3jni.GLES3JNILib.onCreate(this);
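            // onCreate hands back the native handle that every later JNI call
            // (onSurfaceCreated, onTouchEvent, onKeyEvent, ...) must be given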

            // can we now overlay something on top of the surface?
        }
Example #12
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            Console.WriteLine("enter OVRVrCubeWorldSurfaceViewX onCreate");



            #region xCallback
            // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
            var xCallback = new xCallback
            {
                onsurfaceCreated = holder =>
                {
                    //Console.WriteLine("enter onsurfaceCreated " + new { appThread });
                    if (appThread == 0)
                    {
                        return;
                    }

                    appThread.onSurfaceCreated(holder.getSurface());
                    mSurfaceHolder = holder;

                    //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
                },

                onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
                {
                    if (appThread == 0)
                    {
                        return;
                    }

                    appThread.onSurfaceChanged(holder.getSurface());
                    mSurfaceHolder = holder;
                },

                onsurfaceDestroyed = holder =>
                {
                    if (appThread == 0)
                    {
                        return;
                    }

                    appThread.onSurfaceDestroyed();
                    mSurfaceHolder = null;
                }
            };
            #endregion



            mView = new SurfaceView(this);
            this.setContentView(mView);

            var sw = Stopwatch.StartNew();

            #region mDraw
            var mDraw = new DrawOnTop(this)
            {
                // yes it appears top left.

                //text = "GearVR HUD"
                // (out) VrApi.vrapi_GetVersionString()
                text = () => sw.ElapsedMilliseconds + "ms " + GLES3JNILib.stringFromJNI()
            };

            //Task.Run(

            new Thread(
                delegate()
                {
                    // bg thread

                    while (true)
                    {
                        //Thread.Sleep(1000 / 15);
                        Thread.Sleep(1000 / 30);


                        mDraw.postInvalidate();
                    }
                }
            ).Start();
            #endregion

            #region ondispatchTouchEvent
            this.ondispatchTouchEvent = @event =>
            {
                if (appThread == 0)
                    return;

                int action = @event.getAction();
                float x = @event.getRawX();
                float y = @event.getRawY();
                //if (action == MotionEvent.ACTION_UP)
                {
                    var halfx = 2560 / 2;
                    var halfy = 1440 / 2;

                    mDraw.x = (int)(500 + halfx - x);
                    mDraw.y = (int)(600 + y - halfy);
                    //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
                    //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
                }
                appThread.onTouchEvent(action, x, y);

                // can we move hud around and record it to gif or mp4?
            };
            #endregion

            #region ondispatchKeyEvent
            this.ondispatchKeyEvent = @event =>
            {
                if (appThread == 0)
                    return false;

                int keyCode = @event.getKeyCode();
                int action = @event.getAction();
                if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
                {
                    return base.dispatchKeyEvent(@event);
                }
                if (action == KeyEvent.ACTION_UP)
                {
                    // keycode 4
                    mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
                    //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
                }
                appThread.onKeyEvent(keyCode, action);

                return true;
            };
            #endregion


            addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

            mView.getHolder().addCallback(xCallback);

            getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

            appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);
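            // note: in this variant onCreate returns an app-thread wrapper object
            // (hence the appThread.onSurfaceCreated / onTouchEvent instance calls above)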

            Console.WriteLine("after OVRVrCubeWorldSurfaceViewX onCreate, attach the headset!");
        }