protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRWindWheelActivity onCreate");

    // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });

            if (appThread == 0)
                return;

            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);

            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;

            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;

            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset!
            //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed
            //Console.WriteLine("enter onsurfaceDestroyed");

            if (appThread == 0)
                return;

            // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;

            //appThread = 0;

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion

    // https://github.com/dalinaum/TextureViewDemo
    // TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
    // !! should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
    //this.xSurfaceView.setZOrderOnTop(true); // necessary
    //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

    var ActivityPaused = true;

    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
    WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
    var lo = wifi.createMulticastLock("vrudp");
    lo.acquire();

    #region ReceiveAsync
    // https://www.youtube.com/watch?v=GpmKq_qg3Tk
    var HUDStylusList = new List<Action<android.graphics.Canvas>>();

    // http://uploadvr.com/vr-hmd-specs/
    Action<android.graphics.Canvas> HUDStylus = canvas =>
    {
        // video?
        // https://www.youtube.com/watch?v=JaTH_hoLDxc
        // so cool. we get to use pen in vr!
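        // The stroke segments arrive over UDP and are replayed here on every
        // frame; capping the list keeps the HUD redraw bounded (a crude ring buffer).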
        while (HUDStylusList.Count > 1024)
            HUDStylusList.RemoveAt(0);

        foreach (var item in HUDStylusList)
            item(canvas);
    };

    #region fUDPPressure
    Action<IPAddress> fUDPPressure = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(40094);

        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        //args.pre = "awaiting Parallax at " + nic + " :40094";

        var oldx = 0f;
        var oldy = 0f;

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async continue?
            var ux = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input while paused?
            if (ActivityPaused)
                continue;

            // while we have the signal, turn on the torch.
            var m = new BinaryReader(new MemoryStream(ux.Buffer));

            var x0 = m.ReadSingle();
            var x = 200 + x0 * 0.1f;

            var y0 = m.ReadSingle();
            var y = 1200 - y0 * 0.1f;

            var pressure = m.ReadSingle();

            new { x, y, oldx, oldy, pressure }.With(
                segment =>
                {
                    var paint = new android.graphics.Paint();

                    HUDStylusList.Add(
                        canvas =>
                        {
                            //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));
                            //c.lineWidth = 1 + (pressure / 255.0 * 7);
                            // paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));

                            paint.setStyle(android.graphics.Paint.Style.STROKE);

                            if (pressure > 0)
                                paint.setColor(android.graphics.Color.YELLOW);
                            else
                                paint.setColor(android.graphics.Color.RED);

                            // draw the segment once per eye, half a panel apart
                            canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);
                            canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                        }
                    );
                }
            );

            oldx = x;
            oldy = y;

            args.pen = new { x, y, pressure }.ToString();

            //Console.WriteLine(new { args.parallax });
            //// or marshal memory?
            //var xy = args.mouse.Split(':');
            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fParallax
    Action<IPAddress> fParallax = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(43834);

        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        args.parallax = "awaiting Parallax at " + nic + " :43834";

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async continue?
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input while paused?
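            // The parallax feed doubles as a "tracking active" signal: the first
            // packet lazily opens the camera below and switches the LED torch on,
            // presumably so the external tracker can see the headset. AtPause
            // releases the camera and turns the torch back off.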
            if (ActivityPaused)
                continue;

            // while we have the signal, turn on the torch.

            #region await webcam feed
            if (nogc == null)
            {
                // partial?
                var camera = android.hardware.Camera.open();
                android.hardware.Camera.Parameters p = camera.getParameters();
                p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                camera.setParameters(p);
                camera.startPreview();

                nogc = camera;
            }
            #endregion

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.parallax = Encoding.UTF8.GetString(x.Buffer);

            var xy = args.parallax.Split(':');

            //Console.WriteLine(new { args.parallax });
            //// or marshal memory?
            //var xy = args.mouse.Split(':');

            args.px = float.Parse(xy[1]);
            args.py = float.Parse(xy[2]);
            args.pz = float.Parse(xy[3]);

            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fWASDC
    var fWASDCport = 41814;
    Action<IPAddress> fWASDC = async nic =>
    {
        var uu = new UdpClient(fWASDCport);

        args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.mouse = Encoding.UTF8.GetString(x.Buffer);

            // or marshal memory?
            var xy = args.mouse.Split(':');

            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);

            // getchar?
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fvertexTransform
    // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
    Action<IPAddress> fvertexTransform = async nic =>
    {
        var uu = new UdpClient(40014);

        //args.mouse = "awaiting vertexTransform at " + nic + " :40014";

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?
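            // Unlike the text channels above, this datagram is kept as raw bytes;
            // the native side (fed by OVRUDPMatrix\Program.cs) presumably reads it
            // as a vertex transform matrix, so no parsing happens here.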
//Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.vertexTransform = x.Buffer; } }; #endregion NetworkInterface.GetAllNetworkInterfaces().WithEach( n => { // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs var IPProperties = n.GetIPProperties(); var PhysicalAddress = n.GetPhysicalAddress(); foreach (var ip in IPProperties.UnicastAddresses) { // ipv4 if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) { if (!IPAddress.IsLoopback(ip.Address)) { if (n.SupportsMulticast) { fUDPPressure(ip.Address); fWASDC(ip.Address); fParallax(ip.Address); fvertexTransform(ip.Address); } } } } } ); #endregion var sw = Stopwatch.StartNew(); //var args = new object(); // can we draw on back? #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => { // can we listen to udp? // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs // in vr if the other service is running it can display vr notification // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp // lets run it, and see if we can see some vr notifications as we skip a video //if (args.total_allocated_space > 48 * 1024 * 1024) // this.recreate(); return (sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n" + new { vertexTransform = args.vertexTransform.Length } +"\n" + args.mouse + "\n" + args.parallax + "\n" + args.vertexTransform.Length + "bytes udp\n" + new { args.pen } +"\n" //+ new { args.mousex, args.mousey } + "\n" + new { //args.mousex, // left to right //args.x, //args.px, args.px, args.py, args.pz, // nod up +0.7 down -0.7 ox = args.tracking_HeadPose_Pose_Orientation_x, // -0.7 right +0.7 left oy = args.tracking_HeadPose_Pose_Orientation_y // tilt right -0.7 tilt left + 0.7 //oz = args.tracking_HeadPose_Pose_Orientation_z // ?? //ow = args.tracking_HeadPose_Pose_Orientation_w }.ToString().Replace(",", "\n")); } }; //Task.Run( Func <string> safemode = () => { return (sw.ElapsedMilliseconds + "ms \n" + args.total_allocated_space + " bytes \n" + "GC safe mode / malloc limit.."); }; // canvas.drawText(text, x + 2560 / 2, y + i * 24, paint); mDraw.AtDraw = canvas => { { var paint = new android.graphics.Paint(); paint.setStrokeWidth(16); paint.setStyle(android.graphics.Paint.Style.STROKE); paint.setColor(android.graphics.Color.RED); canvas.drawLine(0, 0, 400, 400, paint); canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint); HUDStylus(canvas); } // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay // can w visually store tracking intel. like tvs do. { // https://code.google.com/p/android/issues/detail?id=4086 var paint = new android.graphics.Paint(); paint.setStrokeWidth(0); paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE); // lets have left to right recorder as a color block //// nod up +0.7 down -0.7 // cannot see it. var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4); // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e // why wont our tracking correctly show? 
//Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8")); //paint.setColor(android.graphics.Color.YELLOW); paint.setColor( (int)(0xff000000 | rgb_left_to_right)); canvas.drawRect(16, 0, 32, 32, paint); } // ox = args.tracking_HeadPose_Pose_Orientation_x, // oy = args.tracking_HeadPose_Pose_Orientation_y { // https://code.google.com/p/android/issues/detail?id=4086 var paint = new android.graphics.Paint(); paint.setStrokeWidth(0); paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE); //paint.setColor(android.graphics.Color.RED); // lets have left to right recorder as a color block // // -0.7 right +0.7 left var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4); //paint.setColor(android.graphics.Color.YELLOW); paint.setColor( (int)(0xff000000 | rgb_left_to_right)); canvas.drawRect(16 + 64, 0, 320, 32, paint); } }; new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 30); // fullspeed GLES3JNILib.stringFromJNI(args); // http://developer.android.com/reference/android/graphics/Color.html if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024) { mDraw.color = android.graphics.Color.RED; mDraw.alpha = 255; mDraw.text = safemode; // goto secondary activity? } else if (args.mousebutton != 0) { // go a head. lean left or up mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } else { mDraw.color = android.graphics.Color.GREEN; // not leaning in? if (args.pz < 0) { mDraw.color = android.graphics.Color.WHITE; } var BaseStationEdgeX = Math.Abs(args.px) > 0.3; var BaseStationEdgeY = Math.Abs(args.py) > 0.3; if (BaseStationEdgeX || BaseStationEdgeY ) { // base station wont track ya for long.. // reorient? // fade to black? mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } } mDraw.postInvalidate(); Thread.Sleep(1000 / 60); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 4); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) { return; } int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; // touch sending int to offfset the cubes this.args.x = (int)(halfx - x); this.args.y = (int)(y - halfy); mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? 
    };
    #endregion

    #region ondispatchKeyEvent
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
            return base.dispatchKeyEvent(@event);

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        GLES3JNILib.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion

    AtPause = delegate
    {
        ActivityPaused = true;
        GLES3JNILib.onPause();

        // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
        if (nogc != null)
        {
            var camera = nogc;
            var p = camera.getParameters();
            p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
            camera.setParameters(p);
            camera.stopPreview();
            camera.release();

            nogc = null;
        }
    };

    AtResume = delegate
    {
        //Console.WriteLine("enter onResume");
        ActivityPaused = false;

        // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
        // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
        //this.xSurfaceView.onres

        // You must ensure that the drawing thread only touches the underlying Surface while it is valid.
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);
        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

        this.xSurfaceView.getHolder().addCallback(xCallback);

        GLES3JNILib.onResume();
    };

    // can we add a camera too?
    // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
    //this.setContentView(mDraw);
    //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    // sometimes the system wants to try to black the screen, it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");
}
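// The pen reader above (fUDPPressure, port 40094) expects three little-endian
// floats per datagram: raw x, raw y, then pressure, where the drawing code
// treats pressure as roughly 0..255. A minimal desktop-side sender sketch under
// those assumptions, in the spirit of OVRUDPMatrix\Program.cs; the method name
// is illustrative and not part of the original example.
static void SendPenSample(float x, float y, float pressure)
{
    using (var u = new UdpClient())
    {
        var ms = new MemoryStream();
        var w = new BinaryWriter(ms);

        // field order must match the BinaryReader calls in fUDPPressure
        w.Write(x);
        w.Write(y);
        w.Write(pressure);

        var buffer = ms.ToArray();
        u.Send(buffer, buffer.Length, new IPEndPoint(IPAddress.Parse("239.1.2.3"), 40094));
    }
}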
protected override void onResume()
{
    base.onResume();

    GLES3JNILib.onResume(mNativeHandle);
}
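// Both activities parse the 41814 channel as seven colon-separated ints:
// mousex:mousey:ad:ws:c:mousebutton:mousewheel. A matching sender sketch,
// assuming the same 239.1.2.3 multicast group; the method and parameter names
// are illustrative only.
static void SendWASDC(int mousex, int mousey, int ad, int ws, int c, int mousebutton, int mousewheel)
{
    using (var u = new UdpClient())
    {
        // field order must match the int.Parse(xy[i]) indices in the readers
        var payload = mousex + ":" + mousey + ":" + ad + ":" + ws + ":" + c + ":" + mousebutton + ":" + mousewheel;
        var buffer = Encoding.UTF8.GetBytes(payload);

        u.Send(buffer, buffer.Length, new IPEndPoint(IPAddress.Parse("239.1.2.3"), 41814));
    }
}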
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRMyCubeWorld onCreate");

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });

            if (appThread == 0)
                return;

            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);

            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;

            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;

            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            //I/System.Console( 3549): 0ddd:0001 after OVRMyCubeWorld onCreate, attach the headset!
            //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed
            //Console.WriteLine("enter onsurfaceDestroyed");

            if (appThread == 0)
                return;

            // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRMyCubeWorld.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;

            //appThread = 0;

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion

    // https://github.com/dalinaum/TextureViewDemo
    // TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
    // !! should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
    //this.xSurfaceView.setZOrderOnTop(true); // necessary
    //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

    #region ReceiveAsync
    Action<IPAddress> f = async nic =>
    {
        args.mouse = "awaiting at " + nic;

        // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        var uu = new UdpClient(41814);

        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.mouse = Encoding.UTF8.GetString(x.Buffer);

            // or marshal memory?
            var xy = args.mouse.Split(':');

            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);

            // getchar?
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };

    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                    {
                        if (n.SupportsMulticast)
                            f(ip.Address);
                    }
                }
            }
        }
    );
    #endregion

    var sw = Stopwatch.StartNew();

    //var args = new object();

    // can we draw on back?

    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes, it appears top left.
        //text = "GearVR HUD"
        // (out) VrApi.vrapi_GetVersionString()
        text = () =>
        {
            // can we listen to udp?
            // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
            // in vr, if the other service is running, it can display a vr notification
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
            // let's run it and see if we can see some vr notifications as we skip a video
            GLES3JNILib.stringFromJNI(args);

            return sw.ElapsedMilliseconds + "ms \n"
                + args.mouse + "\n"
                + new { args.mousex, args.mousey } + "\n"
                + new
                {
                    //args.mousex,

                    // left to right
                    args.x,

                    // nod up +0.7 down -0.7
                    //ox = args.tracking_HeadPose_Pose_Orientation_x

                    // -0.7 right +0.7 left
                    oy = args.tracking_HeadPose_Pose_Orientation_y

                    // tilt right -0.7 tilt left +0.7
                    //oz = args.tracking_HeadPose_Pose_Orientation_z

                    // ??
                    //ow = args.tracking_HeadPose_Pose_Orientation_w
                };
        }
    };

    //Task.Run(

    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 30);

                // fullspeed
                Thread.Sleep(1000 / 60);

                if (args.mousebutton == 0)
                {
                    mDraw.color = android.graphics.Color.GREEN;
                    mDraw.alpha = 80;
                }
                else
                {
                    mDraw.color = android.graphics.Color.YELLOW;
                    mDraw.alpha = 255;
                }

                mDraw.postInvalidate();
            }
        }
    ).Start();
    #endregion

    #region ondispatchTouchEvent
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
            return;

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            // touch sends an int to offset the cubes
            this.args.x = (int)(halfx - x);
            this.args.y = (int)(y - halfy);

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);

            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }

        GLES3JNILib.onTouchEvent(action, x, y);

        // can we move the hud around and record it to gif or mp4?
    };
    #endregion

    #region ondispatchKeyEvent
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
            return base.dispatchKeyEvent(@event);

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        GLES3JNILib.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion

    AtResume = delegate
    {
        Console.WriteLine("enter onResume");

        // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
        // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
        //this.xSurfaceView.onres

        // You must ensure that the drawing thread only touches the underlying Surface while it is valid.
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);
        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

        this.xSurfaceView.getHolder().addCallback(xCallback);

        GLES3JNILib.onResume();
    };

    // can we add a camera too?
    // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
    //this.setContentView(mDraw);
    //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    // sometimes the system wants to try to black the screen, it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRMyCubeWorld onCreate, attach the headset!");
}
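// The parallax channel (fParallax, port 43834) is parsed as "<tag>:px:py:pz":
// the leading field is skipped and indices 1..3 are read with float.Parse.
// A sender sketch under the same assumptions; the "parallax" tag value is a
// guess, since only indices 1..3 are ever read. Invariant culture keeps the
// decimal separator a dot regardless of the sender's locale.
static void SendParallax(float px, float py, float pz)
{
    using (var u = new UdpClient())
    {
        var inv = System.Globalization.CultureInfo.InvariantCulture;
        var payload = "parallax:" + px.ToString(inv) + ":" + py.ToString(inv) + ":" + pz.ToString(inv);
        var buffer = Encoding.UTF8.GetBytes(payload);

        u.Send(buffer, buffer.Length, new IPEndPoint(IPAddress.Parse("239.1.2.3"), 43834));
    }
}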