public void surfaceChanged(SurfaceHolder holder, int format, int w, int h)
{
    Camera.Parameters parameters = camera.getParameters();
    //parameters.setPreviewSize(640, 480);
    parameters.setPictureFormat(android.graphics.PixelFormat.JPEG);

    camera.setParameters(parameters);
    camera.startPreview();
}
public Preview(Context context) : base(context)
{
    mHolder = getHolder();
    mHolder.addCallback(this);

    // http://developer.android.com/reference/android/view/SurfaceHolder.html#SURFACE_TYPE_PUSH_BUFFERS
    // 3 == SURFACE_TYPE_PUSH_BUFFERS (see link above; required for camera preview on older APIs)
    mHolder.setType(3);
}
public void surfaceCreated(SurfaceHolder holder)
{
    // http://developer.android.com/reference/android/hardware/Camera.html#open(int)
    camera = Camera.open(0);

    try
    {
        camera.setPreviewDisplay(holder);

        if (oncamera != null)
            oncamera(camera);
    }
    catch
    {
        // setPreviewDisplay can throw IOException if the surface is unavailable;
        // swallow it and continue without a preview
    }
}
//deletes all objects in a surface unit if no more world units are in it
public void deleteSurface(SurfaceUnit su)
{
    SurfaceHolder sh = null;

    //guard against units that were never generated
    if (!surfList.TryGetValue(su, out sh))
        return;

    sh.wuCount--;

    //if there are no more world units that exist in the surface unit, delete the surface unit and all objects in it
    if (sh.wuCount == 0)
    {
        foreach (WorldObject wo in sh.objects)
        {
            Build.destroyObject(wo);
        }

        surfList.Remove(su);
    }
}
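deleteSurface decrements a reference count that has to be incremented elsewhere; per the header comment on CreateSurfaceObjects (end of this section), the increment happens when a unit already exists. A minimal sketch of that pattern, assuming the same surfList/SurfaceHolder types (addSurface is a hypothetical name; the real code folds this into CreateSurfaceObjects):

//sketch: reference-count increment paired with deleteSurface's decrement
//(addSurface is a hypothetical name, not from the original source)
public void addSurface(SurfaceUnit su)
{
    SurfaceHolder sh = null;
    if (surfList.TryGetValue(su, out sh))
    {
        //unit already generated: one more world unit now uses it
        sh.wuCount++;
    }
    else
    {
        //first user: generate the unit's objects
        CreateSurfaceObjects(su);
    }
}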
public void surfaceChanged(SurfaceHolder arg0, int format, int width, int height)
{
    onsurfaceChanged(arg0, format, width, height);
}
public void setPreviewDisplay(SurfaceHolder holder) { }
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3)
{
    if (yield_surfaceChanged != null)
        yield_surfaceChanged();
}
public void surfaceDestroyed(SurfaceHolder value)
{
    System.Console.WriteLine("surfaceDestroyed");
}
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3)
{
    System.Console.WriteLine("surfaceChanged");
}
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRWindWheelActivity onCreate");

    // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });
            if (appThread == 0)
                return;

            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);
            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;
            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;

            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset!
            //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed
            //Console.WriteLine("enter onsurfaceDestroyed");
            if (appThread == 0)
                return;

            // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;
            //appThread = 0;

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion

    // https://github.com/dalinaum/TextureViewDemo
    // TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
    // !! should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
    //this.xSurfaceView.setZOrderOnTop(true);    // necessary
    //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

    var ActivityPaused = true;

    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
    WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
    var lo = wifi.createMulticastLock("vrudp");
    lo.acquire();

    #region ReceiveAsync
    // https://www.youtube.com/watch?v=GpmKq_qg3Tk
    var HUDStylusList = new List<Action<android.graphics.Canvas>>();

    // http://uploadvr.com/vr-hmd-specs/
    Action<android.graphics.Canvas> HUDStylus = canvas =>
    {
        // video? https://www.youtube.com/watch?v=JaTH_hoLDxc
        // so cool. we get to use the pen in vr!

        while (HUDStylusList.Count > 1024)
            HUDStylusList.RemoveAt(0);

        foreach (var item in HUDStylusList)
        {
            item(canvas);
        }
    };

    #region fUDPPressure
    Action<IPAddress> fUDPPressure = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(40094);

        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        //args.pre = "awaiting Parallax at " + nic + " :40094";

        var oldx = 0f;
        var oldy = 0f;

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async Continue ??
            var ux = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input?
            if (ActivityPaused)
                continue;

            // while we have the signal, turn on the torch.
            var m = new BinaryReader(new MemoryStream(ux.Buffer));

            var x0 = m.ReadSingle();
            var x = 200 + x0 * 0.1f;

            var y0 = m.ReadSingle();
            var y = 1200 - y0 * 0.1f;

            var pressure = m.ReadSingle();

            new { x, y, oldx, oldy, pressure }.With(
                segment =>
                {
                    var paint = new android.graphics.Paint();

                    HUDStylusList.Add(
                        canvas =>
                        {
                            //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));
                            //c.lineWidth = 1 + (pressure / 255.0 * 7);
                            // paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));

                            paint.setStyle(android.graphics.Paint.Style.STROKE);

                            if (pressure > 0)
                                paint.setColor(android.graphics.Color.YELLOW);
                            else
                                paint.setColor(android.graphics.Color.RED);

                            canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);
                            canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                        }
                    );
                }
            );

            oldx = x;
            oldy = y;

            args.pen = new { x, y, pressure }.ToString();
            //Console.WriteLine(new { args.parallax });

            //// or marshal memory?
            //var xy = args.mouse.Split(':');
            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fParallax
    Action<IPAddress> fParallax = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(43834);

        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        args.parallax = "awaiting Parallax at " + nic + " :43834";

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async Continue ??
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input?
            if (ActivityPaused)
                continue;

            // while we have the signal, keep the torch on.
            #region await webcam feed
            if (nogc == null)
            {
                // partial ?
                var camera = android.hardware.Camera.open();
                android.hardware.Camera.Parameters p = camera.getParameters();
                p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                camera.setParameters(p);
                camera.startPreview();

                nogc = camera;
            }
            #endregion

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.parallax = Encoding.UTF8.GetString(x.Buffer);

            var xy = args.parallax.Split(':');
            //Console.WriteLine(new { args.parallax });

            args.px = float.Parse(xy[1]);
            args.py = float.Parse(xy[2]);
            args.pz = float.Parse(xy[3]);

            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fWASDC
    var fWASDCport = 41814;
    Action<IPAddress> fWASDC = async nic =>
    {
        var uu = new UdpClient(fWASDCport);

        args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.mouse = Encoding.UTF8.GetString(x.Buffer);

            // or marshal memory?
            var xy = args.mouse.Split(':');

            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);

            // getchar?
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fvertexTransform
    // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
    Action<IPAddress> fvertexTransform = async nic =>
    {
        var uu = new UdpClient(40014);

        //args.mouse = "awaiting vertexTransform at " + nic + " :40014";

        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.vertexTransform = x.Buffer;
        }
    };
    #endregion

    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                    {
                        if (n.SupportsMulticast)
                        {
                            fUDPPressure(ip.Address);
                            fWASDC(ip.Address);
                            fParallax(ip.Address);
                            fvertexTransform(ip.Address);
                        }
                    }
                }
            }
        }
    );
    #endregion

    var sw = Stopwatch.StartNew();
    //var args = new object();

    // can we draw on back?

    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes, it appears top left.
        //text = "GearVR HUD"
        // (out) VrApi.vrapi_GetVersionString()
        text = () =>
        {
            // can we listen to udp?
            // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
            // in vr, if the other service is running it can display a vr notification
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
            // lets run it, and see if we can see some vr notifications as we skip a video

            //if (args.total_allocated_space > 48 * 1024 * 1024)
            //    this.recreate();

            return sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n"
                + new { vertexTransform = args.vertexTransform.Length } + "\n"
                + args.mouse + "\n"
                + args.parallax + "\n"
                + args.vertexTransform.Length + "bytes udp\n"
                + new { args.pen } + "\n"
                //+ new { args.mousex, args.mousey } + "\n"
                + new
                {
                    //args.mousex, // left to right
                    //args.x,
                    args.px,
                    args.py,
                    args.pz,

                    // nod up +0.7 down -0.7
                    ox = args.tracking_HeadPose_Pose_Orientation_x,

                    // -0.7 right +0.7 left
                    oy = args.tracking_HeadPose_Pose_Orientation_y

                    // tilt right -0.7 tilt left +0.7
                    //oz = args.tracking_HeadPose_Pose_Orientation_z

                    // ??
                    //ow = args.tracking_HeadPose_Pose_Orientation_w
                }.ToString().Replace(",", "\n");
        }
    };

    //Task.Run(

    Func<string> safemode = () =>
    {
        return sw.ElapsedMilliseconds + "ms \n"
            + args.total_allocated_space + " bytes \n"
            + "GC safe mode / malloc limit..";
    };

    // canvas.drawText(text, x + 2560 / 2, y + i * 24, paint);
    mDraw.AtDraw = canvas =>
    {
        {
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(16);
            paint.setStyle(android.graphics.Paint.Style.STROKE);
            paint.setColor(android.graphics.Color.RED);

            canvas.drawLine(0, 0, 400, 400, paint);
            canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint);

            HUDStylus(canvas);
        }

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay
        // can we visually store tracking intel, like tvs do?
        {
            // https://code.google.com/p/android/issues/detail?id=4086
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

            // lets have a left-to-right recorder as a color block
            //// nod up +0.7 down -0.7
            // cannot see it.
            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4);

            // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e
            // why wont our tracking correctly show?
            //Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8"));

            //paint.setColor(android.graphics.Color.YELLOW);
            paint.setColor((int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16, 0, 32, 32, paint);
        }

        // ox = args.tracking_HeadPose_Pose_Orientation_x,
        // oy = args.tracking_HeadPose_Pose_Orientation_y
        {
            // https://code.google.com/p/android/issues/detail?id=4086
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
            //paint.setColor(android.graphics.Color.RED);

            // lets have a left-to-right recorder as a color block
            // // -0.7 right +0.7 left
            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4);

            //paint.setColor(android.graphics.Color.YELLOW);
            paint.setColor((int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16 + 64, 0, 320, 32, paint);
        }
    };

    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 30);

                // fullspeed
                GLES3JNILib.stringFromJNI(args);

                // http://developer.android.com/reference/android/graphics/Color.html
                if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024)
                {
                    mDraw.color = android.graphics.Color.RED;
                    mDraw.alpha = 255;
                    mDraw.text = safemode;

                    // goto secondary activity?
                }
                else if (args.mousebutton != 0)
                {
                    // go ahead. lean left or up
                    mDraw.color = android.graphics.Color.YELLOW;
                    mDraw.alpha = 255;
                }
                else
                {
                    mDraw.color = android.graphics.Color.GREEN;

                    // not leaning in?
                    if (args.pz < 0)
                        mDraw.color = android.graphics.Color.WHITE;

                    var BaseStationEdgeX = Math.Abs(args.px) > 0.3;
                    var BaseStationEdgeY = Math.Abs(args.py) > 0.3;

                    if (BaseStationEdgeX || BaseStationEdgeY)
                    {
                        // base station won't track ya for long..
                        // reorient? fade to black?
                        mDraw.color = android.graphics.Color.YELLOW;
                        mDraw.alpha = 255;
                    }
                }

                mDraw.postInvalidate();

                Thread.Sleep(1000 / 60);

                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 4);
            }
        }
    ).Start();
    #endregion

    #region ondispatchTouchEvent
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
            return;

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            // touch sends an int to offset the cubes
            this.args.x = (int)(halfx - x);
            this.args.y = (int)(y - halfy);

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);

            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }

        GLES3JNILib.onTouchEvent(action, x, y);

        // can we move the hud around and record it to gif or mp4?
    };
    #endregion

    #region ondispatchKeyEvent
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return base.dispatchKeyEvent(@event);
        }

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        GLES3JNILib.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion

    AtPause = delegate
    {
        ActivityPaused = true;
        GLES3JNILib.onPause();

        // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
        if (nogc != null)
        {
            var camera = nogc;
            var p = camera.getParameters();
            p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
            camera.setParameters(p);

            camera.stopPreview();
            camera.release();

            nogc = null;
        }
    };

    AtResume = delegate
    {
        //Console.WriteLine("enter onResume");
        ActivityPaused = false;

        // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
        // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
        //this.xSurfaceView.onres

        // You must ensure that the drawing thread only touches the underlying Surface while it is valid
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);

        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

        this.xSurfaceView.getHolder().addCallback(xCallback);

        GLES3JNILib.onResume();
    };

    // can we add a camera too?
    // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
    //this.setContentView(mDraw);
    //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    // sometimes the system wants to try to black the screen, it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");
}
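For reference, the fWASDC listener above splits a UTF-8, colon-separated packet in the order mousex:mousey:ad:ws:c:mousebutton:mousewheel, received on multicast group 239.1.2.3 port 41814. A minimal desktop-side sender sketch under those assumptions (the WASDCSender helper name is hypothetical, not from the original source):

using System.Net;
using System.Net.Sockets;
using System.Text;

static class WASDCSender
{
    // sends one input-state packet in the exact format fWASDC splits on ':'
    // (field order inferred from the int.Parse calls in the listener)
    public static void Send(int mousex, int mousey, int ad, int ws, int c, int mousebutton, int mousewheel)
    {
        var payload = string.Join(":", new[] { mousex, mousey, ad, ws, c, mousebutton, mousewheel });
        var data = Encoding.UTF8.GetBytes(payload);

        using (var u = new UdpClient())
        {
            // same multicast group and port the activity joins
            u.Send(data, data.Length, new IPEndPoint(IPAddress.Parse("239.1.2.3"), 41814));
        }
    }
}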
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xCallback
    {
        onsurfaceCreated = holder =>
        {
            if (mNativeHandle != 0)
            {
                GLES3JNILib.onSurfaceCreated(mNativeHandle, holder.getSurface());
                mSurfaceHolder = holder;
            }
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (mNativeHandle != 0)
            {
                GLES3JNILib.onSurfaceChanged(mNativeHandle, holder.getSurface());
                mSurfaceHolder = holder;
            }
        },

        onsurfaceDestroyed = holder =>
        {
            if (mNativeHandle != 0)
            {
                GLES3JNILib.onSurfaceDestroyed(mNativeHandle);
                mSurfaceHolder = null;
            }
        }
    };
    #endregion

    mView = new SurfaceView(this);
    this.setContentView(mView);

    // E/AndroidRuntime(22718): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.view.ViewParent android.view.View.getParent()' on a null object reference
    // E/AndroidRuntime(22718):   at android.view.ViewGroup.addViewInner(ViewGroup.java:4216)
    // E/AndroidRuntime(22718):   at android.view.ViewGroup.addView(ViewGroup.java:4070)
    // E/AndroidRuntime(22718):   at android.view.ViewGroup.addView(ViewGroup.java:4046)
    // E/AndroidRuntime(22718):   at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:478)
    // E/AndroidRuntime(22718):   at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:459)
    // E/AndroidRuntime(22718):   at android.app.Activity.setContentView(Activity.java:2298)

    var sw = Stopwatch.StartNew();

    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes, it appears top left.
        //text = "GearVR HUD"
        text = () => sw.ElapsedMilliseconds + "ms !"
    };

    //Task.Run(
    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                Thread.Sleep(1000 / 30);
                mDraw.postInvalidate();
            }
        }
    ).Start();
    #endregion

    this.ondispatchTouchEvent = @event =>
    {
        if (mNativeHandle == 0)
            return;

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);

            mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();

            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }

        GLES3JNILib.onTouchEvent(mNativeHandle, action, x, y);

        // can we move the hud around and record it to gif or mp4?
    };

    this.ondispatchKeyEvent = @event =>
    {
        if (mNativeHandle == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return base.dispatchKeyEvent(@event);
        }

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        GLES3JNILib.onKeyEvent(mNativeHandle, keyCode, action);
        return true;
    };

    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs
    addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    mView.getHolder().addCallback(xCallback);

    // Force the screen to stay on, rather than letting it dim and shut off
    // while the user is watching a movie. does this disable the face sensor?
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    // Force screen brightness to stay at maximum
    //WindowManager_LayoutParams _params = getWindow().getAttributes();
    //_params.screenBrightness = 1.0f;
    //getWindow().setAttributes(_params);

    mNativeHandle = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    // can we now overlay something on top of the surface?
}
public void surfaceDestroyed(SurfaceHolder holder)
{
    camera.stopPreview();
    camera.release();
    camera = null;
}
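Taken together, the camera fragments above (the Preview constructor, surfaceCreated, surfaceChanged, and surfaceDestroyed) form the usual camera-preview SurfaceView. A minimal sketch of how they fit together, assuming the same jsc-style android bindings used throughout this section (the exact callback-interface name depends on those bindings):

// sketch: the camera-preview SurfaceView assembled from the fragments above
public class Preview : SurfaceView, SurfaceHolder.Callback
{
    SurfaceHolder mHolder;
    Camera camera;                 // android.hardware.Camera
    public Action<Camera> oncamera; // optional hook, as in the surfaceCreated fragment

    public Preview(Context context) : base(context)
    {
        mHolder = getHolder();
        mHolder.addCallback(this);
        mHolder.setType(3); // SURFACE_TYPE_PUSH_BUFFERS
    }

    public void surfaceCreated(SurfaceHolder holder)
    {
        camera = Camera.open(0);
        try { camera.setPreviewDisplay(holder); }
        catch { /* surface unavailable; continue without a preview */ }

        if (oncamera != null)
            oncamera(camera);
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h)
    {
        // surface size/format is now known; safe to start drawing frames
        camera.startPreview();
    }

    public void surfaceDestroyed(SurfaceHolder holder)
    {
        camera.stopPreview();
        camera.release();
        camera = null;
    }
}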
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRMyCubeWorld onCreate");

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });
            if (appThread == 0)
                return;

            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);
            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;
            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;

            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            //I/System.Console( 3549): 0ddd:0001 after OVRMyCubeWorld onCreate, attach the headset!
            //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed
            //Console.WriteLine("enter onsurfaceDestroyed");
            if (appThread == 0)
                return;

            // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRMyCubeWorld.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;
            //appThread = 0;

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion

    // https://github.com/dalinaum/TextureViewDemo
    // TextureView can be made semi-translucent by calling myView.setAlpha(0.5f).
    // !! should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
    //this.xSurfaceView.setZOrderOnTop(true);    // necessary
    //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);

    #region ReceiveAsync
    Action<IPAddress> f = async nic =>
    {
        args.mouse = "awaiting at " + nic;

        // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        var uu = new UdpClient(41814);
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.mouse = Encoding.UTF8.GetString(x.Buffer);

            // or marshal memory?
            var xy = args.mouse.Split(':');

            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);

            // getchar?
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };

    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                        if (n.SupportsMulticast)
                            f(ip.Address);
                }
            }
        }
    );
    #endregion

    var sw = Stopwatch.StartNew();
    //var args = new object();

    // can we draw on back?

    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes, it appears top left.
        //text = "GearVR HUD"
        // (out) VrApi.vrapi_GetVersionString()
        text = () =>
        {
            // can we listen to udp?
            // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
            // in vr, if the other service is running it can display a vr notification
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
            // lets run it, and see if we can see some vr notifications as we skip a video
            GLES3JNILib.stringFromJNI(args);

            return sw.ElapsedMilliseconds + "ms \n"
                + args.mouse + "\n"
                + new { args.mousex, args.mousey } + "\n"
                + new
                {
                    //args.mousex, // left to right
                    args.x,

                    // nod up +0.7 down -0.7
                    //ox = args.tracking_HeadPose_Pose_Orientation_x

                    // -0.7 right +0.7 left
                    oy = args.tracking_HeadPose_Pose_Orientation_y

                    // tilt right -0.7 tilt left +0.7
                    //oz = args.tracking_HeadPose_Pose_Orientation_z

                    // ??
                    //ow = args.tracking_HeadPose_Pose_Orientation_w
                };
        }
    };

    //Task.Run(

    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 30);

                // fullspeed
                Thread.Sleep(1000 / 60);

                if (args.mousebutton == 0)
                {
                    mDraw.color = android.graphics.Color.GREEN;
                    mDraw.alpha = 80;
                }
                else
                {
                    mDraw.color = android.graphics.Color.YELLOW;
                    mDraw.alpha = 255;
                }

                mDraw.postInvalidate();
            }
        }
    ).Start();
    #endregion

    #region ondispatchTouchEvent
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
            return;

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            // touch sends an int to offset the cubes
            this.args.x = (int)(halfx - x);
            this.args.y = (int)(y - halfy);

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);

            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }

        GLES3JNILib.onTouchEvent(action, x, y);

        // can we move the hud around and record it to gif or mp4?
    };
    #endregion

    #region ondispatchKeyEvent
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return base.dispatchKeyEvent(@event);
        }

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        GLES3JNILib.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion

    AtResume = delegate
    {
        Console.WriteLine("enter onResume");

        // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
        // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
        //this.xSurfaceView.onres

        // You must ensure that the drawing thread only touches the underlying Surface while it is valid
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);

        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

        this.xSurfaceView.getHolder().addCallback(xCallback);

        GLES3JNILib.onResume();
    };

    // can we add a camera too?
    // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
    //this.setContentView(mDraw);
    //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    // sometimes the system wants to try to black the screen, it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRMyCubeWorld onCreate, attach the headset!");
}
public static void setTryPreviewDisplay(this android.hardware.Camera c, SurfaceHolder i)
{
    try
    {
        c.setPreviewDisplay(i);
    }
    catch
    {
        // note: rethrowing here means this wrapper does not actually swallow
        // the IOException that setPreviewDisplay can raise
        throw;
    }
}
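If a genuinely non-throwing helper is wanted, a bool-returning variant matches the "try" naming better. A sketch, not the original author's code:

// sketch: non-throwing alternative to setTryPreviewDisplay (hypothetical helper)
public static bool TrySetPreviewDisplay(this android.hardware.Camera c, SurfaceHolder i)
{
    try
    {
        c.setPreviewDisplay(i);
        return true;
    }
    catch
    {
        // surface not yet available or camera unusable; report failure instead of throwing
        return false;
    }
}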
public void surfaceCreated(SurfaceHolder value)
{
    if (yield_surfaceCreated != null)
        yield_surfaceCreated();
}
public void surfaceCreated(SurfaceHolder value)
{
    onsurfaceCreated(value);
}
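The forwarding methods above belong to small adapter classes that expose SurfaceHolder.Callback as assignable delegate fields (the xSurfaceHolder_Callback / xCallback objects the activities below construct with object initializers). A sketch of that adapter under those assumptions; the exact base-interface name depends on the bindings in use:

// sketch: SurfaceHolder.Callback exposed as assignable delegate fields
public class xSurfaceHolder_Callback : SurfaceHolder.Callback
{
    public Action<SurfaceHolder> onsurfaceCreated;
    public Action<SurfaceHolder, int, int, int> onsurfaceChanged;
    public Action<SurfaceHolder> onsurfaceDestroyed;

    public void surfaceCreated(SurfaceHolder holder)
    {
        if (onsurfaceCreated != null) onsurfaceCreated(holder);
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        if (onsurfaceChanged != null) onsurfaceChanged(holder, format, width, height);
    }

    public void surfaceDestroyed(SurfaceHolder holder)
    {
        if (onsurfaceDestroyed != null) onsurfaceDestroyed(holder);
    }
}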
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRVrCubeWorldSurfaceViewX onCreate");

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xCallback
    {
        onsurfaceCreated = holder =>
        {
            //Console.WriteLine("enter onsurfaceCreated " + new { appThread });
            if (appThread == 0)
                return;

            appThread.onSurfaceCreated(holder.getSurface());
            mSurfaceHolder = holder;
            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;

            appThread.onSurfaceChanged(holder.getSurface());
            mSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            if (appThread == 0)
                return;

            appThread.onSurfaceDestroyed();
            mSurfaceHolder = null;
        }
    };
    #endregion

    mView = new SurfaceView(this);
    this.setContentView(mView);

    var sw = Stopwatch.StartNew();

    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes, it appears top left.
        //text = "GearVR HUD"
        // (out) VrApi.vrapi_GetVersionString()
        text = () => sw.ElapsedMilliseconds + "ms " + GLES3JNILib.stringFromJNI()
    };

    //Task.Run(
    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                Thread.Sleep(1000 / 30);
                mDraw.postInvalidate();
            }
        }
    ).Start();
    #endregion

    #region ondispatchTouchEvent
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
            return;

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);

            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }

        appThread.onTouchEvent(action, x, y);

        // can we move the hud around and record it to gif or mp4?
    };
    #endregion

    #region ondispatchKeyEvent
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return base.dispatchKeyEvent(@event);
        }

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }

        appThread.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion

    addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

    mView.getHolder().addCallback(xCallback);

    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRVrCubeWorldSurfaceViewX onCreate, attach the headset!");
}
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); Console.WriteLine("enter OVRMyCubeWorld onCreate"); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xSurfaceHolder_Callback { onsurfaceCreated = holder => { Console.WriteLine("enter onsurfaceCreated " + new { appThread }); if (appThread == 0) { return; } // did we use it for float window? //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT); GLES3JNILib.onSurfaceCreated(holder.getSurface()); xSurfaceHolder = holder; //Console.WriteLine("exit onsurfaceCreated " + new { appThread }); }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appThread == 0) { return; } GLES3JNILib.onSurfaceChanged(holder.getSurface()); xSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { //I/System.Console( 3549): 0ddd:0001 after OVRMyCubeWorld onCreate, attach the headset! //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed //Console.WriteLine("enter onsurfaceDestroyed"); if (appThread == 0) { return; } // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRMyCubeWorld.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46) GLES3JNILib.onSurfaceDestroyed(); xSurfaceHolder = null; //appThread = 0; // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu } }; #endregion // https://github.com/dalinaum/TextureViewDemo // TextureView semi-translucent by calling myView.setAlpha(0.5f). // !! should we use TextureView instead? // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ //this.xSurfaceView.setZOrderOnTop(true); // necessary //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT); #region ReceiveAsync Action <IPAddress> f = async nic => { args.mouse = "awaiting at " + nic; // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs var uu = new UdpClient(41814); uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic); while (true) { var x = await uu.ReceiveAsync(); // did we jump to ui thread? //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.mouse = Encoding.UTF8.GetString(x.Buffer); // or marshal memory? var xy = args.mouse.Split(':'); args.mousex = int.Parse(xy[0]); args.mousey = int.Parse(xy[1]); // getchar? 
args.ad = int.Parse(xy[2]); args.ws = int.Parse(xy[3]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704 args.c = int.Parse(xy[4]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown args.mousebutton = int.Parse(xy[5]); args.mousewheel = int.Parse(xy[6]); } }; NetworkInterface.GetAllNetworkInterfaces().WithEach( n => { // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs var IPProperties = n.GetIPProperties(); var PhysicalAddress = n.GetPhysicalAddress(); foreach (var ip in IPProperties.UnicastAddresses) { // ipv4 if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) { if (!IPAddress.IsLoopback(ip.Address)) { if (n.SupportsMulticast) { f(ip.Address); } } } } } ); #endregion var sw = Stopwatch.StartNew(); //var args = new object(); // can we draw on back? #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => { // can we listen to udp? // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs // in vr if the other service is running it can display vr notification // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp // lets run it, and see if we can see some vr notifications as we skip a video GLES3JNILib.stringFromJNI(args); return(sw.ElapsedMilliseconds + "ms \n" + args.mouse + "\n" + new { args.mousex, args.mousey } +"\n" + new { //args.mousex, // left to right args.x, // nod up +0.7 down -0.7 //ox = args.tracking_HeadPose_Pose_Orientation_x // -0.7 right +0.7 left oy = args.tracking_HeadPose_Pose_Orientation_y // tilt right -0.7 tilt left + 0.7 //oz = args.tracking_HeadPose_Pose_Orientation_z // ?? //ow = args.tracking_HeadPose_Pose_Orientation_w }); } }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 30); // fullspeed Thread.Sleep(1000 / 60); if (args.mousebutton == 0) { mDraw.color = android.graphics.Color.GREEN; mDraw.alpha = 80; } else { mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } mDraw.postInvalidate(); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) { return; } int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; // touch sending int to offfset the cubes this.args.x = (int)(halfx - x); this.args.y = (int)(y - halfy); mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? 
}; #endregion #region ondispatchKeyEvent this.ondispatchKeyEvent = @event => { if (appThread == 0) { return(false); } int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return(base.dispatchKeyEvent(@event)); } if (action == KeyEvent.ACTION_UP) { // keycode 4 //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } GLES3JNILib.onKeyEvent(keyCode, action); return(true); }; #endregion AtResume = delegate { Console.WriteLine("enter onResume"); // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen //this.xSurfaceView.onres // You must ensure that the drawing thread only touches the underlying Surface while it is valid this.xSurfaceView = new SurfaceView(this); this.setContentView(xSurfaceView); this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); this.xSurfaceView.getHolder().addCallback(xCallback); GLES3JNILib.onResume(); }; // can we add a camera too? // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background //this.setContentView(mDraw); //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); // sometimes the system wants to try to black the screen, it seems.. getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this); Console.WriteLine("after OVRMyCubeWorld onCreate, attach the headset!"); }
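The ReceiveAsync loop in the example above blocks on a UdpClient bound to port 41814, joined to multicast group 239.1.2.3, and parses each datagram as seven colon-separated integers (mousex:mousey:ad:ws:c:mousebutton:mousewheel). Below is a minimal sketch of the matching sender side, assuming that same group, port and field order; the HudInputSender type and its Send signature are hypothetical names introduced here for illustration.

using System.Net;
using System.Net.Sockets;
using System.Text;

static class HudInputSender // hypothetical helper, not part of the listing above
{
    public static void Send(int mousex, int mousey, int ad, int ws, int c, int mousebutton, int mousewheel)
    {
        // same wire format the receiver splits on ':' and int.Parses field by field
        var payload = string.Join(":", new[] { mousex, mousey, ad, ws, c, mousebutton, mousewheel });
        var bytes = Encoding.UTF8.GetBytes(payload);

        using (var u = new UdpClient())
        {
            // 239.1.2.3:41814 mirrors the UdpClient(41814)/JoinMulticastGroup pair above
            u.Send(bytes, bytes.Length, new IPEndPoint(IPAddress.Parse("239.1.2.3"), 41814));
        }
    }
}

A desktop-side tool could call HudInputSender.Send on every mouse move to drive the HUD cursor that mDraw renders in the activity.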
//a working name //builds all the objects in a certain surface unit //or if it already exists, increase its wuCount public void CreateSurfaceObjects(SurfaceUnit su) { //creates an empty surface holder SurfaceHolder sh = null; //Debug.Log (su); //only make the objects in this unit if it has not already been generated //and add it to the list so it is not generated again if (!surfList.TryGetValue(su, out sh)) { sh = new SurfaceHolder(); curSH = sh; //connect class wide reference surfList.Add(su, sh); //instantiate a random number generator with a seed based on the su position sent through a hash function //NOTE: the last 1 parameter is used as a kind of planet identifier, but this may not be needed //System.Random rand = new System.Random((int)WorldManager.hash.GetHash(su.u, su.v, (int)su.side, 1)); System.Random rand = RandomHandler.surfaceRandom(su); //NOTE: all objects that are in the rect list are always in the radial list //create a list of radial collisions //(x,y,radius) List <RadialCol> radCols = new List <RadialCol>(); //create a list of rectangular collisions //List<Vector4> rectCols = new List<Vector4>(); //first add all roads, then buildings, then natural things //build all transportation segments and add them to the collision lists //buildTransport(su); //create a list of samples List <Sub> samples = new List <Sub>(); for (int i = 0; i < numSamples; i++) { SurfacePos surfPos = new SurfacePos(su.side, su.u + (float)rand.NextDouble(), su.v + (float)rand.NextDouble()); //convert the surface position to world position Vector3 worldPos = UnitConverter.getWP(surfPos, radius, sideLength); //TODO: make a function to only retrieve the substance and not the vox val float val; Sub sub; planet.noise.getVoxData(worldPos, out val, out sub); //Debug.Log(sub); samples.Add(sub); } foreach (Blueprint bp in blueprints) { int amount = bp.getAmount(samples, new WorldPos(), rand.NextDouble()); for (int i = 0; i < amount; i++) { //possibly a later method to build all objects in this su within the blueprint class and return a list //WorldObject wo = bp.buildObject(rand); Mesh mesh = bp.buildObject(rand.Next()); //choose random x and y position within the su float u = (float)rand.NextDouble(); float v = (float)rand.NextDouble(); //choose random rotation (will not be random for things like buildings later) Quaternion surfRot = Quaternion.Euler((float)rand.NextDouble() * 360, (float)rand.NextDouble() * 360, (float)rand.NextDouble() * 360); //the global surface position of the object SurfacePos surfPos = new SurfacePos(su.side, su.u + u, su.v + v); //convert the surface position and rotation to world position and rotation Vector3 worldPos = UnitConverter.getWP(surfPos, radius, sideLength); Quaternion worldRot = getWorldRot(worldPos, surfRot, su.side); //adjust from a point on the sphere to the correct altitude worldPos = planet.noise.altitudePos(worldPos); //build (instantiate) the actual gameobject MobileObjects wo = Build.buildObject <Rock>(worldPos, worldRot); //wo.setReferences(); wo.Render(); wo.setMesh(mesh); sh.objects.Add(wo); //add it to the surface holder list } } /*int count = rand.Next(30); * * * // MyDebug.placeMarker(UnitConverter.getWP(new SurfacePos(su.side, su.u, su.v), radius, sideLength)); * for(int i = 0; i<count; i++) * { * //Vector3 pos = new Vector3( * //choose random x and y position within the su * //Vector2 surfPos = new Vector2((float)rand.NextDouble(), (float)rand.NextDouble()); * float u = (float)rand.NextDouble(); * float v = (float)rand.NextDouble(); * * //choose random rotation (will not be random for 
things like buildings later) * Quaternion surfRot = Quaternion.Euler(0, (float)rand.NextDouble()*360, 0); * //Debug.Log(surfRot.eulerAngles); * //temp radius of tree used for testing * float wuRadius = 2; * //radius in world units/length of a surface unit = radius in surface units (less than 1) * float suRadius = wuRadius/suLength; * //Debug.Log("suRadius is " + suRadius); * * bool isColliding = false; * foreach(RadialCol oth in radCols) * { * //distance formula (move to struct later) * //if the distance between the two centers minus their radii is less than zero, they are colliding * if(Mathf.Sqrt((oth.u-u)*(oth.u-u)+(oth.v-v)*(oth.v-v))-suRadius-oth.radius<0) * { * isColliding = true; * //Debug.Log("samwell"); * break; * } * } * //for the time being, if something is colliding, just discard it * //later it may be moved slightly or completely repositioned * if(isColliding) * { * continue; * } * * //add this object to the radial collision list * //later, create the RadialCol object initially (replace x, y and suRadius) * radCols.Add(new RadialCol(u,v,suRadius)); * * //surfacepos of the tree * SurfacePos treeSurf = new SurfacePos(su.side, su.u + u, su.v + v); * //Debug.Log (u + " " + v + " " + su.u + " " + su.v); * //convert to world unit and rotation * Vector3 worldPos = UnitConverter.getWP(treeSurf, radius, sideLength); * Quaternion worldRot = getWorldRot(worldPos, surfRot, su.side); * //Debug.Log (treeSurf+ " " + worldPos + " " + surfRot.eulerAngles + " " + worldRot); * * //adjust the pos to the correct altitude, later move to a function * //worldPos = worldPos.normalized*planet.noise.getAltitude(worldPos); * worldPos = planet.noise.altitudePos(worldPos); * * //GameObject.Instantiate(tree, treeWorld, Quaternion.identity); * //build the tree object (adds it to the builtobjects list and maybe eventually add it to the render list) * //buildObject<TestTree>(worldPos, worldRot, sh).init(); * * //build (instantiate) the object * WorldObject wo = Build.buildObject<TestTree>(worldPos, worldRot); * wo.Render(); * sh.objects.Add(wo); //add it to the surface holder list * //wo.init(); //initialize it (normally has parameters) * * }*/ /*GameObject go = Resources.Load("Test things/rottest") as GameObject; * Vector3 pos = UnitConverter.getWP(new SurfacePos(su.side, su.u+0.5f, su.v+0.5f), radius, sideLength); * Quaternion rot = getWorldRot(pos, Quaternion.identity, su.side); * * GameObject.Instantiate(go, pos, rot);*/ curSH = null; //disconnect the reference to avoid possible confusion later/catch errors } //increase the world unit count of the surface holder sh.wuCount++; }
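The commented-out placement loop above keeps a list of (u, v, radius) discs and rejects a new object whenever the distance between two centers minus both radii drops below zero. Below is a minimal sketch of that test moved into the RadialCol struct itself, as the "//distance formula (move to struct later)" note suggests; the Intersects method name is an assumption, and comparing squared distances replaces Mathf.Sqrt without changing the outcome.

public struct RadialCol
{
    public float u, v, radius;

    public RadialCol(float u, float v, float radius)
    {
        this.u = u; this.v = v; this.radius = radius;
    }

    // two discs collide when the distance between their centers is less
    // than the sum of their radii; squared values avoid the square root
    public bool Intersects(RadialCol other)
    {
        float du = other.u - u;
        float dv = other.v - v;
        float r = radius + other.radius;
        return du * du + dv * dv < r * r;
    }
}

The rejection check then reads radCols.Any(oth => oth.Intersects(candidate)) instead of the open-coded distance formula.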
public void surfaceCreated(SurfaceHolder value) { yield_surfaceCreated(); }
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xCallback { onsurfaceCreated = holder => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceCreated(mNativeHandle, holder.getSurface()); mSurfaceHolder = holder; } }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceChanged(mNativeHandle, holder.getSurface()); mSurfaceHolder = holder; } }, onsurfaceDestroyed = holder => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceDestroyed(mNativeHandle); mSurfaceHolder = null; } } }; #endregion mView = new SurfaceView(this); this.setContentView(mView); // E/AndroidRuntime(22718): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.view.ViewParent android.view.View.getParent()' on a null object reference //E/AndroidRuntime(22718): at android.view.ViewGroup.addViewInner(ViewGroup.java:4216) //E/AndroidRuntime(22718): at android.view.ViewGroup.addView(ViewGroup.java:4070) //E/AndroidRuntime(22718): at android.view.ViewGroup.addView(ViewGroup.java:4046) //E/AndroidRuntime(22718): at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:478) //E/AndroidRuntime(22718): at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:459) //E/AndroidRuntime(22718): at android.app.Activity.setContentView(Activity.java:2298) var sw = Stopwatch.StartNew(); #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" text = () => sw.ElapsedMilliseconds + "ms !" }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); Thread.Sleep(1000 / 30); mDraw.postInvalidate(); } } ).Start(); #endregion this.ondispatchTouchEvent = @event => { if (mNativeHandle == 0) { return; } int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(mNativeHandle, action, x, y); // can we move hud around and record it to gif or mp4? 
}; this.ondispatchKeyEvent = @event => { if (mNativeHandle == 0) { return(false); } int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return(base.dispatchKeyEvent(@event)); } if (action == KeyEvent.ACTION_UP) { // keycode 4 mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } GLES3JNILib.onKeyEvent(mNativeHandle, keyCode, action); return(true); }; // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); mView.getHolder().addCallback(xCallback); // Force the screen to stay on, rather than letting it dim and shut off // while the user is watching a movie. // does this disable the face sensor? getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); // Force screen brightness to stay at maximum //WindowManager_LayoutParams _params = getWindow().getAttributes(); //_params.screenBrightness = 1.0f; //getWindow().setAttributes(_params); mNativeHandle = com.oculus.gles3jni.GLES3JNILib.onCreate(this); // can we now overlay something on top of the surface? }
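Several activities in this collection construct an xCallback (elsewhere an xSurfaceHolder_Callback) with object-initializer syntax and register it via getHolder().addCallback(...). The adapter type itself never appears in this listing, so the following is only a sketch of what it plausibly looks like: a SurfaceHolder.Callback implementation that forwards each lifecycle call to an assignable delegate, which is what lets the examples assign lambdas to onsurfaceCreated/onsurfaceChanged/onsurfaceDestroyed.

public class xCallback : SurfaceHolder.Callback // assumed shape, reconstructed from the call sites above
{
    public Action<SurfaceHolder> onsurfaceCreated;
    public Action<SurfaceHolder, int, int, int> onsurfaceChanged;
    public Action<SurfaceHolder> onsurfaceDestroyed;

    public void surfaceCreated(SurfaceHolder holder)
    {
        if (onsurfaceCreated != null) onsurfaceCreated(holder);
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        if (onsurfaceChanged != null) onsurfaceChanged(holder, format, width, height);
    }

    public void surfaceDestroyed(SurfaceHolder holder)
    {
        if (onsurfaceDestroyed != null) onsurfaceDestroyed(holder);
    }
}

The null checks keep any unassigned callback a no-op, matching the trivial surfaceChanged/surfaceDestroyed stubs seen elsewhere in this collection.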
public void surfaceCreated(SurfaceHolder value) { System.Console.WriteLine("surfaceCreated"); yield_surfaceCreated(); }
public void surfaceDestroyed(SurfaceHolder value) { onsurfaceDestroyed(value); }
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) { }
public void surfaceDestroyed(SurfaceHolder value) { }
protected override void onCreate(android.os.Bundle savedInstanceState) { // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160102/x360videos // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151006 base.onCreate(savedInstanceState); nativeOnCreate(this); //setCurrentLanguage(Locale.getDefault().getLanguage()); // Create the SoundPool soundPool = new SoundPool(3 /* voices */, AudioManager.STREAM_MUSIC, 100); soundPoolSoundIds = new java.util.ArrayList<int>(); soundPoolSoundNames = new java.util.ArrayList<string>(); AudioManager audioMgr; audioMgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE); var rate = audioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); var size = audioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); System.Console.WriteLine("rate = " + rate); System.Console.WriteLine("size = " + size); // Check preferences SharedPreferences prefs = getApplicationContext().getSharedPreferences("oculusvr", MODE_PRIVATE); var username = prefs.getString("username", "guest"); System.Console.WriteLine("username = " + username); /* NOTE: the aggregated listing was scrubbed to "******" here; the elided code presumably obtained the launch Intent (intent = getIntent()) and the fromPackageNameString, commandString and uriString values logged below */ System.Console.WriteLine("action:" + intent.getAction()); System.Console.WriteLine("type:" + intent.getType()); System.Console.WriteLine("fromPackageName:" + fromPackageNameString); System.Console.WriteLine("command:" + commandString); System.Console.WriteLine("uri:" + uriString); SurfaceView sv = new SurfaceView(this); setContentView(sv); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xCallback { onsurfaceCreated = holder => { if (appPtr == 0) return; nativeSurfaceCreated(appPtr, holder.getSurface()); mSurfaceHolder = holder; }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appPtr == 0) return; nativeSurfaceChanged(appPtr, holder.getSurface()); mSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { if (appPtr == 0) return; nativeSurfaceDestroyed(appPtr); mSurfaceHolder = holder; } }; #endregion sv.getHolder().addCallback(xCallback); // Force the screen to stay on, rather than letting it dim and shut off // while the user is watching a movie. 
//getWindow().addFlags( WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON ); //// Force screen brightness to stay at maximum //WindowManager.LayoutParams params = getWindow().getAttributes(); //params.screenBrightness = 1.0f; //getWindow().setAttributes( params ); this.ondispatchTouchEvent += (e) => { int action = e.getAction(); float x = e.getRawX(); float y = e.getRawY(); Log.d(TAG, "onTouch dev:" + e.getDeviceId() + " act:" + action + " ind:" + e.getActionIndex() + " @ " + x + "," + y); nativeTouch(appPtr, action, x, y); return true; }; this.ondispatchKeyEvent = (e) => { bool down; int keyCode = e.getKeyCode(); int deviceId = e.getDeviceId(); if (e.getAction() == KeyEvent.ACTION_DOWN) { down = true; } else if (e.getAction() == KeyEvent.ACTION_UP) { down = false; } else { Log.d(TAG, "source " + e.getSource() + " action " + e.getAction() + " keyCode " + keyCode); return base.dispatchKeyEvent(e); } Log.d(TAG, "source " + e.getSource() + " keyCode " + keyCode + " down " + down + " repeatCount " + e.getRepeatCount()); if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) { if (down) { adjustVolume(1); } return true; } if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) { if (down) { adjustVolume(-1); } return true; } // Joypads will register as keyboards, but keyboards won't // register as position classes // || event.getSource() != 16777232) // Volume buttons are source 257 if (e.getSource() == 1281) { // do we have one we can test with? //keyCode |= JoyEvent.BUTTON_JOYPAD_FLAG; } return buttonEvent(deviceId, keyCode, down, e.getRepeatCount()); }; }
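The volume-key branches above delegate to an adjustVolume helper whose body is not part of this listing. Here is a minimal sketch, assuming it simply nudges STREAM_MUSIC through the same AudioManager service fetched in onCreate; a positive direction raises and a negative one lowers, and the zero flags value (no system volume UI) is an assumption.

void adjustVolume(int direction)
{
    var audioMgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
    audioMgr.adjustStreamVolume(
        AudioManager.STREAM_MUSIC,
        direction > 0 ? AudioManager.ADJUST_RAISE : AudioManager.ADJUST_LOWER,
        0 /* no FLAG_SHOW_UI etc. */);
}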
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); Console.WriteLine("enter OVRVrCubeWorldSurfaceViewX onCreate"); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xCallback { onsurfaceCreated = holder => { //Console.WriteLine("enter onsurfaceCreated " + new { appThread }); if (appThread == 0) { return; } appThread.onSurfaceCreated(holder.getSurface()); mSurfaceHolder = holder; //Console.WriteLine("exit onsurfaceCreated " + new { appThread }); }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appThread == 0) { return; } appThread.onSurfaceChanged(holder.getSurface()); mSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { if (appThread == 0) { return; } appThread.onSurfaceDestroyed(); mSurfaceHolder = null; } }; #endregion mView = new SurfaceView(this); this.setContentView(mView); var sw = Stopwatch.StartNew(); #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => sw.ElapsedMilliseconds + "ms " + GLES3JNILib.stringFromJNI() }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); Thread.Sleep(1000 / 30); mDraw.postInvalidate(); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) { return; } int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } appThread.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? }; #endregion #region ondispatchKeyEvent this.ondispatchKeyEvent = @event => { if (appThread == 0) { return(false); } int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return(base.dispatchKeyEvent(@event)); } if (action == KeyEvent.ACTION_UP) { // keycode 4 mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } appThread.onKeyEvent(keyCode, action); return(true); }; #endregion addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); mView.getHolder().addCallback(xCallback); getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this); Console.WriteLine("after OVRVrCubeWorldSurfaceViewX onCreate, attach the headset!"); }
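Every HUD example in this collection instantiates DrawOnTop with a text delegate, repositions it through x/y, recolors it via color/alpha, and repaints with postInvalidate(), but the class body itself is never shown. The following is a minimal sketch consistent with those call sites, assuming DrawOnTop is a plain android.view.View that paints the delegate's current value in onDraw; the text size and line spacing are arbitrary choices here.

public class DrawOnTop : android.view.View
{
    public Func<string> text = () => "";
    public int x = 100, y = 100;
    public int color = android.graphics.Color.GREEN;
    public int alpha = 255;

    public DrawOnTop(Context context)
        : base(context)
    {
    }

    protected override void onDraw(android.graphics.Canvas canvas)
    {
        var paint = new android.graphics.Paint();
        paint.setColor(color);
        paint.setAlpha(alpha);
        paint.setTextSize(32);

        // the examples feed multi-line strings, so draw one line per '\n'
        var lines = text().Split('\n');
        for (int i = 0; i < lines.Length; i++)
            canvas.drawText(lines[i], x, y + i * 36, paint);

        base.onDraw(canvas);
    }
}

Because the examples call postInvalidate() from background threads, it (rather than invalidate()) is what safely schedules the next onDraw on the UI thread.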