// Clipboard-over-UDP receiver activity.
// Workflow notes (original): connect device via usb, enable wifi, then
//   adb tcpip 5555 ; adb connect <device-ip>:5555
// References:
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151212/androidudpclipboard
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
// http://stackoverflow.com/questions/17513502/support-for-multi-window-app-development

// Builds a minimal UI (button + edit text in a scrollable LinearLayout), then joins
// multicast group 239.1.2.3:49814 on every non-loopback IPv4 interface. Each received
// datagram is decoded as UTF-8 and pushed to the system clipboard, shown on the button,
// and surfaced as an ongoing notification (with vibration).
protected override void onCreate(global::android.os.Bundle savedInstanceState)
{
    // http://www.dreamincode.net/forums/topic/130521-android-part-iii-dynamic-layouts/
    base.onCreate(savedInstanceState);

    var sv = new ScrollView(this);
    var ll = new LinearLayout(this);
    ll.setOrientation(LinearLayout.VERTICAL);
    sv.addView(ll);

    var b = new Button(this);
    b.setText("Vibrate!");

    // Stopwatch doubles as a source of (mostly) unique notification ids below.
    var sw = Stopwatch.StartNew();

    // cleanup cancels the previously posted notification; starts as a no-op.
    Action cleanup = delegate { };

    // Last posted Notification — reused on subsequent updates instead of re-posting.
    Notification reuse = null;

    var notificationIntent = new Intent(this, typeof(ApplicationActivity).ToClass());
    var contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0);

    // Publishes `value` to UI + notification + system clipboard. Safe to call from
    // any thread — the body is marshalled via runOnUiThread.
    Action<string> SetClipboard = value =>
    {
        Console.WriteLine("SetClipboard " + new { value });

        this.runOnUiThread(
            delegate
            {
                cleanup();

                b.setText(value);

                // Fast path: update the existing notification in place.
                if (reuse != null)
                {
                    // NOTE(review): setLatestEventInfo was removed in API 23 —
                    // this code targets an older Android API level.
                    reuse.setLatestEventInfo(
                        this,
                        contentTitle: value,
                        contentText: "",
                        contentIntent: contentIntent);

                    return;
                }

                var xNotificationManager = (NotificationManager)this.getSystemService(Activity.NOTIFICATION_SERVICE);

                // see http://developer.android.com/reference/android/app/Notification.html
                var xNotification = new Notification(
                    //android.R.drawable.ic_dialog_alert,
                    android.R.drawable.ic_menu_view,
                    tickerText: value,
                    when: 0 //java.lang.System.currentTimeMillis()
                );

                // flags = Notification.FLAG_ONGOING_EVENT
                // NOTE(review): declared but never applied (the |= line stayed commented out).
                var FLAG_ONGOING_EVENT = 0x00000002;
                //xNotification.flags |= FLAG_ONGOING_EVENT;

                xNotification.setLatestEventInfo(
                    this,
                    contentTitle: value,
                    contentText: "",
                    contentIntent: contentIntent);

                // http://androiddrawableexplorer.appspot.com/
                // Elapsed ms since activity start — assumed unique enough per notification.
                var id = (int)sw.ElapsedMilliseconds;
                xNotificationManager.notify(id, xNotification);

                var xVibrator = (Vibrator)this.getSystemService(Context.VIBRATOR_SERVICE);
                xVibrator.vibrate(600);

                #region setPrimaryClip
                android.content.ClipboardManager clipboard = (android.content.ClipboardManager)getSystemService(CLIPBOARD_SERVICE);
                ClipData clip = ClipData.newPlainText("label", value);
                clipboard.setPrimaryClip(clip);
                #endregion

                reuse = xNotification;

                cleanup += delegate
                {
                    // https://developer.android.com/reference/android/app/Notification.html
                    if (xNotification == null)
                    {
                        return;
                    }

                    xNotificationManager.cancel(id);
                };
            }
        );
    };

    b.AtClick(
        delegate
        {
            SetClipboard("hello");
        }
    );

    // Z:\jsc.svn\examples\c\android\Test\TestNDKUDP\TestNDKUDP\xNativeActivity.cs
    #region lets listen to incoming udp
    #region ReceiveAsync
    // Per-interface receive loop. Note: async void semantics via Action — exceptions
    // thrown inside will not be observed by the caller. Loops forever.
    Action<IPAddress> f = async nic =>
    {
        b.setText("awaiting at " + nic);

        // A multicast lock is required on Android for the wifi stack to deliver
        // multicast datagrams; never released here (held for activity lifetime).
        WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
        var lo = wifi.createMulticastLock("udp:49814");
        lo.acquire();

        // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        var uu = new UdpClient(49814);
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            var data = Encoding.UTF8.GetString(x.Buffer);

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            SetClipboard(data);
        }
    };

    // WithEach defined at?
    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4 only, skip loopback, require multicast support
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                    {
                        if (n.SupportsMulticast)
                        {
                            f(ip.Address);
                        }
                    }
                }
            }
        }
    );
    #endregion
    #endregion

    // jsc could pass this ptr to ctor for context..
    var t = new EditText(this) { };
    t.AttachTo(ll);

    ll.addView(b);
    this.setContentView(sv);

    //this.ShowLongToast("http://my.jsc-solutions.net x");
}
// OVRWindWheelActivity entry point (GearVR NDK app).
//
// Sets up:
//  - SurfaceHolder callbacks that forward surface lifecycle to native code (GLES3JNILib);
//  - four UDP multicast listeners on 239.1.2.3 (pen pressure :40094, head parallax :43834,
//    mouse/WASDC :41814, vertex transform :40014) that write into the shared `args` object;
//  - a HUD overlay (DrawOnTop) plus a background thread that polls native state via
//    GLES3JNILib.stringFromJNI and recolors the HUD;
//  - touch/key dispatch forwarding to native, and pause/resume handlers that also
//    manage the camera torch (`nogc` holds the open camera).
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    Console.WriteLine("enter OVRWindWheelActivity onCreate");

    // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    // Forwards surface lifecycle events to the native side; appThread == 0 means
    // the native app was never created (or was torn down), so events are dropped.
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });

            if (appThread == 0)
            {
                return;
            }

            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);
            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
            {
                return;
            }

            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            if (appThread == 0)
            {
                return;
            }

            // I/DEBUG: ...libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;
            //appThread = 0;

            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion

    // https://github.com/dalinaum/TextureViewDemo
    // TextureView semi-translucent by calling myView.setAlpha(0.5f). should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ

    // Captured by the UDP loops below; set/cleared in AtPause/AtResume. While paused,
    // incoming datagrams are discarded.
    var ActivityPaused = true;

    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
    // Multicast lock so wifi delivers multicast; held for the activity's lifetime.
    WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
    var lo = wifi.createMulticastLock("vrudp");
    lo.acquire();

    #region ReceiveAsync
    // https://www.youtube.com/watch?v=GpmKq_qg3Tk
    // Queue of pen-stroke draw commands replayed onto the HUD canvas each frame.
    var HUDStylusList = new List<Action<android.graphics.Canvas>>();

    // http://uploadvr.com/vr-hmd-specs/
    // Replays buffered stroke segments; oldest entries are dropped past 1024.
    Action<android.graphics.Canvas> HUDStylus = canvas =>
    {
        // so cool. we get to use pen in vr!
        while (HUDStylusList.Count > 1024)
        {
            HUDStylusList.RemoveAt(0);
        }

        foreach (var item in HUDStylusList)
        {
            item(canvas);
        }
    };

    #region fUDPPressure
    // Pen pressure/position listener (:40094). Datagram layout (little-endian singles):
    // x, y, pressure — see the BinaryReader below.
    Action<IPAddress> fUDPPressure = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
        var uu = new UdpClient(40094);

        // previous segment endpoint, so consecutive packets draw connected strokes
        var oldx = 0f;
        var oldy = 0f;

        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            var ux = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input while paused
            if (ActivityPaused)
            {
                continue;
            }

            var m = new BinaryReader(new MemoryStream(ux.Buffer));

            // scale/offset raw coordinates into HUD space — constants are tuned for
            // a 2560x1440 (dual 1280) layout; TODO confirm source coordinate range.
            var x0 = m.ReadSingle();
            var x = 200 + x0 * 0.1f;

            var y0 = m.ReadSingle();
            var y = 1200 - y0 * 0.1f;

            var pressure = m.ReadSingle();

            // Capture this segment's values into a closure drawn later by HUDStylus.
            new { x, y, oldx, oldy, pressure }.With(
                segment =>
                {
                    var paint = new android.graphics.Paint();

                    HUDStylusList.Add(
                        canvas =>
                        {
                            // stroke width grows quadratically with pressure (0..255)
                            paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));
                            paint.setStyle(android.graphics.Paint.Style.STROKE);

                            // yellow while pen touches, red for hover trail
                            if (pressure > 0)
                            {
                                paint.setColor(android.graphics.Color.YELLOW);
                            }
                            else
                            {
                                paint.setColor(android.graphics.Color.RED);
                            }

                            // draw once per eye: left half and offset right half (2560/2)
                            canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);
                            canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                        }
                    );
                }
            );

            oldx = x;
            oldy = y;

            args.pen = new { x, y, pressure }.ToString();
        }
    };
    #endregion

    #region fParallax
    // Head parallax listener (:43834). Payload is UTF-8 "label:px:py:pz". Also turns on
    // the camera torch while a signal is present (camera kept in `nogc`; released in AtPause).
    Action<IPAddress> fParallax = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(43834);

        args.parallax = "awaiting Parallax at " + nic + " :43834";

        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            // discard input while paused
            if (ActivityPaused)
            {
                continue;
            }

            // while we have the signal turn on torch
            #region await webcam feed
            if (nogc == null)
            {
                var camera = android.hardware.Camera.open();
                android.hardware.Camera.Parameters p = camera.getParameters();
                p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                camera.setParameters(p);
                camera.startPreview();

                nogc = camera;
            }
            #endregion

            args.parallax = Encoding.UTF8.GetString(x.Buffer);

            var xy = args.parallax.Split(':');

            // fields [1..3] are parallax x/y/z; [0] is assumed to be a label — TODO confirm
            args.px = float.Parse(xy[1]);
            args.py = float.Parse(xy[2]);
            args.pz = float.Parse(xy[3]);
        }
    };
    #endregion

    #region fWASDC
    // Mouse + WASDC key-state listener. Payload is UTF-8
    // "mousex:mousey:ad:ws:c:mousebutton:mousewheel" (ints).
    var fWASDCport = 41814;
    Action<IPAddress> fWASDC = async nic =>
    {
        var uu = new UdpClient(fWASDCport);

        args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;

        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            // NOTE(review): unlike the other listeners, this one does NOT check
            // ActivityPaused before applying input.
            args.mouse = Encoding.UTF8.GetString(x.Buffer);

            var xy = args.mouse.Split(':');

            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion

    #region fvertexTransform
    // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
    // Raw vertex-transform bytes (:40014) stored as-is for the native side to consume.
    Action<IPAddress> fvertexTransform = async nic =>
    {
        var uu = new UdpClient(40014);

        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            args.vertexTransform = x.Buffer;
        }
    };
    #endregion

    // Start every listener on every non-loopback IPv4 multicast-capable interface.
    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                    {
                        if (n.SupportsMulticast)
                        {
                            fUDPPressure(ip.Address);
                            fWASDC(ip.Address);
                            fParallax(ip.Address);
                            fvertexTransform(ip.Address);
                        }
                    }
                }
            }
        }
    );
    #endregion

    var sw = Stopwatch.StartNew();

    // can we draw on back?
    #region mDraw
    var mDraw = new DrawOnTop(this)
    {
        // yes it appears top left.
        // HUD status text: uptime, native heap usage, latest UDP state, head orientation.
        text = () =>
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
            //if (args.total_allocated_space > 48 * 1024 * 1024) this.recreate();

            return (sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n"
                + new { vertexTransform = args.vertexTransform.Length } + "\n"
                + args.mouse + "\n"
                + args.parallax + "\n"
                + args.vertexTransform.Length + "bytes udp\n"
                + new { args.pen } + "\n"
                + new
                {
                    args.px,
                    args.py,
                    args.pz,

                    // nod up +0.7 down -0.7
                    ox = args.tracking_HeadPose_Pose_Orientation_x,

                    // -0.7 right +0.7 left
                    oy = args.tracking_HeadPose_Pose_Orientation_y

                    // tilt right -0.7 tilt left +0.7
                    //oz = args.tracking_HeadPose_Pose_Orientation_z
                }.ToString().Replace(",", "\n"));
        }
    };

    // Replacement HUD text shown when the native heap crosses the safe-mode limit.
    Func<string> safemode = () =>
    {
        return (sw.ElapsedMilliseconds + "ms \n"
            + args.total_allocated_space + " bytes \n"
            + "GC safe mode / malloc limit..");
    };

    mDraw.AtDraw = canvas =>
    {
        // Reference cross lines (one per eye) + replay pen strokes.
        {
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(16);
            paint.setStyle(android.graphics.Paint.Style.STROKE);
            paint.setColor(android.graphics.Color.RED);

            canvas.drawLine(0, 0, 400, 400, paint);
            canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint);

            HUDStylus(canvas);
        }

        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay
        // Visual tracking telltales: orientation encoded as a color block.
        {
            // https://code.google.com/p/android/issues/detail?id=4086
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

            // map orientation x from [-0.7, +0.7] onto the 24-bit RGB range
            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4);

            paint.setColor((int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16, 0, 32, 32, paint);
        }

        {
            // same encoding for orientation y, drawn as a wider block
            var paint = new android.graphics.Paint();

            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4);

            paint.setColor((int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16 + 64, 0, 320, 32, paint);
        }
    };

    // Background poll thread: pulls native state into `args` and recolors the HUD
    // at ~60 Hz based on memory pressure / input / tracking edge proximity.
    new Thread(
        delegate()
        {
            while (true)
            {
                GLES3JNILib.stringFromJNI(args);

                // http://developer.android.com/reference/android/graphics/Color.html
                if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024)
                {
                    // over the malloc limit — switch HUD to safe-mode warning
                    mDraw.color = android.graphics.Color.RED;
                    mDraw.alpha = 255;
                    mDraw.text = safemode;

                    // goto secondary activity?
                }
                else if (args.mousebutton != 0)
                {
                    mDraw.color = android.graphics.Color.YELLOW;
                    mDraw.alpha = 255;
                }
                else
                {
                    mDraw.color = android.graphics.Color.GREEN;

                    // not leaning in?
                    if (args.pz < 0)
                    {
                        mDraw.color = android.graphics.Color.WHITE;
                    }

                    // near the base-station tracking volume edge — warn before losing tracking
                    var BaseStationEdgeX = Math.Abs(args.px) > 0.3;
                    var BaseStationEdgeY = Math.Abs(args.py) > 0.3;

                    if (BaseStationEdgeX || BaseStationEdgeY)
                    {
                        // reorient? fade to black?
                        mDraw.color = android.graphics.Color.YELLOW;
                        mDraw.alpha = 255;
                    }
                }

                mDraw.postInvalidate();

                Thread.Sleep(1000 / 60);
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity
            }
        }
    ).Start();
    #endregion

    #region ondispatchTouchEvent
    // Mirrors touch input into `args` (centered coordinates) and forwards to native.
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
        {
            return;
        }

        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();

        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;

            // touch sending int to offset the cubes
            this.args.x = (int)(halfx - x);
            this.args.y = (int)(y - halfy);

            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);
        }

        GLES3JNILib.onTouchEvent(action, x, y);

        // can we move hud around and record it to gif or mp4?
    };
    #endregion

    #region ondispatchKeyEvent
    // Forwards key DOWN/UP to native; anything else goes to the base dispatcher.
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
        {
            return (false);
        }

        int keyCode = @event.getKeyCode();
        int action = @event.getAction();

        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return (base.dispatchKeyEvent(@event));
        }

        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4 = back; debug hook kept empty
        }

        GLES3JNILib.onKeyEvent(keyCode, action);

        return (true);
    };
    #endregion

    // Pause: stop native rendering and release the torch camera (if open).
    AtPause = delegate
    {
        ActivityPaused = true;

        GLES3JNILib.onPause();

        // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
        if (nogc != null)
        {
            var camera = nogc;
            var p = camera.getParameters();

            p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
            camera.setParameters(p);

            camera.stopPreview();
            camera.release();

            nogc = null;
        }
    };

    // Resume: rebuild the surface view + HUD overlay and restart native rendering.
    // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
    AtResume = delegate
    {
        ActivityPaused = false;

        // You must ensure that the drawing thread only touches the underlying
        // Surface while it is valid.
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);

        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));

        this.xSurfaceView.getHolder().addCallback(xCallback);

        GLES3JNILib.onResume();
    };

    // sometimes system wants to try to black the screen it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);

    // Create the native app thread last; non-zero handle gates all callbacks above.
    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);

    Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");
}
// Oculus 360Videos activity entry point.
//
// Marshals startup data to native, enumerates /sdcard/oculus/360Videos/ for .mp4 files,
// wires the Oculus VrActivity native app interface, then listens on multicast
// 239.1.2.3:39814 — each datagram is hex-encoded and used as a key into
// startMovieLookup to trigger playback of a specific movie.
protected override void onCreate(global::android.os.Bundle savedInstanceState)
{
    // https://forums.oculus.com/viewtopic.php?f=67&t=27999
    // NOTE(review): native marshal happens before base.onCreate — order is intentional
    // in the original; do not reorder without verifying against the native side.
    var stringFromJNI = xMarshal.stringFromJNI(this);
    Console.WriteLine(new { stringFromJNI });

    // static const char * videosDirectory = "Oculus/360Videos/";
    var mp4 = from pf in new DirectoryInfo("/sdcard/oculus/360Videos/").GetFiles()
              where pf.Extension.ToLower() == ".mp4"
              select pf;

    var mp4count = mp4.Count();
    Console.WriteLine(new { mp4count });

    foreach (var item in mp4)
    {
        Console.WriteLine(item.FullName);
    }

    // Referencing these types/instances forces jsc to include the Oculus support
    // packages (sound pooler, volume receiver, locale) in the build.
    //[javac] error: SoundPooler is not public in com.oculus.sound — hence the x* wrappers
    var refVolumeReceiver = new com.oculus.vrgui.xVolumeReceiver { };
    var refSoundPooler = new com.oculus.sound.xSoundPooler { };
    var refVrLocale = typeof(global::com.oculus.vrlocale.VrLocale);
    var refSystemActivities = typeof(global::com.oculus.systemutils.SystemActivities);

    Console.WriteLine("enter onCreate " + new { refSystemActivities, refSoundPooler, refVrLocale, refVolumeReceiver });

    base.onCreate(savedInstanceState);

    // Standard Oculus VrActivity bootstrapping: recover launch parameters from the
    // intent and hand them to the native app interface.
    Intent intent = getIntent();

    String commandString = com.oculus.vrappframework.VrActivity.getCommandStringFromIntent(intent);
    String fromPackageNameString = com.oculus.vrappframework.VrActivity.getPackageStringFromIntent(intent);
    String uriString = com.oculus.vrappframework.VrActivity.getUriStringFromIntent(intent);

    Console.WriteLine("onCreate " + new { fromPackageNameString, commandString, uriString });

    var p = com.oculus.oculus360videossdk.MainActivity.nativeSetAppInterface(this, fromPackageNameString, commandString, uriString);
    base_setAppPtr(p);

    audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);

    #region lets listen to incoming udp
    #region ReceiveAsync
    // Per-interface receive loop; runs forever. Exceptions are unobserved (async Action).
    Action<IPAddress> f = async nic =>
    {
        // Android requires a multicast lock for wifi multicast delivery; never released.
        WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
        var lo = wifi.createMulticastLock("udp:39814");
        lo.acquire();

        // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs
        var uu = new UdpClient(39814);
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

        while (true)
        {
            var x = await uu.ReceiveAsync(); // did we jump to ui thread?

            // Payload bytes hex-encoded form the lookup key — presumably an MD5
            // of the movie identifier, given the variable name; TODO confirm sender.
            var md5string = x.Buffer.ToHexString();
            var lookup = startMovieLookup.ContainsKey(md5string);

            Console.WriteLine(new { md5string, lookup });

            if (lookup)
            {
                // this wont work if we are paused.
                this.startMovieFromUDP(
                    startMovieLookup[md5string]
                );
            }
        }
    };

    // Start the listener on every non-loopback IPv4 multicast-capable interface.
    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();

            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                    {
                        if (n.SupportsMulticast)
                        {
                            f(ip.Address);
                        }
                    }
                }
            }
        }
    );
    #endregion
    #endregion
}
// LANBroadcastListener demo activity.
//
// UI: one button that sends a UDP datagram to multicast 239.1.2.3:40404 (via java.net,
// on a worker thread to avoid NetworkOnMainThreadException), plus a second dummy button.
// Background: a "client" thread that joins the same multicast group with a
// java.net.MulticastSocket and logs every datagram received.
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    var sv = new ScrollView(this);
    var b2 = new Button(this);

    {
        var ll = new LinearLayout(this);
        //ll.setOrientation(LinearLayout.VERTICAL);
        sv.addView(ll);

        var b1 = new Button(this).AttachTo(ll);
        b1.WithText("LANBroadcastListener createMulticastLock");

        // click counter, embedded in each sent message
        var c = 0;

        b1.AtClick(
            v =>
            {
                // Must run off the UI thread — sending directly raised
                // android.os.NetworkOnMainThreadException:
                //   at libcore.io.IoBridge.sendto(IoBridge.java:473)
                //   at java.net.DatagramSocket.send(DatagramSocket.java:284)
                new Thread(
                    delegate()
                    {
                        try
                        {
                            // datagram socket bound to an ephemeral local port
                            var socket = new DatagramSocket();

                            c++;

                            var b = Encoding.UTF8.GetBytes(c + " hi from jvm!");

                            // java.net takes sbyte[]; the double cast reinterprets the
                            // byte[] without copying (jsc interop idiom)
                            var dgram = new DatagramPacket((sbyte[])(object)b, b.Length, InetAddress.getByName("239.1.2.3"), 40404);

                            socket.send(dgram);
                        }
                        catch (Exception ex)
                        {
                            System.Console.WriteLine("server error " + new { ex.Message, ex.StackTrace });
                        }
                    }
                ) { Name = "sender" }.Start();
            }
        );

        b2.setText("The other button!");
        ll.addView(b2);

        this.setContentView(sv);
    }

    // Receiver thread.
    // http://www.zzzxo.com/q/answers-android-device-not-receiving-multicast-package-13221736.html
    // http://stackoverflow.com/questions/12610415/multicast-receiver-malfunction
    // http://answers.unity3d.com/questions/250732/android-build-is-not-receiving-udp-broadcasts.html
    new Thread(
        delegate()
        {
            // Acquire multicast lock — required on Android for wifi multicast RX.
            // `wifi` and `multicastLock` are fields; the lock is held indefinitely.
            wifi = (WifiManager)getSystemService(Context.WIFI_SERVICE);
            multicastLock = wifi.createMulticastLock("multicastLock");
            //multicastLock.setReferenceCounted(true);
            multicastLock.acquire();

            System.Console.WriteLine("LANBroadcastListener ready...");

            try
            {
                // fixed 256-byte receive buffer; longer datagrams are truncated
                byte[] b = new byte[0x100];

                // https://code.google.com/p/android/issues/detail?id=40003
                var port = 40404;

                // must bind receive side
                MulticastSocket socket = new MulticastSocket(port);
                socket.setBroadcast(true);
                socket.setReuseAddress(true);
                socket.setTimeToLive(30);
                socket.setReceiveBufferSize(0x100);

                // http://stackoverflow.com/questions/6550618/multicast-support-on-android-in-hotspot-tethering-mode
                var group = InetAddress.getByName("239.1.2.3");
                var groupSockAddr = new InetSocketAddress(group, port);

                // NOTE(review): interface is hard-coded to "wlan0" — this is
                // java.net.NetworkInterface here, not the BCL type used elsewhere.
                socket.joinGroup(groupSockAddr,
                    NetworkInterface.getByName("wlan0")
                );

                System.Console.WriteLine("LANBroadcastListener joinGroup...");

                while (true)
                {
                    DatagramPacket dgram = new DatagramPacket((sbyte[])(object)b, b.Length);

                    socket.receive(dgram); // blocks until a datagram is received

                    var bytes = new MemoryStream((byte[])(object)dgram.getData(), 0, dgram.getLength());
                    var listen = Encoding.UTF8.GetString(bytes.ToArray());

                    System.Console.WriteLine("Received " + dgram.getLength() + " bytes from " + dgram.getAddress());

                    //dgram.setLength(b.Length); // must reset length field!
                }
            }
            catch
            {
                // best-effort demo: any failure just ends the listener thread
                System.Console.WriteLine("client error");
            }
        }
    ) { Name = "client" }.Start();
}