public _OnPreDrawListener_166(SurfaceView _enclosing) { this._enclosing = _enclosing; }
public _SurfaceHolder_682(SurfaceView _enclosing) { this._enclosing = _enclosing; }
public _Handler_113(SurfaceView _enclosing) { this._enclosing = _enclosing; }
public _OnScrollChangedListener_131(SurfaceView _enclosing) { this._enclosing = _enclosing; }
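// The four one-line constructors above appear to be jsc-generated inner-class
// stubs; each one merely captures the enclosing SurfaceView instance.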
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); Console.WriteLine("enter OVRVrCubeWorldSurfaceViewX onCreate"); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xCallback { onsurfaceCreated = holder => { //Console.WriteLine("enter onsurfaceCreated " + new { appThread }); if (appThread == 0) return; appThread.onSurfaceCreated(holder.getSurface()); mSurfaceHolder = holder; //Console.WriteLine("exit onsurfaceCreated " + new { appThread }); }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appThread == 0) return; appThread.onSurfaceChanged(holder.getSurface()); mSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { if (appThread == 0) return; appThread.onSurfaceDestroyed(); mSurfaceHolder = null; } }; #endregion mView = new SurfaceView(this); this.setContentView(mView); var sw = Stopwatch.StartNew(); #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => sw.ElapsedMilliseconds + "ms " + GLES3JNILib.stringFromJNI() }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); Thread.Sleep(1000 / 30); mDraw.postInvalidate(); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) return; int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } appThread.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? }; #endregion #region ondispatchKeyEvent this.ondispatchKeyEvent = @event => { if (appThread == 0) return false; int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return base.dispatchKeyEvent(@event); } if (action == KeyEvent.ACTION_UP) { // keycode 4 mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } appThread.onKeyEvent(keyCode, action); return true; }; #endregion addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); mView.getHolder().addCallback(xCallback); getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this); Console.WriteLine("after OVRVrCubeWorldSurfaceViewX onCreate, attach the headset!"); }
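// DrawOnTop is referenced throughout these activities but its source is not
// part of this section. Below is a minimal sketch of what such a HUD view
// could look like, inferred from the call sites (text, x, y, color, alpha,
// AtDraw, postInvalidate); it is an illustration, not the original class:
public class DrawOnTop : android.view.View
{
    public Func<string> text = () => "";
    public int x = 500;
    public int y = 600;
    public int color = android.graphics.Color.GREEN;
    public int alpha = 80;
    public Action<android.graphics.Canvas> AtDraw;

    public DrawOnTop(android.content.Context context)
        : base(context)
    {
    }

    protected override void onDraw(android.graphics.Canvas canvas)
    {
        base.onDraw(canvas);

        var paint = new android.graphics.Paint();
        paint.setColor(color);
        paint.setAlpha(alpha);
        paint.setTextSize(24);

        // postInvalidate() from the worker threads above schedules this
        // onDraw back onto the UI thread; render the HUD text line by line
        var lines = text().Split('\n');
        for (int i = 0; i < lines.Length; i++)
            canvas.drawText(lines[i], x, y + i * 24, paint);

        if (AtDraw != null)
            AtDraw(canvas);
    }
}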
public static File InternalTakePicture(int num = 0) { var DIRECTORY_DCIM = global::android.os.Environment.DIRECTORY_DCIM; var path = global::android.os.Environment.getExternalStoragePublicDirectory(DIRECTORY_DCIM).getAbsolutePath(); path += "/Camera"; //var SAVE_PATH = android.os.Environment.getExternalStoragePublicDirectory( // android.os.Environment.DIRECTORY_PICTURES //) + "/"; var n = DateTime.Now; var f = new File(path + "/shot" + n.Ticks + ".jpg"); //I/System.Console(31472): enter TakePicture //W/CameraService( 128): CameraService::connect X (pid 31472) rejected (existing client). //I/System.Console(31472): error takePicture { Message = Fail to connect to camera service, StackTrace = java.lang.RuntimeException: Fail to connect to camera service //I/System.Console(31472): at android.hardware.Camera.native_setup(Native Method) //I/System.Console(31472): at android.hardware.Camera.<init>(Camera.java:340) //I/System.Console(31472): at android.hardware.Camera.open(Camera.java:302) var camera = android.hardware.Camera.open(num); // W/CameraService( 128): CameraService::connect X (pid 2499) rejected (existing client). //D/dalvikvm( 2499): GC_CONCURRENT freed 873K, 12% free 7525K/8544K, paused 4ms+4ms, total 59ms //D/dalvikvm( 2499): WAIT_FOR_CONCURRENT_GC blocked 14ms //I/System.Console( 2499): error takePicture { Message = Fail to connect to camera service, StackTrace = java.lang.RuntimeException: Fail to connect to camera service //I/System.Console( 2499): at android.hardware.Camera.native_setup(Native Method) //I/System.Console( 2499): at android.hardware.Camera.<init>(Camera.java:340) //I/System.Console( 2499): at android.hardware.Camera.open(Camera.java:302) //I/System.Console( 2499): at CameraExperiment.foo.InternalTakePicture(foo.java:65) var p = camera.getParameters(); p.setRotation(0); //camera.stopFaceDetection(); var s = p.getSupportedPictureSizes(); var min = default(android.hardware.Camera.Size); for (int i = 0; i < s.size(); i++) { var size = (android.hardware.Camera.Size)s.get(i); // I/System.Console( 6058): before takePicture { f = /mnt/sdcard/Pictures/shot.jpg } //I/System.Console( 6058): { size = android.hardware.Camera$Size@4fde180 } System.Console.WriteLine(new { size.width, size.height }); if (min == null) min = size; else if (min.width > size.width) min = size; } System.Console.WriteLine("before setPictureSize "); p.setPictureSize(min.width, min.height); //E/CameraHardwareSec( 84): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): unmatched focus_mode(continuous-picture) //E/CameraHardwareSec( 84): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode var focusModes = p.getSupportedFocusModes(); var NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_FIXED; for (int i = 0; i < focusModes.size(); i++) { var focusMode = (string)focusModes.get(i); if (focusMode == android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY) NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY; System.Console.WriteLine(new { focusMode }); } // I/System.Console(31232): before setPictureSize //I/System.Console(31232): { focusMode = fixed } //I/System.Console(31232): before setFocusMode //E/NvOmxCameraSettingsParser( 128): Failed substring capabilities check, unsupported parameter: 'infinity', original: fixed //E/NvOmxCameraSettingsParser( 128): extractChanges: Invalid parameter! 
//E/NvOmxCamera( 128): setParameters: Invalid parameters //I/System.Console(31232): error takePicture { Message = setParameters failed, StackTrace = java.lang.RuntimeException: setParameters failed // { focusMode = auto } // { focusMode = infinity } // { focusMode = macro } // before setFocusMode //9): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): unmatched focus_mode(fixed) //9): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode // error takePicture { Message = setParameters failed, StackTrace = java.lang.RuntimeException: setParameters failed // at android.hardware.Camera.native_setParameters(Native Method) // at android.hardware.Camera.setParameters(Camera.java:950) // at CameraExperiment.foo.InternalTakePicture(foo.java:105) // E/SecCamera( 84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800005b (91), value = 0) failed ret = -1 //E/SecCamera( 84): ERR(int android::SecCamera::setFaceDetect(int)):Fail on V4L2_CID_CAMERA_FACE_DETECTION //E/SecCamera( 84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x8000063 (99), value = 6) failed ret = -1 //E/SecCamera( 84): ERR(int android::SecCamera::setFocusMode(int)):Fail on V4L2_CID_CAMERA_FOCUS_MODE //E/CameraHardwareSec( 84): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): mSecCamera->setFocusMode(6) fail //E/CameraHardwareSec( 84): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode //E/SecCamera( 84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800006c (108), value = 1) failed ret = -1 //E/SecCamera( 84): ERR(int android::SecCamera::setBatchReflection()):Fail on V4L2_CID_CAMERA_BATCH_REFLECTION //E/CameraHardwareSec( 84): ERR(virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&)):Fail on mSecCamera->setBatchCmd System.Console.WriteLine("before setFocusMode " + new { NextFocus }); //p.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY); p.setFocusMode(NextFocus); // E/SecCamera( 84): ERR(int android::fimc_poll(pollfd*)):No data in 10 secs.. //I/ShotSingle( 84): CAMERA_MSG_COMPRESSED_IMAGE camera.setParameters(p); // http://stackoverflow.com/questions/9744790/android-possible-to-camera-capture-without-a-preview var b = new EventWaitHandle(false, EventResetMode.ManualReset); System.Console.WriteLine("before startPreview "); Action done = delegate { }; try { // #5 java.lang.RuntimeException: Can't create handler inside thread that has not called Looper.prepare() (ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext as Activity).With( aa => { aa.runOnUiThread( new f { y = delegate { try { // D/Camera ( 2464): app passed NULL surface System.Console.WriteLine("before getHolder "); // the nexus 7 and droid x both don't support the passing of a dummy surfaceview to a camera object. Your response that all camera things must created in the activity is false. I was able to instantiate a camera within a thread by passing it a view just fine. 
// here, the unused surface view and holder var dummy = new SurfaceView(ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext); // missing for android 2.2 //dummy.setScaleX(0f); //dummy.setScaleY(0f); var h = dummy.getHolder(); // http://developer.android.com/reference/android/view/SurfaceHolder.html#SURFACE_TYPE_PUSH_BUFFERS var SURFACE_TYPE_PUSH_BUFFERS = 0x00000003; h.setType(SURFACE_TYPE_PUSH_BUFFERS); h.addCallback( new XSurfaceHolder_Callback { yield_surfaceCreated = delegate { System.Console.WriteLine("at yield_surfaceCreated "); try { camera.setPreviewDisplay(h); camera.startPreview(); System.Console.WriteLine("after startPreview "); b.Set(); } catch { throw; } } } ); //h.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); aa.addContentView(dummy, new android.widget.LinearLayout.LayoutParams( android.widget.LinearLayout.LayoutParams.WRAP_CONTENT, android.widget.LinearLayout.LayoutParams.WRAP_CONTENT ) ); done = delegate { aa.runOnUiThread( new f { y = delegate { // https://groups.google.com/forum/?fromgroups#!topic/android-developers/liph4z9LnFA // how to Orphanize?? dummy.setVisibility(View.GONE); } } ); }; } catch { throw; } } } ); } ); } catch { throw; } b.WaitOne(); //camera.@lock(); var a = new EventWaitHandle(false, EventResetMode.ManualReset); //var b = new EventWaitHandle(false, EventResetMode.ManualReset); // E/SecCamera( 84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800005d (93), value = 1) failed ret = -1 //E/SecCamera( 84): ERR(int android::SecCamera::setAutofocus()):Fail on V4L2_CID_CAMERA_SET_AUTO_FOCUS //E/CameraHardwareSec( 84): ERR(int android::CameraHardwareSec::autoFocusThread()):Fail on mSecCamera->setAutofocus() //System.Console.WriteLine("before autoFocus " + new { f }); //// //camera.autoFocus( // new XAutoFocus // { // yield = delegate // { // System.Console.WriteLine("at autoFocus " + new { f }); // https://github.com/mozilla-b2g/android-device-crespo/blob/master/libcamera/SecCamera.cpp // E/SecCamera( 84): ERR(int android::fimc_poll(pollfd*)):No data in 10 secs.. //I/ShotSingle( 84): CAMERA_MSG_COMPRESSED_IMAGE //D/dalvikvm( 6608): GC_FOR_MALLOC freed 607K, 58% free 2856K/6727K, external 2013K/2108K, paused 18ms //I/dalvikvm-heap( 6608): Grow heap (frag case) to 7.847MB for 614416-byte allocation //D/dalvikvm( 6608): GC_FOR_MALLOC freed 46K, 54% free 3410K/7367K, external 2013K/2108K, paused 13ms //I/System.Console( 6608): enter XCameraPictureCallback { Length = 0 } //I/System.Console( 6608): exit XCameraPictureCallback //for (int i = 0; i < 11; i++) //{ // System.Console.WriteLine("warming up camera machine... " + i); // Thread.Sleep(1000); //} // http://stackoverflow.com/questions/15279911/using-camera-without-preview-or-surface-in-android // http://handycodeworks.com/?p=19 // you are required to call startPreview() first before calling takePicture() System.Console.WriteLine("before takePicture " + new { f }); camera.setErrorCallback( new XErrorCallback { yield = (err, c) => { System.Console.WriteLine(new { err }); } } ); // preview ready? var at_setPreviewCallback = new EventWaitHandle(false, EventResetMode.ManualReset); System.Console.WriteLine("before setPreviewCallback "); // is this of any use? 
camera.setOneShotPreviewCallback( new XCameraPreviewCallback { yield = delegate { at_setPreviewCallback.Set(); } } ); at_setPreviewCallback.WaitOne(); System.Console.WriteLine("after setPreviewCallback "); Thread.Sleep(150); camera.takePicture( null, null, new XCameraPictureCallback { yield = (data, c) => { System.Console.WriteLine("enter XCameraPictureCallback " + new { data.Length }); if (data.Length > 0) { var bmp = BitmapFactory.decodeByteArray(data, 0, data.Length); File directory = new File(path); directory.mkdirs(); ByteArrayOutputStream bytes = new ByteArrayOutputStream(); bmp.compress(Bitmap.CompressFormat.JPEG, 100, bytes); try { f.createNewFile(); FileOutputStream fo = new FileOutputStream(f); fo.write(bytes.toByteArray()); } catch { throw; } } System.Console.WriteLine("exit XCameraPictureCallback"); camera.release(); done(); //[javac] V:\src\CameraExperiment\ApplicationWebService___c__DisplayClass2.java:54: cannot find symbol //[javac] symbol : method Set() //[javac] location: class ScriptCoreLibJava.BCLImplementation.System.Threading.__AutoResetEvent //[javac] this.a.Set(); //[javac] ^ a.Set(); } } ); // I/System.Console( 6264): before takePicture { f = /mnt/sdcard/Pictures/shot.jpg } //I/System.Console( 6264): { width = 2560, height = 1920 } //I/System.Console( 6264): { width = 2560, height = 1536 } //I/System.Console( 6264): { width = 2048, height = 1536 } //I/System.Console( 6264): { width = 2048, height = 1232 } //I/System.Console( 6264): { width = 1600, height = 1200 } //I/System.Console( 6264): { width = 1600, height = 960 } //I/System.Console( 6264): { width = 800, height = 480 } //I/System.Console( 6264): { width = 640, height = 480 } //I/ShotSingle( 84): ShotSingle::takePicture start //I/ShotSingle( 84): ShotSingle::takePicture end //I/System.Console( 6264): after takePicture // } // } //); System.Console.WriteLine("will wait for takePicture to complete ... " + new { f }); a.WaitOne(); return f; }
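// InternalTakePicture blocks on a.WaitOne() until the picture callback has
// written the file, and it attaches the dummy preview surface via
// runOnUiThread, so it should itself be called from a background thread.
// A minimal usage sketch:
new Thread(
    delegate()
    {
        var shot = InternalTakePicture(0);
        System.Console.WriteLine("saved " + shot.getAbsolutePath());
    }
).Start();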
protected override void onCreate(android.os.Bundle savedInstanceState)
{
    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160102/x360videos
    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151006

    base.onCreate(savedInstanceState);

    nativeOnCreate(this);
    //setCurrentLanguage(Locale.getDefault().getLanguage());

    // Create the SoundPool
    soundPool = new SoundPool(3 /* voices */, AudioManager.STREAM_MUSIC, 100);
    soundPoolSoundIds = new java.util.ArrayList<int>();
    soundPoolSoundNames = new java.util.ArrayList<string>();

    AudioManager audioMgr = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
    var rate = audioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    var size = audioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    System.Console.WriteLine("rate = " + rate);
    System.Console.WriteLine("size = " + size);

    // Check preferences
    SharedPreferences prefs = getApplicationContext().getSharedPreferences("oculusvr", MODE_PRIVATE);
    var username = prefs.getString("username", "guest");
    System.Console.WriteLine("username = " + username);

    // (the declarations of intent, fromPackageNameString, commandString and
    // uriString were redacted in this excerpt; getIntent() is the assumed
    // source of the intent being logged below)
    var intent = getIntent();
    System.Console.WriteLine("action:" + intent.getAction());
    System.Console.WriteLine("type:" + intent.getType());
    System.Console.WriteLine("fromPackageName:" + fromPackageNameString);
    System.Console.WriteLine("command:" + commandString);
    System.Console.WriteLine("uri:" + uriString);

    SurfaceView sv = new SurfaceView(this);
    setContentView(sv);

    #region xCallback
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xCallback
    {
        onsurfaceCreated = holder =>
        {
            if (appPtr == 0)
                return;

            nativeSurfaceCreated(appPtr, holder.getSurface());
            mSurfaceHolder = holder;
        },

        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appPtr == 0)
                return;

            nativeSurfaceChanged(appPtr, holder.getSurface());
            mSurfaceHolder = holder;
        },

        onsurfaceDestroyed = holder =>
        {
            if (appPtr == 0)
                return;

            nativeSurfaceDestroyed(appPtr);
            mSurfaceHolder = holder;
        }
    };
    #endregion

    sv.getHolder().addCallback(xCallback);

    // Force the screen to stay on, rather than letting it dim and shut off
    // while the user is watching a movie.
//getWindow().addFlags( WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON ); //// Force screen brightness to stay at maximum //WindowManager.LayoutParams params = getWindow().getAttributes(); //params.screenBrightness = 1.0f; //getWindow().setAttributes( params ); this.ondispatchTouchEvent += (e) => { int action = e.getAction(); float x = e.getRawX(); float y = e.getRawY(); Log.d(TAG, "onTouch dev:" + e.getDeviceId() + " act:" + action + " ind:" + e.getActionIndex() + " @ " + x + "," + y); nativeTouch(appPtr, action, x, y); return true; }; this.ondispatchKeyEvent = (e) => { bool down; int keyCode = e.getKeyCode(); int deviceId = e.getDeviceId(); if (e.getAction() == KeyEvent.ACTION_DOWN) { down = true; } else if (e.getAction() == KeyEvent.ACTION_UP) { down = false; } else { Log.d(TAG, "source " + e.getSource() + " action " + e.getAction() + " keyCode " + keyCode); return base.dispatchKeyEvent(e); } Log.d(TAG, "source " + e.getSource() + " keyCode " + keyCode + " down " + down + " repeatCount " + e.getRepeatCount()); if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) { if (down) { adjustVolume(1); } return true; } if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) { if (down) { adjustVolume(-1); } return true; } // Joypads will register as keyboards, but keyboards won't // register as position classes // || event.getSource() != 16777232) // Volume buttons are source 257 if (e.getSource() == 1281) { // do we have one we can test with? //keyCode |= JoyEvent.BUTTON_JOYPAD_FLAG; } return buttonEvent(deviceId, keyCode, down, e.getRepeatCount()); }; }
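// For reference, the magic numbers in the key handler above decode to
// android.view.InputDevice source flags: 257 == 0x101 == SOURCE_KEYBOARD,
// 1281 == 0x501 == SOURCE_GAMEPAD | SOURCE_KEYBOARD, and the 16777232
// mentioned in the comment == 0x1000010 == SOURCE_JOYSTICK.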
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); Console.WriteLine("enter OVRWindWheelActivity onCreate"); // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/ #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xSurfaceHolder_Callback { onsurfaceCreated = holder => { Console.WriteLine("enter onsurfaceCreated " + new { appThread }); if (appThread == 0) return; // did we use it for float window? //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT); GLES3JNILib.onSurfaceCreated(holder.getSurface()); xSurfaceHolder = holder; //Console.WriteLine("exit onsurfaceCreated " + new { appThread }); }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appThread == 0) return; GLES3JNILib.onSurfaceChanged(holder.getSurface()); xSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset! //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed //Console.WriteLine("enter onsurfaceDestroyed"); if (appThread == 0) return; // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46) GLES3JNILib.onSurfaceDestroyed(); xSurfaceHolder = null; //appThread = 0; // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu } }; #endregion // https://github.com/dalinaum/TextureViewDemo // TextureView semi-translucent by calling myView.setAlpha(0.5f). // !! should we use TextureView instead? // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ //this.xSurfaceView.setZOrderOnTop(true); // necessary //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT); var ActivityPaused = true; // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE); var lo = wifi.createMulticastLock("vrudp"); lo.acquire(); #region ReceiveAsync // https://www.youtube.com/watch?v=GpmKq_qg3Tk var HUDStylusList = new List<Action<android.graphics.Canvas>>(); // http://uploadvr.com/vr-hmd-specs/ Action<android.graphics.Canvas> HUDStylus = canvas => { // video? // https://www.youtube.com/watch?v=JaTH_hoLDxc // so cool. 
            // we get to use pen in vr!

            while (HUDStylusList.Count > 1024)
                HUDStylusList.RemoveAt(0);

            foreach (var item in HUDStylusList)
            {
                item(canvas);
            }
        };

        #region fUDPPressure
        Action<IPAddress> fUDPPressure = async nic =>
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
            var uu = new UdpClient(40094);

            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
            //args.pre = "awaiting Parallax at " + nic + " :40094";

            var oldx = 0f;
            var oldy = 0f;

            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
            uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

            while (true)
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                // did we break async Continue ??
                var ux = await uu.ReceiveAsync();
                // did we jump to ui thread?

                // discard input?
                if (ActivityPaused)
                    continue;

                // while we have the signal turn on torch/.

                var m = new BinaryReader(new MemoryStream(ux.Buffer));

                var x0 = m.ReadSingle();
                var x = 200 + x0 * 0.1f;

                var y0 = m.ReadSingle();
                var y = 1200 - y0 * 0.1f;

                var pressure = m.ReadSingle();

                new { x, y, oldx, oldy, pressure }.With(
                    segment =>
                    {
                        var paint = new android.graphics.Paint();

                        HUDStylusList.Add(
                            canvas =>
                            {
                                //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));
                                //c.lineWidth = 1 + (pressure / 255.0 * 7);
                                // paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));

                                paint.setStyle(android.graphics.Paint.Style.STROKE);

                                if (pressure > 0)
                                    paint.setColor(android.graphics.Color.YELLOW);
                                else
                                    paint.setColor(android.graphics.Color.RED);

                                canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);
                                canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                            }
                        );
                    }
                );

                oldx = x;
                oldy = y;

                args.pen = new { x, y, pressure }.ToString();

                //Console.WriteLine(new { args.parallax });
                //// or marshal memory?
                //var xy = args.mouse.Split(':');
                //args.mousey = int.Parse(xy[1]);
                //// getchar?
                //args.ad = int.Parse(xy[2]);
                //args.ws = int.Parse(xy[3]);
                //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
                //args.c = int.Parse(xy[4]);
                //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
                //args.mousebutton = int.Parse(xy[5]);
                //args.mousewheel = int.Parse(xy[6]);
            }
        };
        #endregion

        #region fParallax
        Action<IPAddress> fParallax = async nic =>
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
            var uu = new UdpClient(43834);

            // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
            args.parallax = "awaiting Parallax at " + nic + " :43834";

            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
            uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);

            while (true)
            {
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
                // did we break async Continue ??
                var x = await uu.ReceiveAsync();
                // did we jump to ui thread?

                // discard input?
                if (ActivityPaused)
                    continue;

                // while we have the signal turn on torch/.
#region await webcam feed if (nogc == null) { // partial ? var camera = android.hardware.Camera.open(); android.hardware.Camera.Parameters p = camera.getParameters(); p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH); camera.setParameters(p); camera.startPreview(); nogc = camera; } #endregion //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.parallax = Encoding.UTF8.GetString(x.Buffer); var xy = args.parallax.Split(':'); //Console.WriteLine(new { args.parallax }); //// or marshal memory? //var xy = args.mouse.Split(':'); args.px = float.Parse(xy[1]); args.py = float.Parse(xy[2]); args.pz = float.Parse(xy[3]); //args.mousey = int.Parse(xy[1]); //// getchar? //args.ad = int.Parse(xy[2]); //args.ws = int.Parse(xy[3]); //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704 //args.c = int.Parse(xy[4]); //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown //args.mousebutton = int.Parse(xy[5]); //args.mousewheel = int.Parse(xy[6]); } }; #endregion #region fWASDC var fWASDCport = 41814; Action<IPAddress> fWASDC = async nic => { var uu = new UdpClient(fWASDCport); args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport; // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic); while (true) { var x = await uu.ReceiveAsync(); // did we jump to ui thread? //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.mouse = Encoding.UTF8.GetString(x.Buffer); // or marshal memory? var xy = args.mouse.Split(':'); args.mousex = int.Parse(xy[0]); args.mousey = int.Parse(xy[1]); // getchar? args.ad = int.Parse(xy[2]); args.ws = int.Parse(xy[3]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704 args.c = int.Parse(xy[4]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown args.mousebutton = int.Parse(xy[5]); args.mousewheel = int.Parse(xy[6]); } }; #endregion #region fvertexTransform // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs Action<IPAddress> fvertexTransform = async nic => { var uu = new UdpClient(40014); //args.mouse = "awaiting vertexTransform at " + nic + " :40014"; // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic); while (true) { var x = await uu.ReceiveAsync(); // did we jump to ui thread? 
//Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.vertexTransform = x.Buffer; } }; #endregion NetworkInterface.GetAllNetworkInterfaces().WithEach( n => { // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs var IPProperties = n.GetIPProperties(); var PhysicalAddress = n.GetPhysicalAddress(); foreach (var ip in IPProperties.UnicastAddresses) { // ipv4 if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) { if (!IPAddress.IsLoopback(ip.Address)) if (n.SupportsMulticast) { fUDPPressure(ip.Address); fWASDC(ip.Address); fParallax(ip.Address); fvertexTransform(ip.Address); } } } } ); #endregion var sw = Stopwatch.StartNew(); //var args = new object(); // can we draw on back? #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => { // can we listen to udp? // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs // in vr if the other service is running it can display vr notification // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp // lets run it, and see if we can see some vr notifications as we skip a video //if (args.total_allocated_space > 48 * 1024 * 1024) // this.recreate(); return sw.ElapsedMilliseconds + "ms | " + args.total_allocated_space + " bytes \n" + new { vertexTransform = args.vertexTransform.Length } + "\n" + args.mouse + "\n" + args.parallax + "\n" + args.vertexTransform.Length + "bytes udp\n" + new { args.pen } + "\n" //+ new { args.mousex, args.mousey } + "\n" + new { //args.mousex, // left to right //args.x, //args.px, args.px, args.py, args.pz, // nod up +0.7 down -0.7 ox = args.tracking_HeadPose_Pose_Orientation_x, // -0.7 right +0.7 left oy = args.tracking_HeadPose_Pose_Orientation_y // tilt right -0.7 tilt left + 0.7 //oz = args.tracking_HeadPose_Pose_Orientation_z // ?? //ow = args.tracking_HeadPose_Pose_Orientation_w }.ToString().Replace(",", "\n"); } }; //Task.Run( Func<string> safemode = () => { return sw.ElapsedMilliseconds + "ms \n" + args.total_allocated_space + " bytes \n" + "GC safe mode / malloc limit.."; }; // canvas.drawText(text, x + 2560 / 2, y + i * 24, paint); mDraw.AtDraw = canvas => { { var paint = new android.graphics.Paint(); paint.setStrokeWidth(16); paint.setStyle(android.graphics.Paint.Style.STROKE); paint.setColor(android.graphics.Color.RED); canvas.drawLine(0, 0, 400, 400, paint); canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint); HUDStylus(canvas); } // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay // can w visually store tracking intel. like tvs do. { // https://code.google.com/p/android/issues/detail?id=4086 var paint = new android.graphics.Paint(); paint.setStrokeWidth(0); paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE); // lets have left to right recorder as a color block //// nod up +0.7 down -0.7 // cannot see it. var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4); // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e // why wont our tracking correctly show? 
//Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8")); //paint.setColor(android.graphics.Color.YELLOW); paint.setColor( (int)(0xff000000 | rgb_left_to_right)); canvas.drawRect(16, 0, 32, 32, paint); } // ox = args.tracking_HeadPose_Pose_Orientation_x, // oy = args.tracking_HeadPose_Pose_Orientation_y { // https://code.google.com/p/android/issues/detail?id=4086 var paint = new android.graphics.Paint(); paint.setStrokeWidth(0); paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE); //paint.setColor(android.graphics.Color.RED); // lets have left to right recorder as a color block // // -0.7 right +0.7 left var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4); //paint.setColor(android.graphics.Color.YELLOW); paint.setColor( (int)(0xff000000 | rgb_left_to_right)); canvas.drawRect(16 + 64, 0, 320, 32, paint); } }; new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 30); // fullspeed GLES3JNILib.stringFromJNI(args); // http://developer.android.com/reference/android/graphics/Color.html if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024) { mDraw.color = android.graphics.Color.RED; mDraw.alpha = 255; mDraw.text = safemode; // goto secondary activity? } else if (args.mousebutton != 0) { // go a head. lean left or up mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } else { mDraw.color = android.graphics.Color.GREEN; // not leaning in? if (args.pz < 0) { mDraw.color = android.graphics.Color.WHITE; } var BaseStationEdgeX = Math.Abs(args.px) > 0.3; var BaseStationEdgeY = Math.Abs(args.py) > 0.3; if (BaseStationEdgeX || BaseStationEdgeY ) { // base station wont track ya for long.. // reorient? // fade to black? mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } } mDraw.postInvalidate(); Thread.Sleep(1000 / 60); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 4); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) return; int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; // touch sending int to offfset the cubes this.args.x = (int)(halfx - x); this.args.y = (int)(y - halfy); mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? 
}; #endregion #region ondispatchKeyEvent this.ondispatchKeyEvent = @event => { if (appThread == 0) return false; int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return base.dispatchKeyEvent(@event); } if (action == KeyEvent.ACTION_UP) { // keycode 4 //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } GLES3JNILib.onKeyEvent(keyCode, action); return true; }; #endregion AtPause = delegate { ActivityPaused = true; GLES3JNILib.onPause(); // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/ if (nogc != null) { var camera = nogc; var p = camera.getParameters(); p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF); camera.setParameters(p); camera.stopPreview(); camera.release(); nogc = null; } }; AtResume = delegate { //Console.WriteLine("enter onResume"); ActivityPaused = false; // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen //this.xSurfaceView.onres // You must ensure that the drawing thread only touches the underlying Surface while it is valid this.xSurfaceView = new SurfaceView(this); this.setContentView(xSurfaceView); this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); this.xSurfaceView.getHolder().addCallback(xCallback); GLES3JNILib.onResume(); }; // canw e add a camera too? // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background //this.setContentView(mDraw); //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); // sometimes system wants to try to black the screen it seems.. getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this); Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!"); }
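// The UDP listeners registered above share multicast group 239.1.2.3 and
// expect two packet layouts: fWASDC (port 41814) parses seven ':'-separated
// integers (mousex:mousey:ad:ws:c:mousebutton:mousewheel), and fUDPPressure
// (port 40094) reads three little-endian floats (x, y, pressure). A minimal
// desktop-side sender sketch; the values here are placeholders:
var client = new UdpClient();
var group = IPAddress.Parse("239.1.2.3");

// keyboard/mouse state packet for fWASDC
var wasdc = Encoding.UTF8.GetBytes("100:200:0:1:0:0:0");
client.Send(wasdc, wasdc.Length, new IPEndPoint(group, 41814));

// pen packet for fUDPPressure; BinaryWriter matches the BinaryReader layout
var ms = new MemoryStream();
var w = new BinaryWriter(ms);
w.Write(1000f);  // x
w.Write(2000f);  // y
w.Write(128f);   // pressure
var pen = ms.ToArray();
client.Send(pen, pen.Length, new IPEndPoint(group, 40094));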
// can we send a zip file? public static void Invoke(int index, Action<string> y, int frames = 4) { #region log var st = new Stopwatch(); st.Start(); Action<string> log = x => { var z = (st.Elapsed + " " + x); Console.WriteLine(z); y(z); }; #endregion log("getting ready..."); var DIRECTORY_DCIM = global::android.os.Environment.DIRECTORY_DCIM; var path = global::android.os.Environment.getExternalStoragePublicDirectory(DIRECTORY_DCIM).getAbsolutePath(); path += "/Camera"; //var n = DateTime.Now; //var f = new java.io.File(path + "/shot" + n.Ticks + ".jpg"); var camera = default(android.hardware.Camera); var surface = default(SurfaceView); try { // PreviewCallbackWithBuffer { cc = 0, Length = 1048576 } // W/CameraService( 84): CameraService::connect X (pid 2117) rejected (existing client). log("android.hardware.Camera.open... " + new { index }); camera = android.hardware.Camera.open(index); log("android.hardware.Camera.open... done "); var PreviewFormat = ImageFormat.UNKNOWN; #region setParameters var p = camera.getParameters(); // The size of the buffer must match the values described above. // Gets the supported preview formats. NV21 is always supported. // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#getSupportedPreviewFormats() #region SupportedPictureFormat foreach (int SupportedPictureFormat in p.getSupportedPictureFormats().AsEnumerable()) { Console.WriteLine(new { SupportedPictureFormat }); } //p.getSupportedPictureFormats().With( // pformats => // { // for (int i = 0; i < pformats.size(); i++) // { // var SupportedPictureFormat = (int)pformats.get(i); // Console.WriteLine(new { SupportedPictureFormat }); // } // } // ); #endregion #region SupportedPreviewFormat foreach (int SupportedPreviewFormat in p.getSupportedPreviewFormats().AsEnumerable()) { if (SupportedPreviewFormat == ImageFormat.NV21) PreviewFormat = SupportedPreviewFormat; else if (PreviewFormat == ImageFormat.UNKNOWN) PreviewFormat = SupportedPreviewFormat; log("" + new { SupportedPreviewFormat }); Console.WriteLine(new { SupportedPreviewFormat }); } //p.getSupportedPreviewFormats().With( // pformats => // { // for (int i = 0; i < pformats.size(); i++) // { // var SupportedPreviewFormat = (int)pformats.get(i); // if (SupportedPreviewFormat == ImageFormat.NV21) // PreviewFormat = SupportedPreviewFormat; // else if (PreviewFormat == ImageFormat.UNKNOWN) // PreviewFormat = SupportedPreviewFormat; // log("" + new { SupportedPreviewFormat }); // } // } //); #endregion //p.setPictureFormat(ImageFormat.YV12); p.setPreviewFormat(PreviewFormat); //D/DOMX ( 127): ERROR: failed check:(eError == OMX_ErrorNone) || (eError == OMX_ErrorNoMore) - returning error: 0x80001005 - Error returned from OMX API in ducati //E/CameraHAL( 127): Error while configuring rotation 0x80001005 // http://questiontrack.com/galaxy-nexus-specificaly-camera-startpreview-failed-993603.html // http://stackoverflow.com/questions/16839869/orientation-error-causing-crash ////p.setRotation(0); #region getSupportedPreviewSizes //var s = p.getSupportedPreviewSizes(); var min = default(android.hardware.Camera.Size); //for (int i = 0; i < s.size(); i++) foreach (android.hardware.Camera.Size size in p.getSupportedPreviewSizes().AsEnumerable()) { //var size = (android.hardware.Camera.Size)s.get(i); // I/System.Console( 6058): before takePicture { f = /mnt/sdcard/Pictures/shot.jpg } //I/System.Console( 6058): { size = android.hardware.Camera$Size@4fde180 } var SupportedPreviewSize = new { size.width, size.height }; log("" + new { 
SupportedPreviewSize }); if (min == null) min = size; else if (min.width > size.width) min = size; } #endregion p.setPreviewSize(min.width, min.height); //For formats besides YV12, the size of the buffer is determined by multiplying the // preview image width, height, and bytes per pixel. The width and height can be read // from getPreviewSize(). Bytes per pixel can be computed from getBitsPerPixel(int) / 8, // using the image format from getPreviewFormat(). //p.setPictureSize(min.width, min.height); // I/System.Console( 2860): { width = 640, height = 480, bytesperpixel = 0, buffersize = 0 } // the number of bits per pixel of the given format or -1 if the format doesn't exist or is not supported. //var bytesperpixel = (ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8); // http://stackoverflow.com/questions/13703596/mediacodec-and-camera-colorspaces-dont-match // https://code.google.com/p/android/issues/detail?id=37655 var bitsperpixel = (ImageFormat.getBitsPerPixel(PreviewFormat)); var buffersize = min.width * min.height / 8 * bitsperpixel; // 12 // http://www.fourcc.org/yuv.php //var buffersize = 460800; log("" + new { min.width, min.height, bitsperpixel, buffersize }); #region setFocusMode var focusModes = p.getSupportedFocusModes(); var NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_FIXED; for (int i = 0; i < focusModes.size(); i++) { var focusMode = (string)focusModes.get(i); if (focusMode == android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY) NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY; System.Console.WriteLine(new { focusMode }); } p.setFocusMode(NextFocus); #endregion camera.setParameters(p); #endregion // preview layout size: 736/1216 buffersize = 1843200; // E/Camera-JNI( 3148): Manually set buffer was too small! Expected 460800 bytes, but got 307200! camera.addCallbackBuffer(new sbyte[buffersize]); camera.addCallbackBuffer(new sbyte[buffersize]); camera.addCallbackBuffer(new sbyte[buffersize]); camera.addCallbackBuffer(new sbyte[buffersize]); camera.addCallbackBuffer(new sbyte[buffersize]); var a = new EventWaitHandle(false, EventResetMode.ManualReset); // Task.ContinueWith // await (ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext as Activity).StartNew( aa => { log("at runOnUiThread..."); // solve issue with callback not being called: release the camera and try again. It usually works. 
//To solve issue with rotation 0x80001005: restart app / service // http://stackoverflow.com/questions/13546788/camera-takepicture-not-working-on-my-jb-gb-froyo-phones aa.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); #region setErrorCallback camera.setErrorCallback( new XErrorCallback { yield = (err, c) => { log("" + new { err }); } } ); #endregion surface = new SurfaceView(ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext); var holder = surface.getHolder(); // http://developer.android.com/reference/android/view/SurfaceHolder.html#SURFACE_TYPE_PUSH_BUFFERS var SURFACE_TYPE_PUSH_BUFFERS = 0x00000003; holder.setType(SURFACE_TYPE_PUSH_BUFFERS); log("setPreviewCallbackWithBuffer"); var cc = 0; //E/CameraHardwareSec( 84): int android::CameraHardwareSec::previewThread(): mSkipFrame(3) > 0 //E/CameraHardwareSec( 84): int android::CameraHardwareSec::previewThread(): mSkipFrame(2) > 0 //E/CameraHardwareSec( 84): int android::CameraHardwareSec::previewThread(): mSkipFrame(1) > 0 // http://stackoverflow.com/questions/16878042/camera-not-working-in-google-nexus-tablet #region camera.PreviewCallbackWithBuffer camera.PreviewCallbackWithBuffer( (rawdata, c) => { if (surface == null) { // W/CameraHardwareSec( 84): virtual android::status_t android::CameraHardwareSec::cancelPicture() : not supported, just returning NO_ERROR return; } // 10080.0ms PreviewCallbackWithBuffer enter { cc = 58, Length = 460800 } //10119.0ms PreviewCallbackWithBuffer compressToJpeg done { cc = 58, Elapsed = 39.0ms } //10174.0ms PreviewCallbackWithBuffer ToBase64String done { cc = 58, Elapsed = 94.0ms } var xcc = cc; log("PreviewCallbackWithBuffer enter " + new { xcc, rawdata.Length }); // failed to flush { Length = 14619 } //new Thread( // delegate() { if (surface == null) { // W/CameraHardwareSec( 84): virtual android::status_t android::CameraHardwareSec::cancelPicture() : not supported, just returning NO_ERROR return; } var cst = new Stopwatch(); cst.Start(); // http://stackoverflow.com/questions/3426614/android-converting-from-nv21-preview-format-on-nexus-one-to-jpeg // http://developer.android.com/reference/android/graphics/YuvImage.html //Caused by: java.lang.IllegalArgumentException: only support ImageFormat.NV21 and ImageFormat.YUY2 for now //at android.graphics.YuvImage.<init>(YuvImage.java:82) // https://code.google.com/p/android/issues/detail?id=823 // https://code.google.com/p/android-source-browsing/source/browse/graphics/java/android/graphics/YuvImage.java?repo=platform--frameworks--base&name=android-cts-4.1_r1 var yuv = new YuvImage( rawdata, PreviewFormat, min.width, min.height, null ); var m = new java.io.ByteArrayOutputStream(); yuv.compressToJpeg( new Rect(0, 0, min.width, min.height), 20, m); var data = (byte[])(object)m.toByteArray(); log("PreviewCallbackWithBuffer compressToJpeg done " + new { xcc, cst.Elapsed }); Console.WriteLine("compressToJpeg " + new { data.Length } ); var src = "data:image/jpg;base64," + Convert.ToBase64String( data ); log("PreviewCallbackWithBuffer ToBase64String done " + new { xcc, cst.Elapsed }); y(src); //PreviewCallbackWithBuffer { cc = 0, Length = 1048576 } if (surface == null) { // W/CameraHardwareSec( 84): virtual android::status_t android::CameraHardwareSec::cancelPicture() : not supported, just returning NO_ERROR return; } //camera.addCallbackBuffer(); camera.addCallbackBuffer(new sbyte[buffersize]); log("PreviewCallbackWithBuffer exit " + new { xcc, cst.Elapsed }); if (xcc == frames) { //dummy.setVisibility(View.GONE); //dummy = null; // 
                            // Caused by: android.view.ViewRoot$CalledFromWrongThreadException: Only the original thread that created a view hierarchy can touch its views.
                            aa.StartNew(
                                delegate
                                {
                                    if (surface != null)
                                    {
                                        surface.setVisibility(View.GONE);
                                        surface = null;
                                    }

                                    a.Set();
                                }
                            );
                        }
                    }
                    //).Start();

                    cc++;
                }
            );
            #endregion

            #region holder.surfaceChanged
            holder.surfaceChanged(
                delegate
                {
                    log("surfaceChanged?");
                }
            );
            #endregion

            #region holder.surfaceCreated
            holder.surfaceCreated(
                delegate
                {
                    log("surfaceCreated!");

                    // http://stackoverflow.com/questions/12098298/android-camera-app-passed-null-surface
                    // http://stackoverflow.com/questions/16945524/app-passed-null-surface-while-taking-a-picture-without-a-surfaceview
                    // app passed NULL surface

                    log("before setPreviewDisplay, delay");
                    Thread.Sleep(400);

                    // https://code.google.com/p/zxing/source/browse/trunk/android/src/com/google/zxing/client/android/camera/CameraManager.java
                    // http://stackoverflow.com/questions/16945524/app-passed-null-surface-while-taking-a-picture-without-a-surfaceview
                    // http://stackoverflow.com/questions/4852740/surfaceview-getholder-not-returning-surfaceholder
                    log("before setPreviewDisplay " + new { holder });

                    // inside surface changed?
                    //if (surface == 0)
                    //{
                    //    LOGE("app passed NULL surface");
                    //    return NO_INIT;
                    //}
                    // https://android.googlesource.com/platform/frameworks/native/+/a6938bab1f6fa76ae98ebbe44f4e534e05fa0993/libs/ui/Camera.cpp

                    camera.setTryPreviewDisplay(holder);

                    log("after setPreviewDisplay");

                    log("startPreview, delay");
                    Thread.Sleep(200);

                    log("startPreview");
                    camera.startPreview();
                    log("after startPreview");
                }
            );
            #endregion

            #region addContentView
            //surface.setBackgroundColor(Color.argb(0x0F, 255, 0, 0));
            var pp = new android.widget.LinearLayout.LayoutParams(
                android.widget.LinearLayout.LayoutParams.FILL_PARENT,
                android.widget.LinearLayout.LayoutParams.FILL_PARENT
            );
            //pp.setMargins(64, 64, 64, 64);
            aa.addContentView(surface, pp);
            #endregion
        }
    );

    a.WaitOne();

    log("PreviewCallbackWithBuffer done");
}
catch (Exception ex)
{
    log("error: " + new { ex.Message, ex.StackTrace });
    throw new Exception("", ex);
}
finally
{
    log("finally");

    // using
    if (camera != null)
    {
        camera.stopPreview();
        camera.release();
        camera = null;
    }
}
}
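// Invoke pushes a handful of camera preview frames through the y callback as
// "data:image/jpg;base64,..." URIs and then tears the preview down. It blocks
// on a.WaitOne(), so run it off the UI thread. A minimal usage sketch:
new Thread(
    delegate()
    {
        Invoke(
            index: 0,
            y: src => Console.WriteLine("got frame, " + src.Length + " chars"),
            frames: 2
        );
    }
).Start();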
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); Console.WriteLine("enter OVRMyCubeWorld onCreate"); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xSurfaceHolder_Callback { onsurfaceCreated = holder => { Console.WriteLine("enter onsurfaceCreated " + new { appThread }); if (appThread == 0) return; // did we use it for float window? //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT); GLES3JNILib.onSurfaceCreated(holder.getSurface()); xSurfaceHolder = holder; //Console.WriteLine("exit onsurfaceCreated " + new { appThread }); }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (appThread == 0) return; GLES3JNILib.onSurfaceChanged(holder.getSurface()); xSurfaceHolder = holder; }, onsurfaceDestroyed = holder => { //I/System.Console( 3549): 0ddd:0001 after OVRMyCubeWorld onCreate, attach the headset! //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed //Console.WriteLine("enter onsurfaceDestroyed"); if (appThread == 0) return; // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRMyCubeWorld.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46) GLES3JNILib.onSurfaceDestroyed(); xSurfaceHolder = null; //appThread = 0; // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu } }; #endregion // https://github.com/dalinaum/TextureViewDemo // TextureView semi-translucent by calling myView.setAlpha(0.5f). // !! should we use TextureView instead? // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ //this.xSurfaceView.setZOrderOnTop(true); // necessary //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT); #region ReceiveAsync Action<IPAddress> f = async nic => { args.mouse = "awaiting at " + nic; // Z:\jsc.svn\examples\java\android\AndroidUDPClipboard\ApplicationActivity.cs // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs var uu = new UdpClient(41814); uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic); while (true) { var x = await uu.ReceiveAsync(); // did we jump to ui thread? //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer)); args.mouse = Encoding.UTF8.GetString(x.Buffer); // or marshal memory? var xy = args.mouse.Split(':'); args.mousex = int.Parse(xy[0]); args.mousey = int.Parse(xy[1]); // getchar? 
args.ad = int.Parse(xy[2]); args.ws = int.Parse(xy[3]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704 args.c = int.Parse(xy[4]); // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown args.mousebutton = int.Parse(xy[5]); args.mousewheel = int.Parse(xy[6]); } }; NetworkInterface.GetAllNetworkInterfaces().WithEach( n => { // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs var IPProperties = n.GetIPProperties(); var PhysicalAddress = n.GetPhysicalAddress(); foreach (var ip in IPProperties.UnicastAddresses) { // ipv4 if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) { if (!IPAddress.IsLoopback(ip.Address)) if (n.SupportsMulticast) f(ip.Address); } } } ); #endregion var sw = Stopwatch.StartNew(); //var args = new object(); // can we draw on back? #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" // (out) VrApi.vrapi_GetVersionString() text = () => { // can we listen to udp? // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs // in vr if the other service is running it can display vr notification // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp // lets run it, and see if we can see some vr notifications as we skip a video GLES3JNILib.stringFromJNI(args); return sw.ElapsedMilliseconds + "ms \n" + args.mouse + "\n" + new { args.mousex, args.mousey } + "\n" + new { //args.mousex, // left to right args.x, // nod up +0.7 down -0.7 //ox = args.tracking_HeadPose_Pose_Orientation_x // -0.7 right +0.7 left oy = args.tracking_HeadPose_Pose_Orientation_y // tilt right -0.7 tilt left + 0.7 //oz = args.tracking_HeadPose_Pose_Orientation_z // ?? //ow = args.tracking_HeadPose_Pose_Orientation_w }; } }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); //Thread.Sleep(1000 / 30); // fullspeed Thread.Sleep(1000 / 60); if (args.mousebutton == 0) { mDraw.color = android.graphics.Color.GREEN; mDraw.alpha = 80; } else { mDraw.color = android.graphics.Color.YELLOW; mDraw.alpha = 255; } mDraw.postInvalidate(); } } ).Start(); #endregion #region ondispatchTouchEvent this.ondispatchTouchEvent = @event => { if (appThread == 0) return; int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; // touch sending int to offfset the cubes this.args.x = (int)(halfx - x); this.args.y = (int)(y - halfy); mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(action, x, y); // can we move hud around and record it to gif or mp4? 
}; #endregion #region ondispatchKeyEvent this.ondispatchKeyEvent = @event => { if (appThread == 0) return false; int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return base.dispatchKeyEvent(@event); } if (action == KeyEvent.ACTION_UP) { // keycode 4 //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } GLES3JNILib.onKeyEvent(keyCode, action); return true; }; #endregion AtResume = delegate { Console.WriteLine("enter onResume"); // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen //this.xSurfaceView.onres // You must ensure that the drawing thread only touches the underlying Surface while it is valid this.xSurfaceView = new SurfaceView(this); this.setContentView(xSurfaceView); this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); this.xSurfaceView.getHolder().addCallback(xCallback); GLES3JNILib.onResume(); }; // canw e add a camera too? // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background //this.setContentView(mDraw); //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); // sometimes system wants to try to black the screen it seems.. getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this); Console.WriteLine("after OVRMyCubeWorld onCreate, attach the headset!"); }
protected override void onCreate(Bundle savedInstanceState) { base.onCreate(savedInstanceState); #region xCallback // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs var xCallback = new xCallback { onsurfaceCreated = holder => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceCreated(mNativeHandle, holder.getSurface()); mSurfaceHolder = holder; } }, onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceChanged(mNativeHandle, holder.getSurface()); mSurfaceHolder = holder; } }, onsurfaceDestroyed = holder => { if (mNativeHandle != 0) { GLES3JNILib.onSurfaceDestroyed(mNativeHandle); mSurfaceHolder = null; } } }; #endregion mView = new SurfaceView(this); this.setContentView(mView); // E/AndroidRuntime(22718): Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'android.view.ViewParent android.view.View.getParent()' on a null object reference //E/AndroidRuntime(22718): at android.view.ViewGroup.addViewInner(ViewGroup.java:4216) //E/AndroidRuntime(22718): at android.view.ViewGroup.addView(ViewGroup.java:4070) //E/AndroidRuntime(22718): at android.view.ViewGroup.addView(ViewGroup.java:4046) //E/AndroidRuntime(22718): at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:478) //E/AndroidRuntime(22718): at com.android.internal.policy.impl.PhoneWindow.setContentView(PhoneWindow.java:459) //E/AndroidRuntime(22718): at android.app.Activity.setContentView(Activity.java:2298) var sw = Stopwatch.StartNew(); #region mDraw var mDraw = new DrawOnTop(this) { // yes it appears top left. //text = "GearVR HUD" text = () => sw.ElapsedMilliseconds + "ms !" }; //Task.Run( new Thread( delegate() { // bg thread while (true) { //Thread.Sleep(1000 / 15); Thread.Sleep(1000 / 30); mDraw.postInvalidate(); } } ).Start(); #endregion this.ondispatchTouchEvent = @event => { if (mNativeHandle == 0) return; int action = @event.getAction(); float x = @event.getRawX(); float y = @event.getRawY(); //if (action == MotionEvent.ACTION_UP) { var halfx = 2560 / 2; var halfy = 1440 / 2; mDraw.x = (int)(500 + halfx - x); mDraw.y = (int)(600 + y - halfy); mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString(); //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )"); } GLES3JNILib.onTouchEvent(mNativeHandle, action, x, y); // can we move hud around and record it to gif or mp4? 
}; this.ondispatchKeyEvent = @event => { if (mNativeHandle == 0) return false; int keyCode = @event.getKeyCode(); int action = @event.getAction(); if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP) { return base.dispatchKeyEvent(@event); } if (action == KeyEvent.ACTION_UP) { // keycode 4 mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString(); //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )"); } GLES3JNILib.onKeyEvent(mNativeHandle, keyCode, action); return true; }; // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs // X:\jsc.svn\examples\java\android\AndroidLacasCameraServerActivity\AndroidLacasCameraServerActivity\ApplicationActivity.cs addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); mView.getHolder().addCallback(xCallback); // Force the screen to stay on, rather than letting it dim and shut off // while the user is watching a movie. // does this disable the face sensor? getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON); // Force screen brightness to stay at maximum //WindowManager_LayoutParams _params = getWindow().getAttributes(); //_params.screenBrightness = 1.0f; //getWindow().setAttributes(_params); mNativeHandle = com.oculus.gles3jni.GLES3JNILib.onCreate(this); // can we now overlay something on top of the surface? }
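// The native handle created above would normally be paired with teardown in
// the remaining lifecycle overrides. A sketch of the matching onDestroy,
// assuming this GLES3JNILib wrapper exposes the same onDestroy entry point as
// the Oculus VrCubeWorld sample it mirrors:
protected override void onDestroy()
{
    if (mNativeHandle != 0)
    {
        GLES3JNILib.onDestroy(mNativeHandle);
        mNativeHandle = 0;
    }

    base.onDestroy();
}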