/// <summary>
/// Serializes the wrapped Java package (via writeExternal) into an in-memory
/// buffer and copies the result to the caller's .NET stream.
/// </summary>
/// <param name="stream">Destination .NET stream; written to but not closed here.</param>
public void Save(Stream stream) {
    java.io.ByteArrayOutputStream byteStream = new java.io.ByteArrayOutputStream();
    ObjectOutput objOut = new DotnetObjectOutputStream(byteStream);
    this._javaPackage.writeExternal(objOut);
    // BUG FIX: flush and close the ObjectOutput before reading the buffer.
    // java.io.ObjectOutputStream buffers internally, so without this the tail
    // of the serialized data may never reach byteStream (truncated output).
    objOut.flush();
    objOut.close();
    // Cache the array: toByteArray() copies the buffer on every call, so the
    // original code materialized the payload twice.
    byte[] data = byteStream.toByteArray();
    stream.Write(data, 0, data.Length);
    byteStream.close();
}
/// <summary>
/// Renders an XSL-FO document to PDF bytes using Apache FOP 2.1 (via IKVM).
/// </summary>
/// <param name="xslfo">The XSL-FO markup to render.</param>
/// <param name="name">Document title recorded in the PDF metadata.</param>
/// <returns>The rendered PDF as a byte array.</returns>
public static byte[] ProcessXslFo(string xslfo, string name) {
    var userAgent = FopFactory.newFOUserAgent();
    userAgent.setCreator("Crispin (Apache FOP 2.1 via IKVM)");
    userAgent.setTitle(name);

    var pdfBuffer = new java.io.ByteArrayOutputStream();
    var fop = FopFactory.newFop(org.apache.xmlgraphics.util.MimeConstants.__Fields.MIME_PDF, userAgent, pdfBuffer);

    // Identity-style transform that pipes the FO source into FOP's SAX handler.
    var factory = new com.sun.org.apache.xalan.@internal.xsltc.trax.TransformerFactoryImpl();
    var identityTransform = factory.newTransformer();
    var foSource = new StreamSource(new java.io.StringReader(xslfo));
    var fopSink = new SAXResult(fop.getDefaultHandler());
    identityTransform.transform(foSource, fopSink);

    // Adding the page count requires a second pass; this should be configurable
    // by the report itself:
    //   transformer.setParameter("page-count", fop.getResults().getPageCount().ToString());
    //   transformer.transform(src, res);

    pdfBuffer.close();
    return pdfBuffer.toByteArray();
}
/// <summary>
/// Transforms an XML document with an XSLT stylesheet into XSL-FO, renders it
/// to PDF with Apache FOP, and streams the result to the HTTP response.
/// </summary>
/// <param name="XMLFile">Path to the source XML document.</param>
/// <param name="XSLTFile">Path to the XSLT stylesheet.</param>
private static void StreamPDF(string XMLFile, string XSLTFile) {
    // BUG FIX: the stylesheet and source document were swapped — the XML source
    // was being loaded as the stylesheet and the XSLT as the source document.
    // Load the style sheet.
    XslCompiledTransform xslt = new XslCompiledTransform();
    xslt.Load(XSLTFile);
    // Load the source XML document.
    XmlDocument objSourceData = new XmlDocument();
    objSourceData.Load(XMLFile);
    // Execute the transform and capture the XSL-FO output in memory.
    MemoryStream ms = new MemoryStream();
    xslt.Transform(objSourceData, null, ms);
    // FOP's Java API works on signed bytes, so convert the buffer once.
    sbyte[] inputFOBytes = ToSByteArray(ms.ToArray());
    InputSource inputFoFile = new org.xml.sax.InputSource(new ByteArrayInputStream(inputFOBytes));
    ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
    org.apache.fop.apps.Driver dr = new org.apache.fop.apps.Driver(inputFoFile, bos);
    dr.setRenderer(org.apache.fop.apps.Driver.RENDER_PDF);
    dr.run();
    // Convert the signed bytes back and stream the PDF to the browser.
    byte[] getBytes = ToByteArray(bos.toByteArray());
    Response.ContentType = "application/pdf";
    Response.AddHeader("Content-disposition", "filename=output.pdf");
    Response.OutputStream.Write(getBytes, 0, getBytes.Length);
    Response.OutputStream.Flush();
    Response.OutputStream.Close();
}
/// <summary>
/// Converts XSL-FO markup into a PDF document using Apache FOP 2.1 over IKVM.
/// </summary>
/// <param name="xslfo">XSL-FO source text.</param>
/// <param name="name">Title stored in the generated PDF.</param>
/// <returns>PDF contents as a byte array.</returns>
public static byte[] ProcessXslFo(string xslfo, string name) {
    var agent = FopFactory.newFOUserAgent();
    agent.setCreator("Crispin (Apache FOP 2.1 via IKVM)");
    agent.setTitle(name);

    var output = new java.io.ByteArrayOutputStream();
    var fopInstance = FopFactory.newFop(org.apache.xmlgraphics.util.MimeConstants.__Fields.MIME_PDF, agent, output);

    // Feed the FO text through a transformer into FOP's default SAX handler.
    var xalanFactory = new com.sun.org.apache.xalan.@internal.xsltc.trax.TransformerFactoryImpl();
    var passThrough = xalanFactory.newTransformer();
    var input = new StreamSource(new java.io.StringReader(xslfo));
    var sink = new SAXResult(fopInstance.getDefaultHandler());
    passThrough.transform(input, sink);

    // Adding the page count requires a second pass. This should be configurable
    // by the report itself:
    //   transformer.setParameter("page-count", fop.getResults().getPageCount().ToString());
    //   transformer.transform(src, res);

    output.close();
    return output.toByteArray();
}
/// <summary>
/// Rewrites the response content in place. On success the rewritten bytes
/// replace the content and the result is marked indefinitely cacheable;
/// returns null when no rewrite was performed.
/// </summary>
/// <param name="request">The originating request (may override the MIME type).</param>
/// <param name="original">The upstream response, used for its Content-Type header.</param>
/// <param name="content">Mutable content holder that receives the rewritten text.</param>
public virtual RewriterResults rewrite(sRequest request, sResponse original, MutableContent content) {
    // Pre-size the buffer at ~110% of the input to avoid reallocation during rewrite.
    ByteArrayOutputStream baos = new ByteArrayOutputStream((content.getContent().Length * 110) / 100);
    OutputStreamWriter output = new OutputStreamWriter(baos);
    String mimeType = original.getHeader("Content-Type");
    if (request.RewriteMimeType != null) {
        mimeType = request.RewriteMimeType;
    }
    GadgetSpec spec = null;
    if (request.Gadget != null) {
        spec = _specFactory.getGadgetSpec(request.Gadget.toJavaUri(), false);
    }
    if (rewrite(spec, request.getUri(), content, mimeType, output)) {
        // BUG FIX: flush the writer before reading the underlying buffer.
        // java.io.OutputStreamWriter buffers characters internally, so without
        // this flush the tail of the rewritten content could be silently lost.
        output.flush();
        content.setContent(Encoding.Default.GetString(baos.toByteArray()));
        return RewriterResults.cacheableIndefinitely();
    }
    return null;
}
/// <summary>
/// Resolves the drawable for an OUYA controller button and converts it into a
/// Unity Texture2D. Sets <paramref name="button"/> to null when no usable
/// drawable exists for the key code.
/// </summary>
/// <param name="button">Receives the decoded texture, or null.</param>
/// <param name="keyCode">OUYA controller key code to look up.</param>
private void SetDrawable(out Texture2D button, int keyCode) {
    // Default to "no texture"; every early exit below leaves it null.
    button = null;

    OuyaController.ButtonData buttonData = OuyaController.getButtonData(keyCode);
    if (null == buttonData || null == buttonData.buttonDrawable) {
        return;
    }

    BitmapDrawable drawable = (BitmapDrawable)buttonData.buttonDrawable;
    if (null == drawable) {
        return;
    }

    Bitmap bitmap = drawable.getBitmap();
    if (null == bitmap) {
        return;
    }

    // Re-encode the bitmap as PNG so Unity can load it via LoadImage.
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
    if (stream.size() != 0) {
        button = new Texture2D(0, 0);
        button.LoadImage(stream.toByteArray());
    }
    stream.close();
}
/// <summary>
/// Reads an image file from disk; when <paramref name="thumb"/> is true the
/// image is scaled down to a fixed-height PNG thumbnail before being returned.
/// </summary>
/// <param name="filepath">Path of the image file to read.</param>
/// <param name="thumb">True to return a 96px-high thumbnail instead of the raw bytes.</param>
private static byte[] InternalReadBytes(string filepath, bool thumb = true) {
    var imageBytes = (sbyte[])(object)System.IO.File.ReadAllBytes(filepath);

    if (thumb) {
        // Scale to a fixed height, preserving the aspect ratio.
        // http://stackoverflow.com/questions/2577221/android-how-to-create-runtime-thumbnail
        int THUMBNAIL_HEIGHT = 96;

        var bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.Length);
        float sourceWidth = bitmap.getWidth();
        float sourceHeight = bitmap.getHeight();
        float aspect = sourceWidth / sourceHeight;
        bitmap = Bitmap.createScaledBitmap(bitmap, (int)(THUMBNAIL_HEIGHT * aspect), THUMBNAIL_HEIGHT, false);

        // Re-encode as PNG (the quality argument is not meaningful for PNG).
        // http://developer.android.com/reference/android/graphics/Bitmap.html
        ByteArrayOutputStream encoded = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.PNG, 0, encoded);
        imageBytes = encoded.toByteArray();
    }

    return (byte[])(object)imageBytes;
}
/// <summary>
/// Takes a still picture with the given camera without showing a real preview:
/// opens the camera, picks the smallest supported picture size, selects a focus
/// mode, attaches a dummy SurfaceView on the UI thread so startPreview() is
/// legal, then calls takePicture() and blocks until the JPEG has been written.
/// </summary>
/// <param name="num">Camera index passed to android.hardware.Camera.open.</param>
/// <returns>The File the captured JPEG was written to (DCIM/Camera/shot&lt;ticks&gt;.jpg).</returns>
public static File InternalTakePicture(int num = 0)
{
    // Target file lives under the public DCIM/Camera directory, named by tick count.
    var DIRECTORY_DCIM = global::android.os.Environment.DIRECTORY_DCIM;
    var path = global::android.os.Environment.getExternalStoragePublicDirectory(DIRECTORY_DCIM).getAbsolutePath();
    path += "/Camera";
    var n = DateTime.Now;
    var f = new File(path + "/shot" + n.Ticks + ".jpg");

    // NOTE(review): Camera.open throws "Fail to connect to camera service" when
    // another client already holds the camera — callers must not double-open.
    var camera = android.hardware.Camera.open(num);

    var p = camera.getParameters();
    p.setRotation(0);

    // Pick the SMALLEST supported picture size (min by width).
    var s = p.getSupportedPictureSizes();
    var min = default(android.hardware.Camera.Size);
    for (int i = 0; i < s.size(); i++)
    {
        var size = (android.hardware.Camera.Size)s.get(i);
        System.Console.WriteLine(new { size.width, size.height });
        if (min == null) min = size;
        else if (min.width > size.width) min = size;
    }
    System.Console.WriteLine("before setPictureSize ");
    p.setPictureSize(min.width, min.height);

    // Prefer FOCUS_MODE_INFINITY when the hardware supports it, otherwise fall
    // back to FOCUS_MODE_FIXED. Some devices reject unsupported focus modes at
    // setParameters time (see vendor log excerpts in revision history).
    var focusModes = p.getSupportedFocusModes();
    var NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_FIXED;
    for (int i = 0; i < focusModes.size(); i++)
    {
        var focusMode = (string)focusModes.get(i);
        if (focusMode == android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY)
            NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY;
        System.Console.WriteLine(new { focusMode });
    }
    System.Console.WriteLine("before setFocusMode " + new { NextFocus });
    p.setFocusMode(NextFocus);
    camera.setParameters(p);

    // startPreview needs a surface; build an off-screen dummy SurfaceView on the
    // UI thread and signal `b` once the preview is actually running.
    // http://stackoverflow.com/questions/9744790/android-possible-to-camera-capture-without-a-preview
    var b = new EventWaitHandle(false, EventResetMode.ManualReset);
    System.Console.WriteLine("before startPreview ");

    // `done` later hides the dummy view again (assigned on the UI thread below).
    Action done = delegate { };
    try
    {
        // Camera/handler setup must happen on a Looper thread; hop to the UI thread.
        (ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext as Activity).With(
            aa =>
            {
                aa.runOnUiThread(
                    new f
                    {
                        y = delegate
                        {
                            try
                            {
                                System.Console.WriteLine("before getHolder ");
                                // Dummy, never-really-visible surface for the preview.
                                var dummy = new SurfaceView(ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext);
                                var h = dummy.getHolder();
                                // SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS (constant inlined
                                // because the symbol is missing on some API levels).
                                var SURFACE_TYPE_PUSH_BUFFERS = 0x00000003;
                                h.setType(SURFACE_TYPE_PUSH_BUFFERS);
                                h.addCallback(
                                    new XSurfaceHolder_Callback
                                    {
                                        yield_surfaceCreated = delegate
                                        {
                                            System.Console.WriteLine("at yield_surfaceCreated ");
                                            try
                                            {
                                                // Surface exists now — hook it up and start the preview.
                                                camera.setPreviewDisplay(h);
                                                camera.startPreview();
                                                System.Console.WriteLine("after startPreview ");
                                                b.Set();
                                            }
                                            catch { throw; }
                                        }
                                    }
                                );
                                // Adding the view to the activity triggers surfaceCreated.
                                aa.addContentView(dummy,
                                    new android.widget.LinearLayout.LayoutParams(
                                        android.widget.LinearLayout.LayoutParams.WRAP_CONTENT,
                                        android.widget.LinearLayout.LayoutParams.WRAP_CONTENT
                                    )
                                );
                                done = delegate
                                {
                                    // Cleanup: hide the dummy view on the UI thread once the
                                    // picture has been taken.
                                    aa.runOnUiThread(
                                        new f
                                        {
                                            y = delegate
                                            {
                                                dummy.setVisibility(View.GONE);
                                            }
                                        }
                                    );
                                };
                            }
                            catch { throw; }
                        }
                    }
                );
            }
        );
    }
    catch { throw; }

    // Wait until startPreview has actually run on the UI thread.
    b.WaitOne();

    // `a` is signalled from the picture callback once the JPEG is on disk.
    var a = new EventWaitHandle(false, EventResetMode.ManualReset);
    // startPreview() must have been called before takePicture().
    // http://stackoverflow.com/questions/15279911/using-camera-without-preview-or-surface-in-android
    System.Console.WriteLine("before takePicture " + new { f });
    camera.setErrorCallback(
        new XErrorCallback
        {
            yield = (err, c) =>
            {
                System.Console.WriteLine(new { err });
            }
        }
    );

    // Wait for one preview frame so the pipeline is warm before capturing;
    // otherwise some devices hand back a zero-length JPEG.
    var at_setPreviewCallback = new EventWaitHandle(false, EventResetMode.ManualReset);
    System.Console.WriteLine("before setPreviewCallback ");
    camera.setOneShotPreviewCallback(
        new XCameraPreviewCallback
        {
            yield = delegate
            {
                at_setPreviewCallback.Set();
            }
        }
    );
    at_setPreviewCallback.WaitOne();
    System.Console.WriteLine("after setPreviewCallback ");
    // NOTE(review): empirical settle delay; presumably gives the sensor time to
    // auto-expose — confirm whether it is still needed.
    Thread.Sleep(150);

    camera.takePicture(null, null,
        new XCameraPictureCallback
        {
            yield = (data, c) =>
            {
                System.Console.WriteLine("enter XCameraPictureCallback " + new { data.Length });
                if (data.Length > 0)
                {
                    // Decode and re-encode the JPEG, then write it to the target file.
                    var bmp = BitmapFactory.decodeByteArray(data, 0, data.Length);
                    File directory = new File(path);
                    directory.mkdirs();
                    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                    bmp.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
                    try
                    {
                        f.createNewFile();
                        FileOutputStream fo = new FileOutputStream(f);
                        fo.write(bytes.toByteArray());
                    }
                    catch { throw; }
                }
                System.Console.WriteLine("exit XCameraPictureCallback");
                camera.release();
                done();
                a.Set();
            }
        }
    );

    System.Console.WriteLine("will wait for takePicture to complete ... " + new { f });
    a.WaitOne();
    return f;
}
/// <summary>
/// Reads an image file and returns either its raw bytes (thumb == false) or a
/// 96px-high PNG thumbnail. When the xmetadata build flag is set, the EXIF
/// thumbnail embedded in the file is preferred; otherwise (or when EXIF
/// extraction fails/yields nothing) the thumbnail is rendered by scaling the
/// decoded bitmap.
/// </summary>
/// <param name="filepath">Path of the image file to read.</param>
/// <param name="thumb">False to return the file's raw bytes unmodified.</param>
private static byte[] InternalReadBytes(string filepath, bool thumb = true)
{
    // Timing instrumentation: observed 300-500ms per thumbnail on device.
    var sw = Stopwatch.StartNew();
    System.Console.WriteLine("enter InternalReadBytes " + new { filepath, thumb, sw.ElapsedMilliseconds });

    // Full-size request: no decoding needed, return the bytes straight from disk.
    if (!thumb)
    {
        return System.IO.File.ReadAllBytes(filepath);
    }

    var mImageData = default(byte[]);
    try
    {
#if xmetadata
        // Optional fast path: pull the pre-rendered EXIF thumbnail via
        // AndroidMetadataExtractor instead of decoding the whole image.
        // http://stackoverflow.com/questions/10166373/metadata-extraction-java
        var m = ImageMetadataReader.readMetadata(new File(filepath));
        var t = typeof(com.drew.metadata.exif.ExifThumbnailDirectory).ToClass();
        if (m.containsDirectory(t))
        {
            var x = (com.drew.metadata.exif.ExifThumbnailDirectory)m.getDirectory(t);
            System.Console.WriteLine(
                filepath
            );
            mImageData = x.getThumbnailData();
        }
#endif
    }
    catch
    {
        // Best-effort: any metadata failure falls through to the scaling path below.
        // skip
    }

    if (mImageData == null)
    {
        // Fallback: decode the full image and scale it to a fixed-height thumbnail.
        // http://stackoverflow.com/questions/2577221/android-how-to-create-runtime-thumbnail
        var smImageData = (sbyte[])(object)System.IO.File.ReadAllBytes(filepath);
        int THUMBNAIL_HEIGHT = 96;
        var imageBitmap = android.graphics.BitmapFactory.decodeByteArray(smImageData, 0, smImageData.Length);
        float width = imageBitmap.getWidth();
        float height = imageBitmap.getHeight();
        float ratio = width / height;
        imageBitmap = android.graphics.Bitmap.createScaledBitmap(imageBitmap, (int)(THUMBNAIL_HEIGHT * ratio), THUMBNAIL_HEIGHT, false);
        // Re-encode as PNG (the quality argument is not meaningful for PNG).
        // http://developer.android.com/reference/android/graphics/Bitmap.html
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        imageBitmap.compress(android.graphics.Bitmap.CompressFormat.PNG, 0, baos);
        mImageData = (byte[])(object)baos.toByteArray();
    }

    System.Console.WriteLine("exit InternalReadBytes " + new { filepath, thumb, sw.ElapsedMilliseconds });
    return mImageData;
}
/// <summary>
/// Encodes the given image into an in-memory buffer and returns the encoded bytes.
/// Delegates the actual encoding to the stream-based encode overload.
/// </summary>
/// <param name="bufferedImage">Image to encode.</param>
/// <returns>The encoded image bytes.</returns>
public virtual byte[] encode(BufferedImage bufferedImage) {
    var buffer = new ByteArrayOutputStream();
    this.encode(bufferedImage, (OutputStream) buffer);
    return buffer.toByteArray();
}
/// <summary>
/// Builds a WidgetOuyaMod for each OuyaMod: copies its metadata, concatenates
/// its file names/contents and tags into display strings, converts its
/// screenshots/thumbnails into Unity textures, and wires the new widget's
/// buttons into the focus-navigation grid (row linked below the previous row,
/// or below the Create button for the first row).
/// </summary>
/// <param name="ouyaMods">Mods to add widgets for.</param>
/// <param name="searchByInstalled">Recorded on each widget; reflects the query that produced it.</param>
/// <param name="searchByPublished">Recorded on each widget; reflects the query that produced it.</param>
void AddWidgets(List<OuyaMod> ouyaMods, bool searchByInstalled, bool searchByPublished)
{
    // Reused scratch builder for both the filenames and tags strings.
    StringBuilder sb = new StringBuilder();
    foreach (OuyaMod ouyaMod in ouyaMods)
    {
        // Snapshot all mod metadata into the widget up front.
        WidgetOuyaMod widget = new WidgetOuyaMod()
        {
            m_instance = ouyaMod,
            m_category = ouyaMod.getCategory(),
            m_description = ouyaMod.getDescription(),
            m_isDownloading = ouyaMod.isDownloading(),
            m_isFlagged = ouyaMod.isFlagged(),
            m_isInstalled = ouyaMod.isInstalled(),
            m_isPublished = ouyaMod.isPublished(),
            m_metaData = ouyaMod.getMetaData(),
            m_ratingCount = ouyaMod.getRatingCount(),
            m_ratingAverage = ouyaMod.getRatingAverage(),
            m_title = ouyaMod.getTitle(),
            m_userRating = ouyaMod.getUserRating(),
            m_searchByInstalled = searchByInstalled,
            m_searchByPublished = searchByPublished,
        };
        // Reset the scratch builder (carried over from the previous iteration).
        if (sb.Length > 0)
        {
            sb.Remove(0, sb.Length);
        }
        // Build "name,***content..." for every file packaged with the mod.
        foreach (string filename in ouyaMod.getFilenames())
        {
            sb.Append(filename);
            sb.Append(",");
            using (InputStream inputStream = ouyaMod.openFile(filename))
            {
                byte[] buffer = new byte[100000];
                // NOTE(review): a single read() may not fill the buffer, and
                // read() returns -1 at EOF — `new byte[readAmount]` would then
                // throw. Confirm files here are always non-empty and < 100000 bytes.
                int readAmount = inputStream.read(ref buffer);
                inputStream.close();
                byte[] copy = new byte[readAmount];
                Array.Copy(buffer, copy, readAmount);
                sb.Append("***");
                string content = System.Text.UTF8Encoding.UTF8.GetString(copy);
                sb.Append(content);
            }
        }
        widget.m_filenames = sb.ToString();
        // Convert each screenshot (full image + thumbnail) into a Texture2D.
        List<OuyaModScreenshot> screenshots = ouyaMod.getScreenshots();
        widget.m_screenshots = new Texture2D[screenshots.Count];
        widget.m_thumbnails = new Texture2D[screenshots.Count];
        for (int index = 0; index < screenshots.Count; ++index)
        {
            using (OuyaModScreenshot ouyaModScreenshot = screenshots[index])
            {
                if (null != ouyaModScreenshot)
                {
                    using (Bitmap bitmap = ouyaModScreenshot.getImage())
                    {
                        if (null != bitmap)
                        {
                            using (ByteArrayOutputStream stream = new ByteArrayOutputStream())
                            {
                                bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                                // NOTE(review): size() >= 0 is always true; the
                                // sibling SetDrawable uses a size() == 0 guard, so
                                // `> 0` was presumably intended here — confirm.
                                if (stream.size() >= 0)
                                {
                                    Texture2D texture = new Texture2D(0, 0);
                                    texture.LoadImage(stream.toByteArray());
                                    widget.m_screenshots[index] = texture;
                                }
                                stream.close();
                            }
                        }
                    }
                    using (Bitmap bitmap = ouyaModScreenshot.getThumbnail())
                    {
                        if (null != bitmap)
                        {
                            using (ByteArrayOutputStream stream = new ByteArrayOutputStream())
                            {
                                bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                                // NOTE(review): same always-true guard as above.
                                if (stream.size() >= 0)
                                {
                                    Texture2D texture = new Texture2D(0, 0);
                                    texture.LoadImage(stream.toByteArray());
                                    widget.m_thumbnails[index] = texture;
                                }
                                stream.close();
                            }
                        }
                    }
                }
            }
        }
        // Reuse the scratch builder for the comma-separated tag list.
        if (sb.Length > 0)
        {
            sb.Remove(0, sb.Length);
        }
        foreach (string tag in ouyaMod.getTags())
        {
            sb.Append(tag);
            sb.Append(",");
        }
        widget.m_tags = sb.ToString();
        m_widgets.Add(widget);
        // Wire controller focus navigation: buttons link left/right within the
        // row; Up points at the row above (or the Create button for row one).
        if (m_widgets.Count == 1)
        {
            m_focusManager.Mappings[m_btnCreate].Down = widget.m_buttonPublish;
            m_focusManager.Mappings[widget.m_buttonPublish] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Right = widget.m_buttonDelete, };
            m_focusManager.Mappings[widget.m_buttonDelete] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Left = widget.m_buttonPublish, Right = widget.m_buttonDownload, };
            m_focusManager.Mappings[widget.m_buttonDownload] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Left = widget.m_buttonDelete, Right = widget.m_buttonRate, };
            m_focusManager.Mappings[widget.m_buttonRate] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Left = widget.m_buttonDownload, Right = widget.m_buttonEdit, };
            m_focusManager.Mappings[widget.m_buttonEdit] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Left = widget.m_buttonRate, Right = widget.m_buttonFlag, };
            m_focusManager.Mappings[widget.m_buttonFlag] = new FocusManager.ButtonMapping() { Up = m_btnCreate, Left = widget.m_buttonEdit, };
        }
        else
        {
            // Link the previous row's Down pointers to this row, then map this
            // row with Up pointing back at the previous row.
            WidgetOuyaMod lastWidget = m_widgets[m_widgets.Count - 2];
            m_focusManager.Mappings[lastWidget.m_buttonPublish].Down = widget.m_buttonPublish;
            m_focusManager.Mappings[lastWidget.m_buttonDelete].Down = widget.m_buttonDelete;
            m_focusManager.Mappings[lastWidget.m_buttonDownload].Down = widget.m_buttonDownload;
            m_focusManager.Mappings[lastWidget.m_buttonRate].Down = widget.m_buttonRate;
            m_focusManager.Mappings[lastWidget.m_buttonEdit].Down = widget.m_buttonEdit;
            m_focusManager.Mappings[lastWidget.m_buttonFlag].Down = widget.m_buttonFlag;
            m_focusManager.Mappings[widget.m_buttonPublish] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonPublish, Right = widget.m_buttonDelete, };
            m_focusManager.Mappings[widget.m_buttonDelete] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonDelete, Left = widget.m_buttonPublish, Right = widget.m_buttonDownload, };
            m_focusManager.Mappings[widget.m_buttonDownload] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonDownload, Left = widget.m_buttonDelete, Right = widget.m_buttonRate, };
            m_focusManager.Mappings[widget.m_buttonRate] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonRate, Left = widget.m_buttonDownload, Right = widget.m_buttonEdit, };
            m_focusManager.Mappings[widget.m_buttonEdit] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonEdit, Left = widget.m_buttonRate, Right = widget.m_buttonFlag, };
            m_focusManager.Mappings[widget.m_buttonFlag] = new FocusManager.ButtonMapping() { Up = lastWidget.m_buttonFlag, Left = widget.m_buttonEdit, };
        }
    }
}
/// <summary>
/// Activity setup: shows a camera preview with a "take a picture" button.
/// When clicked, captures a frame, writes it as a JPEG to the public Pictures
/// directory, and launches an ACTION_VIEW intent to display it.
/// </summary>
/// <param name="savedInstanceState">Standard Android saved-state bundle.</param>
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);

    var sv = new ScrollView(this);
    var ll = new LinearLayout(this);
    //ll.setOrientation(LinearLayout.VERTICAL);
    sv.addView(ll);
    var b = new Button(this);
    ll.addView(b);
    var p = new Preview(this);
    b.WithText("take a picture");

    // Once the preview has a camera, wire the button click to takePicture.
    p.oncamera = camera => b.AtClick(
        v =>
        {
            camera.takePicture(null, null, new takePicture_handler
            {
                handler = data =>
                {
                    b.WithText("at click");
                    try
                    {
                        var SAVE_PATH = android.os.Environment.getExternalStoragePublicDirectory(
                            android.os.Environment.DIRECTORY_PICTURES
                        );
                        SAVE_PATH.mkdirs();

                        // Re-encode the captured bytes as JPEG and write to disk.
                        var bmp = android.graphics.BitmapFactory.decodeByteArray(data, 0, data.Length);
                        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                        bmp.compress(android.graphics.Bitmap.CompressFormat.JPEG, 100, bytes);
                        File f = new File(SAVE_PATH.ToString() + "/hello2.jpg");
                        f.createNewFile();
                        FileOutputStream fo = new FileOutputStream(f);
                        fo.write(bytes.toByteArray());
                        // BUG FIX: flush and close the output stream before handing
                        // the file to the viewer intent. The original leaked the
                        // stream, and unflushed bytes could leave the JPEG truncated
                        // when the viewer opens it.
                        fo.flush();
                        fo.close();

                        // Hand the saved image to any viewer app.
                        Intent intent = new Intent();
                        intent.setAction(android.content.Intent.ACTION_VIEW);
                        var imgUri = android.net.Uri.fromFile(f);
                        intent.setDataAndType(imgUri, "image/*");
                        b.WithText("done!");
                        startActivity(intent);
                    }
                    // The type caught must derive from System.Exception in this toolchain.
                    catch (Exception ex)
                    {
                        // Surface the failure on the button rather than crashing.
                        b.WithText("saving.. error! " + ex.Message);
                        //throw;
                    }
                }
            });
        }
    );

    this.setContentView(p);
    this.addContentView(sv, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
}
/**
 * Round-trips the provided area code map through externalization: serializes it
 * into an in-memory stream, then reads a fresh AreaCodeMap back from that stream.
 * The resulting map is expected to be strictly equal to the one it was
 * generated from.
 */
private static AreaCodeMap createNewAreaCodeMap(AreaCodeMap areaCodeMap) {
    var buffer = new ByteArrayOutputStream();
    var writer = new ObjectOutputStream(buffer);
    areaCodeMap.writeExternal(writer);
    writer.flush();

    var roundTripped = new AreaCodeMap();
    var reader = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    roundTripped.readExternal(reader);
    return roundTripped;
}
// Decompiled (IKVM bytecode -> C#) PNG IDAT writer. DO NOT hand-restructure: the
// goto/label layout mirrors the original Java exception tables and the statement
// order is load-bearing.
//
// Purpose: grabs the source image in horizontal strips (sized so one strip's
// filtered scanlines fit in ~32K), applies the configured PNG row filter
// (0 = none, 1 = sub via filterSub, 2 = up via filterUp), deflate-compresses the
// strips, then emits a single IDAT chunk (length, tag, data, CRC) via writeInt4 /
// writeBytes and this.crc.
//
// Returns: true on success; false if the pixel grab errors/aborts, is
// interrupted, or any IOException is caught (the mapped exception is printed to
// System.err before returning).
//
// NOTE(review): ByteCodeHelper.MapException calls are IKVM's Java<->.NET
// exception-mapping shims; num literals like (int) short.MaxValue and the
// `num6 != num7 ? a % b : 0` guards are decompiler artifacts of Java's % and
// constant folding — presumably safe to leave as-is, verify against the original
// Java PngEncoder if ever touched.
protected internal virtual bool writeImageData() { int num1 = this.height; int num2 = 0; this.bytesPerPixel = !this.encodeAlpha ? 3 : 4; Deflater deflater = new Deflater(this.compressionLevel); ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream(1024); DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream((OutputStream) arrayOutputStream, deflater); IOException ioException1; IOException ioException2; while (true) { int num3; int[] numArray1; PixelGrabber pixelGrabber; try { if (num1 > 0) { int num4 = (int) short.MaxValue; int num5 = this.width * (this.bytesPerPixel + 1); int num6 = -1; num3 = Math.max(Math.min(num5 != num6 ? num4 / num5 : -num4, num1), 1); numArray1 = new int[this.width * num3]; pixelGrabber = new PixelGrabber(this.image, 0, num2, this.width, num3, numArray1, 0, this.width); try { pixelGrabber.grabPixels(); } catch (Exception ex) { int num7 = 2; if (ByteCodeHelper.MapException<Exception>(ex, (ByteCodeHelper.MapFlags) num7) == null) throw; else break; } } else goto label_33; } catch (IOException ex) { int num4 = 1; ioException1 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num4); goto label_8; } try { if ((pixelGrabber.getStatus() & 128) != 0) { System.get_err().println("image fetch aborted or errored"); return false; } else { byte[] pixels = new byte[this.width * num3 * this.bytesPerPixel + num3]; if (this.filter == 1) this.leftBytes = new byte[16]; if (this.filter == 2) this.priorRow = new byte[this.width * this.bytesPerPixel]; int num4 = 0; int startPos = 1; for (int index1 = 0; index1 < this.width * num3; ++index1) { int num5 = index1; int num6 = this.width; int num7 = -1; if ((num6 != num7 ? 
num5 % num6 : 0) == 0) { byte[] numArray2 = pixels; int index2 = num4; ++num4; int num8 = (int) (sbyte) this.filter; numArray2[index2] = (byte) num8; startPos = num4; } byte[] numArray3 = pixels; int index3 = num4; int num9 = num4 + 1; int num10 = (int) (sbyte) (numArray1[index1] >> 16 & (int) byte.MaxValue); numArray3[index3] = (byte) num10; byte[] numArray4 = pixels; int index4 = num9; int num11 = num9 + 1; int num12 = (int) (sbyte) (numArray1[index1] >> 8 & (int) byte.MaxValue); numArray4[index4] = (byte) num12; byte[] numArray5 = pixels; int index5 = num11; num4 = num11 + 1; int num13 = (int) (sbyte) (numArray1[index1] & (int) byte.MaxValue); numArray5[index5] = (byte) num13; if (this.encodeAlpha) { byte[] numArray2 = pixels; int index2 = num4; ++num4; int num8 = (int) (sbyte) (numArray1[index1] >> 24 & (int) byte.MaxValue); numArray2[index2] = (byte) num8; } int num14 = index1; int num15 = this.width; int num16 = -1; if ((num15 != num16 ? num14 % num15 : 0) == this.width - 1 && this.filter != 0) { if (this.filter == 1) this.filterSub(pixels, startPos, this.width); if (this.filter == 2) this.filterUp(pixels, startPos, this.width); } } deflaterOutputStream.write(pixels, 0, num4); num2 += num3; num1 -= num3; } } catch (IOException ex) { int num4 = 1; ioException2 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num4); goto label_32; } } IOException ioException3; try { System.get_err().println("interrupted waiting for pixels!"); return false; } catch (IOException ex) { int num3 = 1; ioException3 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num3); } IOException ioException4 = ioException3; goto label_37; label_8: ioException4 = ioException1; goto label_37; label_32: ioException4 = ioException2; goto label_37; label_33: int num17; IOException ioException5; try { deflaterOutputStream.close(); byte[] data = arrayOutputStream.toByteArray(); int length = data.Length; 
this.crc.reset(); this.bytePos = this.writeInt4(length, this.bytePos); this.bytePos = this.writeBytes(PngEncoder.__\u003C\u003EIDAT, this.bytePos); this.crc.update(PngEncoder.__\u003C\u003EIDAT); this.bytePos = this.writeBytes(data, length, this.bytePos); this.crc.update(data, 0, length); this.crcValue = this.crc.getValue(); this.bytePos = this.writeInt4((int) this.crcValue, this.bytePos); deflater.finish(); deflater.end(); num17 = 1; } catch (IOException ex) { int num3 = 1; ioException5 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num3); goto label_36; } return num17 != 0; label_36: ioException4 = ioException5; label_37: System.get_err().println(Throwable.instancehelper_toString((Exception) ioException4)); return false; }
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150926
// https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2015/201511/20151121

// Deploy over wifi:
//   connect s6 via usb, turn on wifi, kill adb
//   "x:\util\android-sdk-windows\platform-tools\adb.exe" tcpip 5555
//   "x:\util\android-sdk-windows\platform-tools\adb.exe" connect 192.168.1.126:5555
//   "x:\util\android-sdk-windows\platform-tools\adb.exe" shell am start -n "AndroidListApplications.Activities/.ApplicationWebServiceActivity"

/// <summary>
/// This Method is a javascript callable method.
/// Enumerates all launchable activities (ACTION_MAIN / CATEGORY_LAUNCHER),
/// ordered by package name, and invokes the callback once per activity with
/// its package name, activity name, label and (currently empty) icon data.
/// </summary>
/// <param name="yield">A callback to javascript, called once per resolved activity.</param>
public Task queryIntentActivities(yield_ACTION_MAIN yield)
{
    var context = ThreadLocalContextReference.CurrentContext;

    // http://stackoverflow.com/questions/2695746/how-to-get-a-list-of-installed-android-applications-and-pick-one-to-run
    // https://play.google.com/store/apps/details?id=com.flopcode.android.inspector
    var mainIntent = new Intent(Intent.ACTION_MAIN, null);
    mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);

    var pm = context.getPackageManager();

    var pkgAppsList = pm.queryIntentActivitiesEnumerable(mainIntent)
        .OrderBy((android.content.pm.ResolveInfo k) => k.activityInfo.packageName)
        .WithEach(
            r =>
            {
                // http://stackoverflow.com/questions/6344694/get-foreground-application-icon-convert-to-base64
                var label = (string)(object)pm.getApplicationLabel(r.activityInfo.applicationInfo);
                var icon_base64 = "";

                try
                {
                    var icon = pm.getApplicationIcon(r.activityInfo.applicationInfo);
                    if (icon != null)
                    {
                        // FIX: removed a stray `bitmap.compress(...)` statement that
                        // referenced `bitmap` before its declaration (compile error)
                        // and its unused ByteArrayOutputStream.
                        BitmapDrawable bitDw = ((BitmapDrawable)icon);
                        Bitmap bitmap = bitDw.getBitmap();
                        ByteArrayOutputStream stream = new ByteArrayOutputStream();
                        bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                        var bitmapByte = (byte[])(object)stream.toByteArray();

                        // Base64 conversion is disabled: it takes too long per app.
                        //icon_base64 = Convert.ToBase64String(bitmapByte);
                        //bitmapByte = Base64.encode(bitmapByte,Base64.DEFAULT);
                        //System.out.println("..length of image..."+bitmapByte.length);
                    }
                }
                catch
                {
                    // Best-effort: icon extraction failure leaves icon_base64 empty.
                }

                yield(
                    r.activityInfo.applicationInfo.packageName,
                    r.activityInfo.name,
                    icon_base64: icon_base64,
                    label: label
                );
            }
        );

    //yield_done();
    return new object().ToTaskResult();
}