Inheritance: java.io.OutputStream
Ejemplo n.º 1
0
        /// <summary>
        /// Renders an XSL-FO document to a PDF byte array using Apache FOP (via IKVM).
        /// </summary>
        /// <param name="xslfo">The XSL-FO source markup to render.</param>
        /// <param name="name">Document title recorded in the PDF metadata.</param>
        /// <returns>The rendered PDF file contents.</returns>
        public static byte[] ProcessXslFo(string xslfo, string name)
        {
            var userAgent = FopFactory.newFOUserAgent();
            userAgent.setCreator("Crispin (Apache FOP 2.1 via IKVM)");
            userAgent.setTitle(name);

            var pdfBuffer = new java.io.ByteArrayOutputStream();
            var fop = FopFactory.newFop(org.apache.xmlgraphics.util.MimeConstants.__Fields.MIME_PDF, userAgent, pdfBuffer);

            // Identity-style transform: pipe the FO source into FOP's SAX handler.
            var factory = new com.sun.org.apache.xalan.@internal.xsltc.trax.TransformerFactoryImpl();
            var identityTransformer = factory.newTransformer();

            var foSource = new StreamSource(new java.io.StringReader(xslfo));
            var fopResult = new SAXResult(fop.getDefaultHandler());

            identityTransformer.transform(foSource, fopResult);

            /*
             * Adding the page count requires a second pass. This should be configurable
             * by the report itself.
             * */
            /*
             * transformer.setParameter("page-count", fop.getResults().getPageCount().ToString());
             * transformer.transform(src, res);
             * */

            pdfBuffer.close();

            return pdfBuffer.toByteArray();
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Converts XSL-FO markup into a PDF document with Apache FOP 2.1 (IKVM build).
        /// </summary>
        /// <param name="xslfo">XSL-FO markup describing the document.</param>
        /// <param name="name">Title stored in the generated PDF's metadata.</param>
        /// <returns>The PDF as a byte array.</returns>
        public static byte[] ProcessXslFo(string xslfo, string name)
        {
            var agent = FopFactory.newFOUserAgent();
            agent.setCreator("Crispin (Apache FOP 2.1 via IKVM)");
            agent.setTitle(name);

            var pdfOut = new java.io.ByteArrayOutputStream();

            var fop = FopFactory.newFop(org.apache.xmlgraphics.util.MimeConstants.__Fields.MIME_PDF, agent, pdfOut);

            // Run an identity transform that feeds the FO input straight into FOP's SAX pipeline.
            var xalanFactory = new com.sun.org.apache.xalan.@internal.xsltc.trax.TransformerFactoryImpl();
            xalanFactory.newTransformer().transform(
                new StreamSource(new java.io.StringReader(xslfo)),
                new SAXResult(fop.getDefaultHandler()));

            /*
             * Adding the page count requires a second pass. This should be configurable
             * by the report itself.
             * */
            /*
            transformer.setParameter("page-count", fop.getResults().getPageCount().ToString());
            transformer.transform(src, res);
             * */

            pdfOut.close();

            return pdfOut.toByteArray();
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Demonstrates a full Stanford CoreNLP pipeline run: tokenize, split, tag,
        /// lemmatize, parse, NER and coreference — then pretty-prints the annotation.
        /// </summary>
        static void Main()
        {
            // Path to the folder with models extracted from `stanford-corenlp-3.7.0-models.jar`
            var jarRoot = @"..\..\..\..\paket-files\nlp.stanford.edu\stanford-corenlp-full-2016-10-31\models";

            // Text for processing
            var text = "Kosgi Santosh sent an email to Stanford University. He didn't get a reply.";

            // Annotation pipeline configuration
            var props = new Properties();
            props.setProperty("annotators", "tokenize, ssplit, pos, lemma, parse, ner,dcoref");
            props.setProperty("ner.useSUTime", "0");

            // StanfordCoreNLP resolves its model files relative to the current directory,
            // so we temporarily switch to the model folder. The restore now happens in a
            // finally block: the original code left the process stranded in jarRoot if
            // pipeline construction threw (e.g. a missing model file).
            var curDir = Environment.CurrentDirectory;
            StanfordCoreNLP pipeline;
            Directory.SetCurrentDirectory(jarRoot);
            try
            {
                pipeline = new StanfordCoreNLP(props);
            }
            finally
            {
                Directory.SetCurrentDirectory(curDir);
            }

            // Annotation
            var annotation = new Annotation(text);
            pipeline.annotate(annotation);

            // Result - Pretty Print
            using (var stream = new ByteArrayOutputStream())
            {
                pipeline.prettyPrint(annotation, new PrintWriter(stream));
                Console.WriteLine(stream.toString());
                stream.close();
            }
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Rewrites the response body via the (spec, uri, content, mimeType, writer)
        /// overload and, when that overload reports a change, stores the rewritten
        /// text back into <paramref name="content"/>.
        /// </summary>
        /// <param name="request">The originating request; may carry a MIME-type override and a gadget reference.</param>
        /// <param name="original">The fetched response; supplies the Content-Type header.</param>
        /// <param name="content">Mutable holder for the body; updated in place on a successful rewrite.</param>
        /// <returns>An indefinitely-cacheable result when the content was rewritten; otherwise null.</returns>
        public virtual RewriterResults rewrite(sRequest request, sResponse original, MutableContent content)
        {
            // Pre-size the buffer at ~110% of the current content length to reduce growth reallocations.
            ByteArrayOutputStream baos = new ByteArrayOutputStream((content.getContent().Length * 110) / 100);
            OutputStreamWriter output = new OutputStreamWriter(baos);
            // An explicitly requested rewrite MIME type takes precedence over the response header.
            String mimeType = original.getHeader("Content-Type");
            if (request.RewriteMimeType != null)
            {
                mimeType = request.RewriteMimeType;
            }
            GadgetSpec spec = null;
            if (request.Gadget != null)
            {
                spec = _specFactory.getGadgetSpec(request.Gadget.toJavaUri(), false);
            }
            if (rewrite(spec, request.getUri(),
                        content,
                        mimeType,
                        output))
            {
                // NOTE(review): baos is read without an output.flush() — presumably the inner
                // rewrite flushes the writer itself; confirm, or trailing buffered chars are lost.
                // NOTE(review): Encoding.Default is platform-dependent — verify the rewriter does
                // not emit UTF-8 here, otherwise non-ASCII content will be mangled.
                content.setContent(Encoding.Default.GetString(baos.toByteArray()));
                return RewriterResults.cacheableIndefinitely();

            }

            return null;
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Renders the "c.ftl" Freemarker template (loaded from the "template"
        /// directory) with this code generator exposed to the template as "codeGen".
        /// </summary>
        /// <returns>The rendered template output, or "" when loading/rendering fails.</returns>
        public String testFreemarker()
        {
            try
            {
                Configuration cfg = new Configuration();
                cfg.setDirectoryForTemplateLoading(new File("template"));
                cfg.setObjectWrapper(new DefaultObjectWrapper());

                Template temp = cfg.getTemplate("c.ftl");

                // The template reaches back into this instance through ${codeGen...}.
                Map root = new HashMap();
                root.put("codeGen", this);

                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                Writer output = new OutputStreamWriter(outputStream);

                temp.process(root, output);
                output.flush();
                return outputStream.toString();
            }
            catch (IOException exception)
            {
                // Keep the original best-effort contract (return ""), but surface the
                // cause instead of swallowing it in an empty catch block.
                Console.Error.WriteLine("testFreemarker: template I/O failed: " + exception);
            }
            catch (TemplateException exception)
            {
                Console.Error.WriteLine("testFreemarker: template processing failed: " + exception);
            }
            return "";
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Runs the CoreNLP demo pipeline on a fixed sentence pair, pretty-prints the
        /// result, and then invokes the custom per-sentence printer.
        /// </summary>
        public void StanfordCoreNlpDemoThatChangeCurrentDirectory()
        {
            const string Text = "Kosgi Santosh sent an email to Stanford University. He didn't get a reply.";

            // Annotation pipeline configuration
            var props = new Properties();
            props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner, parse, dcoref");
            props.setProperty("sutime.binders", "0");

            // StanfordCoreNLP loads its model files relative to the current directory.
            // The restore now sits in a finally block: the original version left the
            // process in Config.JarRoot if pipeline construction threw.
            var curDir = Environment.CurrentDirectory;
            edu.stanford.nlp.pipeline.StanfordCoreNLP pipeline;
            Directory.SetCurrentDirectory(Config.JarRoot);
            try
            {
                pipeline = new edu.stanford.nlp.pipeline.StanfordCoreNLP(props);
            }
            finally
            {
                Directory.SetCurrentDirectory(curDir);
            }

            // Annotation
            var annotation = new Annotation(Text);
            pipeline.annotate(annotation);

            // Result - Pretty Print
            using (var stream = new ByteArrayOutputStream())
            {
                pipeline.prettyPrint(annotation, new PrintWriter(stream));
                Console.WriteLine(stream.toString());
            }

            this.CustomAnnotationPrint(annotation);
        }
Ejemplo n.º 7
0
    /// <summary>
    /// Transforms an XML document with an XSLT style sheet into XSL-FO, renders the
    /// FO with Apache FOP, and streams the resulting PDF to the HTTP response.
    /// </summary>
    /// <param name="XMLFile">Path to the source XML data document.</param>
    /// <param name="XSLTFile">Path to the XSLT style sheet that produces XSL-FO.</param>
    private static void StreamPDF(string XMLFile, string XSLTFile)
    {
        // Load the style sheet.
        // BUG FIX: the original called xslt.Load(XMLFile) and objSourceData.Load(XSLTFile),
        // i.e. it loaded the data document as the style sheet and vice versa.
        XslCompiledTransform xslt = new XslCompiledTransform();
        xslt.Load(XSLTFile);

        //Load the Source XML Document
        XmlDocument objSourceData = new XmlDocument();
        objSourceData.Load(XMLFile);

        // Execute the transform and capture the XSL-FO output in memory.
        MemoryStream ms = new MemoryStream();
        xslt.Transform(objSourceData, null, ms);

        //Convert the Byte Array from MemoryStream to SByte Array (FOP's Java API wants signed bytes)
        sbyte[]               inputFOBytes = ToSByteArray(ms.ToArray());
        InputSource           inputFoFile  = new org.xml.sax.InputSource(new ByteArrayInputStream(inputFOBytes));
        ByteArrayOutputStream bos          = new java.io.ByteArrayOutputStream();

        org.apache.fop.apps.Driver dr = new org.apache.fop.apps.Driver(inputFoFile, bos);
        dr.setRenderer(org.apache.fop.apps.Driver.RENDER_PDF);
        dr.run();

        //Convert the SByte Array to Byte Array to stream to the Browser
        // (the unused MemoryStream the original wrapped around getBytes was removed)
        byte[] getBytes = ToByteArray(bos.toByteArray());

        Response.ContentType = "application/pdf";
        Response.AddHeader("Content-disposition", "filename=output.pdf");
        Response.OutputStream.Write(getBytes, 0, getBytes.Length);
        Response.OutputStream.Flush();
        Response.OutputStream.Close();
    }
Ejemplo n.º 8
0
        /// <summary>
        /// Serializes the wrapped Java package via writeExternal and copies the
        /// serialized bytes into the supplied .NET stream.
        /// </summary>
        /// <param name="stream">Destination stream; the caller retains ownership and must close it.</param>
        public void Save(Stream stream)
        {
            java.io.ByteArrayOutputStream byteStream = new java.io.ByteArrayOutputStream();
            ObjectOutput objOut = new DotnetObjectOutputStream(byteStream);

            this._javaPackage.writeExternal(objOut);

            // Flush the object stream before reading the buffer: the original read
            // byteStream without flushing, risking bytes held in the serializer's
            // internal buffer being dropped.
            objOut.flush();

            // Snapshot once — toByteArray() copies the whole buffer on every call,
            // and the original called it twice (data + length).
            byte[] data = byteStream.toByteArray();
            stream.Write(data, 0, data.Length);

            byteStream.close();
        }
Ejemplo n.º 9
0
 /// <summary>
 /// Invokes the Java-side compress(format, quality, stream) method on the wrapped
 /// Bitmap instance through JNI. Logs an error and returns when either JNI handle
 /// has not been initialized.
 /// </summary>
 public void compress(Bitmap.CompressFormat format, int quality, ByteArrayOutputStream stream)
 {
     if (_instance == IntPtr.Zero)
     {
         Debug.LogError("_instance is not initialized");
         return;
     }

     if (_jmCompress == IntPtr.Zero)
     {
         Debug.LogError("_jmCompress is not initialized");
         return;
     }

     // Marshal the three Java arguments: two object refs and one int.
     var args = new jvalue[3];
     args[0].l = format.Instance;
     args[1].i = quality;
     args[2].l = stream.Instance;

     AndroidJNI.CallVoidMethod(_instance, _jmCompress, args);
 }
Ejemplo n.º 10
0
        /// <summary>
        /// Runs the NLP pipeline over the given text and returns the annotation
        /// rendered in CoNLL format.
        /// </summary>
        /// <param name="input">Raw text to annotate.</param>
        /// <returns>The CoNLL-formatted annotation output.</returns>
        private string getAnnotation(string input)
        {
            var annotation = new Annotation(input);
            pipeline.annotate(annotation);

            using (var buffer = new ByteArrayOutputStream())
            {
                pipeline.conllPrint(annotation, new PrintWriter(buffer));
                var rendered = buffer.toString();
                buffer.close();
                return rendered;
            }
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Walks an annotated document and prints, for every sentence: each token with
        /// its POS and NER tags, the constituency parse tree, and the collapsed
        /// dependency edges.
        /// </summary>
        /// <param name="annotation">A fully annotated CoreNLP document.</param>
        public void CustomAnnotationPrint(Annotation annotation)
        {
            Console.WriteLine("-------------");
            Console.WriteLine("Custom print:");
            Console.WriteLine("-------------");

            var sentenceList = (ArrayList)annotation.get(new CoreAnnotations.SentencesAnnotation().getClass());
            foreach (CoreMap sentenceMap in sentenceList)
            {
                Console.WriteLine("\n\nSentence : '{0}'", sentenceMap);

                // Token-level annotations: surface form, part of speech, named-entity tag.
                var tokenList = (ArrayList)sentenceMap.get(new CoreAnnotations.TokensAnnotation().getClass());
                foreach (CoreLabel label in tokenList)
                {
                    var text = label.get(new CoreAnnotations.TextAnnotation().getClass());
                    var posTag = label.get(new CoreAnnotations.PartOfSpeechAnnotation().getClass());
                    var nerTag = label.get(new CoreAnnotations.NamedEntityTagAnnotation().getClass());
                    Console.WriteLine("{0} \t[pos={1}; ner={2}]", text, posTag, nerTag);
                }

                // Constituency parse, rendered through a Java PrintWriter into a byte buffer.
                Console.WriteLine("\nTree:");
                var parseTree = (Tree)sentenceMap.get(new TreeCoreAnnotations.TreeAnnotation().getClass());
                using (var buffer = new ByteArrayOutputStream())
                {
                    parseTree.pennPrint(new PrintWriter(buffer));
                    Console.WriteLine("The first sentence parsed is:\n {0}", buffer.toString());
                }

                // Collapsed typed dependencies, one edge per line.
                Console.WriteLine("\nDependencies:");
                var graph = (SemanticGraph)sentenceMap.get(new SemanticGraphCoreAnnotations.CollapsedDependenciesAnnotation().getClass());
                foreach (SemanticGraphEdge edge in graph.edgeListSorted().toArray())
                {
                    var governor = edge.getGovernor();
                    var dependent = edge.getDependent();
                    Console.WriteLine(
                        "{0}({1}-{2},{3}-{4})", edge.getRelation(),
                        governor.word(), governor.index(), dependent.word(), dependent.index());
                }
            }
        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150926
        // https://sites.google.com/a/jsc-solutions.net/backlog/knowledge-base/2015/201511/20151121

        //        connect s6 via usb .
        // turn on wifi!
        // kill adb

        //"x:\util\android-sdk-windows\platform-tools\adb.exe"  tcpip 5555
        // restarting in TCP mode port: 5555

        //13: wlan0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP qlen 1000
        //    inet 192.168.1.126/24 brd 192.168.1.255 scope global wlan0
        //       valid_lft forever preferred_lft forever

        // on red
        // "x:\util\android-sdk-windows\platform-tools\adb.exe" connect  192.168.1.126:5555
        // connected to 192.168.1.126:5555


        // "x:\util\android-sdk-windows\platform-tools\adb.exe" shell am start -n "AndroidListApplications.Activities/.ApplicationWebServiceActivity"


        /// <summary>
        /// This Method is a javascript callable method.
        /// </summary>
        /// <param name="yield">A callback to javascript, invoked once per launchable activity.</param>
        public Task queryIntentActivities(yield_ACTION_MAIN yield)
        {
            var context = ThreadLocalContextReference.CurrentContext;


            // http://stackoverflow.com/questions/2695746/how-to-get-a-list-of-installed-android-applications-and-pick-one-to-run
            // https://play.google.com/store/apps/details?id=com.flopcode.android.inspector

            // Query every activity that appears in the launcher (ACTION_MAIN + CATEGORY_LAUNCHER).
            var mainIntent = new Intent(Intent.ACTION_MAIN, null);

            mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);


            var pm = context.getPackageManager();

            // Sort by package name, then invoke the javascript callback once per matching activity.
            var pkgAppsList = pm.queryIntentActivitiesEnumerable(mainIntent)
                .OrderBy((android.content.pm.ResolveInfo k) => k.activityInfo.packageName)
                .WithEach(
                r =>
                {
                    // http://stackoverflow.com/questions/6344694/get-foreground-application-icon-convert-to-base64

                    var label = (string)(object)pm.getApplicationLabel(r.activityInfo.applicationInfo);

                    // NOTE(review): icon_base64 is never assigned after this — the Base64
                    // conversion below is commented out ("this will take forever"), so the
                    // callback always receives an empty icon string.
                    var icon_base64 = "";

                    try
                    {
                        var icon = pm.getApplicationIcon(r.activityInfo.applicationInfo);

                        if (icon != null)
                        {

                            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                            // bitmap.compress(CompressFormat.PNG, 0, outputStream); 

                            // Render the icon drawable to PNG bytes (result currently unused — see note above).
                            BitmapDrawable bitDw = ((BitmapDrawable)icon);
                            Bitmap bitmap = bitDw.getBitmap();
                            ByteArrayOutputStream stream = new ByteArrayOutputStream();
                            bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                            var bitmapByte = (byte[])(object)stream.toByteArray();

                            // this will take forever
                            //icon_base64 = Convert.ToBase64String(bitmapByte);

                            //bitmapByte = Base64.encode(bitmapByte,Base64.DEFAULT);
                            //System.out.println("..length of image..."+bitmapByte.length);
                        }
                    }
                    catch
                    {
                        // Best-effort: icon extraction failures are ignored; the entry is still yielded.
                    }


                    yield(
                        r.activityInfo.applicationInfo.packageName,
                        r.activityInfo.name,

                        icon_base64: icon_base64,
                        label: label
                    );
                }
            );

            //yield_done();

            // Fire-and-forget contract: completion is signaled per-item via the callback.
            return new object().ToTaskResult();
        }
    /// <summary>
    /// Loads the OUYA controller drawable for a key code into a Texture2D.
    /// Sets <paramref name="button"/> to null when no usable image exists.
    /// </summary>
    /// <param name="button">Receives the loaded texture, or null on failure.</param>
    /// <param name="keyCode">OUYA key code whose button image is requested.</param>
    private void SetDrawable(out Texture2D button, int keyCode)
    {
        button = null;

        var buttonData = OuyaController.getButtonData(keyCode);
        if (null == buttonData)
        {
            return;
        }

        if (null == buttonData.buttonDrawable)
        {
            return;
        }

        var drawable = (BitmapDrawable)buttonData.buttonDrawable;
        if (null == drawable)
        {
            return;
        }

        var bitmap = drawable.getBitmap();
        if (null == bitmap)
        {
            return;
        }

        // Encode the bitmap as PNG; an empty stream means nothing was written.
        var pngStream = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, pngStream);
        if (pngStream.size() != 0)
        {
            button = new Texture2D(0, 0);
            button.LoadImage(pngStream.toByteArray());
        }
        pngStream.close();
    }
   /**
      * Gets the size of the provided area code map storage. The map storage passed-in will be filled
      * as a result.
      *
      * @param mapStorage storage strategy to populate and measure; mutated by this call
      * @param areaCodeMap source mapping of phone-number prefixes to location descriptions
      * @return the number of bytes the serialized storage occupies
      */
   private static int getSizeOfAreaCodeMapStorage(AreaCodeMapStorageStrategy mapStorage,
 SortedMap<Integer, String> areaCodeMap)
   {
       mapStorage.readFromSortedMap(areaCodeMap);
       // Serialize the populated storage into an in-memory buffer and measure the result.
       ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
       ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream);
       mapStorage.writeExternal(objectOutputStream);
       // flush() pushes any bytes buffered by the object stream into the byte array
       // before size() is read; without it the count could be short.
       objectOutputStream.flush();
       int sizeOfStorage = byteArrayOutputStream.size();
       objectOutputStream.close();
       return sizeOfStorage;
   }
        /// <summary>
        /// Reads an image file and returns its bytes; when <paramref name="thumb"/> is
        /// true (the default), the image is first rescaled to a 96px-high PNG thumbnail.
        /// </summary>
        /// <param name="filepath">Path of the image file to read.</param>
        /// <param name="thumb">When true, return a scaled-down PNG instead of the raw file bytes.</param>
        /// <returns>The (possibly thumbnailed) image bytes.</returns>
        private static byte[] InternalReadBytes(string filepath, bool thumb = true)
        {
            // Double cast byte[] -> object -> sbyte[]: relies on the transpiler treating
            // the two array types as interchangeable (Java bytes are signed).
            var mImageData = (sbyte[])(object)System.IO.File.ReadAllBytes(filepath);

            if (thumb)
            {
                // http://stackoverflow.com/questions/2577221/android-how-to-create-runtime-thumbnail
                int THUMBNAIL_HEIGHT = 96;

                //int THUMBNAIL_WIDTH = 66;

                // Decode, then rescale preserving the aspect ratio to a fixed 96px height.
                var imageBitmap = BitmapFactory.decodeByteArray(mImageData, 0, mImageData.Length);
                float width = imageBitmap.getWidth();
                float height = imageBitmap.getHeight();
                float ratio = width / height;
                imageBitmap = Bitmap.createScaledBitmap(imageBitmap, (int)(THUMBNAIL_HEIGHT * ratio), THUMBNAIL_HEIGHT, false);

                //int padding = (THUMBNAIL_WIDTH - imageBitmap.getWidth()) / 2;
                //imageView.setPadding(padding, 0, padding, 0);
                //imageView.setImageBitmap(imageBitmap);



                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                // http://developer.android.com/reference/android/graphics/Bitmap.html
                // PNG is lossless, so the quality argument (0) is ignored by compress().
                imageBitmap.compress(Bitmap.CompressFormat.PNG, 0, baos);
                // NOTE(review): toByteArray() is assigned straight into the sbyte[] variable —
                // presumably this binding's toByteArray returns sbyte[]; confirm against the
                // ByteArrayOutputStream binding in use.
                mImageData = baos.toByteArray();

            }

            return (byte[])(object)mImageData;
        }
        public static File InternalTakePicture(int num = 0)
        {
            var DIRECTORY_DCIM = global::android.os.Environment.DIRECTORY_DCIM;


            var path = global::android.os.Environment.getExternalStoragePublicDirectory(DIRECTORY_DCIM).getAbsolutePath();
            path += "/Camera";


            //var SAVE_PATH = android.os.Environment.getExternalStoragePublicDirectory(
            //    android.os.Environment.DIRECTORY_PICTURES
            //) + "/";

            var n = DateTime.Now;

            var f = new File(path + "/shot" + n.Ticks + ".jpg");

            //I/System.Console(31472): enter TakePicture
            //W/CameraService(  128): CameraService::connect X (pid 31472) rejected (existing client).
            //I/System.Console(31472): error takePicture { Message = Fail to connect to camera service, StackTrace = java.lang.RuntimeException: Fail to connect to camera service
            //I/System.Console(31472):        at android.hardware.Camera.native_setup(Native Method)
            //I/System.Console(31472):        at android.hardware.Camera.<init>(Camera.java:340)
            //I/System.Console(31472):        at android.hardware.Camera.open(Camera.java:302)
            var camera = android.hardware.Camera.open(num);

            //            W/CameraService(  128): CameraService::connect X (pid 2499) rejected (existing client).
            //D/dalvikvm( 2499): GC_CONCURRENT freed 873K, 12% free 7525K/8544K, paused 4ms+4ms, total 59ms
            //D/dalvikvm( 2499): WAIT_FOR_CONCURRENT_GC blocked 14ms
            //I/System.Console( 2499): error takePicture { Message = Fail to connect to camera service, StackTrace = java.lang.RuntimeException: Fail to connect to camera service
            //I/System.Console( 2499):        at android.hardware.Camera.native_setup(Native Method)
            //I/System.Console( 2499):        at android.hardware.Camera.<init>(Camera.java:340)
            //I/System.Console( 2499):        at android.hardware.Camera.open(Camera.java:302)
            //I/System.Console( 2499):        at CameraExperiment.foo.InternalTakePicture(foo.java:65)

            var p = camera.getParameters();

            p.setRotation(0);

            //camera.stopFaceDetection();

            var s = p.getSupportedPictureSizes();

            var min = default(android.hardware.Camera.Size);

            for (int i = 0; i < s.size(); i++)
            {
                var size = (android.hardware.Camera.Size)s.get(i);

                //                I/System.Console( 6058): before takePicture { f = /mnt/sdcard/Pictures/shot.jpg }
                //I/System.Console( 6058): { size = android.hardware.Camera$Size@4fde180 }

                System.Console.WriteLine(new { size.width, size.height });

                if (min == null)
                    min = size;
                else if (min.width > size.width)
                    min = size;


            }

            System.Console.WriteLine("before setPictureSize ");
            p.setPictureSize(min.width, min.height);

            //E/CameraHardwareSec(   84): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): unmatched focus_mode(continuous-picture)
            //E/CameraHardwareSec(   84): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode


            var focusModes = p.getSupportedFocusModes();
            var NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_FIXED;

            for (int i = 0; i < focusModes.size(); i++)
            {
                var focusMode = (string)focusModes.get(i);

                if (focusMode == android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY)
                    NextFocus = android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY;

                System.Console.WriteLine(new { focusMode });
            }

            //            I/System.Console(31232): before setPictureSize
            //I/System.Console(31232): { focusMode = fixed }
            //I/System.Console(31232): before setFocusMode
            //E/NvOmxCameraSettingsParser(  128): Failed substring capabilities check, unsupported parameter: 'infinity', original: fixed
            //E/NvOmxCameraSettingsParser(  128): extractChanges: Invalid parameter!
            //E/NvOmxCamera(  128): setParameters: Invalid parameters
            //I/System.Console(31232): error takePicture { Message = setParameters failed, StackTrace = java.lang.RuntimeException: setParameters failed

            // { focusMode = auto }
            // { focusMode = infinity }
            // { focusMode = macro }
            // before setFocusMode
            //9): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): unmatched focus_mode(fixed)
            //9): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode
            // error takePicture { Message = setParameters failed, StackTrace = java.lang.RuntimeException: setParameters failed
            //        at android.hardware.Camera.native_setParameters(Native Method)
            //        at android.hardware.Camera.setParameters(Camera.java:950)
            //        at CameraExperiment.foo.InternalTakePicture(foo.java:105)

            //            E/SecCamera(   84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800005b (91), value = 0) failed ret = -1
            //E/SecCamera(   84): ERR(int android::SecCamera::setFaceDetect(int)):Fail on V4L2_CID_CAMERA_FACE_DETECTION
            //E/SecCamera(   84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x8000063 (99), value = 6) failed ret = -1
            //E/SecCamera(   84): ERR(int android::SecCamera::setFocusMode(int)):Fail on V4L2_CID_CAMERA_FOCUS_MODE
            //E/CameraHardwareSec(   84): android::status_t android::CameraHardwareSec::setSceneModeParameter(const android::CameraParameters&): mSecCamera->setFocusMode(6) fail
            //E/CameraHardwareSec(   84): virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&): Failed to setting scene mode
            //E/SecCamera(   84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800006c (108), value = 1) failed ret = -1
            //E/SecCamera(   84): ERR(int android::SecCamera::setBatchReflection()):Fail on V4L2_CID_CAMERA_BATCH_REFLECTION
            //E/CameraHardwareSec(   84): ERR(virtual android::status_t android::CameraHardwareSec::setParameters(const android::CameraParameters&)):Fail on mSecCamera->setBatchCmd


            System.Console.WriteLine("before setFocusMode " + new { NextFocus });
            //p.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_INFINITY);
            p.setFocusMode(NextFocus);


            //            E/SecCamera(   84): ERR(int android::fimc_poll(pollfd*)):No data in 10 secs..
            //I/ShotSingle(   84): CAMERA_MSG_COMPRESSED_IMAGE

            camera.setParameters(p);

            // http://stackoverflow.com/questions/9744790/android-possible-to-camera-capture-without-a-preview
            var b = new EventWaitHandle(false, EventResetMode.ManualReset);
            System.Console.WriteLine("before startPreview ");


            Action done = delegate { };

            try
            {
                // #5 java.lang.RuntimeException: Can't create handler inside thread that has not called Looper.prepare()

                (ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext as Activity).With(
                aa =>
                {
                    aa.runOnUiThread(
                       new f
                       {
                           y = delegate
                           {
                               try
                               {
                                   // D/Camera  ( 2464): app passed NULL surface

                                   System.Console.WriteLine("before getHolder ");

                                   //  the nexus 7 and droid x both don't support the passing of a dummy surfaceview to a camera object. Your response that all camera things must created in the activity is false. I was able to instantiate a camera within a thread by passing it a view just fine. 

                                   // here, the unused surface view and holder
                                   var dummy = new SurfaceView(ScriptCoreLib.Android.ThreadLocalContextReference.CurrentContext);

                                   // missing for android 2.2
                                   //dummy.setScaleX(0f);
                                   //dummy.setScaleY(0f);

                                   var h = dummy.getHolder();

                                   // http://developer.android.com/reference/android/view/SurfaceHolder.html#SURFACE_TYPE_PUSH_BUFFERS
                                   var SURFACE_TYPE_PUSH_BUFFERS = 0x00000003;
                                   h.setType(SURFACE_TYPE_PUSH_BUFFERS);

                                   h.addCallback(
                                       new XSurfaceHolder_Callback
                                       {
                                           yield_surfaceCreated = delegate
                                           {
                                               System.Console.WriteLine("at yield_surfaceCreated ");

                                               try
                                               {

                                                   camera.setPreviewDisplay(h);
                                                   camera.startPreview();

                                                   System.Console.WriteLine("after startPreview ");

                                                   b.Set();
                                               }
                                               catch
                                               {
                                                   throw;
                                               }
                                           }
                                       }
                                   );

                                   //h.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);


                                   aa.addContentView(dummy, new android.widget.LinearLayout.LayoutParams(
                                     android.widget.LinearLayout.LayoutParams.WRAP_CONTENT,
                                     android.widget.LinearLayout.LayoutParams.WRAP_CONTENT
                                     )
                                   );

                                   done = delegate
                                   {
                                       aa.runOnUiThread(
                                          new f
                                          {
                                              y = delegate
                                              {
                                                  // https://groups.google.com/forum/?fromgroups#!topic/android-developers/liph4z9LnFA

                                                  // how to Orphanize??
                                                  dummy.setVisibility(View.GONE);

                                              }
                                          }
                                      );

                                   };



                               }
                               catch
                               {
                                   throw;
                               }

                           }
                       }
                        );
                }
                );


            }
            catch
            {
                throw;
            }
            b.WaitOne();

            //camera.@lock();
            var a = new EventWaitHandle(false, EventResetMode.ManualReset);
            //var b = new EventWaitHandle(false, EventResetMode.ManualReset);

            //            E/SecCamera(   84): ERR(int android::fimc_v4l2_s_ctrl(int, unsigned int, unsigned int)):VIDIOC_S_CTRL(id = 0x800005d (93), value = 1) failed ret = -1
            //E/SecCamera(   84): ERR(int android::SecCamera::setAutofocus()):Fail on V4L2_CID_CAMERA_SET_AUTO_FOCUS
            //E/CameraHardwareSec(   84): ERR(int android::CameraHardwareSec::autoFocusThread()):Fail on mSecCamera->setAutofocus()

            //System.Console.WriteLine("before autoFocus " + new { f });

            ////

            //camera.autoFocus(
            //    new XAutoFocus
            //    {
            //        yield = delegate
            //        {
            //            System.Console.WriteLine("at autoFocus " + new { f });

            // https://github.com/mozilla-b2g/android-device-crespo/blob/master/libcamera/SecCamera.cpp



            //            E/SecCamera(   84): ERR(int android::fimc_poll(pollfd*)):No data in 10 secs..
            //I/ShotSingle(   84): CAMERA_MSG_COMPRESSED_IMAGE
            //D/dalvikvm( 6608): GC_FOR_MALLOC freed 607K, 58% free 2856K/6727K, external 2013K/2108K, paused 18ms
            //I/dalvikvm-heap( 6608): Grow heap (frag case) to 7.847MB for 614416-byte allocation
            //D/dalvikvm( 6608): GC_FOR_MALLOC freed 46K, 54% free 3410K/7367K, external 2013K/2108K, paused 13ms
            //I/System.Console( 6608): enter XCameraPictureCallback { Length = 0 }
            //I/System.Console( 6608): exit XCameraPictureCallback

            //for (int i = 0; i < 11; i++)
            //{
            //    System.Console.WriteLine("warming up camera machine... " + i);
            //    Thread.Sleep(1000);

            //}




            // http://stackoverflow.com/questions/15279911/using-camera-without-preview-or-surface-in-android
            // http://handycodeworks.com/?p=19
            // you are required to call startPreview() first before calling takePicture()
            System.Console.WriteLine("before takePicture " + new { f });

            camera.setErrorCallback(
                new XErrorCallback
                {
                    yield = (err, c) =>
                    {
                        System.Console.WriteLine(new { err });
                    }

                }
            );

            // preview ready?

            var at_setPreviewCallback = new EventWaitHandle(false, EventResetMode.ManualReset);

            System.Console.WriteLine("before setPreviewCallback ");
            // is this of any use?
            camera.setOneShotPreviewCallback(
                new XCameraPreviewCallback
                {
                    yield = delegate
                    {
                        at_setPreviewCallback.Set();
                    }
                }
            );

            at_setPreviewCallback.WaitOne();
            System.Console.WriteLine("after setPreviewCallback ");
            Thread.Sleep(150);


            camera.takePicture(
                null, null,
                new XCameraPictureCallback
                {
                    yield = (data, c) =>
                    {
                        System.Console.WriteLine("enter XCameraPictureCallback " + new { data.Length });

                        if (data.Length > 0)
                        {
                            var bmp = BitmapFactory.decodeByteArray(data, 0, data.Length);



                            File directory = new File(path);
                            directory.mkdirs();

                            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                            bmp.compress(Bitmap.CompressFormat.JPEG, 100, bytes);


                            try
                            {
                                f.createNewFile();

                                FileOutputStream fo = new FileOutputStream(f);
                                fo.write(bytes.toByteArray());
                            }
                            catch
                            {
                                throw;
                            }
                        }
                        System.Console.WriteLine("exit XCameraPictureCallback");

                        camera.release();

                        done();

                        //[javac] V:\src\CameraExperiment\ApplicationWebService___c__DisplayClass2.java:54: cannot find symbol
                        //[javac] symbol  : method Set()
                        //[javac] location: class ScriptCoreLibJava.BCLImplementation.System.Threading.__AutoResetEvent
                        //[javac]         this.a.Set();
                        //[javac]               ^

                        a.Set();
                    }
                }
            );




            //            I/System.Console( 6264): before takePicture { f = /mnt/sdcard/Pictures/shot.jpg }
            //I/System.Console( 6264): { width = 2560, height = 1920 }
            //I/System.Console( 6264): { width = 2560, height = 1536 }
            //I/System.Console( 6264): { width = 2048, height = 1536 }
            //I/System.Console( 6264): { width = 2048, height = 1232 }
            //I/System.Console( 6264): { width = 1600, height = 1200 }
            //I/System.Console( 6264): { width = 1600, height = 960 }
            //I/System.Console( 6264): { width = 800, height = 480 }
            //I/System.Console( 6264): { width = 640, height = 480 }
            //I/ShotSingle(   84): ShotSingle::takePicture start
            //I/ShotSingle(   84): ShotSingle::takePicture end
            //I/System.Console( 6264): after takePicture
            //        }
            //    }
            //);



            System.Console.WriteLine("will wait for takePicture to complete ... " + new { f });
            a.WaitOne();
            return f;
        }
        /// <summary>
        /// Loads an image file from disk and returns either its raw bytes or a
        /// small PNG thumbnail (96 pixels tall, width scaled to preserve the
        /// aspect ratio) produced via the Android bitmap APIs.
        /// </summary>
        /// <param name="filepath">Absolute path of the image file to read.</param>
        /// <param name="thumb">When false the file contents are returned unmodified.</param>
        private static byte[] InternalReadBytes(string filepath, bool thumb = true)
        {
            var stopwatch = Stopwatch.StartNew();

            // System.Console is spelled out because java.io.Console is also in scope
            // in this file, making a bare "Console" ambiguous.
            System.Console.WriteLine("enter InternalReadBytes " + new { filepath, thumb, stopwatch.ElapsedMilliseconds });

            if (!thumb)
            {
                // Raw mode: hand the file contents back untouched.
                return System.IO.File.ReadAllBytes(filepath);
            }

            var thumbnailBytes = default(byte[]);

            try
            {
#if xmetadata
  //<package id="AndroidMetadataExtractor" version="1.0.0.0" targetFramework="net40" />
                    // Fast path: pull the embedded EXIF thumbnail straight out of the file.
                    var m = ImageMetadataReader.readMetadata(new File(filepath));

                    var t = typeof(com.drew.metadata.exif.ExifThumbnailDirectory).ToClass();
                    if (m.containsDirectory(t))
                    {
                        var x = (com.drew.metadata.exif.ExifThumbnailDirectory)m.getDirectory(t);

                        System.Console.WriteLine(
                           filepath
                        );

                        thumbnailBytes = x.getThumbnailData();
                    }
#endif

            }
            catch
            {
                // Best effort only — fall through to the scaled-bitmap path below.
            }

            if (thumbnailBytes == null)
            {
                // No EXIF thumbnail available: decode the full image and scale it down.
                // The sbyte[] cast bridges .NET byte[] to the Java byte[] the Android
                // API expects under IKVM/JSC.
                var raw = (sbyte[])(object)System.IO.File.ReadAllBytes(filepath);

                // Fixed thumbnail height; the width follows from the aspect ratio.
                int THUMBNAIL_HEIGHT = 96;

                var bitmap = android.graphics.BitmapFactory.decodeByteArray(raw, 0, raw.Length);
                float width = bitmap.getWidth();
                float height = bitmap.getHeight();
                float ratio = width / height;
                bitmap = android.graphics.Bitmap.createScaledBitmap(bitmap, (int)(THUMBNAIL_HEIGHT * ratio), THUMBNAIL_HEIGHT, false);

                // Quality 0 is fine here: PNG is lossless and ignores the quality hint
                // (see Bitmap.compress documentation).
                var buffer = new ByteArrayOutputStream();
                bitmap.compress(android.graphics.Bitmap.CompressFormat.PNG, 0, buffer);
                thumbnailBytes = (byte[])(object)buffer.toByteArray();
            }

            System.Console.WriteLine("exit InternalReadBytes " + new { filepath, thumb, stopwatch.ElapsedMilliseconds });

            return thumbnailBytes;
        }
        /// <summary>
        /// Round-trips an <c>AreaCodeMap</c> through Java serialization: the map is
        /// written to an in-memory stream and a fresh instance is populated from the
        /// same bytes. The result is expected to compare strictly equal to the map
        /// it was generated from.
        /// </summary>
        private static AreaCodeMap createNewAreaCodeMap(AreaCodeMap areaCodeMap)
        {
            var buffer = new ByteArrayOutputStream();
            var writer = new ObjectOutputStream(buffer);
            areaCodeMap.writeExternal(writer);
            writer.flush();

            var reader = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()));
            var roundTripped = new AreaCodeMap();
            roundTripped.readExternal(reader);
            return roundTripped;
        }
 /// <summary>
 /// Encodes the given image into an in-memory buffer and returns the raw bytes,
 /// delegating the actual encoding to <c>encode(BufferedImage, OutputStream)</c>.
 /// </summary>
 public virtual byte[] encode(BufferedImage bufferedImage)
 {
   var buffer = new ByteArrayOutputStream();
   this.encode(bufferedImage, (OutputStream)buffer);
   return buffer.toByteArray();
 }
Ejemplo n.º 20
0
 /// <summary>
 /// Serializes the wrapped Java package to the given .NET stream using Java
 /// external serialization routed through an in-memory buffer.
 /// </summary>
 /// <param name="stream">Destination stream; bytes are written at its current position.</param>
 public void Save(Stream stream)
 {
     //FileOutputStream _javastream = new FileOutputStream(filePath);
     java.io.ByteArrayOutputStream byteStream = new java.io.ByteArrayOutputStream();
     ObjectOutput objOut = new DotnetObjectOutputStream(byteStream);
     this._javaPackage.writeExternal(objOut);

     // Flush the object stream before reading the buffer: ObjectOutput
     // implementations buffer internally, so toByteArray() could otherwise
     // return an incomplete snapshot.
     objOut.flush();

     // toByteArray() copies the whole buffer — call it once, not twice.
     byte[] data = byteStream.toByteArray();
     stream.Write(data, 0, data.Length);

     byteStream.close();
 }
    /// <summary>
    /// Builds a <c>WidgetOuyaMod</c> UI row for every mod in the list — copying the
    /// mod's metadata, reading each attached file's content, and decoding every
    /// screenshot/thumbnail into a <c>Texture2D</c> — then wires controller-focus
    /// navigation between the new row's buttons and the previous row (or the
    /// create button for the first row).
    /// </summary>
    /// <param name="ouyaMods">Mods returned by the OUYA content search.</param>
    /// <param name="searchByInstalled">True when this batch came from an "installed" search.</param>
    /// <param name="searchByPublished">True when this batch came from a "published" search.</param>
    void AddWidgets(List<OuyaMod> ouyaMods, bool searchByInstalled, bool searchByPublished)
    {
        // One builder reused for both the filenames string and the tags string.
        StringBuilder sb = new StringBuilder();
        foreach (OuyaMod ouyaMod in ouyaMods)
        {
            // Snapshot the mod's metadata into a plain widget object.
            WidgetOuyaMod widget = new WidgetOuyaMod()
            {
                m_instance = ouyaMod,
                m_category = ouyaMod.getCategory(),
                m_description = ouyaMod.getDescription(),
                m_isDownloading = ouyaMod.isDownloading(),
                m_isFlagged = ouyaMod.isFlagged(),
                m_isInstalled = ouyaMod.isInstalled(),
                m_isPublished = ouyaMod.isPublished(),
                m_metaData = ouyaMod.getMetaData(),
                m_ratingCount = ouyaMod.getRatingCount(),
                m_ratingAverage = ouyaMod.getRatingAverage(),
                m_title = ouyaMod.getTitle(),
                m_userRating = ouyaMod.getUserRating(),
                m_searchByInstalled = searchByInstalled,
                m_searchByPublished = searchByPublished,
            };
            // Reset the shared builder before accumulating this mod's filenames.
            if (sb.Length > 0)
            {
                sb.Remove(0, sb.Length);
            }
            foreach (string filename in ouyaMod.getFilenames())
            {
                sb.Append(filename);
                sb.Append(",");

                using (InputStream inputStream = ouyaMod.openFile(filename))
                {
                    // Single read of up to 100000 bytes; the file content is appended
                    // after a "***" separator.
                    byte[] buffer = new byte[100000];
                    int readAmount = inputStream.read(ref buffer);
                    inputStream.close();

                    // NOTE(review): InputStream.read returns -1 at end-of-stream, which
                    // would make the allocation/copy below throw on an empty file, and a
                    // single read may not consume a file larger than the buffer — confirm.
                    byte[] copy = new byte[readAmount];
                    Array.Copy(buffer, copy, readAmount);

                    sb.Append("***");
                    string content = System.Text.UTF8Encoding.UTF8.GetString(copy);
                    sb.Append(content);
                }
            }
            widget.m_filenames = sb.ToString();
            // Decode each screenshot (and its thumbnail) from an Android Bitmap into
            // a PNG byte stream, then load it into a Unity Texture2D.
            List<OuyaModScreenshot> screenshots = ouyaMod.getScreenshots();
            widget.m_screenshots = new Texture2D[screenshots.Count];
            widget.m_thumbnails = new Texture2D[screenshots.Count];
            for (int index = 0; index < screenshots.Count; ++index)
            {
                using (OuyaModScreenshot ouyaModScreenshot = screenshots[index])
                {
                    if (null != ouyaModScreenshot)
                    {
                        using (Bitmap bitmap = ouyaModScreenshot.getImage())
                        {
                            if (null != bitmap)
                            {
                                using (ByteArrayOutputStream stream = new ByteArrayOutputStream())
                                {
                                    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                                    if (stream.size() >= 0)
                                    {
                                        Texture2D texture = new Texture2D(0, 0);
                                        texture.LoadImage(stream.toByteArray());
                                        widget.m_screenshots[index] = texture;
                                    }
                                    stream.close();
                                }
                            }
                        }

                        using (Bitmap bitmap = ouyaModScreenshot.getThumbnail())
                        {
                            if (null != bitmap)
                            {
                                using (ByteArrayOutputStream stream = new ByteArrayOutputStream())
                                {
                                    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
                                    if (stream.size() >= 0)
                                    {
                                        Texture2D texture = new Texture2D(0, 0);
                                        texture.LoadImage(stream.toByteArray());
                                        widget.m_thumbnails[index] = texture;
                                    }
                                    stream.close();
                                }
                            }
                        }
                    }
                }
            }
            // Reset the shared builder again, this time for the tag list.
            if (sb.Length > 0)
            {
                sb.Remove(0, sb.Length);
            }
            foreach (string tag in ouyaMod.getTags())
            {
                sb.Append(tag);
                sb.Append(",");
            }
            widget.m_tags = sb.ToString();

            m_widgets.Add(widget);

            // Focus wiring: the first row's buttons navigate up to the create button;
            // every later row chains vertically to the matching button of the row
            // directly above it. Left/right links run Publish → Delete → Download →
            // Rate → Edit → Flag within a row.
            if (m_widgets.Count == 1)
            {
                m_focusManager.Mappings[m_btnCreate].Down = widget.m_buttonPublish;
                m_focusManager.Mappings[widget.m_buttonPublish] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Right = widget.m_buttonDelete,
                };
                m_focusManager.Mappings[widget.m_buttonDelete] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Left = widget.m_buttonPublish,
                    Right = widget.m_buttonDownload,
                };
                m_focusManager.Mappings[widget.m_buttonDownload] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Left = widget.m_buttonDelete,
                    Right = widget.m_buttonRate,
                };
                m_focusManager.Mappings[widget.m_buttonRate] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Left = widget.m_buttonDownload,
                    Right = widget.m_buttonEdit,
                };
                m_focusManager.Mappings[widget.m_buttonEdit] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Left = widget.m_buttonRate,
                    Right = widget.m_buttonFlag,
                };
                m_focusManager.Mappings[widget.m_buttonFlag] = new FocusManager.ButtonMapping()
                {
                    Up = m_btnCreate,
                    Left = widget.m_buttonEdit,
                };
            }
            else
            {
                // Link the previous row's buttons down to this row, then map this
                // row's buttons back up to the previous row.
                WidgetOuyaMod lastWidget = m_widgets[m_widgets.Count - 2];
                m_focusManager.Mappings[lastWidget.m_buttonPublish].Down = widget.m_buttonPublish;
                m_focusManager.Mappings[lastWidget.m_buttonDelete].Down = widget.m_buttonDelete;
                m_focusManager.Mappings[lastWidget.m_buttonDownload].Down = widget.m_buttonDownload;
                m_focusManager.Mappings[lastWidget.m_buttonRate].Down = widget.m_buttonRate;
                m_focusManager.Mappings[lastWidget.m_buttonEdit].Down = widget.m_buttonEdit;
                m_focusManager.Mappings[lastWidget.m_buttonFlag].Down = widget.m_buttonFlag;
                m_focusManager.Mappings[widget.m_buttonPublish] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonPublish,
                    Right = widget.m_buttonDelete,
                };
                m_focusManager.Mappings[widget.m_buttonDelete] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonDelete,
                    Left = widget.m_buttonPublish,
                    Right = widget.m_buttonDownload,
                };
                m_focusManager.Mappings[widget.m_buttonDownload] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonDownload,
                    Left = widget.m_buttonDelete,
                    Right = widget.m_buttonRate,
                };
                m_focusManager.Mappings[widget.m_buttonRate] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonRate,
                    Left = widget.m_buttonDownload,
                    Right = widget.m_buttonEdit,
                };
                m_focusManager.Mappings[widget.m_buttonEdit] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonEdit,
                    Left = widget.m_buttonRate,
                    Right = widget.m_buttonFlag,
                };
                m_focusManager.Mappings[widget.m_buttonFlag] = new FocusManager.ButtonMapping()
                {
                    Up = lastWidget.m_buttonFlag,
                    Left = widget.m_buttonEdit,
                };
            }
        }
    }
Ejemplo n.º 22
0
 /// <summary>
 /// Renders the JUnit run summary as text by replaying <c>this.result</c> through
 /// a <c>TextListener</c> that writes into an in-memory stream.
 /// </summary>
 public virtual string toString()
 {
   var buffer = new ByteArrayOutputStream();
   var listener = new TextListener(new PrintStream((OutputStream)buffer));
   listener.testRunFinished(this.result);
   return buffer.toString();
 }
        /// <summary>
        /// Activity entry point: shows a live camera preview with a "take a picture"
        /// button overlaid on top. When pressed, the captured JPEG is re-encoded,
        /// written to the public Pictures directory as "hello2.jpg", and opened via
        /// an ACTION_VIEW intent.
        /// </summary>
        protected override void onCreate(Bundle savedInstanceState)
        {
            base.onCreate(savedInstanceState);

            // Button sits inside a LinearLayout inside a ScrollView, layered over
            // the preview via addContentView below.
            var sv = new ScrollView(this);
            var ll = new LinearLayout(this);
            //ll.setOrientation(LinearLayout.VERTICAL);
            sv.addView(ll);


            var b = new Button(this);
            ll.addView(b);

            var p = new Preview(this);

            b.WithText("take a picture");

            // Only wire the click handler once the preview has a camera instance.
            p.oncamera =
                camera =>
                    b.AtClick(
                        v =>
                        {
                            camera.takePicture(null, null,
                                new takePicture_handler
                                {
                                    handler =
                                        data =>
                                        {
                                            // The button doubles as a status display while saving.
                                            b.WithText("at click");
                                            try
                                            {
                                                // http://stackoverflow.com/questions/11874273/android-nexus-7-jelly-bean-startpreview-takepicture-calling-getcamerastereomode

                                                //E/NvOmxCamera(  126): OMX_ERRORTYPE android::NvOmxCamera::getCameraStereoMode(NvxComponent*, NvOmxCameraUserStereoMode&): Error: invalid NVX mode 0.
                                                //E/NvOmxCamera(  126): OMX_ERRORTYPE android::NvOmxCamera::getCameraStereoModeAndCaptureInfo(NvxComponent*, NvOmxCameraUserStereoMode&, NVX_STEREOCAPTUREINFO&): getCameraStereoMode failed with 0x00000000
                                                //D/NvOsDebugPrintf(  126): NvMMLiteJPEGEncSetAttribute: Incorrect value 0 for stereo capture type
                                                //E/NvOmxCameraSettings(  126): OMX_ERRORTYPE android::programStereoInfo(OMX_HANDLETYPE, const NVX_STEREOCAPTUREINFO&, android::NvxWrappers*): pNvxWrappers->OMX_SetConfigIL failed with 0x80001005
                                                //D/NvOsDebugPrintf(  126): Tryproc: INBuffer-Values of Width and Height 1280 960
                                                //D/dalvikvm(29535): GC_FOR_ALLOC freed 6686K, 52% free 7716K/15943K, paused 25ms, total 27ms


                                                // Save into the shared public Pictures folder.
                                                var SAVE_PATH = android.os.Environment.getExternalStoragePublicDirectory(
                                                    android.os.Environment.DIRECTORY_PICTURES
                                                    );


                                                SAVE_PATH.mkdirs();

                                                // Decode the raw JPEG callback data and re-encode it at quality 100.
                                                var bmp = android.graphics.BitmapFactory.decodeByteArray(data, 0, data.Length);

                                                ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                                                bmp.compress(android.graphics.Bitmap.CompressFormat.JPEG, 100, bytes);

                                                File f = new File(SAVE_PATH.ToString() + "/hello2.jpg");



                                                f.createNewFile();

                                                // NOTE(review): the FileOutputStream is never flushed or closed
                                                // here — buffered data could be lost; confirm this is safe.
                                                FileOutputStream fo = new FileOutputStream(f);
                                                fo.write(bytes.toByteArray());

                                                // Hand the saved image to an external viewer.
                                                Intent intent = new Intent();
                                                intent.setAction(android.content.Intent.ACTION_VIEW);

                                                var imgUri = android.net.Uri.fromFile(f);

                                                intent.setDataAndType(imgUri, "image/*");

                                                b.WithText("done!");

                                                startActivity(intent);

                                            }
                                            // Error	1	The type caught or thrown must be derived from System.Exception	y:\jsc.svn\examples\java\android\AndroidCameraActivity\AndroidCameraActivity\ApplicationActivity.cs	154	52	AndroidCameraActivity
                                            catch (Exception ex)
                                            {
                                                // Surface the failure on the button instead of crashing.
                                                b.WithText("saving.. error! " + ex.Message);

                                                //throw;
                                            }


                                        }
                                }
                            );

                        }
                    );


            // Preview is the main content view; the button UI is layered on top.
            this.setContentView(p);
            this.addContentView(sv, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        }
Ejemplo n.º 24
0
        /// <summary>
        /// Demonstrates configuring a StanfordCoreNLP pipeline entirely by hand,
        /// pointing every annotator at an explicit model path, then annotating a
        /// fixed sample sentence and pretty-printing the result.
        /// </summary>
        public void StanfordCoreNlpDemoManualConfiguration()
        {
            Console.WriteLine(Environment.CurrentDirectory);
            const string Text = "Kosgi Santosh sent an email to Stanford University. He didn't get a reply.";

            // Annotation pipeline configuration
            var props = new Properties();

            props.setProperty("annotators", "tokenize, ssplit, pos, lemma, ner, parse, dcoref");
            props.setProperty("pos.model", Config.GetModel(@"pos-tagger\english-bidirectional\english-bidirectional-distsim.tagger"));
            props.setProperty("ner.model", Config.GetModel(@"ner\english.all.3class.distsim.crf.ser.gz"));
            props.setProperty("parse.model", Config.GetModel(@"lexparser\englishPCFG.ser.gz"));

            // Coreference resolution needs a whole family of lexicon files; keep the
            // key/path pairs in a table and wire them up in one pass.
            var dcorefModels = new[]
            {
                new[] { "dcoref.demonym", @"dcoref\demonyms.txt" },
                new[] { "dcoref.states", @"dcoref\state-abbreviations.txt" },
                new[] { "dcoref.animate", @"dcoref\animate.unigrams.txt" },
                new[] { "dcoref.inanimate", @"dcoref\inanimate.unigrams.txt" },
                new[] { "dcoref.male", @"dcoref\male.unigrams.txt" },
                new[] { "dcoref.neutral", @"dcoref\neutral.unigrams.txt" },
                new[] { "dcoref.female", @"dcoref\female.unigrams.txt" },
                new[] { "dcoref.plural", @"dcoref\plural.unigrams.txt" },
                new[] { "dcoref.singular", @"dcoref\singular.unigrams.txt" },
                new[] { "dcoref.countries", @"dcoref\countries" },
                new[] { "dcoref.extra.gender", @"dcoref\namegender.combine.txt" },
                new[] { "dcoref.states.provinces", @"dcoref\statesandprovinces" },
                new[] { "dcoref.singleton.predictor", @"dcoref\singleton.predictor.ser" },
                new[] { "dcoref.big.gender.number", @"dcoref\gender.data.gz" },
            };
            foreach (var pair in dcorefModels)
            {
                props.setProperty(pair[0], Config.GetModel(pair[1]));
            }

            var sutimeRules = new[] {
                                      Config.GetModel(@"sutime\defs.sutime.txt"),
                                      Config.GetModel(@"sutime\english.holidays.sutime.txt"),
                                      Config.GetModel(@"sutime\english.sutime.txt")
                                  };
            props.setProperty("sutime.rules", String.Join(",", sutimeRules));
            props.setProperty("sutime.binders", "0");

            var pipeline = new edu.stanford.nlp.pipeline.StanfordCoreNLP(props);

            // Annotate the sample text.
            var annotation = new Annotation(Text);
            pipeline.annotate(annotation);

            // Pretty-print the annotation into an in-memory stream and echo it.
            using (var stream = new ByteArrayOutputStream())
            {
                pipeline.prettyPrint(annotation, new PrintWriter(stream));
                Console.WriteLine(stream.toString());
            }

            this.CustomAnnotationPrint(annotation);
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Runs text processing using the StanfordCoreNLP project: annotates the
        /// text, optionally redirects console output to a file, optionally
        /// pretty-prints the annotation, then parses and prints the notes.
        /// </summary>
        /// <param name="text">Text to process.</param>
        public void RunCoreNLP(String text)
        {
            // Resolve the pipeline before constructing the Annotation; doing it
            // the other way round throws an error.
            StanfordCoreNLP pipeLine = Pipeline;
            Annotation annotation = new Annotation(text);
            pipeLine.annotate(annotation);

            if (_redirectOutputToFile)
            {
                // Redirect stdout and stderr into the configured file for the rest
                // of the run; the writer is intentionally left open so later writes
                // keep flowing to the file.
                var fileStream = new FileStream(_redirectOutputToFileFileName, FileMode.OpenOrCreate, FileAccess.Write);
                var writer = new StreamWriter(fileStream) { AutoFlush = true };
                Console.SetOut(writer);
                Console.SetError(writer);
            }

            if (_verbose)
            {
                // Pretty-print the full annotation through an in-memory stream.
                using (ByteArrayOutputStream stream = new ByteArrayOutputStream())
                {
                    pipeLine.prettyPrint(annotation, new PrintWriter(stream));
                    Console.WriteLine(stream.toString());
                    stream.close();
                }
            }

            _notes = Parse(annotation);
            PrintNotes(_notes);
        }
Ejemplo n.º 26
0
    /// <summary>
    /// Encodes the image's pixel data and writes it out as a single PNG IDAT chunk.
    /// NOTE(review): this is IKVM-decompiled Java (PngEncoder); the goto labels and
    /// ByteCodeHelper.MapException calls are decompiler artifacts of the original
    /// Java try/catch structure — do not "clean them up" without retesting.
    /// Pipeline: grab pixels in horizontal strips -> prepend a filter-type byte per
    /// scanline and apply the scanline filter -> deflate-compress -> emit the IDAT
    /// chunk (length, tag, data, CRC). Returns true on success, false if the pixel
    /// grab fails/aborts or an IOException occurs.
    /// </summary>
    protected internal virtual bool writeImageData()
    {
      // num1 = rows remaining to encode; num2 = index of the first row of the current strip.
      int num1 = this.height;
      int num2 = 0;
      // 3 bytes/pixel (RGB) or 4 (RGBA) depending on whether alpha is encoded.
      this.bytesPerPixel = !this.encodeAlpha ? 3 : 4;
      Deflater deflater = new Deflater(this.compressionLevel);
      ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream(1024);
      DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream((OutputStream) arrayOutputStream, deflater);
      IOException ioException1;
      IOException ioException2;
      // Main strip loop; exits via `break` (pixel-grab interrupted) or `goto label_33` (all rows done).
      while (true)
      {
        int num3;
        int[] numArray1;
        PixelGrabber pixelGrabber;
        try
        {
          if (num1 > 0)
          {
            // Size the strip so the filtered scanline buffer stays within 32767 bytes:
            // num5 = bytes per filtered row (pixels + 1 leading filter-type byte);
            // num3 = rows in this strip, clamped to [1, rows remaining].
            // (The num6 == -1 comparison is decompiler residue of a division guard.)
            int num4 = (int) short.MaxValue;
            int num5 = this.width * (this.bytesPerPixel + 1);
            int num6 = -1;
            num3 = Math.max(Math.min(num5 != num6 ? num4 / num5 : -num4, num1), 1);
            numArray1 = new int[this.width * num3];
            pixelGrabber = new PixelGrabber(this.image, 0, num2, this.width, num3, numArray1, 0, this.width);
            try
            {
              pixelGrabber.grabPixels();
            }
            catch (Exception ex)
            {
              // Decompiled mapping of Java's `catch (InterruptedException)`:
              // breaking out of the loop lands in the "interrupted" handler below.
              int num7 = 2;
              if (ByteCodeHelper.MapException<Exception>(ex, (ByteCodeHelper.MapFlags) num7) == null)
                throw;
              else
                break;
            }
          }
          else
            goto label_33;  // no rows left — flush and write the IDAT chunk
        }
        catch (IOException ex)
        {
          int num4 = 1;
          ioException1 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num4);
          goto label_8;  // route to the shared error-print/return-false exit
        }
        try
        {
          // Bit 7 (0x80) of the grabber status is the ABORTED flag.
          if ((pixelGrabber.getStatus() & 128) != 0)
          {
            System.get_err().println("image fetch aborted or errored");
            return false;
          }
          else
          {
            // One filtered buffer for the whole strip: pixel bytes + one filter-type
            // byte per scanline (hence the `+ num3`).
            byte[] pixels = new byte[this.width * num3 * this.bytesPerPixel + num3];
            // Scratch state for the PNG scanline filters: 1 = Sub, 2 = Up.
            if (this.filter == 1)
              this.leftBytes = new byte[16];
            if (this.filter == 2)
              this.priorRow = new byte[this.width * this.bytesPerPixel];
            // num4 = write cursor into `pixels`; startPos = offset of the current
            // scanline's first pixel byte (just past its filter-type byte).
            int num4 = 0;
            int startPos = 1;
            for (int index1 = 0; index1 < this.width * num3; ++index1)
            {
              // At the start of each scanline (index1 % width == 0) emit the
              // filter-type byte. (The -1 comparison is decompiler modulo-guard residue.)
              int num5 = index1;
              int num6 = this.width;
              int num7 = -1;
              if ((num6 != num7 ? num5 % num6 : 0) == 0)
              {
                byte[] numArray2 = pixels;
                int index2 = num4;
                ++num4;
                int num8 = (int) (sbyte) this.filter;
                numArray2[index2] = (byte) num8;
                startPos = num4;
              }
              // Unpack the ARGB int: >>16 = red, >>8 = green, &0xFF = blue
              // (each decompiled into temp-variable triples by the decompiler).
              byte[] numArray3 = pixels;
              int index3 = num4;
              int num9 = num4 + 1;
              int num10 = (int) (sbyte) (numArray1[index1] >> 16 & (int) byte.MaxValue);
              numArray3[index3] = (byte) num10;
              byte[] numArray4 = pixels;
              int index4 = num9;
              int num11 = num9 + 1;
              int num12 = (int) (sbyte) (numArray1[index1] >> 8 & (int) byte.MaxValue);
              numArray4[index4] = (byte) num12;
              byte[] numArray5 = pixels;
              int index5 = num11;
              num4 = num11 + 1;
              int num13 = (int) (sbyte) (numArray1[index1] & (int) byte.MaxValue);
              numArray5[index5] = (byte) num13;
              if (this.encodeAlpha)
              {
                // >>24 = alpha channel, only emitted in RGBA mode.
                byte[] numArray2 = pixels;
                int index2 = num4;
                ++num4;
                int num8 = (int) (sbyte) (numArray1[index1] >> 24 & (int) byte.MaxValue);
                numArray2[index2] = (byte) num8;
              }
              // At the last pixel of a scanline, apply the selected filter in place.
              int num14 = index1;
              int num15 = this.width;
              int num16 = -1;
              if ((num15 != num16 ? num14 % num15 : 0) == this.width - 1 && this.filter != 0)
              {
                if (this.filter == 1)
                  this.filterSub(pixels, startPos, this.width);
                if (this.filter == 2)
                  this.filterUp(pixels, startPos, this.width);
              }
            }
            // Compress the filtered strip and advance to the next one.
            deflaterOutputStream.write(pixels, 0, num4);
            num2 += num3;
            num1 -= num3;
          }
        }
        catch (IOException ex)
        {
          int num4 = 1;
          ioException2 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num4);
          goto label_32;
        }
      }
      // Reached only via `break` above: the pixel grab was interrupted.
      IOException ioException3;
      try
      {
        System.get_err().println("interrupted waiting for pixels!");
        return false;
      }
      catch (IOException ex)
      {
        int num3 = 1;
        ioException3 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num3);
      }
      IOException ioException4 = ioException3;
      goto label_37;
label_8:
      ioException4 = ioException1;
      goto label_37;
label_32:
      ioException4 = ioException2;
      goto label_37;
label_33:
      // Success path: finish compression and write the IDAT chunk
      // (4-byte length, "IDAT" tag, compressed data, 4-byte CRC over tag+data).
      int num17;
      IOException ioException5;
      try
      {
        deflaterOutputStream.close();
        byte[] data = arrayOutputStream.toByteArray();
        int length = data.Length;
        this.crc.reset();
        this.bytePos = this.writeInt4(length, this.bytePos);
        // __\u003C\u003EIDAT is the decompiler-mangled name of the "IDAT" tag constant.
        this.bytePos = this.writeBytes(PngEncoder.__\u003C\u003EIDAT, this.bytePos);
        this.crc.update(PngEncoder.__\u003C\u003EIDAT);
        this.bytePos = this.writeBytes(data, length, this.bytePos);
        this.crc.update(data, 0, length);
        this.crcValue = this.crc.getValue();
        this.bytePos = this.writeInt4((int) this.crcValue, this.bytePos);
        deflater.finish();
        deflater.end();
        num17 = 1;
      }
      catch (IOException ex)
      {
        int num3 = 1;
        ioException5 = (IOException) ByteCodeHelper.MapException<IOException>((Exception) ex, (ByteCodeHelper.MapFlags) num3);
        goto label_36;
      }
      return num17 != 0;
label_36:
      ioException4 = ioException5;
label_37:
      // Shared IOException exit: print the mapped exception and report failure.
      System.get_err().println(Throwable.instancehelper_toString((Exception) ioException4));
      return false;
    }