Example 1
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            SetContentView(Resource.Layout.Result);

            try
            {
                UCropView uCropView = FindViewById <UCropView>(Resource.Id.ucrop);
                uCropView.CropImageView.SetImageUri(Intent.Data, null);
                uCropView.OverlayView.SetShowCropFrame(false);
                uCropView.OverlayView.SetShowCropGrid(false);
                uCropView.OverlayView.SetDimmedColor(Android.Graphics.Color.Transparent);
            }
            catch (Exception e)
            {
                Log.Error(TAG, "setImageUri", e);
                Toast.MakeText(this, e.Message, ToastLength.Long).Show();
            }

            Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
            options.InJustDecodeBounds = true;
            Android.Graphics.BitmapFactory.DecodeFile(new Java.IO.File(Intent.Data.Path).AbsolutePath, options);

            SetSupportActionBar(FindViewById <Android.Support.V7.Widget.Toolbar>(Resource.Id.toolbar));
            if (SupportActionBar != null)
            {
                SupportActionBar.SetDisplayHomeAsUpEnabled(true);
                SupportActionBar.Title = GetString(Resource.String.format_crop_result_d_d, options.OutWidth, options.OutHeight);
            }
        }
        /// <summary>
        /// Crop the image to a centered square and save it as a PNG
        /// </summary>
        /// <param name="photoPath">Path of the source image</param>
        /// <param name="newPhotoPath">Path where the cropped image is written</param>
        public void ResizeBitmaps(string photoPath, string newPhotoPath)
        {
            Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
            options.InPreferredConfig = Android.Graphics.Bitmap.Config.Argb8888;
            Android.Graphics.Bitmap bitmap        = Android.Graphics.BitmapFactory.DecodeFile(photoPath, options);
            Android.Graphics.Bitmap croppedBitmap = null;

            if (bitmap.Width >= bitmap.Height)
            {
                croppedBitmap = Android.Graphics.Bitmap.CreateBitmap(
                    bitmap,
                    bitmap.Width / 2 - bitmap.Height / 2,
                    0,
                    bitmap.Height,
                    bitmap.Height);
            }
            else
            {
                croppedBitmap = Android.Graphics.Bitmap.CreateBitmap(
                    bitmap,
                    0,
                    bitmap.Height / 2 - bitmap.Width / 2,
                    bitmap.Width,
                    bitmap.Width);
            }

            System.IO.FileStream stream = null;

            try
            {
                stream = new System.IO.FileStream(newPhotoPath, System.IO.FileMode.Create);
                croppedBitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Png, 100, stream);
            }
            catch
            {
                //System.Diagnostics.Debug.WriteLineIf(App.Debugging, "Failed to close: " + ex.ToString());
            }
            finally
            {
                try
                {
                    if (stream != null)
                    {
                        stream.Close();
                    }

                    croppedBitmap.Recycle();
                    croppedBitmap.Dispose();
                    croppedBitmap = null;

                    bitmap.Recycle();
                    bitmap.Dispose();
                    bitmap = null;
                }
                catch
                {
                    //Debug.WriteLineIf(App.Debugging, "Failed to close: " + ex.ToString());
                }
            }
        }
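A minimal usage sketch for ResizeBitmaps above; the file paths are hypothetical, and the method center-crops the source image to a square before saving it as a PNG.

        // Hypothetical paths: crop the captured photo to a centered square and save it as a new PNG.
        string photoPath    = System.IO.Path.Combine(Android.OS.Environment.ExternalStorageDirectory.AbsolutePath, "photo.jpg");
        string newPhotoPath = System.IO.Path.Combine(Android.OS.Environment.ExternalStorageDirectory.AbsolutePath, "photo_square.png");
        ResizeBitmaps(photoPath, newPhotoPath);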
Example 3
        public Texture(Context context, string fileName)
        {
            if (string.IsNullOrEmpty(fileName))
            {
                return;
            }
            GLES20.GlGenTextures(1, textureHandle, 0); //init 1 texture storage handle
            if (textureHandle[0] != 0)
            {
                //Fully qualified names are used because classes such as Matrix exist in both Android.Graphics and Android.Opengl; this is just one example of that
                Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
                options.InScaled = false; // No pre-scaling
                int id = context.Resources.GetIdentifier(fileName, "drawable", context.PackageName);
                Android.Graphics.Bitmap bitmap = Android.Graphics.BitmapFactory.DecodeResource(context.Resources, id, options);
                GLES20.GlBindTexture(GLES20.GlTexture2d, textureHandle[0]);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
                GLUtils.TexImage2D(GLES20.GlTexture2d, 0, bitmap, 0);
                bitmap.Recycle();

                handle = textureHandle[0];
            }
        }
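A hypothetical usage sketch: the constructor above must run on the GL thread (for example inside a renderer's OnSurfaceCreated); "wood" is an assumed drawable resource name, and wallTexture and context are assumed fields of the renderer.

        public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
        {
            // Create the texture on the GL thread; decodes Resources/drawable/wood and uploads it to OpenGL.
            wallTexture = new Texture(context, "wood");
        }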
Example 4
        async void Process()
        {
            var progress = this.CreateProgress("Progresso", "Processando...");

            progress.Show();

            var filename = Path.GetFileName(filesList[index]);
            var options  = new Android.Graphics.BitmapFactory.Options {
                InScaled = false
            };

            using (var bmp = await Android.Graphics.BitmapFactory.DecodeFileAsync(folder + Java.IO.File.Separator + filename, options))
            {
                var handle = bmp.LockPixels();
                var info   = bmp.GetBitmapInfo();

                int predict = Wrapper.predictDigit(handle, (int)info.Height, (int)info.Height);
                bmp.UnlockPixels();   // release the pixel buffer locked above
                TextResult.Text = predict.ToString();

                if (Preview.Drawable is Android.Graphics.Drawables.BitmapDrawable temp)
                {
                    temp.Bitmap.Recycle();
                }
                Preview.SetImageBitmap(bmp);
            }
            progress.Dismiss();
        }
Example 5
        public async Task <string> Save(string filePath, string outputFolder, ImageFileType type)
        {
            var opts = new Android.Graphics.BitmapFactory.Options();

            opts.InPreferredConfig = Android.Graphics.Bitmap.Config.Argb8888;
            Android.Graphics.Bitmap bitmap = await Android.Graphics.BitmapFactory.DecodeFileAsync(filePath, opts);

            var outputpath = Path.Combine(outputFolder, Path.ChangeExtension(Path.GetFileName(filePath), type.ToString()));
            var stream     = new FileStream(outputpath, FileMode.Create);

            switch (type)
            {
            case ImageFileType.PNG:
                bitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Png, 100, stream);
                break;

            case ImageFileType.JPG:
                bitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Jpeg, 100, stream);
                break;

            default:
                throw new NotImplementedException();
            }
            stream.Close();

            return(outputpath);
        }
        private Size GetBitmapSize(string fileName)
        {
            var options = new Android.Graphics.BitmapFactory.Options {
                InJustDecodeBounds = true
            };

            Android.Graphics.BitmapFactory.DecodeFile(fileName, options);
            return(new Size(options.OutWidth, options.OutHeight));
        }
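A short sketch of how Save and GetBitmapSize might be called from another async method of the same class; the file path is hypothetical and ImageFileType is the enum already used above.

        // Convert a JPEG on disk to a PNG in the system temp directory (path is hypothetical).
        string jpgPath = "/storage/emulated/0/DCIM/photo.jpg";
        string pngPath = await Save(jpgPath, System.IO.Path.GetTempPath(), ImageFileType.PNG);
        var    size    = GetBitmapSize(pngPath);   // bounds-only decode, no pixel allocation
        Console.WriteLine($"Converted image is {size.Width}x{size.Height}");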
Example 7
        public byte[] ResizeImage(byte[] source, float maxWidth, float maxHeight)
        {
            try
            {
                {
                    var options = new Android.Graphics.BitmapFactory.Options()
                    {
                        InJustDecodeBounds = false,
                        InPurgeable        = true,
                    };

                    using (var image = Android.Graphics.BitmapFactory.DecodeStream(new MemoryStream(source)))//(sourceFile, options))
                    {
                        if (image != null)
                        {
                            var sourceSize = new Size((int)image.GetBitmapInfo().Height, (int)image.GetBitmapInfo().Width);

                            var maxResizeFactor = Math.Min(maxWidth / sourceSize.Width, maxHeight / sourceSize.Height);

                            byte[] res = null;
                            if (maxResizeFactor > 0.9)
                            {
                                return(source);
                            }
                            else
                            {
                                var width  = (int)(maxResizeFactor * sourceSize.Width);
                                var height = (int)(maxResizeFactor * sourceSize.Height);

                                using (var bitmapScaled = Android.Graphics.Bitmap.CreateScaledBitmap(image, height, width, true))
                                {
                                    using (MemoryStream stream = new MemoryStream())
                                    {
                                        bitmapScaled.Compress(Android.Graphics.Bitmap.CompressFormat.Jpeg, 95, stream);
                                        res = stream.ToArray();
                                    }
                                    bitmapScaled.Recycle();
                                }
                            }

                            image.Recycle();
                            return(res);
                        }
                        else
                        {
                            return(null);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("ResizeImage failed:" + "  " + ex.Message);
                return(source);
            }
        }
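A minimal sketch of calling ResizeImage on raw image bytes; the path and target dimensions are arbitrary.

        // Downscale the image so it fits roughly within 1024x768, then write the result back to disk.
        byte[] original = System.IO.File.ReadAllBytes("/storage/emulated/0/DCIM/photo.jpg");
        byte[] resized  = ResizeImage(original, 1024f, 768f);
        System.IO.File.WriteAllBytes("/storage/emulated/0/DCIM/photo_small.jpg", resized);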
Example 8
        public static Bitmap OpenFileBitmap(Context context, string folder, string fileName)
        {
#if __ANDROID__
            //get the Android resource ID for the file name from Resources/[folder]/[fileName]
            int id = context.Resources.GetIdentifier(fileName, folder, context.PackageName);
            //try to decode the image resource (JPG, PNG, ...) into a Bitmap
            BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
            options.InScaled = false; // No pre-scaling
            return(BitmapFactory.DecodeResource(context.Resources, id, options));
#else
            string path = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location) + @"\Resources\" + folder + @"\" + fileName;
            return(new Bitmap(path));
#endif
        }
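A hedged usage sketch for OpenFileBitmap on Android, where the folder argument is the resource type passed to GetIdentifier; "logo" is a hypothetical drawable name.

        // Load Resources/drawable/logo.(png|jpg) into a Bitmap ("logo" is a hypothetical resource name).
        Bitmap logo = OpenFileBitmap(Android.App.Application.Context, "drawable", "logo");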
Example 9
        public static int CalculateInSampleSize(Android.Graphics.BitmapFactory.Options options, int reqWidth, int reqHeight)
        {
            // Raw height and width of image
            var height       = (float)options.OutHeight;
            var width        = (float)options.OutWidth;
            var inSampleSize = 1D;

            if (height > reqHeight || width > reqWidth)
            {
                inSampleSize = width > height
                ? height / reqHeight
                : width / reqWidth;
            }

            return((int)inSampleSize);
        }
        public override View GetView(int position, View convertView, ViewGroup parent)
        {
            var view = convertView;
            SongMultiSelectAdapterViewHolder holder = null;

            if (view != null)
            {
                holder = view.Tag as SongMultiSelectAdapterViewHolder;
            }

            if (holder == null)
            {
                holder = new SongMultiSelectAdapterViewHolder();
                var inflater = context.GetSystemService(Context.LayoutInflaterService).JavaCast <LayoutInflater>();
                //replace with your item and your holder items
                //comment back in
                view             = inflater.Inflate(Resource.Layout.SongMultiSelectRow, parent, false);
                holder.Title     = view.FindViewById <TextView>(Resource.Id.SongMultiSelectRow_title);
                holder.ImageView = view.FindViewById <ImageView>(Resource.Id.SongMultiSelectRow_imageView);
                holder.Validator = view.FindViewById <Switch>(Resource.Id.SongMultiSelectRow_Validator);
                holder.Validator.CheckedChange += Validator_CheckedChange;

                view.Tag = holder;
            }
            holder.Title.Text = data[position].Data.FullTitle;
            if (data[position].Data.ContainsImage)
            {
                holder.ImageView.SetImageBitmap(data[position].Data.Image);
            }
            else
            {
                using (Android.Graphics.BitmapFactory.Options opt = new Android.Graphics.BitmapFactory.Options())
                {
                    // OutHeight/OutWidth are output fields filled in by the decoder; only InSampleSize below affects decoding.
                    opt.OutHeight    = 48;
                    opt.OutWidth     = 48;
                    opt.InSampleSize = 2;
                    holder.ImageView.SetImageBitmap(Android.Graphics.BitmapFactory.DecodeResource(context.Resources, Resource.Drawable.Note, opt));
                }
            }
            holder.Validator.Checked = data[position].Selected;

            //fill in your items
            //holder.Title.Text = "new text here";

            return(view);
        }
Example 11
        async void CreateNotification(string title, string description)
        {
            var notificationManager = GetSystemService(Context.NotificationService) as NotificationManager;

            var uiIntent = new Intent(this, typeof(MainActivity));

            NotificationCompat.Builder builder = new NotificationCompat.Builder(this);

            Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options
            {
                InJustDecodeBounds = false // must be false here: a bounds-only decode returns a null bitmap, so there would be no large icon
            };

            Android.Graphics.Bitmap largeIcon = await Android.Graphics.BitmapFactory.DecodeResourceAsync(Resources, Resource.Drawable.icon, options);

            Helpers.ToastHelper.ProcessNotification(this, notificationManager, uiIntent, builder, title, description, largeIcon);
        }
Example 12
		public static int LoadGlTexture (Resources res, int resId)
		{
			var texture = new int[1];
			GLES20.GlGenTextures (1, texture, 0);
			if (texture [0] == 0)
				throw new InvalidOperationException ("Can't create texture");
			var options = new Android.Graphics.BitmapFactory.Options {
				InScaled = false
			};
			var bmp = Android.Graphics.BitmapFactory.DecodeResource (res, resId, options);
			GLES20.GlBindTexture (GLES20.GlTexture2d, texture [0]);
			GLES20.GlTexParameteri (GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
			GLES20.GlTexParameteri (GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);

			GLUtils.TexImage2D (GLES20.GlTexture2d, 0, bmp, 0);
			bmp.Recycle ();

			return texture [0];
		}
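A small sketch of how LoadGlTexture might be used from a renderer once the GL context exists; Resource.Drawable.texture1 and the u_Texture uniform location (mTextureHandle) are assumptions borrowed from the other examples on this page.

		// Inside OnSurfaceCreated, after the shader program is in use:
		int textureId = LoadGlTexture (context.Resources, Resource.Drawable.texture1);
		GLES20.GlActiveTexture (GLES20.GlTexture0);
		GLES20.GlBindTexture (GLES20.GlTexture2d, textureId);
		GLES20.GlUniform1i (mTextureHandle, 0); // point the sampler2D uniform at texture unit 0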
Example 13
        public static Android.Graphics.Bitmap DecodeSampledBitmapFromResource(String filename, int reqWidth, int reqHeight)
        {
            // First decode with inJustDecodeBounds=true to check dimensions
            var options = new Android.Graphics.BitmapFactory.Options
            {
                InJustDecodeBounds = true,
            };

            using (var dispose = Android.Graphics.BitmapFactory.DecodeFile(filename, options))
            {
            }

            // Calculate inSampleSize
            options.InSampleSize = CalculateInSampleSize(options, reqWidth, reqHeight);

            // Decode bitmap with inSampleSize set
            options.InJustDecodeBounds = false;
            return(Android.Graphics.BitmapFactory.DecodeFile(filename, options));
        }
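A usage sketch for DecodeSampledBitmapFromResource (which, despite its name, decodes from a file path); the path, target size and ImageView id are hypothetical.

        // Decode a large photo downsampled to roughly 200x200 before showing it in an ImageView.
        var photoView = FindViewById <ImageView>(Resource.Id.photoView);   // hypothetical view id
        var bitmap    = DecodeSampledBitmapFromResource("/storage/emulated/0/DCIM/photo.jpg", 200, 200);
        photoView.SetImageBitmap(bitmap);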
Example 14
        public override View GetView(int position, View convertView, ViewGroup parent)
        {
            var view = convertView;
            SongAdapterViewHolder holder = null;

            if (view != null)
            {
                holder = view.Tag as SongAdapterViewHolder;
            }

            if (holder == null)
            {
                holder = new SongAdapterViewHolder();
                var inflater = context.GetSystemService(Context.LayoutInflaterService).JavaCast <LayoutInflater>();
                //replace with your item and your holder items
                //comment back in

                view             = inflater.Inflate(Resource.Layout.SongAdapterRow, parent, false);
                holder.Title     = view.FindViewById <TextView>(Resource.Id.SongAdapterRow_titleViewLeft);
                holder.ImageView = view.FindViewById <ImageView>(Resource.Id.SongAdapterRow_imageViewLeft);
                holder.LL        = view.FindViewById <LinearLayout>(Resource.Id.SongAdapterRow_LinearLayout);
                view.Tag         = holder;
            }

            holder.Title.Text = tracks[position].Name;
            if (tracks[position].ContainsImage)
            {
                holder.ImageView.SetImageBitmap(tracks[position].Image);
            }
            else
            {
                using (Android.Graphics.BitmapFactory.Options opt = new Android.Graphics.BitmapFactory.Options())
                {
                    // OutHeight/OutWidth are output fields filled in by the decoder; only InSampleSize below affects decoding.
                    opt.OutHeight    = 48;
                    opt.OutWidth     = 48;
                    opt.InSampleSize = 2;
                    holder.ImageView.SetImageBitmap(Android.Graphics.BitmapFactory.DecodeResource(context.Resources, Resource.Drawable.Note, opt));
                }
            }
            return(view);
        }
Example 15
        public byte[] GetThumbnailBytes(byte[] imageData, float width, float height)
        {
            // Load the bitmap
            Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options(); // Options object for configuring the decode below
            options.InPurgeable = true;                                                                    // InPurgeable lets the system reclaim pixel memory under pressure (deprecated and ignored on newer Android versions)
            Android.Graphics.Bitmap originalImage = Android.Graphics.BitmapFactory.DecodeByteArray(imageData, 0, imageData.Length, options);

            float newHeight = 0;
            float newWidth  = 0;

            var originalHeight = originalImage.Height;
            var originalWidth  = originalImage.Width;

            if (originalHeight > originalWidth)
            {
                newHeight = height;
                float ratio = originalHeight / height;
                newWidth = originalWidth / ratio;
            }
            else
            {
                newWidth = width;
                float ratio = originalWidth / width;
                newHeight = originalHeight / ratio;
            }

            Android.Graphics.Bitmap resizedImage = Android.Graphics.Bitmap.CreateScaledBitmap(originalImage, (int)newWidth, (int)newHeight, true);

            originalImage.Recycle();

            using (MemoryStream ms = new MemoryStream())
            {
                resizedImage.Compress(Android.Graphics.Bitmap.CompressFormat.Png, 100, ms);

                resizedImage.Recycle();

                return(ms.ToArray());
            }
        }
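A minimal sketch of calling GetThumbnailBytes; the source path, the output path and the 150x150 bound are arbitrary.

        // Produce a PNG thumbnail that fits within 150x150 from raw image bytes.
        byte[] imageData = System.IO.File.ReadAllBytes("/storage/emulated/0/DCIM/photo.jpg");
        byte[] thumbnail = GetThumbnailBytes(imageData, 150f, 150f);
        System.IO.File.WriteAllBytes("/storage/emulated/0/Pictures/photo_thumb.png", thumbnail);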
        /// <summary>
        /// Overridable method that builds the final view of an item
        /// </summary>
        /// <param name="item"></param>
        /// <param name="extras">Object returned by the LoadItem method</param>
        /// <param name="itemView"></param>
        public override void FormFinalView(CategoryShort item, Bitmap extras, View itemView)
        {
            //Get a reference to the photo object
            Bitmap photo = extras;        //extras as Bitmap;

            //If the photo exists
            if (photo != null)
            {
                //Display the loaded photo
            }
            else
            {
                //Display the default image instead
                var options = new Android.Graphics.BitmapFactory.Options
                {
                    InJustDecodeBounds = false,
                };
                Context context = Android.App.Application.Context;
                Android.Content.Res.Resources res = context.Resources;
                photo = Android.Graphics.BitmapFactory.DecodeResource(res, Resource.Drawable.noimage, options);
            }
            itemView.FindViewById <ImageView>(Resource.Id.ImageCategory).SetImageBitmap(photo);
        }
Example 17
        public static int LoadGlTexture(Resources res, int resId)
        {
            var texture = new int[1];

            GLES20.GlGenTextures(1, texture, 0);
            if (texture [0] == 0)
            {
                throw new InvalidOperationException("Can't create texture");
            }
            var options = new Android.Graphics.BitmapFactory.Options {
                InScaled = false
            };
            var bmp = Android.Graphics.BitmapFactory.DecodeResource(res, resId, options);

            GLES20.GlBindTexture(GLES20.GlTexture2d, texture [0]);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);

            GLUtils.TexImage2D(GLES20.GlTexture2d, 0, bmp, 0);
            bmp.Recycle();

            return(texture [0]);
        }
        private Size GetBitmapSize(string fileName) {
            var options = new Android.Graphics.BitmapFactory.Options { InJustDecodeBounds = true };
            Android.Graphics.BitmapFactory.DecodeFile(fileName, options);
            return new Size(options.OutWidth, options.OutHeight);

        }
        private Android.Graphics.Bitmap LoadAndResizeBitmap(string fileName, Size newSize) {
            var exif = new ExifInterface(fileName);

            var width = exif.GetAttributeInt(ExifInterface.TagImageWidth, 100);
            var height = exif.GetAttributeInt(ExifInterface.TagImageLength, 80);
            var orientation = exif.GetAttribute(ExifInterface.TagOrientation);


            // We calculate the ratio that we need to resize the image by
            // in order to fit the requested dimensions.

            var inSampleSize = 1.0;

            if (newSize.Height < height || newSize.Width < width) {
                // Divide as doubles and use the source/target ratio; integer division here would truncate the factor to zero.
                inSampleSize = newSize.Width > newSize.Height
                    ? (double)height / newSize.Height
                    : (double)width / newSize.Width;
            }

            var options = new Android.Graphics.BitmapFactory.Options {
                InJustDecodeBounds = false,
                InSampleSize = (int)inSampleSize
            };
            // Now we will load the image and have BitmapFactory resize it for us.
            var resizedBitmap = Android.Graphics.BitmapFactory.DecodeFile(fileName, options);

            var rotate = false;
            
            switch (orientation) {
                case "1": // landscape
                case "3": // landscape
                    if (width < height)
                        rotate = true;
                    break;
                case "8":
                case "4":
                case "6": // portrait
                    if (width > height)
                        rotate = true;
                    break;
                case "0": //undefined
                default:
                    break;
            }

            if (rotate) {
                var mtx = new Android.Graphics.Matrix();
                mtx.PreRotate(90);
                resizedBitmap = Android.Graphics.Bitmap.CreateBitmap(resizedBitmap, 0, 0, resizedBitmap.Width, resizedBitmap.Height, mtx, false);
                mtx.Dispose();
                mtx = null;

            }


            return resizedBitmap;
        }
        /// <summary>
        /// Rotate via EXIF information
        /// </summary>
        /// <param name="photoPath">Path of the source image</param>
        /// <returns>PNG-encoded bytes of the rotated, center-cropped image, or null on failure</returns>
        public byte[] RotateImage(string photoPath)
        {
            Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
            options.InPreferredConfig = Android.Graphics.Bitmap.Config.Argb8888;
            Android.Graphics.Bitmap bitmap = Android.Graphics.BitmapFactory.DecodeFile(photoPath, options);

            try
            {
                Android.Media.ExifInterface exifInterface = new Android.Media.ExifInterface(photoPath);
                int orientation = exifInterface.GetAttributeInt(Android.Media.ExifInterface.TagOrientation, (int)Android.Media.Orientation.Normal);

                int rotate = 0;

                switch (orientation)
                {
                case (int)Android.Media.Orientation.Normal:
                    rotate = 0;
                    break;

                case (int)Android.Media.Orientation.Rotate90:
                    rotate = 90;
                    break;

                case (int)Android.Media.Orientation.Rotate270:
                    rotate = 270;
                    break;

                case (int)Android.Media.Orientation.Rotate180:
                    rotate = 180;
                    break;

                default:
                    rotate = 0;
                    break;
                }

                using (var ms = new System.IO.MemoryStream())
                {
                    Android.Graphics.Bitmap croppedBitmap = null;

                    Android.Graphics.Matrix mtx = new Android.Graphics.Matrix();
                    mtx.PreRotate(rotate);

                    if (bitmap.Width >= bitmap.Height)
                    {
                        croppedBitmap = Android.Graphics.Bitmap.CreateBitmap(
                            bitmap,
                            bitmap.Width / 2 - bitmap.Height / 2,
                            0,
                            bitmap.Height,
                            bitmap.Height,
                            mtx,
                            false);
                    }
                    else
                    {
                        croppedBitmap = Android.Graphics.Bitmap.CreateBitmap(
                            bitmap,
                            0,
                            bitmap.Height / 2 - bitmap.Width / 2,
                            bitmap.Width,
                            bitmap.Width,
                            mtx,
                            false);
                    }

                    croppedBitmap.Compress(Android.Graphics.Bitmap.CompressFormat.Png, 100, ms);

                    croppedBitmap.Recycle();
                    croppedBitmap.Dispose();
                    croppedBitmap = null;

                    mtx.Dispose();
                    mtx = null;

                    bitmap.Recycle();
                    bitmap.Dispose();
                    bitmap = null;

                    return(ms.ToArray());
                }
            }
            catch
            {
                // <!-- Fail out -->
            }

            return(null);
        }
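A hedged sketch of using RotateImage to normalize a camera capture before display; the path and the imageView field are assumptions.

        // Rotate according to EXIF, then decode the returned PNG bytes for display.
        byte[] upright = RotateImage("/storage/emulated/0/DCIM/capture.jpg");
        if (upright != null)
        {
            var bmp = Android.Graphics.BitmapFactory.DecodeByteArray(upright, 0, upright.Length);
            imageView.SetImageBitmap(bmp);   // imageView is an assumed ImageView field
        }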
Example 21
        //Internal method for retrieving photos
        private List <Android.Graphics.Bitmap> GetEventsPhotosInternal(EventShort[] NewEvents)             //it was Photo[]
        {
            List <Android.Graphics.Bitmap> newPhotos = new List <Android.Graphics.Bitmap>();

            if (CheckServerDataPresent(true))                           //If there is data from the server
            {
                //TO DO
                //using (var client = new UserClientExchange())
                //{
                //    var syncSessionId = client.Login("Emeri", "Emeri", new Captcha());
                //    Photo[] serverPhotos = new Photo[NewEvents.Length];

                //    //Loop over all events
                //    for (int i = 0; i < NewEvents.Length; i += 1)
                //    {
                //        //Process the photo information
                //        if (NewEvents[i].PrimaryPhotoId.HasValue)          //If the event has a photo   NewEvents[i]. ||
                //        {
                //            try
                //            {
                //                for (int howManyTimesTryDownload = 0; howManyTimesTryDownload < 3; howManyTimesTryDownload++)
                //                {
                //                    //Check whether we already have the photo
                //                    string filename = "EventMainPhoto_" + NewEvents[i].PrimaryPhotoId;
                //                    if (!File.Exists(Path.Combine(WorkingInetAndSQL.destinationPath, filename)))         //If the photo is not on disk, download it and add it to the List
                //                    {
                //                        serverPhotos[i] = client.GetPhoto(syncSessionId, NewEvents[i].PrimaryPhotoId.Value, true);

                //                        //Add the Bitmap to the List
                //                        Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeByteArray(serverPhotos[i].Data, 0, serverPhotos[i].Data.Length);
                //                        newPhotos.Add(bmp);
                //                        //Save the photo to disk
                //                        using (var stream = new BufferedStream(File.OpenWrite(Path.Combine(WorkingInetAndSQL.destinationPath, filename))))
                //                            bmp.Compress(Android.Graphics.Bitmap.CompressFormat.Png, 100, stream);
                //                    }
                //                    else                         //If it is already on disk, just take it from disk
                //                    {
                //                        Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeFile(Path.Combine(WorkingInetAndSQL.destinationPath, filename));
                //                        newPhotos.Add(bmp);
                //                    }

                //                    howManyTimesTryDownload = 3;
                //                }
                //            }
                //            catch (Exception e)
                //            {
                //                var k = e.Message;
                //                //And try to work without the Internet!
                //            }



                //        }
                //        else                                        //If there is no photo, use the category's default photo
                //        {
                //            var options = new Android.Graphics.BitmapFactory.Options
                //            {
                //                InJustDecodeBounds = false,
                //            };
                //            Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeResource(res, Resource.Drawable.Leo, options);
                //            newPhotos.Add(bmp);
                //        }

                //    }

                //    client.Logout(syncSessionId);
                //}
            }
            else                              //If there is no data from the server
            {
                //Loop over all events
                for (int i = 0; i < NewEvents.Length; i += 1)
                {
                    //Process the photo information
                    if (NewEvents[i].PrimaryPhotoId.HasValue)              //If the event has a photo
                    {
                        string filename = "EventMainPhoto_" + NewEvents[i].PrimaryPhotoId;
                        if (File.Exists(Path.Combine(WorkingInetAndSQL.destinationPath, filename)))             //If it is already on disk, just take it from disk
                        {
                            Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeFile(Path.Combine(WorkingInetAndSQL.destinationPath, filename));
                            newPhotos.Add(bmp);
                        }
                        else                    //The event has a photo, but it is not on disk yet
                        {
                            var options = new Android.Graphics.BitmapFactory.Options
                            {
                                InJustDecodeBounds = false,
                            };
                            Context context = Android.App.Application.Context; Android.Content.Res.Resources res = context.Resources;
                            Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeResource(res, Resource.Drawable.Leo, options);
                            newPhotos.Add(bmp);
                        }
                    }

                    else                        //If the event has no photo
                    {
                        var options = new Android.Graphics.BitmapFactory.Options
                        {
                            InJustDecodeBounds = false,
                        };
                        Context context = Android.App.Application.Context; Android.Content.Res.Resources res = context.Resources;
                        Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeResource(res, Resource.Drawable.Leo, options);
                        newPhotos.Add(bmp);
                    }
                }
            }
            return(newPhotos);
        }
        private Android.Graphics.Bitmap LoadAndResizeBitmap(string fileName, Size newSize)
        {
            var exif = new ExifInterface(fileName);

            var width       = exif.GetAttributeInt(ExifInterface.TagImageWidth, 100);
            var height      = exif.GetAttributeInt(ExifInterface.TagImageLength, 80);
            var orientation = exif.GetAttribute(ExifInterface.TagOrientation);


            // We calculate the ratio that we need to resize the image by
            // in order to fit the requested dimensions.

            var inSampleSize = 1.0;

            if (newSize.Height < height || newSize.Width < width)
            {
                // Divide as doubles and use the source/target ratio; integer division here would truncate the factor to zero.
                inSampleSize = newSize.Width > newSize.Height
                    ? (double)height / newSize.Height
                    : (double)width / newSize.Width;
            }

            var options = new Android.Graphics.BitmapFactory.Options {
                InJustDecodeBounds = false,
                InSampleSize       = (int)inSampleSize
            };
            // Now we will load the image and have BitmapFactory resize it for us.
            var resizedBitmap = Android.Graphics.BitmapFactory.DecodeFile(fileName, options);

            var rotate = false;

            switch (orientation)
            {
            case "1":     // landscape
            case "3":     // landscape
                if (width < height)
                {
                    rotate = true;
                }
                break;

            case "8":
            case "4":
            case "6":     // portrait
                if (width > height)
                {
                    rotate = true;
                }
                break;

            case "0":     //undefined
            default:
                break;
            }

            if (rotate)
            {
                var mtx = new Android.Graphics.Matrix();
                mtx.PreRotate(90);
                resizedBitmap = Android.Graphics.Bitmap.CreateBitmap(resizedBitmap, 0, 0, resizedBitmap.Width, resizedBitmap.Height, mtx, false);
                mtx.Dispose();
                mtx = null;
            }


            return(resizedBitmap);
        }
Example 23
        public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
        {
            const float coord = 1.0f;

            ObjParser model3D = new ObjParser();

            List <byte[]> test1 = model3D.ParsedObject(context, "buggy");

            float[] vertexArray = new float[test1[0].Length / 4];
            System.Buffer.BlockCopy(test1[0], 0, vertexArray, 0, (int)test1[0].Length);

            modelVerticesData = vertexArray;

            FloatBuffer mTriangleVertices = ByteBuffer.AllocateDirect(modelVerticesData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleVertices.Put(modelVerticesData).Flip();

            // Cube colors
            // R, G, B, A
            float[] modelColorsData =
            {
                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f
            };

            FloatBuffer mTriangleColors = ByteBuffer.AllocateDirect(modelColorsData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleColors.Put(modelColorsData).Flip();


            float[] textureUVMapArray = new float[test1[1].Length / 4];
            System.Buffer.BlockCopy(test1[1], 0, textureUVMapArray, 0, (int)test1[1].Length);

            modelTextureUVMapData = textureUVMapArray;

            FloatBuffer mTriangleTextureUVMap = ByteBuffer.AllocateDirect(modelTextureUVMapData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleTextureUVMap.Put(modelTextureUVMapData).Flip();



            //Data buffers to VBO
            GLES20.GlGenBuffers(3, VBOBuffers, 0); //3 buffers: vertices, colors and texture UV coordinates

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[0]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleVertices.Capacity() * mBytesPerFloat, mTriangleVertices, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[1]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleColors.Capacity() * mBytesPerFloat, mTriangleColors, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[2]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleTextureUVMap.Capacity() * mBytesPerFloat, mTriangleTextureUVMap, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

            //Load and setup texture

            GLES20.GlGenTextures(1, textureHandle, 0); //init 1 texture storage handle
            if (textureHandle[0] != 0)
            {
                //Fully qualified names are used because classes such as Matrix exist in both Android.Graphics and Android.Opengl; this is just one example of that
                Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
                options.InScaled = false; // No pre-scaling
                Android.Graphics.Bitmap bitmap = Android.Graphics.BitmapFactory.DecodeResource(context.Resources, Resource.Drawable.iam, options);
                GLES20.GlBindTexture(GLES20.GlTexture2d, textureHandle[0]);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
                GLUtils.TexImage2D(GLES20.GlTexture2d, 0, bitmap, 0);
                bitmap.Recycle();
            }

            //Ask the runtime to run the garbage collector
            System.GC.Collect();

            //Setup OpenGL ES
            GLES20.GlClearColor(coord, coord, coord, coord);
            // GLES20.GlEnable(GLES20.GlDepthTest); //uncomment to enable the depth test
            GLES20.GlEnable(2884); // 2884 == GL_CULL_FACE; see the OpenGL documentation for this constant
            GLES20.GlCullFace(GLES20.GlBack);


            // Position the eye behind the origin.
            float eyeX = 0.0f;
            float eyeY = 0.0f;
            float eyeZ = 4.5f;

            // We are looking toward the distance
            float lookX = 0.0f;
            float lookY = 0.0f;
            float lookZ = -5.0f;

            // Set our up vector. This is where our head would be pointing were we holding the camera.
            float upX = 0.0f;
            float upY = coord;
            float upZ = 0.0f;

            // Set the view matrix. This matrix can be said to represent the camera position.
            // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
            // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
            Matrix.SetLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);

            string vertexShader =
                "uniform mat4 u_MVPMatrix;      \n"         // A constant representing the combined model/view/projection matrix.
                + "attribute vec4 a_Position;     \n"       // Per-vertex position information we will pass in.
                + "attribute vec4 a_Color;        \n"       // Per-vertex color information we will pass in.
                + "varying vec4 v_Color;          \n"       // This will be passed into the fragment shader.
                + "attribute vec2 a_TextureCoord; \n"
                + "varying vec2 v_TextureCoord;   \n"
                + "void main()                    \n"       // The entry point for our vertex shader.
                + "{                              \n"
                + "   v_TextureCoord = a_TextureCoord; \n"  // Pass the color through to the fragment shader. It will be interpolated across the triangle.
                + "   v_Color = a_Color;          \n"       // Pass the color through to the fragment shader. It will be interpolated across the triangle.
                + "   gl_Position = u_MVPMatrix   \n"       // gl_Position is a special variable used to store the final position.
                + "                 * a_Position; \n"       // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
                + "}                              \n";

            string fragmentShader =
                "precision mediump float;       \n"     // Set the default precision to medium. We don't need as high of a
                                                        // precision in the fragment shader.
                + "varying vec4 v_Color;          \n"   // This is the color from the vertex shader interpolated across the triangle per fragment.
                + "varying vec2 v_TextureCoord;   \n"
                + "uniform sampler2D u_Texture;   \n"
                + "void main()                    \n"                           // The entry point for our fragment shader.
                + "{                              \n"
                + "   gl_FragColor = texture2D(u_Texture, v_TextureCoord);  \n" // Pass the color directly through the pipeline.
                + "}                              \n";


            // Discard the inline shader sources above and load them from raw resources instead.
            vertexShader   = string.Empty;
            fragmentShader = string.Empty;

            int          resourceId   = context.Resources.GetIdentifier("vertexshadervladimir1", "raw", context.PackageName);
            Stream       fileStream   = context.Resources.OpenRawResource(resourceId);
            StreamReader streamReader = new StreamReader(fileStream);

            string line = string.Empty;

            while ((line = streamReader.ReadLine()) != null)
            {
                vertexShader += line + "\n";
            }

            resourceId   = context.Resources.GetIdentifier("fragmentshadervladimir1", "raw", context.PackageName);
            fileStream   = context.Resources.OpenRawResource(resourceId);
            streamReader = new StreamReader(fileStream);
            while ((line = streamReader.ReadLine()) != null)
            {
                fragmentShader += line + "\n";
            }

            int vertexShaderHandle = GLES20.GlCreateShader(GLES20.GlVertexShader);

            if (vertexShaderHandle != 0)
            {
                // Pass in the shader source.
                GLES20.GlShaderSource(vertexShaderHandle, vertexShader);

                // Compile the shader.
                GLES20.GlCompileShader(vertexShaderHandle);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.GlGetShaderiv(vertexShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    GLES20.GlDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new Exception("Error creating vertex shader.");
            }

            // Load in the fragment shader shader.
            int fragmentShaderHandle = GLES20.GlCreateShader(GLES20.GlFragmentShader);

            if (fragmentShaderHandle != 0)
            {
                // Pass in the shader source.
                GLES20.GlShaderSource(fragmentShaderHandle, fragmentShader);

                // Compile the shader.
                GLES20.GlCompileShader(fragmentShaderHandle);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.GlGetShaderiv(fragmentShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    GLES20.GlDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }

            if (fragmentShaderHandle == 0)
            {
                throw new Exception("Error creating fragment shader.");
            }

            // Create a program object and store the handle to it.
            int programHandle = GLES20.GlCreateProgram();

            if (programHandle != 0)
            {
                // Bind the vertex shader to the program.
                GLES20.GlAttachShader(programHandle, vertexShaderHandle);

                // Bind the fragment shader to the program.
                GLES20.GlAttachShader(programHandle, fragmentShaderHandle);

                // Bind attributes
                GLES20.GlBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.GlBindAttribLocation(programHandle, 1, "a_Color");
                GLES20.GlBindAttribLocation(programHandle, 2, "a_TextureCoord");

                // Link the two shaders together into a program.
                GLES20.GlLinkProgram(programHandle);

                // Get the link status.
                int[] linkStatus = new int[1];
                GLES20.GlGetProgramiv(programHandle, GLES20.GlLinkStatus, linkStatus, 0);

                // If the link failed, delete the program.
                if (linkStatus[0] == 0)
                {
                    GLES20.GlDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new Exception("Error creating program.");
            }

            // Set program handles. These will later be used to pass in values to the program.
            mMVPMatrixHandle    = GLES20.GlGetUniformLocation(programHandle, "u_MVPMatrix");
            mPositionHandle     = GLES20.GlGetAttribLocation(programHandle, "a_Position");
            mColorHandle        = GLES20.GlGetAttribLocation(programHandle, "a_Color");
            mTextureCoordHandle = GLES20.GlGetAttribLocation(programHandle, "a_TextureCoord");
            mTextureHandle      = GLES20.GlGetUniformLocation(programHandle, "u_Texture");


            // Tell OpenGL to use this program when rendering.
            GLES20.GlUseProgram(programHandle);
        }
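A sketch of wiring a renderer like the one above to a GLSurfaceView from an activity's OnCreate; MyRenderer is a hypothetical class implementing GLSurfaceView.IRenderer with the OnSurfaceCreated shown here.

            // The shaders above target OpenGL ES 2.0, so request an ES 2.0 context before setting the renderer.
            var surfaceView = new GLSurfaceView(this);
            surfaceView.SetEGLContextClientVersion(2);
            surfaceView.SetRenderer(new MyRenderer(this));
            SetContentView(surfaceView);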
Example 24
        public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
        {
            const float coord = 1.0f;

            // Cube coords
            // X, Y, Z per vertex; 3 vertices per triangle face, 12 triangle faces per cube
            float[] triangleVerticesData =
            {
                -coord, -coord, -coord,
                -coord, -coord, coord,
                -coord, coord,  coord,

                coord,  coord,  -coord,
                -coord, -coord, -coord,
                -coord, coord,  -coord,

                coord,  -coord, coord,
                -coord, -coord, -coord,
                coord,  -coord, -coord,

                coord,  coord,  -coord,
                coord,  -coord, -coord,
                -coord, -coord, -coord,

                -coord, -coord, -coord,
                -coord, coord,  coord,
                -coord, coord,  -coord,

                coord,  -coord, coord,
                -coord, -coord, coord,
                -coord, -coord, -coord,

                -coord, coord,  coord,
                -coord, -coord, coord,
                coord,  -coord, coord,

                coord,  coord,  coord,
                coord,  -coord, -coord,
                coord,  coord,  -coord,

                coord,  -coord, -coord,
                coord,  coord,  coord,
                coord,  -coord, coord,

                coord,  coord,  coord,
                coord,  coord,  -coord,
                -coord, coord,  -coord,

                coord,  coord,  coord,
                -coord, coord,  -coord,
                -coord, coord,  coord,

                coord,  coord,  coord,
                -coord, coord,  coord,
                coord,  -coord, coord
            };

            FloatBuffer mTriangleVertices = ByteBuffer.AllocateDirect(triangleVerticesData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleVertices.Put(triangleVerticesData).Flip();

            // Cube colors
            // R, G, B, A
            float[] triangleColorsData =
            {
                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f,

                1.0f, 0.0f, 0.0f, 0.5f,
                0.0f, 0.5f, 1.0f, 1.0f,
                0.0f, 1.0f, 0.0f, 1.0f
            };

            FloatBuffer mTriangleColors = ByteBuffer.AllocateDirect(triangleColorsData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleColors.Put(triangleColorsData).Flip();

            //Cube texture UV Map
            float[] triangleTextureUVMapData =
            {
                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f,

                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 0.0f
            };

            FloatBuffer mTriangleTextureUVMap = ByteBuffer.AllocateDirect(triangleTextureUVMapData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleTextureUVMap.Put(triangleTextureUVMapData).Flip();

            //triangle normals
            //This normal array is intentionally not correct; it is set up this way to demonstrate how normals interact with faces when lighting is calculated in the shader program
            float[] triangleNormalData =
            {
                // Front face
                0.0f,   0.0f,  1.0f,
                0.0f,   0.0f,  1.0f,
                0.0f,   0.0f,  1.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,

                // Right face
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,
                1.0f,   0.0f,  0.0f,

                // Back face
                0.0f,   0.0f, -1.0f,
                0.0f,   0.0f, -1.0f,
                0.0f,   0.0f, -1.0f,
                0.0f,   0.0f, -1.0f,
                0.0f,   0.0f, -1.0f,
                0.0f,   0.0f, -1.0f,

                // Left face
                -1.0f,  0.0f,  0.0f,
                -1.0f,  0.0f,  0.0f,
                -1.0f,  0.0f,  0.0f,
                -1.0f,  0.0f,  0.0f,
                -1.0f,  0.0f,  0.0f,
                -1.0f,  0.0f,  0.0f,

                // Top face
                0.0f,   1.0f,  0.0f,
                0.0f,   1.0f,  0.0f,
                0.0f,   1.0f,  0.0f,
                0.0f,   1.0f,  0.0f,
                0.0f,   1.0f,  0.0f,
                0.0f,   1.0f,  0.0f,

                // Bottom face
                0.0f,  -1.0f,  0.0f,
                0.0f,  -1.0f,  0.0f,
                0.0f,  -1.0f,  0.0f,
                0.0f,  -1.0f,  0.0f,
                0.0f,  -1.0f,  0.0f,
                0.0f,  -1.0f, 0.0f
            };

            FloatBuffer mTriangleNormal = ByteBuffer.AllocateDirect(triangleNormalData.Length * mBytesPerFloat).Order(ByteOrder.NativeOrder()).AsFloatBuffer();

            mTriangleNormal.Put(triangleNormalData).Flip();

            //Data buffers to VBO
            GLES20.GlGenBuffers(4, VBOBuffers, 0); //4 buffers: vertices, colors, texture UV coordinates and normals

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[0]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleVertices.Capacity() * mBytesPerFloat, mTriangleVertices, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[1]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleColors.Capacity() * mBytesPerFloat, mTriangleColors, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[2]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleTextureUVMap.Capacity() * mBytesPerFloat, mTriangleTextureUVMap, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[3]);
            GLES20.GlBufferData(GLES20.GlArrayBuffer, mTriangleNormal.Capacity() * mBytesPerFloat, mTriangleNormal, GLES20.GlStaticDraw);

            GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

            //Load and setup texture

            GLES20.GlGenTextures(1, textureHandle, 0); //init 1 texture storage handle
            if (textureHandle[0] != 0)
            {
                //Android.Graphics types are written out in full here because a Matrix class exists in both Android.Graphics and Android.Opengl; this is just one way to avoid the ambiguity
                Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
                options.InScaled = false; // No pre-scaling
                Android.Graphics.Bitmap bitmap = Android.Graphics.BitmapFactory.DecodeResource(context.Resources, Resource.Drawable.texture1, options);
                GLES20.GlBindTexture(GLES20.GlTexture2d, textureHandle[0]);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
                GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
                GLUtils.TexImage2D(GLES20.GlTexture2d, 0, bitmap, 0);
                bitmap.Recycle();
            }
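            //The texture created above still has to be wired to its sampler when the cube is drawn. A minimal
            //sketch (assumed to run in this renderer's draw callback, which is not shown in this excerpt): select
            //texture unit 0, bind the texture and point the u_Texture sampler at that unit.
            //
            //    GLES20.GlActiveTexture(GLES20.GlTexture0);                   // select texture unit 0
            //    GLES20.GlBindTexture(GLES20.GlTexture2d, textureHandle[0]);  // bind the cube texture
            //    GLES20.GlUniform1i(mTextureHandle, 0);                       // u_Texture samples from unit 0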

            //Ask the runtime to run the garbage collector now that the source bitmap is no longer needed
            System.GC.Collect();

            //Setup OpenGL ES
            GLES20.GlClearColor(0.0f, 0.0f, 0.0f, 0.0f);
            // GLES20.GlEnable(GLES20.GlDepthTest); //uncomment if the depth test needs to be enabled
            GLES20.GlEnable(2884); // 2884 == GL_CULL_FACE; see the OpenGL documentation for this constant value
            GLES20.GlCullFace(GLES20.GlBack);


            // Position the eye behind the origin.
            float eyeX = 0.0f;
            float eyeY = 0.0f;
            float eyeZ = 4.5f;

            // We are looking toward the distance
            float lookX = 0.0f;
            float lookY = 0.0f;
            float lookZ = -5.0f;

            // Set our up vector. This is where our head would be pointing were we holding the camera.
            float upX = 0.0f;
            float upY = coord;
            float upZ = 0.0f;

            // Set the view matrix. This matrix can be said to represent the camera position.
            // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
            // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
            Matrix.SetLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
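            //The shaders below expect u_MVPMatrix as one combined matrix. A minimal sketch of how it is usually
            //assembled each frame (mModelMatrix, mProjectionMatrix and mMVPMatrix are assumed float[16] fields
            //that are not part of this excerpt):
            //
            //    float[] viewModel = new float[16];
            //    Matrix.MultiplyMM(viewModel, 0, mViewMatrix, 0, mModelMatrix, 0);       // view * model
            //    Matrix.MultiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, viewModel, 0);   // projection * view * model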

            //all "attribute" variables is "triagles" VBO (arrays) items representation
            //a_Possition[0] <=> a_Color[0] <=> a_TextureCoord[0] <=> a_Normal[0]
            //a_Possition[1] <=> a_Color[1] <=> a_TextureCoord[1] <=> a_Normal[1]
            //...
            //a_Possition[n] <=> a_Color[n] <=> a_TextureCoord[n] <=> a_Normal[n] -- where "n" is object buffers length
            //-> HOW MANY faces in your object (model) in VBO -> how many times the vertex shader will be called by OpenGL
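            //At draw time each VBO filled above is bound to its matching attribute slot, which is what keeps the
            //arrays aligned per vertex as described above. A minimal sketch for the first two buffers (the handles
            //are the ones queried with GlGetAttribLocation further down):
            //
            //    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[0]);
            //    GLES20.GlVertexAttribPointer(mPositionHandle, 3, GLES20.GlFloat, false, 0, 0);   // x,y,z per vertex
            //    GLES20.GlEnableVertexAttribArray(mPositionHandle);
            //    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, VBOBuffers[1]);
            //    GLES20.GlVertexAttribPointer(mColorHandle, 4, GLES20.GlFloat, false, 0, 0);       // r,g,b,a per vertex
            //    GLES20.GlEnableVertexAttribArray(mColorHandle);
            //    //VBOBuffers[2] -> a_TextureCoord (2 floats) and VBOBuffers[3] -> a_Normal (3 floats) follow the same pattern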
            string vertexShader =
                "uniform mat4 u_MVPMatrix;      \n"         // A constant representing the combined model/view/projection matrix.
                + "uniform vec3 u_LightPos;       \n"       // A constant representing the light source position
                + "attribute vec4 a_Position;     \n"       // Per-vertex position information we will pass in. (it means vec4[x,y,z,w] but we put only x,y,z at this sample
                + "attribute vec4 a_Color;        \n"       // Per-vertex color information we will pass in.
                + "varying vec4 v_Color;          \n"       // This will be passed into the fragment shader.
                + "attribute vec2 a_TextureCoord; \n"       // Per-vertex texture UVMap information we will pass in.
                + "varying vec2 v_TextureCoord;   \n"       // This will be passed into the fragment shader.
                + "attribute vec3 a_Normal;       \n"       // Per-vertex normals information we will pass in.
                + "void main()                    \n"       // The entry point for our vertex shader.
                + "{                              \n"
                //light calculation section for fragment shader
                + "   vec3 modelViewVertex = vec3(u_MVPMatrix * a_Position);\n"
                + "   vec3 modelViewNormal = vec3(u_MVPMatrix * vec4(a_Normal, 0.0));\n"
                + "   float distance = length(u_LightPos - modelViewVertex);\n"
                + "   vec3 lightVector = normalize(u_LightPos - modelViewVertex);\n"
                + "   float diffuse = max(dot(modelViewNormal, lightVector), 0.1);\n"
                + "   diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance * distance)));\n"
                + "   v_Color = a_Color * vec4(diffuse);\n" //Pass the color with light aspect to fragment shader
                + "   v_TextureCoord = a_TextureCoord; \n"  // Pass the texture coordinate through to the fragment shader. It will be interpolated across the triangle.
                + "   gl_Position = u_MVPMatrix   \n"       // gl_Position is a special variable used to store the final position.
                + "                 * a_Position; \n"       // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
                + "}                              \n";

            string fragmentShader =
                "precision mediump float;       \n"                                       // Set the default precision to medium. We don't need as high of a
                                                                                          // precision in the fragment shader.
                + "varying vec4 v_Color;          \n"                                     // This is the color from the vertex shader interpolated across the triangle per fragment.
                + "varying vec2 v_TextureCoord;   \n"                                     // This is the texture coordinate from the vertex shader interpolated across the triangle per fragment.
                + "uniform sampler2D u_Texture;   \n"                                     // This is the texture image handler
                + "void main()                    \n"                                     // The entry point for our fragment shader.
                + "{                              \n"
                + "   gl_FragColor = texture2D(u_Texture, v_TextureCoord) * v_Color;  \n" // Pass the color directly through the pipeline.
                + "}                              \n";

            int vertexShaderHandle = GLES20.GlCreateShader(GLES20.GlVertexShader);

            if (vertexShaderHandle != 0)
            {
                // Pass in the shader source.
                GLES20.GlShaderSource(vertexShaderHandle, vertexShader);

                // Compile the shader.
                GLES20.GlCompileShader(vertexShaderHandle);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.GlGetShaderiv(vertexShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    GLES20.GlDeleteShader(vertexShaderHandle);
                    vertexShaderHandle = 0;
                }
            }

            if (vertexShaderHandle == 0)
            {
                throw new Exception("Error creating vertex shader.");
            }
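            //When compilation fails, the driver's log usually says why. A minimal sketch of reading it (inside the
            //failure branch, before the handle is deleted); GLES20.GlGetShaderInfoLog is the standard call for this:
            //
            //    string info = GLES20.GlGetShaderInfoLog(vertexShaderHandle);
            //    Android.Util.Log.Error("Renderer", "Vertex shader compile error: " + info);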

            // Load in the fragment shader.
            int fragmentShaderHandle = GLES20.GlCreateShader(GLES20.GlFragmentShader);

            if (fragmentShaderHandle != 0)
            {
                // Pass in the shader source.
                GLES20.GlShaderSource(fragmentShaderHandle, fragmentShader);

                // Compile the shader.
                GLES20.GlCompileShader(fragmentShaderHandle);

                // Get the compilation status.
                int[] compileStatus = new int[1];
                GLES20.GlGetShaderiv(fragmentShaderHandle, GLES20.GlCompileStatus, compileStatus, 0);

                // If the compilation failed, delete the shader.
                if (compileStatus[0] == 0)
                {
                    GLES20.GlDeleteShader(fragmentShaderHandle);
                    fragmentShaderHandle = 0;
                }
            }

            if (fragmentShaderHandle == 0)
            {
                throw new Exception("Error creating fragment shader.");
            }

            // Create a program object and store the handle to it.
            int programHandle = GLES20.GlCreateProgram();

            if (programHandle != 0)
            {
                // Bind the vertex shader to the program.
                GLES20.GlAttachShader(programHandle, vertexShaderHandle);

                // Bind the fragment shader to the program.
                GLES20.GlAttachShader(programHandle, fragmentShaderHandle);

                // Bind attributes
                GLES20.GlBindAttribLocation(programHandle, 0, "a_Position");
                GLES20.GlBindAttribLocation(programHandle, 1, "a_Color");
                GLES20.GlBindAttribLocation(programHandle, 2, "a_TextureCoord");
                GLES20.GlBindAttribLocation(programHandle, 3, "a_Normal");

                // Link the two shaders together into a program.
                GLES20.GlLinkProgram(programHandle);

                // Get the link status.
                int[] linkStatus = new int[1];
                GLES20.GlGetProgramiv(programHandle, GLES20.GlLinkStatus, linkStatus, 0);

                // If the link failed, delete the program.
                if (linkStatus[0] == 0)
                {
                    GLES20.GlDeleteProgram(programHandle);
                    programHandle = 0;
                }
            }

            if (programHandle == 0)
            {
                throw new Exception("Error creating program.");
            }

            // Set program handles. These will later be used to pass in values to the program.
            mMVPMatrixHandle    = GLES20.GlGetUniformLocation(programHandle, "u_MVPMatrix");
            mLightPos           = GLES20.GlGetUniformLocation(programHandle, "u_LightPos");
            mPositionHandle     = GLES20.GlGetAttribLocation(programHandle, "a_Position");
            mColorHandle        = GLES20.GlGetAttribLocation(programHandle, "a_Color");
            mTextureCoordHandle = GLES20.GlGetAttribLocation(programHandle, "a_TextureCoord");
            mNormalHandle       = GLES20.GlGetAttribLocation(programHandle, "a_Normal");
            mTextureHandle      = GLES20.GlGetUniformLocation(programHandle, "u_Texture");


            // Tell OpenGL to use this program when rendering.
            GLES20.GlUseProgram(programHandle);
        }
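        //A sketch (not part of the original sample) of how the handles set up above are consumed when the cube is
        //actually drawn: bind the VBOs and texture as in the earlier sketches, push the combined matrix and light
        //position uniforms, then issue one draw call for the 12 triangles (36 vertices) of the cube.
        //
        //    GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);
        //    //...bind VBOBuffers[0..3] to a_Position/a_Color/a_TextureCoord/a_Normal and texture unit 0 as sketched above...
        //    GLES20.GlUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);  // mMVPMatrix assembled as sketched earlier
        //    GLES20.GlUniform3f(mLightPos, 0.0f, 2.0f, 2.0f);                       // example light position, chosen for illustration
        //    GLES20.GlDrawArrays(GLES20.GlTriangles, 0, 36);                        // 12 triangles * 3 vertices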
Esempio n. 25
0
        /// <summary>
        /// Take a picture and perform previews
        /// </summary>
        /// <param name="data"></param>
        /// <param name="camera"></param>
        void Camera.IPictureCallback.OnPictureTaken(byte[] data, Android.Hardware.Camera camera)
        {
            if (data != null && _box != null)
            {
                if (!_processDialog.IsShowing)
                {
                    _processDialog.SetMessage("Waiting for image results...");
                    _processDialog.Show();
                }
                Android.Graphics.BitmapFactory.Options options = new Android.Graphics.BitmapFactory.Options();
                options.InSampleSize = 2;           // decode at half resolution to reduce memory use
                options.InJustDecodeBounds = false; // decode the full bitmap, not just its bounds
                options.InTempStorage = new byte[16 * 1024];
                Android.Graphics.Bitmap bmp = Android.Graphics.BitmapFactory.DecodeByteArray(data, 0, data.Length, options);
                data = null;
                GC.Collect();
                Android.Graphics.Bitmap rotatedBitmap;
                if (bmp.Width > bmp.Height)
                {
                    Android.Graphics.Matrix mtx = new Android.Graphics.Matrix();
                    mtx.SetRotate(90);
                    rotatedBitmap = Android.Graphics.Bitmap.CreateBitmap(bmp, 0, 0, bmp.Width, bmp.Height, mtx, false);
                    bmp.Recycle();
                    mtx.Dispose();
                    mtx = null;
                }
                else
                {
                    rotatedBitmap = bmp.Copy(bmp.GetConfig(), true);
                    bmp.Recycle();
                }
                GC.Collect();
                double ratioX = (double)rotatedBitmap.Width / (double)screenWidth;
                double ratioY = (double)rotatedBitmap.Height / (double)screenHeight;
                int startX = (int)(ratioX * (double)(_box.MidX - _box.Width / 2));
                int startY = (int)(ratioY * (double)(_box.MidY - _box.Height / 2));
                int width = (int)(ratioX * (double)_box.Width);
                int height = (int)(ratioY * (double)_box.Height);
                Android.Graphics.Bitmap croppedBitmap = Android.Graphics.Bitmap.CreateBitmap(rotatedBitmap, startX, startY, width, height);
                rotatedBitmap.Recycle();
                GC.Collect();
                bitmap = croppedBitmap.Copy(croppedBitmap.GetConfig(), true);
                PerformPreviews(croppedBitmap);
            }
            else
            {
                Toast.MakeText(this, "Data null error", ToastLength.Long).Show();
            }
        }
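        //Note (not part of the original sample): Bitmap.CreateBitmap throws IllegalArgumentException when the crop
        //rectangle falls outside the source bitmap, so the ratio-scaled values above are typically clamped first.
        //A minimal sketch, reusing the same local names as in OnPictureTaken:
        //
        //    startX = Math.Max(0, Math.Min(startX, rotatedBitmap.Width - 1));
        //    startY = Math.Max(0, Math.Min(startY, rotatedBitmap.Height - 1));
        //    width  = Math.Min(width,  rotatedBitmap.Width  - startX);
        //    height = Math.Min(height, rotatedBitmap.Height - startY);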