/// <summary>Prepares the anti-aliased, solid red paint used for circle rendering and the default radius.</summary>
private void init()
{
    circleRadius = 10;
    circlePaint = new Paint(Paint.ANTI_ALIAS_FLAG)
    {
        Style = Paint.Style.FILL,
        Color = Color.RED,
    };
}
/// <summary>Copy constructor: builds a NinePatch sharing the bitmap, chunk and source name of an existing one.</summary>
/// <remarks>The bitmap and chunk are shared by reference; the paint, when present on the source, is deep-copied.
/// The chunk is re-validated against the shared bitmap's native instance.</remarks>
/// <hide></hide>
public NinePatch(android.graphics.NinePatch patch) { mBitmap = patch.mBitmap; mChunk = patch.mChunk; mSrcName = patch.mSrcName; if (patch.mPaint != null) { mPaint = new android.graphics.Paint(patch.mPaint); } validateNinePatchChunk(mBitmap.nativeInstance, mChunk); }
/// <summary>Drawable that renders a fixed string centered, filled, and anti-aliased.</summary>
/// <param name="text">the string to render.</param>
/// <param name="textSize">text size for the paint.</param>
/// <param name="textColor">text color for the paint.</param>
public TextDrawable(string text, int textSize, int textColor)
{
    mText = text;
    var textPaint = new Paint
    {
        Color = textColor,
        TextSize = textSize,
        AntiAlias = true,
        Style = Paint.Style.FILL,
        TextAlign = Paint.Align.CENTER,
    };
    // mPaint.setShadowLayer(6f, 0, 0, Color.BLACK);
    mPaint = textPaint;
}
/// <summary>Creates the view with a stroke-only paint and freshly allocated matrices/rectangles for face-rect drawing.</summary>
public FaceRectView(Context context, AttributeSet attrs, int defStyle)
    : base(context, attrs, defStyle)
{
    mPaint = new Paint { Style = Paint.Style.STROKE };
    mMatrix = new Matrix();
    mAspectRatio = new Matrix();
    mRevisionZoomRect = new RectF();
    mActualRect = new RectF();
    mBoundRect = new RectF();
}
/// <summary>Draws the current text twice — once per stereo half of a 2560px-wide panel — in red stroke style.</summary>
protected override void onDraw(android.graphics.Canvas canvas)
{
    var p = new android.graphics.Paint();
    p.setStyle(android.graphics.Paint.Style.STROKE);
    //p.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
    p.setColor(android.graphics.Color.RED);
    p.setTextSize(textSize);

    var s = this.text();
    // left eye, then the same text shifted by half the panel width for the right eye
    canvas.drawText(s, x, y, p);
    canvas.drawText(s, x + 2560 / 2, y, p);
    base.onDraw(canvas);
}
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawPoints(float[] pts, int offset, int count, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Create a new number picker.</summary>
/// <param name="context">the application environment.</param>
/// <param name="attrs">a collection of attributes.</param>
/// <param name="defStyle">The default style to apply to this view.</param>
public NumberPicker(android.content.Context context, android.util.AttributeSet attrs
    , int defStyle) : base(context, attrs, defStyle)
{
    // NOTE(review): the "[email protected]" tokens below look like text-extraction damage — an
    // email scrubber appears to have hit qualified resource identifiers (presumably
    // "android.@internal.R.styleable.NumberPicker" and friends). Restore them from the
    // original source before building; they are kept verbatim here.
    // Read the styled attributes that configure the widget.
    android.content.res.TypedArray attributesArray = context.obtainStyledAttributes(attrs
        , [email protected], defStyle, 0);
    mSolidColor = attributesArray.getColor([email protected]_solidColor
        , 0);
    mFlingable = attributesArray.getBoolean([email protected]_flingable
        , true);
    mSelectionDivider = attributesArray.getDrawable([email protected]_selectionDivider
        );
    // Default divider height is specified in DIP and converted to pixels here.
    int defSelectionDividerHeight = (int)android.util.TypedValue.applyDimension(android.util.TypedValue
        .COMPLEX_UNIT_DIP, UNSCALED_DEFAULT_SELECTION_DIVIDER_HEIGHT, getResources().getDisplayMetrics
        ());
    mSelectionDividerHeight = attributesArray.getDimensionPixelSize([email protected]
        .styleable.NumberPicker_selectionDividerHeight, defSelectionDividerHeight);
    attributesArray.recycle();
    mShowInputControlsAnimimationDuration = getResources().getInteger([email protected]
        .integer.config_longAnimTime);
    // This view draws the selector wheel itself, so it must not skip onDraw().
    setWillNotDraw(false);
    setSelectorWheelState(SELECTOR_WHEEL_STATE_NONE);
    // Inflate the increment/decrement buttons and the editable text in the middle.
    android.view.LayoutInflater inflater = (android.view.LayoutInflater)getContext().
        getSystemService(android.content.Context.LAYOUT_INFLATER_SERVICE);
    inflater.inflate([email protected]_picker, this, true);
    android.view.View.OnClickListener onClickListener = new _OnClickListener_537(this
        );
    android.view.View.OnLongClickListener onLongClickListener = new _OnLongClickListener_552
        (this);
    mIncrementButton = (android.widget.ImageButton)findViewById([email protected]
        .increment);
    mIncrementButton.setOnClickListener(onClickListener);
    mIncrementButton.setOnLongClickListener(onLongClickListener);
    mDecrementButton = (android.widget.ImageButton)findViewById([email protected]
        .decrement);
    mDecrementButton.setOnClickListener(onClickListener);
    mDecrementButton.setOnLongClickListener(onLongClickListener);
    mInputText = (android.widget.EditText)findViewById([email protected]_input
        );
    mInputText.setOnFocusChangeListener(new _OnFocusChangeListener_576(this));
    mInputText.setFilters(new android.text.InputFilter[] { new android.widget.NumberPicker
        .InputTextFilter(this) });
    mInputText.setRawInputType(android.text.InputTypeClass.TYPE_CLASS_NUMBER);
    // NOTE(review): dead store — getTapTimeout() is a time, not a distance, and the value
    // is overwritten two statements below with the scaled touch slop.
    mTouchSlop = android.view.ViewConfiguration.getTapTimeout();
    android.view.ViewConfiguration configuration = android.view.ViewConfiguration.get
        (context);
    mTouchSlop = configuration.getScaledTouchSlop();
    mMinimumFlingVelocity = configuration.getScaledMinimumFlingVelocity();
    mMaximumFlingVelocity = configuration.getScaledMaximumFlingVelocity() / SELECTOR_MAX_FLING_VELOCITY_ADJUSTMENT;
    mTextSize = (int)mInputText.getTextSize();
    // The selector wheel paint mirrors the EditText's typography so wheel items match the input.
    android.graphics.Paint paint = new android.graphics.Paint();
    paint.setAntiAlias(true);
    paint.setTextAlign(android.graphics.Paint.Align.CENTER);
    paint.setTextSize(mTextSize);
    paint.setTypeface(mInputText.getTypeface());
    android.content.res.ColorStateList colors = mInputText.getTextColors();
    int color = colors.getColorForState(ENABLED_STATE_SET, android.graphics.Color.WHITE
        );
    paint.setColor(color);
    mSelectorWheelPaint = paint;
    // Animators that dim the wheel and fade the +/- buttons in together.
    mDimSelectorWheelAnimator = android.animation.ObjectAnimator.ofInt(this,
        PROPERTY_SELECTOR_PAINT_ALPHA
        , SELECTOR_WHEEL_BRIGHT_ALPHA, SELECTOR_WHEEL_DIM_ALPHA);
    android.animation.ObjectAnimator showIncrementButton = android.animation.ObjectAnimator
        .ofFloat(mIncrementButton, PROPERTY_BUTTON_ALPHA, BUTTON_ALPHA_TRANSPARENT, BUTTON_ALPHA_OPAQUE
        );
    android.animation.ObjectAnimator showDecrementButton = android.animation.ObjectAnimator
        .ofFloat(mDecrementButton, PROPERTY_BUTTON_ALPHA, BUTTON_ALPHA_TRANSPARENT, BUTTON_ALPHA_OPAQUE
        );
    mShowInputControlsAnimator = new android.animation.AnimatorSet();
    mShowInputControlsAnimator.playTogether(mDimSelectorWheelAnimator, showIncrementButton
        , showDecrementButton);
    mShowInputControlsAnimator.addListener(new _AnimatorListenerAdapter_626(this));
    // create the fling and adjust scrollers
    mFlingScroller = new android.widget.Scroller(getContext(), null, true);
    mAdjustScroller = new android.widget.Scroller(getContext(), new android.view.animation.DecelerateInterpolator
        (2.5f));
    updateInputTextView();
    updateIncrementAndDecrementButtonsVisibilityState();
    if (mFlingable)
    {
        if (isInEditMode())
        {
            setSelectorWheelState(SELECTOR_WHEEL_STATE_SMALL);
        }
        else
        {
            // Start with shown selector wheel and hidden controls. When made
            // visible hide the selector and fade-in the controls to suggest
            // fling interaction.
            setSelectorWheelState(SELECTOR_WHEEL_STATE_LARGE);
            hideInputControls();
        }
    }
}
/// <summary>Draws the multi-line HUD text twice (left/right stereo halves of a 2560px-wide panel),
/// then raises the AtDraw callback so external code can overlay onto the same canvas.</summary>
protected override void onDraw(android.graphics.Canvas canvas)
{
    {
        var paint = new android.graphics.Paint();
        paint.setStyle(android.graphics.Paint.Style.STROKE);
        //paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
        //paint.setColor(android.graphics.Color.RED);
        //paint.setColor(android.graphics.Color.YELLOW);
        // color/textSize/alpha are fields mutated elsewhere (e.g. by a status thread).
        paint.setColor(color);
        paint.setTextSize(textSize);
        paint.setAlpha(alpha);
        // One draw call per text line, 24px line spacing; the second call is shifted
        // by half the panel width for the right eye.
        var a = this.text().Split('\n');
        a.WithEachIndex(
            (text, i) =>
            {
                canvas.drawText(text, x, y + i * 24, paint);
                canvas.drawText(text, x + 2560 / 2, y + i * 24, paint);
            }
        );
    }
    if (AtDraw != null) AtDraw(canvas);
    base.onDraw(canvas);
}
/// <summary>
/// Returns an immutable bitmap from subset of the source bitmap,
/// transformed by the optional matrix.
/// </summary>
/// <remarks>
/// Returns an immutable bitmap from subset of the source bitmap,
/// transformed by the optional matrix. It is
/// initialized with the same density as the original bitmap.
/// When the requested subset covers the whole of an immutable source and no
/// (non-identity) matrix is supplied, the source itself is returned unchanged.
/// </remarks>
/// <param name="source">The bitmap we are subsetting</param>
/// <param name="x">The x coordinate of the first pixel in source</param>
/// <param name="y">The y coordinate of the first pixel in source</param>
/// <param name="width">The number of pixels in each row</param>
/// <param name="height">The number of rows</param>
/// <param name="m">Optional matrix to be applied to the pixels</param>
/// <param name="filter">
/// true if the source should be filtered.
/// Only applies if the matrix contains more than just
/// translation.
/// </param>
/// <returns>A bitmap that represents the specified subset of source</returns>
/// <exception cref="System.ArgumentException">
/// if the x, y, width, height values are
/// outside of the dimensions of the source bitmap.
/// </exception>
public static android.graphics.Bitmap createBitmap(android.graphics.Bitmap source
    , int x, int y, int width, int height, android.graphics.Matrix m, bool filter)
{
    // Validate the requested sub-rectangle before doing any work.
    checkXYSign(x, y);
    checkWidthHeight(width, height);
    if (x + width > source.getWidth())
    {
        throw new System.ArgumentException("x + width must be <= bitmap.width()");
    }
    if (y + height > source.getHeight())
    {
        throw new System.ArgumentException("y + height must be <= bitmap.height()");
    }
    // check if we can just return our argument unchanged
    if (!source.isMutable() && x == 0 && y == 0 && width == source.getWidth() && height
         == source.getHeight() && (m == null || m.isIdentity()))
    {
        return source;
    }
    int neww = width;
    int newh = height;
    android.graphics.Canvas canvas = new android.graphics.Canvas();
    android.graphics.Bitmap bitmap;
    android.graphics.Paint paint;
    android.graphics.Rect srcR = new android.graphics.Rect(x, y, x + width, y + height
        );
    android.graphics.RectF dstR = new android.graphics.RectF(0, 0, width, height);
    // Pick an output config matching the source where possible.
    android.graphics.Bitmap.Config newConfig = android.graphics.Bitmap.Config.ARGB_8888;
    android.graphics.Bitmap.Config config = source.getConfig();
    // GIF files generate null configs, assume ARGB_8888
    if (config != null)
    {
        switch (config)
        {
            case android.graphics.Bitmap.Config.RGB_565:
            {
                newConfig = android.graphics.Bitmap.Config.RGB_565;
                break;
            }
            case android.graphics.Bitmap.Config.ALPHA_8:
            {
                newConfig = android.graphics.Bitmap.Config.ALPHA_8;
                break;
            }
            case android.graphics.Bitmap.Config.ARGB_4444:
            case android.graphics.Bitmap.Config.ARGB_8888:
            default:
            {
                //noinspection deprecation
                newConfig = android.graphics.Bitmap.Config.ARGB_8888;
                break;
            }
        }
    }
    if (m == null || m.isIdentity())
    {
        // No transform: plain copy through the canvas, no paint needed.
        bitmap = createBitmap(neww, newh, newConfig, source.hasAlpha());
        paint = null;
    }
    else
    {
        // not needed
        bool transformed = !m.rectStaysRect();
        // Map the destination rect through the matrix to size the output bitmap.
        android.graphics.RectF deviceR = new android.graphics.RectF();
        m.mapRect(deviceR, dstR);
        neww = Sharpen.Util.Round(deviceR.width());
        newh = Sharpen.Util.Round(deviceR.height());
        bitmap = createBitmap(neww, newh, transformed ? android.graphics.Bitmap.Config.ARGB_8888
             : newConfig, transformed || source.hasAlpha());
        canvas.translate(-deviceR.left, -deviceR.top);
        canvas.concat(m);
        paint = new android.graphics.Paint();
        paint.setFilterBitmap(filter);
        if (transformed)
        {
            paint.setAntiAlias(true);
        }
    }
    // The new bitmap was created from a known bitmap source so assume that
    // they use the same density
    bitmap.mDensity = source.mDensity;
    canvas.setBitmap(bitmap);
    canvas.drawBitmap(source, srcR, dstR, paint);
    canvas.setBitmap(null);
    return bitmap;
}
/// <summary>Initializes <c>mLayerPaint</c> with a default Paint.</summary>
private void init() { mLayerPaint = new android.graphics.Paint(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawBitmap(int[] colors, int offset, int stride, int x, int y, int width, int height, bool hasAlpha, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawText(string text, float x, float y, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawBitmap(android.graphics.Bitmap bitmap, android.graphics.Matrix matrix, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawBitmap(android.graphics.Bitmap bitmap, android.graphics.Rect src, android.graphics.RectF dst, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Draws the specified layer onto this canvas.</summary>
/// <remarks>Abstract: concrete canvas implementations supply the compositing behavior.</remarks>
/// <param name="layer">The layer to composite on this canvas</param>
/// <param name="x">The left coordinate of the layer</param>
/// <param name="y">The top coordinate of the layer</param>
/// <param name="paint">The paint used to draw the layer</param>
internal abstract void drawHardwareLayer(android.view.HardwareLayer layer, float x, float y, android.graphics.Paint paint);
/// <summary>Explicit implementation of LineBackgroundSpan.drawBackground: marshals every argument
/// to JNI values and invokes the Java-side method via the cached method id.</summary>
/// <remarks>Generated bridge code. When this proxy does not wrap a CLR-created object, a normal
/// (virtual) JNI call is made; otherwise the interface method is invoked non-virtually against
/// the interface's static class.</remarks>
void android.text.style.LineBackgroundSpan.drawBackground(android.graphics.Canvas arg0, android.graphics.Paint arg1, int arg2, int arg3, int arg4, int arg5, int arg6, java.lang.CharSequence arg7, int arg8, int arg9, int arg10)
{
    global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
    if (!IsClrObject)
    {
        @__env.CallVoidMethod(this.JvmHandle, global::android.text.style.LineBackgroundSpan_._drawBackground8293, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg6), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg7), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg8), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg9), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg10));
    }
    else
    {
        @__env.CallNonVirtualVoidMethod(this.JvmHandle, global::android.text.style.LineBackgroundSpan_.staticClass, global::android.text.style.LineBackgroundSpan_._drawBackground8293, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg3), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg4), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg5), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg6), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg7), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg8), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg9), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg10));
    }
}
/// <summary>Convenience overload: wraps the CLR string in a java.lang.String (as CharSequence) and forwards to the primary overload.</summary>
public void drawLeadingMargin(android.graphics.Canvas arg0, android.graphics.Paint arg1, int arg2, int arg3, int arg4, int arg5, int arg6, string arg7, int arg8, int arg9, bool arg10, android.text.Layout arg11) { drawLeadingMargin(arg0, arg1, arg2, arg3, arg4, arg5, arg6, (global::java.lang.CharSequence)(global::java.lang.String) arg7, arg8, arg9, arg10, arg11); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawRect(android.graphics.Rect r, android.graphics.Paint paint ) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawBitmapMesh(android.graphics.Bitmap bitmap, int meshWidth , int meshHeight, float[] verts, int vertOffset, int[] colors, int colorOffset, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawText(char[] text, int index, int count, float x, float y , android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawCircle(float cx, float cy, float radius, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Base implementation is not provided; always throws <see cref="System.NotImplementedException"/>. Overriders supply the actual leading-margin drawing.</summary>
public virtual void drawLeadingMargin(android.graphics.Canvas c, android.graphics.Paint p, int x, int dir, int top, int baseline, int bottom, java.lang.CharSequence text , int start, int end, bool first, android.text.Layout layout) { throw new System.NotImplementedException(); }
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawLine(float startX, float startY, float stopX, float stopY , android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Configures the stroke (width, color and optional dashing) of this drawable and redraws it.</summary>
/// <param name="width">stroke width in pixels.</param>
/// <param name="color">stroke color.</param>
/// <param name="dashWidth">length of a dash segment; zero or less disables dashing.</param>
/// <param name="dashGap">gap between dash segments.</param>
public virtual void setStroke(int width, int color, float dashWidth, float dashGap)
{
    mGradientState.setStroke(width, color, dashWidth, dashGap);

    // Build the stroke paint lazily, the first time a stroke is requested.
    if (mStrokePaint == null)
    {
        var strokePaint = new android.graphics.Paint(android.graphics.Paint.ANTI_ALIAS_FLAG);
        strokePaint.setStyle(android.graphics.Paint.Style.STROKE);
        mStrokePaint = strokePaint;
    }
    mStrokePaint.setStrokeWidth(width);
    mStrokePaint.setColor(color);

    // A null path effect clears any previously configured dashing.
    android.graphics.DashPathEffect dashEffect = dashWidth > 0
        ? new android.graphics.DashPathEffect(new float[] { dashWidth, dashGap }, 0)
        : null;
    mStrokePaint.setPathEffect(dashEffect);
    invalidateSelf();
}
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawLines(float[] pts, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>JNI bridge constructor: creates the Java-side android.text.TextPaint from an existing Paint
/// (via the cached constructor id) and binds this CLR proxy to the resulting JVM object.</summary>
public TextPaint(android.graphics.Paint arg0) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
    global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
    global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.text.TextPaint.staticClass, global::android.text.TextPaint._TextPaint7893, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
    Init(@__env, handle);
}
/// <summary>Not supported by this Canvas implementation; always throws <see cref="System.NotImplementedException"/>.</summary>
public override void drawTextOnPath(char[] text, int index, int count, android.graphics.Path path, float hOffset, float vOffset, android.graphics.Paint paint) { throw new System.NotImplementedException(); }
/// <summary>Activity entry point for the GearVR wind-wheel demo. Wires up the native GL surface
/// callbacks, multicast-UDP listeners (pen pressure, head parallax, mouse/WASDC, vertex transforms),
/// a stereo HUD overlay view with a background status thread, touch/key dispatch into the native
/// library, and camera-torch handling across pause/resume.</summary>
/// <param name="savedInstanceState">standard Android saved-state bundle, forwarded to the base class.</param>
protected override void onCreate(Bundle savedInstanceState)
{
    base.onCreate(savedInstanceState);
    Console.WriteLine("enter OVRWindWheelActivity onCreate");
    // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
    #region xCallback
    // Surface lifecycle callbacks forward the native window to the JNI layer.
    // All of them bail out while the native app thread has not been created yet (appThread == 0).
    // X:\jsc.svn\examples\java\android\synergy\OVRVrCubeWorldSurfaceView\OVRVrCubeWorldSurfaceView\ApplicationActivity.cs
    var xCallback = new xSurfaceHolder_Callback
    {
        onsurfaceCreated = holder =>
        {
            Console.WriteLine("enter onsurfaceCreated " + new { appThread });
            if (appThread == 0)
                return;
            // did we use it for float window?
            //holder.setFormat(android.graphics.PixelFormat.TRANSLUCENT);
            GLES3JNILib.onSurfaceCreated(holder.getSurface());
            xSurfaceHolder = holder;
            //Console.WriteLine("exit onsurfaceCreated " + new { appThread });
        },
        onsurfaceChanged = (SurfaceHolder holder, int format, int width, int height) =>
        {
            if (appThread == 0)
                return;
            GLES3JNILib.onSurfaceChanged(holder.getSurface());
            xSurfaceHolder = holder;
        },
        onsurfaceDestroyed = holder =>
        {
            //I/System.Console( 3549): 0ddd:0001 after OVRWindWheelActivity onCreate, attach the headset!
            //I/System.Console( 3549): 0ddd:0001 enter onsurfaceDestroyed
            //Console.WriteLine("enter onsurfaceDestroyed");
            if (appThread == 0)
                return;
            // I/DEBUG ( 2079): #01 pc 0000672f /data/app/OVRWindWheelActivity.Activities-1/lib/arm/libmain.so (Java_com_oculus_gles3jni_GLES3JNILib_onSurfaceDestroyed+46)
            GLES3JNILib.onSurfaceDestroyed();
            xSurfaceHolder = null;
            //appThread = 0;
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/pui_global_menu
        }
    };
    #endregion
    // https://github.com/dalinaum/TextureViewDemo
    // TextureView semi-translucent by calling myView.setAlpha(0.5f).
    // !! should we use TextureView instead?
    // https://groups.google.com/forum/#!topic/android-developers/jYjvm7ItpXQ
    //this.xSurfaceView.setZOrderOnTop(true); // necessary
    //this.xSurfaceView.getHolder().setFormat(android.graphics.PixelFormat.TRANSPARENT);
    // Captured by the UDP loops below so input is discarded while the activity is paused.
    var ActivityPaused = true;
    // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20160101/ovrwindwheelndk
    // A multicast lock is required on Android for the 239.1.2.3 group joins below to receive traffic.
    WifiManager wifi = (WifiManager)this.getSystemService(Context.WIFI_SERVICE);
    var lo = wifi.createMulticastLock("vrudp");
    lo.acquire();
    #region ReceiveAsync
    // https://www.youtube.com/watch?v=GpmKq_qg3Tk
    // Queue of pen-stroke draw ops replayed onto the HUD canvas each frame (capped at 1024).
    var HUDStylusList = new List<Action<android.graphics.Canvas>>();
    // http://uploadvr.com/vr-hmd-specs/
    Action<android.graphics.Canvas> HUDStylus = canvas =>
    {
        // video?
        // https://www.youtube.com/watch?v=JaTH_hoLDxc
        // so cool. we get to use pen in vr!s
        while (HUDStylusList.Count > 1024)
            HUDStylusList.RemoveAt(0);
        foreach (var item in HUDStylusList)
        {
            item(canvas);
        }
    };
    #region fUDPPressure
    // Listens on :40094 for pen packets (x, y, pressure as singles) and queues stereo line segments.
    Action<IPAddress> fUDPPressure = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151003/ovrwindwheelactivity
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(40094);
        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        //args.pre = "awaiting Parallax at " + nic + " :40094";
        var oldx = 0f;
        var oldy = 0f;
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async Continue ??
            var ux = await uu.ReceiveAsync();
            // did we jump to ui thread?
            // discard input?
            if (ActivityPaused)
                continue;
            // while we have the signal turn on torch/.
            var m = new BinaryReader(new MemoryStream(ux.Buffer));
            var x0 = m.ReadSingle();
            var x = 200 + x0 * 0.1f;
            var y0 = m.ReadSingle();
            var y = 1200 - y0 * 0.1f;
            var pressure = m.ReadSingle();
            // Snapshot current/previous point into an anonymous "segment" so the queued
            // lambda draws this stroke segment, not later mutations of the locals.
            new { x, y, oldx, oldy, pressure }.With(
                segment =>
                {
                    var paint = new android.graphics.Paint();
                    HUDStylusList.Add(
                        canvas =>
                        {
                            //c.lineTo((int)(x * 0.1), 400 - (int)(y * 0.1));
                            //c.lineWidth = 1 + (pressure / 255.0 * 7);
                            // paint.setStrokeWidth((int)(1 + (pressure / 255.0 * 6) * (pressure / 255.0 * 6)));
                            paint.setStyle(android.graphics.Paint.Style.STROKE);
                            if (pressure > 0)
                                paint.setColor(android.graphics.Color.YELLOW);
                            else
                                paint.setColor(android.graphics.Color.RED);
                            // draw once per stereo eye (panel is 2560px wide)
                            canvas.drawLine(segment.x, segment.y, segment.oldx, segment.oldy, paint);
                            canvas.drawLine(2560 / 2 + segment.x, segment.y, segment.oldx + 2560 / 2, segment.oldy, paint);
                        }
                    );
                }
            );
            oldx = x;
            oldy = y;
            args.pen = new { x, y, pressure }.ToString();
            //Console.WriteLine(new { args.parallax });
            //// or marshal memory?
            //var xy = args.mouse.Split(':');
            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion
    #region fParallax
    // Listens on :43834 for "tag:px:py:pz" head-parallax packets; also turns the camera torch on
    // (and keeps the camera in the nogc field) the first time a packet arrives.
    Action<IPAddress> fParallax = async nic =>
    {
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150712-1
        var uu = new UdpClient(43834);
        // X:\jsc.svn\examples\javascript\chrome\apps\ChromeFlashlightTracker\ChromeFlashlightTracker\Application.cs
        args.parallax = "awaiting Parallax at " + nic + " :43834";
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
        while (true)
        {
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151001/udppenpressure
            // did we break async Continue ??
            var x = await uu.ReceiveAsync();
            // did we jump to ui thread?
            // discard input?
            if (ActivityPaused)
                continue;
            // while we have the signal turn on torch/.
            #region await webcam feed
            if (nogc == null)
            {
                // partial ?
                var camera = android.hardware.Camera.open();
                android.hardware.Camera.Parameters p = camera.getParameters();
                p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_TORCH);
                camera.setParameters(p);
                camera.startPreview();
                nogc = camera;
            }
            #endregion
            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.parallax = Encoding.UTF8.GetString(x.Buffer);
            var xy = args.parallax.Split(':');
            //Console.WriteLine(new { args.parallax });
            //// or marshal memory?
            //var xy = args.mouse.Split(':');
            args.px = float.Parse(xy[1]);
            args.py = float.Parse(xy[2]);
            args.pz = float.Parse(xy[3]);
            //args.mousey = int.Parse(xy[1]);
            //// getchar?
            //args.ad = int.Parse(xy[2]);
            //args.ws = int.Parse(xy[3]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            //args.c = int.Parse(xy[4]);
            //// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            //args.mousebutton = int.Parse(xy[5]);
            //args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion
    #region fWASDC
    // Listens on :41814 for "mousex:mousey:ad:ws:c:button:wheel" packets (keyboard/mouse state).
    var fWASDCport = 41814;
    Action<IPAddress> fWASDC = async nic =>
    {
        var uu = new UdpClient(fWASDCport);
        args.mouse = "awaiting mouse and WASDC at " + nic + ":" + fWASDCport;
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
        while (true)
        {
            var x = await uu.ReceiveAsync();
            // did we jump to ui thread?
            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.mouse = Encoding.UTF8.GetString(x.Buffer);
            // or marshal memory?
            var xy = args.mouse.Split(':');
            args.mousex = int.Parse(xy[0]);
            args.mousey = int.Parse(xy[1]);
            // getchar?
            args.ad = int.Parse(xy[2]);
            args.ws = int.Parse(xy[3]);
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704
            args.c = int.Parse(xy[4]);
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150704/mousedown
            args.mousebutton = int.Parse(xy[5]);
            args.mousewheel = int.Parse(xy[6]);
        }
    };
    #endregion
    #region fvertexTransform
    // Listens on :40014 and stores the raw packet bytes as the current vertex transform blob.
    // X:\jsc.svn\examples\java\android\vr\OVRWindWheelNDK\OVRUDPMatrix\Program.cs
    Action<IPAddress> fvertexTransform = async nic =>
    {
        var uu = new UdpClient(40014);
        //args.mouse = "awaiting vertexTransform at " + nic + " :40014";
        // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
        // X:\jsc.svn\examples\java\android\LANBroadcastListener\LANBroadcastListener\ApplicationActivity.cs
        uu.JoinMulticastGroup(IPAddress.Parse("239.1.2.3"), nic);
        while (true)
        {
            var x = await uu.ReceiveAsync();
            // did we jump to ui thread?
            //Console.WriteLine("ReceiveAsync done " + Encoding.UTF8.GetString(x.Buffer));
            args.vertexTransform = x.Buffer;
        }
    };
    #endregion
    // Start all four listeners on every non-loopback IPv4 interface that supports multicast.
    NetworkInterface.GetAllNetworkInterfaces().WithEach(
        n =>
        {
            // X:\jsc.svn\examples\java\android\forms\FormsUDPJoinGroup\FormsUDPJoinGroup\ApplicationControl.cs
            // X:\jsc.svn\core\ScriptCoreLibJava\BCLImplementation\System\Net\NetworkInformation\NetworkInterface.cs
            var IPProperties = n.GetIPProperties();
            var PhysicalAddress = n.GetPhysicalAddress();
            foreach (var ip in IPProperties.UnicastAddresses)
            {
                // ipv4
                if (ip.Address.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
                {
                    if (!IPAddress.IsLoopback(ip.Address))
                        if (n.SupportsMulticast)
                        {
                            fUDPPressure(ip.Address);
                            fWASDC(ip.Address);
                            fParallax(ip.Address);
                            fvertexTransform(ip.Address);
                        }
                }
            }
        }
    );
    #endregion
    var sw = Stopwatch.StartNew();
    //var args = new object();
    // can we draw on back?
    #region mDraw
    // HUD overlay view; its text delegate is re-evaluated on every draw.
    var mDraw = new DrawOnTop(this)
    {
        // yes it appears top left.
        //text = "GearVR HUD"
        // (out) VrApi.vrapi_GetVersionString()
        text = () =>
        {
            // can we listen to udp?
            // like X:\jsc.svn\examples\java\android\AndroidServiceUDPNotification\AndroidServiceUDPNotification\ApplicationActivity.cs
            // in vr if the other service is running it can display vr notification
            // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150630/udp
            // lets run it, and see if we can see some vr notifications as we skip a video
            //if (args.total_allocated_space > 48 * 1024 * 1024)
            //    this.recreate();
            return sw.ElapsedMilliseconds + "ms | "
                + args.total_allocated_space + " bytes \n"
                + new { vertexTransform = args.vertexTransform.Length } + "\n"
                + args.mouse + "\n"
                + args.parallax + "\n"
                + args.vertexTransform.Length + "bytes udp\n"
                + new { args.pen } + "\n"
                //+ new { args.mousex, args.mousey } + "\n"
                + new
                {
                    //args.mousex, // left to right
                    //args.x,
                    //args.px,
                    args.px,
                    args.py,
                    args.pz,
                    // nod up +0.7 down -0.7
                    ox = args.tracking_HeadPose_Pose_Orientation_x,
                    // -0.7 right +0.7 left
                    oy = args.tracking_HeadPose_Pose_Orientation_y
                    // tilt right -0.7 tilt left + 0.7
                    //oz = args.tracking_HeadPose_Pose_Orientation_z
                    // ??
                    //ow = args.tracking_HeadPose_Pose_Orientation_w
                }.ToString().Replace(",", "\n");
        }
    };
    //Task.Run(
    // Alternative HUD text shown when the native heap passes the safe-mode limit.
    Func<string> safemode = () =>
    {
        return sw.ElapsedMilliseconds + "ms \n"
            + args.total_allocated_space + " bytes \n"
            + "GC safe mode / malloc limit..";
    };
    // canvas.drawText(text, x + 2560 / 2, y + i * 24, paint);
    mDraw.AtDraw = canvas =>
    {
        {
            // Diagnostic cross lines, one per stereo eye, plus replay of queued pen strokes.
            var paint = new android.graphics.Paint();
            paint.setStrokeWidth(16);
            paint.setStyle(android.graphics.Paint.Style.STROKE);
            paint.setColor(android.graphics.Color.RED);
            canvas.drawLine(0, 0, 400, 400, paint);
            canvas.drawLine(2560 / 2, 0, 400 + 2560 / 2, 400, paint);
            HUDStylus(canvas);
        }
        // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150717/replay
        // can w visually store tracking intel. like tvs do.
        {
            // https://code.google.com/p/android/issues/detail?id=4086
            var paint = new android.graphics.Paint();
            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
            // lets have left to right recorder as a color block
            //// nod up +0.7 down -0.7
            // cannot see it.
            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_x + 0.7) / 1.4);
            // I/System.Console( 8999): 2327:0001 AtDraw 16 0078af2e
            // why wont our tracking correctly show?
            //Console.WriteLine("AtDraw 16 " + rgb_left_to_right.ToString("x8"));
            //paint.setColor(android.graphics.Color.YELLOW);
            paint.setColor( (int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16, 0, 32, 32, paint);
        }
        // ox = args.tracking_HeadPose_Pose_Orientation_x,
        // oy = args.tracking_HeadPose_Pose_Orientation_y
        {
            // https://code.google.com/p/android/issues/detail?id=4086
            var paint = new android.graphics.Paint();
            paint.setStrokeWidth(0);
            paint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);
            //paint.setColor(android.graphics.Color.RED);
            // lets have left to right recorder as a color block
            //
            // -0.7 right +0.7 left
            var rgb_left_to_right = (int)(0xffffff * (args.tracking_HeadPose_Pose_Orientation_y + 0.7) / 1.4);
            //paint.setColor(android.graphics.Color.YELLOW);
            paint.setColor( (int)(0xff000000 | rgb_left_to_right));
            canvas.drawRect(16 + 64, 0, 320, 32, paint);
        }
    };
    // Background status thread: polls the native side every frame, colors the HUD by state,
    // and switches to the safemode text when the malloc limit is exceeded.
    new Thread(
        delegate()
        {
            // bg thread
            while (true)
            {
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 30);
                // fullspeed
                GLES3JNILib.stringFromJNI(args);
                // http://developer.android.com/reference/android/graphics/Color.html
                if (args.total_allocated_space > GLES3JNILib.safemodeMemoryLimitMB * 1024 * 1024)
                {
                    mDraw.color = android.graphics.Color.RED;
                    mDraw.alpha = 255;
                    mDraw.text = safemode;
                    // goto secondary activity?
                }
                else if (args.mousebutton != 0)
                {
                    // go a head. lean left or up
                    mDraw.color = android.graphics.Color.YELLOW;
                    mDraw.alpha = 255;
                }
                else
                {
                    mDraw.color = android.graphics.Color.GREEN;
                    // not leaning in?
                    if (args.pz < 0)
                    {
                        mDraw.color = android.graphics.Color.WHITE;
                    }
                    var BaseStationEdgeX = Math.Abs(args.px) > 0.3;
                    var BaseStationEdgeY = Math.Abs(args.py) > 0.3;
                    if (BaseStationEdgeX || BaseStationEdgeY )
                    {
                        // base station wont track ya for long..
                        // reorient?
                        // fade to black?
                        mDraw.color = android.graphics.Color.YELLOW;
                        mDraw.alpha = 255;
                    }
                }
                mDraw.postInvalidate();
                Thread.Sleep(1000 / 60);
                // https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20150716/ovrwindwheelactivity
                //Thread.Sleep(1000 / 15);
                //Thread.Sleep(1000 / 4);
            }
        }
    ).Start();
    #endregion
    #region ondispatchTouchEvent
    // Touch coordinates are re-centered on the 2560x1440 panel and forwarded both to the HUD
    // (as draw offsets) and to the native library.
    this.ondispatchTouchEvent = @event =>
    {
        if (appThread == 0)
            return;
        int action = @event.getAction();
        float x = @event.getRawX();
        float y = @event.getRawY();
        //if (action == MotionEvent.ACTION_UP)
        {
            var halfx = 2560 / 2;
            var halfy = 1440 / 2;
            // touch sending int to offfset the cubes
            this.args.x = (int)(halfx - x);
            this.args.y = (int)(y - halfy);
            mDraw.x = (int)(500 + halfx - x);
            mDraw.y = (int)(600 + y - halfy);
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { x, y, action }.ToString();
            //Console.WriteLine(" ::dispatchTouchEvent( " + action + ", " + x + ", " + y + " )");
        }
        GLES3JNILib.onTouchEvent(action, x, y);
        // can we move hud around and record it to gif or mp4?
    };
    #endregion
    #region ondispatchKeyEvent
    // Only DOWN/UP are forwarded to the native library; anything else falls back to the base dispatch.
    this.ondispatchKeyEvent = @event =>
    {
        if (appThread == 0)
            return false;
        int keyCode = @event.getKeyCode();
        int action = @event.getAction();
        if (action != KeyEvent.ACTION_DOWN && action != KeyEvent.ACTION_UP)
        {
            return base.dispatchKeyEvent(@event);
        }
        if (action == KeyEvent.ACTION_UP)
        {
            // keycode 4
            //mDraw.text = () => sw.ElapsedMilliseconds + "ms \n" + new { keyCode, action }.ToString();
            //Log.v(TAG, "GLES3JNIActivity::dispatchKeyEvent( " + keyCode + ", " + action + " )");
        }
        GLES3JNILib.onKeyEvent(keyCode, action);
        return true;
    };
    #endregion
    // Pause: stop native rendering and release the torch camera acquired by fParallax.
    AtPause = delegate
    {
        ActivityPaused = true;
        GLES3JNILib.onPause();
        // http://www.mkyong.com/android/how-to-turn-onoff-camera-ledflashlight-in-android/
        if (nogc != null)
        {
            var camera = nogc;
            var p = camera.getParameters();
            p.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_OFF);
            camera.setParameters(p);
            camera.stopPreview();
            camera.release();
            nogc = null;
        }
    };
    // Resume: rebuild the GL surface + HUD overlay and resume native rendering.
    AtResume = delegate
    {
        //Console.WriteLine("enter onResume");
        ActivityPaused = false;
        // http://stackoverflow.com/questions/3527621/how-to-pause-and-resume-a-surfaceview-thread
        // http://stackoverflow.com/questions/10277694/resume-to-surfaceview-shows-black-screen
        //this.xSurfaceView.onres
        // You must ensure that the drawing thread only touches the underlying Surface while it is valid
        this.xSurfaceView = new SurfaceView(this);
        this.setContentView(xSurfaceView);
        this.addContentView(mDraw, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        this.xSurfaceView.getHolder().addCallback(xCallback);
        GLES3JNILib.onResume();
    };
    // canw e add a camera too?
    // stackoverflow.com/questions/20936480/how-to-make-surfaceview-transparent-background
    //this.setContentView(mDraw);
    //this.addContentView(xSurfaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
    // sometimes system wants to try to black the screen it seems..
    getWindow().addFlags(WindowManager_LayoutParams.FLAG_KEEP_SCREEN_ON);
    appThread = com.oculus.gles3jni.GLES3JNILib.onCreate(this);
    Console.WriteLine("after OVRWindWheelActivity onCreate, attach the headset!");
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <param name="text">the text that would be drawn along the path.</param>
/// <param name="path">the path the text would follow.</param>
/// <param name="hOffset">horizontal offset along the path.</param>
/// <param name="vOffset">vertical offset from the path.</param>
/// <param name="paint">paint that would style the text.</param>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawTextOnPath(string text, android.graphics.Path path, float hOffset, float vOffset, android.graphics.Paint paint)
{
    // Path-following text rendering has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Resets the drawing state: a fresh blue 3px stroke paint, an empty
/// rectangle and offset, and the default start gravity.
/// </summary>
public virtual void clearRect()
{
    // Recreate the paint with the default outline settings.
    mPaint = new Paint
    {
        Color = Color.BLUE,
        Style = Paint.Style.STROKE,
        StrokeWidth = 3
    };

    // Drop any previously tracked geometry and gravity.
    mRect = new Rect();
    mOffset = new Point();
    mGravity = GRAVITY_START;
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawVertices(android.graphics.Canvas.VertexMode mode, int vertexCount, float[] verts, int vertOffset, float[] texs, int texOffset, int[] colors, int colorOffset, short[] indices, int indexOffset, int indexCount, android.graphics.Paint paint)
{
    // Triangle-mesh drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Draws the fast-scroll thumb (and optional track / section-index overlay)
/// onto the list's canvas. Does nothing while the scroller is in STATE_NONE.
/// </summary>
/// <param name="canvas">the list view's canvas.</param>
public virtual void draw(android.graphics.Canvas canvas)
{
    if (mState == STATE_NONE)
    {
        // No need to draw anything
        return;
    }
    int y = mThumbY;
    int viewWidth = mList.getWidth();
    android.widget.FastScroller.ScrollFade scrollFade = mScrollFade;
    // alpha stays -1 unless we are in the fade-out (STATE_EXIT) path below.
    int alpha = -1;
    if (mState == STATE_EXIT)
    {
        alpha = scrollFade.getAlpha();
        if (alpha < android.widget.FastScroller.ScrollFade.ALPHA_MAX / 2)
        {
            // Second half of the fade: start dimming the thumb itself.
            mThumbDrawable.setAlpha(alpha * 2);
        }
        int left = 0;
        switch (mPosition)
        {
            case android.view.View.SCROLLBAR_POSITION_DEFAULT:
            case android.view.View.SCROLLBAR_POSITION_RIGHT:
            {
                // Slide the thumb off the right edge as alpha decreases.
                left = viewWidth - (mThumbW * alpha) / android.widget.FastScroller.ScrollFade.ALPHA_MAX;
                break;
            }
            case android.view.View.SCROLLBAR_POSITION_LEFT:
            {
                // Slide the thumb off the left edge as alpha decreases.
                left = -mThumbW + (mThumbW * alpha) / android.widget.FastScroller.ScrollFade.ALPHA_MAX;
                break;
            }
        }
        mThumbDrawable.setBounds(left, 0, left + mThumbW, mThumbH);
        mChangedBounds = true;
    }
    if (mTrackDrawable != null)
    {
        // Center the track horizontally on the thumb and inset it vertically
        // by half the thumb height at both ends.
        android.graphics.Rect thumbBounds = mThumbDrawable.getBounds();
        int left = thumbBounds.left;
        int halfThumbHeight = (thumbBounds.bottom - thumbBounds.top) / 2;
        int trackWidth = mTrackDrawable.getIntrinsicWidth();
        int trackLeft = (left + mThumbW / 2) - trackWidth / 2;
        mTrackDrawable.setBounds(trackLeft, halfThumbHeight, trackLeft + trackWidth, mList.getHeight() - halfThumbHeight);
        mTrackDrawable.draw(canvas);
    }
    // Draw the thumb at its vertical position, then undo the translation.
    canvas.translate(0, y);
    mThumbDrawable.draw(canvas);
    canvas.translate(0, -y);
    // If user is dragging the scroll bar, draw the alphabet overlay
    if (mState == STATE_DRAGGING && mDrawOverlay)
    {
        if (mOverlayPosition == OVERLAY_AT_THUMB)
        {
            // Keep the overlay beside the thumb, clamped inside the list.
            int left = 0;
            switch (mPosition)
            {
                case android.view.View.SCROLLBAR_POSITION_DEFAULT:
                case android.view.View.SCROLLBAR_POSITION_RIGHT:
                default:
                {
                    left = System.Math.Max(0, mThumbDrawable.getBounds().left - mThumbW - mOverlaySize);
                    break;
                }
                case android.view.View.SCROLLBAR_POSITION_LEFT:
                {
                    left = System.Math.Min(mThumbDrawable.getBounds().right + mThumbW, mList.getWidth() - mOverlaySize);
                    break;
                }
            }
            int top = System.Math.Max(0, System.Math.Min(y + (mThumbH - mOverlaySize) / 2, mList.getHeight() - mOverlaySize));
            android.graphics.RectF pos = mOverlayPos;
            pos.left = left;
            pos.right = pos.left + mOverlaySize;
            pos.top = top;
            pos.bottom = pos.top + mOverlaySize;
            if (mOverlayDrawable != null)
            {
                mOverlayDrawable.setBounds((int)pos.left, (int)pos.top, (int)pos.right, (int)pos.bottom);
            }
        }
        // NOTE(review): mOverlayDrawable is dereferenced unconditionally here
        // even though it is null-checked just above — presumably it is
        // guaranteed non-null while dragging; confirm against init().
        mOverlayDrawable.draw(canvas);
        android.graphics.Paint paint = mPaint;
        float descent = paint.descent();
        android.graphics.RectF rectF = mOverlayPos;
        android.graphics.Rect tmpRect = mTmpRect;
        mOverlayDrawable.getPadding(tmpRect);
        int hOff = (tmpRect.right - tmpRect.left) / 2;
        int vOff = (tmpRect.bottom - tmpRect.top) / 2;
        // Center the section label inside the overlay, compensating for the
        // drawable padding and the font descent.
        canvas.drawText(mSectionText, (int)(rectF.left + rectF.right) / 2 - hOff, (int)(rectF.bottom + rectF.top) / 2 + mOverlaySize / 4 - descent - vOff, paint);
    }
    else
    {
        if (mState == STATE_EXIT)
        {
            if (alpha == 0)
            {
                // Done with exit
                setState(STATE_NONE);
            }
            else
            {
                // Keep invalidating the thumb region until the fade finishes.
                if (mTrackDrawable != null)
                {
                    mList.invalidate(viewWidth - mThumbW, 0, viewWidth, mList.getHeight());
                }
                else
                {
                    mList.invalidate(viewWidth - mThumbW, y, viewWidth, y + mThumbH);
                }
            }
        }
    }
}
/// <summary>Not supported by this recording canvas.</summary>
/// <param name="paint">paint whose shader bitmap would be recorded.</param>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
private void recordShaderBitmap(android.graphics.Paint paint)
{
    // Shader-bitmap recording has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawRect(float left, float top, float right, float bottom, android.graphics.Paint paint)
{
    // Rectangle drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Returns the paint for this object, lazily creating it on first access
/// with the default dither flag applied.
/// </summary>
/// <returns>the (possibly freshly created) paint instance.</returns>
public virtual android.graphics.Paint getPaint()
{
    if (mPaint != null)
    {
        return mPaint;
    }
    // First access: build the paint and apply the class-wide dither default.
    var paint = new android.graphics.Paint();
    paint.setDither(DEFAULT_DITHER);
    mPaint = paint;
    return mPaint;
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawRoundRect(android.graphics.RectF rect, float rx, float ry, android.graphics.Paint paint)
{
    // Rounded-rectangle drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Copy constructor: duplicates another bitmap state, sharing the bitmap
/// (via the chained constructor) but cloning the paint.
/// </summary>
/// <param name="bitmapState">the state to copy from.</param>
internal BitmapState(android.graphics.drawable.BitmapDrawable.BitmapState bitmapState)
    : this(bitmapState.mBitmap)
{
    // The paint is cloned so the copy can be mutated independently.
    mPaint = new android.graphics.Paint(bitmapState.mPaint);

    // Plain value members are copied directly.
    mChangingConfigurations = bitmapState.mChangingConfigurations;
    mGravity = bitmapState.mGravity;
    mTileModeX = bitmapState.mTileModeX;
    mTileModeY = bitmapState.mTileModeY;
    mTargetDensity = bitmapState.mTargetDensity;
    mRebuildShader = bitmapState.mRebuildShader;
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawText(java.lang.CharSequence text, int start, int end, float x, float y, android.graphics.Paint paint)
{
    // CharSequence text drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Renders the gradient shape (rectangle / oval / line / ring) with the
/// current fill and stroke paints, optionally compositing through an
/// offscreen layer when a translucent stroke would double-blend with the
/// fill.
/// </summary>
/// <param name="canvas">target canvas.</param>
public override void draw(android.graphics.Canvas canvas)
{
    if (!ensureValidRect())
    {
        // nothing to draw
        return;
    }
    // remember the alpha values, in case we temporarily overwrite them
    // when we modulate them with mAlpha
    int prevFillAlpha = mFillPaint.getAlpha();
    int prevStrokeAlpha = mStrokePaint != null ? mStrokePaint.getAlpha() : 0;
    // compute the modulate alpha values
    int currFillAlpha = modulateAlpha(prevFillAlpha);
    int currStrokeAlpha = modulateAlpha(prevStrokeAlpha);
    bool haveStroke = currStrokeAlpha > 0 && mStrokePaint.getStrokeWidth() > 0;
    bool haveFill = currFillAlpha > 0;
    android.graphics.drawable.GradientDrawable.GradientState st = mGradientState;
    // A translucent stroke drawn over the fill would blend with it where they
    // overlap; drawing both into a layer and compositing once avoids that.
    bool useLayer = haveStroke && haveFill && st.mShape != LINE && currStrokeAlpha < 255 && (mAlpha < 255 || mColorFilter != null);
    if (useLayer)
    {
        if (mLayerPaint == null)
        {
            mLayerPaint = new android.graphics.Paint();
        }
        mLayerPaint.setDither(mDither);
        mLayerPaint.setAlpha(mAlpha);
        mLayerPaint.setColorFilter(mColorFilter);
        // Expand the layer by the stroke width so the outline is not clipped.
        float rad = mStrokePaint.getStrokeWidth();
        canvas.saveLayer(mRect.left - rad, mRect.top - rad, mRect.right + rad, mRect.bottom + rad, mLayerPaint, android.graphics.Canvas.HAS_ALPHA_LAYER_SAVE_FLAG);
        // don't perform the filter in our individual paints
        // since the layer will do it for us
        mFillPaint.setColorFilter(null);
        mStrokePaint.setColorFilter(null);
    }
    else
    {
        mFillPaint.setAlpha(currFillAlpha);
        mFillPaint.setDither(mDither);
        mFillPaint.setColorFilter(mColorFilter);
        if (haveStroke)
        {
            mStrokePaint.setAlpha(currStrokeAlpha);
            mStrokePaint.setDither(mDither);
            mStrokePaint.setColorFilter(mColorFilter);
        }
    }
    switch (st.mShape)
    {
        case RECTANGLE:
        {
            if (st.mRadiusArray != null)
            {
                // Per-corner radii: rebuild the cached path only when dirty.
                if (mPathIsDirty || mRectIsDirty)
                {
                    mPath.reset();
                    mPath.addRoundRect(mRect, st.mRadiusArray, android.graphics.Path.Direction.CW);
                    mPathIsDirty = mRectIsDirty = false;
                }
                canvas.drawPath(mPath, mFillPaint);
                if (haveStroke)
                {
                    canvas.drawPath(mPath, mStrokePaint);
                }
            }
            else
            {
                if (st.mRadius > 0.0f)
                {
                    // since the caller is only giving us 1 value, we will force
                    // it to be square if the rect is too small in one dimension
                    // to show it. If we did nothing, Skia would clamp the rad
                    // independently along each axis, giving us a thin ellipse
                    // if the rect were very wide but not very tall
                    float rad = st.mRadius;
                    float r = System.Math.Min(mRect.width(), mRect.height()) * 0.5f;
                    if (rad > r)
                    {
                        rad = r;
                    }
                    canvas.drawRoundRect(mRect, rad, rad, mFillPaint);
                    if (haveStroke)
                    {
                        canvas.drawRoundRect(mRect, rad, rad, mStrokePaint);
                    }
                }
                else
                {
                    canvas.drawRect(mRect, mFillPaint);
                    if (haveStroke)
                    {
                        canvas.drawRect(mRect, mStrokePaint);
                    }
                }
            }
            break;
        }
        case OVAL:
        {
            canvas.drawOval(mRect, mFillPaint);
            if (haveStroke)
            {
                canvas.drawOval(mRect, mStrokePaint);
            }
            break;
        }
        case LINE:
        {
            // A line is stroke-only, drawn through the vertical center.
            // NOTE(review): mStrokePaint is used without a null check here —
            // presumably a LINE shape always configures a stroke; confirm.
            android.graphics.RectF r = mRect;
            float y = r.centerY();
            canvas.drawLine(r.left, y, r.right, y, mStrokePaint);
            break;
        }
        case RING:
        {
            android.graphics.Path path = buildRing(st);
            canvas.drawPath(path, mFillPaint);
            if (haveStroke)
            {
                canvas.drawPath(path, mStrokePaint);
            }
            break;
        }
    }
    if (useLayer)
    {
        canvas.restore();
    }
    else
    {
        // Restore the alpha values saved at the top of the method.
        mFillPaint.setAlpha(prevFillAlpha);
        if (haveStroke)
        {
            mStrokePaint.setAlpha(prevStrokeAlpha);
        }
    }
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawPatch(android.graphics.Bitmap bitmap, byte[] chunks, android.graphics.RectF dst, android.graphics.Paint paint)
{
    // Nine-patch drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// One-time setup for the fast scroller: resolves the themed drawables,
/// configures the overlay text paint and seeds the scroll state.
/// </summary>
/// <param name="context">context used to resolve theme attributes and resources.</param>
private void init(android.content.Context context)
{
    // Get both the scrollbar states drawables
    android.content.res.TypedArray styledAttrs = context.getTheme().obtainStyledAttributes(ATTRS);
    useThumbDrawable(context, styledAttrs.getDrawable(THUMB_DRAWABLE));
    mTrackDrawable = styledAttrs.getDrawable(TRACK_DRAWABLE);
    mOverlayDrawableLeft = styledAttrs.getDrawable(PREVIEW_BACKGROUND_LEFT);
    mOverlayDrawableRight = styledAttrs.getDrawable(PREVIEW_BACKGROUND_RIGHT);
    mOverlayPosition = styledAttrs.getInt(OVERLAY_POSITION, OVERLAY_FLOATING);

    mScrollCompleted = true;
    getSectionsFromIndexer();

    mOverlaySize = context.getResources().getDimensionPixelSize([email protected]_overlay_size);
    mOverlayPos = new android.graphics.RectF();
    mScrollFade = new android.widget.FastScroller.ScrollFade(this);

    // Paint used for the section-index overlay text.
    mPaint = new android.graphics.Paint();
    mPaint.setAntiAlias(true);
    mPaint.setTextAlign(android.graphics.Paint.Align.CENTER);
    mPaint.setTextSize(mOverlaySize / 2);
    android.content.res.ColorStateList overlayTextColor = styledAttrs.getColorStateList(TEXT_COLOR);
    mPaint.setColor(overlayTextColor.getDefaultColor());
    mPaint.setStyle(android.graphics.Paint.Style.FILL_AND_STROKE);

    // to show mOverlayDrawable properly
    if (mList.getWidth() > 0 && mList.getHeight() > 0)
    {
        onSizeChanged(mList.getWidth(), mList.getHeight(), 0, 0);
    }
    mState = STATE_NONE;
    refreshDrawableState();
    styledAttrs.recycle();

    mScaledTouchSlop = android.view.ViewConfiguration.get(context).getScaledTouchSlop();
    // Pre-Honeycomb apps keep the legacy thumb-position mapping.
    mMatchDragPosition = context.getApplicationInfo().targetSdkVersion >= android.os.Build.VERSION_CODES.HONEYCOMB;
    setScrollbarPosition(mList.getVerticalScrollbarPosition());
}
/// <summary>
/// Creates a shape state, either by sharing members from an existing state
/// (clone path) or by starting fresh with an anti-aliased default paint.
/// </summary>
/// <param name="orig">state to copy from, or null for a fresh state.</param>
internal ShapeState(android.graphics.drawable.ShapeDrawable.ShapeState orig)
{
    if (orig == null)
    {
        // Fresh state: only the default paint needs to exist up front.
        mPaint = new android.graphics.Paint(android.graphics.Paint.ANTI_ALIAS_FLAG);
        return;
    }
    // Shallow copy: paint, shape and padding are shared, not cloned.
    mPaint = orig.mPaint;
    mShape = orig.mShape;
    mPadding = orig.mPadding;
    mIntrinsicWidth = orig.mIntrinsicWidth;
    mIntrinsicHeight = orig.mIntrinsicHeight;
    mAlpha = orig.mAlpha;
    mShaderFactory = orig.mShaderFactory;
}
/// <summary>Not supported by this canvas implementation.</summary>
/// <exception cref="System.NotImplementedException">always thrown.</exception>
public override void drawBitmap(android.graphics.Bitmap bitmap, float left, float top, android.graphics.Paint paint)
{
    // Bitmap drawing has not been ported yet.
    throw new System.NotImplementedException();
}
/// <summary>
/// Draws this span's content for the text range [start, end) onto the canvas.
/// </summary>
/// <param name="canvas">target canvas.</param>
/// <param name="text">the full text; presumably only [start, end) belongs to this span — confirm against callers.</param>
/// <param name="start">inclusive start index into text.</param>
/// <param name="end">exclusive end index into text.</param>
/// <param name="x">horizontal drawing position.</param>
/// <param name="top">top of the line area.</param>
/// <param name="y">baseline position.</param>
/// <param name="bottom">bottom of the line area.</param>
/// <param name="paint">paint to draw with.</param>
public abstract void draw(android.graphics.Canvas canvas, java.lang.CharSequence text, int start, int end, float x, int top, int y, int bottom, android.graphics.Paint paint);
/// <summary>
/// Measures this span for the text range [start, end).
/// </summary>
/// <param name="paint">paint that would be used to draw the span.</param>
/// <param name="text">the full text; only [start, end) belongs to this span.</param>
/// <param name="start">inclusive start index into text.</param>
/// <param name="end">exclusive end index into text.</param>
/// <param name="fm">font metrics to fill in; may be null — NOTE(review): nullability not visible here, confirm against implementations.</param>
/// <returns>the measured width of the span.</returns>
public abstract int getSize(android.graphics.Paint paint, java.lang.CharSequence text, int start, int end, android.graphics.Paint.FontMetricsInt fm);
/// <summary>
/// Replaces the paint used by this object. The reference is stored as-is
/// (not copied), so later mutations of <paramref name="p"/> are visible here.
/// </summary>
/// <param name="p">the new paint to use.</param>
public virtual void setPaint(android.graphics.Paint p) { mPaint = p; }
/// <summary>
/// Inflates this drawable from an XML &lt;bitmap&gt; element: decodes the
/// mandatory src resource, then applies the optional antialias / filter /
/// dither / gravity / tileMode attributes.
/// </summary>
/// <param name="r">resources used to resolve the src bitmap.</param>
/// <param name="parser">parser positioned at the &lt;bitmap&gt; element (used for error positions).</param>
/// <param name="attrs">the element's attribute set.</param>
/// <exception cref="org.xmlpull.v1.XmlPullParserException">if src is missing or cannot be decoded.</exception>
public override void inflate(android.content.res.Resources r, org.xmlpull.v1.XmlPullParser parser, android.util.AttributeSet attrs)
{
    base.inflate(r, parser, attrs);
    android.content.res.TypedArray a = r.obtainAttributes(attrs, [email protected]);
    int id = a.getResourceId([email protected]_src, 0);
    if (id == 0)
    {
        throw new org.xmlpull.v1.XmlPullParserException(parser.getPositionDescription() + ": <bitmap> requires a valid src attribute");
    }
    android.graphics.Bitmap bitmap = android.graphics.BitmapFactory.decodeResource(r, id);
    if (bitmap == null)
    {
        // Same message as the id == 0 case: a src that fails to decode is
        // treated the same as a missing one.
        throw new org.xmlpull.v1.XmlPullParserException(parser.getPositionDescription() + ": <bitmap> requires a valid src attribute");
    }
    mBitmapState.mBitmap = bitmap;
    setBitmap(bitmap);
    setTargetDensity(r.getDisplayMetrics());
    // Attribute defaults come from the current paint state, so unspecified
    // flags are left untouched.
    android.graphics.Paint paint = mBitmapState.mPaint;
    paint.setAntiAlias(a.getBoolean([email protected]_antialias, paint.isAntiAlias()));
    paint.setFilterBitmap(a.getBoolean([email protected]_filter, paint.isFilterBitmap()));
    paint.setDither(a.getBoolean([email protected]_dither, paint.isDither()));
    setGravity(a.getInt([email protected]_gravity, android.view.Gravity.FILL));
    // -1 means "attribute absent": keep the existing tile modes.
    int tileMode = a.getInt([email protected]_tileMode, -1);
    if (tileMode != -1)
    {
        switch (tileMode)
        {
            case 0:
            {
                setTileModeXY(android.graphics.Shader.TileMode.CLAMP, android.graphics.Shader.TileMode.CLAMP);
                break;
            }
            case 1:
            {
                setTileModeXY(android.graphics.Shader.TileMode.REPEAT, android.graphics.Shader.TileMode.REPEAT);
                break;
            }
            case 2:
            {
                setTileModeXY(android.graphics.Shader.TileMode.MIRROR, android.graphics.Shader.TileMode.MIRROR);
                break;
            }
        }
    }
    a.recycle();
}
/// <summary>
/// Applies a gradient state's solid color, padding and stroke configuration
/// to this drawable's paints. A negative stroke width means "no stroke".
/// </summary>
/// <param name="state">the shared constant state to read from.</param>
internal void initializeWithState(android.graphics.drawable.GradientDrawable.GradientState state)
{
    if (state.mHasSolidColor)
    {
        mFillPaint.setColor(state.mSolidColor);
    }
    mPadding = state.mPadding;

    if (state.mStrokeWidth < 0)
    {
        // No stroke configured; leave mStrokePaint unset.
        return;
    }

    // Build the stroke paint from the state's width, color and optional dash.
    mStrokePaint = new android.graphics.Paint(android.graphics.Paint.ANTI_ALIAS_FLAG);
    mStrokePaint.setStyle(android.graphics.Paint.Style.STROKE);
    mStrokePaint.setStrokeWidth(state.mStrokeWidth);
    mStrokePaint.setColor(state.mStrokeColor);
    if (state.mStrokeDashWidth != 0.0f)
    {
        mStrokePaint.setPathEffect(new android.graphics.DashPathEffect(new float[] { state.mStrokeDashWidth, state.mStrokeDashGap }, 0));
    }
}