// Activity: MainActivity
/// <summary>
/// Builds the 2x2 start-menu regions of interest and registers them with the
/// given gesture detector as a single ROI set named "StartMenu".
/// </summary>
public static void ConnectROIs(GestureDetector gestureDetector)
{
    var startMenuRois = new List<ROI>
    {
        ROIData.CreateROI("Music", 1, 1, "", 1, 1),
        ROIData.CreateROI("GPS", 2, 1, "", 1, 2),
        ROIData.CreateROI("Contacts", 3, 1, "", 2, 1),
        ROIData.CreateROI("Extras", 4, 1, "", 2, 2)
    };
    gestureDetector.RegisterROISet(new ROISet(startMenuRois, "StartMenu", true));
}
// Update is called once per frame
void Update () {
    if (_Reader != null) {
        // Latch sensor availability once the hardware reports it.
        if (_Sensor.IsAvailable)
            kinectEnabled = true;
        var frame = _Reader.AcquireLatestFrame();
        if (frame != null) {
            // Lazily size the body buffer from the sensor's reported capacity.
            if (_Data == null) {
                _Data = new Body[_Sensor.BodyFrameSource.BodyCount];
            }
            frame.GetAndRefreshBodyData(_Data);
            // Release the frame promptly so the reader can deliver the next one.
            // NOTE(review): if GetAndRefreshBodyData throws, the frame leaks —
            // a using block would be safer; confirm before changing.
            frame.Dispose();
            frame = null;
            // we may have lost/acquired bodies, so update the corresponding gesture detectors
            // loop through all bodies to see if any of the gesture detectors need to be updated
            for (int i = 0; i < _Data.Length; ++i) {
                // Grow the detector list until there is one detector per body slot.
                if (gestureDetectorList.Count <= i) {
                    //Debug.Log(i);
                    GestureDetector detector = new GestureDetector(_Sensor, this);
                    this.gestureDetectorList.Add(detector);
                }
                Body body = _Data[i];
                ulong trackingId = body.TrackingId;
                GestureDetector currentDetector = gestureDetectorList[i];
                // Forward a fired shot from this detector to the handler.
                if (currentDetector.shotFired) {
                    shotFired(currentDetector.TrackingId);
                }
                // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                if (trackingId != this.gestureDetectorList[i].TrackingId) {
                    this.gestureDetectorList[i].TrackingId = trackingId;
                    // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                    // if the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                    this.gestureDetectorList[i].IsPaused = trackingId == 0;
                }
            }
        }
    }
}
/// <summary>
/// Creates the fragment and wires a gesture detector bound to this instance.
/// </summary>
public TaskFragment( ) : base( )
{
    var tapListener = new TaskFragmentGestureDetector(this);
    GestureDetector = new GestureDetector(Rock.Mobile.PlatformSpecific.Android.Core.Context, tapListener);
}
/// <summary>
/// On resume: reconnect to the messaging backend, read any unseen messages for
/// the phone number in <c>extra</c>, compose the Spanish text-to-speech prompt,
/// and set up speech recognition, gesture detection and TTS.
/// Fixes: removed unused exception locals (compiler warnings) and redundant
/// ToString() calls inside interpolated strings.
/// </summary>
protected override void OnResume()
{
    base.OnResume();
    configRepository = new ConfigRepository(database);
    configuracion = configRepository.GetConfig();
    loginService = new LoginService();
    messageService = new MessageService();
    errorText = new ErrorText();
    try
    {
        client = loginService.Connect();
        if (client.IsUserAuthorized())
        {
            usuario = client.Session.TLUser;
        }
    }
    catch (Exception)
    {
        // Without a backend connection this screen cannot work; close the task.
        this.FinishAffinity();
    }
    try
    {
        _chats = messageRepository.GetMessagesByPhoneWithoutSeen(extra);
        contact = contactRepository.GetContactByPhone(extra);
        messageRepository.MarkMessagesAsRead(extra);
        // Total characters across all unread messages; single-utterance TTS is
        // only used below this cap.
        var total = _chats.Sum(x => x.Mensaje.Length);
        if (_chats.Count > 0)
        {
            textToSpeak = $"Los mensajes nuevos de {contact.FirstName} {contact.LastName} son: ";
            if (total < 3900)
            {
                // Enumerate messages as "1 <msg>, 2 <msg>, ... y N <msg>".
                for (int i = 0; i < _chats.Count; i++)
                {
                    int j = i + 1;
                    if (i == _chats.Count - 1)
                    {
                        textToSpeak += $" y {j} {_chats[i].Mensaje}";
                    }
                    else
                    {
                        textToSpeak += $"{j} {_chats[i].Mensaje}, ";
                    }
                }
                textToSpeak += ". ¿Quiere responder?";
                accion = "responder";
            }
            else
            {
                // Too long for one utterance; hand off to the streaming reader.
                LeerMensajesSinLeer();
            }
        }
        else
        {
            textToSpeak = "¿Quiere leer desde una fecha, buscar por un mensaje, no hacer nada o volver atrás?";
            accion = "leer";
        }
    }
    catch (Exception)
    {
        // Database failure: fall back to the generic prompt.
        textToSpeak = "Ha ocurrido un error al acceder a la base de datos. ¿Quiere leer desde una fecha, buscar por un mensaje, no hacer nada o volver atrás?";
        accion = "leer";
    }
    // Spanish speech recognition plus text-to-speech setup.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Remembers the joint at (postIdx - 1, jointIdx) of the given detector as the
/// "last post" joint, along with the detector and the zero-based post index.
/// </summary>
public void SetLastPostJoint(GestureDetector gd, int postIdx, int jointIdx)
{
    int zeroBasedPostIdx = postIdx - 1;
    // Index lookup happens first, as in the original, so a failed lookup
    // leaves the cached fields untouched.
    m_lastPostJoint = gd[zeroBasedPostIdx][jointIdx];
    m_gd = gd;
    m_lastPostIdx = zeroBasedPostIdx;
}
/// <summary>
/// Builds the day grid for one displayed month: blank filler cells before day
/// 1 (locale-dependent), then one cell per day with selected / disabled /
/// today styling; enabled days are tappable and call onChanged.
/// </summary>
public override Widget build(BuildContext context) {
    ColorScheme colorScheme = Theme.of(context).colorScheme;
    MaterialLocalizations localizations = MaterialLocalizations.of(context);
    TextTheme textTheme = Theme.of(context).textTheme;
    TextStyle dayStyle = textTheme.caption;
    // Material opacities: 87% for enabled day text, 38% for disabled.
    Color enabledDayColor = colorScheme.onSurface.withOpacity(0.87f);
    Color disabledDayColor = colorScheme.onSurface.withOpacity(0.38f);
    Color selectedDayColor = colorScheme.onPrimary;
    Color selectedDayBackground = colorScheme.primary;
    Color todayColor = colorScheme.primary;
    int year = displayedMonth.Year;
    int month = displayedMonth.Month;
    int daysInMonth = utils.getDaysInMonth(year, month);
    // Number of blank leading cells, based on the locale's first weekday.
    int dayOffset = utils.firstDayOffset(year, month, localizations);
    List <Widget> dayItems = new List <Widget>();
    // 1-based day of month, e.g. 1-31 for January, and 1-29 for February on
    // a leap year.
    int day = -dayOffset;
    while (day < daysInMonth) {
        day++;
        if (day < 1) {
            // Filler cell before the first day of the month.
            dayItems.Add(new Container());
        }
        else {
            DateTime dayToBuild = new DateTime(year, month, day);
            // Disabled when outside [firstDate, lastDate] or rejected by the
            // optional predicate.
            bool isDisabled = dayToBuild > lastDate ||
                              dayToBuild < firstDate ||
                              (selectableDayPredicate != null && !selectableDayPredicate(dayToBuild));
            BoxDecoration decoration = null;
            Color dayColor = enabledDayColor;
            bool isSelectedDay = utils.isSameDay(selectedDate, dayToBuild);
            if (isSelectedDay) {
                // The selected day gets a circle background highlight, and a
                // contrasting text color.
                dayColor = selectedDayColor;
                decoration = new BoxDecoration(
                    color: selectedDayBackground,
                    shape: BoxShape.circle
                );
            }
            else if (isDisabled) {
                dayColor = disabledDayColor;
            }
            else if (utils.isSameDay(currentDate, dayToBuild)) {
                // The current day gets a different text color and a circle stroke
                // border.
                dayColor = todayColor;
                decoration = new BoxDecoration(
                    border: Border.all(color: todayColor, width: 1),
                    shape: BoxShape.circle
                );
            }
            Widget dayWidget = new Container(
                decoration: decoration,
                child: new Center(
                    child: new Text(localizations.formatDecimal(day), style: dayStyle.apply(color: dayColor))
                )
            );
            if (!isDisabled) {
                // Only enabled days respond to taps.
                dayWidget = new GestureDetector(
                    behavior: HitTestBehavior.opaque,
                    onTap: () => onChanged(dayToBuild),
                    child: dayWidget
                );
            }
            dayItems.Add(dayWidget);
        }
    }
    return (new Padding(
        padding: EdgeInsets.symmetric(
            horizontal: material_._monthPickerHorizontalPadding
        ),
        child: GridView.custom(
            physics: new ClampingScrollPhysics(),
            gridDelegate: material_._dayPickerGridDelegate,
            childrenDelegate: new SliverChildListDelegate(
                dayItems,
                addRepaintBoundaries: false
            )
        )
    ));
}
/// <summary>
/// Renders the search-history section: a header row with a clear-all button
/// (confirmed via an action sheet), then one tappable row per history item.
/// Tapping a row re-runs the search; the close icon deletes that entry.
/// Returns an empty Container when there is no history.
/// </summary>
Widget _buildSearchHistory(List <string> searchHistoryList) {
    if (searchHistoryList == null || searchHistoryList.Count <= 0) {
        return (new Container());
    }
    var widgets = new List <Widget> {
        new Container(
            margin: EdgeInsets.only(top: 24, bottom: 10),
            child: new Row(
                mainAxisAlignment: MainAxisAlignment.spaceBetween,
                children: new List <Widget> {
                    // Section title ("search history").
                    new Text(
                        "搜索历史",
                        style: CTextStyle.PXLargeBody4
                    ),
                    // Clear-all button; asks for confirmation before deleting.
                    new CustomButton(
                        padding: EdgeInsets.only(8, 8, 0, 8),
                        onPressed: () => {
                            ActionSheetUtils.showModalActionSheet(
                                new ActionSheet(
                                    title: "确定清除搜索历史记录?",
                                    items: new List <ActionSheetItem> {
                                        new ActionSheetItem("确定", ActionType.destructive,
                                            () => this.widget.actionModel.deleteAllSearchHistory()),
                                        new ActionSheetItem("取消", ActionType.cancel)
                                    }
                                )
                            );
                        },
                        child: new Text(
                            "清空",
                            style: CTextStyle.PRegularBody4
                        )
                    )
                }
            )
        )
    };
    searchHistoryList.ForEach(item => {
        // One row per history entry.
        var child = new GestureDetector(
            onTap: () => this._searchArticle(item),
            child: new Container(
                height: 44,
                color: CColors.White,
                child: new Row(
                    mainAxisAlignment: MainAxisAlignment.spaceBetween,
                    children: new List <Widget> {
                        new Expanded(
                            child: new Text(
                                data: item,
                                maxLines: 1,
                                overflow: TextOverflow.ellipsis,
                                style: CTextStyle.PLargeBody
                            )
                        ),
                        // Deletes this single entry from the history.
                        new CustomButton(
                            padding: EdgeInsets.only(8, 8, 0, 8),
                            onPressed: () => this.widget.actionModel.deleteSearchHistory(item),
                            child: new Icon(
                                Icons.close,
                                size: 16,
                                color: Color.fromRGBO(199, 203, 207, 1)
                            )
                        )
                    }
                )
            )
        );
        widgets.Add(child);
    });
    return (new Container(
        padding: EdgeInsets.symmetric(horizontal: 16),
        color: CColors.White,
        child: new Column(
            crossAxisAlignment: CrossAxisAlignment.start,
            children: widgets
        )
    ));
}
/// <summary>
/// Builds the action-sheet button column. Items are regrouped into the order:
/// normal, destructive, cancel — normal/destructive buttons are each followed
/// by a thin divider, while cancel gets a thicker divider above it. Tapping
/// any button first dismisses the popup, then invokes the item's callback.
/// </summary>
static Widget _buildButtons(List <ActionSheetItem> items) {
    if (items == null || items.Count <= 0) {
        return (new Container());
    }
    List <Widget> widgets = new List <Widget>();
    List <Widget> normalWidgets = new List <Widget>();
    List <Widget> destructiveWidgets = new List <Widget>();
    List <Widget> cancelWidgets = new List <Widget>();
    items.ForEach(item => {
        // Title color depends on the item's action type.
        Color titleColor;
        switch (item.type) {
            case ActionType.normal:
                titleColor = CColors.TextBody;
                break;
            case ActionType.cancel:
                titleColor = CColors.Cancel;
                break;
            case ActionType.destructive:
                titleColor = CColors.Error;
                break;
            default:
                titleColor = CColors.TextBody;
                break;
        }
        Widget widget = new GestureDetector(
            onTap: () => {
                // Close the sheet before running the item's action.
                ActionSheetUtils.hiddenModalPopup();
                item.onTap?.Invoke();
            },
            child: new Container(
                alignment: Alignment.center,
                height: 49,
                color: CColors.White,
                child: new Text(
                    data: item.title,
                    style: CTextStyle.PLargeBody.copyWith(color: titleColor)
                )
            )
        );
        var divider = new CustomDivider(
            height: 1,
            color: CColors.Separator2
        );
        if (item.type == ActionType.destructive) {
            destructiveWidgets.Add(item: widget);
            destructiveWidgets.Add(item: divider);
        }
        else if (item.type == ActionType.cancel) {
            // Cancel is separated from the rest by a thicker divider above it.
            cancelWidgets.Add(new CustomDivider(height: 4, color: CColors.Separator2));
            cancelWidgets.Add(item: widget);
        }
        else {
            normalWidgets.Add(item: widget);
            normalWidgets.Add(item: divider);
        }
    });
    widgets.AddRange(collection: normalWidgets);
    widgets.AddRange(collection: destructiveWidgets);
    widgets.AddRange(collection: cancelWidgets);
    return (new Column(
        children: widgets
    ));
}
/// <summary>
/// ARCore bootstrap: create the Session (mapping each ARCore availability
/// exception to a user-facing toast), verify device support, wire tap
/// gestures, and configure the GL surface renderer.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    SetContentView(Resource.Layout.Main);
    mSurfaceView = FindViewById <GLSurfaceView>(Resource.Id.surfaceview);
    mDisplayRotationHelper = new DisplayRotationHelper(this);
    // NOTE(review): `exception` is captured but never logged or rethrown —
    // consider logging it alongside the toast.
    Java.Lang.Exception exception = null;
    string message = null;
    try
    {
        mSession = new Session(/*context=*/ this);
    }
    catch (UnavailableArcoreNotInstalledException e)
    {
        message = "Please install ARCore";
        exception = e;
    }
    catch (UnavailableApkTooOldException e)
    {
        message = "Please update ARCore";
        exception = e;
    }
    catch (UnavailableSdkTooOldException e)
    {
        message = "Please update this app";
        exception = e;
    }
    catch (Java.Lang.Exception e)
    {
        exception = e;
        message = "This device does not support AR";
    }
    if (message != null)
    {
        // Session creation failed: tell the user and leave the activity inert.
        Toast.MakeText(this, message, ToastLength.Long).Show();
        return;
    }
    // Create default config, check is supported, create session from that config.
    var config = new Google.AR.Core.Config(mSession);
    if (!mSession.IsSupported(config))
    {
        Toast.MakeText(this, "This device does not support AR", ToastLength.Long).Show();
        Finish();
        return;
    }
    mSession.Configure(config);
    // Down must return true so the detector keeps tracking the gesture; taps
    // are handled by SingleTapUpHandler.
    mGestureDetector = new Android.Views.GestureDetector(this, new SimpleTapGestureDetector
    {
        SingleTapUpHandler = (MotionEvent arg) =>
        {
            onSingleTap(arg);
            return (true);
        },
        DownHandler = (MotionEvent arg) => true
    });
    mSurfaceView.SetOnTouchListener(this);
    // Set up renderer.
    mSurfaceView.PreserveEGLContextOnPause = true;
    mSurfaceView.SetEGLContextClientVersion(2);
    mSurfaceView.SetEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
    mSurfaceView.SetRenderer(this);
    mSurfaceView.RenderMode = Rendermode.Continuously;
}
/// <summary>
/// Activity bootstrap: loads voice-command phrases from configuration, opens
/// the database, applies TalkBack/voice preferences, installs the camera
/// fragment, and wires the navigation drawer, toolbar and swipe gestures.
/// BUGFIX: the camera fragment transaction was previously committed from a
/// freshly started background Thread; FragmentTransaction.Commit must be
/// called on the main thread, so it is now committed directly.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    // Configuration-driven voice command phrases and preference keys.
    ConfigurationManager.Initialise(PCLAppConfig.FileSystemStream.PortableStream.Current);
    DependencyInjection.RegisterInterfaces();
    cmdOpenCamera = ConfigurationManager.AppSettings["CmdOpenCamera"];
    cmdTakePhoto = ConfigurationManager.AppSettings["CmdTakePhoto"];
    cmdOpenCart = ConfigurationManager.AppSettings["CmdOpenCart"];
    cmdOpenList = ConfigurationManager.AppSettings["CmdOpenList"];
    cmdHelp = ConfigurationManager.AppSettings["CmdHelp"];
    cmdRemind = ConfigurationManager.AppSettings["CmdRemind"];
    cmdTutorialRequest = ConfigurationManager.AppSettings["CmdTutorialRequest"];
    cmdTutorialLikeShopLens = ConfigurationManager.AppSettings["CmdTutorialLikeShopLens"];
    userGuidPrefKey = ConfigurationManager.AppSettings["UserGuidPrefKey"];
    shopLensDbContext = ConnectToDatabase();
    prefs = PreferenceManager.GetDefaultSharedPreferences(this);
    talkBackEnabledIntentKey = ConfigurationManager.AppSettings["TalkBackKey"];
    voicePrefs = new ActivityPreferences(this, ConfigurationManager.AppSettings["VoicePrefs"]);
    CheckVoicePrefs();
    talkBackEnabled = IsTalkBackEnabled();
    if (!talkBackEnabled)
    {
        InitiateNoTalkBackMode();
    }

    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);
    camera2Frag = Camera2Fragment.NewInstance(this, this);
    if (savedInstanceState == null)
    {
        // Commit on the main thread (previously run from a background Thread,
        // which fragment transactions do not allow).
        FragmentManager.BeginTransaction()
            .Replace(Resource.Id.container, camera2Frag)
            .Commit();
    }

    // Navigation drawer / toolbar wiring.
    drawerLayout = FindViewById <DrawerLayout>(Resource.Id.DrawerLayout);
    toolbar = FindViewById <SupportToolbar>(Resource.Id.Toolbar);
    navView = FindViewById <NavigationView>(Resource.Id.NavView);
    rootView = FindViewById <CoordinatorLayout>(Resource.Id.root_view);
    drawerToggle = new Android.Support.V7.App.ActionBarDrawerToggle(
        this,
        drawerLayout,
        Resource.String.openDrawer,
        Resource.String.closeDrawer
    );
    drawerLayout.AddDrawerListener(drawerToggle);
    SetSupportActionBar(toolbar);
    SupportActionBar.SetDisplayHomeAsUpEnabled(true);
    SupportActionBar.SetHomeButtonEnabled(true);
    drawerToggle.SyncState();

    // Swipe-left gesture support.
    gestureListener = new GestureListener();
    gestureListener.LeftEvent += GestureLeft;
    gestureDetector = new GestureDetector(this, gestureListener);

    navView.NavigationItemSelected += (sender, e) =>
    {
        switch (e.MenuItem.ItemId)
        {
            case Resource.Id.NavItemShoppingCart:
                StartCartIntent();
                break;
            case Resource.Id.NavItemShoppingList:
                StartListIntent();
                break;
        }
    };
}
/// <summary>
/// Initializes the native ARCore view the first time a live ARView element is
/// attached: creates the Session, wires tap gestures, configures the GL
/// surface, sets it as the native control and resumes the session.
/// BUGFIX: the outer catch previously called
/// Debug.WriteLine(@" ERROR: ", ex.Message), which treats ex.Message as the
/// *category* argument, so the actual error text was never printed.
/// Dead commented-out catch blocks (and the unused `exception` local they fed)
/// were removed; specific ARCore availability exceptions can be re-added when
/// per-cause messages are wanted.
/// </summary>
protected override void OnElementChanged(ElementChangedEventArgs <ARView> e)
{
    base.OnElementChanged(e);
    // Initialize only once, and only when there is a live element.
    if (e.OldElement != null || Element == null)
    {
        return;
    }
    try
    {
        mSurfaceView = new GLSurfaceView(_context);
        mDisplayRotationHelper = new DisplayRotationHelper(_context);
        string message = null;
        try
        {
            mSession = new Session(_context);
        }
        catch (Java.Lang.Exception)
        {
            message = "This device does not support AR";
        }
        if (message != null)
        {
            Toast.MakeText(_context, message, ToastLength.Long).Show();
            return;
        }
        // Create default config and verify this device supports it.
        var config = new Google.AR.Core.Config(mSession);
        if (!mSession.IsSupported(config))
        {
            Toast.MakeText(_context, "This device does not support AR", ToastLength.Long).Show();
            return;
        }
        // Down must return true so the detector keeps tracking the gesture.
        mGestureDetector = new Android.Views.GestureDetector(_context, new SimpleTapGestureDetector
        {
            SingleTapUpHandler = (MotionEvent arg) =>
            {
                onSingleTap(arg);
                return (true);
            },
            DownHandler = (MotionEvent arg) => true
        });
        mSurfaceView.SetOnTouchListener(this);
        // GL renderer setup; alpha channel is used for plane blending.
        mSurfaceView.PreserveEGLContextOnPause = true;
        mSurfaceView.SetEGLContextClientVersion(2);
        mSurfaceView.SetEGLConfigChooser(8, 8, 8, 8, 16, 0);
        mSurfaceView.SetRenderer(this);
        mSurfaceView.RenderMode = Rendermode.Continuously;
        SetNativeControl(mSurfaceView);
        mSession.Resume();
        mSurfaceView.OnResume();
        mDisplayRotationHelper.OnResume();
        showLoadingMessage();
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"ERROR: {ex.Message}");
    }
}
/// <summary>
/// Dice screen: each click rolls a category number (1..categoryMax) and a
/// match number (1..36), updates totals/click/match/accuracy labels, and
/// persists per-category statistics to shared preferences.
/// Improvement: the six duplicated if/else persistence branches are collapsed
/// into a single key-suffix lookup. Stored key names — including the historic
/// "Mathces" misspelling — are preserved exactly so existing saved data keeps
/// working.
/// </summary>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    // Per-session stats, captured by the click delegate below.
    int total = 0;       // running sum of rolled numbers
    float numberOfC = 0; // number of clicks (rolls)
    float numberOfM = 0; // number of matches
    float accuracy = 0;  // match percentage

    // Set our view from the "main" layout resource
    SetContentView(Resource.Layout.DiceScreen);

    // Data passed - Category max
    string catMax = Intent.GetStringExtra("catMax") ?? "Data is not available!";
    int categoryMax = Int32.Parse(catMax);

    var numberResult = FindViewById <TextView> (Resource.Id.numberResult);
    var matchNumber = FindViewById <TextView> (Resource.Id.matchNumber);
    var categoryText = FindViewById <TextView> (Resource.Id.categoryText);
    var titleText = FindViewById <TextView> (Resource.Id.titleText);
    var totalScore = FindViewById <TextView> (Resource.Id.totalScore);
    var numberOfClicks = FindViewById <TextView> (Resource.Id.numberOfClicks);
    var numberOfMatches = FindViewById <TextView> (Resource.Id.numberOfMatches);
    Button diceButton = FindViewById <Button> (Resource.Id.diceButton);

    ISharedPreferences MDGPrefs = GetSharedPreferences(MDG_DATA, FileCreationMode.Private);
    ISharedPreferencesEditor MDGEditor = MDGPrefs.Edit();

    // Preference-key suffix for the current category; null means the category
    // has no persisted statistics (matches the original behavior).
    string statSuffix = null;
    switch (categoryMax)
    {
        case 6: statSuffix = "Six"; break;
        case 12: statSuffix = "Twelve"; break;
        case 18: statSuffix = "Eighteen"; break;
        case 24: statSuffix = "Twentyfour"; break;
        case 30: statSuffix = "Thirty"; break;
        case 36: statSuffix = "Thirtysix"; break;
    }

    numberResult.TextSize = 110;
    matchNumber.TextSize = 130;
    categoryText.TextSize = 25;
    titleText.TextSize = 27;
    categoryText.Text = "Category: 1-" + categoryMax;

    diceButton.Click += delegate
    {
        // Roll the category number and the 1-36 match number.
        int resultR = RandomNumber(1, categoryMax);
        numberResult.Text = resultR.ToString();
        int resultM = RandomNumber(1, 36);
        matchNumber.Text = resultM.ToString();

        // See if it matches & display number of matches & accuracy
        if (resultR == resultM)
        {
            Toast.MakeText(this, "Awesome Stuff! It's a MATCH!", ToastLength.Long).Show();
            // Number of matches & accuracy calculation
            numberOfM += 1;
        }

        // Add total & display it
        total += resultR;
        totalScore.Text = "Total: " + total.ToString();

        // Number of clicks counter
        numberOfC += 1;
        numberOfClicks.Text = "Number of Clicks: " + numberOfC.ToString();
        if (numberOfC != 0)
        {
            accuracy = (numberOfM / numberOfC) * 100;
        }
        if (numberOfM == 0)
        {
            numberOfMatches.Text = "Number of Matches: 0" + " - " + "0%";
        }
        else
        {
            numberOfMatches.Text = "Number of Matches: " + numberOfM.ToString() + " - " + accuracy.ToString(".0##") + "%";
        }

        // Statistics for all categories in MDG ("Mathces" spelling is the
        // historic stored key — do not correct it).
        if (statSuffix != null)
        {
            MDGEditor.PutInt("totalScore" + statSuffix, total);
            MDGEditor.PutFloat("numberOfClicks" + statSuffix, numberOfC);
            MDGEditor.PutFloat("numberOfMathces" + statSuffix, numberOfM);
        }
        MDGEditor.Apply();
    };

    // Gesture Detection
    gestureDetector = new GestureDetector(this);
}
// Use this for initialization: cache the Player component and subscribe to
// gesture callbacks.
void Start()
{
    player = GetComponent<Player>();
    GestureDetector.addListener(this);
}
//-----------------------------------
// Constructor
//-----------------------------------
/// <summary>
/// Wires up the Kinect sensor (color + body frame readers), the per-body
/// "TwistWrist" gesture detectors, the PHIZ pointer tracker and the WPF
/// drawing/display state, then opens the sensor and initializes the UI.
/// </summary>
public MainWindow()
{
    settings = new MouseAppSettings(true);
    // get the kinect
    this.kinectSensor = KinectSensor.GetDefault();
    // open the reader for color frames
    this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
    // wire handler for frame arrival
    this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    // create the colorFrameDescription from the ColorFrameSource
    FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    // create the bitmap to display
    this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
    // get the coordinate mapper
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;
    // Create the drawing group we'll use for drawing
    this.drawingGroup = new DrawingGroup();
    // Create an image source that we can use in our image control
    this.colorOverlay = new DrawingImage(this.drawingGroup);
    // get size of color space
    this.displayWidth = colorFrameDescription.Width;
    this.displayHeight = colorFrameDescription.Height;
    // Desktop size, used for mapping hand motion to cursor coordinates.
    this.deskHeight = Screen.PrimaryScreen.Bounds.Height;
    this.deskWidth = Screen.PrimaryScreen.Bounds.Width;
    // Rolling buffers for hand displacement / state smoothing.
    handDispBuffer = new List<float>();
    handDispTimes = new List<TimeSpan>();
    handStates = new List<HandState>();
    // One "TwistWrist" gesture detector per possible tracked body.
    detectors = new List<GestureDetector>();
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureDetector detector = new GestureDetector(this.kinectSensor, "TwistWrist");
        detectors.Add(detector);
    }
    // TimeSpan.MinValue marks "not yet started" for these timers.
    activeTime = TimeSpan.MinValue;
    handStateTime = TimeSpan.MinValue;
    driftStartTime = TimeSpan.MinValue;
    handStillPoint = new Point3D();
    // Mouse cursor and PHIZ attributes
    phiz = new PHIZTracker();
    kinectCoreWindow = KinectCoreWindow.GetForCurrentThread();
    kinectCoreWindow.PointerMoved += kinectCoreWindow_PointerMoved;
    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
    // open the sensor
    this.kinectSensor.Open();
    // use the window object as the view model in this simple example
    this.DataContext = this;
    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;
    // Brush lookup indexed by HandState for drawing hand cursors.
    handBrush = new Brush[5];
    handBrush[(int)(HandState.Closed)] = handClosedBrush;
    handBrush[(int)(HandState.Open)] = handOpenBrush;
    handBrush[(int)(HandState.Lasso)] = handLassoBrush;
    handBrush[(int)(HandState.Unknown)] = handUnknownBrush;
    handBrush[(int)(HandState.NotTracked)] = handNotTrackedBrush;
    InitializeComponent();
    this.OptionsColumn.Width = new System.Windows.GridLength(140);
    this.OptionsRow.Height = new System.Windows.GridLength(35);
    UpdateBoundaries();
    sim = new InputSimulator();
}
/// <summary>
/// ARCore bootstrap with an augmented-image database: creates the Session,
/// registers "qrcode.png" (physical width 0.01 m) as a trackable image, then
/// wires tap gestures and the GL surface renderer.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    SetContentView(Resource.Layout.Main);
    mSurfaceView = FindViewById <GLSurfaceView>(Resource.Id.surfaceview);
    mDisplayRotationHelper = new DisplayRotationHelper(this);
    Java.Lang.Exception exception = null;
    string message = null;
    try
    {
        mSession = new Session(/*context=*/ this);
    }
    catch (UnavailableArcoreNotInstalledException e)
    {
        message = "Please install ARCore";
        exception = e;
    }
    catch (UnavailableApkTooOldException e)
    {
        message = "Please update ARCore";
        exception = e;
    }
    catch (UnavailableSdkTooOldException e)
    {
        message = "Please update this app";
        exception = e;
    }
    catch (Java.Lang.Exception e)
    {
        exception = e;
        message = "This device does not support AR";
    }
    if (message != null)
    {
        // Session creation failed: tell the user and leave the activity inert.
        Toast.MakeText(this, message, ToastLength.Long).Show();
        return;
    }
    // Create default config, check is supported, create session from that config.
    var config = new Google.AR.Core.Config(mSession);
    if (!mSession.IsSupported(config))
    {
        Toast.MakeText(this, "This device does not support AR", ToastLength.Long).Show();
        Finish();
        return;
    }
    // Build the augmented-image database from the bundled asset.
    AugmentedImageDatabase imageDatabase = new AugmentedImageDatabase(mSession);
    Bitmap bitmap = null;
    try
    {
        var inputStream = Assets.Open("qrcode.png");
        bitmap = BitmapFactory.DecodeStream(inputStream);
    }
    catch (IOException e)
    {
        Log.Error(TAG, "I/O exception loading augmented image bitmap.", e);
    }
    // NOTE(review): if the asset failed to load, `bitmap` is null here and
    // AddImage will fail — confirm whether a null guard is needed.
    index = imageDatabase.AddImage("interstellar", bitmap, 0.01f);
    config.AugmentedImageDatabase = imageDatabase;
    mSession.Configure(config);
    // Down must return true so the detector keeps tracking the gesture.
    mGestureDetector = new GestureDetector(this, new TapGestureDetector
    {
        SingleTapUpHandler = (MotionEvent arg) =>
        {
            OnSingleTap(arg);
            return (true);
        },
        DownHandler = (MotionEvent arg) => true
    });
    mSurfaceView.SetOnTouchListener(this);
    // Set up renderer.
    mSurfaceView.PreserveEGLContextOnPause = true;
    mSurfaceView.SetEGLContextClientVersion(2);
    mSurfaceView.SetEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
    mSurfaceView.SetRenderer(this);
    mSurfaceView.RenderMode = Rendermode.Continuously;
}
/// <summary>
/// Creates the listener and its backing gesture detector, which forwards
/// gesture callbacks to this instance.
/// </summary>
public OnSwipeTouchListener()
{
    var swipeListener = new GestureListener(this);
    gestureDetector = new GestureDetector(swipeListener);
}
// Start is called before the first frame update.
// Creates the gesture detector used by this component.
void Start()
{
    this.gestureDetector = new GestureDetector();
}
/// <summary>
/// Creates the renderer with its gesture listener and detector.
/// ("GesutreListener" is the project's — misspelled — type name.)
/// </summary>
public TabbedPageCustomRenderer()
{
    _detector = new GestureDetector(_listener = new GesutreListener());
}
/// <summary>
/// Initialize the detector with a listener (this renderer receives the
/// gesture callbacks).
/// </summary>
public GesturesContentViewRenderer()
{
    this._detector = new GestureDetector(this);
}
/// <summary>
/// Creates the renderer, a gesture detector listening on this instance, and
/// caches the display density for pixel/dp conversions.
/// </summary>
public TransparentPageRenderer(Context context) : base(context)
{
    this.gestureDetector = new GestureDetector(context, this);
    this.density = Resources.DisplayMetrics.Density;
}
/// <summary>
/// Builds the segmented control: one tappable, centered child per key with
/// per-segment text/icon and background colors, laid out by
/// _SegmentedControlRenderWidget and wrapped in the configured padding.
/// </summary>
public override Widget build(BuildContext context) {
    List <Widget> _gestureChildren = new List <Widget>();
    List <Color> _backgroundColors = new List <Color>();
    int index = 0;
    int selectedIndex = 0;
    int pressedIndex = 0;
    foreach (T currentKey in widget.children.Keys) {
        // Track which segment is selected and which is currently pressed.
        selectedIndex = (widget.groupValue.Equals(currentKey)) ? index : selectedIndex;
        pressedIndex = (_pressedKey.Equals(currentKey)) ? index : pressedIndex;
        TextStyle textStyle = DefaultTextStyle.of(context).style.copyWith(
            color: getTextColor(index, currentKey)
        );
        IconThemeData iconTheme = new IconThemeData(
            color: getTextColor(index, currentKey)
        );
        Widget child = new Center(
            child: widget.children[currentKey]
        );
        // Wrap each segment so taps (and the press-down visual state) are handled.
        child = new GestureDetector(
            onTapDown: (TapDownDetails _event) => { _onTapDown(currentKey); },
            onTapCancel: _onTapCancel,
            onTap: () => { _onTap(currentKey); },
            child: new IconTheme(
                data: iconTheme,
                child: new DefaultTextStyle(
                    style: textStyle,
                    child: child
                )
            )
        );
        _backgroundColors.Add(getBackgroundColor(index, currentKey));
        _gestureChildren.Add(child);
        index += 1;
    }
    Widget box = new _SegmentedControlRenderWidget <T>(
        children: _gestureChildren,
        selectedIndex: selectedIndex,
        pressedIndex: pressedIndex,
        backgroundColors: _backgroundColors,
        borderColor: _borderColor
    );
    return (new Padding(
        padding: widget.padding ?? CupertinoSegmentedControlsUtils._kHorizontalItemPadding,
        child: new UnconstrainedBox(
            constrainedAxis: Axis.horizontal,
            child: box
        )
    ));
}
// (Re)creates the swipe gesture detector, routing swipe events to OnSwiped.
private void SetGestureDetector()
{
    var cardsListener = new CardsGestureListener(OnSwiped);
    _gestureDetector = new GestureDetector(Context, cardsListener);
}
/// <summary>
/// Releases managed resources: unhooks element and scroll-parent event
/// handlers, disposes the gesture detector, child views and cached point.
/// Idempotent — subsequent calls return immediately.
/// </summary>
/// <param name="disposing">True when called from Dispose(), false when called from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    if (_isDisposed)
    {
        // Already torn down.
        return;
    }
    if (disposing)
    {
        if (Element != null)
        {
            Element.CloseRequested -= OnCloseRequested;
        }
        if (_detector != null)
        {
            _detector.Dispose();
            _detector = null;
        }
        if (_scrollParent != null)
        {
            // Unsubscribe from whichever scrollable ancestor we attached to.
            if (_scrollParent is ScrollView scrollView)
            {
                scrollView.Scrolled -= OnParentScrolled;
            }
            if (_scrollParent is ListView listView)
            {
                listView.Scrolled -= OnParentScrolled;
            }
            if (_scrollParent is Xamarin.Forms.CollectionView collectionView)
            {
                collectionView.Scrolled -= OnParentScrolled;
            }
        }
        if (_contentView != null)
        {
            _contentView.RemoveFromParent();
            _contentView.Dispose();
            _contentView = null;
        }
        if (_actionView != null)
        {
            _actionView.RemoveFromParent();
            _actionView.Dispose();
            _actionView = null;
        }
        if (_initialPoint != null)
        {
            _initialPoint.Dispose();
            _initialPoint = null;
        }
    }
    _isDisposed = true;
    base.Dispose(disposing);
}
/// <summary>
/// Creates the renderer with its custom gesture listener and detector.
/// </summary>
public GestureFrameRenderer()
{
    _detector = new GestureDetector(_listener = new CustomGestureListener());
}
/// <summary>
/// Full-screen photo viewer: an AdapterViewFlipper showing album images, with
/// swipe gestures to move between pages and asynchronous bitmap loading.
/// </summary>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    // Create your application here
    var mContext = this;
    SetContentView(Resource.Layout.Fullscreen_view);
    mViewFlipper = (AdapterViewFlipper)FindViewById(Resource.Id.adpater_view_flipper);
    // Route raw touch events into the swipe gesture listener.
    var listener = new SwipeOnGestureListener(AppConstant.SWIPE_MIN_DISTANCE, AppConstant.SWIPE_THRESHOLD_VELOCITY);
    var detector = new GestureDetector(listener);
    mViewFlipper.Touch += (sender, e) => { detector.OnTouchEvent(e.Event); };
    // Swipe forward: animate left and advance.
    listener.Next += (sender, e) =>
    {
        mViewFlipper.SetInAnimation(mContext, Resource.Animation.left_in_obj);
        mViewFlipper.SetOutAnimation(mContext, Resource.Animation.left_out_obj);
        mViewFlipper.ShowNext();
    };
    // Swipe back: animate right and go to the previous image.
    listener.Previous += (sender, e) =>
    {
        mViewFlipper.SetInAnimation(mContext, Resource.Animation.right_in_obj);
        mViewFlipper.SetOutAnimation(mContext, Resource.Animation.right_out_obj);
        mViewFlipper.ShowPrevious();
    };
    int position = Intent.GetIntExtra("position", 0);
    var imagePaths = Utils.GetFileInfos(AppConstant.PHOTO_ALBUM, AppConstant.FILE_EXTN).Select(x => x.FullName);
    adapter = new ViewAdapter(() => imagePaths.Count(), (pos, oldView) =>
    {
        // Reuse the recycled view when available; otherwise inflate a new page.
        var viewLayout = oldView != null ?
            oldView : this.LayoutInflater.Inflate(Resource.Layout.Fullscreen_image, mViewFlipper, false);
        var imgDisplay = (TouchImageView)viewLayout.FindViewById(Resource.Id.imgDisplay);
        imgDisplay.SetScaleType(ImageView.ScaleType.CenterInside);
        if (viewLayout != oldView)
        {
            // close button click event (wired only once, on freshly inflated views)
            var btnClose = viewLayout.FindViewById(Resource.Id.btnClose);
            btnClose.Click += (sender, e) => this.Finish();
        }
        //BitmapFactory.Options options = new BitmapFactory.Options();
        //options.InPreferredConfig = Bitmap.Config.Argb8888;
        //Bitmap bitmap = BitmapFactory.DecodeFile(imagePaths.ElementAt(pos), options);
        // Decode off the UI thread, then apply the bitmap on the UI thread.
        Task.Run <Bitmap>(() => Utils.LoadAndResizeBitmap(imagePaths.ElementAt(pos), Resources.DisplayMetrics.WidthPixels, Resources.DisplayMetrics.HeightPixels))
            .ContinueWith(x => imgDisplay.SetImageBitmap(x.Result), TaskScheduler.FromCurrentSynchronizationContext());
        return (viewLayout);
    });
    mViewFlipper.Adapter = adapter;
    // displaying selected image first
    mViewFlipper.SetSelection(position);
}
/*public override bool OnInterceptTouchEvent (MotionEvent ev)
 * {
 *      // Only accept single touch
 *      if (ev.PointerCount != 1)
 *              return false;
 *      ev.OffsetLocation (0, TranslationY);
 *      return CaptureMovementCheck (ev);
 * }*/
/// <summary>
/// Drag/fling state machine for the sliding pane: tracks vertical movement
/// between Opened and FullyOpened, uses a velocity tracker to decide fling
/// transitions on Up, and toggles states on double-tap.
/// </summary>
public override bool OnTouchEvent(MotionEvent e)
{
    // Lazily create the double-tap detector that toggles Opened/FullyOpened.
    if (paneGestureDetector == null)
    {
        var l = new DoubleTapListener(() => SetState(Opened && FullyOpened ? State.Opened : State.FullyOpened));
        paneGestureDetector = new GestureDetector(Context, l);
    }
    paneGestureDetector.OnTouchEvent(e);
    // Work in pane-local coordinates.
    e.OffsetLocation(0, TranslationY);
    if (e.Action == MotionEventActions.Down)
    {
        CaptureMovementCheck(e);
        return (true);
    }
    if (!isTracking && !CaptureMovementCheck(e))
    {
        // Not a movement we handle; swallow the event.
        return (true);
    }
    if (e.Action != MotionEventActions.Move || MoveDirectionTest(e))
    {
        velocityTracker.AddMovement(e);
    }
    if (e.Action == MotionEventActions.Move)
    {
        var y = e.GetY();
        // We don't want to go beyond startY
        if (state == State.Opened && y > startY || state == State.FullyOpened && y < startY)
        {
            return (true);
        }
        // We reset the velocity tracker in case a movement goes back to its origin
        if (state == State.Opened && y > oldY || state == State.FullyOpened && y < oldY)
        {
            velocityTracker.Clear();
        }
        var traveledDistance = (int)Math.Round(Math.Abs(y - startY));
        if (state == State.Opened)
        {
            traveledDistance = OffsetForState(State.Opened) - traveledDistance;
        }
        SetNewOffset(traveledDistance);
        oldY = y;
    }
    else if (e.Action == MotionEventActions.Up)
    {
        // Decide the final state: fling velocity first, then travel distance.
        velocityTracker.ComputeCurrentVelocity(1000, maxFlingVelocity);
        if (Math.Abs(velocityTracker.YVelocity) > minFlingVelocity && Math.Abs(velocityTracker.YVelocity) < maxFlingVelocity)
        {
            SetState(state == State.FullyOpened ? State.Opened : State.FullyOpened);
        }
        else if (state == State.FullyOpened && contentOffsetY > Height / 2)
        {
            SetState(State.Opened);
        }
        else if (state == State.Opened && contentOffsetY < Height / 2)
        {
            SetState(State.FullyOpened);
        }
        else
        {
            // Not far or fast enough: snap back to the current state.
            SetState(state);
        }
        preTracking = isTracking = false;
        velocityTracker.Clear();
        velocityTracker.Recycle();
    }
    return (true);
}
/// <summary>
/// On touch-down, spawns a randomly sized (100-199 px) random shape image at
/// the touch point, wires its touch/double-tap handlers, and adds it to the
/// touched view group. Move events are consumed; other actions are ignored.
/// Fix: removed the redundant (int) cast — Random.Next already returns int.
/// </summary>
public bool OnTouch(View view, MotionEvent e)
{
    switch (e.Action)
    {
        //--Down--
        case MotionEventActions.Down:
            _x = (int)e.GetX();
            _y = (int)e.GetY();
            var layoutParams = new RelativeLayout.LayoutParams(
                RelativeLayout.LayoutParams.WrapContent,
                RelativeLayout.LayoutParams.WrapContent);
            var imgView = new ImageView(this);
            // NOTE(review): a fresh Random per touch can repeat sizes on rapid
            // taps; a shared Random field would be preferable — confirm before
            // hoisting.
            var size = new Random().Next(100, 200);
            var randomShape = Utility.GetRandomShape();
            layoutParams.SetMargins(_x, _y, 0, 0);
            imgView.LayoutParameters = layoutParams;
            imgView.LayoutParameters.Width = size;
            imgView.LayoutParameters.Height = size;
            imgView.Tag = randomShape.ToString();
            if (randomShape == Shape.Square)
            {
                //if-square
                imgView.SetImageResource(Resource.Drawable.Square);
            }
            else if (randomShape == Shape.Circle)
            {
                //if-circle
                imgView.SetImageResource(Resource.Drawable.Circle);
            }
            //sets background
            SetShapeBackground(imgView, shape: randomShape);
            //add to main-view
            ((ViewGroup)view).AddView(imgView);
            //OnTouchEvent
            imgView.Touch += ImgView_Touch;
            //detect double-tap
            // NOTE(review): the detector is recreated on every Down, discarding
            // the previous one and its handler — confirm before making it a
            // one-time initialization.
            _gestureDetector = new GestureDetector(new GestureListener());
            _gestureDetector.DoubleTap += GestureDetector_DoubleTap;
            break;
        //--Move--
        case MotionEventActions.Move:
            break;
        //--default--
        default:
            return (false);
    }
    return (true);
}
public void init(Context context) { // the detector handles all the gestures dectector = new GestureDetector(this); ItemLongClick += HandleItemLongClick; }
/// <summary>
/// Builds a coin card (layout scheme 1) showing symbol, description, change,
/// last-update time, portfolio value, number of coins, price and a per-card
/// menu toolbar. The card is tagged with the symbol, reacts to double taps,
/// and its toolbar menu routes to the matching MainActivity fragments.
/// </summary>
private CardView CreateCardViewScheme1(string symbol, string description, string chgText, string timeText, string valueText, string numberOfCoinsText, string priceText, Color chgColor, int changeIcon)
{
    CardView cardView = new CardView(Context);
    // Two cards per row: half the screen width minus the horizontal margins.
    int cardViewWidth;
    //175
    cardViewWidth = (GetScreenWidthInPixels() - DpToPixels(6)) / 2;
    //210
    GridLayout.LayoutParams gridLayoutParams = new GridLayout.LayoutParams();
    gridLayoutParams.Width = cardViewWidth;
    gridLayoutParams.Height = DpToPixels(170);
    gridLayoutParams.LeftMargin = DpToPixels(2);
    gridLayoutParams.RightMargin = DpToPixels(2);
    gridLayoutParams.TopMargin = DpToPixels(2);

    RelativeLayout rl = new RelativeLayout(cardView.Context);
    cardView.Tag = new Identifier(symbol);
    cardView.AddView(rl);

    // Double-tap detection on the card; the Touch handler records which
    // symbol was touched before forwarding the event to the detector.
    GestureDetector _gestureDetector = new GestureDetector(cardView.Context, new GestureListener());
    _gestureDetector.DoubleTap += (object sender, GestureDetector.DoubleTapEventArgs e) =>
    {
        //apply double tap code here
        //var activity = (MainActivity)this.Activity;
        //activity.DisplayCandleChart(symbolDoubleTapped);
        Toast.MakeText(Context, "Double click", ToastLength.Short).Show();
    };
    cardView.Touch += (object sender, View.TouchEventArgs e) =>
    {
        CardView cv = sender as CardView;
        if (cv != null)
        {
            symbolDoubleTapped = ((Identifier)cv.Tag).Name;
        }
        _gestureDetector.OnTouchEvent(e.Event);
    };

    // Thin color stripe across the top of the card.
    Button topColorStripe = new Button(cardView.Context);
    //topColorStripe.SetBackgroundColor(CryptoCoinColors.GetCryptoCoinColor(symbol));
    RelativeLayout.LayoutParams lpbb = CreateRelativeLayoutParams();
    lpbb.Width = ViewGroup.LayoutParams.MatchParent;
    lpbb.Height = DpToPixels(6);
    topColorStripe.LayoutParameters = lpbb;
    topColorStripe.Id = 100; // NOTE(review): magic view id — kept as-is in case other code references it
    rl.AddView(topColorStripe);

    // Coin symbol, below the stripe.
    TextView symTV = CreateTextViewEx(symbol, Color.Black, 30, TypefaceStyle.Bold, cardView.Context);
    //symTV.LayoutParameters = CreateRelativeLayoutParams();
    RelativeLayout.LayoutParams lps = CreateRelativeLayoutParams();
    lps.AddRule(LayoutRules.Below, topColorStripe.Id);
    symTV.LayoutParameters = lps;
    symTV.Id = CARDVIEW_SYMBOL_TEXT_VIEW_ID;
    rl.AddView(symTV);

    // Up/down change icon, right of the symbol.
    ImageView iv = new ImageView(cardView.Context);
    RelativeLayout.LayoutParams lpi = CreateRelativeLayoutParams();
    lpi.AddRule(LayoutRules.RightOf, symTV.Id);
    lpi.AddRule(LayoutRules.AlignTop, symTV.Id);
    lpi.TopMargin = DpToPixels(5);
    iv.LayoutParameters = lpi;
    iv.SetImageResource(changeIcon);
    iv.Id = CARDVIEW_IMAGE_UP_DOWN_ID;
    rl.AddView(iv);

    // Coin description, below the symbol.
    TextView descTV = CreateTextViewEx(description, Color.Gray, 12, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpd = CreateRelativeLayoutParams();
    lpd.AddRule(LayoutRules.Below, symTV.Id);
    lpd.LeftMargin = DpToPixels(2);
    lpd.TopMargin = DpToPixels(-10);
    descTV.LayoutParameters = lpd;
    descTV.Id = CARDVIEW_DESCRIPTION_TEXT_VIEW_ID;
    rl.AddView(descTV);

    // Change text, right of the icon.
    TextView chgTV = CreateTextViewEx(chgText, chgColor, 12, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpc = CreateRelativeLayoutParams();
    lpc.AddRule(LayoutRules.RightOf, iv.Id);
    lpc.AddRule(LayoutRules.AlignTop, iv.Id);
    lpc.TopMargin = DpToPixels(2);
    chgTV.LayoutParameters = lpc;
    chgTV.Id = CARDVIEW_CHANGE_TEXT_VIEW_ID;
    rl.AddView(chgTV);

    // Last-update time, below the change text.
    TextView timeTV = CreateTextViewEx(timeText, Color.Gray, 8, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpt = CreateRelativeLayoutParams();
    lpt.AddRule(LayoutRules.Below, chgTV.Id);
    lpt.AddRule(LayoutRules.AlignLeft, chgTV.Id);
    timeTV.LayoutParameters = lpt;
    timeTV.Id = CARDVIEW_LAST_UPDATE_TEXT_VIEW_ID;
    rl.AddView(timeTV);

    // "Value" label and the value itself.
    TextView valueLabelTV = CreateTextViewEx("Value", Color.Gray, 10, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpvl = CreateRelativeLayoutParams();
    lpvl.AddRule(LayoutRules.Below, descTV.Id);
    lpvl.LeftMargin = DpToPixels(2);
    lpvl.TopMargin = DpToPixels(17);
    valueLabelTV.LayoutParameters = lpvl;
    valueLabelTV.Id = CARDVIEW_VALUE_LABEL_TEXT_VIEW_ID;
    rl.AddView(valueLabelTV);

    TextView valueTV = CreateTextViewEx(valueText, Color.ParseColor("#EFCC00"), 22, TypefaceStyle.Bold, cardView.Context);
    RelativeLayout.LayoutParams lpv = CreateRelativeLayoutParams();
    lpv.AddRule(LayoutRules.Below, valueLabelTV.Id);
    lpv.LeftMargin = DpToPixels(2);
    lpv.TopMargin = DpToPixels(-3);
    valueTV.LayoutParameters = lpv;
    valueTV.Id = CARDVIEW_VALUE_TEXT_VIEW_ID;
    rl.AddView(valueTV);

    // "Number of coins" label and the coin count.
    TextView coinsLabelTV = CreateTextViewEx("Number of coins", Color.Gray, 10, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpnc = CreateRelativeLayoutParams();
    lpnc.AddRule(LayoutRules.Below, valueTV.Id);
    lpnc.LeftMargin = DpToPixels(2);
    lpnc.TopMargin = DpToPixels(2);
    coinsLabelTV.LayoutParameters = lpnc;
    coinsLabelTV.Id = CARDVIEW_NUMBER_OF_COINS_LABEL_TEXT_VIEW_ID;
    rl.AddView(coinsLabelTV);

    TextView coinsTV = CreateTextViewEx(numberOfCoinsText, Color.Black, 12, TypefaceStyle.Normal, cardView.Context);
    RelativeLayout.LayoutParams lpcv = CreateRelativeLayoutParams();
    lpcv.AddRule(LayoutRules.Below, coinsLabelTV.Id);
    lpcv.LeftMargin = DpToPixels(2);
    // BUG FIX: this line originally wrote lpnc.TopMargin (the label's params,
    // already assigned above) by copy-paste mistake, clobbering the label's
    // 2dp margin instead of setting the coin count's margin.
    lpcv.TopMargin = DpToPixels(-1);
    coinsTV.LayoutParameters = lpcv;
    coinsTV.Id = CARDVIEW_NUMBER_OF_COINS_TEXT_VIEW_ID;
    rl.AddView(coinsTV);

    // Price, below the description (to the right-hand column area).
    TextView priceTV = CreateTextViewEx(priceText, chgColor, 14, TypefaceStyle.Bold, cardView.Context);
    RelativeLayout.LayoutParams lpp = CreateRelativeLayoutParams();
    lpp.AddRule(LayoutRules.Below, descTV.Id);
    lpp.LeftMargin = DpToPixels(2);
    lpp.TopMargin = DpToPixels(-5);
    priceTV.LayoutParameters = lpp;
    priceTV.Id = CARDVIEW_PRICE_TEXT_VIEW_ID;
    rl.AddView(priceTV);

    // Per-card toolbar with the coin actions menu.
    Android.Widget.Toolbar tb = new Android.Widget.Toolbar(cardView.Context);
    RelativeLayout.LayoutParams lpb = CreateRelativeLayoutParams();
    lpb.AddRule(LayoutRules.Below, coinsTV.Id);
    lpb.TopMargin = -15; // NOTE(review): raw pixels, unlike the DpToPixels margins above — confirm intent
    tb.Tag = new Identifier(symbol);
    tb.InflateMenu(Resource.Menu.CardToolbarMenu);
    tb.MenuItemClick += (sender, e) =>
    {
        // Dispatch the chosen menu item to the matching fragment for this coin.
        string sym = ((Identifier)tb.Tag).Name;
        var activity = (MainActivity)this.Activity;
        switch (e.Item.ItemId)
        {
            case (Resource.Id.cardmenu_qrcode):
                activity.ShowCryptoCurrencyAddressFragment(sym);
                break;
            case (Resource.Id.cardmenu_chart):
                activity.ShowCandleFragment(sym);
                break;
            case (Resource.Id.cardmenu_transactions):
                activity.ShowTransactionFragment(sym);
                break;
            case (Resource.Id.cardmenu_send):
                activity.ShowSendFragment(sym);
                break;
            case (Resource.Id.cardmenu_receive):
                activity.ShowReceiveFragment(sym);
                break;
            default:
                break;
        }
    };
    tb.LayoutParameters = lpb;
    tb.Id = 11; // NOTE(review): magic view id — kept as-is in case other code references it
    rl.AddView(tb);

    cardView.LayoutParameters = gridLayoutParams;
    return(cardView);
}
// Main kiosk window. Boots all content sources, Kinect interaction, clocks,
// presence checking and (optionally) the easter-egg gesture detectors.
// In non-admin mode the app hides the cursor and re-launches itself on
// exit/crash; in admin mode it behaves like a normal desktop app.
public MainWindow()
{
    Instance = this;
    this.InitializeComponent();
    this.Activate();
    this.Focus();
    // Preload all displayable content from disk before anything renders.
    CreateData.Instance.GetAllVideos();
    CreateData.Instance.GetNewsFromFile();
    CreateData.Instance.GetGames();
    CreateData.Instance.GetAllTimetable();
    NewsUpdateThread.Instance.StartUpdating();
    // Log any unhandled exception from any thread.
    AppDomain.CurrentDomain.UnhandledException += (sender, s) =>
    {
        Exception e = (Exception)s.ExceptionObject;
        Log(e.ToString());
    };
    if (!adminMode)
    {
        // Kiosk mode: restart the app when it exits or crashes, hide the cursor.
        AppDomain.CurrentDomain.ProcessExit += ReOpenApp;
        AppDomain.CurrentDomain.UnhandledException += ReOpenAppInException;
        Cursor = Cursors.None;
    }
    else
    {
        Cursor = Cursors.Arrow;
    }
    // Hook the window up to the Kinect interaction region and body stream.
    KinectRegion.SetKinectRegion(this, kinectRegion);
    ((App)Application.Current).KinectRegion = kinectRegion;
    kinectRegion.KinectSensor = KinectSensor.GetDefault();
    BodyFrameReader bodyFrameReader = this.kinectRegion.KinectSensor.BodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
    // Once-a-second clock/timetable display refresh.
    DispatcherTimer TimeTimer = new DispatcherTimer(
        TimeSpan.FromSeconds(1),
        DispatcherPriority.Normal,
        (sender, e) =>
        {
            DateTime dateTime = DateTime.Now; //new DateTime(DateTime.Now.Year, 2, 10, 12, 20, 00);
            Time.Text = MireaDateTime.Instance.GetTime(dateTime);
            Para.Text = MireaDateTime.Instance.GetPara(dateTime);
            Date.Text = MireaDateTime.Instance.GetDay(dateTime);
            Week.Text = MireaDateTime.Instance.GetWeek(dateTime);
        }, Dispatcher);
    // 10 Hz poll to detect when the tracked person walks away.
    DispatcherTimer CheckOutTimer = new DispatcherTimer(
        TimeSpan.FromMilliseconds(100),
        DispatcherPriority.Normal,
        (sender, e) =>
        {
            CheckPersonIsRemoved();
        }, Dispatcher);
    HandHelper = new HandOverHelper(kinectRegion, Dispatcher);
    // If the VGB gesture database ships with the app, create one detector per
    // trackable body; the gesture triggers the easter-egg video.
    string GesturePath = $@"{AppDomain.CurrentDomain.BaseDirectory}\GesturesDatabase\KinectGesture.gbd";
    if (File.Exists(GesturePath))
    {
        int maxBodies = this.kinectRegion.KinectSensor.BodyFrameSource.BodyCount;
        for (int i = 0; i < maxBodies; ++i)
        {
            GestureDetector detector = new GestureDetector(this.kinectRegion.KinectSensor);
            detector.OnGestureFired += () =>
            {
                content.NavigateTo(new EggVideo());
            };
            this.gestureDetectorList.Add(detector);
        }
    }
    ControlsBasicsWindow.Topmost = !adminMode;
    Settings.Instance.SettingsUpdated += Settings_SettingsUpdated;
    content.OpenBackgroundVideo();
}
private void CreateGestureDetector() => _gestureDetector = new GestureDetector(new CardsGestureListener(OnSwiped));
public MyRvItemTouchListener(GestureDetector gestureDetector) { GestureDetector = gestureDetector; }
public DoubleTappableView(Context context, IAttributeSet attrs) : base(context, attrs) { _gestureDetector = new GestureDetector(context, new GestureListener()); }
public GestureFrameRenderer(Context context) : base(context) { _listener = new CustomGestureListener(); _detector = new GestureDetector(context, _listener); }
// Click handler that (re)opens the Kinect after the app was used manually.
// Starts three pipelines: VGB gesture detection (one detector per trackable
// body), the multi-source joint reader, and speech recognition with a fixed
// command grammar (zoom/pan, named places, tools).
void OnOpenSensor(object sender, RoutedEventArgs e)
{
    // Announce that the Kinect pipeline is starting.
    speaker = new SpeechSynthesizer(); // Used for getting output voice from computer
    speaker.Speak("App is started");
    System.Windows.Forms.MessageBox.Show("Kinect started - Click on Map Now");

    // ----- VGB (gesture) pipeline -----
    this.kinectSensor.Open();
    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
    // BodyViewer displays the tracked skeletons in the UI.
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);
    this.gestureDetectorList = new List<GestureDetector>();
    // Data contexts used to show the bone structure in the UI.
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;
    // Kinect v2 tracks up to 6 bodies, so create one gesture detector per body.
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        // i is the body index; webView1 is the HTML control the detector drives.
        GestureResultView result = new GestureResultView(i, false, false, 0.0f, webView1,gestureState.Text);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);
    }

    // ----- joint (multi-source) pipeline -----
    if (_sensor != null)
    {
        _sensor.Open();
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }

    // ----- speech pipeline -----
    this.sensor = KinectSensor.GetDefault();
    this.sensor.Open();
    // Grab the audio stream and wrap it for the speech engine.
    IReadOnlyList<AudioBeam> audioBeamList = this.sensor.AudioSource.AudioBeams;
    System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();
    this.convertStream = new KinectAudioStream(audioStream);
    RecognizerInfo ri = TryGetKinectRecognizer();
    // NOTE(review): ri can be null when no Kinect speech recognizer is
    // installed; this would throw here — confirm and guard if needed.
    this.speechEngine = new SpeechRecognitionEngine(ri.Id);

    // Command grammar: first argument is the spoken phrase, second is the
    // semantic tag consumed by SpeechRecognized.
    // (Duplicate entries that were previously added twice have been removed.)
    var directions = new Choices();
    // zoom / pan
    directions.Add(new SemanticResultValue("ZoomIn", "ZOOMIN"));
    directions.Add(new SemanticResultValue("In", "ZOOMIN"));
    directions.Add(new SemanticResultValue("ZoomOut", "ZOOMOUT"));
    directions.Add(new SemanticResultValue("Out", "ZOOMOUT"));
    directions.Add(new SemanticResultValue("Left", "LEFT"));
    directions.Add(new SemanticResultValue("Right", "RIGHT"));
    directions.Add(new SemanticResultValue("Up", "UP"));
    directions.Add(new SemanticResultValue("Down", "DOWN"));
    // places
    directions.Add(new SemanticResultValue("Go India", "INDIA"));
    directions.Add(new SemanticResultValue("india", "INDIA"));
    directions.Add(new SemanticResultValue("Go AMERICA", "AMERICA"));
    directions.Add(new SemanticResultValue("america", "AMERICA"));
    directions.Add(new SemanticResultValue("Go SanDiego", "SANDIEGO"));
    directions.Add(new SemanticResultValue("SanDiego", "SANDIEGO"));
    directions.Add(new SemanticResultValue("MY PLACE", "MYPLACE")); // will not work in the window app: GPS cannot be activated through the EO web browser
    directions.Add(new SemanticResultValue("San Francisco Bay", "SANFRANCISCO"));
    directions.Add(new SemanticResultValue("Mount Everest", "MOUNTEVEREST"));
    directions.Add(new SemanticResultValue("Grand Canyon", "GRANDCANYON"));
    directions.Add(new SemanticResultValue("hannover", "HANOVER"));
    directions.Add(new SemanticResultValue("newyork", "NEWYORK"));
    directions.Add(new SemanticResultValue("Delhi", "DELHI"));
    directions.Add(new SemanticResultValue("Goa", "GOA"));
    directions.Add(new SemanticResultValue("Mumbai", "MUMBAI"));
    directions.Add(new SemanticResultValue("Banglore", "BANGLORE"));
    directions.Add(new SemanticResultValue("Europe", "EUROPE"));
    directions.Add(new SemanticResultValue("Germany", "GERMANY"));
    directions.Add(new SemanticResultValue("Switzerland", "SWITZERLAND"));
    directions.Add(new SemanticResultValue("Amsterdam", "AMSTERDAM"));
    directions.Add(new SemanticResultValue("Belgium", "BELGIUM"));
    directions.Add(new SemanticResultValue("Hildesheim", "HILDESHEIM"));
    directions.Add(new SemanticResultValue("Hamburg", "HAMBURG"));
    directions.Add(new SemanticResultValue("Berlin", "BERLIN"));
    directions.Add(new SemanticResultValue("Prague", "PRAGUE"));
    directions.Add(new SemanticResultValue("Sylt", "SYLT"));
    directions.Add(new SemanticResultValue("Paris", "PARIS"));
    directions.Add(new SemanticResultValue("Great Pyramid", "GREAT"));
    directions.Add(new SemanticResultValue("Effiel Tower", "Tower"));
    directions.Add(new SemanticResultValue("Taj Mahal", "TAj"));
    directions.Add(new SemanticResultValue("Pisa", "PISA"));
    directions.Add(new SemanticResultValue("Venice", "VENICE"));
    //tools
    directions.Add(new SemanticResultValue("Fly", "FLY"));
    directions.Add(new SemanticResultValue("walk", "WALK"));
    directions.Add(new SemanticResultValue("valk", "WALK"));
    directions.Add(new SemanticResultValue("Back", "BACK"));
    directions.Add(new SemanticResultValue("photo", "PHOTO"));

    var gb = new GrammarBuilder { Culture = ri.Culture };
    gb.Append(directions);
    // Wildcard so that trailing words after the command do not hurt accuracy.
    gb.AppendWildcard();
    var grr = new Grammar(gb);
    this.speechEngine.LoadGrammar(grr);
    this.speechEngine.SpeechRecognized += this.SpeechRecognized;          // called if speech recognized
    this.speechEngine.SpeechRecognitionRejected += this.SpeechRejected;  // called if speech rejected

    // let the convertStream know speech is going active
    this.convertStream.SpeechActive = true;
    this.speechEngine.SetInputToAudioStream(
        this.convertStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);
}