/// <summary>
/// Called when a new Sensor Frame is received. Records the frame and notifies listeners.
/// </summary>
/// <param name="frame">The newly-received sensor frame.</param>
private void OnNewSensorFrame(SensorFrame frame)
{
	// Keep both the running frame list and the most-recent frame in sync.
	_currentSensorFrames.Add(frame);
	_lastSensorFrame = frame;

	// Fan the update out to subscribers.
	OnSensorsOrGestureUpdated(frame);
}
/// <summary>
/// Runs the calibration loop. While calibrating, samples the gyroscope each frame and
/// waits for it to settle below a motion threshold (after a minimum settling time), or
/// for a hard timeout; then captures the current rotation as the reference rotation.
/// </summary>
private void Update()
{
	// Nothing to do unless a calibration pass is in progress.
	if (!_calibrating)
	{
		return;
	}

	SensorFrame frame = _wearableControl.LastSensorFrame;

	float now = Time.unscaledTime;
	bool minTimeElapsed = now > _calibrationStartTime + _minCalibrationTime;
	bool headIsStill = frame.angularVelocity.value.magnitude < _calibrationMotionThreshold;
	bool timedOut = now > _calibrationStartTime + _maxCalibrationTime;

	if ((minTimeElapsed && headIsStill) || timedOut)
	{
		// Use the current rotation as the reference for all future measurements.
		_referenceRotation = frame.rotation;
		_calibrating = false;

		// Pass along the reference to the rotation matcher on the widget.
		_widgetRotationMatcher.SetRelativeReference(frame.rotation);

		if (CalibrationCompleted != null)
		{
			CalibrationCompleted.Invoke();
		}

		// Spawn the first target after calibration completes.
		Invoke("SpawnTarget", _spawnDelay);
	}
}
/// <summary>
/// Allocates a fresh buffer sized for exactly one encoded <see cref="SensorFrame"/>
/// packet (header + payload + footer) and encodes the frame into it.
/// </summary>
/// <param name="frame">The frame to encode.</param>
/// <returns>A newly-allocated buffer containing the encoded packet.</returns>
public static byte[] EncodeSensorFrameAlloc(SensorFrame frame)
{
	// Payload size is the marshalled size of the frame struct.
	int payloadSize = Marshal.SizeOf(typeof(SensorFrame));
	var buffer = new byte[_headerSize + payloadSize + _footerSize];

	int index = 0;
	EncodeSensorFrame(buffer, ref index, frame);

	return buffer;
}
/// <summary>
/// Called when a new Sensor Frame is received. Records the frame, notifies sensor
/// listeners, and raises a gesture event when the frame carries a detected gesture.
/// </summary>
/// <param name="frame">The newly-received sensor frame.</param>
private void OnNewSensorFrame(SensorFrame frame)
{
	_currentSensorFrames.Add(frame);
	_lastSensorFrame = frame;

	OnSensorsUpdated(frame);

	// A frame may optionally carry a gesture; forward it through the gesture path.
	bool hasGesture = frame.gestureId != GestureId.None;
	if (hasGesture)
	{
		OnGestureDetected(frame.gestureId);
	}
}
/// <summary>
/// Encode a <see cref="SensorFrame"/> packet (header, payload, footer) into the buffer,
/// advancing <paramref name="index"/> past the written bytes.
/// </summary>
/// <param name="buffer">Destination byte buffer.</param>
/// <param name="index">(Ref) Write position within the buffer.</param>
/// <param name="frame">The frame to encode as the packet payload.</param>
public static void EncodeSensorFrame(byte[] buffer, ref int index, SensorFrame frame)
{
	// Header identifies the packet type to the decoder on the far side.
	var header = new PacketHeader(PacketTypeCode.SensorFrame);
	SerializePacket(buffer, ref index, header);

	// Payload is the frame itself.
	SerializePacket(buffer, ref index, frame);

	// Footer terminates the packet so the decoder can validate framing.
	SerializePacket(buffer, ref index, _footer);
}
/// <summary>
/// Writes a human-readable description of the given frame to the console:
/// sensor type, timing, video mode, cropping, stride, and payload size.
/// </summary>
/// <param name="frame">The frame to describe.</param>
public static void DescribeFrame(SensorFrame frame)
{
	var mode = frame.VideoMode;

	Console.WriteLine("Sensor type: {0}", frame.SensorType);
	Console.WriteLine("Timestamp: {0}", frame.Timestamp);
	Console.WriteLine("Index: {0}", frame.FrameIndex);
	Console.WriteLine(
		"Video Mode: {0}, {1} byte(s) pps, {2}x{3} @ {4} fps.",
		mode.PixelFormat,
		mode.PixelFormat.BytesPerPixel(),
		mode.ResolutionX,
		mode.ResolutionY,
		mode.Fps);
	Console.WriteLine("Clipping: {0}", frame.CroppingEnabled);
	Console.WriteLine("Crop: x - {0}, y - {1}", frame.CropOriginX, frame.CropOriginY);
	Console.WriteLine("Stride: {0}", frame.Stride);
	Console.WriteLine("Data size: {0} byte(s)", frame.Data.Size);
}
/// <summary>
/// Applies a force to the rigidbody each physics step, driven by the wearable's
/// rotation sensor: the rotation quaternion's z component steers horizontally and
/// its x component steers vertically.
/// </summary>
void FixedUpdate()
{
	// No input while no device is connected.
	if (wearableControl.ConnectedDevice == null)
	{
		return;
	}

	SensorFrame sensorFrame = wearableControl.LastSensorFrame;
	var tilt = sensorFrame.rotation.value;

	Vector3 movement = new Vector3(tilt.z, 0.0f, tilt.x);
	rb.AddForce(movement * speed);
}
// Runs once per physics step: converts the six-DoF head rotation into a force on
// the rigidbody. The horizontal axis is the negated quaternion z component and the
// vertical axis is the quaternion x component. (Keyboard axis input was deliberately
// disabled in favor of head control.)
void FixedUpdate()
{
	// No input while no device is connected.
	if (wearableControl.ConnectedDevice == null)
	{
		return;
	}

	SensorFrame sensorFrame = wearableControl.LastSensorFrame;
	var tilt = sensorFrame.rotationSixDof.value;

	float moveHorizontal = -tilt.z;
	float moveVertical = tilt.x;

	Vector3 movement = new Vector3(moveHorizontal, 0.0f, moveVertical);
	rb.AddForce(movement * speed);
}
/// <summary>
/// Scales the uncertainty-cone geometry so that it subtends the sensor's current
/// measurement-uncertainty angle without clipping through the sphere.
/// </summary>
private void Update()
{
	if (_wearableControl.ConnectedDevice == null)
	{
		return;
	}

	// No integration is performed, so the most recent frame alone is sufficient.
	SensorFrame frame = _wearableControl.LastSensorFrame;

	// Keep the visualized angle within the configured display range.
	float angle = Mathf.Clamp(frame.rotation.measurementUncertainty, _minAngle, _maxAngle);
	float angleRadians = angle * Mathf.Deg2Rad;

	// The unscaled cone geometry subtends 15 degrees; rescale so it subtends
	// `angle` degrees instead.
	float xyScale = Mathf.Tan(angleRadians) / Mathf.Tan(15.0f * Mathf.Deg2Rad);
	float zScale = Mathf.Cos(angleRadians);

	transform.localScale = new Vector3(xyScale, xyScale, zScale);
}
/// <summary>
/// Fades the reference-glasses material based on how closely the device rotation
/// matches the reference rotation.
/// </summary>
private void Update()
{
	// If no device is connected, skip this frame.
	if (_wearableControl.ConnectedDevice == null)
	{
		return;
	}

	// No integration is performed, so intermediate frames can be safely ignored;
	// only the most recent one matters.
	SensorFrame frame = _wearableControl.LastSensorFrame;

	// Quaternion dot product measures rotational similarity; the absolute value is
	// required because q and -q represent the same rotation.
	float similarity = Mathf.Abs(Quaternion.Dot(frame.rotation, _matcher.ReferenceRotation));

	// Fade the reference glasses out between similarity 0.9 (~35º) and 0.95 (~25º),
	// capping the visible alpha at 0.25.
	float fade = Mathf.Clamp01((0.95f - similarity) / 0.05f);
	_frameFadeMaterial.color = new Color(1.0f, 1.0f, 1.0f, 0.25f * fade);
}
/// <summary>
/// Runs the calibration loop. Waits for the gyroscope to settle below a motion
/// threshold (after a minimum settling time), or for a hard timeout, then captures
/// the current rotation as the reference and notifies listeners.
/// </summary>
void Update()
{
	if (!_calibrating)
	{
		return;
	}

	// Named constants replace the previous magic numbers (5, 1, 10), matching the
	// named-threshold style used by the other calibration routine in this project.
	// Times are in seconds (unscaled); the motion threshold is compared against
	// frame.angularVelocity.value.magnitude.
	const float MinCalibrationTime = 5.0f;
	const float MaxCalibrationTime = 10.0f;
	const float MotionThreshold = 1.0f;

	SensorFrame frame = _wearableControl.LastSensorFrame;

	bool didWaitEnough = Time.unscaledTime > _calibrationStartTime + MinCalibrationTime;
	bool isStationary = frame.angularVelocity.value.magnitude < MotionThreshold;
	bool didTimeout = Time.unscaledTime > _calibrationStartTime + MaxCalibrationTime;

	if ((didWaitEnough && isStationary) || didTimeout)
	{
		_referenceRotation = frame.rotation;
		_calibrating = false;

		// Pass along the reference to the rotation matcher on the widget.
		_matcher.SetRelativeReference(frame.rotation);

		if (CalibrationCompleted != null)
		{
			CalibrationCompleted.Invoke();
		}
	}
}
// Get the latest frames from the HoloLens media frame source groups, map the ToF
// depth frame onto the photo/video frame, and display the result as a texture.
unsafe void UpdateHoloLensMediaFrameSourceGroup()
{
#if ENABLE_WINMD_SUPPORT
	if (!_mediaFrameSourceGroupsStarted ||
		_pvMediaFrameSourceGroup == null ||
		_shortDepthMediaFrameSourceGroup == null)
	{
		return;
	}

	// Get latest sensor frames.
	// Photo video
	SensorFrame latestPvCameraFrame =
		_pvMediaFrameSourceGroup.GetLatestSensorFrame(_sensorType);

	// ToF Depth
	SensorFrame latestShortDepthCameraFrame =
		_shortDepthMediaFrameSourceGroup.GetLatestSensorFrame(_sensorTypeResearch);

	// BUGFIX: either frame may not be available yet (the ArUco variant of this
	// method already guards the PV frame). Bail out instead of dereferencing null
	// below, which also guarantees _depthPvMapper is initialized before use.
	if (latestPvCameraFrame == null || latestShortDepthCameraFrame == null)
	{
		return;
	}

	// Initialize depth pv mapper class to cache the resulting depth transform.
	if (!_isDepthPvMapperInit)
	{
		_depthPvMapper = new DepthPvMapper(latestShortDepthCameraFrame);
		_isDepthPvMapperInit = true;
	}

	// Map depth frames to photo video camera with from/to range and specified radius.
	SensorFrame latestPvDepthFrame = _depthPvMapper.MapDepthToPV(
		latestPvCameraFrame,
		latestShortDepthCameraFrame,
		depthRangeFrom,
		depthRangeTo,
		patchRadius);

	// Convert the frame to a format unity can display.
	var pvDepthFrame = SoftwareBitmap.Convert(
		latestPvDepthFrame.SoftwareBitmap,
		BitmapPixelFormat.Bgra8,
		BitmapAlphaMode.Ignore);

	// Recreate the texture at the incoming resolution, destroying the previous
	// one first so texture memory is not leaked.
	Destroy(_pvDepthTexture);
	_pvDepthTexture = new Texture2D(
		pvDepthFrame.PixelWidth,
		pvDepthFrame.PixelHeight,
		TextureFormat.BGRA32,
		false);

	// Get byte array, update unity material with texture (BGRA: 4 bytes per pixel).
	byte *inBytesPV = GetByteArrayFromSoftwareBitmap(pvDepthFrame);
	_pvDepthTexture.LoadRawTextureData(
		(IntPtr)inBytesPV,
		pvDepthFrame.PixelWidth * pvDepthFrame.PixelHeight * 4);
	_pvDepthTexture.Apply();
	_pvDepthMaterial.mainTexture = _pvDepthTexture;

	myText.text = "Began streaming sensor frames. Double tap to end streaming.";
#endif
}
/// <summary>
/// Per-frame charge logic for a collectible target. Aiming the device at the target
/// fills a charge meter (with audio and animator feedback); aiming away drains it.
/// At full charge the target is marked collected and the destruction animation is
/// triggered; the animator then destroys the object.
/// </summary>
private void Update()
{
	// Once collected, nothing further to do here.
	if (_collected)
	{
		return;
	}

	if (_wearableControl.ConnectedDevice == null)
	{
		return;
	}

	// Get the latest rotation from the device.
	SensorFrame frame = _wearableControl.LastSensorFrame;

	// Apply the reference rotation, then derive the device's relative "forward" vector.
	Vector3 deviceForward = (_inverseReference * frame.rotation) * Vector3.forward;

	// Direction from the parent to this target.
	Vector3 targetDirection = transform.localPosition.normalized;

	// Map the vectors' similarity from [-1, 1] to [0, 1] and drive the layered audio.
	float closeness = 0.5f + 0.5f * Vector3.Dot(deviceForward, targetDirection);
	_sfx.Closeness = closeness;

	// The effective target width grows with measurement uncertainty, up to a hard cap.
	float targetWidth = Mathf.Min(frame.rotation.measurementUncertainty + _targetMargin, _maxTargetWidth);
	bool aimedAtTarget = Vector3.Angle(deviceForward, targetDirection) < targetWidth;

	if (aimedAtTarget)
	{
		// Pointing within the margin of the target: fill the charge.
		_charge += _chargeFillRate * Time.deltaTime;

		if (!_targetLocked)
		{
			_sfx.PlayLockSting();
			_targetLocked = true;
		}

		_animator.SetBool(_animationIsChargingParameter, true);
	}
	else
	{
		// Otherwise, drain the charge.
		_charge -= _chargeEmptyRate * Time.deltaTime;
		_targetLocked = false;
		_animator.SetBool(_animationIsChargingParameter, false);
	}

	// A charge above 1 means the target is "collected". The animator will
	// automatically destroy this target at the end of the destruction animation.
	if (_charge > 1.0f)
	{
		_collected = true;

		if (Collected != null)
		{
			Collected.Invoke(this);
		}

		_animator.SetBool(_animationIsFullyChargedParameter, true);
		_animator.SetBool(_animationIsChargingParameter, false);
		_sfx.PlayCollectSting();
		_sfx.FadeOutAudio();
	}

	// Clamp the charge within [0, 1].
	_charge = Mathf.Clamp01(_charge);

	// Scale the target from 0.5 to 1.5 with charge.
	transform.localScale = Vector3.one * (0.5f + _charge);

	// Set the audio fill level based on charge.
	_sfx.SetChargeLevel(_charge);
}
// Get the latest photo/video frame from the HoloLens media frame source group,
// detect ArUco markers in it, instantiate marker game objects at their world poses,
// and display the camera frame as a texture.
unsafe void UpdateHoloLensMediaFrameSourceGroup()
{
#if ENABLE_WINMD_SUPPORT
	if (!_mediaFrameSourceGroupsStarted ||
		_pvMediaFrameSourceGroup == null)
	{
		return;
	}

	// Destroy all marker gameobject instances from prior frames,
	// otherwise game objects will pile on top of the markers.
	if (_markerGOs.Count != 0)
	{
		foreach (var marker in _markerGOs)
		{
			Destroy(marker);
		}

		// BUGFIX: also drop the stale references. Previously the list was never
		// cleared, so it grew without bound and re-destroyed dead objects each frame.
		_markerGOs.Clear();
	}

	// Get the latest photo/video sensor frame.
	SensorFrame latestPvCameraFrame =
		_pvMediaFrameSourceGroup.GetLatestSensorFrame(_sensorType);

	if (latestPvCameraFrame == null)
	{
		return;
	}

	// Detect ArUco markers in the current frame.
	// https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#void%20Rodrigues(InputArray%20src,%20OutputArray%20dst,%20OutputArray%20jacobian)
	// (The previous throwaway List allocation here was dead code and has been removed.)
	IList<DetectedArUcoMarker> detectedArUcoMarkers =
		_arUcoMarkerTracker.DetectArUcoMarkersInFrame(latestPvCameraFrame);

	// If we detect any markers, instantiate a game object for each.
	if (detectedArUcoMarkers.Count != 0)
	{
		foreach (var detectedMarker in detectedArUcoMarkers)
		{
			// Get pose from OpenCV and format for Unity.
			Vector3 position = CvUtils.Vec3FromFloat3(detectedMarker.Position);
			position.y *= -1f;
			Quaternion rotation = CvUtils.RotationQuatFromRodrigues(
				CvUtils.Vec3FromFloat3(detectedMarker.Rotation));
			Matrix4x4 cameraToWorldUnity = CvUtils.Mat4x4FromFloat4x4(detectedMarker.CameraToWorldUnity);
			Matrix4x4 transformUnityCamera = CvUtils.TransformInUnitySpace(position, rotation);

			// Use the camera-to-world transform to get the world pose of the marker.
			Matrix4x4 transformUnityWorld = cameraToWorldUnity * transformUnityCamera;

			// Instantiate a game object marker in world coordinates.
			var thisGo = Instantiate(
				markerGo,
				CvUtils.GetVectorFromMatrix(transformUnityWorld),
				CvUtils.GetQuatFromMatrix(transformUnityWorld)) as GameObject;

			// Scale the game object to the size of the markers.
			thisGo.transform.localScale = new Vector3(markerSize, markerSize, markerSize);
			_markerGOs.Add(thisGo);
		}
	}

	// Convert the frame to a format unity can display.
	var pvFrame = SoftwareBitmap.Convert(
		latestPvCameraFrame.SoftwareBitmap,
		BitmapPixelFormat.Bgra8,
		BitmapAlphaMode.Ignore);

	// Recreate the texture at the incoming resolution, destroying the previous
	// one first so texture memory is not leaked.
	Destroy(_pvTexture);
	_pvTexture = new Texture2D(
		pvFrame.PixelWidth,
		pvFrame.PixelHeight,
		TextureFormat.BGRA32,
		false);

	// Get byte array, update unity material with texture (BGRA: 4 bytes per pixel).
	byte *inBytesPV = GetByteArrayFromSoftwareBitmap(pvFrame);
	_pvTexture.LoadRawTextureData(
		(IntPtr)inBytesPV,
		pvFrame.PixelWidth * pvFrame.PixelHeight * 4);
	_pvTexture.Apply();
	_pvMaterial.mainTexture = _pvTexture;

	myText.text = "Began streaming sensor frames. Double tap to end streaming.";
#endif
}
// Update is called once per frame
// Mode - 1:Help, 2:Classic, 3:Memory
//
// Main game loop. Handles keyboard input ('1' toggles the instruction overlay,
// '2' starts/continues the classic game, 'h' buys a hint for 100 points), samples
// head orientation from the wearable while a game is running, and — in classic
// mode — checks the player's head pose against the last audio prompt once per
// `delay` interval, updating score, lives, and difficulty.
void Update()
{
	if (Input.GetKeyDown("1"))
	{
		// Toggle the help/instruction overlay (mode 0 <-> mode 1).
		if (GameMode == 0)
		{
			GameMode = 1;
			Instruction.SetActive(true);
			//playaudio(13);
			timeold = Time.time;
		}
		else if (GameMode == 1)
		{
			GameMode = 0;
			Instruction.SetActive(false);
			timeold = Time.time;
		}
	}
	else if (Input.GetKeyDown("2"))
	{
		// One-time setup: connect to the wearable and enable the six-DoF rotation
		// sensor at a 40 ms update interval.
		if (initialize == 0)
		{
			wearableControl = WearableControl.Instance;
			WearableRequirement requirement = GetComponent <WearableRequirement>();
			if (requirement == null)
			{
				requirement = gameObject.AddComponent <WearableRequirement>();
			}
			requirement.EnableSensor(SensorId.RotationSixDof);
			requirement.SetSensorUpdateInterval(SensorUpdateInterval.FortyMs);
			//Debug.Log("Game Started : i = "+ i);
			initialize = 1;
		}
		GameMode = 2;
		playaudio(4);
		timeold = Time.time;
	}
	else if (Input.GetKeyDown("h"))
	{
		// Hint costs 100 points: reveal the current flip state of both axes.
		score = score - 100;
		T_Score.text = "Score : " + score;
		if (fb_flip == 1)
		{
			T_FBStatus.text = "Front-Back : Flipped";
		}
		else
		{
			T_FBStatus.text = "Front-Back : Normal";
		}
		if (lr_flip == 1)
		{
			T_LRStatus.text = "Left-Right : Flipped";
		}
		else
		{
			T_LRStatus.text = "Left-Right : Normal";
		}
		T_Hint.text = "";
	}

	// While a game is running, sample head orientation as Euler angles.
	// movez (roll about z) and movex (pitch about x) are in degrees, [0, 360).
	if (GameMode > 1)
	{
		SensorFrame sensorFrame = wearableControl.LastSensorFrame;
		movez = sensorFrame.rotationSixDof.value.eulerAngles.z;
		movex = sensorFrame.rotationSixDof.value.eulerAngles.x;
	}

	if (GameMode == 0)
	{
		// Idle mode: periodically repeat the prompt audio.
		if (Time.time - timeold >= delay)
		{
			playaudio(14);
			timeold = Time.time;
		}
	}
	/*else if (GameMode == 1){
	 * if(Time.time-timeold >= delay){
	 * GameMode = 0;
	 * T_Score.text = " ";
	 * T_Lives.text = " ";
	 * T_Difficulty.text = " ";
	 * T_LRStatus.text = " ";
	 * T_FBStatus.text = " ";
	 * }
	 * }*/
	else if (GameMode == 2)
	{
		// Classic mode: advance the game state once per `delay` seconds.
		if (Time.time - timeold >= delay)
		{
			//Debug.Log("Actually Started : i = "+ i);
			// info_mode1 == 0 means "answer not yet judged wrong this step";
			// Wrong_Action() presumably sets it nonzero — TODO confirm against Wrong_Action.
			info_mode1 = 0;
			// Checking the head position : Pitch & Roll
			// r_o encodes the last prompt (0-3). For each prompt, the head angle must
			// fall in the expected window (threshold..90 on one side, 270..360-threshold
			// on the other); lr_flip / fb_flip swap which prompt maps to which window.
			if (ctr != 0 && info_mode == 0)
			{
				//Debug.Log(r_o+","+movex+","+movez);
				//Debug.Log("Before Checking : i = "+ i);
				if (lr_flip == 0)
				{
					if (r_o == 1 && !((movez > 270) && (movez < 360 - threshold)))
					{
						Wrong_Action();
					}
					else if (r_o == 3 && !((movez > threshold) && (movez < 90)))
					{
						Wrong_Action();
					}
				}
				else
				{
					// Left-right flipped: prompts 1 and 3 swap expected windows.
					if (r_o == 3 && !((movez > 270) && (movez < 360 - threshold)))
					{
						Wrong_Action();
					}
					else if (r_o == 1 && !((movez > threshold) && (movez < 90)))
					{
						Wrong_Action();
					}
				}
				if (fb_flip == 0)
				{
					if (r_o == 0 && !((movex > threshold) && (movex < 90)))
					{
						Wrong_Action();
					}
					else if (r_o == 2 && !((movex > 270) && (movex < 360 - threshold)))
					{
						Wrong_Action();
					}
				}
				else
				{
					// Front-back flipped: prompts 0 and 2 swap expected windows.
					if (r_o == 2 && !((movex > threshold) && (movex < 90)))
					{
						Wrong_Action();
					}
					else if (r_o == 0 && !((movex > 270) && (movex < 360 - threshold)))
					{
						Wrong_Action();
					}
				}
			}

			// Correct answer this step: award points scaled by difficulty; every
			// 5 consecutive correct answers grants an extra life.
			if (ctr > 0 && info_mode == 0 && info_mode1 == 0)
			{
				score = score + difficulty;
				T_Score.text = "Score : " + score;
				Cont_Correct++;
				playaudio(15);
				info_mode1 = 1;
				if (Cont_Correct == 5)
				{
					Cont_Correct = 0;
					Lives++;
					playaudio(12);
					T_Lives.text = "Lives : " + Lives;
					//info_mode1=1;
				}
			}
			info_mode = info_mode1;

			// Change level after every 5 steps
			if (GameMode != 0 && ctr == 5 && info_mode == 0)
			{
				difficulty++;
				T_FBStatus.text = " ";
				T_LRStatus.text = " ";
				T_Hint.text = "Press 'h' for hint";
				Cont_Correct = 0;
				// Shorten the allowed reaction window by 10% each level.
				reaction_time = reaction_time * (0.9);
				if (difficulty == 16)
				{
					playaudio(10);
				}
				else
				{
					r1 = Random.Range(1, 4); // 1:LR_Flip, 2:FB_Flip, 3:Both
					// Early levels never flip both axes at once: re-roll while r1 == 3.
					if (difficulty <= 5)
					{
						while (r1 == 3)
						{
							r1 = Random.Range(1, 4);
						}
					}
					playaudio(5 + r1);
					ctr = 0;
				}
				info_mode = 1;
				T_Difficulty.text = "Difficulty : " + difficulty;
			}
			timeold = Time.time;

			// Play the next prompt (0-3), re-rolling so it never repeats the
			// previous prompt.
			if (GameMode != 0 && info_mode == 0)
			{
				//Debug.Log("Before Playing : i = "+ i);
				r = Random.Range(0, 4);
				while (r == r_o)
				{
					r = Random.Range(0, 4);
				}
				r_o = r;
				playaudio(r);
				ctr++;
			}
		}
	}
}
// Update is called once per frame.
// Fall-detection loop: reads the wearable's acceleration and, after an initial grace
// period, treats a low vertical acceleration as a fall. A fall arms a countdown
// during which the user can cancel (headShaken); if the countdown completes, an SMS
// alert is sent via Twilio.
void Update()
{
	// Get and store the new acceleration vector.
	SensorFrame frame = _wearableControl.LastSensorFrame;
	Vector3 newAcceleration = frame.acceleration;

	// Store the current acceleration for the next iteration.
	lastFrameAcceleration = newAcceleration;

	Debug.Log("Acceleration vector:" + lastFrameAcceleration);

	// BUGFIX: read the vertical component directly instead of round-tripping through
	// Vector3.ToString() + string.Split. The old path lost precision to the string
	// formatter, depended on the ToString format, and its auxiliary parses were
	// off by one (xVal was parsed from words[1], the Y component, instead of
	// words[0]); those unused locals have been removed.
	decimal vertAccelVal = (decimal)newAcceleration.y;
	Debug.Log(" The Vertical Acceleration is: (" + vertAccelVal + ")");

	counter++;
	Debug.Log("counter is " + counter);

	// A fall can only trigger after a grace period (counter >= 750 frames) and when
	// any previous countdown is either finished (0) or well underway (>= 100).
	if ((vertAccelVal < 5) && (counter >= 750) && (countdown >= 100 || countdown == 0))
	{
		fallen = true;
		headShaken = false;
		countdown = 0;
	}

	if (fallen == true)
	{
		// Alert audio while the fall countdown runs.
		audios.enabled = true;
		Debug.Log("FALL DETECTED");
		countdown++;
	}

	// Check if the user is cancelling the fall.
	if (headShaken == true)
	{
		// If they do cancel, just reset everything.
		fallen = false;
		audios.enabled = false;
		Debug.Log("SMS Cancelled");
	}

	// If enough time has passed and the user hasn't cancelled, send the text.
	if (countdown == 700 && fallen == true)
	{
		Debug.Log("Sending text message");
		textIsSent = true;
		fallen = false;

		// Find your Account Sid and Token at twilio.com/console
		// SECURITY: never commit real credentials to source control — load them from
		// secure configuration instead. See http://twil.io/secure
		const string accountSid = "";
		const string authToken = "";
		TwilioClient.Init(accountSid, authToken);

		var message = MessageResource.Create(
			body: "Your family member may have fallen",
			from: new Twilio.Types.PhoneNumber("+14805256961"),
			to: new Twilio.Types.PhoneNumber("+16475541964")
		);
		Console.WriteLine(message.Sid);
	}

	if (textIsSent == true)
	{
		Debug.Log("TEXT HAS BEEN SENT");
	}
}
/// <summary>
/// Consume a packet from the buffer if possible, then advance the buffer index.
/// The leading type code selects the decode path; payload-bearing packets are decoded
/// first (advancing <paramref name="index"/>), then the footer is validated before the
/// decoded value is surfaced to listeners, so a malformed packet never raises an event.
/// </summary>
/// <param name="buffer">Byte buffer to decode</param>
/// <param name="index">(Ref) Index to read into buffer</param>
/// <exception cref="WearableProxyProtocolException">Thrown when a packet cannot be decoded and the buffer
/// must be discarded.</exception>
/// <exception cref="IndexOutOfRangeException">Thrown when a packet was partially consumed but ran out of
/// buffer contents.</exception>
public override void ProcessPacket(byte[] buffer, ref int index)
{
	PacketTypeCode packetType = DecodePacketType(buffer, ref index);
	switch (packetType)
	{
		// Payload-less packets: validate the footer, then raise the matching event.
		case PacketTypeCode.KeepAlive:
		{
			CheckFooter(buffer, ref index);

			if (KeepAlive != null)
			{
				KeepAlive.Invoke();
			}

			break;
		}
		case PacketTypeCode.PingQuery:
		{
			CheckFooter(buffer, ref index);

			if (PingQuery != null)
			{
				PingQuery.Invoke();
			}

			break;
		}
		case PacketTypeCode.PingResponse:
		{
			CheckFooter(buffer, ref index);

			if (PingResponse != null)
			{
				PingResponse.Invoke();
			}

			break;
		}
		// Payload-bearing packets: decode payload, validate footer, then notify.
		case PacketTypeCode.SensorFrame:
		{
			SensorFrame frame = DecodeSensorFrame(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (NewSensorFrame != null)
			{
				NewSensorFrame.Invoke(frame);
			}

			break;
		}
		case PacketTypeCode.DeviceList:
		{
			Device[] devices = DecodeDeviceList(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (DeviceList != null)
			{
				DeviceList.Invoke(devices);
			}

			break;
		}
		case PacketTypeCode.ConnectionStatus:
		{
			// The device is optional in a connection-status packet.
			Device? device;
			ConnectionState status = DecodeConnectionStatus(buffer, ref index, out device);
			CheckFooter(buffer, ref index);

			if (ConnectionStatus != null)
			{
				ConnectionStatus.Invoke(status, device);
			}

			break;
		}
		case PacketTypeCode.ConfigStatus:
		{
			WearableDeviceConfig config = DeserializeDeviceConfig(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (ConfigStatus != null)
			{
				ConfigStatus.Invoke(config);
			}

			break;
		}
		// Request-direction packet types are valid on the wire but must never be
		// received by this side of the proxy.
		case PacketTypeCode.SetRssiFilter:
		case PacketTypeCode.InitiateDeviceSearch:
		case PacketTypeCode.StopDeviceSearch:
		case PacketTypeCode.ConnectToDevice:
		case PacketTypeCode.DisconnectFromDevice:
		case PacketTypeCode.QueryConnectionStatus:
		case PacketTypeCode.SetNewConfig:
		case PacketTypeCode.QueryConfig:
			// This is a known, but contextually-invalid packet type
			throw new WearableProxyProtocolException(WearableConstants.ProxyProviderInvalidPacketError);
		default:
			// This is an unknown or invalid packet type
			throw new WearableProxyProtocolException(WearableConstants.ProxyProviderInvalidPacketError);
	}
}
/// <summary>
/// Consume a packet from the buffer if possible, then advance the buffer index.
/// The leading type code selects the decode path; payload-bearing packets are decoded
/// first (advancing <paramref name="index"/>), then the footer is validated before the
/// decoded value is surfaced to listeners, so a malformed packet never raises an event.
/// (This variant handles the protocol version with per-sensor/gesture status packets.)
/// </summary>
/// <param name="buffer">Byte buffer to decode</param>
/// <param name="index">(Ref) Index to read into buffer</param>
/// <exception cref="WearableProxyProtocolException">Thrown when a packet cannot be decoded and the buffer
/// must be discarded.</exception>
/// <exception cref="IndexOutOfRangeException">Thrown when a packet was partially consumed but ran out of
/// buffer contents.</exception>
public override void ProcessPacket(byte[] buffer, ref int index)
{
	PacketTypeCode packetType = DecodePacketType(buffer, ref index);
	switch (packetType)
	{
		// Payload-less packets: validate the footer, then raise the matching event.
		case PacketTypeCode.KeepAlive:
		{
			CheckFooter(buffer, ref index);

			if (KeepAlive != null)
			{
				KeepAlive.Invoke();
			}

			break;
		}
		case PacketTypeCode.PingQuery:
		{
			CheckFooter(buffer, ref index);

			if (PingQuery != null)
			{
				PingQuery.Invoke();
			}

			break;
		}
		case PacketTypeCode.PingResponse:
		{
			CheckFooter(buffer, ref index);

			if (PingResponse != null)
			{
				PingResponse.Invoke();
			}

			break;
		}
		// Payload-bearing packets: decode payload, validate footer, then notify.
		case PacketTypeCode.SensorFrame:
		{
			SensorFrame frame = DecodeSensorFrame(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (NewSensorFrame != null)
			{
				NewSensorFrame.Invoke(frame);
			}

			break;
		}
		case PacketTypeCode.DeviceList:
		{
			Device[] devices = DecodeDeviceList(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (DeviceList != null)
			{
				DeviceList.Invoke(devices);
			}

			break;
		}
		case PacketTypeCode.ConnectionStatus:
		{
			// The device is optional in a connection-status packet.
			Device? device;
			ConnectionState status = DecodeConnectionStatus(buffer, ref index, out device);
			CheckFooter(buffer, ref index);

			if (ConnectionStatus != null)
			{
				ConnectionStatus.Invoke(status, device);
			}

			break;
		}
		case PacketTypeCode.SensorStatus:
		{
			// Reports one sensor's enabled/disabled state.
			bool enabled;
			SensorId sensor = DecodeSensorStatus(buffer, ref index, out enabled);
			CheckFooter(buffer, ref index);

			if (SensorStatus != null)
			{
				SensorStatus.Invoke(sensor, enabled);
			}

			break;
		}
		case PacketTypeCode.UpdateIntervalValue:
		{
			SensorUpdateInterval rate = DecodeUpdateInterval(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (SensorUpdateIntervalValue != null)
			{
				SensorUpdateIntervalValue.Invoke(rate);
			}

			break;
		}
		case PacketTypeCode.GestureStatus:
		{
			// Reports one gesture's enabled/disabled state.
			bool enabled;
			GestureId gesture = DecodeGestureStatus(buffer, ref index, out enabled);
			CheckFooter(buffer, ref index);

			if (GestureStatus != null)
			{
				GestureStatus.Invoke(gesture, enabled);
			}

			break;
		}
		case PacketTypeCode.RotationSourceValue:
		{
			RotationSensorSource source = DecodeRotationSource(buffer, ref index);
			CheckFooter(buffer, ref index);

			if (RotationSourceValue != null)
			{
				RotationSourceValue.Invoke(source);
			}

			break;
		}
		// Request-direction packet types are valid on the wire but must never be
		// received by this side of the proxy.
		case PacketTypeCode.SensorControl:
		case PacketTypeCode.SetRssiFilter:
		case PacketTypeCode.InitiateDeviceSearch:
		case PacketTypeCode.StopDeviceSearch:
		case PacketTypeCode.ConnectToDevice:
		case PacketTypeCode.DisconnectFromDevice:
		case PacketTypeCode.QueryConnectionStatus:
		case PacketTypeCode.QueryUpdateInterval:
		case PacketTypeCode.SetUpdateInterval:
		case PacketTypeCode.QuerySensorStatus:
		case PacketTypeCode.GestureControl:
		case PacketTypeCode.QueryRotationSource:
		case PacketTypeCode.SetRotationSource:
			// This is a known, but contextually-invalid packet type
			throw new WearableProxyProtocolException(WearableConstants.ProxyProviderInvalidPacketError);
		default:
			// This is an unknown or invalid packet type
			throw new WearableProxyProtocolException(WearableConstants.ProxyProviderInvalidPacketError);
	}
}