private void OnPartRotate(NetworkInMessage msg)
{
    // We read the user ID but we don't use it here
    msg.ReadInt64();

    // Get the part number and then the part based on it
    var number = msg.ReadInt16();
    var part = partList[number];

    part.SetData(msg.ReadFloat(), msg.ReadFloat(), msg.ReadInt16() > 0, msg.ReadInt16() > 0);
    inControl = false;
}
void AppStageUpdate(NetworkInMessage msg)
{
    var userId = msg.ReadInt64();
    if (userId == CustomMessages.Instance.localUserID)
    {
        return;
    }

    var state = (AppState)msg.ReadInt16();
    if (state == AppState.WaitingForGameStart)
    {
        var usersCount = SharingSessionTracker.Instance.UserIds.Count;
        usersReady++;
    }

    // TODO:
}
/// <summary>
/// Called when a remote user sends an object transform.
/// </summary>
/// <param name="msg"></param>
private void UpdateObjectTransform(NetworkInMessage msg)
{
    // Parse the message.
    // Read the user ID, but we're not using it here.
    long userID = msg.ReadInt64();

    // Get position.
    Vector3 objectPos = SyncMessaging.Instance.ReadVector3(msg);

    // Get rotation.
    Quaternion objectRota = SyncMessaging.Instance.ReadQuaternion(msg);

    // Get scale.
    Vector3 objectScale = SyncMessaging.Instance.ReadVector3(msg);

    // Read out the object number.
    var objNumber = msg.ReadInt16();

    // Now that we know which object changed and its new transform, update ours.
    listOfGameObjToKeepSynced[objNumber].transform.localPosition = objectPos;
    listOfGameObjToKeepSynced[objNumber].transform.localRotation = objectRota;
    listOfGameObjToKeepSynced[objNumber].transform.localScale = objectScale;

    // Finally, set "old" = "new" so that new changes in the transform can be detected.
    AssignOldTransform(objNumber);
}
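UpdateObjectTransform implies a sender that writes the fields in exactly the order they are read back: user ID, position, rotation, scale, then the object number. A minimal sketch of such a sender follows; SendObjectTransform, MessageID.ObjectTransform, CreateMessage, WriteVector3, WriteQuaternion, and the broadcast call are assumptions for illustration, not code from the original project.

// Hypothetical sender matching UpdateObjectTransform's read order.
private void SendObjectTransform(short objNumber, Transform objTransform)
{
    // Assumed helper that creates a NetworkOutMessage already carrying the local user ID.
    NetworkOutMessage msg = CreateMessage((byte)MessageID.ObjectTransform);

    // Write position, rotation and scale in the same order the receiver reads them.
    SyncMessaging.Instance.WriteVector3(msg, objTransform.localPosition);    // assumed counterpart of ReadVector3
    SyncMessaging.Instance.WriteQuaternion(msg, objTransform.localRotation); // assumed counterpart of ReadQuaternion
    SyncMessaging.Instance.WriteVector3(msg, objTransform.localScale);

    // Last field: which synced object this transform belongs to.
    msg.Write(objNumber);

    serverConnection.Broadcast(msg); // assumed broadcast call
}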
// Called when reading in Kinect data
void ReceiveData(NetworkInMessage msg)
{
    // 1) Read message ID type
    byte msgID = msg.ReadByte();
    //Debug.Log("Current msg id: " + msgID);

    int length = 0; // store message length

    switch (currentState)
    {
        case State.WaitingForGeneral:

            if (msgID != (byte)MsgID.GENERAL)
            {
                //Debug.Log("ERROR. Initial message not received.");
                currentState = State.WaitingForGeneral;
                return;
            }

            // A mesh will only be created once.
            // After the first pass, this state will be skipped
            // to Waiting for Depth1
            if (!generalReceived)
            {
                // 2) Read message length
                length = msg.ReadInt32();

                // 3) Read the data
                _Width = msg.ReadInt32();
                _Height = msg.ReadInt32();

                int vertices = _Width * _Height;
                _DepthData = new short[vertices];
                _RedColorData = new byte[vertices];
                _GreenColorData = new byte[vertices];
                _BlueColorData = new byte[vertices];

                //t1 = DateTime.Now.Millisecond - t0;
                //Debug.Log("Time for general:" + t1);

                generalReceived = true;
                currentState = State.CreateMesh;
                Debug.Log("Success! Initial msg processed.");
                return;
            }

            currentState = State.WaitingForDepth1;
            break;

        case State.CreateMesh:

            Debug.Log("Creating mesh");
            CreateMesh(); // Create an empty mesh
            currentState = State.WaitingForDepth1;
            break;

        case State.WaitingForDepth1:

            if (msgID != (byte)MsgID.DEPTH1)
            {
                //Debug.Log("ERROR. Depth1 data not received.");
                currentState = State.WaitingForDepth1;
                return;
            }

            // 2) Read msg length
            length = msg.ReadInt32();
            //depthIndex = 0;

            // 3) Read the data
            for (int i = 0; i < length; i++)
            {
                _DepthData[i] = msg.ReadInt16();
                //depthIndex++;
            }

            /*if ((2 * length) > MAX_PACKET_SIZE) // will need to read two messages
            {
                currentState = State.WaitingForDepth2;
            }
            else
            {
                currentState = State.RefreshMesh;
            }*/

            currentState = State.RefreshMesh;
            Debug.Log("Success! Depth1 msg processed.");
            break;

        case State.WaitingForDepth2:

            if (msgID != (byte)MsgID.DEPTH2)
            {
                //Debug.Log("ERROR. Depth2 data not received.");
                currentState = State.WaitingForDepth1;
                return;
            }

            length = msg.ReadInt32();

            for (int i = depthIndex; i < length; i++)
            {
                _DepthData[i] = msg.ReadInt16();
            }

            currentState = State.RefreshMesh;
            //Debug.Log("Success! Depth2 msg processed.");
            break;

        case State.RefreshMesh:

            RefreshMesh();
            currentState = State.WaitingForDepth1;
            Debug.Log("Refreshing mesh");
            break;
    }
}
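The Kinect receivers in this section all expect the sender to write each message in the order it is read here: a one-byte message ID, an Int32 length, then the payload. As a point of reference, a hypothetical DEPTH1 sender could look like the sketch below; serverConnection, CreateMessage, and Broadcast are assumptions rather than code from the original project.

// Hypothetical sender for a DEPTH1 chunk: msgID, then a count, then 'count' Int16 samples,
// mirroring the read order in ReceiveData above.
void SendDepth1(short[] depthData, int count)
{
    NetworkOutMessage msg = serverConnection.CreateMessage((byte)MsgID.DEPTH1); // assumed setup
    msg.Write((byte)MsgID.DEPTH1);  // read back by msg.ReadByte()
    msg.Write(count);               // read back by msg.ReadInt32()
    for (int i = 0; i < count; i++)
    {
        msg.Write(depthData[i]);    // read back by msg.ReadInt16()
    }
    serverConnection.Broadcast(msg); // assumed broadcast call
}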
/// <summary>
/// Now that we've gotten a message, examine it and dissect the audio data.
/// </summary>
/// <param name="connection"></param>
/// <param name="message"></param>
public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
{
    byte headerSize = message.ReadByte();
    Int32 pack = message.ReadInt32();

    int version = this.versionExtractor.GetBitsValue(pack);
    int audioStreamCount = this.audioStreamCountExtractor.GetBitsValue(pack);
    int channelCount = this.channelCountExtractor.GetBitsValue(pack);
    int sampleRate = this.sampleRateExtractor.GetBitsValue(pack);
    int sampleType = this.sampleTypeExtractor.GetBitsValue(pack);

    int bytesPerSample = sizeof(float);
    if (sampleType == 1)
    {
        bytesPerSample = sizeof(Int16);
    }

    int sampleCount = this.sampleCountExtractor.GetBitsValue(pack);
    int codecType = this.codecTypeExtractor.GetBitsValue(pack);
    int sequenceNumber = this.sequenceNumberExtractor.GetBitsValue(pack);

    Int32 extendedSampleRate = 0;
    if (sampleRate == 0)
    {
        extendedSampleRate = message.ReadInt32();
    }

    this.prominentSpeakerCount = 0;

    for (int i = 0; i < audioStreamCount; i++)
    {
        float averageAmplitude = message.ReadFloat();
        UInt32 hrtfSourceID = (UInt32)message.ReadInt32();
        Vector3 hrtfPosition = new Vector3();
        Vector3 hrtfDirection = new Vector3();

        if (hrtfSourceID != 0)
        {
            hrtfPosition.x = message.ReadFloat();
            hrtfPosition.y = message.ReadFloat();
            hrtfPosition.z = message.ReadFloat();

            hrtfDirection.x = message.ReadFloat();
            hrtfDirection.y = message.ReadFloat();
            hrtfDirection.z = message.ReadFloat();

            Vector3 cameraPosRelativeToGlobalAnchor = Vector3.zero;
            Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

            if (this.GlobalAnchorTransform != null)
            {
                cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                    null,
                    this.GlobalAnchorTransform,
                    Camera.main.transform.position);

                cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                    null,
                    this.GlobalAnchorTransform,
                    Camera.main.transform.position);
            }

            cameraPosRelativeToGlobalAnchor.Normalize();
            cameraDirectionRelativeToGlobalAnchor.Normalize();

            Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
            soundVector.Normalize();

            // x is forward
            float fltx = (m_DropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);

            // y is right
            Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
            float flty = -(m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);

            // z is up
            Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
            float fltz = (m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

            if (this.ShowHRTFInfo)
            {
                UnityEngine.Debug.Log("hrtf = " + fltx + ", " + flty + ", " + fltz);
            }

            // Hacky distance check so we don't get too close to source.
            Vector3 flt = new Vector3(fltx, flty, fltz);
            if (flt.magnitude < (MinimumDistance * m_DropOffMaximum))
            {
                flt = flt.normalized * MinimumDistance * m_DropOffMaximum;
                fltx = flt.x;
                flty = flt.y;
                fltz = flt.z;
            }

            AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
        }

        for (int j = 0; j < channelCount; j++)
        {
            // if uncompressed, size = sampleCount
            Int16 size = (Int16)sampleCount;
            if (codecType != 0)
            {
                // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                size = message.ReadInt16();
            }

            // make this array big enough to hold all of the uncompressed data only if the
            // buffer is not the right size, minimize new operations
            if (this.networkPacketBufferBytes.Length != sampleCount * bytesPerSample)
            {
                this.networkPacketBufferBytes = new byte[sampleCount * bytesPerSample];
            }

            message.ReadArray(this.networkPacketBufferBytes, (uint)(size * bytesPerSample));

            if (codecType != 0)
            {
                // in place decompression please - should fill out the data buffer
                // ...
            }

            if (hrtfSourceID > 0)
            {
                // hrtf processing here
            }

            internalStore(this.networkPacketBufferBytes, 0, this.networkPacketBufferBytes.Length);
        }
    }
}
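The header fields above (version, stream count, sample rate, codec type, and so on) are all packed into the single Int32 read at the top of the method, and the various extractor objects pull out individual bit fields from it. The sketch below shows how such a mask-and-shift extractor typically works; the BitsExtractor name and its constructor are assumptions for illustration, not the library's actual implementation.

// Hypothetical mask-and-shift extractor for fields packed into one Int32.
public class BitsExtractor
{
    private readonly int shift;
    private readonly int mask;

    // e.g. new BitsExtractor(shift: 4, bitCount: 3) reads 3 bits starting at bit 4.
    public BitsExtractor(int shift, int bitCount)
    {
        this.shift = shift;
        this.mask = (1 << bitCount) - 1;
    }

    // Shift the field down to bit 0, then mask off everything above it.
    public int GetBitsValue(int pack)
    {
        return (pack >> shift) & mask;
    }
}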
// Called when reading in Kinect data
void ReceiveData(NetworkInMessage msg)
{
    byte msgID = msg.ReadByte();

    switch (currentState)
    {
        case State.WaitingForGeneral:

            if (msgID != (byte)MsgID.GENERAL)
            {
                Debug.Log("ERROR. Initial message not received.");
            }
            else
            {
                _ClipWidth = msg.ReadInt32();
                _ClipHeight = msg.ReadInt32();

                int vertices = _ClipWidth * _ClipHeight;
                _DepthData = new ushort[vertices];
                _RedColorData = new float[vertices];
                _GreenColorData = new float[vertices];
                _BlueColorData = new float[vertices];

                currentState = State.WaitingForRed;
            }

            break;

        case State.WaitingForDepth1:

            if (msgID != (byte)MsgID.DEPTH1)
            {
                currentState = State.WaitingForDepth1;
            }
            else
            {
                int length = msg.ReadInt32();

                // Reset the running index so the Depth2 message continues where this chunk ends.
                depthIndex = 0;
                for (int i = 0; i < length; i++)
                {
                    _DepthData[i] = (ushort)msg.ReadInt16();
                    depthIndex++;
                }

                currentState = State.WaitingForDepth2;
            }

            break;

        case State.WaitingForDepth2:

            if (msgID != (byte)MsgID.DEPTH2)
            {
                currentState = State.WaitingForDepth1;
            }
            else
            {
                int length = msg.ReadInt32();
                for (int i = depthIndex; i < length; i++)
                {
                    _DepthData[i] = (ushort)msg.ReadInt16();
                }

                currentState = State.WaitingForRed;
            }

            break;

        case State.WaitingForRed:

            if (msgID != (byte)MsgID.RED)
            {
                currentState = State.WaitingForDepth1;
            }
            else
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _RedColorData[i] = (float)msg.ReadByte() / 255f;
                }

                currentState = State.WaitingForGreen;
            }

            break;

        case State.WaitingForGreen:

            if (msgID != (byte)MsgID.GREEN)
            {
                currentState = State.WaitingForDepth1;
            }
            else
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _GreenColorData[i] = (float)msg.ReadByte() / 255f;
                }

                currentState = State.WaitingForBlue;
            }

            break;

        case State.WaitingForBlue:

            if (msgID == (byte)MsgID.BLUE)
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _BlueColorData[i] = (float)msg.ReadByte() / 255f;
                }
            }

            currentState = State.WaitingForDepth1;
            break;
    }
}
// Called when reading in Kinect data
void ReceiveData(NetworkInMessage msg)
{
    // 1) Read message ID type
    byte msgID = msg.ReadByte();
    currentState = (DataReceiver.State)msgID;
    Debug.Log("Current msg id: " + msgID);

    float t0 = DateTime.Now.Millisecond;
    float t1;
    int length = 0;

    switch (currentState)
    {
        case State.WaitingForGeneral:

            /*if (msgID != (byte)MsgID.GENERAL)
            {
                Debug.Log("ERROR. Initial message not received.");
                currentState = State.WaitingForGeneral;
            }
            else
            {*/
            if (!generalReceived)
            {
                // 2) Read message length
                length = msg.ReadInt32();
                Debug.Log("Message length: " + length);

                // 3) Read the data
                width = msg.ReadInt32();
                height = msg.ReadInt32();

                int vertices = width * height;
                _DepthData = new short[vertices];
                _RedColorData = new float[vertices];
                _GreenColorData = new float[vertices];
                _BlueColorData = new float[vertices];

                currentState = State.WaitingForDepth1;

                Debug.Log("Width: " + width);
                Debug.Log("Height: " + height);
                Debug.Log("Success! Initial msg processed.");

                t1 = DateTime.Now.Millisecond - t0;
                Debug.Log("Time for general:" + t1);

                generalReceived = true;
                //}
            }

            break;

        case State.WaitingForDepth1:

            /*if (msgID != (byte)MsgID.DEPTH1)
            {
                Debug.Log("ERROR. Depth1 data not received.");
                currentState = State.WaitingForGeneral;
                return;
            }
            else
            {*/

            // 2) Read msg length
            length = msg.ReadInt32();
            Debug.Log("Message length: " + length);

            depthIndex = 0;

            // 3) Read the data
            for (int i = 0; i < length; i++)
            {
                _DepthData[i] = msg.ReadInt16();
                depthIndex++;
            }

            Debug.Log("index: " + depthIndex);

            currentState = State.WaitingForDepth2;
            //currentState = State.WaitingForGeneral;

            Debug.Log("Success! Depth1 msg processed.");
            Debug.Log("First value: " + _DepthData[140]);

            t1 = DateTime.Now.Millisecond - t0;
            Debug.Log("Time for depth1:" + t1);
            //}

            break;

        case State.WaitingForDepth2:

            /*if (msgID != (byte)MsgID.DEPTH2)
            {
                Debug.Log("ERROR. Depth2 data not received.");
                currentState = State.WaitingForGeneral;
            }
            else
            {*/

            length = msg.ReadInt32();

            for (int i = depthIndex; i < length; i++)
            {
                _DepthData[i] = msg.ReadInt16();
            }

            // Log the last depth value read (length - 1 is the last valid index).
            Debug.Log("last depth data 2: " + _DepthData[length - 1]);
            Debug.Log("Success! Depth2 msg processed.");

            currentState = State.WaitingForGeneral;
            //}

            break;

        /*
        case State.WaitingForRed:

            if (msgID != (byte)MsgID.RED)
            {
                currentState = State.WaitingForGeneral;
            }
            else
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _RedColorData[i] = ((float)(uint)msg.ReadByte()) / 255f;
                }

                currentState = State.WaitingForGreen;
            }

            break;

        case State.WaitingForGreen:

            if (msgID != (byte)MsgID.GREEN)
            {
                currentState = State.WaitingForGeneral;
            }
            else
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _GreenColorData[i] = ((float)(uint)msg.ReadByte()) / 255f;
                }

                currentState = State.WaitingForBlue;
            }

            break;

        case State.WaitingForBlue:

            if (msgID == (byte)MsgID.BLUE)
            {
                int length = msg.ReadInt32();
                for (int i = 0; i < length; i++)
                {
                    _BlueColorData[i] = ((float)(uint)msg.ReadByte()) / 255f;
                }
            }

            currentState = State.WaitingForGeneral;

            break;
        */
    }
}
/// <summary>
/// Now that we've gotten a message, examine it and dissect the audio data.
/// </summary>
/// <param name="connection"></param>
/// <param name="message"></param>
public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
{
    // Unused byte headerSize
    message.ReadByte();

    Int32 pack = message.ReadInt32();

    // Unused int version
    versionExtractor.GetBitsValue(pack);

    int audioStreamCount = audioStreamCountExtractor.GetBitsValue(pack);
    int channelCount = channelCountExtractor.GetBitsValue(pack);
    int sampleRate = sampleRateExtractor.GetBitsValue(pack);
    int sampleType = sampleTypeExtractor.GetBitsValue(pack);

    int bytesPerSample = sizeof(float);
    if (sampleType == 1)
    {
        bytesPerSample = sizeof(Int16);
    }

    int sampleCount = sampleCountExtractor.GetBitsValue(pack);
    int codecType = codecTypeExtractor.GetBitsValue(pack);

    // Unused int sequenceNumber
    sequenceNumberExtractor.GetBitsValue(pack);

    if (sampleRate == 0)
    {
        // Unused int extendedSampleRate
        message.ReadInt32();
    }

    try
    {
        audioDataMutex.WaitOne();

        prominentSpeakerCount = 0;

        for (int i = 0; i < audioStreamCount; i++)
        {
            float averageAmplitude = message.ReadFloat();
            UInt32 hrtfSourceID = (UInt32)message.ReadInt32();
            Vector3 hrtfPosition = new Vector3();
            Vector3 hrtfDirection = new Vector3();

            if (hrtfSourceID != 0)
            {
                hrtfPosition.x = message.ReadFloat();
                hrtfPosition.y = message.ReadFloat();
                hrtfPosition.z = message.ReadFloat();

                hrtfDirection.x = message.ReadFloat();
                hrtfDirection.y = message.ReadFloat();
                hrtfDirection.z = message.ReadFloat();

                Vector3 cameraPosRelativeToGlobalAnchor = Vector3.zero;
                Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

                if (GlobalAnchorTransform != null)
                {
                    cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                        null,
                        GlobalAnchorTransform,
                        CameraCache.Main.transform.position);

                    cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                        null,
                        GlobalAnchorTransform,
                        CameraCache.Main.transform.position);
                }

                cameraPosRelativeToGlobalAnchor.Normalize();
                cameraDirectionRelativeToGlobalAnchor.Normalize();

                Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
                soundVector.Normalize();

                // x is forward
                float fltx = (KDropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);

                // y is right
                Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
                float flty = -(KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);

                // z is up
                Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
                float fltz = (KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

                // Hacky distance check so we don't get too close to source.
                Vector3 flt = new Vector3(fltx, flty, fltz);
                if (flt.magnitude < (MinimumDistance * KDropOffMaximum))
                {
                    flt = flt.normalized * MinimumDistance * KDropOffMaximum;
                    fltx = flt.x;
                    flty = flt.y;
                    fltz = flt.z;
                }

                AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
            }

            for (int j = 0; j < channelCount; j++)
            {
                // if uncompressed, size = sampleCount
                Int16 size = (Int16)sampleCount;

                if (codecType != 0)
                {
                    // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                    size = message.ReadInt16();
                }

                // make this array big enough to hold all of the uncompressed data only if the
                // buffer is not the right size, minimize new operations
                int totalBytes = size * bytesPerSample;
                if (networkPacketBufferBytes.Length != totalBytes)
                {
                    networkPacketBufferBytes = new byte[totalBytes];
                }

                message.ReadArray(networkPacketBufferBytes, (uint)totalBytes);

                if (codecType != 0)
                {
                    // in place decompression please - should fill out the data buffer
                    // ...
                }

                if (hrtfSourceID > 0)
                {
                    // TODO hrtf processing here
                }

                circularBuffer.Write(networkPacketBufferBytes, 0, networkPacketBufferBytes.Length);
            }
        }
    }
    catch (Exception e)
    {
        Debug.LogError(e.Message);
    }
    finally
    {
        audioDataMutex.ReleaseMutex();
    }
}
public short ReadDepth(NetworkInMessage msg)
{
    return (short)msg.ReadInt16();
}
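For completeness, a hypothetical call site for ReadDepth, mirroring the depth-reading loops in the receivers above; the surrounding msg, length, and _DepthData names come from those snippets, and the loop itself is only illustrative.

// Illustrative only: read 'length' depth samples through the ReadDepth helper.
int length = msg.ReadInt32();
for (int i = 0; i < length; i++)
{
    _DepthData[i] = ReadDepth(msg);
}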