// Reads a cube position from the message (the sender's user id is consumed and discarded).
public static Vector3 ReadCubePosition(NetworkInMessage msg)
{
    // Skip the user id; it is not needed here.
    msg.ReadInt64();

    // X, Y, Z are read in the same order they were written when the cube was sent.
    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    return new Vector3(x, y, z);
}
// Handles a part-rotate network message: applies the received state to the
// matching part and gives up local control.
private void OnPartRotate(NetworkInMessage msg)
{
    // The sender's user id is consumed but not used here.
    msg.ReadInt64();

    // Look up the target part by the transmitted index.
    short partIndex = msg.ReadInt16();
    var targetPart = partList[partIndex];

    // Read the payload in the exact order it was written by the sender.
    float firstValue = msg.ReadFloat();
    float secondValue = msg.ReadFloat();
    bool firstFlag = msg.ReadInt16() > 0;
    bool secondFlag = msg.ReadInt16() > 0;
    targetPart.SetData(firstValue, secondValue, firstFlag, secondFlag);

    inControl = false;
}
// Applies a networked stage transform (position/rotation/scale) and a render flag.
// NOTE: keeps the original (misspelled) identifier because it is registered as a
// message-handler callback elsewhere.
void OnStageTransfrom(NetworkInMessage msg)
{
    // Skip the sender's user id.
    msg.ReadInt64();

    // Apply the transmitted transform; read order must match the sender's writes.
    transform.localPosition = CustomMessages.Instance.ReadVector3(msg) + adjustPosition;
    transform.localRotation = CustomMessages.Instance.ReadQuaternion(msg);
    transform.localScale = CustomMessages.Instance.ReadVector3(msg);

    // The render flag travels as a float: 0 = hidden, 1 = visible.
    // (Exact comparison is safe because the sender writes the literals 0.0f / 1.0f.)
    float renderGot = msg.ReadFloat();
    if (renderGot == 0.0f)
    {
        // Only toggle when the local state disagrees with the network state.
        if (render)
        {
            DisableModel();
        }
    }
    else if (renderGot == 1.0f)
    {
        if (!render)
        {
            EnableModel();
        }
    }
    else
    {
        Debug.Log("RENDER STATUS ERROR");
    }
}
// Updates (or lazily creates) the per-user brush from a network message:
// shape follows the local default, color (RGBA) and size come from the message.
void OnUpdateBrush(NetworkInMessage msg)
{
    long userId = msg.ReadInt64();

    // Single TryGetValue instead of ContainsKey + Add + indexer (avoids
    // multiple hash lookups on every message).
    P3D_Brush userBrush;
    if (!UsersBrushDictionary.TryGetValue(userId, out userBrush))
    {
        // First message from this user: start from the default brush settings.
        userBrush = new P3D_Brush();
        userBrush.Color = DefaultBrush.Color;
        userBrush.Shape = DefaultBrush.Shape;
        userBrush.Size = DefaultBrush.Size;
    }

    // Overwrite with the values carried by the message; read order matches the sender.
    userBrush.Shape = DefaultBrush.Shape;
    userBrush.Color = new Color(msg.ReadFloat(), msg.ReadFloat(), msg.ReadFloat(), msg.ReadFloat());
    userBrush.Size = new Vector2(msg.ReadFloat(), msg.ReadFloat());

    // Store back — required when P3D_Brush is a value type.
    UsersBrushDictionary[userId] = userBrush;
}
// Applies a networked paint-bucket fill (RGBA color) to a model instance
// identified by its Guid string; unknown instances are ignored.
void OnPaintbucket(NetworkInMessage msg)
{
    // We read the user ID but we don't use it here.
    msg.ReadInt64();

    // Parse the instance id once (the original constructed the Guid twice).
    Guid instanceId = new Guid(msg.ReadString());

    // Single lookup instead of ContainsKey followed by the indexer.
    GameObject model;
    if (!ActiveModelsDictionary.TryGetValue(instanceId, out model))
    {
        return;
    }

    // RGBA components, in the order they were written by the sender.
    float r = msg.ReadFloat();
    float g = msg.ReadFloat();
    float b = msg.ReadFloat();
    float a = msg.ReadFloat();
    model.GetComponent<TexturePainter>().Paintbucket(new Color(r, g, b, a));
}
/// <summary>
/// Reads a float value from the message. Unless <paramref name="onlyValue"/> is true,
/// the sender's user id (long) and a string tag are read first.
/// </summary>
/// <param name="msg">The incoming network message.</param>
/// <param name="onlyValue">When true, the user id / tag prefix is not read.</param>
/// <returns>A SharingData carrying the user id, tag, and float value.</returns>
public SharingData ReadFloat(NetworkInMessage msg, bool onlyValue = false)
{
    long userId = 0;
    string tag = "";

    // Firstly we read user_id and message tag if onlyValue == false.
    ReadIdAndTag(msg, onlyValue, out userId, out tag);

    // Lastly we read the value — it is always the final field.
    var value = msg.ReadFloat();
    return new SharingData(userId, tag, value);
}
// Dispatches an incoming message (type byte, sender id, string key, float payload)
// to the handler registered for its message type.
private void OnMessageReceived(NetworkConnection connection, NetworkInMessage msg)
{
    // Fixed header layout: type byte, sender id, string key, float count.
    var messageType = msg.ReadByte();
    var userId = msg.ReadInt64();
    string messageKey = msg.ReadString();
    var floatCount = msg.ReadInt32();
    UnityEngine.Debug.Log($"message type: {messageType}, key:{messageKey}, count:{floatCount}");

    // Presize the list so the payload loop never re-allocates.
    var floats = new List<float>(floatCount);
    for (var i = 0; i < floatCount; i++)
    {
        floats.Add(msg.ReadFloat());
    }

    // TryGetValue: the original indexer threw KeyNotFoundException for an
    // unregistered message type before the null-conditional invoke could run.
    if (_messageHandlers.TryGetValue((HoloMessageType)messageType, out var functionToCall))
    {
        functionToCall?.Invoke(userId, messageKey, floats);
    }
}
// Applies a networked animator parameter change (bool/int/float/trigger),
// identified by the parameter's name hash, to the local NetworkAnimator.
private void UpdateAnimationHash(NetworkInMessage msg)
{
    // The sender's user id is consumed but not used.
    msg.ReadInt64();

    // Payload: parameter name hash, value kind, and the value encoded as a float.
    int paramHash = msg.ReadInt32();
    int paramType = msg.ReadInt32();
    float paramValue = msg.ReadFloat();

    if (NetworkAnimator == null)
    {
        return;
    }

    // Cache the animator's parameter list on first use.
    if (animatorHashes == null)
    {
        animatorHashes = NetworkAnimator.parameters;
    }

    // Scan every parameter; each one whose name hash matches is updated
    // (no early exit, matching the original scan-all behavior).
    foreach (var parameter in animatorHashes)
    {
        if (parameter.nameHash != paramHash)
        {
            continue;
        }

        switch (paramType)
        {
            case (int)NetworkMessages.AnimationTypes.Boolean:
                NetworkAnimator.SetBool(paramHash, paramValue >= 0.5);
                break;
            case (int)NetworkMessages.AnimationTypes.Integer:
                NetworkAnimator.SetInteger(paramHash, (int)paramValue);
                break;
            case (int)NetworkMessages.AnimationTypes.Float:
                NetworkAnimator.SetFloat(paramHash, paramValue);
                break;
            case (int)NetworkMessages.AnimationTypes.Trigger:
                NetworkAnimator.SetTrigger(paramHash);
                break;
            default:
                break;
        }
    }
}
/// <summary>Reads four floats (x, y, z, w) from the message as a Quaternion.</summary>
public Quaternion ReadQuaternion(NetworkInMessage msg)
{
    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    float w = msg.ReadFloat();
    return new Quaternion(x, y, z, w);
}
/// <summary>Reads three floats (x, y, z) from the message as a Vector3.</summary>
public Vector3 ReadVector3(NetworkInMessage msg)
{
    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    return new Vector3(x, y, z);
}
// Reads a cube position (three floats) after discarding the sender's user id.
// NOTE: keeps the original (misspelled) name because callers reference it.
public static Vector3 ReadCubePostion(NetworkInMessage msg)
{
    msg.ReadInt64(); // skip the user id

    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    return new Vector3(x, y, z);
}
/// <summary>
/// Now that we've gotten a message, examine it and dissect the audio data.
/// </summary>
/// <param name="connection">Connection the packet arrived on (unused).</param>
/// <param name="message">The raw incoming audio network message.</param>
public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
{
    // NOTE(review): headerSize is read (advancing the cursor) but never used.
    byte headerSize = message.ReadByte();

    // All header fields are packed into a single Int32; each extractor pulls
    // its own bit range out of it.
    Int32 pack = message.ReadInt32();

    // NOTE(review): version and sequenceNumber below are extracted but unused here.
    int version = this.versionExtractor.GetBitsValue(pack);
    int audioStreamCount = this.audioStreamCountExtractor.GetBitsValue(pack);
    int channelCount = this.channelCountExtractor.GetBitsValue(pack);
    int sampleRate = this.sampleRateExtractor.GetBitsValue(pack);
    int sampleType = this.sampleTypeExtractor.GetBitsValue(pack);

    // sampleType == 1 means 16-bit integer samples; otherwise 32-bit floats.
    int bytesPerSample = sizeof(float);
    if (sampleType == 1)
    {
        bytesPerSample = sizeof(Int16);
    }

    int sampleCount = this.sampleCountExtractor.GetBitsValue(pack);
    int codecType = this.codecTypeExtractor.GetBitsValue(pack);
    int sequenceNumber = this.sequenceNumberExtractor.GetBitsValue(pack);

    // sampleRate == 0 is a sentinel: the real rate follows as a full Int32.
    // NOTE(review): extendedSampleRate is read but never consumed afterwards.
    Int32 extendedSampleRate = 0;
    if (sampleRate == 0)
    {
        extendedSampleRate = message.ReadInt32();
    }

    this.prominentSpeakerCount = 0;

    // One block per audio stream carried in this packet.
    for (int i = 0; i < audioStreamCount; i++)
    {
        float averageAmplitude = message.ReadFloat();
        UInt32 hrtfSourceID = (UInt32)message.ReadInt32();
        Vector3 hrtfPosition = new Vector3();
        Vector3 hrtfDirection = new Vector3();

        // A non-zero HRTF source id means spatial position/direction data follows.
        if (hrtfSourceID != 0)
        {
            hrtfPosition.x = message.ReadFloat();
            hrtfPosition.y = message.ReadFloat();
            hrtfPosition.z = message.ReadFloat();

            hrtfDirection.x = message.ReadFloat();
            hrtfDirection.y = message.ReadFloat();
            hrtfDirection.z = message.ReadFloat();

            Vector3 cameraPosRelativeToGlobalAnchor = Vector3.zero;
            Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

            if (this.GlobalAnchorTransform != null)
            {
                cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                    null,
                    this.GlobalAnchorTransform,
                    Camera.main.transform.position);

                // NOTE(review): this passes the camera's *position* to
                // TransformDirectionFromTo; a direction vector (e.g.
                // transform.forward) looks intended — confirm against the sender.
                cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                    null,
                    this.GlobalAnchorTransform,
                    Camera.main.transform.position);
            }

            cameraPosRelativeToGlobalAnchor.Normalize();
            cameraDirectionRelativeToGlobalAnchor.Normalize();

            // Vector from the camera to the sound source, normalized.
            Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
            soundVector.Normalize();

            // x is forward
            float fltx = (m_DropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);

            // y is right
            Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
            float flty = -(m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);

            // z is up
            Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
            float fltz = (m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

            if (this.ShowHRTFInfo)
            {
                UnityEngine.Debug.Log("hrtf = " + fltx + ", " + flty + ", " + fltz);
            }

            // Hacky distance check so we don't get too close to source.
            Vector3 flt = new Vector3(fltx, flty, fltz);
            if (flt.magnitude < (MinimumDistance * m_DropOffMaximum))
            {
                flt = flt.normalized * MinimumDistance * m_DropOffMaximum;
                fltx = flt.x;
                flty = flt.y;
                fltz = flt.z;
            }

            AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
        }

        // Read each channel's sample payload.
        for (int j = 0; j < channelCount; j++)
        {
            // if uncompressed, size = sampleCount
            Int16 size = (Int16)sampleCount;
            if (codecType != 0)
            {
                // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                size = message.ReadInt16();
            }

            // make this array big enough to hold all of the uncompressed data only if the
            // buffer is not the right size, minimize new operations
            // NOTE(review): the buffer is sized from sampleCount but ReadArray below
            // uses size * bytesPerSample; when compressed (size != sampleCount) these
            // can disagree — confirm whether the compressed path is exercised.
            if (this.networkPacketBufferBytes.Length != sampleCount * bytesPerSample)
            {
                this.networkPacketBufferBytes = new byte[sampleCount * bytesPerSample];
            }

            message.ReadArray(this.networkPacketBufferBytes, (uint)(size * bytesPerSample));

            if (codecType != 0)
            {
                // in place decompression please - should fill out the data buffer
                // ...
            }

            if (hrtfSourceID > 0)
            {
                // hrtf processing here
            }

            internalStore(this.networkPacketBufferBytes, 0, this.networkPacketBufferBytes.Length);
        }
    }
}
/// <summary>Reads three consecutive floats from the message as a Vector3 (x, y, z).</summary>
public Vector3 ReadVector3(NetworkInMessage msg)
{
    return new Vector3(msg.ReadFloat(), msg.ReadFloat(), msg.ReadFloat());
}
/// <summary>
/// Now that we've gotten a message, examine it and dissect the audio data.
/// </summary>
/// <param name="connection">Connection the packet arrived on (unused).</param>
/// <param name="message">The raw incoming audio network message.</param>
public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
{
    // Unused byte headerSize
    message.ReadByte();

    // All header fields are packed into a single Int32; each extractor pulls
    // its own bit range out of it.
    Int32 pack = message.ReadInt32();

    // Unused int version
    versionExtractor.GetBitsValue(pack);
    int audioStreamCount = audioStreamCountExtractor.GetBitsValue(pack);
    int channelCount = channelCountExtractor.GetBitsValue(pack);
    int sampleRate = sampleRateExtractor.GetBitsValue(pack);
    int sampleType = sampleTypeExtractor.GetBitsValue(pack);

    // sampleType == 1 means 16-bit integer samples; otherwise 32-bit floats.
    int bytesPerSample = sizeof(float);
    if (sampleType == 1)
    {
        bytesPerSample = sizeof(Int16);
    }

    int sampleCount = sampleCountExtractor.GetBitsValue(pack);
    int codecType = codecTypeExtractor.GetBitsValue(pack);

    // Unused int sequenceNumber
    sequenceNumberExtractor.GetBitsValue(pack);

    // sampleRate == 0 is a sentinel meaning an extended rate follows as a full Int32.
    if (sampleRate == 0)
    {
        // Unused int extendedSampleRate
        message.ReadInt32();
    }

    try
    {
        // Serialize access to the shared speaker/buffer state.
        audioDataMutex.WaitOne();
        prominentSpeakerCount = 0;

        // One block per audio stream carried in this packet.
        for (int i = 0; i < audioStreamCount; i++)
        {
            float averageAmplitude = message.ReadFloat();
            UInt32 hrtfSourceID = (UInt32)message.ReadInt32();
            Vector3 hrtfPosition = new Vector3();
            Vector3 hrtfDirection = new Vector3();

            // A non-zero HRTF source id means spatial position/direction data follows.
            if (hrtfSourceID != 0)
            {
                hrtfPosition.x = message.ReadFloat();
                hrtfPosition.y = message.ReadFloat();
                hrtfPosition.z = message.ReadFloat();

                hrtfDirection.x = message.ReadFloat();
                hrtfDirection.y = message.ReadFloat();
                hrtfDirection.z = message.ReadFloat();

                Vector3 cameraPosRelativeToGlobalAnchor = Vector3.zero;
                Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

                if (GlobalAnchorTransform != null)
                {
                    cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                        null,
                        GlobalAnchorTransform,
                        CameraCache.Main.transform.position);

                    // NOTE(review): this passes the camera's *position* to
                    // TransformDirectionFromTo; a direction vector (e.g.
                    // transform.forward) looks intended — confirm against the sender.
                    cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                        null,
                        GlobalAnchorTransform,
                        CameraCache.Main.transform.position);
                }

                cameraPosRelativeToGlobalAnchor.Normalize();
                cameraDirectionRelativeToGlobalAnchor.Normalize();

                // Vector from the camera to the sound source, normalized.
                Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
                soundVector.Normalize();

                // x is forward
                float fltx = (KDropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);

                // y is right
                Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
                float flty = -(KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);

                // z is up
                Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
                float fltz = (KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

                // Hacky distance check so we don't get too close to source.
                Vector3 flt = new Vector3(fltx, flty, fltz);
                if (flt.magnitude < (MinimumDistance * KDropOffMaximum))
                {
                    flt = flt.normalized * MinimumDistance * KDropOffMaximum;
                    fltx = flt.x;
                    flty = flt.y;
                    fltz = flt.z;
                }

                AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
            }

            // Read each channel's sample payload.
            for (int j = 0; j < channelCount; j++)
            {
                // if uncompressed, size = sampleCount
                Int16 size = (Int16)sampleCount;
                if (codecType != 0)
                {
                    // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                    size = message.ReadInt16();
                }

                // make this array big enough to hold all of the uncompressed data only if the
                // buffer is not the right size, minimize new operations
                int totalBytes = size * bytesPerSample;
                if (networkPacketBufferBytes.Length != totalBytes)
                {
                    networkPacketBufferBytes = new byte[totalBytes];
                }

                message.ReadArray(networkPacketBufferBytes, (uint)(totalBytes));

                if (codecType != 0)
                {
                    // in place decompression please - should fill out the data buffer
                    // ...
                }

                if (hrtfSourceID > 0)
                {
                    // TODO hrtf processing here
                }

                circularBuffer.Write(networkPacketBufferBytes, 0, networkPacketBufferBytes.Length);
            }
        }
    }
    catch (Exception e)
    {
        Debug.LogError(e.Message);
    }
    finally
    {
        // Always release, even when parsing throws mid-packet.
        audioDataMutex.ReleaseMutex();
    }
}
// Reads a networked rotation update: skips the sender's user id, then stores
// the received quaternion (x, y, z, w) as the new rotation target.
private void updateRotation(NetworkInMessage msg)
{
    // The user id is not needed here.
    msg.ReadInt64();

    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    float w = msg.ReadFloat();
    targetRotation = new Quaternion(x, y, z, w);
}
// Logs the single float payload carried by the message.
void GetData(NetworkInMessage msg, long userId)
{
    float data = msg.ReadFloat();
    Debug.Log("Got a Data: " + data);
}
/// <summary>Reads a single float from the message.</summary>
public float ReadFloat(NetworkInMessage msg)
{
    var value = msg.ReadFloat();
    return value;
}
// Reads a quaternion transmitted as four consecutive floats (x, y, z, w).
private Quaternion GetQuaternion(NetworkInMessage msg)
{
    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    float w = msg.ReadFloat();
    return new Quaternion(x, y, z, w);
}
// Reads a vector transmitted as three consecutive floats (x, y, z).
private Vector3 GetVector3(NetworkInMessage msg)
{
    float x = msg.ReadFloat();
    float y = msg.ReadFloat();
    float z = msg.ReadFloat();
    return new Vector3(x, y, z);
}