Example #1
    void OnTexture2DReceived(NetworkInMessage msg)
    {
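        // Message layout: sender's user ID, instance GUID string, texture width and height,
        // payload byte count, then the encoded image bytes.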
        // We read the user ID but we don't use it here.
        msg.ReadInt64();

        string instanceUid = msg.ReadString();

        if (!ActiveModelsDictionary.ContainsKey(new Guid(instanceUid)))
        {
            return;
        }

        int w = msg.ReadInt32();
        int h = msg.ReadInt32();

        uint len = (uint)msg.ReadInt32();

        byte[] data = new byte[len];

        msg.ReadArray(data, len);

        Texture2D texture = new Texture2D(w, h);

        texture.LoadImage(data);

        GameObject model = ActiveModelsDictionary[new Guid(instanceUid)];

        model.GetComponent<TexturePainter>().SetTexture(texture);
    }
Example #2
        /// <summary>
        /// Called when a remote user sends a head transform.
        /// </summary>
        /// <param name="msg"></param>
        void UpdateHeadTransform(NetworkInMessage msg)
        {
            // Parse the message
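            // Message layout: user ID, length-prefixed ASCII IP string, head position, then head rotation.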
            long userID = msg.ReadInt64();
            long ipSize = msg.ReadInt64();

            byte[] ipData = new byte[(uint)ipSize];
            msg.ReadArray(ipData, (uint)ipSize);

            Vector3    headPos = SV_CustomMessages.Instance.ReadVector3(msg);
            Quaternion headRot = SV_CustomMessages.Instance.ReadQuaternion(msg);

            RemoteHeadInfo headInfo = GetRemoteHeadInfo(userID);

            if (headInfo.HeadObject != null)
            {
#if UNITY_EDITOR
                if (HolographicCameraManager.Instance != null &&
                    HolographicCameraManager.Instance.tppcUser != null &&
                    HolographicCameraManager.Instance.tppcUser.GetID() == userID)
                {
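                    // This sender is the third-person camera (tppc) user: attach SV_Camera and
                    // NetworkMovement to its head object and feed them the pose instead of
                    // applying it to the transform directly.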
                    if (SV_Cam == null)
                    {
                        SV_Cam = headInfo.HeadObject.AddComponent<SV_Camera>();
                    }

                    if (networkMovement == null)
                    {
                        networkMovement = headInfo.HeadObject.AddComponent<NetworkMovement>();
                    }

                    // update SV_cam info
                    SV_Cam.info = headInfo;
                    // update SV_cam position and rotation
                    SV_Cam.position = headPos;
                    SV_Cam.rotation = headRot;

                    networkMovement.AddTransform(new NetworkMovement.NetworkTransform()
                    {
                        Position = headPos,
                        Rotation = headRot
                    });
                }
                else
#endif
                {
                    headInfo.HeadObject.transform.localPosition = headPos;
                    headInfo.HeadObject.transform.localRotation = headRot;
                }

                if (string.IsNullOrEmpty(headInfo.IP))
                {
                    headInfo.IP = System.Text.Encoding.ASCII.GetString(ipData);
                }
            }
        }
Example #3
    // Reads a length-prefixed ASCII string: a 64-bit byte count followed by the raw bytes.
    private string GetString(NetworkInMessage msg)
    {
        long strSize = msg.ReadInt64();

        byte[] strData = new byte[(uint)strSize];
        msg.ReadArray(strData, (uint)strSize);

        return System.Text.Encoding.ASCII.GetString(strData);
    }
    public string ReadIP(NetworkInMessage msg)
    {
        long ipSize = msg.ReadInt64();

        byte[] ipData = new byte[(uint)ipSize];
        msg.ReadArray(ipData, (uint)ipSize);

        return System.Text.Encoding.ASCII.GetString(ipData);
    }
    public byte[] ReadByteArray(NetworkInMessage msg)
    {
        int length = msg.ReadInt32();

        // Allocate exactly the number of bytes the sender wrote.
        byte[] result = new byte[length];

        msg.ReadArray(result, (uint)length);

        return result;
    }
    public String ReadString(NetworkInMessage msg)
    {
        int length = msg.ReadInt32();

        // Allocate exactly the number of bytes the sender wrote, so the decoded
        // string does not pick up padding from an oversized buffer.
        byte[] result = new byte[length];

        msg.ReadArray(result, (uint)length);

        return System.Text.Encoding.Default.GetString(result);
    }
Example #7
    public static byte[] ReadIRImageByArray(NetworkInMessage msg)
    {
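        // Message layout: sender's user ID (skipped), image byte count, then the raw IR image bytes.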
        byte[] tempImage;
        int    length = 0;

        msg.ReadInt64();
        length    = msg.ReadInt32();
        tempImage = new byte[length];

        msg.ReadArray(tempImage, Convert.ToUInt32(length));

        return tempImage;
    }
Example #8
    /// <summary>
    /// Called when a remote user shares a voice line.
    /// </summary>
    /// <param name="msg"></param>
    private void SharedVoiceLine(NetworkInMessage msg)
    {
        // Parse the message
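        // Message layout: user ID, position, rotation, then a fixed 1024-byte UTF-8 string.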
        long userID = msg.ReadInt64();

        Vector3    Vpos = CustomMessages.Instance.ReadVector3(msg);
        Quaternion Vrot = CustomMessages.Instance.ReadQuaternion(msg);

        byte[] kariByte = new byte[1024];
        msg.ReadArray(kariByte, (uint)kariByte.Length);
        // Trim the NUL padding left by the fixed-size buffer.
        string Vstr = System.Text.Encoding.UTF8.GetString(kariByte).TrimEnd('\0');

        //string Vstr = msg.ReadString();
        MakeVoiceLine(Vpos, Vrot, Vstr);
    }
Example #9
    /// <summary>
    /// Called when a remote user shares a sticky note.
    /// </summary>
    /// <param name="msg"></param>
    private void SharedSticky(NetworkInMessage msg)
    {
        // Parse the message
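        // Message layout: user ID, sticky position, a fixed 1024-byte UTF-8 message, then a color index.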
        long userID = msg.ReadInt64();

        Vector3 StickyPos = CustomMessages.Instance.ReadVector3(msg);

        byte[] kariByte = new byte[1024];
        msg.ReadArray(kariByte, (uint)kariByte.Length);
        //string message = msg.ReadString();
        int colorIndex = msg.ReadInt32();

        // Trim the NUL padding left by the fixed-size buffer.
        string message = System.Text.Encoding.UTF8.GetString(kariByte).TrimEnd('\0');

        Debug.Log("message:" + message);
        //Debug.Log("colorIndex=" + colorIndex);
        MakeShareSticky(message, StickyPos, colorIndex);
    }
Example #10
        void OnEditorUser(NetworkInMessage msg)
        {
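            // Message layout: sender's user ID (skipped), length-prefixed ASCII editor IP, then the editor's user ID.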
            msg.ReadInt64();

            long ipSize = msg.ReadInt64();

            byte[] ipData = new byte[(uint)ipSize];
            msg.ReadArray(ipData, (uint)ipSize);
            string editorIP = System.Text.Encoding.ASCII.GetString(ipData).Trim();

            long editorUserID = msg.ReadInt64();

            if (HolographicCameraManager.Instance.LocalComputerIP.Trim() == String.Empty ||
                HolographicCameraManager.Instance.LocalComputerIP.Trim() == editorIP)
            {
                User user = SharingStage.Instance.SessionUsersTracker.GetUserById((int)editorUserID);
                if (user != null)
                {
                    HolographicCameraManager.Instance.editorUser = user;
                }
            }
        }
Example #11
        /// <summary>
        /// Called when a remote user sends a head transform.
        /// </summary>
        /// <param name="msg"></param>
        void UpdateHeadTransform(NetworkInMessage msg)
        {
            // Parse the message
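            // Message layout: user ID, length-prefixed ASCII IP string, head position, then head rotation.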
            long userID = msg.ReadInt64();
            long ipSize = msg.ReadInt64();

            byte[] ipData = new byte[(uint)ipSize];
            msg.ReadArray(ipData, (uint)ipSize);

            Vector3    headPos = SpectatorView.SV_CustomMessages.Instance.ReadVector3(msg);
            Quaternion headRot = SpectatorView.SV_CustomMessages.Instance.ReadQuaternion(msg);

            RemoteHeadInfo headInfo = GetRemoteHeadInfo(userID);

            if (headInfo.HeadObject != null)
            {
#if UNITY_EDITOR
                if (SpectatorView.HolographicCameraManager.Instance != null &&
                    SpectatorView.HolographicCameraManager.Instance.tppcUser != null &&
                    SpectatorView.HolographicCameraManager.Instance.tppcUser.GetID() == userID)
                {
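                    // Forward the third-person camera user's pose to SetHologramPose
                    // instead of moving the head object.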
                    SetHologramPose(headRot.x, headRot.y, headRot.z, headRot.w,
                                    headPos.x, headPos.y, headPos.z);
                }
                else
#endif
                {
                    headInfo.HeadObject.transform.localPosition = headPos;
                    headInfo.HeadObject.transform.localRotation = headRot;
                }

                if (string.IsNullOrEmpty(headInfo.IP))
                {
                    headInfo.IP = System.Text.Encoding.ASCII.GetString(ipData);
                }
            }
        }
Example #12
    /// <summary>
    /// Now that we've gotten a message, examine it and dissect the audio data.
    /// </summary>
    /// <param name="connection"></param>
    /// <param name="message"></param>
    public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
    {
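        // Header: a one-byte header size, then a packed 32-bit field encoding version, stream and
        // channel counts, sample rate, sample type, sample count, codec type, and sequence number.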
        byte headerSize = message.ReadByte();

        Int32 pack = message.ReadInt32();

        int version          = this.versionExtractor.GetBitsValue(pack);
        int audioStreamCount = this.audioStreamCountExtractor.GetBitsValue(pack);
        int channelCount     = this.channelCountExtractor.GetBitsValue(pack);
        int sampleRate       = this.sampleRateExtractor.GetBitsValue(pack);
        int sampleType       = this.sampleTypeExtractor.GetBitsValue(pack);
        int bytesPerSample   = sizeof(float);

        if (sampleType == 1)
        {
            bytesPerSample = sizeof(Int16);
        }

        int sampleCount    = this.sampleCountExtractor.GetBitsValue(pack);
        int codecType      = this.codecTypeExtractor.GetBitsValue(pack);
        int sequenceNumber = this.sequenceNumberExtractor.GetBitsValue(pack);

        Int32 extendedSampleRate = 0;

        if (sampleRate == 0)
        {
            extendedSampleRate = message.ReadInt32();
        }

        this.prominentSpeakerCount = 0;

        for (int i = 0; i < audioStreamCount; i++)
        {
            float   averageAmplitude = message.ReadFloat();
            UInt32  hrtfSourceID     = (UInt32)message.ReadInt32();
            Vector3 hrtfPosition     = new Vector3();
            Vector3 hrtfDirection    = new Vector3();
            if (hrtfSourceID != 0)
            {
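                // A non-zero HRTF source ID carries spatial data: read its position and direction,
                // then compute pan and drop-off relative to the camera's pose in anchor space.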
                hrtfPosition.x = message.ReadFloat();
                hrtfPosition.y = message.ReadFloat();
                hrtfPosition.z = message.ReadFloat();

                hrtfDirection.x = message.ReadFloat();
                hrtfDirection.y = message.ReadFloat();
                hrtfDirection.z = message.ReadFloat();

                Vector3 cameraPosRelativeToGlobalAnchor       = Vector3.zero;
                Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

                if (this.GlobalAnchorTransform != null)
                {
                    cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                        null,
                        this.GlobalAnchorTransform,
                        Camera.main.transform.position);
                    cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                        null,
                        this.GlobalAnchorTransform,
                        Camera.main.transform.position);
                }

                cameraPosRelativeToGlobalAnchor.Normalize();
                cameraDirectionRelativeToGlobalAnchor.Normalize();

                Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
                soundVector.Normalize();

                // x is forward
                float fltx = (m_DropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);
                // y is right
                Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
                float   flty    = -(m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);
                // z is up
                Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
                float   fltz = (m_PanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

                if (this.ShowHRTFInfo)
                {
                    UnityEngine.Debug.Log("hrtf = " + fltx + ", " + flty + ", " + fltz);
                }

                // Hacky distance check so we don't get too close to source.
                Vector3 flt = new Vector3(fltx, flty, fltz);
                if (flt.magnitude < (MinimumDistance * m_DropOffMaximum))
                {
                    flt  = flt.normalized * MinimumDistance * m_DropOffMaximum;
                    fltx = flt.x;
                    flty = flt.y;
                    fltz = flt.z;
                }

                AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
            }

            for (int j = 0; j < channelCount; j++)
            {
                // if uncompressed, size = sampleCount
                Int16 size = (Int16)sampleCount;
                if (codecType != 0)
                {
                    // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                    size = message.ReadInt16();
                }

                // make this array big enough to hold all of the uncompressed data only if the
                // buffer is not the right size, minimize new operations
                if (this.networkPacketBufferBytes.Length != sampleCount * bytesPerSample)
                {
                    this.networkPacketBufferBytes = new byte[sampleCount * bytesPerSample];
                }
                message.ReadArray(this.networkPacketBufferBytes, (uint)(size * bytesPerSample));

                if (codecType != 0)
                {
                    // in place decompression please - should fill out the data buffer
                    // ...
                }

                if (hrtfSourceID > 0)
                {
                    // hrtf processing here
                }

                internalStore(this.networkPacketBufferBytes, 0, this.networkPacketBufferBytes.Length);
            }
        }
    }
        /// <summary>
        /// Now that we've gotten a message, examine it and dissect the audio data.
        /// </summary>
        /// <param name="connection"></param>
        /// <param name="message"></param>
        public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
        {
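            // Same packet layout as above; this variant guards shared state with audioDataMutex
            // and writes the decoded samples into a circular buffer.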
            // Unused byte headerSize
            message.ReadByte();

            Int32 pack = message.ReadInt32();

            // Unused int version
            versionExtractor.GetBitsValue(pack);
            int audioStreamCount = audioStreamCountExtractor.GetBitsValue(pack);
            int channelCount     = channelCountExtractor.GetBitsValue(pack);
            int sampleRate       = sampleRateExtractor.GetBitsValue(pack);
            int sampleType       = sampleTypeExtractor.GetBitsValue(pack);
            int bytesPerSample   = sizeof(float);

            if (sampleType == 1)
            {
                bytesPerSample = sizeof(Int16);
            }

            int sampleCount = sampleCountExtractor.GetBitsValue(pack);
            int codecType   = codecTypeExtractor.GetBitsValue(pack);

            // Unused int sequenceNumber
            sequenceNumberExtractor.GetBitsValue(pack);

            if (sampleRate == 0)
            {
                // Unused int extendedSampleRate
                message.ReadInt32();
            }

            try
            {
                audioDataMutex.WaitOne();

                prominentSpeakerCount = 0;

                for (int i = 0; i < audioStreamCount; i++)
                {
                    float   averageAmplitude = message.ReadFloat();
                    UInt32  hrtfSourceID     = (UInt32)message.ReadInt32();
                    Vector3 hrtfPosition     = new Vector3();
                    Vector3 hrtfDirection    = new Vector3();
                    if (hrtfSourceID != 0)
                    {
                        hrtfPosition.x = message.ReadFloat();
                        hrtfPosition.y = message.ReadFloat();
                        hrtfPosition.z = message.ReadFloat();

                        hrtfDirection.x = message.ReadFloat();
                        hrtfDirection.y = message.ReadFloat();
                        hrtfDirection.z = message.ReadFloat();

                        Vector3 cameraPosRelativeToGlobalAnchor       = Vector3.zero;
                        Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

                        if (GlobalAnchorTransform != null)
                        {
                            cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                                null,
                                GlobalAnchorTransform,
                                CameraCache.Main.transform.position);
                            cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                                null,
                                GlobalAnchorTransform,
                                CameraCache.Main.transform.position);
                        }

                        cameraPosRelativeToGlobalAnchor.Normalize();
                        cameraDirectionRelativeToGlobalAnchor.Normalize();

                        Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
                        soundVector.Normalize();

                        // x is forward
                        float fltx = (KDropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);
                        // y is right
                        Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
                        float   flty    = -(KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);
                        // z is up
                        Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
                        float   fltz = (KPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

                        // Hacky distance check so we don't get too close to source.
                        Vector3 flt = new Vector3(fltx, flty, fltz);
                        if (flt.magnitude < (MinimumDistance * KDropOffMaximum))
                        {
                            flt  = flt.normalized * MinimumDistance * KDropOffMaximum;
                            fltx = flt.x;
                            flty = flt.y;
                            fltz = flt.z;
                        }

                        AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
                    }

                    for (int j = 0; j < channelCount; j++)
                    {
                        // if uncompressed, size = sampleCount
                        Int16 size = (Int16)sampleCount;
                        if (codecType != 0)
                        {
                            // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                            size = message.ReadInt16();
                        }

                        // make this array big enough to hold all of the uncompressed data only if the
                        // buffer is not the right size, minimize new operations
                        int totalBytes = size * bytesPerSample;
                        if (networkPacketBufferBytes.Length != totalBytes)
                        {
                            networkPacketBufferBytes = new byte[totalBytes];
                        }
                        message.ReadArray(networkPacketBufferBytes, (uint)(totalBytes));

                        if (codecType != 0)
                        {
                            // in place decompression please - should fill out the data buffer
                            // ...
                        }

                        if (hrtfSourceID > 0)
                        {
                            // TODO hrtf processing here
                        }

                        circularBuffer.Write(networkPacketBufferBytes, 0, networkPacketBufferBytes.Length);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.LogError(e.Message);
            }
            finally
            {
                audioDataMutex.ReleaseMutex();
            }
        }