ReadInt32() Public Method

public ReadInt32 ( ) : int
Returns int
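ReadInt32() reads the next 32-bit integer from an incoming HoloToolkit.Sharing.NetworkInMessage. Values must be read back in the same order, and with the same widths, in which the sender wrote them. A minimal sketch follows (the handler name and message layout are hypothetical):

        void OnScoreMessage(HoloToolkit.Sharing.NetworkInMessage msg)
        {
            // Hypothetical layout: the sender wrote a 64-bit user ID followed by
            // two 32-bit integers. Reads must mirror the writes exactly.
            msg.ReadInt64();                    // sender's user ID, unused here

            int playerIndex = msg.ReadInt32();
            int score       = msg.ReadInt32();

            UnityEngine.Debug.LogFormat("Player {0} scored {1}", playerIndex, score);
        }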
Example #1
        void OnSpatialMapping(HoloToolkit.Sharing.NetworkInMessage msg)
        {
#if UNITY_EDITOR
            if (SpatialMappingManager.Instance == null)
            {
                return;
            }

            SpectatorView.RemoteSpatialMappingSource rsms = SpatialMappingManager.Instance.GetComponent<SpectatorView.RemoteSpatialMappingSource>();
            if (rsms == null)
            {
                return;
            }

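            // Read and discard the leading 64-bit value; only the payload that follows is needed here.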
            msg.ReadInt64();

            List<Vector3> vertices  = new List<Vector3>();
            List<Vector3> normals   = new List<Vector3>();
            List<int>     triangles = new List<int>();

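            // The vertex, normal, and triangle counts precede the data itself, so read them first.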
            int vertexCount   = msg.ReadInt32();
            int normalCount   = msg.ReadInt32();
            int triangleCount = msg.ReadInt32();

            for (int i = 0; i < vertexCount; i++)
            {
                Vector3 vertex = SpectatorView.SV_CustomMessages.Instance.ReadVector3(msg);
                vertices.Add(vertex);
            }

            for (int i = 0; i < normalCount; i++)
            {
                Vector3 normal = SpectatorView.SV_CustomMessages.Instance.ReadVector3(msg);
                normals.Add(normal);
            }

            for (int i = 0; i < triangleCount; i++)
            {
                int index = msg.ReadInt32();
                triangles.Add(index);
            }

            SpatialMappingManager.Instance.transform.parent        = transform;
            SpatialMappingManager.Instance.transform.localPosition = Vector3.zero;
            SpatialMappingManager.Instance.transform.localRotation = Quaternion.identity;

            rsms.AddSurface(vertices, normals, triangles);
#endif
        }
Example #2
        void OnTimeOffset(HoloToolkit.Sharing.NetworkInMessage msg)
        {
#if !UNITY_EDITOR
            float eventTime = Time.time;
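            // Read and discard the leading 64-bit value, then read the 32-bit time offset that follows.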
            msg.ReadInt64();

            int timeOffset = msg.ReadInt32();
            if (timeOffset > 0)
            {
                timeOffset *= -1;
            }

            colorFrameReceivedTimeS = eventTime;
            prevTimeOffsetNS        = timeOffset;

            Vector3    pos;
            Quaternion rot;

            if (GetHeadTransform(WorldManager.GetNativeISpatialCoordinateSystemPtr(),
                                 (int)timeOffset,
                                 out rot.x, out rot.y, out rot.z, out rot.w,
                                 out pos.x, out pos.y, out pos.z))
            {
                // Transform the head position and rotation from world space into local space
                Vector3    HeadPos = this.transform.InverseTransformPoint(pos);
                Quaternion HeadRot = Quaternion.Inverse(this.transform.rotation) * rot;
                SpectatorView.SV_CustomMessages.Instance.SendHeadTransform(HeadPos, HeadRot);
            }
#endif
        }
Example #3
        /// <summary>
        /// Now that we've gotten a message, examine it and dissect the audio data.
        /// </summary>
        /// <param name="connection"></param>
        /// <param name="message"></param>
        public void OnMessageReceived(NetworkConnection connection, NetworkInMessage message)
        {
            // Unused byte headerSize
            message.ReadByte();

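            // All of the header fields are packed into a single 32-bit value; the bit extractors below unpack the individual fields.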
            Int32 pack = message.ReadInt32();

            // Unused int version
            versionExtractor.GetBitsValue(pack);
            int audioStreamCount = audioStreamCountExtractor.GetBitsValue(pack);
            int channelCount     = channelCountExtractor.GetBitsValue(pack);
            int sampleRate       = sampleRateExtractor.GetBitsValue(pack);
            int sampleType       = sampleTypeExtractor.GetBitsValue(pack);
            int bytesPerSample   = sizeof(float);

            if (sampleType == 1)
            {
                bytesPerSample = sizeof(Int16);
            }

            int sampleCount = sampleCountExtractor.GetBitsValue(pack);
            int codecType   = codecTypeExtractor.GetBitsValue(pack);

            // Unused int sequenceNumber
            sequenceNumberExtractor.GetBitsValue(pack);

            if (sampleRate == 0)
            {
                // Unused int extendedSampleRate
                message.ReadInt32();
            }

            try
            {
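                // Hold the mutex while the shared audio buffers are updated; it is released in the finally block below.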
                audioDataMutex.WaitOne();

                prominentSpeakerCount = 0;

                for (int i = 0; i < audioStreamCount; i++)
                {
                    float   averageAmplitude = message.ReadFloat();
                    UInt32  hrtfSourceID     = (UInt32)message.ReadInt32();
                    Vector3 hrtfPosition     = new Vector3();
                    Vector3 hrtfDirection    = new Vector3();
                    if (hrtfSourceID != 0)
                    {
                        hrtfPosition.x = message.ReadFloat();
                        hrtfPosition.y = message.ReadFloat();
                        hrtfPosition.z = message.ReadFloat();

                        hrtfDirection.x = message.ReadFloat();
                        hrtfDirection.y = message.ReadFloat();
                        hrtfDirection.z = message.ReadFloat();

                        Vector3 cameraPosRelativeToGlobalAnchor       = Vector3.zero;
                        Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

                        if (GlobalAnchorTransform != null)
                        {
                            cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
                                null,
                                GlobalAnchorTransform,
                                Camera.main.transform.position);
                            cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
                                null,
                                GlobalAnchorTransform,
                                Camera.main.transform.position);
                        }

                        cameraPosRelativeToGlobalAnchor.Normalize();
                        cameraDirectionRelativeToGlobalAnchor.Normalize();

                        Vector3 soundVector = hrtfPosition - cameraPosRelativeToGlobalAnchor;
                        soundVector.Normalize();

                        // x is forward
                        float fltx = (kDropOffMaximum / DropOffMaximumMetres) * Vector3.Dot(soundVector, cameraDirectionRelativeToGlobalAnchor);
                        // y is right
                        Vector3 myRight = Quaternion.Euler(0, 90, 0) * cameraDirectionRelativeToGlobalAnchor;
                        float   flty    = -(kPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myRight);
                        // z is up
                        Vector3 myUp = Quaternion.Euler(90, 0, 0) * cameraDirectionRelativeToGlobalAnchor;
                        float   fltz = (kPanMaximum / PanMaximumMetres) * Vector3.Dot(soundVector, myUp);

                        // Hacky distance check so we don't get too close to source.
                        Vector3 flt = new Vector3(fltx, flty, fltz);
                        if (flt.magnitude < (MinimumDistance * kDropOffMaximum))
                        {
                            flt  = flt.normalized * MinimumDistance * kDropOffMaximum;
                            fltx = flt.x;
                            flty = flt.y;
                            fltz = flt.z;
                        }

                        AddProminentSpeaker(hrtfSourceID, averageAmplitude, fltx, flty, fltz);
                    }

                    for (int j = 0; j < channelCount; j++)
                    {
                        // if uncompressed, size = sampleCount
                        Int16 size = (Int16)sampleCount;
                        if (codecType != 0)
                        {
                            // if compressed, size is first 2 bytes, sampleCount should be number of bytes after decompression
                            size = message.ReadInt16();
                        }

                        // make this array big enough to hold all of the uncompressed data only if the
                        // buffer is not the right size, minimize new operations
                        int totalBytes = size * bytesPerSample;
                        if (networkPacketBufferBytes.Length != totalBytes)
                        {
                            networkPacketBufferBytes = new byte[totalBytes];
                        }
                        message.ReadArray(networkPacketBufferBytes, (uint)(totalBytes));

                        if (codecType != 0)
                        {
                            // in place decompression please - should fill out the data buffer
                            // ...
                        }

                        if (hrtfSourceID > 0)
                        {
                            // hrtf processing here
                        }

                        circularBuffer.Write(networkPacketBufferBytes, 0, networkPacketBufferBytes.Length);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.LogError(e.Message);
            }
            finally
            {
                audioDataMutex.ReleaseMutex();
            }
        }
Example #4
 public int ReadInt(NetworkInMessage msg)
 {
     return msg.ReadInt32(); // Reads the next 32-bit integer from the message; C# int is 32 bits regardless of platform.
 }