public WriteArray ( byte[] data, uint length ) : void | ||
data | byte[] | The byte array whose contents are appended to the message payload. |
length | uint | Number of bytes from the array to write. |
Result | void |
/// <summary>
/// Broadcasts a tagged color-data payload to all other participants in the
/// current sharing session. Silently does nothing when there is no live
/// server connection.
/// </summary>
/// <param name="tag">Message tag written as the first payload byte so receivers can dispatch on it.</param>
/// <param name="colorData">Raw color bytes to send; the full array is written after the tag.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="colorData"/> is null.</exception>
public void SendColorData(MsgTag tag, byte[] colorData)
{
    if (colorData == null)
    {
        throw new ArgumentNullException(nameof(colorData));
    }

    // Only broadcast when we are actually connected to a session.
    if (this.serverConnection != null && this.serverConnection.IsConnected())
    {
        NetworkOutMessage msg = CreateMessage((int)TestMessageID.StartID);
        msg.Write((byte)tag);

        uint len = (uint)colorData.Length;
        msg.WriteArray(colorData, len);

        // Color frames are superseded by newer ones, so unreliable-sequenced
        // delivery is sufficient: late/stale packets may simply be dropped.
        this.serverConnection.Broadcast(
            msg,
            MessagePriority.Immediate,
            MessageReliability.UnreliableSequenced,
            MessageChannel.Avatar);
    }
}
/// <summary>
/// Packs one fixed-size chunk of microphone audio into a network message and
/// sends it to the server. Wire layout: 1 header-size byte, a packed 32-bit
/// descriptor (version, stream/channel counts, sample rate/type/count, codec,
/// mute flag, sequence number), then per-stream data: average amplitude,
/// client id, HRTF camera position and direction, and the raw sample bytes.
/// </summary>
/// <param name="connection">Connection the message is sent on.</param>
/// <param name="data">Buffer holding the audio samples as 4-byte floats.</param>
/// <param name="dataSize">Number of bytes of <paramref name="data"/> to send.</param>
private void SendFixedSizedChunk(NetworkConnection connection, byte[] data, int dataSize)
{
    // Diagnostics: track the worst-case interval between consecutive sends.
    System.DateTime currentTime = System.DateTime.Now;
    float seconds = (float)(currentTime - timeOfLastPacketSend).TotalSeconds;
    timeOfLastPacketSend = currentTime;

    // Ignore very long gaps (first packet, or resuming after a pause) so they
    // don't permanently dominate the worst-time statistic.
    if (seconds < 10.0)
    {
        if (worstTimeBetweenPackets < seconds)
        {
            worstTimeBetweenPackets = seconds;
        }

        if (ShowInterPacketTime)
        {
            UnityEngine.Debug.Log(
                "Microphone: Millisecs since last sent: " + seconds * 1000.0 +
                " Worst: " + worstTimeBetweenPackets * 1000.0);
        }
    }

    int clientId = SharingStage.Instance.Manager.GetLocalUser().GetID();

    // Pack the header.
    NetworkOutMessage msg = connection.CreateMessage((byte)MessageID.AudioSamples);

    // Data count is in bytes; the actual samples are 4-byte floats.
    int dataCountFloats = dataSize / 4;

    // NOTE(review): the original comment here said "8 byte header size" while
    // writing 5 (1 marker byte + 4 packed-descriptor bytes) — confirm the
    // expected value against the receiver before changing it.
    msg.Write((byte)5);

    Int32 pack = 0;
    versionPacker.SetBits(ref pack, 1);                   // version
    audioStreamCountPacker.SetBits(ref pack, 1);          // AudioStreamCount
    channelCountPacker.SetBits(ref pack, 1);              // ChannelCount
    sampleRatePacker.SetBits(ref pack, sampleRateType);   // SampleRate: 1 = 16000, 3 = 48000
    sampleTypePacker.SetBits(ref pack, 0);                // SampleType
    sampleCountPacker.SetBits(ref pack, dataCountFloats); // SampleCount, in float samples
    codecTypePacker.SetBits(ref pack, 0);                 // CodecType
    mutePacker.SetBits(ref pack, Mute ? 1 : 0);
    sequenceNumberPacker.SetBits(ref pack, sequenceNumber++);
    sequenceNumber %= 32;                                 // sequence number wraps at 32

    msg.Write((int)pack); // the packed descriptor bits

    // This is where stream data starts. Write all data for one stream.
    msg.Write((float)0.0f); // average amplitude; not needed client -> server
    msg.Write((int)clientId); // non-zero client ID for this client

    // HRTF camera pose, expressed relative to the shared global anchor.
    Vector3 cameraPosRelativeToGlobalAnchor = Vector3.zero;
    Vector3 cameraDirectionRelativeToGlobalAnchor = Vector3.zero;

    if (GlobalAnchorTransform != null)
    {
        cameraPosRelativeToGlobalAnchor = MathUtils.TransformPointFromTo(
            null,
            GlobalAnchorTransform,
            Camera.main.transform.position);

        // BUGFIX: derive the direction from the camera's facing vector.
        // The original passed transform.position here, which made the
        // transmitted "direction" a normalized world position rather than
        // where the camera is actually looking.
        cameraDirectionRelativeToGlobalAnchor = MathUtils.TransformDirectionFromTo(
            null,
            GlobalAnchorTransform,
            Camera.main.transform.forward);
    }

    // NOTE(review): normalizing the position discards its magnitude, so the
    // receiver only ever sees a unit vector from the anchor — confirm this is
    // intended before relying on it for HRTF positioning.
    cameraPosRelativeToGlobalAnchor.Normalize();
    cameraDirectionRelativeToGlobalAnchor.Normalize();

    // Camera position.
    msg.Write(cameraPosRelativeToGlobalAnchor.x);
    msg.Write(cameraPosRelativeToGlobalAnchor.y);
    msg.Write(cameraPosRelativeToGlobalAnchor.z);

    // HRTF direction bits.
    msg.Write(cameraDirectionRelativeToGlobalAnchor.x);
    msg.Write(cameraDirectionRelativeToGlobalAnchor.y);
    msg.Write(cameraDirectionRelativeToGlobalAnchor.z);

    msg.WriteArray(data, (uint)dataCountFloats * 4);

    connection.Send(msg, MessagePriority.Immediate, MessageReliability.ReliableOrdered, MessageChannel.Audio, true);
}