// Verifies that Utf8String.EndsWith agrees with System.String.EndsWith for
// the same (string, pattern) pair.
public void EndsWithUtf8String(string s, string pattern)
{
    var expected = s.EndsWith(pattern);

    var haystack = new Utf8String(s);
    var needle = new Utf8String(pattern);

    Assert.Equal(expected, haystack.EndsWith(needle));
}
// Round-trips every prefix of the byte values 0..255 through Base64.Encode /
// Base64.Decode, cross-checking the encoded text against Convert.ToBase64String.
public void BasicEncodingDecoding()
{
    var testBytes = new byte[256];
    for (int i = 0; i < testBytes.Length; i++)
    {
        testBytes[i] = (byte)i;
    }

    for (int length = 1; length <= 256; length++)
    {
        var source = testBytes.Slice(0, length);

        // Encode and compare against the framework's reference implementation.
        var encoded = new byte[Base64.ComputeEncodedLength(source.Length)].Slice();
        var written = Base64.Encode(source, encoded);
        Assert.Equal(encoded.Length, written);

        var actualText = new Utf8String(encoded).ToString();
        var expectedText = Convert.ToBase64String(testBytes, 0, length);
        Assert.Equal(expectedText, actualText);

        // Decode back and verify the round trip byte-for-byte.
        var decoded = new byte[source.Length];
        var decodedCount = Base64.Decode(encoded, decoded.Slice());
        Assert.Equal(source.Length, decodedCount);
        for (int i = 0; i < decoded.Length; i++)
        {
            Assert.Equal(source[i], decoded[i]);
        }
    }
}
// Encodes each prefix of the bytes 0..255 into a buffer, then decodes that
// buffer in place and verifies the original bytes are restored.
public void DecodeInPlace()
{
    var testBytes = new byte[256];
    for (int i = 0; i < testBytes.Length; i++)
    {
        testBytes[i] = (byte)i;
    }

    for (int length = 1; length <= 256; length++)
    {
        var source = testBytes.Slice(0, length);

        var buffer = new byte[Base64.ComputeEncodedLength(source.Length)];
        var bufferSlice = buffer.Slice();
        Base64.Encode(source, bufferSlice);

        // Cross-check the encoding against the framework implementation.
        var actualText = new Utf8String(bufferSlice).ToString();
        var expectedText = Convert.ToBase64String(testBytes, 0, length);
        Assert.Equal(expectedText, actualText);

        // Decode over the top of the encoded data.
        var decodedCount = Base64.DecodeInPlace(bufferSlice);
        Assert.Equal(source.Length, decodedCount);
        for (int i = 0; i < decodedCount; i++)
        {
            Assert.Equal(source[i], buffer[i]);
        }
    }
}
// Parses an unsigned 32-bit decimal integer from UTF-8 text. Parsing stops at
// the first non-digit byte. Returns false (with value and bytesConsumed reset
// to zero) when the text starts with a non-digit or the number overflows.
public static bool TryParse(Utf8String utf8Text, out uint value, out int bytesConsumed)
{
    Precondition.Require(utf8Text.Length > 0);

    value = 0;
    bytesConsumed = 0;

    for (int byteIndex = 0; byteIndex < utf8Text.Length; byteIndex++)
    {
        byte nextByte = (byte)utf8Text[byteIndex];
        if (nextByte < '0' || nextByte > '9')
        {
            if (bytesConsumed == 0)
            {
                value = default(uint);
                return false; // no digits at all
            }
            return true; // stop at the first non-digit
        }

        uint digit = (uint)nextByte - '0';

        // BUG FIX: the previous check was "if (candidate >= value)", which on
        // overflow silently dropped the digit but kept consuming input,
        // producing a wrong value. Detect overflow explicitly and fail.
        if (value > UInt32.MaxValue / 10 ||
            (value == UInt32.MaxValue / 10 && digit > UInt32.MaxValue % 10))
        {
            value = 0;
            bytesConsumed = 0;
            return false;
        }

        value = value * 10 + digit;
        bytesConsumed++;
    }
    return true;
}
// Initializes a reader positioned at the start of the given UTF-8 JSON text,
// with zeroed nesting counters and token state.
public JsonReader(Utf8String str)
{
    TokenType = 0;
    _index = 0;
    _insideObject = 0;
    _insideArray = 0;
    _str = str;
}
// Byte-wise equality: true iff both strings have the same length and the
// same byte at every position.
public bool Equals(Utf8String other)
{
    int length = Length;
    if (length != other.Length)
    {
        return false;
    }

    for (int i = 0; i < length; i++)
    {
        if (_bytes[i] != other.Bytes[i])
        {
            return false;
        }
    }
    return true;
}
// Parses a small JSON document and reads typed values back out of it.
public void NonAllocatingRead()
{
    var jsonText = new Utf8String("{\"First\":\"John\",\"Age\":25}");

    JsonDynamicObject json = JsonDynamicObject.Parse(jsonText);

    Assert.Equal(new Utf8String("John"), json.First());
    Assert.Equal(25U, json.Age());
}
// Creates a reader over the UTF-16 string, converted once to UTF-8, starting
// in the Start token state.
public JsonReader(string str)
{
    _str = new Utf8String(str);
    TokenType = JsonTokenType.Start;
    _index = 0;
    _insideObject = 0;
    _insideArray = 0;
}
// Creates a reader over the UTF-16 string, converted once to UTF-8, caching
// the total length up front.
public JsonReader(string str)
{
    _str = new Utf8String(str);
    _length = _str.Length;
    TokenType = 0;
    _index = 0;
    _insideObject = 0;
    _insideArray = 0;
}
// Constructs a Utf8String from a raw byte pointer and length, and verifies
// the resulting string reports the expected length.
public unsafe void LengthPointerTest()
{
    byte[] utf8Bytes = Encoding.UTF8.GetBytes("1258");
    fixed (byte* bytes = utf8Bytes)
    {
        var utf8String = new Utf8String(bytes, utf8Bytes.Length);
        // BUG FIX: the assertion previously checked utf8Bytes.Length (the
        // input array), which is trivially true and never exercised the
        // Utf8String under test.
        Assert.Equal(4, utf8String.Length);
    }
}
//[Fact(Skip = "issue #869")]
// Decodes the 4-byte UTF-8 sequence for U+24B62 (𤭢) and verifies it round
// trips to the expected UTF-16 string (a surrogate pair).
public void Bug869DoesNotRepro()
{
    var bytes = new byte[] { 0xF0, 0xA4, 0xAD, 0xA2 };
    var utf8String = new Utf8String(bytes);

    var expected = "𤭢";
    var actual = utf8String.ToString();

    Assert.Equal(expected, actual);
}
// Creates a reader over the trimmed UTF-8 form of the input and records
// whether the document's first character opens an object ('{').
public JsonReader(string str)
{
    _str = new Utf8String(str).Trim();
    _index = 0;
    _insideObject = 0;
    _insideArray = 0;
    TokenType = 0;
    _jsonStartIsObject = (byte)_str[0] == '{';
}
// Builds a one-property dynamic JSON object, formats it eagerly, and checks
// the produced text.
public void EagerWrite()
{
    dynamic json = new JsonDynamicObject();
    json.First = "John";

    var formatter = new ArrayFormatter(1024, EncodingData.InvariantUtf8);
    formatter.Append((JsonDynamicObject)json);

    var formattedText = new Utf8String(formatter.Formatted);
    Assert.Equal(new Utf8String("{\"First\":\"John\"}"), formattedText);
}
// Parses an unsigned 64-bit decimal integer from UTF-8 text (invariant-UTF-8
// fast path only). Stops at the first non-digit; returns false on empty
// input, a leading non-digit, overflow, or a non-invariant encoding — in all
// failure cases value and bytesConsumed are zero.
public static bool TryParse(Utf8String utf8Text, FormattingData cultureAndEncodingInfo, Format.Parsed numericFormat, out ulong value, out int bytesConsumed)
{
    value = 0;
    bytesConsumed = 0;

    // Precondition replacement
    if (utf8Text.Length < 1)
    {
        return false;
    }

    if (!cultureAndEncodingInfo.IsInvariantUtf8)
    {
        return false; // only the invariant UTF-8 path is implemented
    }

    for (int byteIndex = 0; byteIndex < utf8Text.Length; byteIndex++)
    {
        // Bytes below '0' wrap around to values > 9, so one comparison
        // rejects every non-digit.
        byte digit = (byte)((byte)utf8Text[byteIndex] - '0');
        if (digit > 9)
        {
            if (bytesConsumed == 0)
            {
                value = default(ulong);
                return false; // no digits at all
            }
            return true; // stop at the first non-digit
        }

        // overflow: value * 10 would exceed UInt64.MaxValue
        if (value > UInt64.MaxValue / 10)
        {
            value = 0;
            bytesConsumed = 0;
            return false;
        }
        // overflow: adding this digit would exceed UInt64.MaxValue
        if (UInt64.MaxValue - value * 10 < (ulong)digit)
        {
            value = 0;
            bytesConsumed = 0;
            return false;
        }

        value = value * 10 + digit;
        bytesConsumed++;
    }
    return true;
}
// Parses the given text as a UInt32 and verifies the parsed value and the
// number of bytes consumed.
public unsafe void ParseUtf8StringToUInt32(string text, uint expectedValue, int expectedConsumed)
{
    var utf8 = new Utf8String(text);

    uint parsedValue;
    int bytesConsumed;
    bool ok = PrimitiveParser.TryParseUInt32(utf8, out parsedValue, out bytesConsumed);

    Assert.True(ok);
    Assert.Equal(expectedValue, parsedValue);
    Assert.Equal(expectedConsumed, bytesConsumed);
}
// Walks the code-point enumerator and compares each code point against the
// UTF-16 source char-by-char (valid while the test data stays within the BMP,
// where one char corresponds to one code point).
public void CodePointValidation(string s)
{
    var utf8string = new Utf8String(Encoding.UTF8.GetBytes(s));
    IEnumerator<UnicodeCodePoint> codePoints = utf8string.GetEnumerator();

    foreach (char c in s)
    {
        Assert.True(codePoints.MoveNext());
        Assert.Equal((uint)c, (uint)codePoints.Current);
    }
    Assert.False(codePoints.MoveNext());
}
// Parses a nested JSON document, formats it back out, and expects the output
// to match the input exactly.
public void NestedEagerWrite()
{
    var jsonText = new Utf8String("{\"FirstName\":\"John\",\"LastName\":\"Smith\",\"Address\":{\"Street\":\"21 2nd Street\",\"City\":\"New York\",\"State\":\"NY\",\"Zip\":\"10021-3100\"},\"IsAlive\":true,\"Age\":25,\"Spouse\":null}");

    JsonDynamicObject json = JsonDynamicObject.Parse(jsonText, 100);

    var formatter = new ArrayFormatter(1024, EncodingData.InvariantUtf8);
    formatter.Append(json);
    var formattedText = new Utf8String(formatter.Formatted);

    // The following check only works given the current implementation of
    // Dictionary. If that implementation changes, the properties might round
    // trip to different positions in the JSON text.
    Assert.Equal(jsonText, formattedText);
}
// Measures Utf8String construction from System.String for each test case.
public void ConstructFromString()
{
    foreach (StringWithDescription testData in StringsWithDescription())
    {
        string s = testData.String;
        int remaining = testData.Iterations;

        _timer.Restart();
        while (remaining-- != 0)
        {
            Utf8String utf8s = new Utf8String(s);
        }
        PrintTime(testData);
    }
}
/// <summary>
/// Serializes this instance of the ClientMessage1 class into the byte
/// sequence required by the SRP specification.
/// </summary>
/// <returns>A sequence of bytes representing this instance of the
/// ClientMessage1 class.</returns>
/// <exception cref="OverflowException">Thrown if the cumulative length of the
/// serialized data fields exceeds the maximum number of bytes allowed by the
/// SRP specification.</exception>
/// <remarks>The SRP specification imposes a limit of 2,147,483,643 bytes on
/// the serialized data.</remarks>
public byte[] Serialize()
{
    byte[] username = new Utf8String(Username).Serialize();
    byte[] authId = new Utf8String(AuthId).Serialize();
    byte[] sessionId = new Utf8String(SessionId).Serialize();
    byte[] nonce = new OctetSequence(ClientNonce).Serialize();

    // The total payload length is written first, followed by each field.
    int length = username.Length + authId.Length + sessionId.Length + nonce.Length;

    return new ByteBuilder()
        .Append(length, true)
        .Append(username)
        .Append(authId)
        .Append(sessionId)
        .Append(nonce)
        .ToArray();
}
/// <summary>
/// Serializes this instance of the ClientMessage2 class into the byte
/// sequence required by the SRP specification.
/// </summary>
/// <returns>A sequence of bytes representing this instance of the
/// ClientMessage2 class.</returns>
/// <exception cref="OverflowException">Thrown if the cumulative length of the
/// serialized data fields exceeds the maximum number of bytes allowed by the
/// SRP specification.</exception>
/// <remarks>The SRP specification imposes a limit of 2,147,483,643 bytes on
/// the serialized data.</remarks>
public byte[] Serialize()
{
    byte[] publicKey = PublicKey.Serialize();
    byte[] M1 = new OctetSequence(Proof).Serialize();
    byte[] cIV = new OctetSequence(InitialVector).Serialize();
    byte[] options = new Utf8String(BuildOptionsString()).Serialize();

    // The total payload length is written first; note the field order is
    // publicKey, M1, options, cIV.
    int length = publicKey.Length + M1.Length + cIV.Length + options.Length;

    return new ByteBuilder()
        .Append(length, true)
        .Append(publicKey)
        .Append(M1)
        .Append(options)
        .Append(cIV)
        .ToArray();
}
// For ASCII input, byte enumeration and code-point enumeration must both
// line up one-to-one with the UTF-16 chars of the source string.
public void AsciiStringEnumerators(string s)
{
    Utf8String u8s = new Utf8String(Encoding.UTF8.GetBytes(s));
    Utf8String.Enumerator byteEnum = u8s.GetEnumerator();
    Utf8String.CodePointEnumerator cpEnum = u8s.CodePoints.GetEnumerator();

    Assert.Equal(s.Length, u8s.Length);

    for (int i = 0; i < s.Length; i++)
    {
        Assert.True(byteEnum.MoveNext());
        Assert.True(cpEnum.MoveNext());
        Assert.Equal((byte)s[i], (byte)u8s[i]);
        Assert.Equal(u8s[i], byteEnum.Current);
        Assert.Equal((byte)s[i], (byte)(uint)cpEnum.Current);
    }
}
// Creates a reader over the given UTF-8 text and builds the lookup table
// that maps structural characters to their token types.
public JsonReader(Utf8String str)
{
    _str = str;
    _index = 0;
    TokenType = JsonTokenType.ObjectStart;
    _insideArray = false;

    _mapping = new Dictionary<Utf8CodeUnit, JsonTokenType>();
    _mapping.Add(CurlyOpenString, JsonTokenType.ObjectStart);
    _mapping.Add(CurlyCloseString, JsonTokenType.ObjectEnd);
    _mapping.Add(SquareOpenString, JsonTokenType.ArrayStart);
    _mapping.Add(SquareCloseString, JsonTokenType.ArrayEnd);
    _mapping.Add(QuoteString, JsonTokenType.PropertyName);
}
// Parses an unsigned 32-bit decimal integer from UTF-8 text. Stops at the
// first non-digit; returns false on empty input, a leading non-digit, or
// overflow — in all failure cases value and bytesConsumed are zero.
public static bool TryParseUInt32(Utf8String text, out uint value, out int bytesConsumed)
{
    value = 0;
    bytesConsumed = 0;

    // Precondition replacement
    if (text.Length < 1)
    {
        return false;
    }

    for (int byteIndex = 0; byteIndex < text.Length; byteIndex++)
    {
        // Bytes below '0' wrap around to values > 9, so a single comparison
        // filters out every non-digit.
        byte digit = (byte)((byte)text[byteIndex] - '0');
        if (digit > 9)
        {
            if (bytesConsumed == 0)
            {
                value = default(uint);
                return false; // no digits at all
            }
            return true; // stop at the first non-digit
        }

        // overflow: value * 10 would exceed UInt32.MaxValue
        if (value > UInt32.MaxValue / 10)
        {
            value = 0;
            bytesConsumed = 0;
            return false;
        }
        // overflow: adding this digit would exceed UInt32.MaxValue
        if (value > 0 && UInt32.MaxValue - value * 10 < digit)
        {
            value = 0;
            bytesConsumed = 0;
            return false;
        }

        value = value * 10 + digit;
        bytesConsumed++;
    }
    return true;
}
// Runs the forward and reverse code-point enumerator checks over a Utf8String
// built from a System.String and over one built from a raw byte pointer.
public void CodePointEnumeratorsTests(string s)
{
    Utf8String fromString = new Utf8String(s);
    TestCodePointForwardEnumerator(s, fromString);
    TestCodePointReverseEnumerator(s, fromString);

    byte[] bytes = fromString.CopyBytes();
    unsafe
    {
        fixed (byte* pinnedBytes = bytes)
        {
            var fromPointer = new Utf8String(new Span<byte>(pinnedBytes, fromString.Length));
            TestCodePointForwardEnumerator(s, fromPointer);
            TestCodePointReverseEnumerator(s, fromPointer);
        }
    }
}
/// <summary>
/// Recursively collects every Value whose pair name equals
/// <paramref name="str"/>, descending into nested objects and into
/// object-typed values inside arrays.
/// </summary>
/// NOTE(review): the null checks below return the partial result collected so
/// far instead of skipping the offending entry — presumably nulls only appear
/// at the tail of well-formed input; confirm against the parser.
public List<Value> GetValueFromPropertyName(Utf8String str, Object obj)
{
    var values = new List<Value>();
    if (obj == null || obj.Members == null) return values;
    foreach (var member in obj.Members)
    {
        if (member == null || member.Pairs == null) return values;
        foreach (var pair in member.Pairs)
        {
            if (pair == null || pair.Value == null) return values;
            // Recurse into nested objects.
            if (pair.Value.Type == Value.ValueType.Object)
            {
                values.AddRange(GetValueFromPropertyName(str, pair.Value.ObjectValue));
            }
            // Recurse into object-typed values inside array elements.
            if (pair.Value.Type == Value.ValueType.Array)
            {
                if (pair.Value.ArrayValue == null || pair.Value.ArrayValue.Elements == null) return values;
                foreach (var element in pair.Value.ArrayValue.Elements)
                {
                    if (element == null || element.Values == null) return values;
                    foreach (var value in element.Values)
                    {
                        if (value != null && value.Type == Value.ValueType.Object)
                        {
                            values.AddRange(GetValueFromPropertyName(str, value.ObjectValue));
                        }
                    }
                }
            }
            // Record the pair itself when its name matches.
            if (new Utf8String(pair.Name) == str)
            {
                values.Add(pair.Value);
            }
        }
    }
    return values;
}
// Measures code-point enumeration over a Utf8String constructed from a byte
// array (rather than directly from a System.String).
public void EnumerateCodePointsConstructFromByteArray()
{
    foreach (StringWithDescription testData in StringsWithDescription())
    {
        string s = testData.String;
        Utf8String utf8s = new Utf8String(s);
        utf8s = new Utf8String(utf8s.CopyBytes());

        int remaining = testData.Iterations;
        _timer.Restart();
        while (remaining-- != 0)
        {
            foreach (UnicodeCodePoint codePoint in utf8s.CodePoints)
            {
                // enumeration is the work being measured; body intentionally empty
            }
        }
        PrintTime(testData);
    }
}
/// <summary>
/// Looks up a property by name in this JSON object's row database. Returns
/// true with the property's value on success, false when the name is absent.
/// </summary>
/// <exception cref="KeyNotFoundException">The record database is empty.</exception>
/// <exception cref="NullReferenceException">This value is not a JSON object.</exception>
public bool TryGetValue(Utf8String propertyName, out JsonObject value)
{
    var record = Record;
    if (record.Length == 0)
    {
        throw new KeyNotFoundException();
    }
    if (record.Type != JsonValueType.Object)
    {
        throw new NullReferenceException();
    }
    // Scan the row database one DbRow at a time, starting after this object's
    // own row. NOTE(review): the bound is "i <= _db.Length"; presumably the
    // last valid row starts at _db.Length - DbRow.Size — confirm off-by-one.
    for (int i = DbRow.Size; i <= _db.Length; i += DbRow.Size)
    {
        record = _db.Slice(i).Read<DbRow>();
        // Composite rows (objects/arrays) span Length child rows; skip them.
        if (!record.IsSimpleValue)
        {
            i += record.Length * DbRow.Size;
            continue;
        }
        // Compare this row's raw text against the requested property name.
        if (new Utf8String(_values.Slice(record.Location, record.Length)) == propertyName)
        {
            // The value row follows the name row; composite values also carry
            // their child rows in the returned slice.
            int newStart = i + DbRow.Size;
            int newEnd = newStart + DbRow.Size;
            record = _db.Slice(newStart).Read<DbRow>();
            if (!record.IsSimpleValue)
            {
                newEnd = newEnd + DbRow.Size * record.Length;
            }
            value = new JsonObject(_values, _db.Slice(newStart, newEnd - newStart));
            return true;
        }
        // Peek at the following row's type (offset 8 — presumably the type
        // field inside DbRow; confirm layout). Simple values occupy a single
        // row, so advance past it.
        var valueType = _db.Slice(i + DbRow.Size + 8).Read<JsonValueType>();
        if (valueType != JsonValueType.Object && valueType != JsonValueType.Array)
        {
            i += DbRow.Size;
        }
    }
    value = default(JsonObject);
    return false;
}
// Measures Utf8String construction from System.String across short/long and
// ASCII/non-ASCII inputs.
public void ConstructFromString()
{
    TestCase[] testCases = new TestCase[]
    {
        new TestCase(GetRandomString(5, 32, 126), "Short ASCII string", 6000000),
        new TestCase(GetRandomString(5, 32, 0xD7FF), "Short string", 6000000),
        new TestCase(GetRandomString(50000, 32, 126), "Long ASCII string", 600),
        new TestCase(GetRandomString(50000, 32, 0xD7FF), "Long string", 600)
    };

    foreach (TestCase testData in testCases)
    {
        string s = testData.String;
        int remaining = testData.Iterations;

        _timer.Restart();
        while (remaining-- != 0)
        {
            Utf8String utf8s = new Utf8String(s);
        }
        PrintTime(testData);
    }
}
// Case-insensitive test for the ASCII literal "true" in the first four bytes
// of the text.
private static bool IsTrue(Utf8String text)
{
    if (text.Length < 4)
    {
        return false;
    }

    byte c0 = text[0];
    byte c1 = text[1];
    byte c2 = text[2];
    byte c3 = text[3];

    return (c0 == 't' || c0 == 'T')
        && (c1 == 'r' || c1 == 'R')
        && (c2 == 'u' || c2 == 'U')
        && (c3 == 'e' || c3 == 'E');
}
// Measures code-point enumeration over a Utf8String constructed from a byte
// array, across short/long and ASCII/non-ASCII inputs.
public void EnumerateCodePointsConstructFromByteArray()
{
    TestCase[] testCases = new TestCase[]
    {
        new TestCase(GetRandomString(5, 32, 126), "Short ASCII string", 25000000),
        new TestCase(GetRandomString(5, 32, 0xD7FF), "Short string", 25000000),
        new TestCase(GetRandomString(50000, 32, 126), "Long ASCII string", 2500),
        new TestCase(GetRandomString(50000, 32, 0xD7FF), "Long string", 2500)
    };

    foreach (TestCase testData in testCases)
    {
        string s = testData.String;
        Utf8String utf8s = new Utf8String(s);
        utf8s = new Utf8String(utf8s.CopyBytes());

        int remaining = testData.Iterations;
        _timer.Restart();
        while (remaining-- != 0)
        {
            foreach (UnicodeCodePoint codePoint in utf8s.CodePoints)
            {
                // enumeration is the work being measured; body intentionally empty
            }
        }
        PrintTime(testData);
    }
}
// Demonstrates the allocation/copy behavior of System.String versus the
// copy-free, data-sharing behavior of Utf8String.
public static void Run()
{
    // UTF-8 bytes of the JSON text: { "key": "aあ😀" }
    var utf8RawData = new byte[] { 0x7B, 0x20, 0x22, 0x6B, 0x65, 0x79, 0x22, 0x3A, 0x20, 0x22, 0x61, 0xE3, 0x81, 0x82, 0xF0, 0x9F, 0x98, 0x80, 0x22, 0x20, 0x7D };
    // The same text as UTF-16 code units (😀 is the surrogate pair 0xD83D 0xDE00).
    var utf16RawData = new char[] { '{', ' ', '"', 'k', 'e', 'y', '"', ':', ' ', '"', 'a', 'あ', (char)0xD83D, (char)0xDE00, '"', ' ', '}' };

    // System.String
    {
        // UTF-8 -> UTF-16 conversion requires a heap allocation.
        var s1 = System.Text.Encoding.UTF8.GetString(utf8RawData);
        // Even when given a char[], string makes an internal copy, so a heap
        // allocation still occurs.
        var s2 = new string(utf16RawData);
        // string.Substring copies as well.
        var sub = s1.Substring(10, 4);
        Console.WriteLine(sub);
    }

    // Utf8String
    {
        // Implementation that does not allocate on the heap.
        var s = new Utf8String(utf8RawData);
        // There is no per-character indexer; s[0] is byte-level access.
        // Use CodePoints to extract code points.
        // foreach expands entirely to structs, so no heap allocation is needed.
        foreach (var c in s)
        {
            Console.WriteLine(c);
        }
        // Substring is implemented without copying too.
        var sub = s.Substring(10, 8);
        foreach (var c in sub)
        {
            Console.WriteLine(c);
        }
    }

    // System.String
    {
        // Because string copies internally...
        var s1 = new string(utf16RawData);
        var s2 = new string(utf16RawData);
        // ...mutating the source data afterwards...
        utf16RawData[0] = '[';
        utf16RawData[16] = ']';
        // ...has no effect on the strings.
        Console.WriteLine(s1); // { "key": "aあ😀" }
        Console.WriteLine(s2); // { "key": "aあ😀" }
    }

    // Utf8String
    {
        // Because Utf8String shares the underlying data...
        var s1 = new Utf8String(utf8RawData);
        var s2 = new Utf8String(utf8RawData);
        //98, 227, 129, 132, 240, 159, 144, 136
        // ...mutating the source data afterwards...
        utf8RawData[10] = 98;
        utf8RawData[11] = 227;
        utf8RawData[12] = 129;
        utf8RawData[13] = 132;
        utf8RawData[14] = 240;
        utf8RawData[15] = 159;
        utf8RawData[16] = 144;
        utf8RawData[17] = 136;
        // ...does affect both instances.
        Console.WriteLine(s1); // { "key": "bい🐈" }
        Console.WriteLine(s2); // { "key": "bい🐈" }
        Console.WriteLine(s1.Substring(10, 8)); // bい🐈
    }
}
// Associates the property with its owning object. The name parameter is
// intentionally NOT stored yet: per the TODO below, Utf8String wraps a span
// and must not be placed on the heap, so the _name assignment stays disabled.
public JsonProperty(JsonDynamicObject obj, Utf8String name)
{
    _object = obj;
    //TODO: no spans on the heap
    //_name = name;
}
// Wraps raw UTF-8 text as a JSON value; the type defaults to String but may
// be overridden (e.g. Number) by the caller.
public JsonValue(Utf8String value, JsonValueType type = JsonValueType.String)
{
    _object = null;
    _value = value;
    _type = type;
}
/// <summary>
/// Parses UTF-8 JSON text into a tree of JsonDynamicObject instances.
/// </summary>
/// <param name="utf8">The UTF-8 encoded JSON payload.</param>
/// <param name="expectedNumberOfProperties">Initial capacity for the property
/// dictionary; when -1, estimated as utf8.Length / 8.</param>
/// <exception cref="NotImplementedException">Arrays are not supported yet.</exception>
/// <exception cref="NotSupportedException">An unexpected token or value type
/// was encountered.</exception>
public static JsonDynamicObject Parse(ReadOnlySpan<byte> utf8, int expectedNumberOfProperties = -1)
{
    // Stack of objects currently being populated; the bottom entry is the root.
    Stack<JsonDynamicObject> stack = new Stack<JsonDynamicObject>();
    if (expectedNumberOfProperties == -1)
    {
        // Heuristic: roughly one property per 8 input bytes.
        expectedNumberOfProperties = utf8.Length >> 3;
    }
    // NOTE(review): this single dictionary is shared by the root and every
    // nested object (see the Object case below) — presumably JsonProperty
    // keys embed their owning object so entries stay distinct; confirm.
    var properties = new Dictionary<JsonProperty, JsonValue>(expectedNumberOfProperties);
    stack.Push(new JsonDynamicObject(properties));

    var reader = new Utf8JsonReader(utf8);
    while (reader.Read())
    {
        switch (reader.TokenType)
        {
            case JsonTokenType.PropertyName:
                var name = new Utf8String(reader.Value);
                reader.Read(); // Move to the value token
                var type = reader.ValueType;
                var current = stack.Peek();
                var property = new JsonProperty(current, name);
                switch (type)
                {
                    case JsonValueType.String:
                        current._properties[property] = new JsonValue(new Utf8String(reader.Value));
                        break;
                    case JsonValueType.Object: // TODO: could this be lazy? Could this reuse the root JsonObject (which would store non-allocating JsonDom)?
                        var newObj = new JsonDynamicObject(properties);
                        current._properties[property] = new JsonValue(newObj);
                        stack.Push(newObj);
                        break;
                    case JsonValueType.True:
                        current._properties[property] = new JsonValue(type);
                        break;
                    case JsonValueType.False:
                        current._properties[property] = new JsonValue(type);
                        break;
                    case JsonValueType.Null:
                        current._properties[property] = new JsonValue(type);
                        break;
                    case JsonValueType.Number:
                        // Numbers are stored as raw text; parsed on demand.
                        current._properties[property] = new JsonValue(new Utf8String(reader.Value), type);
                        break;
                    case JsonValueType.Array:
                        throw new NotImplementedException("array support not implemented yet.");
                    default:
                        throw new NotSupportedException();
                }
                break;
            case JsonTokenType.StartObject:
                break;
            case JsonTokenType.EndObject:
                // Keep the root on the stack so the final Peek() returns it.
                if (stack.Count != 1)
                {
                    stack.Pop();
                }
                break;
            case JsonTokenType.StartArray:
                throw new NotImplementedException("array support not implemented yet.");
            case JsonTokenType.EndArray:
            case JsonTokenType.Value:
                break;
            default:
                throw new NotSupportedException();
        }
    }
    return(stack.Peek());
}
// Creates a value that carries only a type tag (e.g. true/false/null).
public JsonValue(JsonValueType type)
{
    _value = default(Utf8String);
    _object = null;
    _type = type;
}
// Binds a property name to its owning dynamic object.
public JsonProperty(JsonDynamicObject obj, Utf8String name)
{
    _name = name;
    _object = obj;
}
// Returns the cached read-only data blob node for the given
// (name, data, alignment) triple, creating it on first request.
public BlobNode ReadOnlyDataBlob(Utf8String name, byte[] blobData, int alignment)
{
    var key = new Tuple<Utf8String, byte[], int>(name, blobData, alignment);
    return _readOnlyDataBlobs.GetOrAdd(key);
}
// Initializes the shared UTF-8 test string.
public TypeConstraintsTests()
{
    // FIX: the u8 suffix must immediately follow the closing quote;
    // `"anyString" u8` (with a space) does not compile.
    _anyUtf8String = "anyString"u8;
}
// Writes the UTF-8 string followed by a line terminator.
public static void WriteLine(Utf8String str)
{
    Write(str);
    WriteLine();
}
// Captures the name and region for a create-guild request.
public CreateGuildParams(Utf8String name, Utf8String region)
{
    Region = region;
    Name = name;
}
/// <summary>
/// Locates the index record for the given locale in the resource stream and
/// builds a UTF-16 text encoder from the string table it points to.
/// </summary>
/// <param name="localeId">Locale identifier to look up (max 15 UTF-8 bytes).</param>
/// <param name="resourceStream">Stream holding the record count, index, and data.</param>
/// <exception cref="Exception">The locale id cannot be encoded into 15 bytes,
/// or no index record matches it.</exception>
private static TextEncoder CreateEncoder(string localeId, Stream resourceStream)
{
    const int maxIdLength = 15;
    const int recordSize = 20;

    // The stream starts with a big-endian UInt16 record count.
    var b1 = resourceStream.ReadByte();
    var b2 = resourceStream.ReadByte();
    var numberOfIDs = b1 * 256 + b2;

    // FIX: use the declared recordSize constant instead of a duplicated
    // magic 20 (same value, one source of truth).
    var indexSize = numberOfIDs * recordSize;
    var index = new byte[indexSize];
    resourceStream.Read(index, 0, indexSize);

    // Encode the locale id to UTF-8 for comparison against index entries.
    byte[] idBytes = new byte[maxIdLength];
    int idByteCount;
    if (!TextEncoder.Utf8.TryEncode(localeId, new Span<byte>(idBytes), out idByteCount))
    {
        throw new Exception("bad locale id");
    }
    var id = new Utf8String(idBytes.AsSpan().Slice(0, idByteCount));

    // Each index record is [id: 15 bytes][data offset: 4 bytes little-endian].
    int recordStart = -1;
    for (int record = 0; record < numberOfIDs; record++)
    {
        // NOTE(review): only the first idByteCount bytes are compared, so a
        // stored id that merely starts with the requested id would also
        // match — confirm ids are padded/terminated in the resource.
        var indexId = index.AsSpan().Slice(record * recordSize, idByteCount);
        if (id.Equals(new Utf8String(indexId))) // found record
        {
            var indexData = index.AsSpan().Slice(record * recordSize + maxIdLength);
            recordStart = 0;
            recordStart += indexData[3] * 256 * 256 * 256;
            recordStart += indexData[2] * 256 * 256;
            recordStart += indexData[1] * 256;
            recordStart += indexData[0];
            break;
        }
    }
    if (recordStart == -1)
    {
        throw new Exception("local not found");
    }

    resourceStream.Position = recordStart;
    const int bufferSize = 512;
    var data = new byte[bufferSize];
    var bytesRead = resourceStream.Read(data, 0, bufferSize);

    // TODO: maybe we should store length in the index
    var numberOfStrings = ReadUInt16At(data, 0);
    Debug.Assert(numberOfStrings == 17);

    // Each string is described by a (start, length) pair of UInt16 fields.
    var utf16digitsAndSymbols = new byte[numberOfStrings][];
    for (int stringIndex = 0; stringIndex < numberOfStrings; stringIndex++)
    {
        var stringStart = ReadUInt16At(data, stringIndex * 2 + 1);
        var stringLength = ReadUInt16At(data, stringIndex * 2 + 2);
        utf16digitsAndSymbols[stringIndex] = new byte[stringLength];
        Array.Copy(data, stringStart, utf16digitsAndSymbols[stringIndex], 0, stringLength);
    }

    return TextEncoder.CreateUtf16Encoder(utf16digitsAndSymbols);
}
// Wraps a nested JSON object as a value tagged with type Object.
public JsonValue(JsonDynamicObject obj)
{
    _object = obj;
    _value = default(Utf8String);
    _type = JsonValueType.Object;
}