public void CopyTo_Roundtrip_MatchesOriginalInput(char[] values)
{
    // Unmanaged buffer sized to hold every element of the input.
    int byteCount = sizeof(char) * values.Length;
    IntPtr nativeBuffer = Marshal.AllocCoTaskMem(byteCount);
    try
    {
        // Full-array round trip: managed -> native -> managed must be lossless.
        Marshal.Copy(values, 0, nativeBuffer, values.Length);
        char[] roundTripped = new char[values.Length];
        Marshal.Copy(nativeBuffer, roundTripped, 0, values.Length);
        Assert.Equal<char>(values, roundTripped);

        // Partial round trip: copy the middle slice (skipping two elements at
        // each end) and verify only that slice survives.
        Marshal.Copy(values, 2, nativeBuffer, values.Length - 4);
        char[] partial = new char[values.Length];
        Marshal.Copy(nativeBuffer, partial, 2, values.Length - 4);
        Assert.Equal<char>(
            values.AsSpan(2, values.Length - 4).ToArray(),
            partial.AsSpan(2, values.Length - 4).ToArray());
    }
    finally
    {
        Marshal.FreeCoTaskMem(nativeBuffer);
    }
}
public static unsafe string ToString(ReadOnlySpan<byte> bytes, Casing casing = Casing.Upper)
{
    // Each input byte expands to exactly two hex characters.
    int charCount = bytes.Length * 2;

    // Small inputs stay on the stack; larger ones fall back to the heap.
    Span<char> buffer = stackalloc char[0];
    if (bytes.Length > 16)
    {
        buffer = new char[charCount];
    }
    else
    {
        buffer = stackalloc char[charCount];
    }

    int offset = 0;
    for (int i = 0; i < bytes.Length; i++)
    {
        ToCharsBuffer(bytes[i], buffer, offset, casing);
        offset += 2;
    }

    return buffer.ToString();
}
public string AddStrings(string num1, string num2)
{
    // The sum of two decimal strings is at most one digit longer than the
    // longer operand, so size the scratch buffer accordingly.
    int capacity = (num1.Length >= num2.Length ? num1.Length : num2.Length) + 1;
    char[] digits = new char[capacity];

    int write = capacity;       // digits fill right-to-left; next slot is write - 1
    int i = num1.Length - 1;
    int j = num2.Length - 1;
    int carry = 0;

    // Classic schoolbook addition from the least significant digit.
    while (i >= 0 || j >= 0)
    {
        int d1 = i >= 0 ? num1[i] - '0' : 0;
        int d2 = j >= 0 ? num2[j] - '0' : 0;
        int sum = d1 + d2 + carry;
        digits[write - 1] = (char)('0' + sum % 10);
        carry = sum / 10;
        i--;
        j--;
        write--;
    }

    // A final carry adds one extra leading digit.
    if (carry != 0)
    {
        digits[--write] = (char)('0' + carry);
    }

    // Only the tail of the buffer was populated.
    return new string(digits.AsSpan(write, capacity - write));
}
public async Task <string> SolvePart2Async(Stream input)
{
    var line = await input.ReadLineAsync();

    // Split the digit stream into layers of Width * Height pixels each.
    int pixelsPerLayer = _param.Width * _param.Height;
    var layers = line.AsEnumerable()
        .Batch(pixelsPerLayer)
        .Select(l => l.ToArray())
        .ToArray();

    // For each pixel the first non-transparent ('2') layer wins:
    // '0' renders as a blank, anything else as 'X'.
    var image = new char[pixelsPerLayer];
    for (int i = 0; i < pixelsPerLayer; i++)
    {
        foreach (var layer in layers)
        {
            if (layer[i] == '2') { continue; }
            image[i] = layer[i] == '0' ? ' ' : 'X';
            break;
        }
    }

    // Render row by row. StringBuilder replaces the original repeated string
    // concatenation, which was O(rows^2) in total copied characters.
    var rendered = new System.Text.StringBuilder();
    for (int i = 0; i < pixelsPerLayer; i += _param.Width)
    {
        rendered.Append(image.AsSpan(i, _param.Width)).Append(Environment.NewLine);
    }
    _logger.LogInformation(rendered.ToString());

    // todo ocr — the rendered letters still need to be read by a human.
    return(null);
}
public static void TryCopyT61String(
    PublicEncodingRules ruleSet,
    string inputHex,
    string expectedValue)
{
    byte[] encoded = inputHex.HexToByteArray();
    char[] destination = new char[expectedValue.Length];
    AsnReader reader = new AsnReader(encoded, (AsnEncodingRules)ruleSet);
    bool success;
    int written;

    if (destination.Length > 0)
    {
        // A destination one char too short must fail without writing anything;
        // the sentinel 'a' proves the buffer was left untouched.
        destination[0] = 'a';
        success = reader.TryCopyCharacterString(
            UniversalTagNumber.T61String,
            destination.AsSpan(0, expectedValue.Length - 1),
            out written);
        Assert.False(success, "reader.TryCopyT61String - too short");
        Assert.Equal(0, written);
        Assert.Equal('a', destination[0]);
    }

    // A correctly sized destination succeeds and yields the expected text.
    success = reader.TryCopyCharacterString(
        UniversalTagNumber.T61String,
        destination,
        out written);
    Assert.True(success, "reader.TryCopyT61String");
    Assert.Equal(expectedValue, new string(destination, 0, written));
}
public static string ToString(ReadOnlySpan<byte> bytes, Casing casing = Casing.Upper)
{
    int outputLength = bytes.Length * 2; // two hex digits per byte

    // ReSharper disable once RedundantAssignment
    Span<char> buffer = stackalloc char[0];
    if (bytes.Length <= 16)
    {
        // Small enough for the stack.
        buffer = stackalloc char[outputLength];
    }
    else
    {
        // Too large for stack allocation; use the heap instead.
        buffer = new char[outputLength];
    }

    var writeIndex = 0;
    foreach (var value in bytes)
    {
        ToCharsBuffer(value, buffer, writeIndex, casing);
        writeIndex += 2;
    }

    return buffer.ToString();
}
public void StringDestinationShorterThanSourceDecodeRequestLineThrows()
{
    // A destination span shorter than the source must be rejected up front.
    var buffer = new char[2];
    Assert.Throws<ArgumentException>(
        () => UrlDecoder.DecodeRequestLine(buffer.AsSpan(), buffer.AsSpan(0, 1)));
}
public static void GetChars_Span_MatchesEncodingGetChars()
{
    const string Text = "hello world";
    Encoding utf8 = Encoding.UTF8;
    byte[] encoded = utf8.GetBytes(Text);
    char[] decoded = new char[Text.Length];

    // The span-based decoder must report one char per input character...
    int written = utf8.GetDecoder().GetChars(encoded.AsReadOnlySpan(), decoded.AsSpan(), flush: true);
    Assert.Equal(decoded.Length, written);

    // ...and reproduce the original text exactly.
    Assert.Equal(Text, new string(decoded));
}
// Cross-validates the UTF-8 decoding pipeline over the fuzz input in _data:
// Encoding.UTF8, Rune-by-Rune decoding, and Utf8.ToUtf16 must all agree on
// char counts, contents, and invalid-sequence replacement behavior.
// Throws on the first disagreement; prints progress to the console.
public void RunTest() {
    Console.WriteLine("-- BEGIN TEST --");
    int encodingCharCount = Encoding.UTF8.GetCharCount(_data.Span);
    Console.WriteLine($"Encoding.UTF8.GetCharCount returned {encodingCharCount}.");
    {
        // Count UTF-16 chars by iterating Runes; must match GetCharCount.
        ReadOnlySpan <byte> input = _data.Span;
        int runeIterCharCount = 0;
        while (!input.IsEmpty) {
            Rune.DecodeFromUtf8(input, out Rune thisRune, out int bytesConsumed);
            runeIterCharCount += thisRune.Utf16SequenceLength; // ok if U+FFFD replacement
            input = input.Slice(bytesConsumed);
        }
        Console.WriteLine($"Rune iteration said there were {runeIterCharCount} UTF-16 chars.");
        if (encodingCharCount != runeIterCharCount) { throw new Exception("Rune iteration char count mismatch!!"); }
    }
    // Decode once via Encoding.UTF8 into an exactly sized buffer.
    char[] chars = new char[encodingCharCount];
    int charsWritten = Encoding.UTF8.GetChars(_data.Span, chars);
    Console.WriteLine($"Encoding.UTF8.GetChars returned {charsWritten} chars written.");
    if (encodingCharCount != charsWritten) { throw new Exception("GetChars return value mismatch!!"); }
    {
        // Walk the UTF-8 input and UTF-16 output in lockstep, one Rune at a
        // time; every decoded scalar must match.
        ReadOnlySpan <byte> inputUtf8 = _data.Span;
        ReadOnlySpan <char> inputUtf16 = chars;
        while (!inputUtf8.IsEmpty && !inputUtf16.IsEmpty) {
            Rune.DecodeFromUtf8(inputUtf8, out Rune inputUtf8Rune, out int bytesConsumed);
            Rune.DecodeFromUtf16(inputUtf16, out Rune inputUtf16Rune, out int charsConsumed);
            if (inputUtf8Rune != inputUtf16Rune) { throw new Exception("Enumerating runes mismatch!!"); }
            inputUtf8 = inputUtf8.Slice(bytesConsumed);
            inputUtf16 = inputUtf16.Slice(charsConsumed);
        }
        // Both spans must be exhausted at the same time.
        if (inputUtf8.Length != inputUtf16.Length) { throw new Exception("Rune enumeration returned mismatched lengths!"); }
    }
    Console.WriteLine("Running ToUtf16 with replace=true and exact size buffer.");
    {
        char[] chars2 = new char[chars.Length];
        OperationStatus opStatus = Utf8.ToUtf16(_data.Span, chars2, out int bytesReadJustNow, out int charsWrittenJustNow, replaceInvalidSequences: true, isFinalBlock: true);
        if (opStatus != OperationStatus.Done) { throw new Exception("Utf8.ToUtf16 returned wrong OperationStatus!!"); }
        if (bytesReadJustNow != _data.Memory.Length) { throw new Exception("Utf8.ToUtf16 didn't read entire input!!"); }
        if (charsWrittenJustNow != chars2.Length) { throw new Exception("Utf8.ToUtf16 didn't fill entire response buffer!!"); }
        if (!chars.SequenceEqual(chars2)) { throw new Exception("Utf8.ToUtf16 returned different data than Encoding.UTF8.GetChars!!"); }
    }
    Console.WriteLine("Running ToUtf16 with replace=true and extra large buffer.");
    {
        // Oversized destination must not change the amount read or written.
        char[] chars2 = new char[chars.Length + 1024];
        OperationStatus opStatus = Utf8.ToUtf16(_data.Span, chars2, out int bytesReadJustNow, out int charsWrittenJustNow, replaceInvalidSequences: true, isFinalBlock: true);
        if (opStatus != OperationStatus.Done) { throw new Exception("Utf8.ToUtf16 returned wrong OperationStatus!!"); }
        if (bytesReadJustNow != _data.Memory.Length) { throw new Exception("Utf8.ToUtf16 didn't read entire input!!"); }
        if (charsWrittenJustNow != chars.Length) { throw new Exception("Utf8.ToUtf16 didn't fill entire response buffer!!"); }
        if (!chars2.AsSpan(0, charsWrittenJustNow).SequenceEqual(chars)) { throw new Exception("Utf8.ToUtf16 returned different data than Encoding.UTF8.GetChars!!"); }
    }
    Console.WriteLine("Running ToUtf16 with replace=false and extra large buffer.");
    {
        // With replacement disabled, decode in chunks: each successfully
        // converted chunk is re-verified Rune-by-Rune, and invalid sequences
        // are skipped manually before continuing.
        ReadOnlySpan <byte> input = _data.Span;
        Span <char> output = new char[chars.Length + 1024];
        while (!input.IsEmpty) {
            OperationStatus opStatus = Utf8.ToUtf16(input, output, out int bytesReadJustNow, out int charsWrittenJustNow, replaceInvalidSequences: false, isFinalBlock: true);
            ReadOnlySpan <byte> dataReadJustNow = input.Slice(0, bytesReadJustNow);
            ReadOnlySpan <char> dataWrittenJustNow = output.Slice(0, charsWrittenJustNow);
            while (!dataReadJustNow.IsEmpty && !dataWrittenJustNow.IsEmpty) {
                OperationStatus utf8Status = Rune.DecodeFromUtf8(dataReadJustNow, out Rune inputUtf8Rune, out int bytesConsumed);
                OperationStatus utf16Status = Rune.DecodeFromUtf16(dataWrittenJustNow, out Rune inputUtf16Rune, out int charsConsumed);
                if (utf8Status != OperationStatus.Done) { throw new Exception("DecodeFromUtf8 returned unexpected value!!"); }
                if (utf16Status != OperationStatus.Done) { throw new Exception("DecodeFromUtf16 returned unexpected value!!"); }
                if (inputUtf8Rune != inputUtf16Rune) { throw new Exception("Enumerating runes mismatch!!"); }
                dataReadJustNow = dataReadJustNow.Slice(bytesConsumed);
                dataWrittenJustNow = dataWrittenJustNow.Slice(charsConsumed);
            }
            if (dataReadJustNow.Length != dataWrittenJustNow.Length) { throw new Exception("Unexpected length mismatch!!"); }
            input = input.Slice(bytesReadJustNow);
            if (opStatus != OperationStatus.Done) {
                // Skip over invalid data
                Rune.DecodeFromUtf8(input, out _, out int bytesToSkip);
                input = input.Slice(bytesToSkip);
            }
        }
    }
    Console.WriteLine("Trying custom decoder replacement.");
    {
        // use a custom replacement string
        Encoding encoding = Encoding.GetEncoding("utf-8", EncoderFallback.ExceptionFallback, new DecoderReplacementFallback("{BAD}"));
        string decoded = encoding.GetString(_data.Span);
        // Re-create the same output manually: convert valid chunks with
        // Utf8.ToUtf16 and splice in "{BAD}" wherever a sequence is invalid.
        ReadOnlySpan <byte> input = _data.Span;
        char[] decoded2 = new char[decoded.Length];
        StringBuilder builder = new StringBuilder();
        while (!input.IsEmpty) {
            OperationStatus opStatus = Utf8.ToUtf16(input, decoded2, out int bytesReadJustNow, out int charsWrittenJustNow, replaceInvalidSequences: false, isFinalBlock: true);
            builder.Append(decoded2, 0, charsWrittenJustNow);
            input = input.Slice(bytesReadJustNow);
            if (opStatus != OperationStatus.Done) {
                // Skip over invalid data
                Rune.DecodeFromUtf8(input, out _, out int bytesToSkip);
                input = input.Slice(bytesToSkip);
                builder.Append("{BAD}");
            }
        }
        if (new string(decoded) != builder.ToString()) { throw new Exception("Custom decoder replacement failed!!"); }
    }
    Console.WriteLine("-- END TEST - SUCCESS --");
}
// Verifies that IA5String reads ignore the constructed bit of the expected
// tag: both constructed and primitive forms must be accepted by
// TryGetIA5StringBytes, ReadCharacterString, and TryReadCharacterString,
// and all paths must yield the same decoded value.
public static void ExpectedTag_IgnoresConstructed(
    AsnEncodingRules ruleSet,
    string inputHex,
    TagClass tagClass,
    int tagValue)
{
    byte[] inputData = inputHex.HexToByteArray();
    Asn1Tag correctCons = new Asn1Tag(tagClass, tagValue, true);
    Asn1Tag correctPrim = new Asn1Tag(tagClass, tagValue, false);

    // Raw bytes read with the constructed form of the tag...
    AsnReader reader = new AsnReader(inputData, ruleSet);
    Assert.True(
        reader.TryGetIA5StringBytes(
            correctCons,
            out ReadOnlyMemory <byte> val1));
    Assert.False(reader.HasData);

    // ...and with the primitive form must both succeed and agree.
    reader = new AsnReader(inputData, ruleSet);
    Assert.True(
        reader.TryGetIA5StringBytes(
            correctPrim,
            out ReadOnlyMemory <byte> val2));
    Assert.False(reader.HasData);
    Assert.Equal(val1.ByteArrayToHex(), val2.ByteArrayToHex());

#if NETCOREAPP
    string expected = Encoding.ASCII.GetString(val1.Span);
#else
    string expected = Encoding.ASCII.GetString(val1.ToArray());
#endif

    // ReadCharacterString accepts either tag form.
    reader = new AsnReader(inputData, ruleSet);
    Assert.Equal(expected, reader.ReadCharacterString(UniversalTagNumber.IA5String, correctPrim));
    reader = new AsnReader(inputData, ruleSet);
    Assert.Equal(expected, reader.ReadCharacterString(UniversalTagNumber.IA5String, correctCons));

    // TryReadCharacterString also accepts either form; the destination is
    // deliberately offset (1, then 2) to confirm the copy lands exactly
    // where requested.
    char[] output = new char[28];
    reader = new AsnReader(inputData, ruleSet);
    Assert.True(
        reader.TryReadCharacterString(
            output.AsSpan(1),
            UniversalTagNumber.IA5String,
            out int charsWritten,
            correctPrim));
    Assert.Equal(expected, output.AsSpan(1, charsWritten).ToString());
    reader = new AsnReader(inputData, ruleSet);
    Assert.True(
        reader.TryReadCharacterString(
            output.AsSpan(2),
            UniversalTagNumber.IA5String,
            out charsWritten,
            correctCons));
    Assert.Equal(expected, output.AsSpan(2, charsWritten).ToString());
}
protected string GenerateWord(int length, bool isTitleCase = false)
{
    // Delegate to the span-based overload, then materialize the buffer.
    var buffer = new char[length];
    GenerateWord(buffer.AsSpan(), isTitleCase);
    return new string(buffer);
}
// Converts spaCy lookup-table JSON files into binary Lookups resources.
// Pass 1: "*_lemma_lookup*.json" files become string-table lookups, where
// lemma strings are deduplicated into a shared char buffer. Pass 2:
// "*_lexeme_cluster*.json" + "*_lexeme_prob*.json" pairs become
// (probability, cluster) lookups. Each file is processed on its own task.
internal static async Task RunAsync(string spacyLookupsData, string languagesDirectory)
{
    var rootLangFolder = Path.Combine(spacyLookupsData, @"spacy_lookups_data\data\");
    if (!Directory.Exists(rootLangFolder)) { throw new Exception("data directory not found"); }
    //TODO Handle rules data
    var tasks = new List <Task>();
    // Language is derived from the first two characters of the file name.
    foreach (var(file, language) in Directory.GetFiles(rootLangFolder, "*_lemma_lookup*.json").Select(f => (file:f, language: Languages.CodeToEnum(Path.GetFileName(f).Substring(0, 2)))))
    {
        tasks.Add(Task.Run(async() =>
        {
            Console.WriteLine($"\n\n\nBegin processing {file}\n\n");
            var name = Path.GetFileNameWithoutExtension(file);
            var resDir = Path.Combine(languagesDirectory, language.ToString(), "Resources");
            Directory.CreateDirectory(resDir);
            var outputFile = Path.Combine(resDir, name + ".bin");
            // Already-converted files are skipped, making re-runs incremental.
            if (File.Exists(outputFile)) { Console.WriteLine("Skipping..."); return; }
            var map = JsonConvert.DeserializeObject <Dictionary <string, string> >(FixWordsAsArrays(await File.ReadAllTextAsync(file)));
            // Upper bound: buffer big enough to hold every value un-deduplicated.
            var buffer = new char[map.Values.Sum(k => k.Length)];
            var bufferLength = 0;
            var entries = new Dictionary <ulong, Lookups.Entry>();
            int count = 0;
            // Longest values first so shorter values can be found as substrings
            // of already-stored text and share storage.
            foreach (var(k, v) in map.OrderByDescending(kv => kv.Value.Length).ThenBy(kv => kv.Value))
            {
                var keyHash = Lookups.Hash(k);
                var invKeyHash = Lookups.InvariantHash(k);
                // Reuse an existing occurrence of v in the buffer if present.
                var index = buffer.AsSpan(0, bufferLength).IndexOf(v);
                if (index < 0)
                {
                    v.AsSpan().CopyTo(buffer.AsSpan(bufferLength, v.Length));
                    // NOTE(review): (byte)v.Length silently truncates values
                    // longer than 255 chars — TODO confirm lemmas never exceed that.
                    entries.TryAdd(keyHash, new Lookups.Entry((byte)v.Length, (uint)bufferLength));
                    if (invKeyHash != keyHash) { entries.TryAdd(invKeyHash, new Lookups.Entry((byte)v.Length, (uint)bufferLength)); }
                    bufferLength += v.Length;
                    Console.Write("+");
                }
                else
                {
                    entries.TryAdd(keyHash, new Lookups.Entry((byte)v.Length, (uint)index));
                    if (invKeyHash != keyHash) { entries.TryAdd(invKeyHash, new Lookups.Entry((byte)v.Length, (uint)index)); }
                    //Console.Write(".");
                }
                count++;
                if (count % 1000 == 0) { Console.WriteLine($"\nAt {count} of {map.Count}"); }
            }
            // Trim the buffer to the chars actually used before serializing.
            Array.Resize(ref buffer, bufferLength);
            var lookup = new Lookups(name, language, new string(buffer), entries);
            using (var f = File.OpenWrite(outputFile)) { await lookup.SerializeAsync(f); }
            Console.WriteLine($"\n\n\nWrote {outputFile}\n\n");
        }));
    }
    // Pass 2: cluster + probability pairs.
    foreach (var(file, language) in Directory.GetFiles(rootLangFolder, "*_lexeme_cluster*.json").Select(f => (file: f, language: Languages.CodeToEnum(Path.GetFileName(f).Substring(0, 2)))))
    {
        tasks.Add(Task.Run(async() =>
        {
            var probFile = file.Replace("_lexeme_cluster", "_lexeme_prob");
            // Clusters without a matching probability file are ignored.
            if (!File.Exists(probFile)) { return; }
            Console.WriteLine($"\n\n\nBegin processing {file} + {probFile}\n\n");
            var name = Path.GetFileNameWithoutExtension(file);
            var resDir = Path.Combine(languagesDirectory, language.ToString(), "Resources");
            Directory.CreateDirectory(resDir);
            var outputFile = Path.Combine(resDir, name + "_prob.bin");
            if (File.Exists(outputFile)) { Console.WriteLine("Skipping..."); return; }
            var cluster = JsonConvert.DeserializeObject <Dictionary <string, uint> >(FixWordsAsArrays(await File.ReadAllTextAsync(file)));
            var prob = JsonConvert.DeserializeObject <Dictionary <string, float> >(FixWordsAsArrays(await File.ReadAllTextAsync(probFile)));
            var entries = new Dictionary <ulong, Lookups.Entry>();
            int count = 0;
            foreach (var(k, v) in cluster)
            {
                var keyHash = Lookups.Hash(k);
                var invKeyHash = Lookups.InvariantHash(k);
                // Words missing from the probability table get -25 (a floor value).
                var probVal = prob.TryGetValue(k, out var p) ? p : -25f;
                entries.TryAdd(keyHash, new Lookups.Entry(probVal, v));
                if (invKeyHash != keyHash) { entries.TryAdd(invKeyHash, new Lookups.Entry(probVal, v)); }
                count++;
                if (count % 1000 == 0) { Console.WriteLine($"\nAt {count} of {cluster.Count}"); }
            }
            var lookup = new Lookups(name, language, null, entries);
            using (var f = File.OpenWrite(outputFile)) { await lookup.SerializeAsync(f); }
            Console.WriteLine($"\n\n\nWrote {outputFile}\n\n");
        }));
    }
    await Task.WhenAll(tasks);
}
public void CopyRoundTrip_MatchesOriginalArray(char[] TestArray)
{
    // Allocate an unmanaged buffer that can hold the whole array.
    int byteLength = sizeof(char) * TestArray.Length;
    IntPtr unmanaged = Marshal.AllocCoTaskMem(byteLength);
    try
    {
        // Whole-array round trip must reproduce the input exactly.
        Marshal.Copy(TestArray, 0, unmanaged, TestArray.Length);
        char[] fullCopy = new char[TestArray.Length];
        Marshal.Copy(unmanaged, fullCopy, 0, TestArray.Length);
        Assert.Equal<char>(TestArray, fullCopy);

        // Slice round trip: only the middle (length - 4) elements travel,
        // landing at offset 2 in the destination.
        Marshal.Copy(TestArray, 2, unmanaged, TestArray.Length - 4);
        char[] sliceCopy = new char[TestArray.Length];
        Marshal.Copy(unmanaged, sliceCopy, 2, TestArray.Length - 4);
        Assert.Equal<char>(
            TestArray.AsSpan(2, TestArray.Length - 4).ToArray(),
            sliceCopy.AsSpan(2, TestArray.Length - 4).ToArray());
    }
    finally
    {
        Marshal.FreeCoTaskMem(unmanaged);
    }
}
// For a large (> stack threshold) JSON string containing one escaped 'a'
// ("\u0061") at every possible position, verifies that
// Utf8JsonReader.ValueTextEquals matches the unescaped text via all three
// overloads (UTF-8 span, UTF-16 span, string), for both contiguous and
// segmented input.
public static void TestTextEqualsLargeMatch() {
    var jsonChars = new char[320]; // Some value larger than 256 (stack threshold)
    jsonChars.AsSpan().Fill('a');
    byte[] lookup = Encoding.UTF8.GetBytes(jsonChars);
    ReadOnlySpan <char> escapedA = new char[6] { '\\', 'u', '0', '0', '6', '1' };
    ReadOnlySpan <byte> lookupSpan = lookup.AsSpan(0, lookup.Length - escapedA.Length + 1); // remove extra characters that were replaced by escaped bytes
    Span <char> lookupChars = new char[jsonChars.Length];
    jsonChars.CopyTo(lookupChars);
    lookupChars = lookupChars.Slice(0, lookupChars.Length - escapedA.Length + 1);
    // Replacing 'a' with '\u0061', so a net change of 5.
    // escapedA.Length - 1 = 6 - 1 = 5
    for (int i = 0; i < jsonChars.Length - escapedA.Length + 1; i++) {
        // Place the escape sequence at position i and rebuild the JSON payload.
        jsonChars.AsSpan().Fill('a');
        escapedA.CopyTo(jsonChars.AsSpan(i));
        string jsonString = "\"" + new string(jsonChars) + "\"";
        byte[] utf8Data = Encoding.UTF8.GetBytes(jsonString);
        // Contiguous input: all three ValueTextEquals overloads must match.
        bool found = false;
        var json = new Utf8JsonReader(utf8Data, isFinalBlock: true, state: default);
        while (json.Read()) {
            if (json.TokenType == JsonTokenType.String) {
                if (json.ValueTextEquals(lookupSpan) && json.ValueTextEquals(lookupChars) && json.ValueTextEquals(new string(lookupChars.ToArray()))) {
                    found = true;
                    break;
                }
            }
        }
        Assert.True(found, $"Json String: {jsonString}");
        // Segmented input (single-byte segments) must behave identically.
        ReadOnlySequence <byte> sequence = JsonTestHelper.GetSequence(utf8Data, 1);
        found = false;
        json = new Utf8JsonReader(sequence, isFinalBlock: true, state: default);
        while (json.Read()) {
            if (json.TokenType == JsonTokenType.String) {
                if (json.ValueTextEquals(lookupSpan) && json.ValueTextEquals(lookupChars) && json.ValueTextEquals(new string(lookupChars.ToArray()))) {
                    found = true;
                    break;
                }
            }
        }
        Assert.True(found, $"Json String: {jsonString} | Look up: {Encoding.UTF8.GetString(lookupSpan.ToArray())}");
    }
}
// JavaScript-style escape(): letters, digits and whitelisted ASCII pass
// through; chars <= 0xFF become "%XX"; chars > 0xFF become "%uXXXX".
// A null input yields the literal string "null".
public static string escape(string str = "undefined")
{
    if (str == null)
    {
        return("null");
    }

    int strlen = str.Length;
    char[] buffer = new char[strlen];
    // NOTE(review): bufLen starts at 0 even though buffer already has strlen
    // capacity, so the first write always goes through expandArray — kept as-is.
    int bufPos = 0, bufLen = 0;

    for (int i = 0; i < str.Length; i++)
    {
        char ch = str[i];

        // Digits, upper/lower ASCII letters, and whitelist chars are copied verbatim.
        bool noEncode = ((uint)(ch - '0') <= 9)
            || ((uint)(ch - 'A') <= 25)
            || ((uint)(ch - 'a') <= 25)
            || (ch <= 0x7F && ESCAPE_NO_ENCODE.Contains(ch));

        if (noEncode)
        {
            // No percent encoding
            if (bufPos == bufLen)
            {
                DataStructureUtil.expandArray(ref buffer);
                bufLen = buffer.Length;
            }
            buffer[bufPos++] = ch;
            continue;
        }

        if (ch > 0xFF)
        {
            // "%uXXXX" form for characters above U+00FF.
            if (bufLen - bufPos < 6)
            {
                DataStructureUtil.expandArray(ref buffer, 6);
                bufLen = buffer.Length;
            }
            // BUG FIX: the escape sequence must be written at the current
            // write position (bufPos), not at the fixed offset 6 as before —
            // the old code clobbered earlier output and left garbage at bufPos.
            var bufferSpan = buffer.AsSpan(bufPos);
            bufferSpan[0] = '%';
            bufferSpan[1] = 'u';
            URIUtil.byteToHex((byte)(ch >> 8), bufferSpan.Slice(2));
            URIUtil.byteToHex((byte)ch, bufferSpan.Slice(4));
            bufPos += 6;
        }
        else
        {
            // "%XX" form for characters in the 0x00-0xFF range.
            if (bufLen - bufPos < 3)
            {
                DataStructureUtil.expandArray(ref buffer, 3);
                bufLen = buffer.Length;
            }
            buffer[bufPos] = '%';
            URIUtil.byteToHex((byte)ch, buffer.AsSpan(bufPos + 1));
            bufPos += 3;
        }
    }

    return(new string(buffer, 0, bufPos));
}
/// <summary>
/// Construct a geoid.
/// </summary>
/// <param name="name">the name of the geoid.</param>
/// <param name="path">directory for data file.</param>
/// <param name="cubic">interpolation method; <see langword="false"/> means bilinear, <see langword="true"/> (the default) means cubic.</param>
/// <param name="threadsafe">if <see langword="true"/>, construct a thread safe object. The default is <see langword="false"/></param>
/// <remarks>
/// The data file is formed by appending ".pgm" to the name.
/// If <paramref name="path"/> is specified (and is non-empty), then the file is loaded from directory, path.
/// Otherwise the path is given by <see cref="DefaultGeoidPath"/>.
/// If the <paramref name="threadsafe"/> parameter is <see langword="true"/>, the data set is read into memory, the data file is closed,
/// and single-cell caching is turned off; this results in a <see cref="Geoid"/> object which is thread safe.
/// </remarks>
public Geoid(string name, string path = "", bool cubic = true, bool threadsafe = false)
{
    _name = name;
    _dir = path;
    _cubic = cubic;
    //_a = Constants.WGS84_a;
    //_e2 = (2 - Constants.WGS84_f) * Constants.WGS84_f;
    //_degree = Degree;
    _eps = Sqrt(DBL_EPSILON);
    // Thread safety is only enabled at the very end, after caching succeeds.
    _threadsafe = false;
    Debug.Assert(pixel_size_ == Macros.GEOGRAPHICLIB_GEOID_PGM_PIXEL_WIDTH, "pixel_t has the wrong size");
    if (string.IsNullOrWhiteSpace(_dir)) { _dir = DefaultGeoidPath; }
    _filename = Path.Combine(_dir, Path.ChangeExtension(_name, pixel_size_ != 4 ? "pgm" : "pgm4"));
    _file = File.OpenRead(_filename);
    // bufferSize: 1 keeps the reader from buffering past the text header,
    // which matters when computing the binary data offset below.
    using (var sr = new StreamReader(_file, Encoding.UTF8, true, bufferSize: 1, leaveOpen: true))
    {
        // PGM magic number: "P5" identifies a binary grayscale image.
        var s = sr.ReadLine();
        if (s != "P5") { throw new GeographicException("File not in PGM format: " + _filename); }
        _offset = double.MaxValue;
        _scale = 0;
        _maxerror = _rmserror = -1;
        _description = "NONE";
        _datetime = null;
        // Parse "# Key Value" comment lines until the "<width> <height>" line.
        while ((s = sr.ReadLine()) != null)
        {
            if (s.StartsWith("#"))
            {
                var match = Regex.Match(s, @"^#\s+([A-Za-z]+)\s+(.+)$");
                if (!match.Success) { continue; }
                var key = match.Groups[1].Value;
                if (key == "Description") { _description = match.Groups[2].Value.Trim(); }
                else if (key == "DateTime") { _datetime = System.DateTime.Parse(match.Groups[2].Value.Trim()); }
                else if (key == "Offset") { if (!double.TryParse(match.Groups[2].Value.Trim(), out _offset)) { throw new GeographicException("Error reading offset: " + _filename); } }
                else if (key == "Scale") { if (!double.TryParse(match.Groups[2].Value.Trim(), out _scale)) { throw new GeographicException("Error reading scale: " + _filename); } }
                else if (key == (_cubic ? "MaxCubicError" : "MaxBilinearError"))
                {
                    // It's not an error if the error can't be read
                    double.TryParse(match.Groups[2].Value.Trim(), out _maxerror);
                }
                else if (key == (_cubic ? "RMSCubicError" : "RMSBilinearError"))
                {
                    // It's not an error if the error can't be read
                    double.TryParse(match.Groups[2].Value.Trim(), out _rmserror);
                }
            }
            else
            {
                // First non-comment line after "P5" is "<width> <height>".
                var items = s.Split(new char[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
                if (items.Length != 2 || !int.TryParse(items[0], out _width) || !int.TryParse(items[1], out _height)) { throw new GeographicException("Error reading raster size: " + _filename); }
                break;
            }
        }
        if (!uint.TryParse(s = sr.ReadLine(), out var maxval)) { throw new GeographicException("Error reading maxval: " + _filename); }
        if (maxval != pixel_max_) { throw new GeographicException("Incorrect value of maxval: " + _filename); }
        // HACK: Get start position of binary data.
        sr.BaseStream.Seek(0, SeekOrigin.Begin);
        sr.DiscardBufferedData();
        var buff = new char[1024];
        var sp = buff.AsSpan();
        sr.ReadBlock(buff, 0, buff.Length);
        // NOTE(review): assumes the entire text header fits within the first
        // 1024 chars read here — TODO confirm for all shipped geoid files.
        var end = sp.IndexOf((s + '\n').AsSpan()) + s.Length + 1; // Add 1 for whitespace after maxval
        _datastart = Encoding.UTF8.GetByteCount(sp.Slice(0, end).ToArray()); // +1 ?
        _swidth = _width;
    }
    // Sanity-check the header fields gathered above.
    if (_offset == double.MaxValue) { throw new GeographicException("Offset not set: " + _filename); }
    if (_scale == 0) { throw new GeographicException("Scale not set " + _filename); }
    if (_scale < 0) { throw new GeographicException("Scale must be positive " + _filename); }
    if (_height < 2 || _width < 2)
    {
        // Coarsest grid spacing is 180deg.
        throw new GeographicException("Raster size too small " + _filename);
    }
    if ((_width & 1) != 0)
    {
        // This is so that longitude grids can be extended thru the poles.
        throw new GeographicException("Raster width is odd " + _filename);
    }
    if ((_height & 1) == 0)
    {
        // This is so that latitude grid includes the equator.
        throw new GeographicException("Raster height is even " + _filename);
    }
    // The header offset plus the raster payload must account for the whole file.
    _file.Seek(0, SeekOrigin.End);
    if (_datastart + pixel_size_ * _swidth * _height != _file.Position)
    {
        // Possibly this test should be "<" because the file contains, e.g., a
        // second image. However, for now we are more strict.
        throw new GeographicException("File has the wrong length " + _filename);
    }
    // Grid resolution: _width columns span 360 deg of longitude; _height rows
    // span 180 deg of latitude inclusive of both poles.
    _rlonres = _width / 360.0;
    _rlatres = (_height - 1) / 180.0;
    _cache = false;
    _ix = _width;
    _iy = _height;
    // Ensure that file errors throw exceptions
    if (threadsafe)
    {
        CacheAll();
        _file.Close();
        _threadsafe = true;
    }
}
public void TryFormat_InvalidFormat_ThrowsFormatException(string invalidFormat)
{
    // The invalid format string must be rejected regardless of buffer size.
    char[] destination = new char[1];
    Assert.Throws<FormatException>(
        () => new TimeSpan().TryFormat(destination.AsSpan(), out int charsWritten, invalidFormat, null));
}
// Negative counterpart of TestTextEqualsLargeMatch: for a large JSON string
// containing one escaped 'a' ("\u0061"), a lookup value with a single 'b'
// substituted at, just after, or just before the escape position must NOT
// match via any ValueTextEquals overload, for both contiguous and segmented
// input.
public static void TestTextEqualsLargeMismatch() {
    var jsonChars = new char[320]; // Some value larger than 256 (stack threshold)
    jsonChars.AsSpan().Fill('a');
    ReadOnlySpan <char> escapedA = new char[6] { '\\', 'u', '0', '0', '6', '1' };
    byte[] originalLookup = Encoding.UTF8.GetBytes(jsonChars);
    char[] originalLookupChars = new char[jsonChars.Length];
    Array.Copy(jsonChars, originalLookupChars, jsonChars.Length);
    // i starts at 1 and stops 6 early so the 'b' substitutions at i-1 / i+1
    // always stay in bounds.
    for (int i = 1; i < jsonChars.Length - 6; i++) {
        jsonChars.AsSpan().Fill('a');
        escapedA.CopyTo(jsonChars.AsSpan(i));
        string jsonString = "\"" + new string(jsonChars) + "\"";
        byte[] utf8Data = Encoding.UTF8.GetBytes(jsonString);
        // Three mismatch variants: 'b' at i, i+1, and i-1.
        for (int j = 0; j < 3; j++) {
            Span <byte> lookup = new byte[originalLookup.Length];
            originalLookup.CopyTo(lookup);
            lookup = lookup.Slice(0, lookup.Length - escapedA.Length + 1); // remove extra characters that were replaced by escaped bytes
            Span <char> lookupChars = new char[originalLookupChars.Length];
            originalLookupChars.CopyTo(lookupChars);
            lookupChars = lookupChars.Slice(0, lookupChars.Length - escapedA.Length + 1); // remove extra characters that were replaced by escaped bytes
            switch (j) {
                case 0:
                    lookup[i] = (byte)'b';
                    lookupChars[i] = 'b';
                    break;
                case 1:
                    lookup[i + 1] = (byte)'b';
                    lookupChars[i + 1] = 'b';
                    break;
                case 2:
                    lookup[i - 1] = (byte)'b';
                    lookupChars[i - 1] = 'b';
                    break;
            }
            // Contiguous input: no overload may report a match.
            bool found = false;
            var json = new Utf8JsonReader(utf8Data, isFinalBlock: true, state: default);
            while (json.Read()) {
                if (json.TokenType == JsonTokenType.String) {
                    if (json.ValueTextEquals(lookup) || json.ValueTextEquals(lookupChars) || json.ValueTextEquals(new string(lookupChars.ToArray()))) {
                        found = true;
                        break;
                    }
                }
            }
            Assert.False(found, $"Json String: {jsonString}");
            // Segmented input (single-byte segments) must behave identically.
            ReadOnlySequence <byte> sequence = JsonTestHelper.GetSequence(utf8Data, 1);
            found = false;
            json = new Utf8JsonReader(sequence, isFinalBlock: true, state: default);
            while (json.Read()) {
                if (json.TokenType == JsonTokenType.String) {
                    if (json.ValueTextEquals(lookup) || json.ValueTextEquals(lookupChars) || json.ValueTextEquals(new string(lookupChars.ToArray()))) {
                        found = true;
                        break;
                    }
                }
            }
            Assert.False(found);
        }
    }
}
public override void Solve(IOManager io)
{
    var n = io.ReadInt();
    var queries = io.ReadInt();

    // followees[u] = the set of users that user u currently follows.
    var followees = new SortedSet<int>[n];
    for (int i = 0; i < n; i++)
    {
        followees[i] = new SortedSet<int>();
    }

    for (int q = 0; q < queries; q++)
    {
        var type = io.ReadInt();
        var user = io.ReadInt() - 1;

        if (type == 1)
        {
            // Follow: user starts following one specific other user.
            var another = io.ReadInt() - 1;
            followees[user].Add(another);
        }
        else if (type == 2)
        {
            // Follow back: user follows everyone who already follows them.
            for (int other = 0; other < followees.Length; other++)
            {
                if (other != user && followees[other].Contains(user))
                {
                    followees[user].Add(other);
                }
            }
        }
        else
        {
            // Transitive follow: user follows the followees of their followees
            // (excluding themselves). Collect first to avoid mutating the set
            // while it is being enumerated.
            var discovered = new SortedSet<int>();
            foreach (var middle in followees[user])
            {
                foreach (var candidate in followees[middle])
                {
                    if (candidate != user)
                    {
                        discovered.Add(candidate);
                    }
                }
            }
            foreach (var candidate in discovered)
            {
                followees[user].Add(candidate);
            }
        }
    }

    // One output row per user: 'Y' at column j iff that user follows j.
    foreach (var set in followees)
    {
        var row = new char[n];
        for (int j = 0; j < n; j++)
        {
            row[j] = 'N';
        }
        foreach (var f in set)
        {
            row[f] = 'Y';
        }
        io.WriteLine(string.Concat(row));
    }
}
// Battery test for TextEncoder.Encode over growing UTF-16 input: starting
// from a single disallowed char, each iteration appends inputs[i] (whose
// escaped form is expectedOutputs[i]) and checks three destination sizes
// (one short, exact, oversized), plus partial-surrogate handling when the
// input ends with a valid surrogate pair.
protected void _RunEncodeUtf16_Battery(string[] inputs, string[] expectedOutputs) {
    string accumInput = _disallowedChar.ToString();
    string accumExpectedOutput = GetExpectedEscapedRepresentation(new Rune(_disallowedChar));
    // First, make sure we handle the simple "can't escape a single char to the buffer" case
    OperationStatus opStatus = _encoder.Encode(accumInput.AsSpan(), new char[accumExpectedOutput.Length - 1], out int charsConsumed, out int charsWritten);
    Assert.Equal(OperationStatus.DestinationTooSmall, opStatus);
    Assert.Equal(0, charsConsumed);
    Assert.Equal(0, charsWritten);
    // Then, escape a single char to the destination buffer.
    // This skips the "find the first char to encode" fast path in TextEncoder.cs.
    char[] destination = new char[accumExpectedOutput.Length];
    opStatus = _encoder.Encode(accumInput.AsSpan(), destination, out charsConsumed, out charsWritten);
    Assert.Equal(OperationStatus.Done, opStatus);
    Assert.Equal(1, charsConsumed);
    Assert.Equal(destination.Length, charsWritten);
    Assert.Equal(accumExpectedOutput, new string(destination));
    // Now, in a loop, append inputs to the source span and test various edge cases of
    // destination too small vs. destination properly sized.
    Assert.Equal(expectedOutputs.Length, inputs.Length);
    for (int i = 0; i < inputs.Length; i++) {
        accumInput += inputs[i];
        string outputToAppend = expectedOutputs[i];
        // Test destination too small - we should make progress up until
        // the very last thing we appended to the input.
        destination = new char[accumExpectedOutput.Length + outputToAppend.Length - 1];
        opStatus = _encoder.Encode(accumInput.AsSpan(), destination, out charsConsumed, out charsWritten);
        Assert.Equal(OperationStatus.DestinationTooSmall, opStatus);
        Assert.Equal(accumInput.Length - inputs[i].Length, charsConsumed); // should've consumed everything except the most recent appended data
        Assert.Equal(accumExpectedOutput.Length, charsWritten); // should've escaped everything we consumed
        Assert.Equal(accumExpectedOutput, new string(destination, 0, charsWritten));
        // Now test destination just right - we should consume the entire buffer successfully.
        accumExpectedOutput += outputToAppend;
        destination = new char[accumExpectedOutput.Length];
        opStatus = _encoder.Encode(accumInput.AsSpan(), destination, out charsConsumed, out charsWritten);
        Assert.Equal(OperationStatus.Done, opStatus);
        Assert.Equal(accumInput.Length, charsConsumed);
        Assert.Equal(accumExpectedOutput.Length, charsWritten);
        Assert.Equal(accumExpectedOutput, new string(destination));
        // Now test destination oversized - we should consume the entire buffer successfully.
        destination = new char[accumExpectedOutput.Length + 1];
        opStatus = _encoder.Encode(accumInput.AsSpan(), destination, out charsConsumed, out charsWritten);
        Assert.Equal(OperationStatus.Done, opStatus);
        Assert.Equal(accumInput.Length, charsConsumed);
        Assert.Equal(accumExpectedOutput.Length, charsWritten);
        Assert.Equal(accumExpectedOutput, new string(destination, 0, charsWritten));
        // Special-case: if the buffer ended with a legal supplementary scalar value, slice off
        // the last low surrogate char now and ensure the escaper can handle reading partial
        // surrogates, returning "Needs More Data".
        if (EndsWithValidSurrogatePair(accumInput)) {
            destination.AsSpan().Clear();
            opStatus = _encoder.Encode(accumInput.AsSpan(0, accumInput.Length - 1), destination, out charsConsumed, out charsWritten, isFinalBlock: false);
            Assert.Equal(OperationStatus.NeedMoreData, opStatus);
            Assert.Equal(accumInput.Length - 2, charsConsumed);
            Assert.Equal(accumExpectedOutput.Length - outputToAppend.Length, charsWritten);
            Assert.Equal(accumExpectedOutput.Substring(0, accumExpectedOutput.Length - outputToAppend.Length), new string(destination, 0, charsWritten));
        }
    }
}
/// <summary>
/// Removes endings.
/// </summary>
/// <param name="r1">The r1.</param>
/// <param name="r1Index">Index of the r1.</param>
/// <param name="word">The word.</param>
/// <returns>The word minus the endings.</returns>
private Span <char> Step1B(Span <char> r1, int r1Index, Span <char> word)
{
    // Try each candidate suffix in table order; the first match decides the
    // outcome (Porter-style step 1b behavior — suffixes and indices i == 0 /
    // i == 3 are special-cased against the R1 region).
    for (int i = 0, Step1ReplacementsLength = Step1Replacements.Length; i < Step1ReplacementsLength; i++)
    {
        var Step1Replacement = Step1Replacements[i];
        // Suffix 0: if R1 ends with it, drop the last two chars of the word.
        if (i == 0 && r1.EndsWith(Step1Replacement))
        {
            return(word.Length >= 2 ? word.Slice(0, word.Length - 2) : word);
        }
        // Suffix 3: if R1 ends with it, drop the last char of the word.
        if (i == 3 && r1.EndsWith(Step1Replacement))
        {
            return(word.Length >= 1 ? word.Slice(0, word.Length - 1) : word);
        }
        if (word.EndsWith(Step1Replacement))
        {
            // The suffix is only removed when a vowel precedes it.
            bool vowelIsFound = false;
            if (word.Length > Step1Replacement.Length)
            {
                for (int x = 0; x < word.Length - Step1Replacement.Length; x++)
                {
                    if (IsVowel(word[x]))
                    {
                        word = word.Slice(0, word.Length - Step1Replacement.Length);
                        vowelIsFound = true;
                        break;
                    }
                }
            }
            if (!vowelIsFound) { return(word); }
            // Recompute R1 against the shortened word.
            r1 = GetRValue(r1Index, word);
            // After removal, words ending in "at", "bl" or "iz" get an 'e'
            // appended (requires a fresh backing array since spans can't grow).
            // NOTE(review): indexing word[word.Length - 2] assumes the
            // shortened word still has at least 2 chars — TODO confirm the
            // suffix table guarantees this.
            if ((word[word.Length - 2] == 'a' && word[word.Length - 1] == 't') || (word[word.Length - 2] == 'b' && word[word.Length - 1] == 'l') || (word[word.Length - 2] == 'i' && word[word.Length - 1] == 'z'))
            {
                var Final = new char[word.Length + 1];
                Array.Copy(word.ToArray(), Final, word.Length);
                Array.Copy(new char[] { 'e' }, 0, Final, word.Length, 1);
                return(Final.AsSpan());
            }
            // Words ending in a doubled letter lose one of the pair.
            for (int x = 0; x < Doubles.Length; x++)
            {
                if (word.EndsWith(Doubles[x]))
                {
                    return(word.Slice(0, word.Length - 1));
                }
            }
            // Short words get a trailing 'e' appended.
            if (IsShortWord(word, r1))
            {
                var Final = new char[word.Length + 1];
                Array.Copy(word.ToArray(), Final, word.Length);
                Array.Copy(new char[] { 'e' }, 0, Final, word.Length, 1);
                return(Final.AsSpan());
            }
            return(word);
        }
    }
    // No suffix matched; the word is unchanged.
    return(word);
}
/// <summary>
/// Verifies decimal.TryFormat against the ToString test matrix: a destination of
/// exactly the right size, a larger one, a too-small one, and upper/lower-cased
/// variants of the format string. Runs remotely so the culture change is isolated.
/// </summary>
public static void TryFormat()
{
    RemoteInvoke(() =>
    {
        CultureInfo.CurrentCulture = CultureInfo.InvariantCulture;
        foreach (var testdata in ToString_TestData())
        {
            decimal value = (decimal)testdata[0];
            string format = (string)testdata[1];
            IFormatProvider provider = (IFormatProvider)testdata[2];
            string expected = (string)testdata[3];
            try
            {
                int charsWritten;

                // Destination exactly the required size.
                char[] destination = new char[expected.Length];
                Assert.True(value.TryFormat(destination.AsSpan(), out charsWritten, format, provider));
                Assert.Equal(expected.Length, charsWritten);
                Assert.Equal(expected, new string(destination));

                // Destination one char larger than required.
                destination = new char[expected.Length + 1];
                Assert.True(value.TryFormat(destination.AsSpan(), out charsWritten, format, provider));
                Assert.Equal(expected.Length, charsWritten);
                Assert.Equal(expected, new string(destination, 0, charsWritten));

                // Destination too small: must fail and report zero chars written.
                if (expected.Length > 0)
                {
                    destination = new char[expected.Length - 1];
                    Assert.False(value.TryFormat(destination.AsSpan(), out charsWritten, format, provider));
                    Assert.Equal(0, charsWritten);
                }

                if (format != null)
                {
                    // Upper-cased format string yields upper-cased output.
                    destination = new char[expected.Length];
                    Assert.True(value.TryFormat(destination.AsSpan(), out charsWritten, format.ToUpperInvariant(), provider));
                    Assert.Equal(expected.Length, charsWritten);
                    Assert.Equal(expected.ToUpperInvariant(), new string(destination));

                    // Lower-cased format string yields lower-cased output.
                    destination = new char[expected.Length];
                    Assert.True(value.TryFormat(destination.AsSpan(), out charsWritten, format.ToLowerInvariant(), provider));
                    Assert.Equal(expected.Length, charsWritten);
                    Assert.Equal(expected.ToLowerInvariant(), new string(destination));
                }
            }
            catch (Exception exc)
            {
                throw new Exception($"Failed on `{value}`, `{format}`, `{provider}`, `{expected}`. {exc}");
            }
        }
        return SuccessExitCode;
    }).Dispose();
}
/// <summary>
/// Encodes a string for use as a URI. Any character other than a letter, number or a
/// character in the whitelist will be percent-encoded as UTF-8.
/// </summary>
///
/// <param name="uri">The string to encode.</param>
/// <param name="noEscapeChars">A string containing characters that are not letters or digits, that
/// should not be escaped. If this is the empty string, all characters except letters and digits will be
/// escaped. Only characters in the ASCII range (0-127) are checked against this string;
/// characters outside this range are always escaped.</param>
/// <param name="failOnInvalidSurrogate">If this is true, encoding fails if an invalid surrogate
/// character is detected. Otherwise, invalid surrogates are replaced by '?' in the encoded
/// string.</param>
/// <param name="encodedURI">The encoded URI string.</param>
///
/// <returns>True if the URI was encoded successfully, false if it is invalid.</returns>
public static bool tryEncode(
    string uri, string noEscapeChars, bool failOnInvalidSurrogate, [NotNullWhen(true)] out string? encodedURI)
{
    if (uri == null)
    {
        encodedURI = null;
        return (false);
    }
    char[] buffer = new char[uri.Length];
    int bufPos = 0, bufLen = 0;
    bool error = false;
    for (int i = 0; i < uri.Length; i++)
    {
        char ch = uri[i];
        // Unescaped: ASCII digit, ASCII letter (either case), or an ASCII char
        // from the caller-supplied whitelist. The unsigned-subtraction tricks are
        // single-comparison range checks.
        bool noEscape = ((uint)(ch - '0') <= 9)
            || ((uint)(ch - 'A') <= 25)
            || ((uint)(ch - 'a') <= 25)
            || (ch <= 0x7F && noEscapeChars.Contains(ch));
        if (bufPos == bufLen)
        {
            // Buffer is full; grow it. bufLen starts at 0, so this also runs on the first char.
            DataStructureUtil.expandArray(ref buffer);
            bufLen = buffer.Length;
        }
        if (noEscape)
        {
            // No percent encoding
            buffer[bufPos++] = ch;
            continue;
        }
        uint bytes = 0;
        bool invalidSurrogate = false;
        if ((uint)(ch - 0xD800) < 0x400)
        {
            // Check surrogate pairs (ch is a lead surrogate, U+D800..U+DBFF).
            if (i == uri.Length - 1)
            {
                // Lead surrogate at the end of the string: no trail can follow.
                invalidSurrogate = true;
            }
            else
            {
                char trail = uri[i + 1];
                if ((uint)(trail - 0xDC00) < 0x400)
                {
                    bytes = _getUTF8Bytes(ch, trail);
                    // Consume the trail surrogate as well.
                    i++;
                }
                else
                {
                    invalidSurrogate = true;
                }
            }
        }
        else if ((uint)(ch - 0xDC00) < 0x400)
        {
            // Unpaired trail surrogate (U+DC00..U+DFFF).
            invalidSurrogate = true;
        }
        else
        {
            bytes = _getUTF8Bytes(ch);
        }
        if (invalidSurrogate)
        {
            if (failOnInvalidSurrogate)
            {
                error = true;
                break;
            }
            // Lenient mode: substitute '?' for the invalid surrogate.
            buffer[bufPos++] = '?';
            continue;
        }
        do
        {
            // Write the percent encoding for each byte
            if (bufLen - bufPos < 3)
            {
                // Need at least 3 chars ("%XX") for this byte.
                DataStructureUtil.expandArray(ref buffer, 3);
                bufLen = buffer.Length;
            }
            buffer[bufPos] = '%';
            byteToHex((byte)bytes, buffer.AsSpan(bufPos + 1));
            bufPos += 3;
            // NOTE(review): assumes _getUTF8Bytes packs the UTF-8 sequence with the
            // first byte in the low 8 bits, so shifting right yields the next byte
            // in order — confirm against _getUTF8Bytes.
            bytes >>= 8;
        } while (bytes != 0);
    }
    if (error)
    {
        encodedURI = null;
        return (false);
    }
    encodedURI = new string(buffer, 0, bufPos);
    return (true);
}
/// <summary>
/// Attempts to parse a JSON string literal at the current lexer position,
/// decoding the standard escape sequences (\" \\ \/ \b \f \n \r \t \uXXXX).
/// </summary>
/// <param name="str">On success, the parsed string node (source span plus decoded value); otherwise null.</param>
/// <returns>True if a string literal was parsed; false if the input does not start with '"'.</returns>
public bool TryString(out Ast.String? str)
{
    if (At() != '\"')
    {
        str = null;
        return (false);
    }
    var start = lexer.Current;
    var accu = new StringBuilder();
    Match('\"');
    while (HaveMore())
    {
        if (At() == '\"')
        {
            break;
        }
        switch (At())
        {
            case '\\':
                Match('\\');
                switch (At())
                {
                    case '\"':
                    case '\\':
                    case '/':
                        // These escapes decode to the escaped character itself.
                        accu.Append(MatchAny());
                        break;
                    case 'b':
                        MatchAny();
                        accu.Append('\b');
                        break;
                    case 'f':
                        MatchAny();
                        accu.Append('\f');
                        break;
                    case 'n':
                        MatchAny();
                        accu.Append('\n');
                        break;
                    case 'r':
                        MatchAny();
                        accu.Append('\r');
                        break;
                    case 't':
                        MatchAny();
                        // FIX: \t must decode to a tab; previously appended '\r' (copy-paste from the 'r' case).
                        accu.Append('\t');
                        break;
                    case 'u':
                        MatchAny();
                        // Exactly four hex digits form a UTF-16 code unit.
                        var digits = new char[4];
                        digits[0] = MatchHexDigit();
                        digits[1] = MatchHexDigit();
                        digits[2] = MatchHexDigit();
                        digits[3] = MatchHexDigit();
                        accu.Append((char)int.Parse(digits.AsSpan(), NumberStyles.HexNumber));
                        break;
                    default:
                        throw new NotImplementedException();
                }
                break;
            default:
                // Unescaped characters below U+0020 are not valid inside a JSON string.
                if (At() >= 0x20)
                {
                    accu.Append(MatchAny());
                    break;
                }
                throw new NotImplementedException();
        }
    }
    Match('\"');
    str = new Ast.String(lexer.Span(start), accu.ToString());
    return (true);
}
/// <summary>
/// Scratch test: scans a raw event log, parsing each line's
/// SequenceNumber:DateTimeUtc:Type:Data header and tallying records.
/// </summary>
public void TempTest()
{
    using var RawEventFile = File.Open(@"C:\Development\Resources\Zenith\Events 134.log", FileMode.Open);
    using var RawReader = new StreamBufferReader(RawEventFile);
    using var EventReader = new BinaryTextReader(RawReader);

    var EventBuffer = new char[128];
    var TotalRecords = 0;
    var LastSequenceNumber = 1L;
    var TimeStamp = DateTimeOffset.MinValue;
    var IsStatus = false;

    while (!EventReader.EndOfReader)
    {
        var Offset = 0;

        // Decode the start of the next line into the buffer. It should be more than sufficiently large for the header
        if (!EventReader.TryReadLine(EventBuffer.AsSpan(Offset), out var CharsWritten))
        {
            EventReader.SkipLine(); // We don't need the rest of the line data, no sense copying it
        }

        var Line = EventBuffer.AsSpan(0, CharsWritten);
        if (Line.IsEmpty)
        {
            continue;
        }

        // Find the first SequenceNumber:DateTimeUtc:Type:Data divider
        var Divider = Line.IndexOf(':');
        if (Divider == -1)
        {
            continue;
        }

        // Process the sequence number
        if (!long.TryParse(Line.Slice(0, Divider), NumberStyles.None, CultureInfo.InvariantCulture, out var SequenceNumber))
        {
            continue;
        }

        TotalRecords++;
        LastSequenceNumber = SequenceNumber;
        IsStatus = false;

        // Find the next divider (end of DateTimeUtc)
        var NextDivider = Line.Slice(++Divider).IndexOf(':');
        if (NextDivider == -1)
        {
            continue;
        }

        if (!DateTimeOffset.TryParseExact(Line.Slice(Divider, NextDivider), "yyyyMMddHHmmssfff", CultureInfo.InvariantCulture, DateTimeStyles.None, out var NewTimeStamp))
        {
            continue;
        }

        TimeStamp = NewTimeStamp;
        Divider += NextDivider + 1;

        // Next divider (end of Type)
        NextDivider = Line.Slice(Divider).IndexOf(':');
        // FIX: guard against a missing divider — Slice(Divider, -1) would throw
        // ArgumentOutOfRangeException on a malformed record. Matches the checks above.
        if (NextDivider == -1)
        {
            continue;
        }
        var Type = Line.Slice(Divider, NextDivider);

        // Check if it's a Status event
        IsStatus = Type.SequenceEqual("Status");
    }
}