public void Example()
{
    #region Usage
    // person.json, has a relative external schema reference 'address.json'
    // --------
    // {
    //   'type': 'object',
    //   'properties': {
    //     'name': {'type':'string'},
    //     'addresses': {
    //       'type': 'array',
    //       'items': {'$ref': 'address.json'}
    //     }
    //   }
    // }
    // --------
    using (StreamReader file = File.OpenText(@"c:\person.json"))
    using (JsonTextReader reader = new JsonTextReader(file))
    {
        // The URL resolver fetches external '$ref' schemas relative to BaseUri,
        // so 'address.json' is read from disk at 'c:\address.json'.
        JSchema schema = JSchema.Load(reader, new JSchemaReaderSettings
        {
            Resolver = new JSchemaUrlResolver(),
            BaseUri = new Uri(@"c:\person.json")
        });

        // validate JSON
    }
    #endregion
}
public void ReadFrom()
{
    // Objects: ReadFrom on an unread reader yields the whole JObject.
    JObject obj = (JObject)JToken.ReadFrom(new JsonTextReader(new StringReader("{'pie':true}")));
    Assert.AreEqual(true, (bool)obj["pie"]);

    // Arrays preserve element order and values.
    JArray array = (JArray)JToken.ReadFrom(new JsonTextReader(new StringReader("[1,2,3]")));
    Assert.AreEqual(1, (int)array[0]);
    Assert.AreEqual(2, (int)array[1]);
    Assert.AreEqual(3, (int)array[2]);

    // Properties: the reader must already be positioned on a property name,
    // so consume StartObject and PropertyName first.
    JsonReader propertyReader = new JsonTextReader(new StringReader("{'pie':true}"));
    propertyReader.Read();
    propertyReader.Read();

    JProperty property = (JProperty)JToken.ReadFrom(propertyReader);
    Assert.AreEqual("pie", property.Name);
    Assert.AreEqual(true, (bool)property.Value);

    // Constructors keep their name and argument list.
    JConstructor ctor = (JConstructor)JToken.ReadFrom(new JsonTextReader(new StringReader("new Date(1)")));
    Assert.AreEqual("Date", ctor.Name);
    Assert.IsTrue(JToken.DeepEquals(new JValue(1), ctor.Values().ElementAt(0)));

    // Primitive values round-trip as JValue.
    JValue stringValue = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"""stringvalue""")));
    Assert.AreEqual("stringvalue", (string)stringValue);

    JValue intValue = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"1")));
    Assert.AreEqual(1, (int)intValue);

    JValue doubleValue = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"1.1")));
    Assert.AreEqual(1.1, (double)doubleValue);
}
public void Example()
{
    #region Usage
    string json = @"{ 'name': 'Admin' }{ 'name': 'Publisher' }";

    IList<Role> roles = new List<Role>();

    JsonTextReader reader = new JsonTextReader(new StringReader(json));
    reader.SupportMultipleContent = true;

    // Create the serializer once instead of allocating a new one per JSON
    // document, and let Read()'s result drive the loop directly rather than
    // a while(true)/break construct.
    JsonSerializer serializer = new JsonSerializer();

    while (reader.Read())
    {
        // Each top-level document in the multi-content stream deserializes
        // into one Role.
        Role role = serializer.Deserialize<Role>(reader);
        roles.Add(role);
    }

    foreach (Role role in roles)
    {
        Console.WriteLine(role.Name);
    }

    // Admin
    // Publisher
    #endregion

    Assert.AreEqual(2, roles.Count);
    Assert.AreEqual("Admin", roles[0].Name);
    Assert.AreEqual("Publisher", roles[1].Name);
}
public static void ReadLargeJson()
{
    // Deserializes large.json and prints how long five timed passes take.
    // The identical read-and-deserialize logic (previously duplicated) is
    // factored into a local function.
    void DeserializeFile()
    {
        using (var jsonFile = System.IO.File.OpenText("large.json"))
        using (JsonTextReader jsonTextReader = new JsonTextReader(jsonFile))
        {
            JsonSerializer serializer = new JsonSerializer();
            serializer.Deserialize<IList<PerformanceTests.RootObject>>(jsonTextReader);
        }
    }

    // Warm-up pass (JIT + OS file cache) so the timed loop measures steady state.
    DeserializeFile();

    Console.WriteLine("Press any key to start deserialization");
    Console.ReadKey();

    Console.WriteLine("Deserializing...");

    Stopwatch sw = Stopwatch.StartNew();

    for (int i = 0; i < 5; i++)
    {
        DeserializeFile();
    }

    sw.Stop();

    Console.WriteLine("Finished. Total seconds: " + sw.Elapsed.TotalSeconds);
}
private static void AssertImport(object expected, string input)
{
    // Look up the stock importer for the expected value's runtime type and
    // verify it materializes an equal instance from the JSON text.
    Type expectedType = expected.GetType();
    JsonTextReader reader = new JsonTextReader(new StringReader(input));

    object imported = JsonImporterStock.Get(expectedType).Import(reader);

    Assert.IsInstanceOfType(expectedType, imported);
    Assert.AreEqual(expected, imported);
}
public void CheckInnerReader()
{
    // The validating reader must expose the exact reader instance it wraps.
    JsonReader innerReader = new JsonTextReader(new StringReader("{'name':'James','hobbies':['pie','cake']}"));
    JsonValidatingReader validatingReader = new JsonValidatingReader(innerReader);

    Assert.AreEqual(innerReader, validatingReader.Reader);
}
private static void AssertImport(object expected, string input)
{
    // Import through a fresh ImportContext and check the result matches the
    // expected value in both type and equality.
    Type targetType = expected.GetType();
    JsonTextReader reader = new JsonTextReader(new StringReader(input));

    object result = new ImportContext().Import(targetType, reader);

    Assert.IsInstanceOfType(targetType, result);
    Assert.AreEqual(expected, result);
}
public void Load()
{
    // Advance onto the constructor token, then load it as a JConstructor.
    JsonReader reader = new JsonTextReader(new StringReader("new Date(123)"));
    reader.Read();

    JConstructor loaded = JConstructor.Load(reader);

    Assert.AreEqual("Date", loaded.Name);
    Assert.IsTrue(JToken.DeepEquals(new JValue(123), loaded.Values().ElementAt(0)));
}
private void ReaderValidation()
{
    // Wrap the JSON text in a validating reader configured with the schema,
    // then drain it; validation happens as tokens are consumed.
    JSchemaValidatingReader validatingReader =
        new JSchemaValidatingReader(new JsonTextReader(new StringReader(Json)));
    validatingReader.Schema = Schema;

    while (validatingReader.Read())
    {
    }
}
private static void AssertImport(object expected, string input)
{
    JsonTextReader reader = new JsonTextReader(new StringReader(input));

    object imported = new GuidImporter().Import(new ImportContext(), reader);

    // The importer is expected to consume the whole input.
    Assert.IsTrue(reader.EOF, "Reader must be at EOF.");

    // A non-null expectation must come back as a Guid instance.
    if (expected != null)
    {
        Assert.IsInstanceOfType(typeof(Guid), imported);
    }

    Assert.AreEqual(expected, imported);
}
public void ReadDollarQuoteStringWithTag()
{
    string json = @"{""NameOfStore"":$tag$Forest's Bakery And Cafe$tag$}";

    JsonTextReader jsonTextReader = new JsonTextReader(new StringReader(json));

    // StartObject, PropertyName, then the dollar-quoted string value.
    for (int i = 0; i < 3; i++)
    {
        jsonTextReader.Read();
    }

    Assert.AreEqual(@"Forest's Bakery And Cafe", jsonTextReader.Value);
}
public void CanReadComma()
{
    // A lone comma must be scanned as the Comma token.
    var reader = new JsonTextReader(new StringReader(","));

    Assert.Equal(JsonToken.Comma, reader.NextToken());
}
public void CanReadBooleanTrue()
{
    // The literal "true" must be scanned as the True token.
    var reader = new JsonTextReader(new StringReader("true"));

    Assert.Equal(JsonToken.True, reader.NextToken());
}
public void CanReadDecimal()
{
    var reader = new JsonTextReader(new StringReader("123.456"));

    var token = reader.NextToken();

    // A number with a fraction part yields a Fractional token carrying
    // its exact source text.
    Assert.Equal(new JsonToken(JsonTokenKind.Fractional, "123.456"), token);
}
public void Example()
{
    // Parse JSON that is already in memory.
    JObject o1 = JObject.Parse(File.ReadAllText(@"c:\videogames.json"));

    // Stream JSON straight from the file instead of reading it into a string.
    using (StreamReader streamReader = File.OpenText(@"c:\videogames.json"))
    using (JsonTextReader jsonReader = new JsonTextReader(streamReader))
    {
        JObject o2 = (JObject)JToken.ReadFrom(jsonReader);
    }
}
public void Example()
{
    // Parse a schema from a string read into memory.
    JsonSchema schema1 = JsonSchema.Parse(File.ReadAllText(@"c:\schema.json"));

    // Stream the schema straight from the file instead.
    using (StreamReader streamReader = File.OpenText(@"c:\schema.json"))
    using (JsonTextReader jsonReader = new JsonTextReader(streamReader))
    {
        JsonSchema schema2 = JsonSchema.Read(jsonReader);
    }
}
public void LoadFile()
{
    #region LoadFile
    // Load a schema straight from disk without buffering the file as a string.
    using (StreamReader streamReader = File.OpenText(@"c:\person.json"))
    using (JsonTextReader jsonReader = new JsonTextReader(streamReader))
    {
        JSchema schema = JSchema.Load(jsonReader);

        // validate JSON
    }
    #endregion
}
private JSchemaValidatingReader CreateReader(string json, JSchema schema, out IList<SchemaValidationEventArgs> errors)
{
    // Every validation event raised while the returned reader is consumed
    // is collected into the out list.
    List<SchemaValidationEventArgs> collected = new List<SchemaValidationEventArgs>();

    JSchemaValidatingReader validatingReader =
        new JSchemaValidatingReader(new JsonTextReader(new StringReader(json)));
    validatingReader.ValidationEventHandler += (sender, args) => collected.Add(args);
    validatingReader.Schema = schema;

    errors = collected;
    return validatingReader;
}
public void Example()
{
    #region Usage
    // Parse a schema from a string read into memory.
    JSchema schema1 = JSchema.Parse(File.ReadAllText(@"c:\schema.json"));

    // Stream the schema straight from the file instead.
    using (StreamReader streamReader = File.OpenText(@"c:\schema.json"))
    using (JsonTextReader jsonReader = new JsonTextReader(streamReader))
    {
        JSchema schema2 = JSchema.Load(jsonReader);
    }
    #endregion
}
private void ParseRequest(string request)
{
    // Parses one incoming request document. Exactly one top-level JSON value
    // is accepted: an object (a message request handled by
    // ProcessMessageRequest) or a one-element array holding an action string.
    // Anything else — including trailing content — raises HttpException.
    using (var stringReader = new StringReader(request))
    using (var reader = new JsonTextReader(stringReader))
    {
        // Leave date-looking strings untouched; payloads are forwarded verbatim.
        reader.DateParseHandling = DateParseHandling.None;

        // Messages parsed as a dictionary with the following
        // parameters:
        //
        // 'r': Request type: 0, 1 or 2
        // 'a': Association ID when the request type is 1 or 2
        // 't': Message type
        // 'p': Payload

        if (!reader.Read())
            throw new HttpException("Invalid request");

        if (reader.TokenType == JsonToken.StartObject)
        {
            ProcessMessageRequest(reader);
        }
        else if (reader.TokenType == JsonToken.StartArray)
        {
            // Array form: a single action string, e.g. ["close"].
            ReadToken(reader, JsonToken.String);
            string action = (string)reader.Value;
            ReadToken(reader, JsonToken.EndArray);

            switch (action)
            {
                case "close":
                    // "close" is currently the only supported array action.
                    _client.Dispose();
                    break;
                default:
                    throw new HttpException("Invalid request");
            }
        }
        else
        {
            throw new HttpException("Invalid request");
        }

        // Reject any content after the first document.
        if (reader.Read())
            throw new HttpException("Invalid request");
    }
}
public void CanIgnoreWhiteSpace()
{
    // Whitespace before and between literals must be skipped by the scanner.
    var reader = new JsonTextReader(new StringReader(" false true null"));

    Assert.Equal(JsonToken.False, reader.NextToken());
    Assert.Equal(JsonToken.True, reader.NextToken());
    Assert.Equal(JsonToken.Null, reader.NextToken());
}
public static JSchema OpenSchemaFile(string name, JSchemaResolver resolver = null)
{
    // Resolve the schema's on-disk path, then load it with the supplied
    // resolver and a BaseUri so relative $ref targets can be located.
    string path = ResolveFilePath(name);

    using (Stream file = File.OpenRead(path))
    using (JsonReader reader = new JsonTextReader(new StreamReader(file)))
    {
        JSchemaReaderSettings settings = new JSchemaReaderSettings
        {
            Resolver = resolver,
            BaseUri = new Uri(path, UriKind.RelativeOrAbsolute)
        };

        return JSchema.Load(reader, settings);
    }
}
public void CommentsAndReadFrom()
{
    // ReadFrom should surface a comment inside an array as a child token
    // (JTokenType.Comment) ahead of the numeric values, so the array reports
    // four children.
    // NOTE(review): the verbatim literal below is reproduced exactly as
    // captured; its physical line breaks may have been lost upstream — verify
    // against the original test file.
    StringReader textReader = new StringReader(@"[ // hi 1, 2, 3 ]");

    JsonTextReader jsonReader = new JsonTextReader(textReader);
    JArray a = (JArray)JToken.ReadFrom(jsonReader);

    Assert.AreEqual(4, a.Count);
    Assert.AreEqual(JTokenType.Comment, a[0].Type);
    // Comment tokens carry the text after '//' including the leading space.
    Assert.AreEqual(" hi", ((JValue)a[0]).Value);
}
public static JSchema OpenSchemaFile(string name, JSchemaResolver resolver, Uri baseUri = null)
{
    // Schema files are located relative to the application base directory.
    string path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, name);

    using (JsonReader reader = new JsonTextReader(new StreamReader(path)))
    {
        // Fall back to the file's own path as BaseUri when none is supplied,
        // so relative $ref targets resolve next to the file.
        return JSchema.Load(reader, new JSchemaReaderSettings
        {
            BaseUri = baseUri ?? new Uri(path, UriKind.RelativeOrAbsolute),
            Resolver = resolver
        });
    }
}
public void Example()
{
    // Load the schema from disk and validate an empty object against it.
    // (The previous version declared an unused local 'schemaJson'; removed.)
    JsonSchema schema;

    using (TextReader file = File.OpenText(@"c:\schema.json"))
    using (JsonTextReader reader = new JsonTextReader(file))
    {
        schema = JsonSchema.Read(reader);
    }

    JObject o = JObject.Parse(@"{}");

    bool valid = o.IsValid(schema);

    Console.WriteLine(valid);
    // true
}
public void Sample()
{
    JSchema schema = JSchema.Parse(@"{ 'type': 'array', 'items': {'type':'string'} }");

    // Wrap the JSON in a validating reader bound to the schema.
    JSchemaValidatingReader validatingReader = new JSchemaValidatingReader(
        new JsonTextReader(new StringReader(@"[ 'Developer', 'Administrator' ]")));
    validatingReader.Schema = schema;

    // Fail fast on the first validation error.
    validatingReader.ValidationEventHandler += (sender, args) => { throw new Exception(args.Message); };

    // Deserialization drives the reader, validating each token as it goes.
    List<string> roles = new JsonSerializer().Deserialize<List<string>>(validatingReader);
}
public void Example()
{
    #region Usage
    string json = @"{ 'CPU': 'Intel', 'PSU': '500W', 'Drives': [ 'DVD read/writer' /*(broken)*/, '500 gigabyte hard drive', '200 gigabype hard drive' ] }";

    JsonTextReader reader = new JsonTextReader(new StringReader(json));

    // Dump every token; value-less tokens print only their type.
    while (reader.Read())
    {
        if (reader.Value == null)
        {
            Console.WriteLine("Token: {0}", reader.TokenType);
        }
        else
        {
            Console.WriteLine("Token: {0}, Value: {1}", reader.TokenType, reader.Value);
        }
    }

    // Token: StartObject
    // Token: PropertyName, Value: CPU
    // Token: String, Value: Intel
    // Token: PropertyName, Value: PSU
    // Token: String, Value: 500W
    // Token: PropertyName, Value: Drives
    // Token: StartArray
    // Token: String, Value: DVD read/writer
    // Token: Comment, Value: (broken)
    // Token: String, Value: 500 gigabyte hard drive
    // Token: String, Value: 200 gigabype hard drive
    // Token: EndArray
    // Token: EndObject
    #endregion
}
public void Example()
{
    #region Usage
    string schemaJson = @"{ 'description': 'A person', 'type': 'object', 'properties': { 'name': {'type':'string'}, 'hobbies': { 'type': 'array', 'items': {'type':'string'} } } }";

    string json = @"{ 'name': 'James', 'hobbies': ['.NET', 'Blogging', 'Reading', 'Xbox', 'LOLCATS'] }";

    JSchemaValidatingReader validatingReader = new JSchemaValidatingReader(
        new JsonTextReader(new StringReader(json)));
    validatingReader.Schema = JSchema.Parse(schemaJson);

    // Record validation messages instead of throwing; no messages == valid.
    IList<string> messages = new List<string>();
    validatingReader.ValidationEventHandler += (o, a) => messages.Add(a.Message);

    Person p = new JsonSerializer().Deserialize<Person>(validatingReader);

    Console.WriteLine(p.Name);
    // James

    bool isValid = (messages.Count == 0);
    Console.WriteLine(isValid);
    // true
    #endregion

    Assert.IsTrue(isValid);
}
public void ReadFrom()
{
    // JToken.ReadFrom must materialize the correct token type for each
    // top-level JSON construct: object, array, property, constructor, value.
    JObject o = (JObject)JToken.ReadFrom(new JsonTextReader(new StringReader("{'pie':true}")));
    Assert.AreEqual(true, (bool)o["pie"]);

    JArray a = (JArray)JToken.ReadFrom(new JsonTextReader(new StringReader("[1,2,3]")));
    Assert.AreEqual(1, (int)a[0]);
    Assert.AreEqual(2, (int)a[1]);
    Assert.AreEqual(3, (int)a[2]);

    // A JProperty can only be read once the reader is inside an object, so
    // consume StartObject and PropertyName first.
    JsonReader reader = new JsonTextReader(new StringReader("{'pie':true}"));
    reader.Read();
    reader.Read();

    JProperty p = (JProperty)JToken.ReadFrom(reader);
    Assert.AreEqual("pie", p.Name);
    Assert.AreEqual(true, (bool)p.Value);

    JConstructor c = (JConstructor)JToken.ReadFrom(new JsonTextReader(new StringReader("new Date(1)")));
    Assert.AreEqual("Date", c.Name);
    Assert.IsTrue(JToken.DeepEquals(new JValue(1), c.Values().ElementAt(0)));

    JValue v;

    v = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"""stringvalue""")));
    Assert.AreEqual("stringvalue", (string)v);

    v = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"1")));
    Assert.AreEqual(1, (int)v);

    v = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"1.1")));
    Assert.AreEqual(1.1, (double)v);

#if !NET20
    // With DateParseHandling.DateTimeOffset the ISO date string must surface
    // as a DateTimeOffset that preserves its +12:31 offset.
    v = (JValue)JToken.ReadFrom(new JsonTextReader(new StringReader(@"""1970-01-01T00:00:00+12:31"""))
    {
        DateParseHandling = DateParseHandling.DateTimeOffset
    });
    Assert.AreEqual(typeof(DateTimeOffset), v.Value.GetType());
    Assert.AreEqual(new DateTimeOffset(DateTimeUtils.InitialJavaScriptDateTicks, new TimeSpan(12, 31, 0)), v.Value);
#endif
}
public void StartingCommentAndReadFrom()
{
    // A comment before any JSON value is returned by ReadFrom as the token
    // itself (JTokenType.Comment) along with its source line information.
    // NOTE(review): the verbatim literal below is reproduced exactly as
    // captured; the asserted line number 3 implies the original literal had
    // physical newlines that may have been lost upstream — verify against the
    // original test file.
    StringReader textReader = new StringReader(@" // hi [ 1, 2, 3 ]");

    JsonTextReader jsonReader = new JsonTextReader(textReader);
    JValue v = (JValue)JToken.ReadFrom(jsonReader);

    Assert.AreEqual(JTokenType.Comment, v.Type);

    IJsonLineInfo lineInfo = v;
    Assert.AreEqual(true, lineInfo.HasLineInfo());
    Assert.AreEqual(3, lineInfo.LineNumber);
    Assert.AreEqual(1, lineInfo.LinePosition);
}
public static T Deserialize<T>(string json)
{
    // Wrap the JSON text in a reader and delegate to the shared serializer.
    using (var jsonTextReader = new JsonTextReader(new StringReader(json)))
    {
        return (T)serializer.Deserialize(jsonTextReader, typeof(T));
    }
}
/// <summary>
/// Creates the gateway API client and wires the websocket's binary, text and
/// closed events to decompress/deserialize incoming frames and raise the
/// corresponding internal events.
/// </summary>
public DiscordSocketApiClient(RestClientProvider restClientProvider, WebSocketProvider webSocketProvider, string userAgent,
    string url = null, RetryMode defaultRetryMode = RetryMode.AlwaysRetry, JsonSerializer serializer = null, bool useSystemClock = true)
    : base(restClientProvider, userAgent, defaultRetryMode, serializer, useSystemClock)
{
    _gatewayUrl = url;
    // A caller-supplied URL overrides gateway discovery.
    if (url != null)
        _isExplicitUrl = true;
    WebSocketClient = webSocketProvider();
    //WebSocketClient.SetHeader("user-agent", DiscordConfig.UserAgent); (Causes issues in .NET Framework 4.6+)

    // Binary frames are zlib-compressed payloads; they are inflated through
    // the shared _compressed/_decompressor pair before JSON deserialization.
    // NOTE(review): the shared streams imply single-threaded message
    // delivery from the websocket client — confirm.
    WebSocketClient.BinaryMessage += async (data, index, count) =>
    {
        using (var decompressed = new MemoryStream())
        {
            if (data[0] == 0x78)
            {
                //Strip the zlib header
                _compressed.Write(data, index + 2, count - 2);
                _compressed.SetLength(count - 2);
            }
            else
            {
                _compressed.Write(data, index, count);
                _compressed.SetLength(count);
            }
            //Reset positions so we don't run out of memory
            _compressed.Position = 0;
            _decompressor.CopyTo(decompressed);
            _compressed.Position = 0;
            decompressed.Position = 0;
            using (var reader = new StreamReader(decompressed))
            using (var jsonReader = new JsonTextReader(reader))
            {
                var msg = _serializer.Deserialize<SocketFrame>(jsonReader);
                if (msg != null)
                {
#if DEBUG_PACKETS
                    Console.WriteLine($"<- {(GatewayOpCode)msg.Operation} [{msg.Type ?? "none"}] : {(msg.Payload as Newtonsoft.Json.Linq.JToken)?.ToString().Length}");
#endif
                    await _receivedGatewayEvent.InvokeAsync((GatewayOpCode)msg.Operation, msg.Sequence, msg.Type, msg.Payload).ConfigureAwait(false);
                }
            }
        }
    };

    // Text frames arrive as plain JSON and skip the decompression path.
    WebSocketClient.TextMessage += async text =>
    {
        using (var reader = new StringReader(text))
        using (var jsonReader = new JsonTextReader(reader))
        {
            var msg = _serializer.Deserialize<SocketFrame>(jsonReader);
            if (msg != null)
            {
#if DEBUG_PACKETS
                Console.WriteLine($"<- {(GatewayOpCode)msg.Operation} [{msg.Type ?? "none"}] : {(msg.Payload as Newtonsoft.Json.Linq.JToken)?.ToString().Length}");
#endif
                await _receivedGatewayEvent.InvokeAsync((GatewayOpCode)msg.Operation, msg.Sequence, msg.Type, msg.Payload).ConfigureAwait(false);
            }
        }
    };

    // Socket closure triggers a disconnect and surfaces the exception to
    // subscribers of the disconnected event.
    WebSocketClient.Closed += async ex =>
    {
#if DEBUG_PACKETS
        Console.WriteLine(ex);
#endif
        await DisconnectAsync().ConfigureAwait(false);
        await _disconnectedEvent.InvokeAsync(ex).ConfigureAwait(false);
    };
}
/// <summary>
/// The response is a FeatureCollection object with a list of image features ordered by captured_at by default. If closeto is provided, image features will be ordered by their distances to the closeto location.
/// </summary>
/// <param name="bbox">Filter by the bounding box, given as minx,miny,maxx,maxy.</param>
/// <param name="closeTo">Filter by a location that images are close to, given as longitude,latitude.</param>
/// <param name="endTime">Filter images that are captured before end_time.</param>
/// <param name="imageKeys">Filter images by a list of image keys.</param>
/// <param name="lookAt">Filter images that images are taken in the direction of the specified location (and therefore that location is likely to be visible in the images), given as longitude,latitude. Note that If lookat is provided without geospatial filters like closeto or bbox, then it will search global images that look at the point.</param>
/// <param name="pano">Filer panoramic images (true) or flat images (false).</param>
/// <param name="perPage">The number of images per page (default 200, and maximum 1000).</param>
/// <param name="projectKeys">Filter images by projects, given as project keys.</param>
/// <param name="radius">Filter images within the radius around the closeto location (default 100 meters).</param>
/// <param name="sequenceKeys">Filter images by sequences.</param>
/// <param name="startTime">Filter images that are captured since start_time.</param>
/// <param name="userkeys">Filter images captured by users, given as user keys.</param>
/// <param name="usernames">Filter images captured by users, given as usernames.</param>
/// <returns>The response is a FeatureCollection object with a list of image features ordered by captured_at by default. </returns>
// Streams the GeoJSON response token-by-token rather than deserializing it
// whole. State machine: imageToAdd/featureToAdd/geometryToAdd accumulate the
// current feature; each is flushed into its parent when the next one starts,
// with a final flush after the read loop.
// NOTE(review): parameter name 'cliendId' (sic) is part of the public
// signature and is preserved.
public static FeatureCollection<Image> SearchImage(string cliendId, BoundingBox bbox, Coordinates closeTo, DateTime? endTime, string[] imageKeys, Coordinates lookAt, bool? pano, int? perPage, string[] projectKeys, int? radius, string[] sequenceKeys, DateTime? startTime, string[] userkeys, string[] usernames)
{
    FeatureCollection<Image> jsonResult = null;
    JsonImage imageToAdd = null;
    Feature<Image> featureToAdd = null;
    Geometry geometryToAdd = null;

    var data = RequestData(cliendId, bbox, closeTo, endTime, imageKeys, lookAt, pano, perPage, projectKeys, radius, sequenceKeys, startTime, userkeys, usernames);

    using (JsonTextReader reader = new JsonTextReader(data))
    {
        while (reader.Read())
        {
            switch (reader.TokenType)
            {
                case JsonToken.PropertyName:
                    switch ((string)reader.Value)
                    {
                        case "type":
                            // "type" discriminates collection / feature / geometry.
                            reader.Read();
                            switch ((string)reader.Value)
                            {
                                case "FeatureCollection":
                                    jsonResult = new FeatureCollection<Image>((string)reader.Value);
                                    break;
                                case "Feature":
                                    // Flush the previous feature before starting a new one.
                                    if (imageToAdd != null)
                                    {
                                        featureToAdd.Result = imageToAdd.GenerateImage();
                                    }
                                    if (geometryToAdd != null)
                                    {
                                        featureToAdd.Geometry = geometryToAdd;
                                    }
                                    if (featureToAdd != null)
                                    {
                                        jsonResult.Add(featureToAdd);
                                    }
                                    featureToAdd = new Feature<Image>((string)reader.Value);
                                    break;
                                case "Point":
                                    if (geometryToAdd != null)
                                    {
                                        featureToAdd.Geometry = geometryToAdd;
                                    }
                                    geometryToAdd = new Geometry((string)reader.Value);
                                    break;
                                default:
                                    // Console.WriteLine($"Unhandled Type ({(string)reader.Value})");
                                    break;
                            }
                            break;
                        case "features":
                            break;
                        case "properties":
                            // A new properties object begins a new image record.
                            if (imageToAdd != null)
                            {
                                featureToAdd.Result = imageToAdd.GenerateImage();
                            }
                            imageToAdd = new JsonImage();
                            break;
                        case "ca":
                            reader.Read();
                            imageToAdd.CameraAngle = float.Parse(reader.Value.ToString());
                            break;
                        case "camera_make":
                            reader.Read();
                            imageToAdd.CameraMake = (string)reader.Value;
                            break;
                        case "camera_model":
                            reader.Read();
                            imageToAdd.CameraModel = (string)reader.Value;
                            break;
                        case "captured_at":
                            reader.Read();
                            imageToAdd.CapturedAt = DateTime.Parse(reader.Value.ToString());
                            break;
                        case "key":
                            reader.Read();
                            imageToAdd.Key = (string)reader.Value;
                            break;
                        case "pano":
                            reader.Read();
                            imageToAdd.Pano = (bool)reader.Value;
                            break;
                        case "sequence_key":
                            reader.Read();
                            imageToAdd.SequenceKey = (string)reader.Value;
                            break;
                        case "user_key":
                            reader.Read();
                            imageToAdd.UserKey = (string)reader.Value;
                            break;
                        case "username":
                            reader.Read();
                            imageToAdd.Username = (string)reader.Value;
                            break;
                        case "geometry":
                            break;
                        case "coordinates":
                            // Skip tokens until the first numeric coordinate, then
                            // read longitude followed by latitude.
                            while ((reader.TokenType != JsonToken.Float && reader.TokenType != JsonToken.Integer) && reader.Read())
                            {
                                ;
                            }
                            float longitude = float.Parse(reader.Value.ToString());
                            reader.Read();
                            float lattitude = float.Parse(reader.Value.ToString());
                            geometryToAdd.Coordinates = new Coordinates(longitude, lattitude);
                            break;
                        default:
                            //Console.WriteLine($"Unhandled Property ({reader.Value}) occured.");
                            break;
                    }
                    break;
                #region unused
                case JsonToken.EndObject:
                case JsonToken.EndArray:
                case JsonToken.EndConstructor:
                case JsonToken.StartObject:
                case JsonToken.StartArray:
                case JsonToken.StartConstructor:
                    //silencee
                    break;
                case JsonToken.Date:
                case JsonToken.Boolean:
                case JsonToken.String:
                case JsonToken.Float:
                case JsonToken.Integer:
                case JsonToken.Raw:
                case JsonToken.Null:
                case JsonToken.Comment:
                case JsonToken.Undefined:
                case JsonToken.Bytes:
                case JsonToken.None:
                default:
                    // Console.WriteLine($"Type {reader.TokenType} occured. Value is {reader.Value}.");
                    break;
                #endregion
            }
        }
    }

    // Flush the last in-progress feature after the stream ends.
    if (imageToAdd != null)
    {
        featureToAdd.Result = imageToAdd.GenerateImage();
    }
    if (geometryToAdd != null)
    {
        featureToAdd.Geometry = geometryToAdd;
    }
    if (featureToAdd != null)
    {
        jsonResult.Add(featureToAdd);
    }

    return (jsonResult);
}
/// <summary>
/// Fetches and scrapes gallery metadata for the given numeric id, returning
/// null for non-numeric ids, failed requests, or galleries of type 'anime'.
/// </summary>
public async Task<DoujinInfo> GetAsync(string id, CancellationToken cancellationToken = default)
{
    if (!int.TryParse(id, out var intId))
        return null;

    HtmlNode root;

    // load html page
    using (var response = await _http.SendAsync(
        new HttpRequestMessage
        {
            Method = HttpMethod.Get,
            RequestUri = new Uri(Hitomi.Gallery(intId))
        },
        cancellationToken))
    {
        if (!response.IsSuccessStatusCode)
            return null;

        using (var reader = new StringReader(await response.Content.ReadAsStringAsync()))
        {
            var doc = new HtmlDocument();
            doc.Load(reader);

            root = doc.DocumentNode;
        }
    }

    // filter out anime
    var type = Sanitize(root.SelectSingleNode(Hitomi.XPath.Type));

    if (type != null && type.Equals("anime", StringComparison.OrdinalIgnoreCase))
    {
        _logger.LogInformation($"Skipping '{id}' because it is of type 'anime'.");
        return null;
    }

    var prettyName = Sanitize(root.SelectSingleNode(Hitomi.XPath.Name));

    // replace stuff in brackets with nothing
    prettyName = _bracketsRegex.Replace(prettyName, "");

    var originalName = prettyName;

    // parse names with two parts ("pretty | original")
    var pipeIndex = prettyName.IndexOf('|');

    if (pipeIndex != -1)
    {
        prettyName = prettyName.Substring(0, pipeIndex).Trim();
        originalName = originalName.Substring(pipeIndex + 1).Trim();
    }

    // parse language from the language link's href
    var languageHref = root.SelectSingleNode(Hitomi.XPath.Language)?.Attributes["href"]?.Value;

    var language = languageHref == null ? null : _languageHrefRegex.Match(languageHref).Groups["language"].Value;

    var doujin = new DoujinInfo
    {
        PrettyName = prettyName,
        OriginalName = originalName,
        UploadTime = DateTime.Parse(Sanitize(root.SelectSingleNode(Hitomi.XPath.Date))).ToUniversalTime(),
        Source = this,
        SourceId = id,
        Artist = Sanitize(root.SelectSingleNode(Hitomi.XPath.Artists))?.ToLowerInvariant(),
        Group = Sanitize(root.SelectSingleNode(Hitomi.XPath.Groups))?.ToLowerInvariant(),
        Language = language?.ToLowerInvariant(),
        Parody = ConvertSeries(Sanitize(root.SelectSingleNode(Hitomi.XPath.Series)))?.ToLowerInvariant(),
        Characters = root.SelectNodes(Hitomi.XPath.Characters)?.Select(n => Sanitize(n)?.ToLowerInvariant()),
        Tags = root.SelectNodes(Hitomi.XPath.Tags)?.Select(n => ConvertTag(Sanitize(n)?.ToLowerInvariant()))
    };

    // parse images
    using (var response = await _http.SendAsync(
        new HttpRequestMessage
        {
            Method = HttpMethod.Get,
            RequestUri = new Uri(Hitomi.GalleryInfo(intId))
        },
        cancellationToken))
    {
        if (!response.IsSuccessStatusCode)
            return null;

        using (var textReader = new StringReader(await response.Content.ReadAsStringAsync()))
        using (var jsonReader = new JsonTextReader(textReader))
        {
            // discard javascript bit and start at json
            // NOTE(review): Peek() returns -1 at end of input, so this loops
            // indefinitely if no '[' is present — assumes the endpoint always
            // embeds a JSON array; confirm.
            while ((char)textReader.Peek() != '[')
                textReader.Read();

            var images = _serializer.Deserialize<ImageInfo[]>(jsonReader);

            // Each image's extension is packed into a single character so the
            // whole list fits in one string.
            var extensionsCombined = new string(images.Select(i =>
            {
                var ext = Path.GetExtension(i.Name);

                switch (ext)
                {
                    case "": return '.';
                    case ".jpg": return 'j';
                    case ".jpeg": return 'J';
                    case ".png": return 'p';
                    case ".gif": return 'g';

                    default:
                        throw new NotSupportedException($"Unknown image format '{ext}'.");
                }
            }).ToArray());

            doujin.PageCount = images.Length;

            doujin.Data = _serializer.Serialize(new InternalDoujinData
            {
                ImageNames = images.Select(i => Path.GetFileNameWithoutExtension(i.Name)).ToArray(),
                Extensions = extensionsCombined
            });
        }
    }

    return doujin;
}
// Downloads hourly load-balancer event logs from blob storage for the
// [logStart, logEnd] window, deserializes each blob's records, and writes
// the collected rows to a CSV file at EventCSVExportNamePath.
// Blobs that fail to download (e.g. missing hours) are logged and skipped.
private static void GetLoadBalancerEvents(DateTime logStart, DateTime logEnd)
{
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(BlobStorageConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();

    Console.WriteLine($"Getting reference to container {EventContainerName}");
    CloudBlobContainer container = blobClient.GetContainerReference(EventContainerName);

    StorageURL storageUrl = new StorageURL(container.Uri, SubscriptionID, ResrouceGroupsName, ProviderName, ResrouceTypeName, ResourceType.LOADBALANCERS);

    List<Log> logs = new List<Log>();
    int itemPosition = 0;

    // Using the date and time as arguments download all logs from the storage blob.
    // Logs are partitioned one blob per hour.
    for (DateTime logTimeStamp = logStart; logTimeStamp <= logEnd; logTimeStamp = logTimeStamp.AddHours(1))
    {
        Console.WriteLine(logTimeStamp);

        Uri storageBlobUrl = storageUrl.GetURL(logTimeStamp);
        CloudBlockBlob blockBlob = new CloudBlockBlob(storageBlobUrl, storageAccount.Credentials);
        MemoryStream memstream = new MemoryStream();

        try
        {
            blockBlob.DownloadToStream(memstream);
            memstream.Position = 0;

            JsonSerializer serializer = new JsonSerializer();
            using (StreamReader sr = new StreamReader(memstream))
            {
                using (JsonTextReader jsonTextReader = new JsonTextReader(sr))
                {
                    LogRecords logRecords = serializer.Deserialize<LogRecords>(jsonTextReader);

                    itemPosition = 0;
                    foreach (Log logItem in logRecords.records)
                    {
                        logs.Add(logItem);
                        itemPosition++;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // Best-effort: report the failing blob URL and continue with the next hour.
            Console.WriteLine($"{ex.Message} - {storageBlobUrl}");
        }
    }

    // Export all collected records as CSV.
    using (System.IO.StreamWriter file = new System.IO.StreamWriter(EventCSVExportNamePath))
    {
        file.WriteLine("time,systemId,category,resourceId,operationName,properties.publicIpAddress" +
            ",properties.port,properties.totalDipCount,properties.dipDownCount,properties.healthPercentage");

        foreach (Log log in logs)
        {
            file.WriteLine($"{DateTime.Parse(log.time).ToUniversalTime()}, {log.systemId}, {log.category}, {log.resourceId}, {log.operationName}" +
                $", {log.properties.publicIpAddress }, {log.properties.port}, {log.properties.totalDipCount}" +
                $", {log.properties.dipDownCount}, {log.properties.healthPercentage}");
        }
    }
}
public T Deserialize(TextReader source)
{
    // Feed the incoming text through a JSON reader into the configured serializer.
    using (var jsonReader = new JsonTextReader(source))
    {
        return _serializer.Deserialize<T>(jsonReader);
    }
}
// Converts a .babylon scene file to its incremental/binary form: loads the
// scene JSON, marks it for delayed texture loading, extracts meshes and
// geometries into separate files next to dstPath, and saves the rewritten
// scene as '<name>.binary.babylon'. Progress goes to onMessage; any failure
// is reported through onError instead of throwing.
public static void Convert(string srcFilename, string dstPath, Action<string> onMessage, Action<string> onError)
{
    try
    {
        if (!Directory.Exists(dstPath))
        {
            Directory.CreateDirectory(dstPath);
        }

        string srcPath = Path.GetDirectoryName(srcFilename);
        string dstFilename = Path.Combine(dstPath, Path.GetFileNameWithoutExtension(srcFilename) + ".binary.babylon");

        dynamic scene;

        // Loading
        onMessage("Loading " + srcFilename);
        using (var streamReader = new StreamReader(srcFilename))
        {
            using (var reader = new JsonTextReader(streamReader))
            {
                scene = JObject.Load(reader);
            }
        }

        // Marking scene
        string objName = scene.name;

        if (string.IsNullOrEmpty(objName))
        {
            objName = Path.GetFileNameWithoutExtension(srcFilename);
        }

        // Strip a previous ".incremental" suffix so re-conversion is idempotent.
        var atDot = objName.IndexOf(".incremental");

        if (atDot > 0)
        {
            objName = objName.Substring(0, atDot);
        }

        scene["autoClear"] = true;
        scene["useDelayedTextureLoading"] = true;

        var doNotDelayLoadingForGeometries = new List<string>();

        // Parsing meshes
        var meshes = (JArray)scene.meshes;
        foreach (dynamic mesh in meshes)
        {
            if (mesh.checkCollisions.Value) // Do not delay load collisions object
            {
                if (mesh.geometryId != null)
                {
                    doNotDelayLoadingForGeometries.Add(mesh.geometryId.Value);
                }
                continue;
            }

            Extract(srcPath, dstPath, objName, mesh, true);
        }

        // Parsing vertexData
        var geometries = scene.geometries;
        if (geometries != null)
        {
            var vertexData = (JArray)geometries.vertexData;
            foreach (dynamic geometry in vertexData)
            {
                var id = geometry.id.Value;

                // Geometries referenced by collision meshes stay inline.
                if (doNotDelayLoadingForGeometries.Any(g => g == id))
                {
                    continue;
                }

                Extract(srcPath, dstPath, objName, geometry, false);
            }
        }

        // Saving
        onMessage("Saving " + dstFilename);
        string json = scene.ToString(Formatting.Indented);

        using (var writer = new StreamWriter(WebUtility.UrlDecode(dstFilename)))
        {
            writer.Write(json);
        }
    }
    catch (Exception ex)
    {
        onError(ex.Message);
    }
}
public async Task MatchWithNoTrailingSeparatorAsync()
{
    // "nullz" matches the null literal prefix but carries a trailing character.
    JsonTextReader reader = new JsonTextReader(new StringReader(@"nullz"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => await reader.ReadAsync(),
        "Error parsing null value. Path '', line 1, position 4.");
}
public async Task UnclosedCommentAsync()
{
    // A /* comment with no closing */ must fail at end of input.
    JsonTextReader reader = new JsonTextReader(new StringReader(@"/* sdf"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => await reader.ReadAsync(),
        "Unexpected end while parsing comment. Path '', line 1, position 6.");
}
public async Task UnexpectedEndOfStringAsync()
{
    // A string opened with ' but never closed must fail at end of input.
    JsonReader reader = new JsonTextReader(new StringReader("'hi"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => await reader.ReadAsync(),
        "Unterminated string. Expected delimiter: '. Path '', line 1, position 3.");
}
public async Task MatchWithInsufficentCharactersAsync()
{
    // "nul" ends before the null literal is complete.
    JsonTextReader reader = new JsonTextReader(new StringReader(@"nul"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => await reader.ReadAsync(),
        "Unexpected end when reading JSON. Path '', line 1, position 3.");
}
public async Task UnexpectedEndOfHexAsync()
{
    // The \u escape requires four hex digits; input ends after three.
    JsonReader reader = new JsonTextReader(new StringReader(@"'h\u123"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => await reader.ReadAsync(),
        "Unexpected end while parsing Unicode escape sequence. Path '', line 1, position 4.");
}
public async Task UnexpectedEndAfterReadingNeAsync()
{
    // "ne" is an incomplete literal (could be the start of a constructor).
    JsonTextReader reader = new JsonTextReader(new StringReader("ne"));

    await ExceptionAssert.ThrowsAsync<JsonReaderException>(
        async () => { await reader.ReadAsync(); },
        "Unexpected end when reading JSON. Path '', line 1, position 2.");
}
/// <summary>
/// Streams a catalog export document out of <paramref name="stream"/> and imports its
/// sections ("Catalogs", "Categories", "Properties", "Products") through the
/// corresponding services, reporting progress via <paramref name="progressCallback"/>.
/// Products are imported in batches of <c>_batchSize</c>; product associations are
/// detached and saved separately afterwards.
/// </summary>
/// <param name="stream">Source JSON stream (not disposed here beyond the readers).</param>
/// <param name="manifest">Controls optional behavior, e.g. whether binary image data is imported.</param>
/// <param name="progressCallback">Invoked with an updated description after each unit of work.</param>
public void DoImport(Stream stream, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback)
{
    var progressInfo = new ExportImportProgressInfo();
    var productsTotalCount = 0;
    using (StreamReader streamReader = new StreamReader(stream))
    using (JsonTextReader reader = new JsonTextReader(streamReader))
    {
        // Walk the document token-by-token; each recognized top-level property
        // name dispatches to a section-specific import. NOTE(review): sections are
        // assumed to appear in an order where ProductsTotalCount precedes Products —
        // confirm against the exporter.
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.PropertyName)
            {
                if (reader.Value.ToString() == "Catalogs")
                {
                    reader.Read(); // advance onto the array value
                    var catalogs = _serializer.Deserialize<Catalog[]>(reader);
                    progressInfo.Description = $"{ catalogs.Count() } catalogs importing...";
                    progressCallback(progressInfo);
                    _catalogService.Update(catalogs);
                }
                else if (reader.Value.ToString() == "Categories")
                {
                    reader.Read();
                    var categories = _serializer.Deserialize<Category[]>(reader);
                    progressInfo.Description = $"{ categories.Count() } categories importing...";
                    progressCallback(progressInfo);
                    _categoryService.Update(categories);
                    if (manifest.HandleBinaryData) { ImportImages(categories); }
                }
                else if (reader.Value.ToString() == "Properties")
                {
                    reader.Read();
                    var properties = _serializer.Deserialize<Property[]>(reader);
                    progressInfo.Description = $"{ properties.Count() } properties importing...";
                    progressCallback(progressInfo);
                    _propertyService.Update(properties);
                }
                else if (reader.Value.ToString() == "ProductsTotalCount")
                {
                    // Used only to produce "x of y" progress messages below.
                    productsTotalCount = reader.ReadAsInt32() ?? 0;
                }
                else if (reader.Value.ToString() == "Products")
                {
                    reader.Read();
                    if (reader.TokenType == JsonToken.StartArray)
                    {
                        reader.Read(); // position on the first product object
                        var associationBackupMap = new Dictionary<string, ICollection<ProductAssociation>>();
                        var products = new List<CatalogProduct>();
                        var productsCount = 0;
                        // Import products one object at a time, flushing every _batchSize items.
                        while (reader.TokenType != JsonToken.EndArray)
                        {
                            var product = _serializer.Deserialize<CatalogProduct>(reader);
                            // Do not save associations within the product to prevent dependency
                            // conflicts in the db; they are saved separately after product import.
                            if (!product.Associations.IsNullOrEmpty())
                            {
                                associationBackupMap[product.Id] = product.Associations;
                            }
                            product.Associations = null;
                            products.Add(product);
                            productsCount++;
                            reader.Read(); // move to next product or EndArray
                            if (productsCount % _batchSize == 0 || reader.TokenType == JsonToken.EndArray)
                            {
                                _itemService.Update(products.ToArray());
                                if (manifest.HandleBinaryData) { ImportImages(products.ToArray()); }
                                products.Clear();
                                if (productsTotalCount > 0)
                                {
                                    progressInfo.Description = $"{ productsCount } of { productsTotalCount } products imported";
                                }
                                else
                                {
                                    progressInfo.Description = $"{ productsCount } products imported";
                                }
                                progressCallback(progressInfo);
                            }
                        }
                        // Import product associations separately to avoid DB constraint violations.
                        var totalProductsWithAssociationsCount = associationBackupMap.Count();
                        progressInfo.Description = $"{ totalProductsWithAssociationsCount } products associations importing...";
                        progressCallback(progressInfo);
                        for (int i = 0; i < totalProductsWithAssociationsCount; i += _batchSize)
                        {
                            // Stub products carrying only Id + Associations are enough
                            // for the association service to persist the links.
                            var fakeProducts = new List<CatalogProduct>();
                            foreach (var pair in associationBackupMap.Skip(i).Take(_batchSize))
                            {
                                var fakeProduct = new CatalogProduct { Id = pair.Key, Associations = pair.Value };
                                fakeProducts.Add(fakeProduct);
                            }
                            _associationService.SaveChanges(fakeProducts.ToArray());
                            progressInfo.Description = $"{ Math.Min(totalProductsWithAssociationsCount, i + _batchSize) } of { totalProductsWithAssociationsCount } products associations imported";
                            progressCallback(progressInfo);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Parses an SA1MDL/SA2MDL model file from a raw byte buffer: validates the magic and
/// version, reads label/animation/metadata chunks, constructs the root
/// <see cref="NJS_OBJECT"/>, and (when <paramref name="filename"/> is given) loads any
/// referenced animation files from the same directory.
/// </summary>
/// <param name="file">Raw file contents.</param>
/// <param name="filename">Optional on-disk path; enables loading of sibling animation files.</param>
/// <exception cref="FormatException">Unknown magic or unsupported version.</exception>
public ModelFile(byte[] file, string filename = null)
{
    int tmpaddr;
    // Temporarily force little-endian reads; the caller's endianness is restored at the end.
    bool be = ByteConverter.BigEndian;
    ByteConverter.BigEndian = false;
    ulong magic = ByteConverter.ToUInt64(file, 0) & FormatMask;
    byte version = file[7];
    if (version > CurrentVersion)
    {
        throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
    }
    Metadata = new Dictionary<uint, byte[]>();
    Dictionary<int, string> labels = new Dictionary<int, string>();
    Dictionary<int, Attach> attaches = new Dictionary<int, Attach>();
    if (version < 2)
    {
        // Legacy layout: v1 stores a flat (-1 terminated) address/label table at 0x14.
        if (version == 1)
        {
            tmpaddr = ByteConverter.ToInt32(file, 0x14);
            if (tmpaddr != 0)
            {
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    labels.Add(addr, file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                    tmpaddr += 8;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
            }
        }
        switch (magic)
        {
            case SA1MDL:
                Format = ModelFormat.Basic;
                break;
            case SA2MDL:
                Format = ModelFormat.Chunk;
                break;
            default:
                throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        // Root object pointer lives at offset 8.
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels, attaches);
        if (filename != null)
        {
            // Animation file-name table pointer at 0xC (-1 terminated list of string pointers).
            tmpaddr = ByteConverter.ToInt32(file, 0xC);
            if (tmpaddr != 0)
            {
                List<string> animfiles = new List<string>();
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    animfiles.Add(file.GetCString(addr));
                    tmpaddr += 4;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
                animationFiles = animfiles.ToArray();
            }
            else
            {
                animationFiles = new string[0];
            }
            string path = Path.GetDirectoryName(filename);
            List<NJS_MOTION> anims = new List<NJS_MOTION>();
            // Best effort: if any animation fails to load, discard all of them.
            try
            {
                foreach (string item in animationFiles)
                {
                    anims.Add(NJS_MOTION.Load(Path.Combine(path, item), Model.CountAnimated()));
                }
            }
            catch
            {
                anims.Clear();
            }
            Animations = anims.AsReadOnly();
        }
    }
    else
    {
        // v2+ layout: a chunk list starting at the pointer stored at 0xC.
        animationFiles = new string[0];
        tmpaddr = ByteConverter.ToInt32(file, 0xC);
        if (tmpaddr != 0)
        {
            bool finished = false;
            while (!finished)
            {
                // Each chunk: 4-byte type, 4-byte size, then payload.
                ChunkTypes type = (ChunkTypes)ByteConverter.ToUInt32(file, tmpaddr);
                int chunksz = ByteConverter.ToInt32(file, tmpaddr + 4);
                int nextchunk = tmpaddr + 8 + chunksz;
                tmpaddr += 8;
                if (version == 2)
                {
                    // v2 reads chunk payloads in place from the file buffer.
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(file, tmpaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(file, tmpaddr), file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                                tmpaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List<string> animfiles = new List<string>();
                            while (ByteConverter.ToInt32(file, tmpaddr) != -1)
                            {
                                animfiles.Add(file.GetCString(ByteConverter.ToInt32(file, tmpaddr)));
                                tmpaddr += 4;
                            }
                            animationFiles = animfiles.ToArray();
                            break;
                        case ChunkTypes.Morph:
                            break;
                        case ChunkTypes.Author:
                            Author = file.GetCString(tmpaddr);
                            break;
                        case ChunkTypes.Tool:
                            break;
                        case ChunkTypes.Description:
                            Description = file.GetCString(tmpaddr);
                            break;
                        case ChunkTypes.Texture:
                            break;
                        case ChunkTypes.End:
                            finished = true;
                            break;
                    }
                }
                else
                {
                    // v3+ copies the payload out first; unknown chunk types are
                    // preserved verbatim in Metadata.
                    byte[] chunk = new byte[chunksz];
                    Array.Copy(file, tmpaddr, chunk, 0, chunksz);
                    int chunkaddr = 0;
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(chunk, chunkaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(chunk, chunkaddr), chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr + 4)));
                                chunkaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List<string> animchunks = new List<string>();
                            while (ByteConverter.ToInt32(chunk, chunkaddr) != -1)
                            {
                                animchunks.Add(chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr)));
                                chunkaddr += 4;
                            }
                            animationFiles = animchunks.ToArray();
                            break;
                        case ChunkTypes.Morph:
                            break;
                        case ChunkTypes.Author:
                            Author = chunk.GetCString(chunkaddr);
                            break;
                        case ChunkTypes.Tool:
                            break;
                        case ChunkTypes.Description:
                            Description = chunk.GetCString(chunkaddr);
                            break;
                        case ChunkTypes.End:
                            finished = true;
                            break;
                        default:
                            Metadata.Add((uint)type, chunk);
                            break;
                    }
                }
                tmpaddr = nextchunk;
            }
        }
        switch (magic)
        {
            case SA1MDL:
                Format = ModelFormat.Basic;
                break;
            case SA2MDL:
                Format = ModelFormat.Chunk;
                break;
            case SA2BMDL:
                Format = ModelFormat.GC;
                break;
            case XJMDL:
                Format = ModelFormat.XJ;
                break;
            default:
                throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels, attaches);
        if (filename != null)
        {
            string path = Path.GetDirectoryName(filename);
            // An optional ".action" sidecar lists animation files, one per line.
            // NOTE(review): File.Exists / File.ReadLines use GetFileNameWithoutExtension
            // (no directory) while Path.Combine(path, line) uses the model's directory —
            // the sidecar lookup therefore depends on the current working directory; confirm intended.
            if (File.Exists(Path.GetFileNameWithoutExtension(filename) + ".action"))
            {
                using (TextReader tr = File.OpenText(Path.GetFileNameWithoutExtension(filename) + ".action"))
                {
                    List<string> animlist = new List<string>();
                    int count = File.ReadLines(Path.GetFileNameWithoutExtension(filename) + ".action").Count();
                    for (int i = 0; i < count; i++)
                    {
                        string line = tr.ReadLine();
                        if (File.Exists(Path.Combine(path, line)))
                        {
                            animlist.Add(line);
                        }
                    }
                    animationFiles = animlist.ToArray();
                }
            }
            List<NJS_MOTION> anims = new List<NJS_MOTION>();
            try
            {
                foreach (string item in animationFiles)
                {
                    // .json animations are deserialized; any other extension goes
                    // through the binary NJS_MOTION loader.
                    if (Path.GetExtension(item).ToLowerInvariant() == ".json")
                    {
                        JsonSerializer js = new JsonSerializer() { Culture = System.Globalization.CultureInfo.InvariantCulture };
                        using (TextReader tr = File.OpenText(Path.Combine(path, item)))
                        {
                            using (JsonTextReader jtr = new JsonTextReader(tr))
                                anims.Add(js.Deserialize<NJS_MOTION>(jtr));
                        }
                    }
                    else
                    {
                        anims.Add(NJS_MOTION.Load(Path.Combine(path, item), Model.CountAnimated()));
                    }
                }
            }
            catch
            {
                anims.Clear();
            }
            Animations = anims.AsReadOnly();
        }
    }
    // Restore the caller's endianness setting.
    ByteConverter.BigEndian = be;
}
/// <summary>
/// Fetches one page of Azure AD users from the Graph REST API. Pass the previous
/// page's <paramref name="nextLink"/> to continue paging; pass null/empty to start
/// from the beginning.
/// </summary>
/// <param name="nextLink">OData next-link URL from a previous response, or null for the first page.</param>
/// <returns>The deserialized page of users.</returns>
/// <exception cref="GraphApiException">
/// Thrown for OData error responses, non-JSON responses, malformed JSON, or transport failures.
/// </exception>
public async Task<OdataPagedResponse<AzureADUser>> GetUsersAsync(string nextLink = null)
{
    var url = new StringBuilder(nextLink);
    if (String.IsNullOrEmpty(nextLink))
    {
        // Build the initial URL from the tenant id and the $select projection.
        url.AppendFormat(CultureInfo.InvariantCulture, UsersUrlFormat, this._tenantId);
        url.Append(SelectParameter);
    }

    // Add query string parameters common to first and subsequent pages.
    url.Append(ApiVersionParameter)
       .Append(this._batchSizeParameter);

    // Perform API call
    try
    {
        using (var request = new HttpRequestMessage(HttpMethod.Get, url.ToString()))
        using (var response = await this._httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead).ConfigureAwait(false))
        using (var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false))
        using (var streamReader = new StreamReader(responseStream))
        {
            if (s_contentType.MediaType.Equals(response.Content.Headers.ContentType.MediaType, StringComparison.InvariantCultureIgnoreCase))
            {
                // The response is a JSON document.
                using (var jsonTextReader = new JsonTextReader(streamReader))
                {
                    if (response.StatusCode == HttpStatusCode.OK)
                    {
                        var result = this._jsonSerializer.Deserialize<OdataPagedResponse<AzureADUser>>(jsonTextReader);

                        // Update key credential owner references.
                        if (result.Items != null)
                        {
                            result.Items.ForEach(user => user.UpdateKeyCredentialReferences());
                        }

                        return result;
                    }
                    else
                    {
                        // Translate OData error response to an exception.
                        var error = this._jsonSerializer.Deserialize<OdataErrorResponse>(jsonTextReader);
                        throw error.GetException();
                    }
                }
            }
            else
            {
                // The response is not a JSON document, so we parse its first line as message text.
                string message = await streamReader.ReadLineAsync().ConfigureAwait(false);
                throw new GraphApiException(message, response.StatusCode.ToString());
            }
        }
    }
    catch (JsonException e)
    {
        throw new GraphApiException("The data returned by the REST API call has an unexpected format.", e);
    }
    catch (HttpRequestException e)
    {
        // Unpack a more meaningful message, e.g. a DNS error.
        // FIX: inside a catch, 'e' is never null but InnerException can be; the
        // original 'e?.InnerException.Message' threw NullReferenceException when
        // there was no inner exception. The null-conditional belongs on InnerException.
        throw new GraphApiException(e.InnerException?.Message ?? "An error occured while trying to call the REST API.", e);
    }
}
// Reads column 5 of the current row as JSON and materializes it as a
// string-to-string header dictionary.
Dictionary<string, string> GetHeaders(DbDataReader reader)
{
    using (var headersText = reader.GetTextReader(5))
    using (var json = new JsonTextReader(headersText))
    {
        return serializer.Deserialize<Dictionary<string, string>>(json);
    }
}
/// <summary>
/// Load a <see cref="JArray"/> from a string that contains JSON.
/// </summary>
/// <param name="json">A <see cref="String"/> that contains JSON.</param>
/// <returns>A <see cref="JArray"/> populated from the string that contains JSON.</returns>
public static new JArray Parse(string json)
{
    // FIX: the reader is IDisposable and was previously never disposed;
    // a using statement guarantees it (and the underlying StringReader) is closed.
    using (JsonReader jsonReader = new JsonTextReader(new StringReader(json)))
    {
        return Load(jsonReader);
    }
}
/// <summary>
/// Code generator: reads the named HTML entities from "HtmlEntities.json",
/// builds a character-trie of entity names (recording each name's decoded
/// characters and tracking the longest name), then emits the C# lookup tables
/// into "HtmlEntityDecoder.g.cs".
/// </summary>
public static void Main(string[] args)
{
    int maxEntityLength = 0;
    int state = 0; // monotonically increasing id assigned to each new trie node
    using (var json = new JsonTextReader(new StreamReader("HtmlEntities.json")))
    {
        // The JSON is expected to be an object whose property names are entity
        // names (e.g. "&amp;") and whose values are objects containing
        // "codepoints" (skipped) and "characters" (the decoded text).
        // Any structural surprise breaks out of the loop.
        while (json.Read())
        {
            string name, value;
            if (json.TokenType == JsonToken.StartObject)
            {
                continue;
            }
            if (json.TokenType != JsonToken.PropertyName)
            {
                break;
            }
            name = (string)json.Value;
            // trim leading '&' and trailing ';'
            name = name.TrimStart('&').TrimEnd(';');
            if (!json.Read() || json.TokenType != JsonToken.StartObject)
            {
                break;
            }
            // read to the "codepoints" property
            if (!json.Read() || json.TokenType != JsonToken.PropertyName)
            {
                break;
            }
            // skip the array of integers...
            if (!json.Read() || json.TokenType != JsonToken.StartArray)
            {
                break;
            }
            while (json.Read())
            {
                if (json.TokenType == JsonToken.EndArray)
                {
                    break;
                }
            }
            // the property should be "characters" - this is what we want
            if (!json.Read() || json.TokenType != JsonToken.PropertyName)
            {
                break;
            }
            value = json.ReadAsString();
            // Walk/extend the trie with each character of the entity name,
            // reusing an existing child when present, otherwise creating one
            // with the next state id. (GraphNode presumably links itself into
            // the parent's Children — TODO confirm.)
            var node = Root;
            for (int i = 0; i < name.Length; i++)
            {
                bool found = false;
                for (int j = 0; j < node.Children.Count; j++)
                {
                    if (node.Children[j].Char == name[i])
                    {
                        node = node.Children[j];
                        found = true;
                        break;
                    }
                }
                if (!found)
                {
                    node = new GraphNode(node, ++state, name[i]);
                    continue;
                }
            }
            // First writer wins: only record the value for a node once.
            if (node.Value == null)
            {
                FinalStates.Add(node.State, node);
                node.Value = value;
            }
            maxEntityLength = Math.Max(maxEntityLength, name.Length + 1);
            if (!json.Read() || json.TokenType != JsonToken.EndObject)
            {
                break;
            }
        }
    }
    // Emit the generated partial class; the heavy lifting is delegated to the
    // two Generate* helpers.
    using (var output = new StreamWriter("HtmlEntityDecoder.g.cs"))
    {
        output.WriteLine("// WARNING: This file is auto-generated. DO NOT EDIT!");
        output.WriteLine();
        output.WriteLine("namespace HtmlKit {");
        output.WriteLine("\tpublic partial class HtmlEntityDecoder {");
        output.WriteLine("\t\tconst int MaxEntityLength = {0};", maxEntityLength);
        output.WriteLine();
        GeneratePushNamedEntityMethod(output);
        output.WriteLine();
        GenerateGetNamedEntityValueMethod(output);
        output.WriteLine("\t}");
        output.WriteLine("}");
    }
}
/// <summary>
/// Loads a settings object of type <typeparamref name="T"/> from <paramref name="filePath"/>.
/// On any failure (missing file, empty file, deserialization error) it recurses through
/// <paramref name="fallbackFilePaths"/> in order, and finally returns a fresh instance
/// when every candidate fails.
/// </summary>
/// <param name="filePath">Primary settings file to try.</param>
/// <param name="fallbackFilePaths">Ordered fallback candidates; consumed head-first on recursion.</param>
/// <returns>The deserialized settings, or a new <typeparamref name="T"/> as last resort.</returns>
private static T LoadInternal(string filePath, List<string> fallbackFilePaths = null)
{
    string typeName = typeof(T).Name;
    if (!string.IsNullOrEmpty(filePath) && File.Exists(filePath))
    {
        DebugHelper.WriteLine($"{typeName} load started: {filePath}");
        try
        {
            using (FileStream fileStream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                if (fileStream.Length > 0)
                {
                    T settings;
                    using (StreamReader streamReader = new StreamReader(fileStream))
                    using (JsonTextReader jsonReader = new JsonTextReader(streamReader))
                    {
                        // Serializer configuration:
                        //  - DPAPI resolver decrypts string properties marked for encryption
                        //  - enums are read from their string names
                        //  - ObjectCreationHandling.Replace avoids appending to pre-populated collections
                        JsonSerializer serializer = new JsonSerializer();
                        serializer.ContractResolver = new DPAPIEncryptedStringPropertyResolver();
                        serializer.Converters.Add(new StringEnumConverter());
                        serializer.DateTimeZoneHandling = DateTimeZoneHandling.Local;
                        serializer.ObjectCreationHandling = ObjectCreationHandling.Replace;
                        serializer.Error += Serializer_Error;
                        settings = serializer.Deserialize<T>(jsonReader);
                    }
                    if (settings == null)
                    {
                        // Treated as a load failure so the fallback chain below runs.
                        throw new Exception($"{typeName} object is null.");
                    }
                    DebugHelper.WriteLine($"{typeName} load finished: {filePath}");
                    return (settings);
                }
                else
                {
                    throw new Exception($"{typeName} file stream length is 0.");
                }
            }
        }
        catch (Exception e)
        {
            // Swallow intentionally: failure here falls through to the fallback paths.
            DebugHelper.WriteException(e, $"{typeName} load failed: {filePath}");
        }
    }
    else
    {
        DebugHelper.WriteLine($"{typeName} file does not exist: {filePath}");
    }
    // Recurse into the next fallback candidate, consuming it from the list.
    if (fallbackFilePaths != null && fallbackFilePaths.Count > 0)
    {
        filePath = fallbackFilePaths[0];
        fallbackFilePaths.RemoveAt(0);
        return (LoadInternal(filePath, fallbackFilePaths));
    }
    DebugHelper.WriteLine($"Loading new {typeName} instance.");
    return (new T());
}
/// <summary>
/// Moves a mesh/geometry object's vertex attribute arrays out of the scene JSON into a
/// binary sidecar file, recording each attribute's layout (count/stride/offset/dataType)
/// in the object's "_binaryInfo" and pointing "delayLoadingFile" at the new file.
/// If the object already delay-loads from a JSON file, that data is inlined first.
/// </summary>
/// <param name="srcPath">Directory of the source scene (for resolving existing delayLoadingFile).</param>
/// <param name="dstPath">Directory where the binary file is written.</param>
/// <param name="objName">Scene-level name used as the output file-name prefix.</param>
/// <param name="meshObj">Mesh or geometry JSON object (mutated in place).</param>
/// <param name="isMesh">True for meshes (includes subMeshes handling and name-based file naming).</param>
static void Extract(string srcPath, string dstPath, string objName, dynamic meshObj, bool isMesh)
{
    string dstFilename = meshObj.delayLoadingFile;
    string dstExt = (isMesh ? ".babylonbinarymeshdata" : ".babylonbinarygeometrydata");
    // If the object already references an external JSON data file, pull its
    // arrays back into the object so they can be re-emitted as binary below.
    if (!string.IsNullOrEmpty(dstFilename))
    {
        string filename = WebUtility.UrlDecode(Path.Combine(srcPath, (string)meshObj.delayLoadingFile));
        using (var streamReader = new StreamReader(filename))
        {
            using (var reader = new JsonTextReader(streamReader))
            {
                var meshData = JObject.Load(reader);
                meshObj.positions = meshData["positions"];
                meshObj.normals = meshData["normals"];
                meshObj.indices = meshData["indices"];
                meshObj.uvs = meshData["uvs"];
                meshObj.uvs2 = meshData["uvs2"];
                meshObj.colors = meshData["colors"];
                meshObj.matricesIndices = meshData["matricesIndices"];
                meshObj.matricesWeights = meshData["matricesWeights"];
                meshObj.subMeshes = meshData["subMeshes"];
            }
        }
    }
    // Positions, normals and indices are mandatory; skip extraction otherwise.
    if (meshObj.positions == null || meshObj.positions.Count == 0 || meshObj.normals == null || meshObj.normals.Count == 0 || meshObj.indices == null || meshObj.indices.Count == 0)
    {
        return;
    }
    ComputeBoundingBox(meshObj);
    // Build a filesystem-safe destination file name: "<obj>.<mesh>.<id><ext>"
    // for named meshes, "<obj><id><ext>" otherwise; mesh name capped at 40 chars.
    string meshName = meshObj.name.ToString();
    meshName = meshName.Trim();
    if (meshName.Length > 40)
    {
        meshName = meshName.Substring(0, 40);
    }
    if (isMesh && !string.IsNullOrEmpty(meshName))
    {
        dstFilename = objName + "." + meshName + "." + meshObj.id.ToString() + dstExt;
    }
    else
    {
        dstFilename = objName + meshObj.id.ToString() + dstExt;
    }
    dstFilename = dstFilename.Replace("+", "_").Replace(" ", "_").Replace("/", "_").Replace("\\", "_").Replace(":", "_");
    meshObj.delayLoadingFile = WebUtility.UrlEncode(dstFilename);
    // Each attribute written below gets a descriptor in binaryInfo: element count,
    // stride, byte offset within the file (stream.Length at write time), and data type.
    // The JSON array is then nulled out so the saved scene stays small.
    var binaryInfo = new JObject();
    using (var stream = File.Open(WebUtility.UrlDecode(Path.Combine(dstPath, dstFilename)), FileMode.Create))
    {
        var writer = new BinaryWriter(stream);
        if (meshObj.positions != null && meshObj.positions.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.positions.Count;
            attrData["stride"] = 3;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["positionsAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.positions.Count; x++)
            {
                writer.Write((float)meshObj.positions[x]);
            }
            meshObj.positions = null;
        }
        if (meshObj.colors != null && meshObj.colors.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.colors.Count;
            attrData["stride"] = 3;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["colorsAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.colors.Count; x++)
            {
                writer.Write((float)meshObj.colors[x]);
            }
            meshObj["hasColors"] = true;
            meshObj.colors = null;
        }
        if (meshObj.normals != null && meshObj.normals.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.normals.Count;
            attrData["stride"] = 3;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["normalsAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.normals.Count; x++)
            {
                writer.Write((float)meshObj.normals[x]);
            }
            meshObj.normals = null;
        }
        if (meshObj.uvs != null && meshObj.uvs.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.uvs.Count;
            attrData["stride"] = 2;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["uvsAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.uvs.Count; x++)
            {
                writer.Write((float)meshObj.uvs[x]);
            }
            meshObj["hasUVs"] = true;
            meshObj.uvs = null;
        }
        if (meshObj.uvs2 != null && meshObj.uvs2.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.uvs2.Count;
            attrData["stride"] = 2;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["uvs2AttrDesc"] = attrData;
            for (int x = 0; x < meshObj.uvs2.Count; x++)
            {
                writer.Write((float)meshObj.uvs2[x]);
            }
            meshObj["hasUVs2"] = true;
            meshObj.uvs2 = null;
        }
        if (meshObj.indices != null && meshObj.indices.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.indices.Count;
            attrData["stride"] = 1;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Int32;
            binaryInfo["indicesAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.indices.Count; x++)
            {
                writer.Write((int)meshObj.indices[x]);
            }
            meshObj.indices = null;
        }
        if (meshObj.matricesIndices != null && meshObj.matricesIndices.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.matricesIndices.Count;
            attrData["stride"] = 1;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Int32;
            binaryInfo["matricesIndicesAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.matricesIndices.Count; x++)
            {
                writer.Write((int)meshObj.matricesIndices[x]);
            }
            meshObj["hasMatricesIndices"] = true;
            meshObj.matricesIndices = null;
        }
        if (meshObj.matricesWeights != null && meshObj.matricesWeights.Count > 0)
        {
            var attrData = new JObject();
            attrData["count"] = meshObj.matricesWeights.Count;
            // NOTE(review): stride 2 for matricesWeights differs from the other
            // per-vertex float attributes — confirm this matches the loader.
            attrData["stride"] = 2;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Float;
            binaryInfo["matricesWeightsAttrDesc"] = attrData;
            for (int x = 0; x < meshObj.matricesWeights.Count; x++)
            {
                writer.Write((float)meshObj.matricesWeights[x]);
            }
            meshObj["hasMatricesWeights"] = true;
            meshObj.matricesWeights = null;
        }
        if (isMesh && meshObj.subMeshes != null && meshObj.subMeshes.Count > 0)
        {
            // Sub-meshes are flattened to 5 ints each:
            // materialIndex, verticesStart, verticesCount, indexStart, indexCount.
            var attrData = new JObject();
            attrData["count"] = meshObj.subMeshes.Count;
            attrData["stride"] = 5;
            attrData["offset"] = stream.Length;
            attrData["dataType"] = (int)DataType.Int32;
            binaryInfo["subMeshesAttrDesc"] = attrData;
            var smData = new int[5];
            for (int x = 0; x < meshObj.subMeshes.Count; x++)
            {
                smData[0] = meshObj.subMeshes[x].materialIndex;
                smData[1] = meshObj.subMeshes[x].verticesStart;
                smData[2] = meshObj.subMeshes[x].verticesCount;
                smData[3] = meshObj.subMeshes[x].indexStart;
                smData[4] = meshObj.subMeshes[x].indexCount;
                for (int y = 0; y < smData.Length; y++)
                {
                    writer.Write(smData[y]);
                }
            }
            meshObj.subMeshes = null;
        }
    }
    meshObj["_binaryInfo"] = binaryInfo;
}
/// <summary>
/// Sends one JSON request to the echo server over a fresh TCP connection, reads the
/// JSON response, and prints both along with the round-trip time. All failures are
/// reported to the console; successful round-trips increment <c>requestCount</c>.
/// </summary>
/// <param name="server">Host name or address of the echo server.</param>
/// <param name="payload">Payload describing the method/text to send.</param>
static async Task SendRequestAndReceiveResponseAsync(string server, RequestPayload payload)
{
    // Create a TcpClient socket in order to connect to the echo server.
    using (TcpClient connection = new TcpClient())
    {
        try
        {
            // Start a stop watch timer
            Stopwatch sw = Stopwatch.StartNew();

            // connect socket to the echo server.
            await connection.ConnectAsync(server, SERVER_PORT);

            // Create and fill the Request with "payload" as Payload
            Request request = new Request
            {
                Method = payload.Method,
                Path = payload.Text,
                Headers = new Dictionary<String, String>(),
                Payload = (JObject)JToken.FromObject(payload),
            };
            // Add some headers for test purposes
            request.Headers.Add("agent", "json-client");
            request.Headers.Add("timeout", "10000");

            // Translate the message to JSON and send it to the echo server.
            JsonTextWriter writer = new JsonTextWriter(new StreamWriter(connection.GetStream()));
            serializer.Serialize(writer, request);
            Console.WriteLine($"-->{payload.ToString()}");
            await writer.FlushAsync();

            // Receive the server's response and display it.
            JsonTextReader reader = new JsonTextReader(new StreamReader(connection.GetStream()))
            {
                // To support reading multiple top-level objects
                SupportMultipleContent = true
            };
            try
            {
                // to consume any bytes until start of object ('{');
                // TokenType == None signals the stream ended first.
                do
                {
                    await reader.ReadAsync();
                } while (reader.TokenType != JsonToken.StartObject && reader.TokenType != JsonToken.None);
                if (reader.TokenType == JsonToken.None)
                {
                    Console.WriteLine("***error: reached end of input stream, ending.");
                    return;
                }

                // Read response JSON object
                JObject jresponse = await JObject.LoadAsync(reader);
                sw.Stop();

                // Back to the .NET world: materialize the response and its payload.
                Response response = jresponse.ToObject<Response>();
                RequestPayload recoveredPayload = response.Payload.ToObject<RequestPayload>();
                Console.WriteLine($"<--{response.ToString()}, elapsed: {sw.ElapsedMilliseconds} ms");
            }
            catch (JsonReaderException jre)
            {
                Console.WriteLine($"***error: error reading JSON: {jre.Message}");
            }
            catch (Exception e)
            {
                Console.WriteLine($"-***error: exception: {e}");
            }
            sw.Stop();
            Interlocked.Increment(ref requestCount);
        }
        catch (Exception ex)
        {
            Console.WriteLine($"--***error:[{payload}] {ex.Message}");
        }
    }
}
/// <summary>
/// Deserializes stream metadata from its JSON byte representation. Well-known system
/// properties (max count, max age, cache control, ACL) are mapped to typed fields;
/// every other property is preserved verbatim as custom metadata.
/// </summary>
/// <param name="json">UTF-8(-compatible) JSON bytes describing a single metadata object.</param>
/// <returns>The parsed <see cref="StreamMetadata"/>.</returns>
public static StreamMetadata FromJsonBytes(byte[] json)
{
    using (var reader = new JsonTextReader(new StreamReader(new MemoryStream(json))))
    {
        // Check(bool, reader) asserts a successful read; Check(token, reader)
        // asserts the current token type.
        Check(reader.Read(), reader);
        Check(JsonToken.StartObject, reader);
        int? maxCount = null;
        TimeSpan? maxAge = null;
        TimeSpan? cacheControl = null;
        StreamAcl acl = null;
        Dictionary<string, JToken> customMetadata = null;
        // One property per iteration until the object closes.
        while (true)
        {
            Check(reader.Read(), reader);
            if (reader.TokenType == JsonToken.EndObject)
            {
                break;
            }
            Check(JsonToken.PropertyName, reader);
            var name = (string)reader.Value;
            switch (name)
            {
                case SystemMetadata.MaxCount:
                {
                    Check(reader.Read(), reader);
                    Check(JsonToken.Integer, reader);
                    // Json.NET reports integers as long; narrow to int.
                    maxCount = (int)(long)reader.Value;
                    break;
                }
                case SystemMetadata.MaxAge:
                {
                    Check(reader.Read(), reader);
                    Check(JsonToken.Integer, reader);
                    // Stored as whole seconds.
                    maxAge = TimeSpan.FromSeconds((long)reader.Value);
                    break;
                }
                case SystemMetadata.CacheControl:
                {
                    Check(reader.Read(), reader);
                    Check(JsonToken.Integer, reader);
                    cacheControl = TimeSpan.FromSeconds((long)reader.Value);
                    break;
                }
                case SystemMetadata.Acl:
                {
                    acl = ReadAcl(reader);
                    break;
                }
                default:
                {
                    // Unknown property: keep the raw token tree, lazily creating the map.
                    if (customMetadata == null)
                    {
                        customMetadata = new Dictionary<string, JToken>();
                    }
                    Check(reader.Read(), reader);
                    var jToken = JToken.ReadFrom(reader);
                    customMetadata.Add(name, jToken);
                    break;
                }
            }
        }
        return (new StreamMetadata(maxCount, maxAge, cacheControl, acl, customMetadata));
    }
}
/// <summary>
/// Imports assets from an exported catalog JSON file into Azure Data Catalog.
/// The file must be an object with a "catalog" array; each element is re-serialized
/// and posted via <c>td.Update</c>, with running success/failure counts printed.
/// </summary>
/// <param name="td">Target catalog client.</param>
/// <param name="exportedCatalogFilePath">Path of the exported catalog JSON file.</param>
/// <exception cref="Exception">The file does not have the expected top-level structure.</exception>
static void Import(AzureDataCatalog td, string exportedCatalogFilePath)
{
    int totalAssetsImportSucceeded = 0;
    int totalAssetsImportFailed = 0;
    // FIX: the StreamReader/JsonTextReader were never disposed, leaking the file
    // handle; the using statements guarantee cleanup on every exit path.
    using (StreamReader sr = new StreamReader(exportedCatalogFilePath))
    using (JsonTextReader reader = new JsonTextReader(sr))
    {
        StringWriter sw = new StringWriter(new StringBuilder());
        JsonTextWriter jtw = new JsonTextWriter(sw);
        // Validate the expected envelope: { "catalog": [ ... ] }
        reader.Read();
        if (reader.TokenType != JsonToken.StartObject)
        {
            throw new Exception("Invalid Json. Expected StartObject");
        }
        reader.Read();
        if ((reader.TokenType != JsonToken.PropertyName) || (!reader.Value.ToString().Equals("catalog")))
        {
            throw new Exception("Invalid Json. Expected catalog array");
        }
        reader.Read();
        if (reader.TokenType != JsonToken.StartArray)
        {
            throw new Exception("Invalid Json. Expected StartArray");
        }
        while (reader.Read())
        {
            if (reader.TokenType == JsonToken.EndArray)
            {
                break;
            }
            // Copy the current asset subtree into a string, then re-parse it so the
            // "id" can be stripped before upload (Update generates/returns the new id).
            jtw.WriteToken(reader);
            JObject asset = JObject.Parse(sw.ToString());
            string id = asset["id"].ToString();
            asset.Remove("id");
            string[] idInfo = id.Split(new char[] { '/' });
            string newid;
            // The penultimate segment of the original id path identifies the asset type/container.
            string UpdateResponse = td.Update(asset.ToString(), idInfo[idInfo.Length - 2], out newid);
            if ((UpdateResponse != null) && (!string.IsNullOrEmpty(newid)))
            {
                totalAssetsImportSucceeded++;
                if (totalAssetsImportSucceeded % 50 == 0)
                {
                    System.Console.WriteLine(totalAssetsImportSucceeded + "Assets Imported Succesfully");
                }
            }
            else
            {
                totalAssetsImportFailed++;
            }
            // reset local variables for next iteration
            sw = new StringWriter(new StringBuilder());
            jtw = new JsonTextWriter(sw);
        }
    }
    Console.WriteLine("Total Imported Success: " + totalAssetsImportSucceeded);
    Console.WriteLine("Total Imported Failed: " + totalAssetsImportFailed);
}
/// <summary>
/// Reads a sync-history remove-post response object from the given JSON reader.
/// Returns null when the reader is null or the input does not start with an object.
/// </summary>
public override async Task<ITraktSyncHistoryRemovePostResponse> ReadObjectAsync(JsonTextReader jsonReader, CancellationToken cancellationToken = default)
{
    if (jsonReader == null)
        return null;

    // The payload must begin with a JSON object.
    if (!await jsonReader.ReadAsync(cancellationToken) || jsonReader.TokenType != JsonToken.StartObject)
        return null;

    var groupReader = new SyncHistoryRemovePostResponseGroupObjectJsonReader();
    var notFoundGroupReader = new SyncHistoryRemovePostResponseNotFoundGroupObjectJsonReader();
    ITraktSyncHistoryRemovePostResponse response = new TraktSyncHistoryRemovePostResponse();

    // Consume properties until the object ends; unknown properties are skipped.
    while (await jsonReader.ReadAsync(cancellationToken) && jsonReader.TokenType == JsonToken.PropertyName)
    {
        string propertyName = jsonReader.Value.ToString();

        if (propertyName == JsonProperties.SYNC_HISTORY_REMOVE_POST_RESPONSE_PROPERTY_NAME_DELETED)
        {
            response.Deleted = await groupReader.ReadObjectAsync(jsonReader, cancellationToken);
        }
        else if (propertyName == JsonProperties.SYNC_HISTORY_REMOVE_POST_RESPONSE_PROPERTY_NAME_NOT_FOUND)
        {
            response.NotFound = await notFoundGroupReader.ReadObjectAsync(jsonReader, cancellationToken);
        }
        else
        {
            await JsonReaderHelper.ReadAndIgnoreInvalidContentAsync(jsonReader, cancellationToken);
        }
    }

    return response;
}
// Benchmark variant: decode the cached response bytes to a string first,
// then deserialize from a StringReader.
public BulkResponse HugeResponseWithString()
{
    string jsonText = Encoding.UTF8.GetString(_hugeResponse);
    using (var jsonReader = new JsonTextReader(new StringReader(jsonText)))
    {
        return _jsonSerializer.Deserialize<BulkResponse>(jsonReader);
    }
}
/// <summary>
/// Inspects a SQLFlow JSON response. Small payloads (up to <c>maxLength</c>) are fully
/// parsed and re-serialized; larger payloads are scanned with a streaming reader that
/// only detects the presence of the well-known top-level sections, leaving the raw
/// text as <c>json</c>.
/// </summary>
/// <param name="text">Raw JSON response text.</param>
public SQLFlowResult(string text)
{
    if (text.Length <= maxLength)
    {
        // Small enough to materialize: probe the token paths directly.
        var jobject = JObject.Parse(text);
        json = jobject.ToString();
        data = jobject.SelectToken("data") != null;
        error = jobject.SelectToken("error") != null;
        dbobjs = jobject.SelectToken("data.dbobjs") != null;
        sqlflow = jobject.SelectToken("data.sqlflow") != null;
        graph = jobject.SelectToken("data.graph") != null;
    }
    else
    {
        // Too large to parse into a tree: keep the raw text and stream-scan for flags.
        json = text;
        data = false;
        error = false;
        dbobjs = false;
        sqlflow = false;
        graph = false;
        using var reader = new JsonTextReader(new StringReader(text));
        while (reader.Read())
        {
            if (reader.Value != null)
            {
                // All interesting properties live near the top of the document;
                // stop scanning once we are deeper than that.
                // FIX: 'break' replaces the original 'goto End' — the label sat
                // immediately after this loop, so the jump was just a loop exit.
                if (reader.Depth > 3)
                {
                    break;
                }
                // FIX: compare the enum directly instead of TokenType.ToString() == "PropertyName".
                if (reader.TokenType == JsonToken.PropertyName)
                {
                    switch (reader.Value.ToString())
                    {
                        case "data":
                            data = true;
                            break;
                        case "error":
                            error = true;
                            break;
                        case "dbobjs":
                            dbobjs = true;
                            break;
                        case "sqlflow":
                            sqlflow = true;
                            break;
                        case "graph":
                            graph = true;
                            break;
                    }
                }
            }
            else
            {
                // Once a flagged section has been seen, skip over container bodies
                // instead of walking every nested token.
                if (error || dbobjs || sqlflow || graph)
                {
                    reader.Skip();
                }
            }
        }
    }
}
// Benchmark variant: stream the cached response bytes directly through a
// MemoryStream — no intermediate string allocation.
public BulkResponse HugeResponseWithStream()
{
    using (var jsonReader = new JsonTextReader(new StreamReader(new MemoryStream(_hugeResponse))))
    {
        return _jsonSerializer.Deserialize<BulkResponse>(jsonReader);
    }
}
// Restores this instance from the reader's current string token.
// The first five characters are a prefix that is stripped off — presumably a
// fixed type tag written by the matching ToJson; TODO confirm against the writer.
public override void FromJson(JsonTextReader reader)
{
    string token = (string)reader.Value;
    reference = token.Substring(5);
}
/// <summary>
/// Reads a JSON array of certification objects. Returns null when the reader is
/// null or the input does not start with an array.
/// </summary>
public override async Task<IEnumerable<ITraktCertifications>> ReadArrayAsync(JsonTextReader jsonReader, CancellationToken cancellationToken = default)
{
    if (jsonReader == null)
        return null;

    if (!await jsonReader.ReadAsync(cancellationToken) || jsonReader.TokenType != JsonToken.StartArray)
        return null;

    var itemReader = new CertificationsObjectJsonReader();
    var results = new List<ITraktCertifications>();

    // ReadObjectAsync yields null once the array's end is reached.
    var item = await itemReader.ReadObjectAsync(jsonReader, cancellationToken);
    while (item != null)
    {
        results.Add(item);
        item = await itemReader.ReadObjectAsync(jsonReader, cancellationToken);
    }

    return results;
}