public void Analyze_ArrayMultiItem_ReturnsExpectedArray()
{
    // Tokens describing the JSON array [0, null, false, true].
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenArrayEnd
    };
    var expectedArray = new object[] { 0, null, false, true };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(expectedArray, result);
}
public void Analyze_EmptyInput_ReturnsNothing()
{
    // An empty token stream should produce no results at all.
    var emptyTokens = Enumerable.Empty<Token<ModelTokenType>>();
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    Assert.False(analyzer.Analyze<object>(emptyTokens).Any());
}
public void JsonAnalyzerParse_EnumFromJsonName_ReturnsEnum()
{
    // "yellow" resolves to ExampleEnum.Two via the data-contract resolver's name mapping.
    var tokens = new[] { ModelGrammar.TokenPrimitive("yellow") };
    var analyzer = new ModelAnalyzer(new DataReaderSettings(new DataContractResolverStrategy()));

    var result = analyzer.Analyze<ExampleEnum>(tokens).Single();

    Assert.Equal(ExampleEnum.Two, result);
}
public void JsonAnalyzerParse_EnumFromString_ReturnsEnum()
{
    // The JSON resolver matches the enum member name "Two" directly.
    var tokens = new[] { ModelGrammar.TokenPrimitive("Two") };
    var analyzer = new ModelAnalyzer(new DataReaderSettings(new JsonResolverStrategy()));

    var result = analyzer.Analyze<ExampleEnum>(tokens).Single();

    Assert.Equal(ExampleEnum.Two, result);
}
public void Analyze_ObjectEmpty_ReturnsEmptyObject()
{
    // Tokens describing the empty JSON object {}.
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd
    };
    var expectedObject = new Dictionary<string, object>();
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
public void Analyze_ArrayEmpty_ReturnsEmptyArray()
{
    // Tokens describing the empty JSON array [].
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd
    };
    var emptyArray = new object[0];
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(emptyArray, result);
}
public void Analyze_NullInput_ThrowsArgumentNullException()
{
    IEnumerable<Token<ModelTokenType>> tokens = null;
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    ArgumentNullException ex = Assert.Throws<ArgumentNullException>(
        delegate
        {
            var actual = analyzer.Analyze<object>(tokens).Single();
        });

    // verify exception is coming from expected param
    Assert.Equal("tokens", ex.ParamName);
}
public static void Main(string[] args)
{
    // Pipeline: train one KMeans model per experiment slot, analyze and plot all
    // trained models, then run predictions over the analysis song set.
    AppConfig appConfig = FileSystemHelper.GetAppConfig();
    DateTime runDate = GetRunDate();
    List<Song> trainingSongs = GetPreparedTrainingData();

    // Models are numbered globally across all experiment configurations.
    int modelNumber = 0;
    foreach (ExperimentConfig experiment in appConfig.ExperimentConfigs)
    {
        for (int slot = 0; slot < experiment.ModelsCount; slot++)
        {
            DynamicKMeans trained = TrainModel(trainingSongs, experiment.ClustersCount);
            FileSystemHelper.SaveKMeansModel(trained.Model, runDate, modelNumber);
            modelNumber++;
        }
    }

    // Reload every persisted model and analyze each one.
    List<StaticKMeans> models = ReadAllModels(trainingSongs);
    List<ModelAnalyzer> analyzers = models
        .Select(model =>
        {
            ModelAnalyzer analyzer = new ModelAnalyzer(model);
            analyzer.Analyze();
            return analyzer;
        })
        .ToList();

    AnalysisDrawer drawer = new AnalysisDrawer(analyzers);
    drawer.CreatePlots();
    FileSystemHelper.SaveAnalysis(drawer, runDate);

    // Make and persist predictions with every model; the index keys the output file.
    List<Song> analyzingSongs = GetAnalyzingSongs();
    FileSystemHelper.ClearPredictions();
    for (int modelIndex = 0; modelIndex < models.Count; modelIndex++)
    {
        List<Prediction> predictions = MakePredictions(models[modelIndex], analyzingSongs);
        FileSystemHelper.SavePredictions(predictions, runDate, modelIndex);
    }
}
public Result Handle()
{
    var analyzer = new ModelAnalyzer(new ModelAnalysisContext());
    var converter = new ModelConverter();
    var generator = new ModelGenerator();
    var emitter = new TypeScriptEmitter();

    // Analyze the configured input; abort early with a descriptive error on failure.
    var analysisResult = analyzer.Analyze(_configuration.InputPath);
    if (!analysisResult.Success)
    {
        return Result.CreateError($"Source analysis error: {analysisResult.ErrorMessage}");
    }
    log.Debug("Source analyzed");

    var classModels = converter.ConvertClasses(analysisResult.Value.Classes);
    var enumModels = converter.ConvertEnums(analysisResult.Value.Enums);
    log.Debug("Models converted");

    // Emit each class model plus its TypeScript AST.
    foreach (var classModel in classModels)
    {
        var contents = generator.GenerateClass(classModel);
        emitter.Emit(_configuration.OutputPath, classModel.Name, EmittedFileType.Model, contents);
        log.Debug($"Class {classModel.Name} emitted");
        new TsGenerator().GenerateDataModelAST(classModel, _configuration.OutputPath);
    }

    // Emit each enum model plus its TypeScript AST.
    foreach (var enumModel in enumModels)
    {
        var contents = generator.GenerateEnum(enumModel);
        emitter.Emit(_configuration.OutputPath, enumModel.Name, EmittedFileType.Enum, contents);
        log.Debug($"Enum {enumModel.Name} emitted");
        new TsGenerator().GenerateEnumAST(enumModel, _configuration.OutputPath);
    }

    return Result.CreateSuccess();
}
public void Analyze_ObjectOneProperty_ReturnsSimpleObject()
{
    // Tokens describing the JSON object {"key":"value"}.
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("key"),
        ModelGrammar.TokenPrimitive("value"),
        ModelGrammar.TokenObjectEnd
    };
    var expectedObject = new Dictionary<string, object> { { "key", "value" } };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
public void Analyze_ArrayUnclosed_ThrowsAnalyzerException()
{
    // input from fail2.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Unclosed array")
    };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = analyzer.Analyze<object>(tokens).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
public void Analyze_ObjectUnterminated_ThrowsAnalyzerException()
{
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    // (the "instead if" wording is the literal fail32.json content)
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead if closing brace"),
        ModelGrammar.TokenTrue
    };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = analyzer.Analyze<object>(tokens).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
public void Analyze_AnonymousObject_ReturnsAnonymousObject()
{
    // NOTE: order is important to ensure type equivalence
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("AString"),
        ModelGrammar.TokenPrimitive("Hello world!"),
        ModelGrammar.TokenProperty("AnInt32"),
        ModelGrammar.TokenPrimitive(42),
        ModelGrammar.TokenProperty("AnAnonymous"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("AnotherString"),
        ModelGrammar.TokenPrimitive("Foo."),
        ModelGrammar.TokenProperty("AnInt64"),
        ModelGrammar.TokenPrimitive(((long)Int32.MaxValue) * 2L),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenProperty("ADouble"),
        ModelGrammar.TokenPrimitive(Math.PI),
        ModelGrammar.TokenObjectEnd
    };

    // The anonymous instance doubles as the type template passed to Analyze.
    var expectedShape = new
    {
        AString = "Hello world!",
        AnInt32 = 42,
        AnAnonymous = new
        {
            AnotherString = "Foo.",
            AnInt64 = ((long)Int32.MaxValue) * 2L
        },
        ADouble = Math.PI
    };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens, expectedShape).Single();

    Assert.Equal(expectedShape, result, false);
}
public void Analyze_ValueInsteadOfProperty_ThrowsAnalyzerException()
{
    // input from fail21.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenPrimitive("Comma instead of colon"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectEnd
    };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = analyzer.Analyze<object>(tokens).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenPrimitive("Comma instead of colon"), ex.Token);
}
public void Analyze_ObjectNested_ReturnsNestedObject()
{
    // input from pass3.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("JSON Test Pattern pass3"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("The outermost value"),
        ModelGrammar.TokenPrimitive("must be an object or array."),
        ModelGrammar.TokenProperty("In this test"),
        ModelGrammar.TokenPrimitive("It is an object."),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectEnd
    };

    var expectedGraph = new Dictionary<string, object>
    {
        {
            "JSON Test Pattern pass3",
            new Dictionary<string, object>
            {
                { "The outermost value", "must be an object or array." },
                { "In this test", "It is an object." }
            }
        }
    };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedGraph, result, false);
}
public void Analyze_ArrayEmpty_ReturnsEmptyArray()
{
    // An open/close pair with nothing between maps to a zero-length array.
    var arrayTokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd
    };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var actualArray = analyzer.Analyze(arrayTokens).Cast<object[]>().Single();

    Assert.Equal(new object[0], actualArray);
}
public void Analyze_GraphComplex_ReturnsGraph()
{
    // input from pass1.json in test suite at http://www.json.org/JSON_checker/
    // FIX: the "quotes" literal had its `&#34;` entities mangled into bare quote
    // characters (invalid C#), and "50 St. James Street" in the expected graph
    // was split across a physical line break; both restored from pass1.json.
    var input = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("JSON Test Pattern pass1"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("object with 1 member"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("array with 1 element"),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenPrimitive(-42),
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("integer"), ModelGrammar.TokenPrimitive(1234567890),
        ModelGrammar.TokenProperty("real"), ModelGrammar.TokenPrimitive(-9876.543210),
        ModelGrammar.TokenProperty("e"), ModelGrammar.TokenPrimitive(0.123456789e-12),
        ModelGrammar.TokenProperty("E"), ModelGrammar.TokenPrimitive(1.234567890E+34),
        ModelGrammar.TokenProperty(""), ModelGrammar.TokenPrimitive(23456789012E66),
        ModelGrammar.TokenProperty("zero"), ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenProperty("one"), ModelGrammar.TokenPrimitive(1),
        ModelGrammar.TokenProperty("space"), ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty("quote"), ModelGrammar.TokenPrimitive("\""),
        ModelGrammar.TokenProperty("backslash"), ModelGrammar.TokenPrimitive("\\"),
        ModelGrammar.TokenProperty("controls"), ModelGrammar.TokenPrimitive("\b\f\n\r\t"),
        ModelGrammar.TokenProperty("slash"), ModelGrammar.TokenPrimitive("/ & /"),
        ModelGrammar.TokenProperty("alpha"), ModelGrammar.TokenPrimitive("abcdefghijklmnopqrstuvwyz"),
        ModelGrammar.TokenProperty("ALPHA"), ModelGrammar.TokenPrimitive("ABCDEFGHIJKLMNOPQRSTUVWYZ"),
        ModelGrammar.TokenProperty("digit"), ModelGrammar.TokenPrimitive("0123456789"),
        ModelGrammar.TokenProperty("0123456789"), ModelGrammar.TokenPrimitive("digit"),
        ModelGrammar.TokenProperty("special"), ModelGrammar.TokenPrimitive("`1~!@#$%^&*()_+-={':[,]}|;.</>?"),
        ModelGrammar.TokenProperty("hex"), ModelGrammar.TokenPrimitive("\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"),
        ModelGrammar.TokenProperty("true"), ModelGrammar.TokenTrue,
        ModelGrammar.TokenProperty("false"), ModelGrammar.TokenFalse,
        ModelGrammar.TokenProperty("null"), ModelGrammar.TokenNull,
        ModelGrammar.TokenProperty("array"), ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("object"), ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenProperty("address"), ModelGrammar.TokenPrimitive("50 St. James Street"),
        ModelGrammar.TokenProperty("url"), ModelGrammar.TokenPrimitive("http://www.JSON.org/"),
        ModelGrammar.TokenProperty("comment"), ModelGrammar.TokenPrimitive("// /* <!-- --"),
        ModelGrammar.TokenProperty("# -- --> */"), ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty(" s p a c e d "),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1), ModelGrammar.TokenPrimitive(2), ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4), ModelGrammar.TokenPrimitive(5), ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("compact"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1), ModelGrammar.TokenPrimitive(2), ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4), ModelGrammar.TokenPrimitive(5), ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("jsontext"),
        ModelGrammar.TokenPrimitive("{\"object with 1 member\":[\"array with 1 element\"]}"),
        ModelGrammar.TokenProperty("quotes"),
        ModelGrammar.TokenPrimitive("&#34; \u0022 %22 0x22 034 &#34;"),
        ModelGrammar.TokenProperty("/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"),
        ModelGrammar.TokenPrimitive("A key can be any string"),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenPrimitive(0.5), ModelGrammar.TokenPrimitive(98.6),
        ModelGrammar.TokenPrimitive(99.44), ModelGrammar.TokenPrimitive(1066),
        ModelGrammar.TokenPrimitive(10.0), ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(0.1), ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(2.0), ModelGrammar.TokenPrimitive(2.0),
        ModelGrammar.TokenPrimitive("rosebud"),
        ModelGrammar.TokenArrayEnd
    };

    var expected = new object[]
    {
        "JSON Test Pattern pass1",
        new Dictionary<string, object>
        {
            { "object with 1 member", new[] { "array with 1 element" } },
        },
        new Dictionary<string, object>(),
        new object[0],
        -42,
        true,
        false,
        null,
        new Dictionary<string, object>
        {
            { "integer", 1234567890 },
            { "real", -9876.543210 },
            { "e", 0.123456789e-12 },
            { "E", 1.234567890E+34 },
            { "", 23456789012E66 },
            { "zero", 0 },
            { "one", 1 },
            { "space", " " },
            { "quote", "\"" },
            { "backslash", "\\" },
            { "controls", "\b\f\n\r\t" },
            { "slash", "/ & /" },
            { "alpha", "abcdefghijklmnopqrstuvwyz" },
            { "ALPHA", "ABCDEFGHIJKLMNOPQRSTUVWYZ" },
            { "digit", "0123456789" },
            { "0123456789", "digit" },
            { "special", "`1~!@#$%^&*()_+-={':[,]}|;.</>?" },
            { "hex", "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A" },
            { "true", true },
            { "false", false },
            { "null", null },
            { "array", new object[0] },
            { "object", new Dictionary<string, object>() },
            { "address", "50 St. James Street" },
            { "url", "http://www.JSON.org/" },
            { "comment", "// /* <!-- --" },
            { "# -- --> */", " " },
            { " s p a c e d ", new[] { 1, 2, 3, 4, 5, 6, 7 } },
            { "compact", new[] { 1, 2, 3, 4, 5, 6, 7 } },
            { "jsontext", "{\"object with 1 member\":[\"array with 1 element\"]}" },
            { "quotes", "&#34; \u0022 %22 0x22 034 &#34;" },
            { "/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?", "A key can be any string" }
        },
        0.5, 98.6, 99.44, 1066,
        1e1, 0.1e1, 1e-1, 1e00, 2e+00, 2e-00,
        "rosebud"
    };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var actual = analyzer.Analyze(input).Cast<object[]>().Single();

    Assert.Equal(expected, actual, false);
}
public void Analyze_EmptyInput_ReturnsNothing()
{
    // No tokens in means no values out.
    var noTokens = Enumerable.Empty<Token<ModelTokenType>>();
    var reader = new ModelAnalyzer(new DataReaderSettings());

    bool produced = reader.Analyze<object>(noTokens).Any();

    Assert.False(produced);
}
public void Analyze_NullInput_ThrowsArgumentNullException()
{
    IEnumerable<Token<ModelTokenType>> nullTokens = null;
    var reader = new ModelAnalyzer(new DataReaderSettings());

    ArgumentNullException ex = Assert.Throws<ArgumentNullException>(
        delegate
        {
            var actual = reader.Analyze<object>(nullTokens).Single();
        });

    // verify exception is coming from expected param
    Assert.Equal("tokens", ex.ParamName);
}
public void Analyze_DynamicExample_ReturnsDynamicObject()
{
    // Tokens for {"foo":"hello world","number":42,"boolean":false,"null":null}.
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("foo"),
        ModelGrammar.TokenPrimitive("hello world"),
        ModelGrammar.TokenProperty("number"),
        ModelGrammar.TokenPrimitive(42),
        ModelGrammar.TokenProperty("boolean"),
        ModelGrammar.TokenPrimitive(false),
        ModelGrammar.TokenProperty("null"),
        ModelGrammar.TokenPrimitive(null),
        ModelGrammar.TokenObjectEnd
    };

    // Build the expected dynamic object member-by-member.
    dynamic expectedDynamic = new DynamicExample();
    expectedDynamic.foo = "hello world";
    expectedDynamic.number = 42;
    expectedDynamic.boolean = false;
    expectedDynamic.@null = null;

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze<DynamicExample>(tokens).Single();

    Assert.Equal(expectedDynamic.Values, result.Values, false);
}
public void Analyze_GraphComplex_ReturnsGraph()
{
    // input from pass1.json in test suite at http://www.json.org/JSON_checker/
    // FIX: the "quotes" literal had its `&#34;` entities mangled into bare quote
    // characters (invalid C#), and "50 St. James Street" in the expected graph
    // was split across a physical line break; both restored from pass1.json.
    var input = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("JSON Test Pattern pass1"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("object with 1 member"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("array with 1 element"),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenPrimitive(-42),
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("integer"), ModelGrammar.TokenPrimitive(1234567890),
        ModelGrammar.TokenProperty("real"), ModelGrammar.TokenPrimitive(-9876.543210),
        ModelGrammar.TokenProperty("e"), ModelGrammar.TokenPrimitive(0.123456789e-12),
        ModelGrammar.TokenProperty("E"), ModelGrammar.TokenPrimitive(1.234567890E+34),
        ModelGrammar.TokenProperty(""), ModelGrammar.TokenPrimitive(23456789012E66),
        ModelGrammar.TokenProperty("zero"), ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenProperty("one"), ModelGrammar.TokenPrimitive(1),
        ModelGrammar.TokenProperty("space"), ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty("quote"), ModelGrammar.TokenPrimitive("\""),
        ModelGrammar.TokenProperty("backslash"), ModelGrammar.TokenPrimitive("\\"),
        ModelGrammar.TokenProperty("controls"), ModelGrammar.TokenPrimitive("\b\f\n\r\t"),
        ModelGrammar.TokenProperty("slash"), ModelGrammar.TokenPrimitive("/ & /"),
        ModelGrammar.TokenProperty("alpha"), ModelGrammar.TokenPrimitive("abcdefghijklmnopqrstuvwyz"),
        ModelGrammar.TokenProperty("ALPHA"), ModelGrammar.TokenPrimitive("ABCDEFGHIJKLMNOPQRSTUVWYZ"),
        ModelGrammar.TokenProperty("digit"), ModelGrammar.TokenPrimitive("0123456789"),
        ModelGrammar.TokenProperty("0123456789"), ModelGrammar.TokenPrimitive("digit"),
        ModelGrammar.TokenProperty("special"), ModelGrammar.TokenPrimitive("`1~!@#$%^&*()_+-={':[,]}|;.</>?"),
        ModelGrammar.TokenProperty("hex"), ModelGrammar.TokenPrimitive("\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"),
        ModelGrammar.TokenProperty("true"), ModelGrammar.TokenTrue,
        ModelGrammar.TokenProperty("false"), ModelGrammar.TokenFalse,
        ModelGrammar.TokenProperty("null"), ModelGrammar.TokenNull,
        ModelGrammar.TokenProperty("array"), ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("object"), ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenProperty("address"), ModelGrammar.TokenPrimitive("50 St. James Street"),
        ModelGrammar.TokenProperty("url"), ModelGrammar.TokenPrimitive("http://www.JSON.org/"),
        ModelGrammar.TokenProperty("comment"), ModelGrammar.TokenPrimitive("// /* <!-- --"),
        ModelGrammar.TokenProperty("# -- --> */"), ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty(" s p a c e d "),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1), ModelGrammar.TokenPrimitive(2), ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4), ModelGrammar.TokenPrimitive(5), ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("compact"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1), ModelGrammar.TokenPrimitive(2), ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4), ModelGrammar.TokenPrimitive(5), ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("jsontext"),
        ModelGrammar.TokenPrimitive("{\"object with 1 member\":[\"array with 1 element\"]}"),
        ModelGrammar.TokenProperty("quotes"),
        ModelGrammar.TokenPrimitive("&#34; \u0022 %22 0x22 034 &#34;"),
        ModelGrammar.TokenProperty("/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"),
        ModelGrammar.TokenPrimitive("A key can be any string"),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenPrimitive(0.5), ModelGrammar.TokenPrimitive(98.6),
        ModelGrammar.TokenPrimitive(99.44), ModelGrammar.TokenPrimitive(1066),
        ModelGrammar.TokenPrimitive(10.0), ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(0.1), ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(2.0), ModelGrammar.TokenPrimitive(2.0),
        ModelGrammar.TokenPrimitive("rosebud"),
        ModelGrammar.TokenArrayEnd
    };

    var expected = new object[]
    {
        "JSON Test Pattern pass1",
        new Dictionary<string, object>
        {
            { "object with 1 member", new[] { "array with 1 element" } },
        },
        new Dictionary<string, object>(),
        new object[0],
        -42,
        true,
        false,
        null,
        new Dictionary<string, object>
        {
            { "integer", 1234567890 },
            { "real", -9876.543210 },
            { "e", 0.123456789e-12 },
            { "E", 1.234567890E+34 },
            { "", 23456789012E66 },
            { "zero", 0 },
            { "one", 1 },
            { "space", " " },
            { "quote", "\"" },
            { "backslash", "\\" },
            { "controls", "\b\f\n\r\t" },
            { "slash", "/ & /" },
            { "alpha", "abcdefghijklmnopqrstuvwyz" },
            { "ALPHA", "ABCDEFGHIJKLMNOPQRSTUVWYZ" },
            { "digit", "0123456789" },
            { "0123456789", "digit" },
            { "special", "`1~!@#$%^&*()_+-={':[,]}|;.</>?" },
            { "hex", "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A" },
            { "true", true },
            { "false", false },
            { "null", null },
            { "array", new object[0] },
            { "object", new Dictionary<string, object>() },
            { "address", "50 St. James Street" },
            { "url", "http://www.JSON.org/" },
            { "comment", "// /* <!-- --" },
            { "# -- --> */", " " },
            { " s p a c e d ", new[] { 1, 2, 3, 4, 5, 6, 7 } },
            { "compact", new[] { 1, 2, 3, 4, 5, 6, 7 } },
            { "jsontext", "{\"object with 1 member\":[\"array with 1 element\"]}" },
            { "quotes", "&#34; \u0022 %22 0x22 034 &#34;" },
            { "/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?", "A key can be any string" }
        },
        0.5, 98.6, 99.44, 1066,
        1e1, 0.1e1, 1e-1, 1e00, 2e+00, 2e-00,
        "rosebud"
    };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var actual = analyzer.Analyze(input).Cast<object[]>().Single();

    Assert.Equal(expected, actual, false);
}
public void Analyze_ObjectUnterminated_ThrowsAnalyzerException()
{
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    // (the "instead if" wording is the literal fail32.json content)
    var unterminated = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead if closing brace"),
        ModelGrammar.TokenTrue
    };
    var reader = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = reader.Analyze<object>(unterminated).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
public void Analyze_AnonymousObject_ReturnsAnonymousObject()
{
    // NOTE: order is important to ensure type equivalence
    var tokenStream = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("AString"),
        ModelGrammar.TokenPrimitive("Hello world!"),
        ModelGrammar.TokenProperty("AnInt32"),
        ModelGrammar.TokenPrimitive(42),
        ModelGrammar.TokenProperty("AnAnonymous"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("AnotherString"),
        ModelGrammar.TokenPrimitive("Foo."),
        ModelGrammar.TokenProperty("AnInt64"),
        ModelGrammar.TokenPrimitive(((long)Int32.MaxValue) * 2L),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenProperty("ADouble"),
        ModelGrammar.TokenPrimitive(Math.PI),
        ModelGrammar.TokenObjectEnd
    };

    // The anonymous instance serves as the shape template for Analyze.
    var template = new
    {
        AString = "Hello world!",
        AnInt32 = 42,
        AnAnonymous = new
        {
            AnotherString = "Foo.",
            AnInt64 = ((long)Int32.MaxValue) * 2L
        },
        ADouble = Math.PI
    };

    var reader = new ModelAnalyzer(new DataReaderSettings());

    var produced = reader.Analyze(tokenStream, template).Single();

    Assert.Equal(template, produced, false);
}
public void Analyze_ArrayMultiItem_ReturnsExpectedArray()
{
    // Mixed primitive kinds in one array: [0, null, false, true].
    var tokenStream = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenArrayEnd
    };
    var reader = new ModelAnalyzer(new DataReaderSettings());

    var produced = reader.Analyze(tokenStream).Cast<object[]>().Single();

    Assert.Equal(new object[] { 0, null, false, true }, produced);
}
public void Analyze_ArrayNestedDeeply_ReturnsExpectedArray()
{
    // input from pass2.json in test suite at http://www.json.org/JSON_checker/
    // 19 array-begin tokens, one string, then 19 matching array-end tokens.
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Not too deep"),
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd
    };

    // The same nesting expressed as a 19-deep jagged array literal.
    var expectedNesting = new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { "Not too deep" } } } } } } } } } } } } } } } } } } };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    var result = analyzer.Analyze(tokens).Cast<string[][][][][][][][][][][][][][][][][][][]>().Single();

    Assert.Equal(expectedNesting, result);
}
public void Analyze_ObjectOneProperty_ReturnsSimpleObject()
{
    // A single name/value pair: {"key":"value"}.
    var tokenStream = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("key"),
        ModelGrammar.TokenPrimitive("value"),
        ModelGrammar.TokenObjectEnd
    };
    var reader = new ModelAnalyzer(new DataReaderSettings());

    var produced = reader.Analyze(tokenStream).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(new Dictionary<string, object> { { "key", "value" } }, produced, false);
}
public void Analyze_ArrayNestedDeeply_ReturnsExpectedArray()
{
    // input from pass2.json in test suite at http://www.json.org/JSON_checker/
    // Nineteen nested arrays wrapping a single string value.
    var tokenStream = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Not too deep"),
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenArrayEnd
    };

    // Equivalent 19-deep jagged array literal.
    var expectedGraph = new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { "Not too deep" } } } } } } } } } } } } } } } } } } };

    var reader = new ModelAnalyzer(new DataReaderSettings());

    var produced = reader.Analyze(tokenStream).Cast<string[][][][][][][][][][][][][][][][][][][]>().Single();

    Assert.Equal(expectedGraph, produced);
}
public void Analyze_ArrayCloseMismatch_ThrowsAnalyzerException()
{
    // input from fail33.json in test suite at http://www.json.org/JSON_checker/
    // An array opened with [ but closed with } must fail on the mismatched token.
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("mismatch"),
        ModelGrammar.TokenObjectEnd
    };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = analyzer.Analyze<object>(tokens).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenObjectEnd, ex.Token);
}
public void Analyze_ObjectEmpty_ReturnsEmptyObject()
{
    // {} should come back as an empty dictionary.
    var tokenStream = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd
    };
    var reader = new ModelAnalyzer(new DataReaderSettings());

    var produced = reader.Analyze(tokenStream).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(new Dictionary<string, object>(), produced, false);
}
public void Analyze_ValueInsteadOfProperty_ThrowsAnalyzerException()
{
    // input from fail21.json in test suite at http://www.json.org/JSON_checker/
    // A primitive where a property name is required must fail on that primitive.
    var tokenStream = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenPrimitive("Comma instead of colon"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectEnd
    };
    var reader = new ModelAnalyzer(new DataReaderSettings());

    TokenException<ModelTokenType> ex = Assert.Throws<TokenException<ModelTokenType>>(
        delegate
        {
            var actual = reader.Analyze<object>(tokenStream).Single();
        });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenPrimitive("Comma instead of colon"), ex.Token);
}