/// <summary>
/// Persists the model as an event-store event and, every <c>SaveStateEvery</c> events,
/// snapshots the model state so replays can start from the snapshot.
/// </summary>
/// <param name="event">Persist event carrying source, id and name metadata.</param>
/// <param name="model">Model instance being persisted.</param>
/// <param name="graph">Property graph describing which properties are included.</param>
/// <param name="create">True for create (stream must not exist), false for update (stream must exist).</param>
protected override sealed async Task PersistModelAsync(PersistEvent @event, TModel model, Graph<TModel> graph, bool create)
{
    var id = ModelAnalyzer.GetIdentity(model);
    var streamName = EventStoreCommon.GetStreamName<TModel>(id);

    var eventStoreModel = new EventStoreEventModelData<TModel>()
    {
        Source = @event.Source,
        SourceType = @event.Source?.GetType().Name,
        Model = model,
        Graph = graph
    };
    var data = EventStoreCommon.Serialize(eventStoreModel);

    // NotExisting enforces create-only semantics; Existing enforces update-only.
    var eventNumber = await Engine.AppendAsync(@event.ID, @event.Name, streamName, null, create ? EventStoreState.NotExisting : EventStoreState.Existing, data);

    // Periodic snapshot: bound the cost of replaying the stream from scratch.
    if (eventNumber > 0 && eventNumber % SaveStateEvery == 0)
    {
        var thisEventData = (await Engine.ReadBackwardsAsync(streamName, eventNumber, 1, null, null, null))[0];
        var where = ModelAnalyzer.GetIdentityExpression<TModel>(id);
        var eventStates = await Repo.QueryAsync(new EventQueryMany<TModel>(thisEventData.Date, thisEventData.Date, where));
        // Single(predicate) instead of Where(...).Single(): same semantics, one pass.
        var eventState = eventStates.Single(x => x.Number == eventNumber);
        await SaveModelStateAsync(id, eventState.Model, eventState.Number);
    }
}
/// <summary>A nested object token stream materializes as nested dictionaries.</summary>
public void Analyze_ObjectNested_ReturnsNestedObject()
{
    // input from pass3.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("JSON Test Pattern pass3"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("The outermost value"),
        ModelGrammar.TokenPrimitive("must be an object or array."),
        ModelGrammar.TokenProperty("In this test"),
        ModelGrammar.TokenPrimitive("It is an object."),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectEnd
    };
    var innerObject = new Dictionary<string, object>
    {
        { "The outermost value", "must be an object or array." },
        { "In this test", "It is an object." }
    };
    var expectedObject = new Dictionary<string, object>
    {
        { "JSON Test Pattern pass3", innerObject }
    };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
// NOTE(review): integration test, not a unit test - requires a local Tabular
// instance listening on port 9134 and a writable d:\temp directory; it cannot
// run on a build agent as-is. Consider parameterizing the connection string
// and output path.
public void TestVertipaqAnalyzerProfile()
{
    //var cnnStr = ConnectionString;
    //var cnnStr = @"Data Source=http://localhost:9000/xmla;Workstation ID=C:\Users\c950497\Downloads\RAD Model.xlsx";
    var cnnStr = @"Data Source=localhost:9134;";
    var cnn = new ADOTabular.ADOTabularConnection(cnnStr, ADOTabular.AdomdClientWrappers.AdomdType.AnalysisServices);
    cnn.Open();
    // presumably forces database metadata to load before profiling - TODO confirm
    var db = cnn.Database;
    var profile = ModelAnalyzer.Create(cnn);
    JsonSerializer serializer = new JsonSerializer();
    //serializer.Converters.Add(new JavaScriptDateTimeConverter());
    serializer.NullValueHandling = NullValueHandling.Ignore;
    // Serialize the profile to disk; both writers are disposed by the using blocks.
    using (StreamWriter sw = new StreamWriter(@"d:\temp\BUSINESS_NBN_CUBE_VertipaqAnalyzerProfile.json"))
    using (JsonWriter writer = new JsonTextWriter(sw))
    {
        serializer.Serialize(writer, profile);
        // {"ExpiryDate":new Date(1230375600000),"Price":0}
    }
    cnn.Close();
}
/// <summary>A mixed-value array token stream yields an object[] with the same items.</summary>
public void Analyze_ArrayMultiItem_ReturnsExpectedArray()
{
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenArrayEnd
    };
    var expectedArray = new object[] { 0, null, false, true };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(expectedArray, result);
}
/// <summary>Object tokens bind onto a dynamic object's members by property name.</summary>
public void Analyze_DynamicExample_ReturnsDynamicObject()
{
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("foo"),
        ModelGrammar.TokenPrimitive("hello world"),
        ModelGrammar.TokenProperty("number"),
        ModelGrammar.TokenPrimitive(42),
        ModelGrammar.TokenProperty("boolean"),
        ModelGrammar.TokenPrimitive(false),
        ModelGrammar.TokenProperty("null"),
        ModelGrammar.TokenPrimitive(null),
        ModelGrammar.TokenObjectEnd
    };

    dynamic expectedModel = new DynamicExample();
    expectedModel.foo = "hello world";
    expectedModel.number = 42;
    expectedModel.boolean = false;
    expectedModel.@null = null;

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze<DynamicExample>(tokens).Single();

    Assert.Equal(expectedModel.Values, result.Values, false);
}
/// <summary>An empty token stream produces no results at all.</summary>
public void Analyze_EmptyInput_ReturnsNothing()
{
    var tokens = Enumerable.Empty<Token<ModelTokenType>>();
    var sut = new ModelAnalyzer(new DataReaderSettings());

    Assert.False(sut.Analyze<object>(tokens).Any());
}
/// <summary>Removes layers reported as unused, keeping any flagged Preserve.</summary>
public void Run(ref Model model)
{
    // TODO: strip layers not useful to compute output
    // Strip unused layers
    var unusedLayers = new HashSet<string>(ModelAnalyzer.FindUnusedLayers(model));
    // A layer survives when it is still referenced or explicitly preserved.
    model.layers = model.layers.FindAll(l => !unusedLayers.Contains(l.name) || l.flags.HasFlag(Layer.Flags.Preserve));
}
/// <summary>
/// Converts a single-parameter lambda into a parenthesized SQL condition. When invoked
/// from a nested member chain, first emits a join predicate tying the calling model's
/// identity column to this model's foreign-identity column.
/// </summary>
protected override void ConvertToSqlLambda(Expression exp, ref CharWriteBuffer sb, BuilderContext context)
{
    context.MemberContext.OperatorStack.Push(Operator.Lambda);

    // Fix: the original 'as' cast followed by an unchecked dereference threw a
    // NullReferenceException for non-lambda input; fail with a meaningful exception.
    if (!(exp is LambdaExpression lambda))
    {
        throw new NotSupportedException("Can only parse a lambda expression.");
    }
    if (lambda.Parameters.Count != 1)
    {
        throw new NotSupportedException("Can only parse a lambda with one parameter.");
    }

    var parameter = lambda.Parameters[0];
    var modelDetail = ModelAnalyzer.GetModel(parameter.Type);
    if (context.RootDependant.ModelDetail.Type != modelDetail.Type)
    {
        throw new Exception($"Lambda type {modelDetail.Type.GetNiceName()} does not match the root type {context.RootDependant.ModelDetail.Type.GetNiceName()}");
    }

    if (context.MemberContext.ModelStack.Count > 0)
    {
        // Nested lambda: join calling model identity to this model's foreign identity.
        var callingModel = context.MemberContext.ModelStack.Peek();
        var property = context.MemberContext.MemberLambdaStack.Peek();
        if (callingModel.IdentityProperties.Count != 1)
        {
            throw new NotSupportedException($"Relational queries support only one identity on {callingModel.Type.GetNiceName()}");
        }
        var callingModelIdentity = callingModel.IdentityProperties[0];
        var modelProperty = callingModel.GetProperty(property.Member.Name);
        sb.Write('`');
        sb.Write(callingModel.DataSourceEntityName);
        sb.Write("`.`");
        sb.Write(callingModelIdentity.PropertySourceName);
        sb.Write("`=");
        sb.Write('`');
        sb.Write(modelDetail.DataSourceEntityName);
        sb.Write("`.`");
        sb.Write(modelProperty.ForeignIdentity);
        sb.Write("`AND"); // NOTE(review): no space before AND - confirm the consumer tolerates `x`AND(...)
    }

    // Scope the parameter/model for the lambda body, emit it, then unwind.
    context.MemberContext.DependantStack.Push(context.RootDependant);
    context.MemberContext.ModelStack.Push(modelDetail);
    context.MemberContext.ModelContexts.Add(parameter.Name, modelDetail);
    sb.Write('(');
    ConvertToSql(lambda.Body, ref sb, context);
    sb.Write(')');
    context.MemberContext.DependantStack.Pop();
    context.MemberContext.ModelStack.Pop();
    context.MemberContext.ModelContexts.Remove(parameter.Name);
    context.MemberContext.OperatorStack.Pop();
}
/// <summary>
/// Encrypts the encryptable string/byte[] properties of each model that are included
/// in the graph. String values are marked with <c>encryptionPrefix</c> before encryption
/// so already-encrypted values can be detected and skipped.
/// </summary>
/// <param name="models">Models to encrypt in place (or copy first when <paramref name="newCopy"/> is true).</param>
/// <param name="graph">Property graph limiting which properties are touched.</param>
/// <param name="newCopy">When true, mapped copies are encrypted and the inputs are left untouched.</param>
/// <returns>The encrypted models (same array unless <paramref name="newCopy"/> forced a copy).</returns>
public TModel[] EncryptModels(TModel[] models, Graph <TModel> graph, bool newCopy)
{
    if (!this.Enabled)
    {
        return(models);
    }
    var properties = GetEncryptableProperties(typeof(TModel), this.Properties);
    if (properties.Length == 0)
    {
        return(models);
    }
    graph = new Graph <TModel>(graph);
    //add identites for copying
    graph.AddProperties(ModelAnalyzer.GetIdentityPropertyNames(typeof(TModel)));
    if (newCopy)
    {
        models = Mapper.Map <TModel[], TModel[]>(models, graph);
    }
    foreach (TModel model in models)
    {
        foreach (var property in properties)
        {
            if (graph.HasLocalProperty(property.Name))
            {
                if (property.TypeDetail.CoreType == CoreType.String)
                {
                    string plain = (string)property.Getter(model);
                    if (plain != null)
                    {
                        // Encrypt only when the value does not already start with the
                        // marker prefix (values too short to carry it are encrypted too).
                        if (plain.Length <= encryptionPrefix.Length || plain.Substring(0, encryptionPrefix.Length) != encryptionPrefix)
                        {
                            plain = encryptionPrefix + plain;
                            string encrypted = SymmetricEncryptor.Encrypt(encryptionAlgorithm, EncryptionKey, plain);
                            property.Setter(model, encrypted);
                        }
                    }
                }
                else if (property.Type == typeof(byte[]))
                {
                    // NOTE(review): byte[] values carry no marker prefix, so a second call
                    // would double-encrypt them - confirm callers never re-encrypt.
                    byte[] plain = (byte[])property.Getter(model);
                    if (plain != null)
                    {
                        byte[] encrypted = SymmetricEncryptor.Encrypt(encryptionAlgorithm, EncryptionKey, plain);
                        property.Setter(model, encrypted);
                    }
                }
            }
        }
    }
    return(models);
}
/// <summary>
/// Analytics Runner.
/// </summary>
/// <param name="crawler">Crawler supplying content to analyze; must not be null.</param>
/// <param name="modelAnalyzer">Model analyzer; defaults to <see cref="ArticlesSiteAnalyzer"/> when null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="crawler"/> is null.</exception>
public AnalyticsRunner(ICrawler crawler, ModelAnalyzer modelAnalyzer = null)
{
    if (crawler == null)
    {
        // nameof keeps the parameter name refactor-safe (was a magic string).
        throw new ArgumentNullException(nameof(crawler));
    }

    this.crawler = crawler;
    this.modelAnalyzer = modelAnalyzer ?? new ArticlesSiteAnalyzer();
}
/// <summary>Appends a warning for every declared output that no layer in the model produces.</summary>
public void Run(Model model, ref List<Model.ImporterWarning> warnings)
{
    // validate, model contains no unconnected layers
    foreach (var output in ModelAnalyzer.FindUnconnectedOutputs(model))
    {
        ValidationHelper.AppendWarning(false, output,
            "ValidateUnconnectedLayers: Layer is specified as output, but is missing in the model",
            ref warnings, MessageType.Warning);
    }
}
/// <summary>
/// End-to-end: a class referencing another class generates a TypeScript class
/// with the corresponding import statement for the dependency.
/// </summary>
public void ModelGeneration_Class_Dependency()
{
    // create syntax tree
    var personSyntaxTree = CSharpSyntaxTree.ParseText(@" using System; public class Person { public long Id { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public DateTime DateOfBirth { get; set; } public Address Address { get; set; } }");
    var addressSyntaxTree = CSharpSyntaxTree.ParseText(@"public class Address { public int Id { get; set; } public string Country { get; set; } public string City { get; set; } public string PostalCode { get; set; } public string Street { get; set; } public string HouseNumber { get; set; } }");
    // create compilation
    var compilation = RoslynTestHelpers.CreateTestCompilation(new[] { personSyntaxTree, addressSyntaxTree });
    // analyze
    var context = new TestAnalysisContext(compilation);
    var analyzer = new ModelAnalyzer(context);
    var csClassModel = analyzer.AnalyzeClassSymbol(RoslynTestHelpers.GetClassSymbol(compilation, personSyntaxTree));
    // convert
    var converter = new ModelConverter();
    var tsClassModel = converter.ConvertClass(csClassModel);
    // generate
    var generator = new ModelGenerator();
    var generated = generator.GenerateClass(tsClassModel, false);
    // assert
    // NOTE(review): the expected literal's line breaks appear collapsed to spaces in
    // this copy of the file - verify against the original source formatting.
    var expected = @"import { Address } from './address.model'; export class Person { id: number; firstName: string; lastName: string; dateOfBirth: Date; address: Address; } ";
    Assert.AreEqual(expected, generated);
}
/// <summary>Constructing with null settings must throw with the correct parameter name.</summary>
public void Ctor_NullSettings_ThrowsArgumentNullException()
{
    var ex = Assert.Throws<ArgumentNullException>(() =>
    {
        var unused = new ModelAnalyzer(null);
    });

    // verify exception is coming from expected param
    Assert.Equal("settings", ex.ParamName);
}
/// <summary>
/// GZip-compresses the compressible string/byte[] properties of each model that the
/// graph includes. When newCopy is true the inputs are left untouched and mapped
/// copies are compressed and returned instead.
/// </summary>
private TModel[] CompressModels(TModel[] models, Graph<TModel> graph, bool newCopy)
{
    if (!this.Enabled)
        return models;

    var properties = CompressionCommon.GetModelCompressableProperties(typeof(TModel), this.Properties);
    if (properties.Length == 0)
        return models;

    // Identity properties must be copied along so mapped copies stay addressable.
    graph = new Graph<TModel>(graph);
    graph.AddProperties(ModelAnalyzer.GetIdentityPropertyNames(typeof(TModel)));

    if (newCopy)
        models = Mapper.Map<TModel[], TModel[]>(models, graph);

    foreach (var model in models)
    {
        foreach (var property in properties)
        {
            if (!graph.HasLocalProperty(property.Name))
                continue;

            if (property.TypeDetail.CoreType == CoreType.String)
            {
                var text = (string)property.Getter(model);
                if (text != null)
                    property.Setter(model, CompressionCommon.CompressGZip(text));
            }
            else if (property.Type == typeof(byte[]))
            {
                var bytes = (byte[])property.Getter(model);
                if (bytes != null)
                    property.Setter(model, CompressionCommon.CompressGZip(bytes));
            }
        }
    }

    return models;
}
/// <summary>A string token matching an enum member name deserializes to that member.</summary>
public void JsonAnalyzerParse_EnumFromString_ReturnsEnum()
{
    var tokens = new[] { ModelGrammar.TokenPrimitive("Two") };
    var sut = new ModelAnalyzer(new DataReaderSettings(new JsonResolverStrategy()));

    var result = sut.Analyze<ExampleEnum>(tokens).Single();

    Assert.Equal(ExampleEnum.Two, result);
}
/// <summary>A string token matching an enum's data-contract name deserializes to that member.</summary>
public void JsonAnalyzerParse_EnumFromJsonName_ReturnsEnum()
{
    var tokens = new[] { ModelGrammar.TokenPrimitive("yellow") };
    var sut = new ModelAnalyzer(new DataReaderSettings(new DataContractResolverStrategy()));

    var result = sut.Analyze<ExampleEnum>(tokens).Single();

    Assert.Equal(ExampleEnum.Two, result);
}
/// <summary>Analyzing a null token sequence must throw with parameter name "tokens".</summary>
public void Analyze_NullInput_ThrowsArgumentNullException()
{
    IEnumerable<Token<ModelTokenType>> tokens = null;
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<ArgumentNullException>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected param
    Assert.Equal("tokens", ex.ParamName);
}
/// <summary>Object begin/end tokens with nothing between produce an empty dictionary.</summary>
public void Analyze_ObjectEmpty_ReturnsEmptyObject()
{
    var tokens = new[] { ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenObjectEnd };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(new Dictionary<string, object>(), result, false);
}
/// <summary>Array begin/end tokens with nothing between produce an empty object[].</summary>
public void Analyze_ArrayEmpty_ReturnsEmptyArray()
{
    var tokens = new[] { ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayEnd };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var result = sut.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(new object[0], result);
}
/// <summary>
/// Persists encrypted copies of the models, then copies the datastore-assigned
/// identities back onto the caller's original (unencrypted) instances.
/// </summary>
public override sealed void Create(Persist<TModel> persist)
{
    if (persist.Models.Length == 0)
        return;

    var encryptedModels = EncryptModels(persist.Models, persist.Graph, true);
    NextProvider.Persist(new Create<TModel>(encryptedModels, persist.Graph));

    // Propagate generated identities back to the caller's models.
    for (var i = 0; i < persist.Models.Length; i++)
        ModelAnalyzer.SetIdentity(persist.Models[i], ModelAnalyzer.GetIdentity(encryptedModels[i]));
}
/// <summary>
/// Persists compressed copies of the models, then copies the datastore-assigned
/// identities back onto the caller's original (uncompressed) instances.
/// </summary>
public override sealed async Task CreateAsync(Persist<TModel> persist)
{
    if (persist.Models.Length == 0)
        return;

    var compressedModels = CompressModels(persist.Models, persist.Graph, true);
    await NextProvider.PersistAsync(new Create<TModel>(persist.Event, compressedModels, persist.Graph));

    // Propagate generated identities back to the caller's models.
    for (var i = 0; i < persist.Models.Length; i++)
        ModelAnalyzer.SetIdentity(persist.Models[i], ModelAnalyzer.GetIdentity(compressedModels[i]));
}
/// <summary>
/// End-to-end: a derived class generates a TypeScript class that extends the base
/// and imports it.
/// </summary>
public void ModelGeneration_Class_BaseClass()
{
    // create syntax tree
    var personSyntaxTree = CSharpSyntaxTree.ParseText(@" using System; public class Person { public long Id { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public DateTime DateOfBirth { get; set; } }");
    var studentSyntaxTree = CSharpSyntaxTree.ParseText(@" using System; public class Student : Person { public DateTime? YearOfGraduation { get; set; } }");
    // create compilation
    var compilation = RoslynTestHelpers.CreateTestCompilation(new[] { personSyntaxTree, studentSyntaxTree });
    // analyze
    var context = new TestAnalysisContext(compilation);
    var analyzer = new ModelAnalyzer(context);
    var csClassModel = analyzer.AnalyzeClassSymbol(RoslynTestHelpers.GetClassSymbol(compilation, studentSyntaxTree));
    // convert
    var converter = new ModelConverter();
    var tsClassModel = converter.ConvertClass(csClassModel);
    // generate
    var generator = new ModelGenerator();
    var generated = generator.GenerateClass(tsClassModel, false);
    // assert
    // NOTE(review): the expected literal's line breaks appear collapsed to spaces in
    // this copy of the file - verify against the original source formatting.
    var expected = @"import { Person } from './person.model'; export class Student extends Person { yearOfGraduation?: Date; } ";
    Assert.AreEqual(expected, generated);
}
//TODO this pass is handling data transformation in a destructive way and thus loss validation information.
//find a cleaner way to report import warnings.
/// <summary>
/// Gathers each input's declared rank, and its shape when fully known, then fuses
/// shapes into the model's constants.
/// </summary>
public void Run(ref Model model, List<Model.ImporterWarning> warnings)
{
    IDictionary<string, TensorShape?> inputShapes = new Dictionary<string, TensorShape?>();
    IDictionary<string, int?> inputRanks = new Dictionary<string, int?>();

    foreach (var input in model.inputs)
    {
        inputRanks[input.name] = input.rank;
        // Only record shapes that are known well enough for shape inference.
        if (ModelAnalyzer.IsInputShapeAcceptablyKnowForShapeInference(input))
            inputShapes[input.name] = new TensorShape(input.shape);
    }

    FuseShapesIntoConstants(ref model, inputShapes, inputRanks, ref warnings);
}
/// <summary>
/// Entry point: trains KMeans models per experiment config, analyzes all persisted
/// models, saves analysis plots, then writes predictions for each model.
/// </summary>
public static void Main(string[] args)
{
    AppConfig appConfig = FileSystemHelper.GetAppConfig();
    DateTime runDate = GetRunDate();
    List <Song> songs = GetPreparedTrainingData();
    // Train ModelsCount models per experiment config; the saved-model index
    // increments globally across all configs.
    int experimentIndex = 0;
    foreach (ExperimentConfig config in appConfig.ExperimentConfigs)
    {
        for (int i = 0; i < config.ModelsCount; i++)
        {
            DynamicKMeans trainedKMeans = TrainModel(songs, config.ClustersCount);
            FileSystemHelper.SaveKMeansModel(trainedKMeans.Model, runDate, experimentIndex);
            experimentIndex += 1;
        }
    }
    // Re-read every persisted model and run the analyzer over each.
    List <StaticKMeans> models = ReadAllModels(songs);
    List <ModelAnalyzer> analyzers = models.Select(model =>
    {
        ModelAnalyzer analyzer = new ModelAnalyzer(model);
        analyzer.Analyze();
        return(analyzer);
    }).ToList();
    AnalysisDrawer drawer = new AnalysisDrawer(analyzers);
    drawer.CreatePlots();
    FileSystemHelper.SaveAnalysis(drawer, runDate);
    // Predictions are cleared once up front, then rewritten per model index.
    List <Song> analyzingSongs = GetAnalyzingSongs();
    FileSystemHelper.ClearPredictions();
    for (int i = 0; i < models.Count; i++)
    {
        StaticKMeans model = models[i];
        List <Prediction> predictions = MakePredictions(model, analyzingSongs);
        FileSystemHelper.SavePredictions(predictions, runDate, i);
    }
}
/// <summary>
/// Resolves the member at the top of the member-access stack against the current
/// model, following foreign-identity navigation properties into sub-models recursively.
/// Both stacks are restored to their entry state before returning.
/// </summary>
/// <returns>The declaring expression type and member name of the resolved leaf member.</returns>
private static Return ExtractParameter(Context context)
{
    Return ret;
    var member = context.MemberAccessStack.Pop();
    var modelDetail = context.ModelStack.Peek();
    var modelProperty = modelDetail.GetProperty(member.Member.Name);
    if (modelProperty.ForeignIdentity != null)
    {
        // Navigation property: descend into the related model and resolve there.
        var subModelInfo = ModelAnalyzer.GetModel(modelProperty.InnerType);
        context.ModelStack.Push(subModelInfo);
        ret = ExtractParameter(context);
        context.ModelStack.Pop();
    }
    else
    {
        if (context.MemberAccessStack.Count > 0)
        {
            // Only Nullable<T>.Value and DateTime sub-members are permitted past a leaf.
            bool memberPropertyHandled = false;
            var memberProperty = context.MemberAccessStack.Pop();
            if (member.Type.Name == typeof(Nullable <>).Name && memberProperty.Member.Name == "Value")
            {
                memberPropertyHandled = true;
            }
            else if (member.Type == typeof(DateTime))
            {
                memberPropertyHandled = true;
            }
            if (!memberPropertyHandled)
            {
                throw new NotSupportedException(String.Format("{0}.{1} not supported", member.Member.Name, memberProperty.Member.Name));
            }
            // Popped only for validation; restore it for the caller.
            context.MemberAccessStack.Push(memberProperty);
        }
        ret = new Return(member.Expression.Type, member.Member.Name);
    }
    // Restore the member popped on entry so the stack is unchanged on exit.
    context.MemberAccessStack.Push(member);
    return(ret);
}
/// <summary>Debug helper: infers every temporary tensor shape in the model and prints them.</summary>
private void PrintOutputShapes(Model model)
{
    var inputShapes = new Dictionary<string, TensorShape>();
    foreach (var input in model.inputs)
        inputShapes.Add(input.name, new TensorShape(input.shape));

    ModelAnalyzer.ListTemporaryTensorShapes(model, inputShapes, out IDictionary<string, TensorShape?> shapesByName);

    print(shapesByName);
    foreach (var entry in shapesByName)
        print($"{entry.Key} {entry.Value}");
}
/// <summary>
/// Runs the full pipeline: analyze C# sources, convert to TypeScript models, then
/// generate and emit one file (plus AST) per class and per enum.
/// </summary>
public Result Handle()
{
    var analyzer = new ModelAnalyzer(new ModelAnalysisContext());
    var converter = new ModelConverter();
    var generator = new ModelGenerator();
    var emitter = new TypeScriptEmitter();

    var analysisResult = analyzer.Analyze(_configuration.InputPath);
    if (!analysisResult.Success)
        return Result.CreateError($"Source analysis error: {analysisResult.ErrorMessage}");
    log.Debug("Source analyzed");

    var tsClassModels = converter.ConvertClasses(analysisResult.Value.Classes);
    var tsEnumModels = converter.ConvertEnums(analysisResult.Value.Enums);
    log.Debug("Models converted");

    foreach (var classModel in tsClassModels)
    {
        emitter.Emit(_configuration.OutputPath, classModel.Name, EmittedFileType.Model, generator.GenerateClass(classModel));
        log.Debug($"Class {classModel.Name} emitted");
        new TsGenerator().GenerateDataModelAST(classModel, _configuration.OutputPath);
    }

    foreach (var enumModel in tsEnumModels)
    {
        emitter.Emit(_configuration.OutputPath, enumModel.Name, EmittedFileType.Enum, generator.GenerateEnum(enumModel));
        log.Debug($"Enum {enumModel.Name} emitted");
        new TsGenerator().GenerateEnumAST(enumModel, _configuration.OutputPath);
    }

    return Result.CreateSuccess();
}
/// <summary>A single property token pair produces a one-entry dictionary.</summary>
public void Analyze_ObjectOneProperty_ReturnsSimpleObject()
{
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("key"),
        ModelGrammar.TokenPrimitive("value"),
        ModelGrammar.TokenObjectEnd
    };
    var expectedObject = new Dictionary<string, object> { { "key", "value" } };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
/// <summary>An array begin with no matching end must fail at end-of-input (TokenNone).</summary>
public void Analyze_ArrayUnclosed_ThrowsAnalyzerException()
{
    // input from fail2.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("Unclosed array")
    };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<TokenException<ModelTokenType>>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
/// <summary>An object missing its closing brace must fail at end-of-input (TokenNone).</summary>
public void Analyze_ObjectUnterminated_ThrowsAnalyzerException()
{
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead if closing brace"),
        ModelGrammar.TokenTrue
    };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<TokenException<ModelTokenType>>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
/// <summary>
/// Analyzing into an anonymous-type template reproduces the nested anonymous object.
/// Token order must match the template's property order for type equivalence.
/// </summary>
public void Analyze_AnonymousObject_ReturnsAnonymousObject()
{
    // NOTE: order is important to ensure type equivalence
    var input = new[] { ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenProperty("AString"), ModelGrammar.TokenPrimitive("Hello world!"), ModelGrammar.TokenProperty("AnInt32"), ModelGrammar.TokenPrimitive(42), ModelGrammar.TokenProperty("AnAnonymous"), ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenProperty("AnotherString"), ModelGrammar.TokenPrimitive("Foo."), ModelGrammar.TokenProperty("AnInt64"), ModelGrammar.TokenPrimitive(((long)Int32.MaxValue) * 2L), ModelGrammar.TokenObjectEnd, ModelGrammar.TokenProperty("ADouble"), ModelGrammar.TokenPrimitive(Math.PI), ModelGrammar.TokenObjectEnd };
    // The anonymous instance serves both as the type template and the expected value.
    var expected = new { AString = "Hello world!", AnInt32 = 42, AnAnonymous = new { AnotherString = "Foo.", AnInt64 = ((long)Int32.MaxValue) * 2L }, ADouble = Math.PI };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());
    var actual = analyzer.Analyze(input, expected).Single();
    Assert.Equal(expected, actual, false);
}
/// <summary>
/// End-to-end: a fully-qualified System.DateTime property maps to the TypeScript Date type.
/// </summary>
public void ModelGeneration_Class_DateTime()
{
    // create syntax tree
    var syntaxTree = CSharpSyntaxTree.ParseText(@" public class Person { public long Id { get; set; } public string FirstName { get; set; } public string LastName { get; set; } public System.DateTime DateOfBirth { get; set; } }");
    // create compilation
    var compilation = RoslynTestHelpers.CreateTestCompilation(new[] { syntaxTree });
    // analyze
    var context = new TestAnalysisContext(compilation);
    var analyzer = new ModelAnalyzer(context);
    var csClassModel = analyzer.AnalyzeClassSymbol(RoslynTestHelpers.GetClassSymbol(compilation, syntaxTree));
    // convert
    var converter = new ModelConverter();
    var tsClassModel = converter.ConvertClass(csClassModel);
    // generate
    var generator = new ModelGenerator();
    var generated = generator.GenerateClass(tsClassModel, false);
    // assert
    // NOTE(review): the expected literal's line breaks appear collapsed to spaces in
    // this copy of the file - verify against the original source formatting.
    var expected = @"export class Person { id: number; firstName: string; lastName: string; dateOfBirth: Date; } ";
    Assert.AreEqual(expected, generated);
}
/// <summary>A mixed-value array token stream yields an object[] with the same items.</summary>
public void Analyze_ArrayMultiItem_ReturnsExpectedArray()
{
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenArrayEnd
    };
    var expectedArray = new object[] { 0, null, false, true };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(expectedArray, result);
}
/// <summary>
/// A 19-level nested array (JSON_checker pass2.json) round-trips into the
/// corresponding deeply nested string[][]... jagged array.
/// </summary>
public void Analyze_ArrayNestedDeeply_ReturnsExpectedArray()
{
    // input from pass2.json in test suite at http://www.json.org/JSON_checker/
    // 19 array-begin tokens, one primitive, then 19 matching array-end tokens.
    var input = new[] { ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenPrimitive("Not too deep"), ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd, ModelGrammar.TokenArrayEnd };
    // 19-deep jagged array wrapping the single string.
    var expected = new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { new [] { "Not too deep" } } } } } } } } } } } } } } } } } } };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());
    var actual = analyzer.Analyze((input)).Cast<string[][][][][][][][][][][][][][][][][][][]>().Single();
    Assert.Equal(expected, actual);
}
/// <summary>Closing an array with an object-end token must fail at that token.</summary>
public void Analyze_ArrayCloseMismatch_ThrowsAnalyzerException()
{
    // input from fail33.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("mismatch"),
        ModelGrammar.TokenObjectEnd
    };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<TokenException<ModelTokenType>>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenObjectEnd, ex.Token);
}
/// <summary>Object begin/end tokens with nothing between produce an empty dictionary.</summary>
public void Analyze_ObjectEmpty_ReturnsEmptyObject()
{
    var tokens = new[] { ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenObjectEnd };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(new Dictionary<string, object>(), result, false);
}
/// <summary>Analyzing a null token sequence must throw with parameter name "tokens".</summary>
public void Analyze_NullInput_ThrowsArgumentNullException()
{
    IEnumerable<Token<ModelTokenType>> tokens = null;
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<ArgumentNullException>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected param
    Assert.Equal("tokens", ex.ParamName);
}
/// <summary>An empty token stream produces no results at all.</summary>
public void Analyze_EmptyInput_ReturnsNothing()
{
    var tokens = Enumerable.Empty<Token<ModelTokenType>>();
    var sut = new ModelAnalyzer(new DataReaderSettings());

    Assert.False(sut.Analyze<object>(tokens).Any());
}
/// <summary>Constructing with null settings must throw with the correct parameter name.</summary>
public void Ctor_NullSettings_ThrowsArgumentNullException()
{
    var ex = Assert.Throws<ArgumentNullException>(() =>
    {
        var unused = new ModelAnalyzer(null);
    });

    // verify exception is coming from expected param
    Assert.Equal("settings", ex.ParamName);
}
/// <summary>A value token where a property name is required must fail at that value token.</summary>
public void Analyze_ValueInsteadOfProperty_ThrowsAnalyzerException()
{
    // input from fail21.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenPrimitive("Comma instead of colon"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectEnd
    };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<TokenException<ModelTokenType>>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenPrimitive("Comma instead of colon"), ex.Token);
}
/// <summary>An object missing its closing brace must fail at end-of-input (TokenNone).</summary>
public void Analyze_ObjectUnterminated_ThrowsAnalyzerException()
{
    // input from fail32.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("Comma instead if closing brace"),
        ModelGrammar.TokenTrue
    };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var ex = Assert.Throws<TokenException<ModelTokenType>>(() =>
    {
        var unused = sut.Analyze<object>(tokens).Single();
    });

    // verify exception is coming from expected token
    Assert.Equal(ModelGrammar.TokenNone, ex.Token);
}
/// <summary>
/// Analyzing into an anonymous-type template reproduces the nested anonymous object.
/// Token order must match the template's property order for type equivalence.
/// </summary>
public void Analyze_AnonymousObject_ReturnsAnonymousObject()
{
    // NOTE: order is important to ensure type equivalence
    var input = new[] { ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenProperty("AString"), ModelGrammar.TokenPrimitive("Hello world!"), ModelGrammar.TokenProperty("AnInt32"), ModelGrammar.TokenPrimitive(42), ModelGrammar.TokenProperty("AnAnonymous"), ModelGrammar.TokenObjectBeginUnnamed, ModelGrammar.TokenProperty("AnotherString"), ModelGrammar.TokenPrimitive("Foo."), ModelGrammar.TokenProperty("AnInt64"), ModelGrammar.TokenPrimitive( ((long)Int32.MaxValue) * 2L ), ModelGrammar.TokenObjectEnd, ModelGrammar.TokenProperty("ADouble"), ModelGrammar.TokenPrimitive(Math.PI), ModelGrammar.TokenObjectEnd };
    // The anonymous instance serves both as the type template and the expected value.
    var expected = new { AString = "Hello world!", AnInt32 = 42, AnAnonymous = new { AnotherString = "Foo.", AnInt64 = ((long)Int32.MaxValue) * 2L }, ADouble = Math.PI };
    var analyzer = new ModelAnalyzer(new DataReaderSettings());
    var actual = analyzer.Analyze(input, expected).Single();
    Assert.Equal(expected, actual, false);
}
/// <summary>Array begin/end tokens with nothing between produce an empty object[].</summary>
public void Analyze_ArrayEmpty_ReturnsEmptyArray()
{
    var tokens = new[] { ModelGrammar.TokenArrayBeginUnnamed, ModelGrammar.TokenArrayEnd };
    var sut = new ModelAnalyzer(new DataReaderSettings());

    var result = sut.Analyze(tokens).Cast<object[]>().Single();

    Assert.Equal(new object[0], result);
}
/// <summary>A nested object token stream materializes as nested dictionaries.</summary>
public void Analyze_ObjectNested_ReturnsNestedObject()
{
    // input from pass3.json in test suite at http://www.json.org/JSON_checker/
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("JSON Test Pattern pass3"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("The outermost value"),
        ModelGrammar.TokenPrimitive("must be an object or array."),
        ModelGrammar.TokenProperty("In this test"),
        ModelGrammar.TokenPrimitive("It is an object."),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectEnd
    };
    var innerObject = new Dictionary<string, object>
    {
        { "The outermost value", "must be an object or array." },
        { "In this test", "It is an object." }
    };
    var expectedObject = new Dictionary<string, object>
    {
        { "JSON Test Pattern pass3", innerObject }
    };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
/// <summary>A single property token pair produces a one-entry dictionary.</summary>
public void Analyze_ObjectOneProperty_ReturnsSimpleObject()
{
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("key"),
        ModelGrammar.TokenPrimitive("value"),
        ModelGrammar.TokenObjectEnd
    };
    var expectedObject = new Dictionary<string, object> { { "key", "value" } };

    var sut = new ModelAnalyzer(new DataReaderSettings());
    var result = sut.Analyze(tokens).Cast<IDictionary<string, object>>().Single();

    Assert.Equal(expectedObject, result, false);
}
public void Analyze_GraphComplex_ReturnsGraph()
{
    // input from pass1.json in test suite at http://www.json.org/JSON_checker/
    // NOTE(review): the "quotes" value is the literal text `&#34; \u0022 %22 0x22 034 &#34;`
    // exactly as it appears in pass1.json; it had been corrupted into invalid C#
    // (bare quote characters) by an earlier HTML-entity decoding pass and is
    // restored here. The "address" literal was likewise split mid-string and
    // is rejoined to "50 St. James Street".
    var input = new[]
    {
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("JSON Test Pattern pass1"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("object with 1 member"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive("array with 1 element"),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenPrimitive(-42),
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenNull,
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("integer"),
        ModelGrammar.TokenPrimitive(1234567890),
        ModelGrammar.TokenProperty("real"),
        ModelGrammar.TokenPrimitive(-9876.543210),
        ModelGrammar.TokenProperty("e"),
        ModelGrammar.TokenPrimitive(0.123456789e-12),
        ModelGrammar.TokenProperty("E"),
        ModelGrammar.TokenPrimitive(1.234567890E+34),
        ModelGrammar.TokenProperty(""),
        ModelGrammar.TokenPrimitive(23456789012E66),
        ModelGrammar.TokenProperty("zero"),
        ModelGrammar.TokenPrimitive(0),
        ModelGrammar.TokenProperty("one"),
        ModelGrammar.TokenPrimitive(1),
        ModelGrammar.TokenProperty("space"),
        ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty("quote"),
        ModelGrammar.TokenPrimitive("\""),
        ModelGrammar.TokenProperty("backslash"),
        ModelGrammar.TokenPrimitive("\\"),
        ModelGrammar.TokenProperty("controls"),
        ModelGrammar.TokenPrimitive("\b\f\n\r\t"),
        ModelGrammar.TokenProperty("slash"),
        ModelGrammar.TokenPrimitive("/ & /"),
        ModelGrammar.TokenProperty("alpha"),
        ModelGrammar.TokenPrimitive("abcdefghijklmnopqrstuvwyz"),
        ModelGrammar.TokenProperty("ALPHA"),
        ModelGrammar.TokenPrimitive("ABCDEFGHIJKLMNOPQRSTUVWYZ"),
        ModelGrammar.TokenProperty("digit"),
        ModelGrammar.TokenPrimitive("0123456789"),
        ModelGrammar.TokenProperty("0123456789"),
        ModelGrammar.TokenPrimitive("digit"),
        ModelGrammar.TokenProperty("special"),
        ModelGrammar.TokenPrimitive("`1~!@#$%^&*()_+-={':[,]}|;.</>?"),
        ModelGrammar.TokenProperty("hex"),
        ModelGrammar.TokenPrimitive("\u0123\u4567\u89AB\uCDEF\uabcd\uef4A"),
        ModelGrammar.TokenProperty("true"),
        ModelGrammar.TokenTrue,
        ModelGrammar.TokenProperty("false"),
        ModelGrammar.TokenFalse,
        ModelGrammar.TokenProperty("null"),
        ModelGrammar.TokenNull,
        ModelGrammar.TokenProperty("array"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("object"),
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenProperty("address"),
        ModelGrammar.TokenPrimitive("50 St. James Street"),
        ModelGrammar.TokenProperty("url"),
        ModelGrammar.TokenPrimitive("http://www.JSON.org/"),
        ModelGrammar.TokenProperty("comment"),
        ModelGrammar.TokenPrimitive("// /* <!-- --"),
        ModelGrammar.TokenProperty("# -- --> */"),
        ModelGrammar.TokenPrimitive(" "),
        ModelGrammar.TokenProperty(" s p a c e d "),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1),
        ModelGrammar.TokenPrimitive(2),
        ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4),
        ModelGrammar.TokenPrimitive(5),
        ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("compact"),
        ModelGrammar.TokenArrayBeginUnnamed,
        ModelGrammar.TokenPrimitive(1),
        ModelGrammar.TokenPrimitive(2),
        ModelGrammar.TokenPrimitive(3),
        ModelGrammar.TokenPrimitive(4),
        ModelGrammar.TokenPrimitive(5),
        ModelGrammar.TokenPrimitive(6),
        ModelGrammar.TokenPrimitive(7),
        ModelGrammar.TokenArrayEnd,
        ModelGrammar.TokenProperty("jsontext"),
        ModelGrammar.TokenPrimitive("{\"object with 1 member\":[\"array with 1 element\"]}"),
        ModelGrammar.TokenProperty("quotes"),
        ModelGrammar.TokenPrimitive("&#34; \u0022 %22 0x22 034 &#34;"),
        ModelGrammar.TokenProperty("/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"),
        ModelGrammar.TokenPrimitive("A key can be any string"),
        ModelGrammar.TokenObjectEnd,
        ModelGrammar.TokenPrimitive(0.5),
        ModelGrammar.TokenPrimitive(98.6),
        ModelGrammar.TokenPrimitive(99.44),
        ModelGrammar.TokenPrimitive(1066),
        ModelGrammar.TokenPrimitive(10.0),
        ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(0.1),
        ModelGrammar.TokenPrimitive(1.0),
        ModelGrammar.TokenPrimitive(2.0),
        ModelGrammar.TokenPrimitive(2.0),
        ModelGrammar.TokenPrimitive("rosebud"),
        ModelGrammar.TokenArrayEnd
    };

    var expected = new object[]
    {
        "JSON Test Pattern pass1",
        new Dictionary<string, object>
        {
            { "object with 1 member", new[] { "array with 1 element" } },
        },
        new Dictionary<string, object>(),
        new object[0],
        -42,
        true,
        false,
        null,
        new Dictionary<string, object>
        {
            { "integer", 1234567890 },
            { "real", -9876.543210 },
            { "e", 0.123456789e-12 },
            { "E", 1.234567890E+34 },
            { "", 23456789012E66 },
            { "zero", 0 },
            { "one", 1 },
            { "space", " " },
            { "quote", "\"" },
            { "backslash", "\\" },
            { "controls", "\b\f\n\r\t" },
            { "slash", "/ & /" },
            { "alpha", "abcdefghijklmnopqrstuvwyz" },
            { "ALPHA", "ABCDEFGHIJKLMNOPQRSTUVWYZ" },
            { "digit", "0123456789" },
            { "0123456789", "digit" },
            { "special", "`1~!@#$%^&*()_+-={':[,]}|;.</>?" },
            { "hex", "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A" },
            { "true", true },
            { "false", false },
            { "null", null },
            { "array", new object[0] },
            { "object", new Dictionary<string, object>() },
            { "address", "50 St. James Street" },
            { "url", "http://www.JSON.org/" },
            { "comment", "// /* <!-- --" },
            { "# -- --> */", " " },
            { " s p a c e d ", new [] { 1,2,3,4,5,6,7 } },
            { "compact", new [] { 1,2,3,4,5,6,7 } },
            { "jsontext", "{\"object with 1 member\":[\"array with 1 element\"]}" },
            { "quotes", "&#34; \u0022 %22 0x22 034 &#34;" },
            { "/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?", "A key can be any string" }
        },
        0.5,
        98.6,
        99.44,
        1066,
        // exponent-notation spellings are value-equal to the plain doubles in the input
        1e1,
        0.1e1,
        1e-1,
        1e00,
        2e+00,
        2e-00,
        "rosebud"
    };

    var analyzer = new ModelAnalyzer(new DataReaderSettings());
    var actual = analyzer.Analyze(input).Cast<object[]>().Single();

    Assert.Equal(expected, actual, false);
}
public void Analyze_DynamicExample_ReturnsDynamicObject()
{
    // object tokens whose members should be bound onto a DynamicExample instance
    var tokens = new[]
    {
        ModelGrammar.TokenObjectBeginUnnamed,
        ModelGrammar.TokenProperty("foo"),
        ModelGrammar.TokenPrimitive("hello world"),
        ModelGrammar.TokenProperty("number"),
        ModelGrammar.TokenPrimitive(42),
        ModelGrammar.TokenProperty("boolean"),
        ModelGrammar.TokenPrimitive(false),
        ModelGrammar.TokenProperty("null"),
        ModelGrammar.TokenPrimitive(null),
        ModelGrammar.TokenObjectEnd
    };

    // build the expected instance through the dynamic binder
    dynamic expected = new DynamicExample();
    expected.foo = "hello world";
    expected.number = 42;
    expected.boolean = false;
    expected.@null = null;

    var analyzer = new ModelAnalyzer(new DataReaderSettings());
    var result = analyzer.Analyze<DynamicExample>(tokens).Single();

    // compare the underlying value collections rather than the dynamic wrappers
    Assert.Equal(expected.Values, result.Values, false);
}