public static RunScriptResult RunScript(string script, string connectionString) { var runErrors = new List <string>(); using (TextReader reader = new StringReader(script)) { TSqlParser parser = new TSql110Parser(true); TSqlFragment fragment = parser.Parse(reader, out var errors); if (errors != null && errors.Count > 0) { foreach (ParseError error in errors) { runErrors.Add($"Line: {error.Line}, Column: {error.Column}: {error.Message}"); } return(new RunScriptResult { Errors = runErrors.ToArray() }); } SqlScriptGenerator sqlScriptGenerator = new Sql110ScriptGenerator(); if (!(fragment is TSqlScript sqlScript)) { sqlScriptGenerator.GenerateScript(fragment, out var sql); if (!ExecuteSql(sql, connectionString, runErrors)) { return(new RunScriptResult { Errors = runErrors.ToArray() }); } }
/// <summary>
/// Parses a single T-SQL statement and, when it parses cleanly, walks the
/// resulting syntax tree with this visitor (<c>Accept(this)</c>).
/// Parse errors are written to the console and abort the traversal.
/// </summary>
/// <param name="statement">The T-SQL text to parse.</param>
public void ExtractInfo(string statement)
{
    TSqlFragment sqlFragment;
    IList<ParseError> errors;
    TSql110Parser parser = new TSql110Parser(true); // true = QUOTED_IDENTIFIER ON

    using (StringReader reader = new StringReader(statement))
    {
        sqlFragment = parser.Parse(reader, out errors);
        if (errors != null && errors.Count > 0)
        {
            foreach (ParseError error in errors)
            {
                Console.WriteLine(error.Message);
            }
            return;
        }
    }

    // Dispatch this visitor over the parsed tree.
    // (A stale commented-out token-dump experiment was removed here.)
    sqlFragment.Accept(this);
}
/// <summary>
/// Parses the SQL text of <paramref name="script"/> and counts its statements
/// with a <c>SqlVisitor</c>. Returns 0 (after logging) on parse errors or exceptions.
/// </summary>
/// <param name="script">The code unit whose <c>Code</c> is parsed.</param>
/// <returns>The statement count, or 0 when parsing fails.</returns>
private static int GetStatementCountFromFile(CodeUnit script)
{
    try
    {
        var parser = new TSql110Parser(true);
        // Dispose the reader deterministically (it was previously leaked).
        using (TextReader reader = new StringReader(script.Code))
        {
            IList<ParseError> errors;
            var fragment = parser.Parse(reader, out errors);
            if (errors != null && errors.Count > 0)
            {
                Console.WriteLine("Error unable to parse script file: \"{0}\"", script.Name);
                foreach (var error in errors)
                {
                    Console.WriteLine(error.Message);
                }
                return 0;
            }

            var visitor = new SqlVisitor(script.Name);
            fragment.AcceptChildren(visitor);
            return visitor.StatementCount;
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error parsing script: \"{0}\" error: \"{1}\"", script.Name, ex.Message);
        return 0;
    }
}
/// <summary>
/// Parses <paramref name="str"/> as T-SQL (QUOTED_IDENTIFIER OFF) and builds
/// this object's tree via <c>TreeParser</c>. Throws <c>ParserError</c> with a
/// full listing when the text does not parse.
/// </summary>
/// <param name="str">The T-SQL text to load.</param>
public void load_string(string str)
{
    IList<ParseError> errors;
    TSqlFragment fragment;

    var parser = new TSql110Parser(false);
    using (var reader = new StringReader(str))
    {
        fragment = parser.Parse(reader, out errors);
    }

    if (errors.Count > 0)
    {
        var report = new StringBuilder();
        report.AppendLine($"There are {errors.Count} errors.");
        var index = 0;
        foreach (var error in errors)
        {
            report.AppendLine($"error {++index}\nLine {error.Line}\nColumn {error.Column}\nOffset {error.Offset}\nNumber {error.Number}\n{error.Message}");
        }
        throw new ParserError(report.ToString());
    }

    root = new TreeParser().parse(fragment);
}
/// <summary>
/// Parses the SQL text of <paramref name="script"/> and counts its statements
/// with a <c>SqlVisitor</c>. Returns 0 (after logging) on parse errors or exceptions.
/// </summary>
/// <param name="script">The code unit whose <c>Code</c> is parsed.</param>
/// <returns>The statement count, or 0 when parsing fails.</returns>
private static int GetStatementCountFromFile(CodeUnit script)
{
    try
    {
        var parser = new TSql110Parser(true);
        // The StringReader was never disposed; a using block fixes the leak.
        using (TextReader reader = new StringReader(script.Code))
        {
            IList<ParseError> errors;
            var fragment = parser.Parse(reader, out errors);
            if (errors != null && errors.Count > 0)
            {
                Console.WriteLine("Error unable to parse script file: \"{0}\"", script.Name);
                foreach (var error in errors)
                {
                    Console.WriteLine(error.Message);
                }
                return 0;
            }

            var visitor = new SqlVisitor(script.Name);
            fragment.AcceptChildren(visitor);
            return visitor.StatementCount;
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error parsing script: \"{0}\" error: \"{1}\"", script.Name, ex.Message);
        return 0;
    }
}
/// <summary>
/// Parses the SQL in textBox1 and warns the user when it contains anything
/// other than SELECT statements.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // using guarantees disposal; the reader was previously leaked on exception.
    using (TextReader rdr = new StringReader(textBox1.Text))
    {
        IList<ParseError> errors = null;
        TSql110Parser parser = new TSql110Parser(true);
        TSqlFragment tree = parser.Parse(rdr, out errors);

        foreach (ParseError err in errors)
        {
            Console.WriteLine(err.Message);
        }

        if (tree == null)
        {
            // Parser could not produce a tree at all; nothing to inspect
            // (previously this fell through into a NullReferenceException).
            return;
        }

        MyVisitor checker = new MyVisitor();
        tree.Accept(checker);

        if (!checker.containsOnlySelects)
        {
            MessageBox.Show("The code contains something other than SELECT statements!");
        }
        else
        {
            MessageBox.Show("Looks ok!");
        }
    }
}
/// <summary>
/// Parses the T-SQL file named by the first command-line argument and prints
/// every table, view and stored procedure it defines.
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 1) // Length instead of LINQ Count() on a plain array
    {
        Console.WriteLine("No input file specified");
        return;
    }

    FileInfo input = new FileInfo(args[0]);
    bool initialQuotedIdentifiers = false;
    TSqlParser parser = new TSql110Parser(initialQuotedIdentifiers);

    TSqlFragment fragment;
    IList<ParseError> errors;
    // using disposes the reader even if Parse throws (Close() was not in a finally).
    using (StreamReader sr = input.OpenText())
    {
        fragment = parser.Parse(sr, out errors);
    }

    if (errors.Count > 0)
    {
        Console.WriteLine("Parse {0} errors input stream", errors.Count);
        return;
    }

    TableVisitor tableVisitor = new TableVisitor();
    fragment.Accept(tableVisitor);
    ViewVisitor viewVisitor = new ViewVisitor();
    fragment.Accept(viewVisitor);
    ProcVisitor procVisitor = new ProcVisitor();
    fragment.Accept(procVisitor);

    foreach (var table in tableVisitor.Nodes)
    {
        Console.WriteLine("table {0}.{1}", table.SchemaObjectName.SchemaIdentifier.Value, table.SchemaObjectName.BaseIdentifier.Value);
    }
    foreach (var view in viewVisitor.Nodes)
    {
        Console.WriteLine("view {0}.{1}", view.SchemaObjectName.SchemaIdentifier.Value, view.SchemaObjectName.BaseIdentifier.Value);
    }
    foreach (var proc in procVisitor.Nodes)
    {
        Console.WriteLine("proc {0}.{1}", proc.ProcedureReference.Name.SchemaIdentifier.Value, proc.ProcedureReference.Name.BaseIdentifier.Value);
    }
}
/// <summary>
/// Parses the T-SQL file named by the first command-line argument and lists
/// the tables, views and stored procedures it defines.
/// </summary>
static void Main(string[] args)
{
    if (args.Count() < 1)
    {
        Console.WriteLine("No input file specified");
        return;
    }

    var inputFile = new FileInfo(args[0]);
    TSqlParser parser = new TSql110Parser(false /* initialQuotedIdentifiers */);

    StreamReader reader = inputFile.OpenText();
    IList<ParseError> parseErrors;
    TSqlFragment fragment = parser.Parse(reader, out parseErrors);
    reader.Close();

    if (parseErrors.Count > 0)
    {
        Console.WriteLine("Parse {0} errors input stream", parseErrors.Count);
        return;
    }

    // Collect objects with one visitor per kind, in the original order.
    var tables = new TableVisitor();
    fragment.Accept(tables);
    var views = new ViewVisitor();
    fragment.Accept(views);
    var procs = new ProcVisitor();
    fragment.Accept(procs);

    foreach (var table in tables.Nodes)
    {
        var name = table.SchemaObjectName;
        Console.WriteLine("table {0}.{1}", name.SchemaIdentifier.Value, name.BaseIdentifier.Value);
    }
    foreach (var view in views.Nodes)
    {
        var name = view.SchemaObjectName;
        Console.WriteLine("view {0}.{1}", name.SchemaIdentifier.Value, name.BaseIdentifier.Value);
    }
    foreach (var proc in procs.Nodes)
    {
        var name = proc.ProcedureReference.Name;
        Console.WriteLine("proc {0}.{1}", name.SchemaIdentifier.Value, name.BaseIdentifier.Value);
    }
}
private TSqlFragment InitiateTSql110Parser(string query) { Columns = new List <RbacSelectColumn>(); TablesReferred = new List <RbacTable>(); IList <ParseError> parseErrors; TSql110Parser parser = new TSql110Parser(true); TSqlFragment tree = parser.Parse(new StringReader(query), out parseErrors); ParseErrors = parseErrors; PrintErrors(); SyntaxError = ParseErrors.Count > 0 ? true : false; return(tree); }
/// <summary>
/// Scans the token stream of <paramref name="script"/> for the object kind
/// (PROCEDURE/SCHEMA) and the dotted object name that follows it.
/// </summary>
/// <param name="script">The T-SQL script text.</param>
/// <returns>The detail with Type and Name set, once a complete name is seen.</returns>
/// <exception cref="Exception">Thrown when no name could be extracted.</exception>
public static ScriptDetail GetScriptDetail(string script)
{
    var parser = new TSql110Parser(true);
    IList<ParseError> errors;
    TSqlFragment sqlFragment;
    // The StringReader was never disposed; wrap it in a using block.
    using (var reader = new StringReader(script))
    {
        sqlFragment = parser.Parse(reader, out errors);
    }

    var inName = false;
    var name = "";
    var result = new ScriptDetail();
    foreach (var a in sqlFragment.ScriptTokenStream)
    {
        switch (a.TokenType)
        {
            case TSqlTokenType.Procedure:
            case TSqlTokenType.Schema:
                result.Type = a.TokenType;
                break;

            case TSqlTokenType.AsciiStringOrQuotedIdentifier:
            case TSqlTokenType.QuotedIdentifier:
            case TSqlTokenType.Identifier:
            case TSqlTokenType.Dot:
                // Accumulate the (possibly dotted/quoted) object name.
                inName = true;
                name += a.Text;
                break;

            case TSqlTokenType.EndOfFile:
            case TSqlTokenType.WhiteSpace:
                // First whitespace/EOF after the name terminates it.
                if (inName)
                {
                    result.Name = name;
                    return result;
                }
                break;
        }
    }

    // result.Name is only assigned on the early return above, so reaching this
    // point means no name was found and the throw below always fires.
    if (result.Name == null)
    {
        throw new Exception(string.Format("could not get name from script. parsing error count: {0}, detail: {1}", errors.Count, get_errors(errors)));
    }
    return new ScriptDetail();
}
/// <summary>
/// Extracts the object kind (PROCEDURE/SCHEMA) and the dotted object name from
/// the token stream of <paramref name="script"/>; throws when no name is found.
/// </summary>
public static ScriptDetail GetScriptDetail(string script)
{
    var parser = new TSql110Parser(true);
    IList<ParseError> errors;
    var fragment = parser.Parse(new StringReader(script), out errors);

    var detail = new ScriptDetail();
    var objectName = "";
    var collectingName = false;

    foreach (var token in fragment.ScriptTokenStream)
    {
        var type = token.TokenType;
        if (type == TSqlTokenType.Procedure || type == TSqlTokenType.Schema)
        {
            detail.Type = type;
        }
        else if (type == TSqlTokenType.AsciiStringOrQuotedIdentifier
              || type == TSqlTokenType.QuotedIdentifier
              || type == TSqlTokenType.Identifier
              || type == TSqlTokenType.Dot)
        {
            // Keep appending name parts until something else shows up.
            collectingName = true;
            objectName += token.Text;
        }
        else if (type == TSqlTokenType.EndOfFile || type == TSqlTokenType.WhiteSpace)
        {
            // The first whitespace/EOF after a name completes it.
            if (collectingName)
            {
                detail.Name = objectName;
                return detail;
            }
        }
    }

    if (detail.Name == null)
    {
        throw new Exception(string.Format(
            "could not get name from script. parsing error count: {0}, detail: {1}",
            errors.Count, get_errors(errors)));
    }
    return new ScriptDetail();
}
/// <summary>
/// Returns <paramref name="sql"/> with all single-line and multi-line comments
/// removed, by re-joining every non-comment token of the parsed script.
/// NOTE(review): parse errors are deliberately ignored; on malformed input the
/// token stream may be incomplete.
/// </summary>
/// <param name="sql">The T-SQL text to strip.</param>
/// <returns>The comment-free script text.</returns>
public string StripCommentsFromSql(string sql)
{
    TSql110Parser parser = new TSql110Parser(true);
    IList<ParseError> errors;
    TSqlFragment fragments;
    // The StringReader was never disposed; wrap it in a using block.
    using (var reader = new StringReader(sql))
    {
        fragments = parser.Parse(reader, out errors);
    }

    // Rebuild the script text from every token that is not a comment.
    string result = string.Join(
        string.Empty,
        fragments.ScriptTokenStream
            .Where(x => x.TokenType != TSqlTokenType.MultilineComment)
            .Where(x => x.TokenType != TSqlTokenType.SingleLineComment)
            .Select(x => x.Text));
    return result;
}
/// <summary>
/// Parses a sample procedure file, prints any parse errors, and walks the
/// resulting tree with MyVisitor.
/// </summary>
static void Main(string[] args)
{
    // using guarantees disposal; Dispose() was previously skipped on exception.
    using (TextReader rdr = new StreamReader(@"c:\ScriptDom\sampleproc.sql"))
    {
        IList<ParseError> errors = null;
        TSql110Parser parser = new TSql110Parser(true);
        TSqlFragment tree = parser.Parse(rdr, out errors);

        foreach (ParseError err in errors)
        {
            Console.WriteLine(err.Message);
        }

        // Guard: Parse can return null on failure, which previously crashed here.
        if (tree != null)
        {
            tree.Accept(new MyVisitor());
        }
    }
}
/// <summary>
/// Parses <paramref name="query"/> as a statement list and hands it to
/// ConvertTSQLStatement for further processing.
/// </summary>
/// <param name="query">The T-SQL text to analyze.</param>
public void AnalyzeTSQLQuery(string query)
{
    var parser = new TSql110Parser(false);

    IList<ParseError> errors;
    TSqlFragment fullFragment = parser.Parse(new StringReader(query), out errors);
    var statementList = parser.ParseStatementList(new StringReader(query), out errors);

    IQueryable<TSqlParserToken> tokenQuery = statementList.ScriptTokenStream.AsQueryable();

    string keywordstat = "";
    bool isNextFlg = false;
    int statementID = 0;
    ConvertTSQLStatement(statementList, sqlstat, ref keywordstat, ref isNextFlg, ref statementID);
}
/// <summary>
/// Parses foo.sql, reports any parse errors, then gathers and dumps statement
/// statistics via SQLVisitor.
/// </summary>
static void Main(string[] args)
{
    TSql110Parser parser = new TSql110Parser(true);
    IList<ParseError> errors;
    TSqlFragment sqlFragment;
    // The StreamReader was never disposed; wrap it in a using block.
    using (TextReader txtRdr = new StreamReader("foo.sql"))
    {
        sqlFragment = parser.Parse(txtRdr, out errors);
    }

    // Report the parsing errors generated, if any (this was a TODO).
    foreach (ParseError error in errors)
    {
        Console.WriteLine("Line {0}, column {1}: {2}", error.Line, error.Column, error.Message);
    }

    SQLVisitor myVisitor = new SQLVisitor();
    sqlFragment.Accept(myVisitor);
    myVisitor.DumpStatistics();
}
/// <summary>
/// Reformats a T-SQL query: uppercase keywords, trailing semicolons, and the
/// FROM/WHERE/ORDER BY clauses each on their own line.
/// </summary>
/// <param name="query">The T-SQL text to format.</param>
/// <returns>The formatted script text.</returns>
public static string FormatSqlCode(string query)
{
    var parser = new TSql110Parser(false);
    IList<ParseError> errors;
    TSqlFragment parsedQuery;
    // The StringReader was never disposed; wrap it in a using block.
    using (var reader = new StringReader(query))
    {
        parsedQuery = parser.Parse(reader, out errors);
    }

    var generator = new Sql110ScriptGenerator(new SqlScriptGeneratorOptions()
    {
        KeywordCasing = KeywordCasing.Uppercase,
        IncludeSemicolons = true,
        NewLineBeforeFromClause = true,
        NewLineBeforeOrderByClause = true,
        NewLineBeforeWhereClause = true,
        AlignClauseBodies = false
    });

    string formattedQuery;
    generator.GenerateScript(parsedQuery, out formattedQuery);
    return formattedQuery;
}
/// <summary>
/// Parses a sample procedure file with ScriptDom and prints any parse errors.
/// </summary>
static void Main(string[] args)
{
    // before proceeding, add a reference to the ScriptDom assembly
    IList<ParseError> errors = null;
    // using guarantees disposal; Dispose() was previously skipped on exception.
    using (TextReader rdr = new StreamReader(@"c:\ScriptDom\sampleproc.sql"))
    {
        // pass the reader to the scriptdom
        TSql110Parser parser = new TSql110Parser(true);
        TSqlFragment tree = parser.Parse(rdr, out errors);

        foreach (ParseError err in errors)
        {
            Console.WriteLine(err.Message);
        }
    }
}
/// <summary>
/// Parses a sample procedure file, walks the tree with ScriptDomWalk (which
/// accumulates XML into the module-level <c>result</c>), and writes it out.
/// </summary>
static void Main(string[] args)
{
    TSqlFragment tree;
    IList<ParseError> errors = null;
    // using guarantees disposal; Dispose() was previously skipped on exception.
    using (TextReader rdr = new StreamReader(@"c:\ScriptDom\sampleproc.sql"))
    {
        TSql110Parser parser = new TSql110Parser(true);
        tree = parser.Parse(rdr, out errors);
    }

    foreach (ParseError err in errors)
    {
        Console.WriteLine(err.Message);
    }

    ScriptDomWalk(tree, "root");

    // Dispose() flushes and closes the writer; explicit Flush() removed.
    using (TextWriter wr = new StreamWriter(@"c:\temp\scrdom.xml"))
    {
        wr.Write(result);
    }
}
/// <summary>
/// Demo: parses a small script containing a CTE-based UPDATE and prints each
/// statement type plus the UPDATE target table.
/// </summary>
static void Main(string[] args)
{
    TSql110Parser parser = new TSql110Parser(true);
    IList<ParseError> errors;
    StatementList stmtList;
    // using guarantees reader disposal (previously leaked).
    using (TextReader rdr = new StreamReader(new MemoryStream(Encoding.UTF8.GetBytes(
        @"/* comment */ WITH CteUpdate AS ( SELECT * FROM Table1 WHERE Col1 = 1 ) UPDATE CteUpdate SET city = ""NY"" WHERE name = ""tom"""))))
    {
        stmtList = parser.ParseStatementList(rdr, out errors);
    }

    // Process errors: bail out instead of dereferencing a null statement list.
    if (errors.Count > 0 || stmtList == null)
    {
        foreach (ParseError err in errors)
        {
            Console.WriteLine(err.Message);
        }
        return;
    }

    foreach (TSqlStatement stmt in stmtList.Statements)
    {
        Console.WriteLine("Statement type {0}", stmt.GetType());
        if (stmt is SelectStatement)
        {
            // SELECT statements need no extra processing in this demo.
        }
        else if (stmt is UpdateStatement stmtUpdate)
        {
            // Process UPDATE statement: report its target object.
            NamedTableReference tabRef = (NamedTableReference)stmtUpdate.UpdateSpecification.Target;
            Console.Write(" > UPDATE statement > target object {0}", tabRef.SchemaObject.BaseIdentifier.Value);
        }
        else
        {
            // Any other statement type is unsupported in this demo.
            throw new NotImplementedException();
        }
    }
}
/// <summary>
/// Parses the SQL in textBox1 and writes the reformatted script to textBox2.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // using guarantees disposal; the reader was previously leaked on exception.
    using (TextReader rdr = new StringReader(textBox1.Text))
    {
        IList<ParseError> errors = null;
        TSql110Parser parser = new TSql110Parser(true);
        TSqlFragment tree = parser.Parse(rdr, out errors);

        foreach (ParseError err in errors)
        {
            Console.WriteLine(err.Message);
        }

        if (tree == null)
        {
            // Nothing to format when the parser failed outright
            // (previously GenerateScript would have thrown).
            return;
        }

        Sql110ScriptGenerator scrGen = new Sql110ScriptGenerator();
        string formattedSQL = null;
        scrGen.GenerateScript(tree, out formattedSQL);
        textBox2.Text = formattedSQL;
    }
}
/// <summary>
/// Parses the stored-procedure source and counts SELECT/INSERT/UPDATE/DELETE
/// statements by visiting the parsed tree with this visitor.
/// </summary>
/// <returns>Tuple of (SELECT, INSERT, UPDATE, DELETE) counts.</returns>
/// <exception cref="Exception">Thrown with all error messages when the source fails to parse.</exception>
public Tuple<int, int, int, int> DumpStatistics()
{
    TSql110Parser parser = new TSql110Parser(true);
    IList<ParseError> errors;
    using (TextReader sr = new StringReader(_storedProcedureSourceCode))
    {
        TSqlFragment sqlFragment = parser.Parse(sr, out errors);
        if (errors.Count > 0)
        {
            // BUG FIX: errors.ToString() produced the list's type name,
            // not the parse errors. Build a readable message instead.
            string message = "";
            foreach (ParseError error in errors)
            {
                message += error.Message + Environment.NewLine;
            }
            throw new Exception(message);
        }
        sqlFragment.Accept(this);
    }
    return new Tuple<int, int, int, int>(this.SELECTcount, this.INSERTcount, this.UPDATEcount, this.DELETEcount);
}
/// <summary>
/// Creates the wrapper's underlying T-SQL parser, targeting the SQL Server 2012
/// (110) grammar with QUOTED_IDENTIFIER enabled.
/// </summary>
public WSqlParser() { tsqlParser = new TSql110Parser(true); }
/// <summary>
/// Creates a T-SQL parser whose grammar matches the configured
/// [ParserCompatibilityLevel] app setting ("90", "100" or "110").
/// </summary>
/// <returns>The parser for the configured compatibility level.</returns>
/// <exception cref="ArgumentException">Unknown compatibility level.</exception>
private TSqlParser CreateSQLParser()
{
    string level = ConfigurationManager.AppSettings["ParserCompatibilityLevel"];
    switch (level)
    {
        case "90":
            return new TSql90Parser(true);
        case "100":
            return new TSql100Parser(true);
        case "110":
            return new TSql110Parser(true);
        default:
            throw new ArgumentException("Invalid argument provided for [ParserCompatibilityLevel] configuration setting!");
    }
}
/// <summary>
/// A simple T-SQL comment cleaner based on the SQLDOM that ships with SQL Management Studio / DACFX.
/// After parsing the input file, we just loop through the input token stream, ignore any comment related token
/// and write out the other tokens to the output file.
/// </summary>
/// <param name="sourceFile">Path of the script to clean.</param>
/// <param name="destFile">Path the comment-free script is written to.</param>
/// <param name="compatLevel">SQL Server compatibility level (80-140); anything else falls back to 110.</param>
/// <returns>true when the file was cleaned; false when the source did not parse.</returns>
static bool CleanSQLScript(
    string sourceFile,
    string destFile,
    int compatLevel)
{
    using (var srcRdr = new StreamReader(sourceFile))
    {
        TSqlParser parser;
        switch (compatLevel)
        {
            case 80:  parser = new TSql80Parser(true);  break; // SQL 2000
            case 90:  parser = new TSql90Parser(true);  break; // SQL 2005
            case 100: parser = new TSql100Parser(true); break; // SQL 2008 / R2
            case 110: parser = new TSql110Parser(true); break; // SQL 2012
            case 120: parser = new TSql120Parser(true); break; // SQL 2014
            case 130: parser = new TSql130Parser(true); break; // SQL 2016
            case 140: parser = new TSql140Parser(true); break; // SQL 2017
            default:  parser = new TSql110Parser(true); break; // fallback: SQL 2012
        }

        IList<ParseError> errors;
        var tree = parser.Parse(srcRdr, out errors);
        if (errors.Count > 0)
        {
            // Report parse errors (this was a TODO in the original).
            Console.WriteLine("Errors when parsing T-SQL");
            foreach (var error in errors)
            {
                Console.WriteLine("  Line {0}, column {1}: {2}", error.Line, error.Column, error.Message);
            }
            return false;
        }

        using (var writer = new StreamWriter(destFile))
        {
            foreach (var tok in tree.ScriptTokenStream)
            {
                // ignore all comments
                if (tok.TokenType != TSqlTokenType.MultilineComment &&
                    tok.TokenType != TSqlTokenType.SingleLineComment)
                {
                    writer.Write(tok.Text);
                }
            }
            // Dispose() flushes and closes; the explicit Flush()/Close() were redundant.
        }
    }
    return true;
}
/// <summary>
/// Parses <paramref name="sql"/>, honoring the scripted database's
/// QUOTED_IDENTIFIER property, and walks the tree with a ZenVisitor bound to
/// this instance.
/// </summary>
/// <param name="sql">The T-SQL text to parse.</param>
public void ParseSql(string sql)
{
    // QUOTED_IDENTIFIER comes from the database properties via FindProp.
    TSqlParser parser = new TSql110Parser(FindProp("QUOTED_IDENTIFIER").Value == "ON");
    IList<ParseError> errors = null;
    TSqlFragment fragment;
    // The StringReader was never disposed; wrap it in a using block.
    using (var reader = new StringReader(sql))
    {
        fragment = parser.Parse(reader, out errors);
    }
    fragment.Accept(new ZenVisitor(this));
}
/// <summary>
/// Parses a CREATE TABLE statement. The parser first replaces column annotations with white space,
/// then uses T-SQL parser to parse it, and finally interprets the column annotations.
/// </summary>
/// <param name="queryStr">The CREATE TABLE statement creating a node table</param>
/// <param name="nodeTableColumns">A list of columns of the node table</param>
/// <param name="errors">Parsing errors</param>
/// <returns>The syntax tree of the CREATE TABLE statement, or null on any parse error</returns>
public WSqlFragment ParseCreateNodeTableStatement(
    string queryStr,
    out List<WNodeTableColumn> nodeTableColumns,
    out IList<ParseError> errors)
{
    // Gets token stream
    var tsqlParser = new TSql110Parser(true);
    var sr = new StringReader(queryStr);
    var tokens = new List<TSqlParserToken>(tsqlParser.GetTokenStream(sr, out errors));
    if (errors.Count > 0)
    {
        nodeTableColumns = null;
        return null;
    }

    // Retrieves node table columns: ParseNodeTableColumn advances currentToken
    // past each annotated column it recognizes; other tokens are skipped one by one.
    var currentToken = 0;
    var farestError = 0;
    nodeTableColumns = new List<WNodeTableColumn>();
    while (currentToken < tokens.Count)
    {
        WNodeTableColumn column = null;
        if (ParseNodeTableColumn(tokens, ref currentToken, ref column, ref farestError))
            nodeTableColumns.Add(column);
        else
            currentToken++;
    }

    // Replaces column annotations with whitespace so the standard parser accepts the text
    foreach (var t in nodeTableColumns)
    {
        tokens[t.FirstTokenIndex].TokenType = TSqlTokenType.WhiteSpace;
        tokens[t.FirstTokenIndex].Text = "";
    }

    // Parses the remaining statement using the T-SQL parser
    //IList<ParseError> errors;
    var parser = new WSqlParser();
    var fragment = parser.Parse(tokens, out errors) as WSqlScript;
    if (errors.Count > 0)
        return null;

    // In addition to columns specified in the CREATE TABLE statement,
    // adds an additional column recording the incoming degree of nodes.
    var inDegreeCol = new WColumnDefinition
    {
        ColumnIdentifier = new Identifier { Value = "InDegree" },
        Constraints = new List<WConstraintDefinition> { new WNullableConstraintDefinition { Nullable = false } },
        DataType = new WParameterizedDataTypeReference
        {
            Name = new WSchemaObjectName(new Identifier { Value = "int" }),
        },
        DefaultConstraint = new WDefaultConstraintDefinition
        {
            Expression = new WValueExpression { Value = "0" }
        }
    };

    var deltaColumnDefList = new List<WColumnDefinition>();
    WCreateTableStatement stmt = fragment.Batches[0].Statements[0] as WCreateTableStatement;
    if (stmt == null || stmt.Definition == null || stmt.Definition.ColumnDefinitions == null)
    {
        return null;
    }
    else if (stmt.Definition.ColumnDefinitions.Count != nodeTableColumns.Count)
    {
        // Every column must carry an annotation; report the mismatch as a parse error.
        var error = tokens[stmt.FirstTokenIndex];
        errors.Add(new ParseError(0, error.Offset, error.Line, error.Column,
            "Metadata should be specified for each column when creating a node table"));
    }

    var graphColIndex = 0;
    var rawColumnDef = stmt.Definition.ColumnDefinitions;
    for (var i = 0; i < rawColumnDef.Count && graphColIndex < nodeTableColumns.Count; ++i, ++graphColIndex)
    {
        var nextGraphColumn = nodeTableColumns[graphColIndex];
        // Skips columns without annotations
        while (i < rawColumnDef.Count && rawColumnDef[i].LastTokenIndex < nextGraphColumn.FirstTokenIndex)
        {
            ++i;
        }

        switch (nextGraphColumn.ColumnRole)
        {
            case WNodeTableColumnRole.Edge:
                // For an adjacency-list column, its data type is always varbinary(max)
                var def = rawColumnDef[i];
                def.DataType = new WParameterizedDataTypeReference
                {
                    Name = new WSchemaObjectName(new Identifier { Value = "varbinary" }),
                    Parameters = new List<Literal> { new MaxLiteral { Value = "max" } }
                };
                def.Constraints.Add(new WNullableConstraintDefinition { Nullable = false });
                def.DefaultConstraint = new WDefaultConstraintDefinition
                {
                    Expression = new WValueExpression { Value = "0x" }
                };

                // For each adjacency-list column, adds a "delta" column to
                // facilitate deleting edges.
                deltaColumnDefList.Add(new WColumnDefinition
                {
                    ColumnIdentifier = new Identifier { Value = def.ColumnIdentifier.Value + "DeleteCol" },
                    ComputedColumnExpression = def.ComputedColumnExpression,
                    Constraints = def.Constraints,
                    DataType = def.DataType,
                    DefaultConstraint = def.DefaultConstraint,
                });

                // For each adjacency-list column, adds an integer column to record the list's outgoing degree
                deltaColumnDefList.Add(new WColumnDefinition
                {
                    ColumnIdentifier = new Identifier { Value = def.ColumnIdentifier.Value + "OutDegree" },
                    Constraints = def.Constraints,
                    DataType = new WParameterizedDataTypeReference
                    {
                        Name = new WSchemaObjectName(new Identifier { Value = "int" }),
                    },
                    DefaultConstraint = new WDefaultConstraintDefinition
                    {
                        Expression = new WValueExpression { Value = "0" }
                    }
                });
                break;
            case WNodeTableColumnRole.NodeId:
                // set unique key to user defined node id
                bool containNullableConstraint = false;
                foreach (var con in rawColumnDef[i].Constraints)
                {
                    var nullableConstraint = con as WNullableConstraintDefinition;
                    if (nullableConstraint != null)
                    {
                        containNullableConstraint = true;
                        nullableConstraint.Nullable = false;
                        break;
                    }
                }
                if (!containNullableConstraint)
                {
                    rawColumnDef[i].Constraints.Add(new WNullableConstraintDefinition { Nullable = false });
                }
                rawColumnDef[i].Constraints.Add(new WUniqueConstraintDefinition
                {
                    Clustered = false,
                    IsPrimaryKey = false,
                });
                break;
        }
    }

    // Adds a GlobalNodeID column to the node table.
    // This column is the primary key of the node table.
    var globalNodeIdCol = new WColumnDefinition
    {
        ColumnIdentifier = new Identifier { Value = "GlobalNodeId" },
        DataType = new WParameterizedDataTypeReference
        {
            Name = new WSchemaObjectName(new Identifier { Value = "bigint" }),
        },
        Constraints = new List<WConstraintDefinition>
        {
            new WUniqueConstraintDefinition
            {
                Clustered = true,
                IsPrimaryKey = true,
                // Constraint name: <schema or "dbo"><table>_PK_GlobalNodeId
                ConstraintIdentifier = new Identifier
                {
                    Value = (stmt.SchemaObjectName.SchemaIdentifier == null ?
                        "dbo" : stmt.SchemaObjectName.SchemaIdentifier.Value) +
                        stmt.SchemaObjectName.BaseIdentifier.Value + "_PK_GlobalNodeId"
                }
            }
        },
        IdentityOptions = new WIdentityOptions
        {
            IdentitySeed = new WValueExpression("1", false),
            IdentityIncrement = new WValueExpression("1", false),
        },
    };

    // Adds an identity column to the node table.
    // This column will be used to adjust size estimation.
    var identityCol = new WColumnDefinition
    {
        ColumnIdentifier = new Identifier { Value = "LocalNodeId" },
        DataType = new WParameterizedDataTypeReference
        {
            Name = new WSchemaObjectName(new Identifier { Value = "int" }),
        },
        DefaultConstraint = new WDefaultConstraintDefinition
        {
            // Default value: CHECKSUM(NEWID())
            Expression = new WFunctionCall
            {
                FunctionName = new Identifier { Value = "CHECKSUM" },
                Parameters = new List<WScalarExpression>
                {
                    new WFunctionCall
                    {
                        FunctionName = new Identifier { Value = "NEWID" },
                        Parameters = new List<WScalarExpression>()
                    }
                }
            }
        }
    };

    // Append the synthesized columns to the user-specified ones.
    foreach (var definition in deltaColumnDefList)
    {
        stmt.Definition.ColumnDefinitions.Add(definition);
    }
    stmt.Definition.ColumnDefinitions.Add(globalNodeIdCol);
    stmt.Definition.ColumnDefinitions.Add(identityCol);
    stmt.Definition.ColumnDefinitions.Add(inDegreeCol);
    return fragment;
}
/// <summary>
/// Parses a ALTER TABLE ADD PROPERTY/EDGE statement. The parser first replaces column annotations with white space,
/// then uses T-SQL parser to parse it, and finally interprets the column annotations.
/// </summary>
/// <param name="queryStr">The ALTER TABLE statement extending a node table</param>
/// <param name="nodeTableColumns">A list of columns of the node table</param>
/// <param name="errors">Parsing errors</param>
/// <returns>The syntax tree of the ALTER TABLE statement, or null on any parse error</returns>
public WSqlFragment ParseAlterTableAddNodeTableColumnStatement(
    string queryStr,
    out List<WNodeTableColumn> nodeTableColumns,
    out IList<ParseError> errors)
{
    // Gets token stream
    var tsqlParser = new TSql110Parser(true);
    var sr = new StringReader(queryStr);
    var tokens = new List<TSqlParserToken>(tsqlParser.GetTokenStream(sr, out errors));
    if (errors.Count > 0)
    {
        nodeTableColumns = null;
        return null;
    }

    // Retrieves node table columns (annotated columns); other tokens are skipped.
    var currentToken = 0;
    var farestError = 0;
    nodeTableColumns = new List<WNodeTableColumn>();
    while (currentToken < tokens.Count)
    {
        WNodeTableColumn column = null;
        if (ParseNodeTableColumn(tokens, ref currentToken, ref column, ref farestError))
            nodeTableColumns.Add(column);
        else
            currentToken++;
    }

    // Replaces column annotations with whitespace so the standard parser accepts the text
    foreach (var t in nodeTableColumns)
    {
        tokens[t.FirstTokenIndex].TokenType = TSqlTokenType.WhiteSpace;
        tokens[t.FirstTokenIndex].Text = "";
    }

    // Parses the remaining statement using the T-SQL parser
    //IList<ParseError> errors;
    var parser = new WSqlParser();
    var fragment = parser.Parse(tokens, out errors) as WSqlScript;
    if (errors.Count > 0)
        return null;

    var deltaColumnDefList = new List<WColumnDefinition>();
    var stmt = fragment.Batches[0].Statements[0] as WAlterTableAddTableElementStatement;
    if (stmt == null || stmt.Definition == null || stmt.Definition.ColumnDefinitions == null)
    {
        return null;
    }
    else if (stmt.Definition.ColumnDefinitions.Count != nodeTableColumns.Count)
    {
        // Every added column must carry an annotation; report the mismatch.
        var error =
            tokens[stmt.FirstTokenIndex];
        errors.Add(new ParseError(0, error.Offset, error.Line, error.Column,
            "Metadata should be specified for each column when altering a node table"));
    }

    var graphColIndex = 0;
    var rawColumnDef = stmt.Definition.ColumnDefinitions;
    for (var i = 0; i < rawColumnDef.Count && graphColIndex < nodeTableColumns.Count; ++i, ++graphColIndex)
    {
        var nextGraphColumn = nodeTableColumns[graphColIndex];
        // Skips columns without annotations
        while (i < rawColumnDef.Count && rawColumnDef[i].LastTokenIndex < nextGraphColumn.FirstTokenIndex)
        {
            ++i;
        }
        switch (nextGraphColumn.ColumnRole)
        {
            case WNodeTableColumnRole.Edge:
                // For an adjacency-list column, its data type is always varbinary(max)
                var def = rawColumnDef[i];
                def.DataType = new WParameterizedDataTypeReference
                {
                    Name = new WSchemaObjectName(new Identifier { Value = "varbinary" }),
                    Parameters = new List<Literal> { new MaxLiteral { Value = "max" } }
                };
                def.Constraints.Add(new WNullableConstraintDefinition { Nullable = false });
                def.DefaultConstraint = new WDefaultConstraintDefinition
                {
                    Expression = new WValueExpression { Value = "0x" }
                };
                // For each adjacency-list column, adds a "delta" column to
                // facilitate deleting edges.
                deltaColumnDefList.Add(new WColumnDefinition
                {
                    ColumnIdentifier = new Identifier { Value = def.ColumnIdentifier.Value + "DeleteCol" },
                    ComputedColumnExpression = def.ComputedColumnExpression,
                    Constraints = def.Constraints,
                    DataType = def.DataType,
                    DefaultConstraint = def.DefaultConstraint,
                });
                // For each adjacency-list column, adds an integer column to record the list's outgoing degree
                deltaColumnDefList.Add(new WColumnDefinition
                {
                    ColumnIdentifier = new Identifier { Value = def.ColumnIdentifier.Value + "OutDegree" },
                    Constraints = def.Constraints,
                    DataType = new WParameterizedDataTypeReference
                    {
                        Name = new WSchemaObjectName(new Identifier { Value = "int" }),
                    },
                    DefaultConstraint = new WDefaultConstraintDefinition
                    {
                        Expression = new WValueExpression { Value = "0" }
                    }
                });
                break;
            case WNodeTableColumnRole.Property:
                // Plain properties need no rewriting.
                break;
            default:
                // Only edges and properties may be added via ALTER TABLE ADD.
                var error = tokens[nextGraphColumn.FirstTokenIndex];
                errors.Add(new ParseError(0, error.Offset, error.Line, error.Column,
                    "Only edge or property can be added to the node table"));
                break;
        }
    }

    foreach (var definition in deltaColumnDefList)
        stmt.Definition.ColumnDefinitions.Add(definition);
    return fragment;
}
/// <summary>
/// Parses a ALTER TABLE DROP COLUMN statement.
/// </summary>
/// <param name="queryStr">The ALTER TABLE DROP COLUMN statement text</param>
/// <param name="errors">Parsing errors</param>
/// <returns>The syntax tree of the statement, or null when tokenizing/parsing failed</returns>
public WSqlFragment ParseAlterTableDropNodeTableColumnStatement(
    string queryStr,
    out IList<ParseError> errors)
{
    // Gets token stream
    var tsqlParser = new TSql110Parser(true);
    List<TSqlParserToken> tokens;
    // The StringReader was never disposed; wrap it in a using block.
    using (var sr = new StringReader(queryStr))
    {
        tokens = new List<TSqlParserToken>(tsqlParser.GetTokenStream(sr, out errors));
    }
    if (errors.Count > 0)
        return null;

    // Parses the statement using the wrapper T-SQL parser
    var parser = new WSqlParser();
    var fragment = parser.Parse(tokens, out errors) as WSqlScript;
    if (errors.Count > 0)
        return null;
    return fragment;
}
/// <summary>
/// Parses a CREATE NODE VIEW / CREATE EDGE VIEW statement by rewriting the
/// non-standard NODE/EDGE keyword after CREATE into a marker comment, then
/// handing the token stream to the wrapper T-SQL parser.
/// </summary>
/// <param name="query">The CREATE NODE/EDGE VIEW statement text</param>
/// <param name="errors">Parsing errors</param>
/// <returns>The syntax tree, or null when tokenizing/parsing failed</returns>
/// <exception cref="SyntaxErrorException">CREATE is not followed by NODE or EDGE.</exception>
public WSqlFragment ParseCreateNodeEdgeViewStatement(string query, out IList<ParseError> errors)
{
    var tsqlParser = new TSql110Parser(true);
    var sr = new StringReader(query);
    var tokens = new List<TSqlParserToken>(tsqlParser.GetTokenStream(sr, out errors));
    if (errors.Count > 0)
    {
        return null;
    }

    int currentToken = 0;
    int farestError = 0;
    while (currentToken < tokens.Count)
    {
        int nextToken = currentToken;
        if (ReadToken(tokens, "create", ref nextToken, ref farestError))
        {
            int pos = nextToken;
            // Replace the NODE/EDGE keyword with a marker comment so the
            // standard grammar accepts the statement, while preserving intent.
            if (ReadToken(tokens, "node", ref nextToken, ref farestError))
            {
                tokens[pos].TokenType = TSqlTokenType.MultilineComment;
                tokens[pos].Text = "/*__GRAPHVIEW_CREATE_NODEVIEW*/";
            }
            else if (ReadToken(tokens, "edge", ref nextToken, ref farestError))
            {
                tokens[pos].TokenType = TSqlTokenType.MultilineComment;
                tokens[pos].Text = "/*__GRAPHVIEW_CREATE_EDGEVIEW*/";
            }
            else
            {
                var error = tokens[farestError];
                throw new SyntaxErrorException(error.Line, error.Text);
                //errors.Add(new ParseError(0, error.Offset, error.Line, error.Column,
                //    string.Format("Incorrect syntax near {0}", error.Text)));
            }
        }
        currentToken++;
    }

    // Parses the rewritten token stream with the wrapper T-SQL parser.
    var parser = new WSqlParser();
    var fragment = parser.Parse(tokens, out errors) as WSqlScript;
    if (errors.Count > 0)
        return null;
    return fragment;
}