/// <summary>
/// Parses an AScript <paramref name="script"/> with the ANTLR-generated parser, executes the
/// resulting statements through a Spark-backed <see cref="Analyzer"/>, and writes the final
/// data cube as JSON to <paramref name="outputPath"/>. Parse errors are surfaced via the
/// <c>Errors</c> property instead of throwing.
/// </summary>
/// <param name="name">Name passed to the <see cref="Analyzer"/> instance.</param>
/// <param name="script">AScript source text to compile and execute.</param>
/// <param name="conn">Connection string handed to <c>Analyzer.Init</c>.</param>
/// <param name="outputPath">Directory that receives the JSON result and a <c>log.txt</c> completion marker.</param>
/// <param name="scriptAssemblyPath">Directory containing the microsoft-spark worker jar.</param>
public void Compile(string name, string script, string conn, string outputPath, string scriptAssemblyPath)
{
    Analyzer a = new Analyzer(name, Analyzer.OutputFormat.JSON);
    try
    {
        // The .NET Spark backend listens on this fixed port for the debug runner started below.
        Environment.SetEnvironmentVariable("DOTNETBACKEND_PORT", "5567");

        // Launch the Spark .NET runner in debug mode; the analyzer connects via the port above.
        // NOTE(review): the jar version is hard-coded — confirm it matches the deployed worker.
        ExecuteCommand("cmd", @"%SPARK_HOME%\bin\spark-submit --class org.apache.spark.deploy.dotnet.DotnetRunner --master local " + scriptAssemblyPath + @"\microsoft-spark-2.4.x-0.10.0.jar\ debug");

        // Lex and parse the script; MyErrorStrategy collects errors instead of aborting.
        AntlrInputStream inputStream = new AntlrInputStream(script);
        AScriptLexer lexer = new AScriptLexer(inputStream);
        CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
        AScriptParser expPar = new AScriptParser(commonTokenStream);
        expPar.ErrorHandler = new MyErrorStrategy();
        AScriptParser.StatementContext getContext = expPar.statement();

        // Report parse errors to the caller and bail out before any execution.
        if (expPar.Errors != null)
        {
            Errors = expPar.Errors;
            return;
        }
        Errors = new List<string>();

        visitor.Visit(getContext);

        // Forward the script parameters collected by the visitor to the analyzer.
        foreach (KeyValuePair<string, string> s in this.Param.ToList())
        {
            a.Param.Add(s.Key, s.Value);
        }

        // Variable/parameter declarations must be registered before Init.
        var varStates = visitor.DataCubes.Where(x => x.StatementType == DataStatement.VARIABLE || x.StatementType == DataStatement.PARAM);
        foreach (DataCube dc in varStates)
        {
            a.AddStatement(dc);
        }

        // Imports (IMP), data references (DR) and namespaces (NS) are pre-statements.
        var preDC = visitor.DataCubes.Where(x => x.StatementType == DataStatement.IMP || x.StatementType == DataStatement.DR || x.StatementType == DataStatement.NS);
        foreach (DataCube dc in preDC)
        {
            a.AddPreStatement(dc);
        }

        a.Init(conn);

        // BUG FIX: the original iterated over ALL of visitor.DataCubes here, re-adding the
        // VARIABLE/PARAM statements already registered above — the filtered `mainstate`
        // query was computed but never used. Execute only the main statements.
        var mainstate = visitor.DataCubes.Where(x => x.StatementType != DataStatement.VARIABLE && x.StatementType != DataStatement.PARAM);
        foreach (DataCube dc in mainstate)
        {
            a.AddStatement(dc);
        }

        // Collapse the last produced data cube to a single JSON file, then mark completion.
        a.DataSchema[a.DataSchema.Count - 1].Data.Coalesce(1).Write().Mode(Microsoft.Spark.Sql.SaveMode.Overwrite).Json(outputPath);
        a.DataSchema.Clear();
        System.IO.File.WriteAllText(outputPath + @"/log.txt", "execution completed");

        // BUG FIX: the redundant a.Stop() that used to sit here was removed — the finally
        // block below already stops the analyzer on every path, so it ran twice on success.
    }
    finally
    {
        // BUG FIX: the original `catch (Exception ex) { throw ex; }` was removed — it only
        // rethrew while resetting the stack trace. Letting the exception propagate untouched
        // preserves the original trace; cleanup still runs here on every path.
        a.Stop();
    }
}