/// <summary>
/// Resolves the destination DataSet for a return action: an existing global chunk or
/// static table when appending (INSERT), or a freshly created one sized to the output schema.
/// </summary>
/// <param name="Enviro">Workspace supplying the chunk heap, connections and static tables.</param>
/// <param name="Nodes">Output expression set; its Columns define the schema for new targets.</param>
/// <param name="context">Parsed return-action clause naming the destination table.</param>
/// <returns>The DataSet that records should be written to.</returns>
/// <exception cref="HScriptCompileException">
/// Thrown when appending to a chunk or static table that does not exist.
/// </exception>
public static DataSet GetData(Workspace Enviro, FNodeSet Nodes, HScriptParser.Return_actionContext context)
{

    // Destination name; a missing database qualifier means the global chunk heap //
    string name = context.full_table_name().table_name().IDENTIFIER().GetText();
    string db = (context.full_table_name().database_name() == null)
        ? "global"
        : context.full_table_name().database_name().GetText();

    // INSERT means append to an existing target (was a redundant '? true : false') //
    bool appendto = context.K_INSERT() != null;

    // Global -- Append //
    if (context.full_table_name().database_name() == null && appendto)
    {
        if (Enviro.ChunkHeap.Exists(name))
            return Enviro.ChunkHeap[name];
        throw new HScriptCompileException(string.Format("Chunk '{0}' does not exist", name));
    }

    // Static -- Append //
    if (appendto)
    {
        string fullname = db + "." + name;
        if (Enviro.Exists(db, name))
            return Enviro.GetStaticTable(db, name);
        throw new HScriptCompileException(string.Format("Table '{0}' does not exist", fullname));
    }

    // Global -- Create New: allocate a record set on the chunk heap //
    if (context.full_table_name().database_name() == null)
    {
        RecordSet data = new RecordSet(Nodes.Columns);
        Enviro.ChunkHeap.Reallocate(name, data);
        return data;
    }

    // Static -- Create New: back the table with the connection's directory //
    string dir = Enviro.Connections[db];
    Table t = new Table(dir, name, Nodes.Columns);
    return t;

}
// Reads //

/// <summary>
/// Compiles a staged read (a read with optional init/main/final action stages) into a
/// StagedReadName. A stream register is wired to a filtered reader so that field
/// references in expressions resolve against the current record as the reader advances.
/// </summary>
/// <param name="Home">Workspace supplying tables, connections and memory.</param>
/// <param name="context">Parsed staged-read statement.</param>
/// <returns>A StagedReadName bundling the reader with the three compiled action stages.</returns>
public static StagedReadPlan RenderStagedReadPlan(Workspace Home, HScriptParser.Crudam_readContext context)
{

    // Get the data source; the alias defaults to the table's own name when no AS clause //
    DataSet data = VisitorHelper.GetData(Home, context.full_table_name());
    string alias = (context.K_AS() != null) ? context.IDENTIFIER().GetText() : data.Name;

    // Create a local heap to work off of //
    MemoryStruct local_heap = new MemoryStruct(true);

    // Create a record register; its stream is attached below once the reader exists //
    StreamRegister memory = new StreamRegister(null);

    // Create expression visitor bound to the local heap, alias, source schema and register //
    ExpressionVisitor exp_vis = new ExpressionVisitor(local_heap, Home, alias, data.Columns, memory);

    // Where clause //
    Predicate where = VisitorHelper.GetWhere(exp_vis, context.where_clause());

    // Create a reader over the rows passing the filter //
    RecordReader reader = data.OpenReader(where);

    // Attach the reader to the register so expressions see the current row //
    memory.BaseStream = reader;

    // Create the action visitor //
    ActionVisitor act_vis = new ActionVisitor(Home, local_heap, exp_vis);

    // Get the declarations //
    if (context.crudam_declare_many() != null)
    {
        VisitorHelper.AllocateMemory(Home, local_heap, exp_vis, context.crudam_declare_many());
    }

    // Get the initial actionset; a no-op node when the stage is absent //
    TNode pre_run = (context.init_action() != null) ? act_vis.ToNode(context.init_action().query_action()) : new TNodeNothing(null);

    // Get the main actionset //
    TNode run = act_vis.ToNode(context.main_action().query_action());

    // Get the final actionset; a no-op node when the stage is absent //
    TNode post_run = (context.final_action() != null) ? act_vis.ToNode(context.final_action().query_action()) : new TNodeNothing(null);

    return new StagedReadName(reader, pre_run, run, post_run);

}
/// <summary>
/// Compiles a fast (action-free, single-pass) read into a FastReadPlan: a filtered
/// scan of one source that projects a set of expressions into an output cursor.
/// </summary>
/// <param name="Home">Workspace supplying tables and connections.</param>
/// <param name="context">Parsed fast-read statement.</param>
/// <returns>A FastReadPlan over the source, filter, projection and output writer.</returns>
public static FastReadPlan RenderFastReadPlan(Workspace Home, HScriptParser.Crudam_read_fastContext context)
{

    // Resolve the source table and the alias it is queried under //
    DataSet source = VisitorHelper.GetData(Home, context.full_table_name());
    string alias = (context.K_AS() == null) ? source.Name : context.IDENTIFIER().GetText();

    // Register plus an expression visitor bound to the source schema //
    StreamRegister register = new StreamRegister(null);
    ExpressionVisitor expressions = new ExpressionVisitor(new MemoryStruct(true), Home, alias, source.Columns, register);

    // Compile the filter, open a reader over it, and wire the reader into the register //
    Predicate filter = VisitorHelper.GetWhere(expressions, context.where_clause());
    RecordReader stream = source.OpenReader(filter);
    register.BaseStream = stream;

    // Projection fields and the destination cursor they are written through //
    FNodeSet fields = VisitorHelper.GetReturnStatement(expressions, context.return_action().expression_or_wildcard_set());
    RecordWriter output = VisitorHelper.GetWriter(Home, fields.Columns, context.return_action());

    return new FastReadPlan(source, filter, fields, output);

}
/// <summary>
/// Compiles a CREATE statement targeting the global chunk heap into a CreateChunkPlan.
/// </summary>
/// <param name="Home">Workspace that will own the new chunk.</param>
/// <param name="context">Parsed create-table statement.</param>
/// <returns>A CreateChunkPlan carrying the chunk name and its schema.</returns>
public static CreateChunkPlan RenderCreateChunk(Workspace Home, HScriptParser.Crudam_create_tableContext context)
{
    ExpressionVisitor exp_vis = new ExpressionVisitor(null, Home);

    // Build the schema //
    Schema columns = new Schema();
    foreach (HScriptParser.Create_table_unitContext ctx in context.create_table_unit())
    {
        // Hoisted: the original called ctx.expression() twice per column. Nullability
        // defaults to true; otherwise the declared boolean expression is evaluated once. //
        var nullable = ctx.expression();
        columns.Add(
            ctx.IDENTIFIER().GetText(),
            VisitorHelper.GetAffinity(ctx.type()),
            nullable == null || exp_vis.ToNode(nullable).Evaluate().valueBOOL,
            VisitorHelper.GetSize(ctx.type(), true));
    }

    string name = context.full_table_name().table_name().IDENTIFIER().GetText();
    return new CreateChunkPlan(name, columns, Home);
}
// Create //

/// <summary>
/// Compiles a CREATE TABLE statement targeting a static (on-disk) database into a
/// CreateTablePlan, including the per-chunk record capacity.
/// </summary>
/// <param name="Home">Workspace supplying database connections.</param>
/// <param name="context">Parsed create-table statement.</param>
/// <returns>A CreateTablePlan carrying the path, name, schema and chunk size.</returns>
public static CreateTablePlan RenderCreatePlan(Workspace Home, HScriptParser.Crudam_create_tableContext context)
{

    // Build visitor //
    ExpressionVisitor exp_vis = new ExpressionVisitor(null, Home);

    // Build the schema //
    Schema columns = new Schema();
    foreach (HScriptParser.Create_table_unitContext ctx in context.create_table_unit())
    {
        // Hoisted: the original called ctx.expression() twice per column. Nullability
        // defaults to true; otherwise the declared boolean expression is evaluated once. //
        var nullable = ctx.expression();
        columns.Add(
            ctx.IDENTIFIER().GetText(),
            VisitorHelper.GetAffinity(ctx.type()),
            nullable == null || exp_vis.ToNode(nullable).Evaluate().valueBOOL,
            VisitorHelper.GetSize(ctx.type(), true));
    }

    // NOTE(review): unlike GetData, database_name() is dereferenced without a null check —
    // a CREATE with no database qualifier would throw NullReferenceException here. Confirm
    // the grammar requires the qualifier on this statement before hardening.
    string name = context.full_table_name().table_name().GetText();
    string db = context.full_table_name().database_name().GetText();
    string db_path = Home.Connections[db];

    // Chunk size: an explicit size expression wins; otherwise estimate from the schema //
    long chunk_size = (context.create_table_size() == null)
        ? RecordSet.EstimateMaxRecords(columns)
        : exp_vis.ToNode(context.create_table_size().expression()).Evaluate().valueINT;

    return new CreateTablePlan(db_path, name, columns, (int)chunk_size);

}
// Update //

/// <summary>
/// Compiles an UPDATE statement into an UpdatePlan: the ordinals of the columns being
/// overwritten, the expressions producing their new values, and the row filter.
/// </summary>
/// <param name="Home">Workspace supplying tables.</param>
/// <param name="context">Parsed update statement.</param>
/// <returns>An UpdatePlan over the source, column keys, value expressions and filter.</returns>
public static UpdatePlan RenderUpdatePlan(Workspace Home, HScriptParser.Crudam_updateContext context)
{

    // Source table and an expression visitor bound to its schema //
    DataSet source = VisitorHelper.GetData(Home, context.full_table_name());
    ExpressionVisitor visitor = new ExpressionVisitor(null, Home, source.Name, source.Columns, null);

    // Row filter //
    Predicate filter = VisitorHelper.GetWhere(visitor, context.where_clause());

    // One (column ordinal, value expression) pair per SET unit //
    Key positions = new Key();
    FNodeSet values = new FNodeSet();
    foreach (HScriptParser.Update_unitContext unit in context.update_unit())
    {
        positions.Add(source.Columns.ColumnIndex(unit.IDENTIFIER().GetText()));
        values.Add(visitor.ToNode(unit.expression()));
    }

    return new UpdatePlan(source, positions, values, filter);

}
// Delete //

/// <summary>
/// Compiles a DELETE statement into a DeletePlan: the target table plus the predicate
/// selecting the rows to remove.
/// </summary>
/// <param name="Home">Workspace supplying tables.</param>
/// <param name="context">Parsed delete statement.</param>
/// <returns>A DeletePlan over the source and row filter.</returns>
public static DeletePlan RenderDeletePlan(Workspace Home, HScriptParser.Crudam_deleteContext context)
{

    // Source table //
    DataSet source = VisitorHelper.GetData(Home, context.full_table_name());

    // Visitor bound to the table schema, then the row filter //
    ExpressionVisitor visitor = new ExpressionVisitor(null, Home, source.Name, source.Columns, null);
    Predicate filter = VisitorHelper.GetWhere(visitor, context.where_clause());

    return new DeletePlan(source, filter);

}
/// <summary>
/// Compiles an aggregate statement into a PartitionedAggregatePlan: grouping keys,
/// reducers, an output projection over the interim (key + aggregate) schema, the
/// destination cursor, and the requested degree of parallelism.
/// </summary>
/// <param name="Home">Workspace supplying tables, temp space and connections.</param>
/// <param name="context">Parsed aggregate statement.</param>
/// <returns>The compiled partitioned aggregation plan.</returns>
public static PartitionedAggregatePlan RenderPartitionedAggregatePlan(Workspace Home, HScriptParser.Crudam_aggregateContext context)
{

    // Source table and the alias it is queried under //
    DataSet source = VisitorHelper.GetData(Home, context.full_table_name());
    string alias = (context.K_AS() == null) ? source.Name : context.IDENTIFIER().GetText();

    // Register plus an expression visitor over the source schema //
    StaticRegister register = new StaticRegister(null);
    ExpressionVisitor visitor = new ExpressionVisitor(null, Home, alias, source.Columns, register);

    // Row filter //
    Predicate filter = VisitorHelper.GetWhere(visitor, context.where_clause());

    // Grouping keys (BY) and reducers (OVER); either clause may be absent //
    FNodeSet keys = (context.K_BY() == null) ? new FNodeSet() : visitor.ToNodes(context.expression_alias_list());
    AggregateSet reducers = (context.K_OVER() == null) ? new AggregateSet() : visitor.ToReducers(context.beta_reduction_list());

    // Second register/visitor pair over the interim schema, for the return clause //
    StaticRegister post_register = new StaticRegister(null);
    ExpressionVisitor post_visitor = new ExpressionVisitor(null, Home, "agg", AggregatePlan.GetInterimSchema(keys, reducers), post_register);

    // Output expressions and the cursor they are written through //
    FNodeSet outputs = VisitorHelper.GetReturnStatement(post_visitor, context.return_action().expression_or_wildcard_set());
    RecordWriter writer = VisitorHelper.GetWriter(Home, outputs.Columns, context.return_action());

    // Degree of parallelism //
    int partitions = VisitorHelper.GetPartitions(visitor, context.partitions());

    return new PartitionedAggregatePlan(writer, source, filter, keys, reducers, outputs, Home.TempSpace, partitions);

}
/// <summary>
/// Builds one map-stage node for a concurrent (map/reduce) read over a single partition.
/// Map actions are compiled with IsAsync = true; reduce actions with IsAsync = false.
/// </summary>
/// <param name="Home">Workspace supplying tables and memory.</param>
/// <param name="PartitionID">Partition this node is responsible for scanning.</param>
/// <param name="context">Parsed map/reduce read statement.</param>
/// <returns>A ReadMapNode carrying the compiled map and reduce actions.</returns>
internal static ReadMapNode RenderMapNode(Workspace Home, int PartitionID, HScriptParser.Crudam_read_maprContext context)
{

    // Get the data source; the alias defaults to the table name when no AS clause //
    DataSet data = VisitorHelper.GetData(Home, context.full_table_name());
    string alias = (context.K_AS() != null) ? context.IDENTIFIER().GetText() : data.Name;

    // Create a local heap to work off of //
    MemoryStruct local_heap = new MemoryStruct(true);

    // Create a record register //
    StaticRegister memory = new StaticRegister(null);

    // Create expression visitor //
    ExpressionVisitor exp_vis = new ExpressionVisitor(local_heap, Home, alias, data.Columns, memory);

    // Where clause //
    Predicate where = VisitorHelper.GetWhere(exp_vis, context.where_clause());

    // Create a reader //
    // NOTE(review): 'reader' is never attached to the register or passed to the node below,
    // unlike RenderStagedReadPlan — confirm ReadMapNode opens its own per-partition reader
    // from 'where', in which case this open is redundant.
    RecordReader reader = data.OpenReader(where);

    // Get the declarations //
    if (context.crudam_declare_many() != null)
    {
        VisitorHelper.AllocateMemory(Home, local_heap, exp_vis, context.crudam_declare_many());
    }

    // Get the map actions; IsAsync = true so they can run per-partition in parallel //
    ActionVisitor act_vis = new ActionVisitor(Home, local_heap, exp_vis);
    act_vis.IsAsync = true;
    TNode map = act_vis.ToNode(context.map_action().query_action());

    // Get the reduce actions; a fresh visitor with IsAsync = false for the serial fold //
    act_vis = new ActionVisitor(Home, local_heap, exp_vis);
    act_vis.IsAsync = false;
    TNode red = act_vis.ToNode(context.reduce_action().query_action());

    ReadMapNode node = new ReadMapNode(PartitionID, map, red, memory, where);

    return node;

}
/// <summary>
/// Compiles a map/reduce read statement into a ConcurrentReadPlan that runs one map
/// node per partition and folds their results through a single reducer.
/// </summary>
/// <param name="Home">Workspace supplying tables.</param>
/// <param name="context">Parsed map/reduce read statement.</param>
/// <returns>A ConcurrentReadPlan wrapping the configured MRJob.</returns>
public static ConcurrentReadPlan RenderMapReducePlan(Workspace Home, HScriptParser.Crudam_read_maprContext context)
{

    // Source table and requested degree of parallelism //
    DataSet source = VisitorHelper.GetData(Home, context.full_table_name());
    int threads = VisitorHelper.GetPartitions(new ExpressionVisitor(null, Home), context.partitions());

    // The factory produces one ReadMapNode per partition; the reducer folds their output //
    ReadMapNodeFactory factory = new ReadMapNodeFactory(Home, context);
    ReadReduceNode reducer = new ReadReduceNode();
    MRJob<ReadMapNode> job = new MRJob<ReadMapNode>(source, reducer, factory, threads);

    return new ConcurrentReadPlan(job);

}