/// <summary>
/// Builds a parameter that wraps a single expression. A one-element
/// expression set is also populated so callers that expect a set work too.
/// </summary>
public HParameter(FNode Value)
{
    this._affinity = HParameterAffinity.Expression;
    this._expression = Value;

    // Mirror the single node into a set of one, in case a set of one was passed.
    FNodeSet single = new FNodeSet();
    single.Add(Value);
    this._expression_set = single;
}
/// <summary>
/// Overwrites the columns of a record addressed by key K with freshly
/// evaluated values from the parallel expression set.
/// </summary>
/// <param name="Data">Record mutated in place.</param>
/// <param name="K">Destination column indices; parallel to Fields.</param>
/// <param name="Fields">Expressions whose evaluated values replace the keyed columns.</param>
private static void Update(Record Data, Key K, FNodeSet Fields)
{
    for (int i = 0; i < K.Count; i++)
    {
        // K[i] maps the i-th expression onto its destination column;
        // no need for the hoisted temporary the original carried.
        Data[K[i]] = Fields[i].Evaluate();
    }
}
// RecordSet SELECTS //
/// <summary>
/// Materializes a SELECT over a data set into a new in-memory record set.
/// </summary>
public static RecordSet SELECT(DataSet Data, FNodeSet Nodes, Predicate Where)
{
    RecordSet result = new RecordSet(Nodes.Columns);
    RecordWriter writer = result.OpenWriter();

    // The read plan streams each record passing Where through the writer.
    FastReadPlan plan = new FastReadPlan(Data, Where, Nodes, writer);
    plan.Execute();

    writer.Close();
    return result;
}
/// <summary>
/// Plan that updates keyed columns of a data set for records passing a filter.
/// </summary>
public UpdatePlan(DataSet Data, Key K, FNodeSet Fields, Predicate BaseDataFilter)
    : base()
{
    this.Name = "UPDATE";
    this._data = Data;
    this._keys = K;
    this._values = Fields;
    this._where = BaseDataFilter;
}
// Table SELECTS //
/// <summary>
/// Materializes a SELECT over a data set into a new on-disk table.
/// </summary>
public static Table SELECT(string Dir, string Name, DataSet Data, FNodeSet Nodes, Predicate Where)
{
    Table result = new Table(Dir, Name, Nodes.Columns);
    RecordWriter writer = result.OpenWriter();

    // The read plan streams each record passing Where through the writer.
    FastReadPlan plan = new FastReadPlan(Data, Where, Nodes, writer);
    plan.Execute();

    writer.Close();
    return result;
}
/// <summary>
/// Builds a k-means style row clusterer over the filtered fields of a data set.
/// Uses a Euclidean distance rule and a spectrum initializer for the means.
/// </summary>
public RowCluster(string Name, DataSet Data, Predicate Where, FNodeSet Fields, FNode Weight, int Count)
{
    this.Name = Name;
    this._data = Data;
    this._where = Where;
    this._fields = Fields;
    this._weight = Weight;
    this._count = Count;

    // Default strategies: Euclidean distance, spectrum-seeded means.
    this._rule = new RowClusterRuleEuclid();
    this._initializer = new RowClusterInitializerSpectrum();
    this._means = this._initializer.Initialize(Data, Where, Fields, Count);
}
/// <summary>
/// Node that appends evaluated output records to an existing record stream.
/// </summary>
/// <exception cref="Exception">
/// Thrown when the writer's schema and the output set have different column counts.
/// </exception>
public TNodeAppendTo(TNode Parent, RecordWriter Writer, FNodeSet Output)
    : base(Parent)
{
    // Check that the column count is the same; we don't care about the schema //
    // BUG FIX: error message typo "recors" corrected to "records".
    if (Writer.SourceSchema.Count != Output.Count)
        throw new Exception("Attempting to write a different number of records to a stream");
    this._writer = Writer;
    this._output = Output;
}
// Caches records destined for the parent record set so they can be appended
// in a chunk rather than one at a time.
// NOTE(review): compatibility is checked by comparing Schema.GetHashCode()
// values. Unless Schema overrides GetHashCode with a value-based hash, this
// is effectively a reference comparison and may reject equivalent schemas
// (or, rarely, accept differing ones) — confirm Schema's hashing contract.
public TNodeAppendToChunkAsync(TNode Parent, RecordSet UseParentData, FNodeSet UseFields)
    : base(Parent)
{
    if (UseParentData.Columns.GetHashCode() != UseFields.Columns.GetHashCode())
        throw new Exception("Output table and fields passed are not compatible");
    this._ParentData = UseParentData;
    // Staging buffer with the parent's schema; filled before being flushed.
    this._RecordCache = new RecordSet(UseParentData.Columns);
    this._Fields = UseFields;
}
/// <summary>
/// Generalized linear model: a regression model whose prediction passes
/// through a user-supplied link function.
/// </summary>
/// <exception cref="Exception">
/// Thrown when the link lambda does not take exactly one argument, or is not
/// differentiable (as reported by IsCorrectLink's diagnostic code).
/// </exception>
public GeneralizedLinearModel(string Name, DataSet Data, Predicate Where, FNode Expected, FNodeSet Actual, FNode Weight, Lambda LinkFunction)
    : base(Name, Data, Where, Expected, Actual, Weight)
{
    // IsCorrectLink reports a negative diagnostic code on failure.
    switch (IsCorrectLink(LinkFunction))
    {
        case -1:
            throw new Exception("Link function must have exactly one argument");
        case -2:
            throw new Exception("Link function is not differentiable");
    }
    this._Link = LinkFunction;
}
/// <summary>
/// Resolves the destination data set for a RETURN action. With INSERT
/// (append) semantics the target must already exist; otherwise a new chunk
/// or table is created. An absent database qualifier means the global chunk heap.
/// </summary>
/// <exception cref="HScriptCompileException">
/// Thrown when appending to a chunk or table that does not exist.
/// </exception>
public static DataSet GetData(Workspace Enviro, FNodeSet Nodes, HScriptParser.Return_actionContext context)
{
    // Hoist the repeated full_table_name() walk; get the name and qualifier //
    var full_name = context.full_table_name();
    string name = full_name.table_name().IDENTIFIER().GetText();
    bool is_global = (full_name.database_name() == null);
    string db = is_global ? "global" : full_name.database_name().GetText();

    // Figure out if we need to append (INSERT keyword present) //
    bool appendto = (context.K_INSERT() != null);

    // Global -- Append //
    if (is_global && appendto)
    {
        if (Enviro.ChunkHeap.Exists(name))
            return Enviro.ChunkHeap[name];
        throw new HScriptCompileException(string.Format("Chunk '{0}' does not exist", name));
    }

    // Static -- Append //
    if (appendto)
    {
        if (Enviro.Exists(db, name))
            return Enviro.GetStaticTable(db, name);
        throw new HScriptCompileException(string.Format("Table '{0}' does not exist", db + "." + name));
    }

    // Global -- Create New //
    if (is_global)
    {
        RecordSet data = new RecordSet(Nodes.Columns);
        Enviro.ChunkHeap.Reallocate(name, data);
        return data;
    }

    // Static -- Create New //
    string dir = Enviro.Connections[db];
    return new Table(dir, name, Nodes.Columns);
}
/// <summary>
/// Plan that groups filtered source records by key expressions, folds them
/// through an aggregate set, and writes the projected results to a stream.
/// </summary>
public AggregatePlan(RecordWriter Output, DataSet Source, Predicate Filter, FNodeSet Keys, AggregateSet Aggregates, FNodeSet ReturnSet, StaticRegister BaseMem, StaticRegister ReturnMem, string TempDir)
    : base()
{
    this.Name = "AGGREGATE";
    this._writer = Output;
    this._source = Source;
    this._filter = Filter;
    // A null key set means "one group": substitute an empty set.
    this._keys = Keys ?? new FNodeSet();
    this._aggregates = Aggregates;
    this._returnset = ReturnSet;
    this._basememory = BaseMem;
    this._returnmemory = ReturnMem;
    // Spill interim results beside the source when no temp dir is given.
    this._sink = TempDir ?? Source.Directory;
}
/// <summary>
/// Builds the model's expected-value expression: the linear combination of
/// the input expressions weighted by the fitted Beta coefficients.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when the input count differs from the model's input count.
/// </exception>
public override FNode ModelExpected(FNodeSet Inputs)
{
    if (Inputs.Count != this._XValue.Count)
        throw new ArgumentException("The inputs passed are not the same size as the model inputs");

    // Accumulate sum_i Beta[i] * input_i, left to right.
    FNode total = Inputs.Nodes.First().CloneOfMe() * FNodeFactory.Value(this.Beta[0]);
    for (int i = 1; i < Inputs.Count; i++)
    {
        total += Inputs[i].CloneOfMe() * FNodeFactory.Value(this.Beta[i]);
    }
    return total;
}
/// <summary>
/// Applies an UPDATE across every extent of a data set: for each record
/// passing the filter, writes the evaluated field expressions into the
/// columns addressed by key K.
/// </summary>
/// <returns>The number of records updated.</returns>
/// <exception cref="Exception">Thrown when K and Fields differ in length.</exception>
public static long Update(DataSet Data, Key K, FNodeSet Fields, Predicate BaseDataFilter)
{
    // Check that the field indicies and the maps have the same length //
    // BUG FIX: the second placeholder was {0}, so both counts printed the same value.
    if (K.Count != Fields.Count)
        throw new Exception(string.Format("Field collection passed [{0}] has fewer elements than the map collection passed [{1}]", K.Count, Fields.Count));

    // Create the total append count //
    long CountOf = 0;

    // Loop through each extent //
    foreach (RecordSet rs in Data.Extents)
    {
        // Open a stream //
        RecordReader rr = new RecordReader(rs, BaseDataFilter);

        // Bind the field expressions to the stream's register so they
        // evaluate against the record currently under the reader //
        Register mem = new StreamRegister(rr);
        Fields.AssignRegister(mem);

        // Update each qualifying record in place //
        while (!rr.EndOfData)
        {
            Update(rr.Read(), K, Fields);
            CountOf++;
            rr.Advance();
        }

        // Persist the extent if it is backed by storage //
        if (rs.IsAttached)
            BinarySerializer.Flush(rs);
    }

    // No need to flush the data set //
    return CountOf;
}
// Dispatches an expression-or-wildcard parse node to the overload matching
// its concrete subtype; unrecognized subtypes are silently ignored.
private static void AppendSet(ExpressionVisitor Evaluator, FNodeSet Fields, HScriptParser.Expression_or_wildcardContext context)
{
    HScriptParser.EOW_expressionContext expr = context as HScriptParser.EOW_expressionContext;
    if (expr != null)
    {
        AppendSet(Evaluator, Fields, expr);
        return;
    }

    HScriptParser.EOW_local_starContext local_star = context as HScriptParser.EOW_local_starContext;
    if (local_star != null)
    {
        AppendSet(Evaluator, Fields, local_star);
        return;
    }

    HScriptParser.EOW_global_starContext global_star = context as HScriptParser.EOW_global_starContext;
    if (global_star != null)
    {
        AppendSet(Evaluator, Fields, global_star);
        return;
    }

    HScriptParser.EOW_table_starContext table_star = context as HScriptParser.EOW_table_starContext;
    if (table_star != null)
    {
        AppendSet(Evaluator, Fields, table_star);
        return;
    }

    HScriptParser.EOW_tables_starContext tables_star = context as HScriptParser.EOW_tables_starContext;
    if (tables_star != null)
    {
        AppendSet(Evaluator, Fields, tables_star);
        return;
    }
}
// Expands ALIAS.* into one field per column of the aliased table, optionally
// prefixing each alias with the AS suffix.
private static void AppendSet(ExpressionVisitor Evaluator, FNodeSet Fields, HScriptParser.EOW_table_starContext context)
{
    string alias = context.IDENTIFIER()[0].GetText();
    if (!Evaluator.Columns.ContainsKey(alias))
        throw new Exception(string.Format("Alias '{0}' does not exist", alias));

    // Build column-reference nodes bound to the alias's register //
    FNodeSet nodes = new FNodeSet(Evaluator.Columns[alias]);
    nodes.AssignRegister(Evaluator.Registers[alias]);

    // Optional AS <suffix> prefixes every generated alias //
    string suffix = (context.K_AS() == null) ? null : context.IDENTIFIER()[1].GetText();
    for (int i = 0; i < nodes.Count; i++)
    {
        string field_alias = (suffix == null) ? nodes.Alias(i) : suffix + nodes.Alias(i);
        Fields.Add(field_alias, nodes[i]);
    }
}
// Expression or wildcard handlers //
// Appends a single aliased expression. Alias precedence: explicit AS name,
// then the node's own name, then a generated "F<index>" placeholder.
private static void AppendSet(ExpressionVisitor Evaluator, FNodeSet Fields, HScriptParser.EOW_expressionContext context)
{
    FNode node = Evaluator.ToNode(context.expression_alias().expression());

    // Default alias is positional ("F0", "F1", ...).
    string alias = "F" + Fields.Count.ToString();
    if (node.Name != null)
        alias = node.Name;
    if (context.expression_alias().K_AS() != null)
        alias = context.expression_alias().IDENTIFIER().GetText();

    Fields.Add(alias, node);
}
/// <summary>
/// Builds the gradient set of an equation: one partial derivative per mapped
/// variable, each keyed by the variable's name.
/// </summary>
public static FNodeSet Gradients(FNode Equation, Dictionary<string, int> Map)
{
    FNodeSet gradients = new FNodeSet();
    foreach (string variable in Map.Keys)
    {
        gradients.Add(variable, FNodeGradient.Gradient(Equation, variable));
    }
    return gradients;
}
/// <summary>
/// Builds a parameter that wraps an expression set; the set's first node
/// doubles as the single-expression view.
/// </summary>
public HParameter(FNodeSet Value)
{
    this._affinity = HParameterAffinity.ExpressionSet;
    // Expose the first element through the scalar accessor as well.
    this._expression = Value[0];
    this._expression_set = Value;
}
/// <summary>
/// Builds the expected-value expression using only the first input node.
/// </summary>
public override FNode ModelExpected(FNodeSet Inputs)
{
    // This model variant is univariate: delegate with the first input.
    FNode first = Inputs.Nodes.First();
    return this.ModelExpected(first);
}
/// <summary>
/// Finalizes every cached group into a record set, applying no filter.
/// </summary>
public RecordSet ToFinal(FNodeSet Fields)
{
    // TrueForAll accepts every record.
    return this.ToFinal(Fields, Predicate.TrueForAll);
}
/// <summary>
/// Binds each gradient node to concrete parameter values, preserving the
/// original aliases.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when the gradient set and parameter vector differ in length.
/// </exception>
public static FNodeSet BindNodes(FNodeSet Gradients, CellVector Parameters, Dictionary<string, int> Map)
{
    if (Gradients.Count != Parameters.Count)
        throw new ArgumentException("The node collection and parameter vector must be the same size");

    FNodeSet bound = new FNodeSet();
    for (int i = 0; i < Gradients.Count; i++)
    {
        FNode node = NonlinearRegressionModel.BindNode(Gradients[i], Parameters, Map);
        bound.Add(Gradients.Alias(i), node);
    }
    return bound;
}
/// <summary>
/// Finalizes every cached group using the default output: the grouping
/// columns followed by the aggregate columns.
/// </summary>
public RecordSet ToFinal()
{
    // Output schema = map (key) columns joined with reducer columns.
    Schema combined = Schema.Join(this._Maps.Columns, this._Reducers.GetSchema);
    return this.ToFinal(new FNodeSet(combined));
}
// Expands GLOBAL.* into one field per scalar on the global heap, optionally
// prefixing each alias with the AS suffix.
private static void AppendSet(ExpressionVisitor Evaluator, FNodeSet Fields, HScriptParser.EOW_global_starContext context)
{
    string suffix = (context.K_AS() == null) ? null : context.IDENTIFIER().GetText();
    for (int i = 0; i < Evaluator.GlobalHeap.Scalars.Count; i++)
    {
        // BUG FIX: aliases were taken from LocalHeap while the nodes referenced
        // GlobalHeap (copy-paste from the local-star handler); both the name and
        // the heap reference must come from the global heap.
        string alias = (suffix == null) ? Evaluator.GlobalHeap.Scalars.Name(i) : suffix + Evaluator.GlobalHeap.Scalars.Name(i);
        FNode node = new FNodeHeapRef(null, Evaluator.GlobalHeap, i);
        Fields.Add(alias, node);
    }
}
/// <summary>
/// Evaluates the output expressions for every cached group and writes each
/// resulting record to the supplied stream.
/// </summary>
/// <exception cref="Exception">
/// Thrown when the stream's schema and the output set's schema differ.
/// </exception>
public void WriteToFinal(RecordWriter Writter, FNodeSet Fields)
{
    if (Writter.SourceSchema != Fields.Columns)
        throw new Exception("Base stream and output schema are different");

    // Static register that the output expressions read the current group from //
    StaticRegister reg = new StaticRegister(null);
    Fields.AssignRegister(reg);

    foreach (KeyValuePair<Record, CompoundRecord> t in this._cache)
    {
        // Expose the group key joined with its finalized aggregate values //
        reg.Assign(Record.Join(t.Key, this._Reducers.Evaluate(t.Value)));

        // Project and emit //
        Record r = Fields.Evaluate();
        Writter.Insert(r);
    }
}
// Expands the bare * wildcard using the first registered table alias,
// optionally prefixing each alias with the AS suffix. A visitor with no
// registered tables contributes nothing (deliberately not an error).
private static void AppendSet(ExpressionVisitor Evaluator, FNodeSet Fields, HScriptParser.EOW_tables_starContext context)
{
    // no need to toss an exception
    if (Evaluator.Columns.Count == 0)
        return;

    string alias = Evaluator.Columns.Keys.First();

    // Build column-reference nodes bound to that alias's register //
    FNodeSet nodes = new FNodeSet(Evaluator.Columns[alias]);
    nodes.AssignRegister(Evaluator.Registers[alias]);

    string suffix = (context.K_AS() == null) ? null : context.IDENTIFIER().GetText();
    for (int i = 0; i < nodes.Count; i++)
    {
        string field_alias = (suffix == null) ? nodes.Alias(i) : suffix + nodes.Alias(i);
        Fields.Add(field_alias, nodes[i]);
    }
}
/// <summary>
/// Writes every cached group to the stream using the default output:
/// grouping columns followed by aggregate columns.
/// </summary>
public void WriteToFinal(RecordWriter Writter)
{
    // Output schema = map (key) columns joined with reducer columns.
    Schema combined = Schema.Join(this._Maps.Columns, this._Reducers.GetSchema);
    this.WriteToFinal(Writter, new FNodeSet(combined));
}
/// <summary>
/// Builds the output node set for a RETURN statement by expanding each
/// expression-or-wildcard element in order.
/// </summary>
public static FNodeSet GetReturnStatement(ExpressionVisitor Evaluator, HScriptParser.Expression_or_wildcard_setContext context)
{
    FNodeSet result = new FNodeSet();
    foreach (HScriptParser.Expression_or_wildcardContext element in context.expression_or_wildcard())
    {
        AppendSet(Evaluator, result, element);
    }
    return result;
}
// Constructor //
/// <summary>
/// Group-by cache: maps each distinct key record (from Fields) to the
/// running aggregate state (from Aggregates).
/// </summary>
public KeyValueSet(FNodeSet Fields, AggregateSet Aggregates)
{
    this._Maps = Fields;
    this._Reducers = Aggregates;
    // Key records compare with the null-record comparer for Fields' schema.
    this._cache = new Dictionary<Record, CompoundRecord>(Fields.Columns.NullRecord);
}
/// <summary>
/// Compacts every node in an expression set, preserving order.
/// </summary>
public static FNodeSet CompactTree(FNodeSet Tree)
{
    FNodeSet compacted = new FNodeSet();
    foreach (FNode node in Tree.Nodes)
    {
        compacted.Add(CompactNode(node));
    }
    return compacted;
}
/// <summary>
/// Rehydrates a key-value set from an interim record set previously
/// serialized at the header's path.
/// </summary>
public static KeyValueSet Open(Header h, FNodeSet Fields, AggregateSet CR)
{
    // Read the spilled interim data back off disk //
    RecordSet interim = BinarySerializer.BufferRecordSet(h.Path);

    KeyValueSet result = new KeyValueSet(Fields, CR);
    result.ImportFromInterim(interim);
    return result;
}