public CreateChunkPlan(string Name, Schema Columns, Workspace Space)
{
    this._name = Name;
    this._columns = Columns;
    this._space = Space;
    this.Name = "CREATE_CHUNK";
}
public RecordSet(string Directory, string Name, Schema S, long MaxRecords)
    : this(S)
{
    this.MaxRecords = MaxRecords;
    Header h = new Header(Directory, Name, 0, this, HeaderType.Table);
    this.Attach(h);
    BinarySerializer.FlushRecordSet(this);
}
public CreateTablePlan(string Directory, string Name, Schema Columns, int Size)
    : base()
{
    this._columns = Columns;
    this._dir = Directory;
    this._name = Name;
    this._size = Size;
    this.Name = "CREATE_TABLE";
}
public override string Unparse(Schema S)
{
    List<string> text = new List<string>();
    foreach (FNode ln in this.Children)
        text.Add(ln.Unparse(S));
    return this._Func.Unparse(text.ToArray(), S);
}
public FNodeSet(Schema Columns, Key Fields)
    : this()
{
    this.AllowNameDotName = false;
    for (int i = 0; i < Fields.Count; i++)
    {
        this.Add(Columns.ColumnName(Fields[i]),
            new FNodeFieldRef(null, Fields[i], Columns.ColumnAffinity(Fields[i]), Columns.ColumnSize(Fields[i]), null));
    }
}
public FNodeSet(Schema Columns, bool AllowDotNames)
    : this()
{
    this.AllowNameDotName = AllowDotNames;
    for (int i = 0; i < Columns.Count; i++)
    {
        this.Add(Columns.ColumnName(i),
            new FNodeFieldRef(null, i, Columns.ColumnAffinity(i), Columns.ColumnSize(i), null));
    }
}
// Statics //
public static DataSet CreateOfType(DataSet Basis, string Dir, string Name, Schema Columns, long MaxSize)
{
    if (Basis.IsBig)
        return new Table(Dir, Name, Columns, MaxSize);
    else if (Basis.ToRecordSet.IsAttached)
        return new RecordSet(Dir, Name, Columns, MaxSize);
    else
        return new RecordSet(Columns);
}
// Constructor //
public RecordSet(Schema NewColumns, Header NewHeader, List<Record> NewCache, Key NewOrderBy)
{
    this._Columns = NewColumns;
    this._Cache = NewCache;
    this._OrderBy = NewOrderBy;
    this._Head = NewHeader;
    if (NewHeader != null)
    {
        this._MaxRecordCount = NewHeader.MaxRecordCount;
        this._GhostName = NewHeader.Name;
    }
    else
    {
        this._MaxRecordCount = EstimateMaxRecords(NewColumns);
        this._GhostName = "CHUNK";
    }
}
public void Repoint(Schema OriginalSchema, Schema NewSchema)
{
    if (this._idx >= OriginalSchema.Count)
        throw new Exception("Original schema is invalid");
    if (OriginalSchema.ColumnAffinity(this._idx) != this._affinity)
        throw new Exception("Original schema is invalid");

    string name = OriginalSchema.ColumnName(this._idx);
    int new_index = NewSchema.ColumnIndex(name);
    if (new_index == -1)
        throw new Exception("New schema is invalid");
    if (NewSchema.ColumnAffinity(new_index) != this._affinity)
        throw new Exception("New schema is invalid");

    this._idx = new_index;
}
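// Usage sketch: retarget a field reference after a projection renames ordinals.
// Assumptions (not confirmed by this listing): Repoint above belongs to the field-reference
// node type, taken here to be FNodeFieldRef, and both schemas contain the referenced
// column with the same affinity; otherwise Repoint throws.
public static void RepointExample(FNodeFieldRef node, Schema original, Schema projected)
{
    // After this call the node indexes into 'projected' rather than 'original'.
    node.Repoint(original, projected);
}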
// Constructor //
public RecordReader(RecordSet From, Predicate Where)
{
    this._ptrRecord = DEFAULT_POINTER;
    this._Data = From;
    this._Where = Where;

    // Assign the where to a register pointing to 'this' //
    StreamRegister reg = new StreamRegister(this);
    this._Where.Node.AssignRegister(reg);

    // Fix the default //
    if (!Where.Default)
    {
        this._IsFiltered = true;
        while (!this.CheckFilter && !this.EndOfData)
            this.Advance();
    }

    // This is used to handle the writer class that inherits the reader //
    if (From != null)
        this._columns = From.Columns;
}
// Records //
public static Record ToRecord(string Text, Schema Columns, char[] Delims, char Escape)
{
    // Split the data //
    string[] t = Splitter.Split(Text, Delims, Escape, false, Cell.NULL_STRING_TEXT);

    // Check the length //
    if (t.Length != Columns.Count)
        throw new Exception(string.Format("Text has {0} fields, but schema has {1} fields", t.Length, Columns.Count));

    // Build the record //
    RecordBuilder rb = new RecordBuilder();
    for (int i = 0; i < t.Length; i++)
        rb.Add(Cell.Parse(t[i], Columns.ColumnAffinity(i)));
    return rb.ToRecord();
}
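// Usage sketch: parse one delimited line into a Record against a known schema.
// Assumptions: this is called from the class that declares ToRecord above, 'columns' has
// exactly as many fields as the text with matching affinities, and the delimiter and
// sample text are illustrative only.
public static Record ParseLineExample(Schema columns)
{
    char[] delims = new char[] { ',' };

    // char.MaxValue is the "no escape character" sentinel used by the three-argument overload below.
    return ToRecord("1,HELLO,3.14", columns, delims, char.MaxValue);
}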
public static DataSet CreateOfType(DataSet Basis, Schema Columns) { return CreateOfType(Basis, Basis.Directory, Header.TempName(), Columns, Basis.MaxRecords); }
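// Usage sketch: allocate a temporary output DataSet shaped like an existing input.
// Assumptions: this is called from the class that declares the CreateOfType overloads above,
// and 'projected' was produced elsewhere (for example by the Split helper shown below).
public static DataSet AllocateOutputExample(DataSet input, Schema projected)
{
    // Big inputs get a disk-backed Table, attached chunks get an attached RecordSet,
    // and everything else gets a memory-only RecordSet (see CreateOfType above).
    return CreateOfType(input, projected);
}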
private static Table ReadTableSafe2(byte[] Mem, int Location)
{
    /*
     * Read:
     *      Header
     *      Schema
     *      SortKey
     *      Record Collection
     */

    // Read header //
    Record rh;
    Location = ReadRecordSafe2(Mem, Location, 11, out rh);
    TableHeader h = new TableHeader(rh);

    // Read schema //
    List<Record> s_cache = new List<Record>();
    Location = BinarySerializer.ReadRecordsSafe2(Mem, Location, h.ColumnCount, 4, s_cache);
    Schema s = new Schema(s_cache);

    // Read key //
    Record rk;
    Location = ReadRecordSafe2(Mem, Location, (int)h.KeyCount, out rk);
    Key k = new Key(rk);

    // Read record cache //
    List<Record> d_cache = new List<Record>();
    Location = BinarySerializer.ReadRecordsSafe2(Mem, Location, (int)h.Size, 2, d_cache);

    // Return recordset //
    return new Table(h, s, d_cache, k);
}
public string UnParse(Schema Columns) { return this._Node.Unparse(Columns); }
public override string Unparse(Schema S) { return this._Heap.Scalars[this._Pointer].ToString(); }
public static FNode Field(Schema Columns, string Name) { return Field(Columns, Name, null); }
public NN_Layer(bool Bias, Key Fields, Schema Columns)
    : this()
{
    // Check if rendered //
    if (this._IsRendered)
        throw new Exception("Layer already rendered");

    // Add the bias node //
    if (Bias)
        this._Nodes.Add(new NeuralNodeStatic("DATA_BIAS", 1));

    // Add the references //
    for (int i = 0; i < Fields.Count; i++)
        this._Nodes.Add(new NeuralNodeReference(Columns.ColumnName(Fields[i]), Fields[i]));

    // Tag as rendered //
    this._IsRendered = true;
}
public FNodeSet(Schema Columns) : this(Columns, false) { }
public static RecordWriter GetWriter(Workspace Enviro, Schema Columns, HScriptParser.Return_actionContext context)
{
    // Get the table name //
    string name = context.full_table_name().table_name().IDENTIFIER().GetText();
    string db = (context.full_table_name().database_name() == null)
        ? "GLOBAL"
        : context.full_table_name().database_name().GetText();

    // Figure out if we need to append //
    bool appendto = (context.K_INSERT() != null);

    // Global -- Append //
    if (context.full_table_name().database_name() == null && appendto)
    {
        if (Enviro.ChunkHeap.Exists(name))
            return Enviro.ChunkHeap[name].OpenWriter();
        throw new HScriptCompileException(string.Format("Chunk '{0}' does not exist", name));
    }

    // Static -- Append //
    if (appendto)
    {
        string fullname = db + "." + name;
        if (Enviro.Exists(db, name))
            return Enviro.GetStaticTable(db, name).OpenWriter();
        throw new HScriptCompileException(string.Format("Table '{0}' does not exist", fullname));
    }

    // Global -- Create New //
    if (context.full_table_name().database_name() == null)
    {
        RecordSet data = new RecordSet(Columns);
        Enviro.ChunkHeap.Reallocate(name, data);
        return data.OpenWriter();
    }

    // Static -- Create New //
    string dir = Enviro.Connections[db];
    Table t = new Table(dir, name, Columns);
    return t.OpenWriter();
}
private static RecordSet ReadRecordSet(BinaryReader Reader)
{
    /*
     * Read:
     *      Header
     *      Schema
     *      SortKey
     *      Record Collection
     */

    // Read header //
    Header h = new Header(BinarySerializer.ReadRecord(Reader, 10));

    // Read schema //
    Schema s = new Schema(BinarySerializer.ReadRecords(Reader, h.ColumnCount, 4));

    // Read key //
    Key k = new Key(BinarySerializer.ReadRecord(Reader, (int)h.KeyCount));

    // Read record cache //
    List<Record> l = BinarySerializer.ReadRecords(Reader, h.RecordCount, s.Count);

    // Return recordset //
    return new RecordSet(s, h, l, k);
}
public string Unparse(Schema Columns)
{
    // Join each node's unparsed text with " , ", without a trailing separator.
    StringBuilder sb = new StringBuilder();
    foreach (FNode n in this._Nodes)
    {
        if (sb.Length != 0)
            sb.Append(" , ");
        sb.Append(n.Unparse(Columns));
    }
    return sb.ToString();
}
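// Usage sketch: build one field-reference node per column of a schema and render the set as text.
// Assumptions: the schema-based constructor and the Unparse overload above both belong to
// FNodeSet, and the rendered string is intended for display or debugging only.
public static string DescribeColumnsExample(Schema columns)
{
    FNodeSet nodes = new FNodeSet(columns);   // dotted names disabled (see the constructor above)
    return nodes.Unparse(columns);
}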
public NN_Layer(NodeReduction Connector, ScalarFunction Activator, Key Fields, Schema Columns)
    : this()
{
    // Check if rendered //
    if (this._IsRendered)
        throw new Exception("Layer already rendered");

    // Add the references //
    for (int i = 0; i < Fields.Count; i++)
        this._Nodes.Add(new NeuralNodePrediction(Columns.ColumnName(Fields[i]), Activator, Connector, Fields[i]));

    // Tag as rendered //
    this._IsRendered = true;
}
public static Record ToRecord(string Text, Schema Columns, char[] Delims) { return ToRecord(Text, Columns, Delims, char.MaxValue); }
/// <summary>
/// Creates a new schema containing only the columns of another schema that are named by a key
/// </summary>
/// <param name="S">The starting point schema</param>
/// <param name="K">A key representing the columns to keep</param>
/// <returns>The projected schema</returns>
public static Schema Split(Schema S, Key K)
{
    Schema s = new Schema();
    for (int i = 0; i < K.Count; i++)
    {
        s.Add(S.ColumnName(K[i]), S.ColumnAffinity(K[i]), S.ColumnNull(K[i]), S.ColumnSize(K[i]));
    }
    return s;
}
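// Usage sketch: build an in-memory chunk whose layout is a keyed projection of 'source'.
// Assumptions: this is called from the class that declares Split above, and 'keep' holds
// the ordinals of the columns to retain, in the desired output order.
public static RecordSet ProjectionChunkExample(Schema source, Key keep)
{
    Schema projected = Split(source, keep);   // name, affinity, nullability, and size are copied
    return new RecordSet(projected);          // empty, memory-only chunk with the projected layout
}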
public static DataSet CreateData(Workspace Enviro, Schema Columns, HScriptParser.Full_table_nameContext context)
{
    // Get the name //
    string t_name = context.table_name().IDENTIFIER().GetText();

    // Global context //
    if (context.database_name() == null)
    {
        RecordSet rs = new RecordSet(Columns);
        Enviro.ChunkHeap.Reallocate(t_name, rs);
        return rs;
    }

    // Table context //
    if (context.database_name() != null)
    {
        string d_base = context.database_name().IDENTIFIER().GetText();
        if (!Enviro.Connections.Exists(d_base))
            throw new HScriptCompileException("Connection to '{0}' does not exist", d_base);
        string dir = Enviro.Connections[d_base];
        Table t = new Table(dir, t_name, Columns);
        return t;
    }

    throw new HScriptCompileException("Cannot create data '{0}'", t_name);
}
private static Table ReadTable(BinaryReader Reader)
{
    /*
     * Read:
     *      Header
     *      Schema
     *      SortKey
     *      Record Collection
     */

    // Read header //
    TableHeader h = new TableHeader(BinarySerializer.ReadRecord(Reader, 10));

    // Read schema //
    Schema s = new Schema(BinarySerializer.ReadRecords(Reader, h.ColumnCount, 4));

    // Read key //
    Key k = new Key(BinarySerializer.ReadRecord(Reader, (int)h.KeyCount));

    // Read record cache //
    List<Record> l = BinarySerializer.ReadRecords(Reader, h.Size, 2);

    // Return recordset //
    return new Table(h, s, l, k);
}
public override string Unparse(Schema S) { return this._value.ToString(); }
public string Unparse(Schema Columns) { return this._Expression.Unparse(Columns); }
public static FNode Field(Schema Columns, string Name, Register Memory)
{
    int idx = Columns.ColumnIndex(Name);
    return Field(idx, Columns.ColumnAffinity(idx), Columns.ColumnSize(idx), Memory);
}
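// Usage sketch: resolve a column name to a field-reference node and render it back to text.
// Assumptions: this is called from the class that declares the Field factories above, and
// passing no Register mirrors the two-argument overload shown earlier.
public static string FieldExample(Schema columns, string columnName)
{
    FNode node = Field(columns, columnName);   // resolves ordinal, affinity, and size from the schema
    return node.Unparse(columns);
}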
// Statics //
/// <summary>
/// Combines two schemas; throws an exception if two columns have the same name.
/// </summary>
/// <param name="S1">The left schema</param>
/// <param name="S2">The right schema</param>
/// <returns>A combined schema</returns>
public static Schema Join(Schema S1, Schema S2)
{
    Schema s = new Schema();
    for (int i = 0; i < S1.Count; i++)
    {
        s.Add(S1.ColumnName(i), S1.ColumnAffinity(i), S1.ColumnNull(i), S1.ColumnSize(i));
    }
    for (int i = 0; i < S2.Count; i++)
    {
        s.Add(S2.ColumnName(i), S2.ColumnAffinity(i), S2.ColumnNull(i), S2.ColumnSize(i));
    }
    return s;
}
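// Usage sketch: widen a record layout by appending the columns of a second schema.
// Assumptions: this is called from the class that declares Join above (likely Schema itself),
// and duplicate column names are rejected downstream, per the summary comment.
public static Schema WidenExample(Schema left, Schema right)
{
    // Left columns keep their ordinals; right columns are appended after them.
    return Join(left, right);
}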