Example #1
        public static long Delete(RecordSet Extent, Predicate Where)
        {

            // Copy every record that does NOT match the predicate into a fresh cache //
            long n = 0;
            RecordSet rs = new RecordSet(Extent.Columns);
            RecordWriter w = rs.OpenWriter();
            FastReadPlan plan = new FastReadPlan(Extent, Where.NOT, new FNodeSet(Extent.Columns), w);
            plan.Execute();
            w.Close();

            // The delete count is the difference in record counts; swap the surviving cache back into the extent //
            n = Extent.Count - rs.Count;
            Extent._Cache = rs._Cache;
            return n;

            //long n = 0;
            //StaticRegister mem = new StaticRegister(null);
            //Where.AssignRegister(mem);
            //for (int i = Extent.Count - 1; i >= 0; i--)
            //{
            //    mem.Assign(Extent[i]);
            //    if (Where.Render())
            //    {
            //        Extent.Remove(i);
            //        n++;
            //    }

            //}
            //return n;

        }
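A minimal usage sketch of Delete, assuming only the API visible in these snippets: Predicate.TrueForAll (used by the reader and writer constructors in Examples #27 and #29) matches every record, so its negation inside Delete keeps nothing. The wrapper name DeleteAll is hypothetical.

        // Hypothetical usage: remove every record and report how many rows were dropped //
        public static long DeleteAll(RecordSet Extent)
        {
            return Delete(Extent, Predicate.TrueForAll);
        }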
Example #2
        // statics //
        public static RecordSet[] Split(RecordSet Data, int PartitionCount)
        {

            RecordSet[] partitions = new RecordSet[PartitionCount];
            int[] partition_map = RecordMap(PartitionCount, Data.Count);

            int rec_ptr = 0;
            for (int i = 0; i < PartitionCount; i++)
            {

                partitions[i] = new RecordSet(Data.Columns);
                int max_count = partition_map[i];
                int local_count = 0;
                while (local_count < max_count)
                {

                    partitions[i].Add(Data[rec_ptr]);
                    rec_ptr++;
                    local_count++;

                }

            }

            return partitions;

        }
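Split relies on a RecordMap helper that is not shown in these snippets. Below is a minimal sketch of what it plausibly computes, assuming it only needs to spread Data.Count records as evenly as possible across PartitionCount buckets; the signature mirrors the call site, but the body is an assumption.

        // Hypothetical sketch: distribute 'Count' records across 'PartitionCount' buckets as evenly as possible //
        public static int[] RecordMap(int PartitionCount, long Count)
        {
            int[] map = new int[PartitionCount];
            long baseline = Count / PartitionCount;      // every bucket receives at least this many records
            long remainder = Count % PartitionCount;     // the first 'remainder' buckets receive one extra
            for (int i = 0; i < PartitionCount; i++)
                map[i] = (int)(baseline + (i < remainder ? 1 : 0));
            return map;
        }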
Example #3
 public override void Execute()
 {
     this._timer = System.Diagnostics.Stopwatch.StartNew();
     RecordSet chunk = new RecordSet(this._columns);
     chunk.SetGhostName(this._name);
     this._space.ChunkHeap.Reallocate(this._name, chunk);
     this._timer.Stop();
     this.Message.AppendLine(string.Format("Chunk '{0}' created", this._name));
 }
Example #4
        public SmallTablePartitioner(RecordSet Data, int PartitionCount)
            : base(Data, PartitionCount)
        {

            this._can_request = new bool[PartitionCount];
            for (int i = 0; i < PartitionCount; i++)
                this._can_request[i] = true;
            this._extents = TablePartitioner.Split(Data, PartitionCount);

        }
Example #5
        // RecordSet SELECTS //
        public static RecordSet SELECT(DataSet Data, FNodeSet Nodes, Predicate Where)
        {

            RecordSet rs = new RecordSet(Nodes.Columns);
            RecordWriter w = rs.OpenWriter();
            FastReadPlan plan = new FastReadPlan(Data, Where, Nodes, w);
            plan.Execute();
            w.Close();
            return rs;

        }
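A usage sketch of SELECT with no filter; Predicate.TrueForAll is taken from the reader and writer constructors in Examples #27 and #29, while the wrapper name and its parameters are hypothetical.

        // Hypothetical usage: project a set of expressions over a data set without filtering //
        public static RecordSet SelectAll(DataSet Data, FNodeSet Nodes)
        {
            return SELECT(Data, Nodes, Predicate.TrueForAll);
        }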
Example #6
 public RowCluster(string Name, DataSet Data, Predicate Where, FNodeSet Fields, FNode Weight, int Count)
 {
     this._data = Data;
     this._where = Where;
     this._fields = Fields;
     this._count = Count;
     this._rule = new RowClusterRuleEuclid();
     this._initializer = new RowClusterInitializerSpectrum();
     this._means = this._initializer.Initialize(Data, Where, Fields, Count);
     this._weight = Weight;
     this.Name = Name;
 }
Example #7
        public TNodeAppendToChunkAsync(TNode Parent, RecordSet UseParentData, FNodeSet UseFields)
            : base(Parent)
        {

            if (UseParentData.Columns.GetHashCode() != UseFields.Columns.GetHashCode())
                throw new Exception("Output table and fields passed are not compatible");

            this._ParentData = UseParentData;
            this._RecordCache = new RecordSet(UseParentData.Columns);
            this._Fields = UseFields;

        }
Example #8
        // Text serialization //
        public static string ToString(RecordSet Data, Key K, char ColumnDelim, char RowDelim)
        {

            StringBuilder sb = new StringBuilder();

            for (int i = 0; i < Data.Count; i++)
            {
                sb.Append(Data[i].ToString(K, ColumnDelim));
                if (i != Data.Count - 1) sb.Append(RowDelim);
            }
            return sb.ToString();

        }
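A usage sketch of the text serializer, assuming Key.Build(Data.Columns.Count) (used the same way in Example #25) yields a key covering every column; the wrapper name and delimiter choice are illustrative only.

        // Hypothetical usage: dump every column of a record set as tab-delimited rows //
        public static string ToTabDelimitedString(RecordSet Data)
        {
            Key all_columns = Key.Build(Data.Columns.Count);
            return ToString(Data, all_columns, '\t', '\n');
        }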
Example #9
        // Base Implementations //
        public override void Invoke()
        {

            // Sink the cache to the parent table //
            if (this._RecordCache.IsFull)
            {
                this._ParentData.Union(this._RecordCache);
                this._RecordCache = new RecordSet(this._ParentData.Columns);
            }

            // Add the record //
            this._RecordCache.Add(this._Fields.Evaluate());

        }
Example #10
        // Constructor //
        public IndexSet(RecordSet Data, Key K)
            : base(new Schema(SCHEMA_SQL))
        {

            // Main loop //
            for (int i = 0; i < Data.Count; i++)
            {
                Record r = Record.Stitch(new Cell(Data[i].GetHashCode(K)), new Cell(i));
                this.Add(r);
            }

            // Sort table //
            this.Sort(new Key(0));

        }
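A usage sketch of the index constructor, assuming the single-column Key(0) seen in the sort call above refers to the first column; the wrapper name and variable are hypothetical.

        // Hypothetical usage: build a hash index over the first column of a record set //
        public static IndexSet IndexOnFirstColumn(RecordSet Data)
        {
            return new IndexSet(Data, new Key(0));
        }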
Example #11
        internal static MergeAlgorithm Optimize(RecordSet T1, Key J1, RecordSet T2, Key J2)
        {

            double n1 = (double)T1.Count;
            double n2 = (double)T2.Count;

            double p = Math.Min(n1, n2) / Math.Max(n1, n2);

            // Test for sort merge //
            if (Key.EqualsStrict(T1.SortBy, J1) && Key.EqualsStrict(T2.SortBy, J2)) return MergeAlgorithm.SortMerge;

            // Test for nested loop //
            if (p <= NESTED_LOOP_RATIO) return MergeAlgorithm.NestedLoop;

            // Otherwise //
            return MergeAlgorithm.HashTable;

        }
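A sketch of how a caller might act on the optimizer's decision; the method name and the descriptive strings are hypothetical, and only the MergeAlgorithm values that appear above are assumed to exist.

        // Hypothetical usage: report why a particular merge algorithm was selected //
        internal static string DescribeMergeChoice(RecordSet T1, Key J1, RecordSet T2, Key J2)
        {
            switch (Optimize(T1, J1, T2, J2))
            {
                case MergeAlgorithm.SortMerge: return "Both inputs are already sorted on their join keys";
                case MergeAlgorithm.NestedLoop: return "One input is much smaller than the other";
                default: return "General case: hash one input and probe with the other";
            }
        }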
Example #12
	    // Constructor //
	    public RecordReader(RecordSet From, Predicate Where)
	    {
		
		    this._ptrRecord = DEFAULT_POINTER;
		    this._Data = From;
		    this._Where = Where;
            
            // Assign the where to a register pointing to 'this' //
            StreamRegister reg = new StreamRegister(this);
            this._Where.Node.AssignRegister(reg);
            
            // Fix the default //
            if (!Where.Default)
		    {
			    this._IsFiltered = true;
                while (!this.CheckFilter && !this.EndOfData)
                    this.Advance();
		    }

            // This is used to handle the writer class that inherits the reader //
            if (From != null)
                this._columns = From.Columns;
	    }
Example #13
        public static DataSet CreateData(Workspace Enviro, Schema Columns, HScriptParser.Full_table_nameContext context)
        {

            // Get the name //
            string t_name = context.table_name().IDENTIFIER().GetText();

            // Global context //
            if (context.database_name() == null)
            {
                RecordSet rs = new RecordSet(Columns);
                Enviro.ChunkHeap.Reallocate(t_name, rs);
                return rs;
            }

            // Table context //
            if (context.database_name() != null)
            {
                string d_base = context.database_name().IDENTIFIER().GetText();
                if (!Enviro.Connections.Exists(d_base))
                    throw new HScriptCompileException("Connection to '{0}' does not exist", d_base);
                string dir = Enviro.Connections[d_base];
                Table t = new Table(dir, t_name, Columns);
                return t;
            }

            throw new HScriptCompileException("Cannot create data '{0}'", t_name);

        }
Example #14
 public abstract void Execute(RecordSet Chunk);
Example #15
        public void ImportFromInterim(RecordSet InterimData)
        {

            int MapperCount = this.BaseMappers.Count;
            int[] Signiture = this.BaseReducers.Signiture;
            int TotalCellCount = MapperCount + Signiture.Sum();

            // Check that this is the correct size //
            if (InterimData.Columns.Count != TotalCellCount)
                throw new Exception(string.Format("RecordSet passed [{0}] has few columns than required by deserializer [{1}]", InterimData.Columns.Count, TotalCellCount));

            // Import the data //
            for (int i = 0; i < InterimData.Count; i++)
            {

                // Build map key //
                RecordBuilder KeyBuilder = new RecordBuilder();
                for (int j = 0; j < MapperCount; j++)
                {
                    KeyBuilder.Add(InterimData[i][j]);
                }

                // Build compound record //
                RecordBuilder ValueBuilder = new RecordBuilder();
                for (int j = MapperCount; j < TotalCellCount; j++)
                {
                    ValueBuilder.Add(InterimData[i][j]);
                }

                // Add to dictionary //
                this._cache.Add(KeyBuilder.ToRecord(), CompoundRecord.FromRecord(ValueBuilder.ToRecord(), Signiture));

            }

        }
Example #16
        public RecordSet ToInterim()
        {

            // Get schema //
            Schema s = Schema.Join(this._Maps.Columns, this._Reducers.GetInterimSchema);

            // Build the table //
            RecordSet rs = new RecordSet(s);

            // Load //
            foreach (KeyValuePair<Record, CompoundRecord> t in this._cache)
            {
                Record r = Record.Join(t.Key, t.Value.ToRecord());
                rs.Add(r);
            }

            return rs;

        }
Example #17
        // To Methods //
        public RecordSet ToFinal(FNodeSet Fields, Predicate Filter)
        {

            RecordSet rs = new RecordSet(Fields.Columns);
            RecordWriter w = rs.OpenWriter();

            this.WriteToFinal(w, Fields);
            w.Close();

            return rs;

        }
Example #18
 public RecordWriter(RecordSet Data, Predicate Having)
     : base(Data, Having)
 {
 }
Example #19
        public static Matrix ToMatrixWithIntercept(RecordSet Data, Key K)
        {

            Matrix m = new Matrix(Data.Count, K.Count + 1);

            for (int i = 0; i < Data.Count; i++)
            {

                m[i, 0] = 1;
                for (int j = 0; j < K.Count; j++)
                {
                    int k = K[j];
                    m[i, j + 1] = Data[i][k].valueDOUBLE;
                }

            }
            return m;

        }
Example #20
        public static RecordWriter GetWriter(Workspace Enviro, Schema Columns, HScriptParser.Return_actionContext context)
        {

            // Get the table name //
            string name = context.full_table_name().table_name().IDENTIFIER().GetText();
            string db =
                (context.full_table_name().database_name() == null)
                ? "GLOBAL"
                : context.full_table_name().database_name().GetText();

            // Figure out if we need to append //
            bool appendto = (context.K_INSERT() != null);

            // Global -- Append //
            if (context.full_table_name().database_name() == null && appendto)
            {
                if (Enviro.ChunkHeap.Exists(name))
                    return Enviro.ChunkHeap[name].OpenWriter();
                throw new HScriptCompileException(string.Format("Chunk '{0}' does not exist", name));
            }

            // Static -- Append //
            if (appendto)
            {
                string fullname = db + "." + name;
                if (Enviro.Exists(db, name))
                    return Enviro.GetStaticTable(db, name).OpenWriter();
                throw new HScriptCompileException(string.Format("Table '{0}' does not exist", fullname));
            }

            // Global -- Create New //
            if (context.full_table_name().database_name() == null)
            {
                RecordSet data = new RecordSet(Columns);
                Enviro.ChunkHeap.Reallocate(name, data);
                return data.OpenWriter();
            }

            // Static -- Create New //
            string dir = Enviro.Connections[db];
            Table t = new Table(dir, name, Columns);
            return t.OpenWriter();

        }
Example #21
        public override RecordSet Extend(DataSet Data, FNodeSet ClusterVariables, FNodeSet OtherKeepers, Predicate Where)
        {

            // Check that the ClusterVariable count matches the internal node set count //
            if (ClusterVariables.Count != this._fields.Count)
                throw new ArgumentException("The cluster variable count passed does not match the internal cluster variable count");

            // Create the selectors //
            FNodeSet values = OtherKeepers.CloneOfMe();
            FNode n = new FNodeResult(null, new RowClusterCellFunction(this._rule, this._means));
            foreach (FNode t in ClusterVariables.Nodes)
            {
                n.AddChildNode(t.CloneOfMe());
            }
            values.Add("CLUSTER_ID", n);

            // Build a recordset //
            RecordSet rs = new RecordSet(values.Columns);
            RecordWriter w = rs.OpenWriter();

            // Run a fast select //
            FastReadPlan plan = new FastReadPlan(Data, Where, values, w);
            plan.Execute();
            w.Close();

            return rs;

        }
Example #22
        private double CompareChanges(RecordSet Current, RecordSet New)
        {

            // Sort both record sets on the same key so that records line up positionally //
            Key k = Key.Build(1, Current.Columns.Count - 1);
            Current.Sort(k);
            New.Sort(k);

            // Accumulate the squared element-wise differences //
            double Distance = 0;
            for (int i = 0; i < Current.Count; i++)
            {

                for (int j = 0; j < Current.Columns.Count; j++)
                {

                    Distance += Math.Pow(Current[i][j].DOUBLE - New[i][j].DOUBLE, 2);

                }

            }

            return Distance;

        }
Example #23
        public static Matrix ToMatrix(RecordSet Data, Key K)
        {

            Matrix m = new Matrix(Data.Count, K.Count);

            for (int i = 0; i < Data.Count; i++)
            {

                for (int j = 0; j < K.Count; j++)
                {
                    int k = K[j];
                    m[i, j] = Data[i][k].valueDOUBLE;
                }

            }
            return m;

        }
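Example #25 provides a key-free convenience overload for the intercept variant; a matching overload for ToMatrix would look like the sketch below, assuming Key.Build(n) covers the first n columns (such an overload may already exist elsewhere in the source).

        // Hypothetical convenience overload mirroring Example #25: use every column of the record set //
        public static Matrix ToMatrix(RecordSet Data)
        {
            Key k = Key.Build(Data.Columns.Count);
            return ToMatrix(Data, k);
        }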
Example #24
        public virtual int NearestNeighbor(Cell[] Value, RecordSet Means)
        {

            double current_distance = 0;
            int current_index = 0;
            double min_distance = double.MaxValue;
            int min_index = 0;
            foreach (Record r in Means._Cache)
            {

                Record t = Record.Subrecord(r, 2, r.Count - 2); // the first value of the mean record is always the key, second is the count

                current_distance = this.Distance(Value, t);
                if (current_distance < min_distance)
                {
                    min_distance = current_distance;
                    min_index = current_index;
                }
                current_index++;

            }

            return min_index;

        }
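NearestNeighbor defers to this.Distance, which is not shown in these snippets. Below is a sketch of a Euclidean rule consistent with the RowClusterRuleEuclid default in Example #6, treating the cell vector and the trimmed mean record as equal-length numeric vectors; the method name, its virtual shape, and the use of valueDOUBLE (seen in Examples #19 and #23) are assumptions.

        // Hypothetical sketch: Euclidean distance between a cell vector and a (key/count-trimmed) mean record //
        public virtual double Distance(Cell[] Value, Record Mean)
        {
            double d = 0;
            for (int i = 0; i < Value.Length; i++)
            {
                double delta = Value[i].valueDOUBLE - Mean[i].valueDOUBLE;
                d += delta * delta;
            }
            return Math.Sqrt(d);
        }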
Example #25
 public static Matrix ToMatrixWithIntercept(RecordSet Data)
 {
     Key k = Key.Build(Data.Columns.Count);
     return ToMatrixWithIntercept(Data, k);
 }
Example #26
        public override RecordSet Initialize(DataSet Data, Predicate Where, FNodeSet Fields, int Clusters)
        {

            // Get the min of each field //
            AggregateSet set1 = new AggregateSet();
            for (int i = 0; i < Fields.Count; i++)
            {
                set1.Add(new AggregateMin(Fields[i].CloneOfMe()), Fields.Alias(i));
            }

            // Get the max of each field //
            AggregateSet set2 = new AggregateSet();
            for (int i = 0; i < Fields.Count; i++)
            {
                set2.Add(new AggregateMax(Fields[i].CloneOfMe()), Fields.Alias(i));
            }

            // Render the min and max //
            RecordSet rs1 = AggregatePlan.Render(Data, Where, new FNodeSet(), set1);
            RecordSet rs2 = AggregatePlan.Render(Data, Where, new FNodeSet(), set2);

            // Create the output means table //
            RecordSet rs = new RecordSet(Schema.Join(new Schema("key int, count double"), rs1.Columns));

            // Fill in the gaps //
            for (int i = 0; i < Clusters; i++)
            {

                if (i == 0)
                {
                    RecordBuilder rb = new RecordBuilder();
                    rb.Add(0);
                    rb.Add(0D);
                    rb.Add(rs1[0]);
                    rs.Add(rb.ToRecord());
                }
                else if (i == Clusters - 1)
                {
                    RecordBuilder rb = new RecordBuilder();
                    rb.Add(Clusters - 1);
                    rb.Add(0D);
                    rb.Add(rs2[0]);
                    rs.Add(rb.ToRecord());
                }
                else
                {

                    RecordBuilder rb = new RecordBuilder();
                    rb.Add(i);
                    rb.Add(0D);
                    for (int j = 0; j < rs1.Columns.Count; j++)
                    {
                        double clus = (double)Clusters;
                        double jay = (double)j;
                        rb.Add(rs1[0][j].DOUBLE + (rs2[0][j].DOUBLE - rs1[0][j].DOUBLE) / clus * jay);
                    }
                    rs.Add(rb.ToRecord());

                }

            }

            return rs;

        }
Example #27
	    public RecordReader(RecordSet From)
            :this(From, Predicate.TrueForAll)
	    {
	    }
Example #28
 public RowClusterCellFunction(RowClusterRule Rule, RecordSet Means)
     : base("row_cluster", -1, null, CellAffinity.INT)
 {
     this._rule = Rule;
     this._means = Means;
 }
Example #29
        public RecordWriter(RecordSet Data) 
            :this(Data, Predicate.TrueForAll)
	    {
	    }
Example #30
 public abstract void Union(RecordSet Data);