/// <summary>
/// Evaluates this binary arithmetic expression against <paramref name="row"/>.
/// A null or DBNull operand short-circuits evaluation and is returned as-is.
/// When the operator is ADD and either operand is a string, the operands are
/// concatenated as text; otherwise both operands are cast to IConvertible and
/// dispatched to the Numeric helpers. An unrecognized operator yields boxed 0
/// (the original fallback behavior).
/// </summary>
public override object Eval(DataRow row)
{
    object left = expr1.Eval(row);
    if (left == DBNull.Value || left == null)
    {
        // Propagate the missing value unchanged.
        return left;
    }

    object right = expr2.Eval(row);
    if (right == DBNull.Value || right == null)
    {
        return right;
    }

    // String concatenation takes precedence over numeric addition.
    if (op == Operation.ADD && (left is string || right is string))
    {
        return left.ToString() + right.ToString();
    }

    // Both operands must be convertible for the numeric operations below;
    // a non-IConvertible operand throws InvalidCastException, as before.
    IConvertible lhs = (IConvertible)left;
    IConvertible rhs = (IConvertible)right;

    switch (op)
    {
        case Operation.ADD:
            return Numeric.Add(lhs, rhs);
        case Operation.SUB:
            return Numeric.Subtract(lhs, rhs);
        case Operation.MUL:
            return Numeric.Multiply(lhs, rhs);
        case Operation.DIV:
            return Numeric.Divide(lhs, rhs);
        case Operation.MOD:
            return Numeric.Modulo(lhs, rhs);
        default:
            return 0;
    }
}
/// <summary>
/// Evaluates the aggregation over the rows referenced by 'column' (or over the
/// explicit 'rows' set when one was supplied) and returns the result, or
/// DBNull.Value when no result could be computed. Results are cached only for
/// aggregation over the table owning <paramref name="row"/>; the cache is
/// invalidated via the table's RowChanged event.
/// </summary>
public override object Eval(DataRow row)
{
    //TODO: implement a better caching strategy and a mechanism for cache invalidation.
    //for now only aggregation over the table owning 'row' (e.g. 'sum(parts)'
    //in contrast to 'sum(child.parts)') is cached.
    if (cacheResults && result != null && column.ReferencedTable == ReferencedTable.Self)
    {
        return (result);
    }

    // Reset the accumulator fields before re-aggregating.
    count = 0;
    result = null;

    // Pick the value set: an explicit row list if given, otherwise the rows
    // the column reference resolves to for this row.
    object[] values;
    if (rows == null)
    {
        values = column.GetValues(column.GetReferencedRows(row));
    }
    else
    {
        values = column.GetValues(rows);
    }

    // NOTE(review): only 'null' entries are skipped here; if GetValues can
    // return DBNull.Value entries they are passed to Aggregate and counted —
    // confirm that GetValues filters DBNull, or that Aggregate tolerates it.
    foreach (object val in values)
    {
        if (val == null)
        {
            continue;
        }
        count++;
        // Aggregate() folds the value into the 'result' field (sum/min/max/etc.).
        Aggregate((IConvertible)val);
    }

    // Post-process the functions that derive their value from the pass above.
    switch (function)
    {
        case AggregationFunction.StDev:
        case AggregationFunction.Var:
            result = CalcStatisticalFunction(values);
            break;
        case AggregationFunction.Avg:
            // Avoid division by zero: an empty set averages to DBNull.
            result = ((count == 0) ? DBNull.Value : Numeric.Divide(result, count));
            break;
        case AggregationFunction.Count:
            result = count;
            break;
    }

    // Normalize "no result" to DBNull so callers never see a raw null.
    if (result == null)
    {
        result = DBNull.Value;
    }

    if (cacheResults && column.ReferencedTable == ReferencedTable.Self)
    {
        // NOTE(review): this subscribes on every recompute; presumably
        // RowChangeHandler detaches itself when it invalidates the cache —
        // verify, otherwise duplicate handlers accumulate on the table.
        table = row.Table;
        row.Table.RowChanged += RowChangeHandler;
    }
    return (result);
}