public void Select_COUNT__Distinct_Column1__From_Table()
{
    // COUNT over a DISTINCT column must render the DISTINCT keyword inside the aggregate.
    using (var selectQuery = new Select().Count(Distinct.Column("Column1")).From.Table("Table"))
    {
        var sql = selectQuery.ToSql();

        Assert.That(sql, Is.EqualTo("SELECT COUNT(DISTINCT Column1) FROM Table"));
    }
}
/// <summary>
/// Adds the given projection, routing it to the matching internal slot.
/// </summary>
/// <param name="projection">The projection to add.</param>
/// <returns>This collector, to allow chaining.</returns>
/// <exception cref="NotSupportedException">
/// Thrown for Distinct projections (not implemented yet) and for any
/// unrecognized projection type.
/// </exception>
public IExitOperationsCollector AddProjection(IProjection projection)
{
    // NOTE(review): these IsAssignableFrom checks compare against the types of
    // the *current* field instances, so they assume each field is pre-populated
    // with a prototype of its supported projection type — confirm.
    if (projection.GetType().IsAssignableFrom(distinct.GetType()))
    {
        this.distinct = (Distinct)projection;
        //TODO: Distinct doesn't work yet
        log.Error("Distinct is not ready yet");
        throw new NotSupportedException();
    }

    // BUG FIX: the row-count branch was a separate `if`, so a RowCountProjection
    // was assigned and then still fell through to the aggregate check's `else`,
    // throwing NotSupportedException for a supported projection. Chaining with
    // `else if` makes exactly one branch handle each projection.
    if (projection.GetType().IsAssignableFrom(rowCountProjection.GetType()))
    {
        rowCountProjection = (RowCountProjection)projection;
    }
    else if (projection.GetType().IsAssignableFrom(aggregateProjection.GetType()))
    {
        // "avg" gets its own slot; ordinal ignore-case avoids culture-sensitive
        // ToLower() surprises (e.g. the Turkish 'I').
        if (projection.ToString().StartsWith("avg", StringComparison.OrdinalIgnoreCase))
        {
            this.avgProjection = (AggregateProjection)projection;
        }
        else
        {
            this.aggregateProjection = (AggregateProjection)projection;
        }
    }
    else
    {
        log.Error("Adding an unsupported Projection: " + projection.GetType().Name);
        throw new NotSupportedException();
    }

    return this;
}
public void SparqlSetDistinct2()
{
    INode shared = this._factory.CreateBlankNode();
    INode one = (1).ToLiteral(this._factory);
    INode two = (2).ToLiteral(this._factory);

    Set first = new Set();
    first.Add("a", shared);
    first.Add("_:b", one);

    Set second = new Set();
    second.Add("a", shared);
    second.Add("_:b", two);

    Assert.NotEqual(first, second);

    Multiset data = new Multiset();
    data.Add(first);
    data.Add(second);
    Assert.Equal(2, data.Count);

    Table table = new Table(data);
    // Second constructor argument true keeps temporary (_:) variables, so the
    // two solutions stay distinct.
    Distinct distinct = new Distinct(table, true);

    SparqlEvaluationContext context = new SparqlEvaluationContext(null, null);
    BaseMultiset results = distinct.Evaluate(context);

    // Distinct should yield two results and temporary variables should still be present.
    Assert.Equal(2, results.Count);
    Assert.True(results.ContainsVariable("_:b"));
}
public void GetShoulReturnNumberOfDistinctNumbersWhenGivenA(int[] a, int expected)
{
    // Arrange
    var counter = new Distinct();

    // Act
    var actual = counter.Get(a);

    // Assert
    Assert.AreEqual(expected, actual);
}
public void test_solution_givenArrayOfInterer_returnsNumberOfDistinctElements(int[] given, int expected)
{
    // Arrange
    var sut = new Distinct();

    // Act
    var result = sut.solution(given);

    // Assert
    Assert.AreEqual(expected, result);
}
public void SparqlSetDistinct1()
{
    INode shared = this._factory.CreateBlankNode();
    INode one = (1).ToLiteral(this._factory);
    INode two = (2).ToLiteral(this._factory);

    Set first = new Set();
    first.Add("a", shared);
    first.Add("_:b", one);

    Set second = new Set();
    second.Add("a", shared);
    second.Add("_:b", two);

    Assert.AreNotEqual(first, second);

    Multiset data = new Multiset();
    data.Add(first);
    data.Add(second);
    Assert.AreEqual(2, data.Count);

    Table table = new Table(data);
    // Default Distinct strips temporary (_:) variables, so the two sets
    // collapse into a single solution.
    Distinct distinct = new Distinct(table);

    SparqlEvaluationContext context = new SparqlEvaluationContext(null, null);
    BaseMultiset results = distinct.Evaluate(context);

    // Distinct should yield a single result since temporary variables are stripped.
    Assert.AreEqual(1, results.Count);
    Assert.IsFalse(results.ContainsVariable("_:b"));
}
public void DistinctNumbersReturnASequence()
{
    // Each call should yield the next integer of a monotonically increasing
    // sequence starting at 1.
    for (int expected = 1; expected <= 5; expected++)
    {
        Assert.AreEqual(expected, Distinct.Number());
    }
}
/// <summary>
/// Processes a Distinct modifier
/// </summary>
/// <param name="distinct">Distinct modifier</param>
/// <param name="context">SPARQL Evaluation Context</param>
public virtual BaseMultiset ProcessDistinct(Distinct distinct, SparqlEvaluationContext context)
{
    // Fall back to the processor's own context when none was supplied.
    if (context == null)
    {
        context = this.GetContext();
    }

    return distinct.Evaluate(context);
}
public void DistinctSolutionTest()
{
    // Arrange
    var solver = new Distinct();
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    // Act
    var actual = solver.Solve(input);

    // Assert: the distinct values are 1, 2 and 3.
    Assert.AreEqual(3, actual);
}
public void Distinct_Should_Hanlde_Empty_Array()
{
    // Arrange
    var sut = new Distinct();
    var input = new int[0];

    // Act
    var count = sut.solution(input);

    // Assert: no elements means no distinct values.
    Assert.Equal(0, count);
}
public void Distinct_Should_Process_Simple_Array()
{
    // Arrange
    var sut = new Distinct();
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    // Act
    var count = sut.solution(input);

    // Assert: the distinct values are 1, 2 and 3.
    Assert.Equal(3, count);
}
public void Distinct_Should_Process_Complex_Array()
{
    // Arrange
    var sut = new Distinct();
    var input = new[]
    {
        2, 1, 1, 2, 3, 1, 2, 1, 1, 2, 3, 1, 2, 1, 1, 2, 3, 1,
        5, 7, 8, 3, 2, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 6, 7
    };

    // Act
    var count = sut.solution(input);

    // Assert: the distinct values are the digits 0 through 9.
    Assert.Equal(10, count);
}
public void SolutionSimpleTest()
{
    // Arrange
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    // Act
    var actual = Distinct.Solution(input);

    // Assert: the distinct values are 1, 2 and 3.
    Assert.AreEqual(3, actual);
}
public void TestBasicCase()
{
    // Given
    var input = new[] { 2, 1, 1, 2, 3, 1 };
    var sut = new Distinct();

    // When
    var actual = sut.Solution(input);

    // Then: the distinct values are 1, 2 and 3.
    Assert.AreEqual(3, actual);
}
public void Case01()
{
    // Arrange
    var sut = new Distinct();
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    // Act
    var actual = sut.solution(input);

    // Assert: the distinct values are 1, 2 and 3.
    Assert.Equal(3, actual);
}
public void SolutionEmptyTest()
{
    // Arrange: an empty array has no distinct values.
    var input = new int[0];

    // Act
    var actual = Distinct.Solution(input);

    // Assert
    Assert.AreEqual(0, actual);
}
public void SolutionSimpleTwoValuesTest()
{
    // Arrange: duplicates collapse to a single distinct value.
    var input = new[] { 2, 2 };

    // Act
    var actual = Distinct.Solution(input);

    // Assert
    Assert.AreEqual(1, actual);
}
public void Solution_SmallArray_Correct()
{
    // Arrange - Given
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    // Act - When
    var actual = Distinct.Solution(input);

    // Assert - Then: the distinct values are 1, 2 and 3.
    Assert.Equal(3, actual);
}
public void Distinct_EvalDistinct_HappyPath_Last_ExpectDistinctResults()
{
    //------------Setup for test--------------------------
    var env = CreateEnvironmentWithData();

    //------------Execute Test---------------------------
    var modified = Distinct.evalDistinct(
        env,
        new List<string>() { "[[Rec(*).a]]" },
        new List<string> { "[[Rec(*).a]]" },
        0,
        new List<string> { "[[Bec().a]]" });

    //------------Assert Results-------------------------
    var res = CommonFunctions.evalResultToString(EvaluationFunctions.eval(modified, 0, false, "[[Bec(*).a]]"));
    // BUG FIX: Assert.AreEqual takes (expected, actual); the original call had
    // them swapped, which yields a misleading message on failure.
    Assert.AreEqual("1,2,3", res);
}
/// <summary>
/// Runs a server-side distinct query over the named collection and returns
/// the distinct values of the given field as strings.
/// </summary>
public static List<string> Distinct<T>(string token, string collectionName, string field, FilterDefinition<BsonDocument> filter, DistinctOptions options = null)
{
    // Build the distinct request payload for the entities endpoint.
    var request = new Distinct
    {
        CollectionName = collectionName,
        DistinctOptions = options,
        Filter = filter.ToJson(),
        Field = field,
        OutputMode = JsonOutputMode.Strict,
        CultureCode = CultureInfo.CurrentCulture.Name
    };

    var response = Send<DistinctResponse>("entities/" + collectionName + "/distinct", request, "POST");

    // A null response is treated as "no values" rather than an error.
    return response == null ? new List<string>() : response.Result;
}
/// <summary>
/// Dispatches a serialized <c>MergePacket</c> to the merge operation named by
/// its <c>Type</c> and returns the serialized result. Any failure is logged
/// and the serialized integer -1 is returned instead of throwing.
/// </summary>
/// <param name="bytes">Serialized <c>MergePacket</c> received from a merger client.</param>
/// <returns>The serialized merge result, or serialized -1 on error / unknown type.</returns>
private static byte[] ProcessMergerClient(byte[] bytes)
{
    try
    {
        // 1. Deserialize the merge packet
        var client = Serializer.DeserializeFromBytes<MergePacket>(bytes);

        // 2. Handle the message according to its merge type
        if (client.Type == MergePacket.MergeType.Sum)
        {
            Sum sum = new Sum(client.TimeStamp, client.AppName);
            double result = sum.Compute(Serializer.DeserializeFromBytes<double>(client.Data));
            string flag = "sum_" + client.TimeStamp + "_" + client.AppName;
            Logger.Info(flag + ",result is " + result);
            // Release the per-flag state once the result is computed.
            sum.Remove(flag);
            return Serializer.SerializeToBytes(result);
        }
        if (client.Type == MergePacket.MergeType.Average)
        {
            Average average = new Average(client.TimeStamp, client.AppName);
            double result = average.Compute(Serializer.DeserializeFromBytes<double>(client.Data));
            string flag = "average_" + client.TimeStamp + "_" + client.AppName;
            Logger.Info(flag + ",result is " + result);
            average.Remove(flag);
            return Serializer.SerializeToBytes(result);
        }
        if (client.Type == MergePacket.MergeType.Distinct)
        {
            Distinct distinct = new Distinct(client.TimeStamp, client.AppName);
            List<object> objects = distinct.Compute(Serializer.DeserializeFromBytes<List<object>>(client.Data));
            string flag = "distinct_" + client.TimeStamp + "_" + client.AppName;
            Logger.Info(flag + ", result count is " + objects.Count);
            // NOTE(review): unlike Sum/Average, no Remove(flag) is called here —
            // confirm whether Distinct holds per-flag state that should be released.
            return Serializer.SerializeToBytes(objects);
        }
        if (client.Type == MergePacket.MergeType.CombineTable)
        {
            CombineTable combineTable = new CombineTable(client.TimeStamp, client.AppName);
            string flag = "combine_table_" + client.TimeStamp + "_" + client.AppName;
            Logger.Info(flag + ", combine table.");
            Hashtable objects = combineTable.Compute(Serializer.DeserializeFromBytes<Hashtable>(client.Data));
            return Serializer.SerializeToBytes(objects);
        }
        if (client.Type == MergePacket.MergeType.CombineList)
        {
            CombineList combineList = new CombineList(client.TimeStamp, client.AppName);
            string flag = "combine_list_" + client.TimeStamp + "_" + client.AppName;
            Logger.Info(flag + ", combine list.");
            List<object> objects = combineList.Compute(Serializer.DeserializeFromBytes<List<object>>(client.Data));
            return Serializer.SerializeToBytes(objects);
        }
        if (client.Type == MergePacket.MergeType.CombineSort)
        {
            // CombineSort gets its own try/catch so a sort failure returns the
            // sentinel array { -1 } instead of the generic -1 below.
            try
            {
                CombineSort combineSort = new CombineSort(client.TimeStamp, client.AppName);
                string flag = "combine_sort_" + client.TimeStamp + "_" + client.AppName;
                Logger.Info(flag + ", combine sort.");
                object[] objects = combineSort.ArrayCompute(Serializer.DeserializeFromBytes(client.Data));
                if (objects == null)
                {
                    Logger.Warn("Result is null.");
                }
                else
                {
                    Logger.Info("Result count is " + objects.Count());
                }
                return Serializer.SerializeToBytes(objects);
            }
            catch (Exception exception)
            {
                Logger.Error(exception);
            }
            object[] errorObjects = { -1 };
            return Serializer.SerializeToBytes(errorObjects);
        }
    }
    catch (Exception exception)
    {
        Logger.Error(exception);
    }
    // Unknown merge type, or deserialization/compute failure.
    return Serializer.SerializeToBytes(-1);
}
/// <summary>
/// Processes a Distinct modifier.
/// </summary>
/// <param name="distinct">Distinct modifier.</param>
/// <param name="context">SPARQL Evaluation Context.</param>
public override BaseMultiset ProcessDistinct(Distinct distinct, SparqlEvaluationContext context)
{
    // Route through the explanation wrapper so evaluation is logged/explained
    // before delegating to the base processor.
    return ExplainAndEvaluate<Distinct>(distinct, context, base.ProcessDistinct);
}
/// <summary>
/// Optimizes the given algebra.
/// </summary>
/// <param name="algebra">Algebra.</param>
/// <returns>Optimized algebra.</returns>
public ISparqlAlgebra Optimise(ISparqlAlgebra algebra)
{
    // This optimizer rewrites DISTINCT/REDUCED(SELECT(ORDER BY(x))) into
    // ORDER BY(DISTINCT/REDUCED(SELECT(x))) so ordering is applied after
    // de-duplication. The rewrite is only safe when every ordering variable
    // is also projected; otherwise the two branches fall through to the
    // generic Transform.
    if (algebra is Distinct)
    {
        Distinct distinct = (Distinct)algebra;
        if (distinct.InnerAlgebra is Select)
        {
            Select select = (Select)distinct.InnerAlgebra;
            if (!select.IsSelectAll)
            {
                if (select.InnerAlgebra is OrderBy)
                {
                    bool ok = true;
                    OrderBy orderBy = (OrderBy)select.InnerAlgebra;
                    List<String> projectVars = select.SparqlVariables.Select(v => v.Name).ToList();
                    // Every variable used by the ordering must be projected,
                    // or reordering the operators would change results.
                    foreach (String var in orderBy.Ordering.Variables)
                    {
                        if (!projectVars.Contains(var))
                        {
                            ok = false;
                            break;
                        }
                    }
                    if (ok)
                    {
                        // Safe to apply the optimization
                        Select newSelect = new Select(orderBy.InnerAlgebra, false, select.SparqlVariables);
                        Distinct newDistinct = new Distinct(newSelect);
                        return (new OrderBy(newDistinct, orderBy.Ordering));
                    }
                }
            }
        }
        // If we reach here then the optimization is not applicable;
        // recurse into the inner algebra instead.
        return (((Distinct)algebra).Transform(this));
    }
    else if (algebra is Reduced)
    {
        // Mirror of the Distinct case above, for REDUCED.
        Reduced reduced = (Reduced)algebra;
        if (reduced.InnerAlgebra is Select)
        {
            Select select = (Select)reduced.InnerAlgebra;
            if (!select.IsSelectAll)
            {
                if (select.InnerAlgebra is OrderBy)
                {
                    bool ok = true;
                    OrderBy orderBy = (OrderBy)select.InnerAlgebra;
                    List<String> projectVars = select.SparqlVariables.Select(v => v.Name).ToList();
                    foreach (String var in orderBy.Ordering.Variables)
                    {
                        if (!projectVars.Contains(var))
                        {
                            ok = false;
                            break;
                        }
                    }
                    if (ok)
                    {
                        // Safe to apply the optimization
                        Select newSelect = new Select(orderBy.InnerAlgebra, false, select.SparqlVariables);
                        Reduced newReduced = new Reduced(newSelect);
                        return (new OrderBy(newReduced, orderBy.Ordering));
                    }
                }
            }
        }
        // If we reach here then the optimization is not applicable.
        return (((Reduced)algebra).Transform(this));
    }
    else if (algebra is ITerminalOperator)
    {
        // Terminal operators have no inner algebra to optimize.
        return (algebra);
    }
    else if (algebra is IUnaryOperator)
    {
        return (((IUnaryOperator)algebra).Transform(this));
    }
    else if (algebra is IAbstractJoin)
    {
        return (((IAbstractJoin)algebra).Transform(this));
    }
    else
    {
        return (algebra);
    }
}
public void EnumberableTest()
{
    // 50 consecutive integers starting at -25 are all unique values.
    var input = Enumerable.Range(-25, 50).ToArray();

    Assert.AreEqual(50, Distinct.Solution(input));
}
public void AllZeroTest()
{
    // A freshly allocated int array is all zeros: exactly one distinct value.
    var input = new int[1000];

    Assert.AreEqual(1, Distinct.Solution(input));
}
public void ExampleTest()
{
    // The canonical example: the distinct values are 1, 2 and 3.
    var input = new[] { 2, 1, 1, 2, 3, 1 };

    Assert.AreEqual(3, Distinct.Solution(input));
}
public void AssignUnique(IEnumerable<string> distinctList, IEnumerable<string> valueList, IEnumerable<string> resList, int update)
{
    // Evaluate the distinct operation against the current environment and
    // replace the environment with the updated result.
    _env = Distinct.EvalDistinct(_env, distinctList, valueList, update, resList);
}
/// <summary>
/// Creates a new output of the type named by the clicked button's
/// CommandParameter and attaches it to the selected group, then expands and
/// selects the new item in the tree.
/// </summary>
/// <param name="sender">The clicked Button; its CommandParameter is the output type name.</param>
/// <param name="selectedItem">The currently selected tree item; must resolve to a GroupViewModel.</param>
public static void CreateOutput(object sender, object selectedItem)
{
    // If an output collection is selected, retarget the call to its parent group.
    if (selectedItem is OutputCollectionViewModel outputCollectionViewModel)
    {
        CreateOutput(sender, outputCollectionViewModel.Parent);
        return;
    }
    // Silently ignore selections that are not groups.
    if (!(selectedItem is GroupViewModel entityGroupViewModel))
    {
        return;
    }
    var button = (Button)sender;
    var type = (string)button.CommandParameter;
    OutputBase entity;
    // Map the type name to its factory call, seeding placeholder values the
    // user is expected to edit afterwards.
    switch (type)
    {
        case nameof(RenderArrayHint): entity = RenderArrayHint.New(); break;
        case nameof(RenderNextDate): entity = RenderNextDate.New("Name", "0001-01-01", "Weekly"); break;
        case nameof(RenderIndex): entity = RenderIndex.New(); break;
        case nameof(RenderEntity): entity = RenderEntity.New(); break;
        case nameof(RenderLink): entity = RenderLink.New(); break;
        case nameof(RenderProperty): entity = RenderProperty.New("Display Name", "Property Name"); break;
        case nameof(RenderValue): entity = RenderValue.New("Name", "Value"); break;
        case nameof(RenderTypeName): entity = RenderTypeName.New("Name"); break;
        case nameof(RenderTaxPeriodDate): entity = RenderTaxPeriodDate.New("Name", "2020", "1"); break;
        case nameof(RenderConstant): entity = RenderConstant.New("Name", "Constant Name", typeof(DateTime)); break;
        case nameof(RenderTaxPeriod): entity = RenderTaxPeriod.New("Name", "Monthly", "0001-01-01"); break;
        case nameof(RenderDateAdd): entity = RenderDateAdd.New("Name", "0001-01-01", "Day", "1"); break;
        case nameof(RenderUniqueKeyFromLink): entity = RenderUniqueKeyFromLink.New("Name", "[Link]"); break;
        case nameof(Avg): entity = Avg.New("Name", "Property"); break;
        case nameof(Max): entity = Max.New("Name", "Property"); break;
        case nameof(Min): entity = Min.New("Name", "Property"); break;
        case nameof(Sum): entity = Sum.New("Name", "Property"); break;
        case nameof(Count): entity = Count.New("Name"); break;
        case nameof(ExpressionCalculator): entity = ExpressionCalculator.New("Name", "1 + 2 - 3 * 4 / 5", rounding: RoundingOption.NotSet); break;
        case nameof(Distinct): entity = Distinct.New("Name", "Property"); break;
        default: throw new ArgumentOutOfRangeException();
    }
    // Attach the new output to the model and create its view model.
    entityGroupViewModel.Element.Outputs.Add(entity);
    var viewModelCollection = entityGroupViewModel.Children.OfType<OutputCollectionViewModel>().First();
    var viewModel = new OutputViewModel(entity, viewModelCollection);
    viewModelCollection.Children.Add(viewModel);
    // Expand the tree down to the new item and select it.
    entityGroupViewModel.IsExpanded = true;
    viewModelCollection.IsExpanded = true;
    viewModel.IsSelected = true;
    viewModel.IsExpanded = true;
}
/// <summary>
/// Converts the Query into it's SPARQL Algebra representation (as represented in the Leviathan API)
/// </summary>
/// <returns></returns>
public ISparqlAlgebra ToAlgebra()
{
    //Firstly Transform the Root Graph Pattern to SPARQL Algebra
    ISparqlAlgebra pattern;
    if (this._rootGraphPattern != null)
    {
        if (Options.AlgebraOptimisation)
        {
            //If using Algebra Optimisation may use a special algebra in some cases
            switch (this.SpecialType)
            {
                case SparqlSpecialQueryType.DistinctGraphs:
                    pattern = new SelectDistinctGraphs(this.Variables.First(v => v.IsResultVariable).Name);
                    break;
                case SparqlSpecialQueryType.AskAnyTriples:
                    pattern = new AskAnyTriples();
                    break;
                case SparqlSpecialQueryType.NotApplicable:
                default:
                    //If not just use the standard transform
                    pattern = this._rootGraphPattern.ToAlgebra();
                    break;
            }
        }
        else
        {
            //If not using Algebra Optimisation just use the standard transform
            pattern = this._rootGraphPattern.ToAlgebra();
        }
    }
    else
    {
        // No root graph pattern means an empty Basic Graph Pattern.
        pattern = new Bgp();
    }

    //If we have a BINDINGS clause then we'll add it into the algebra here
    if (this._bindings != null)
    {
        pattern = new Bindings(this._bindings, pattern);
    }

    //Then we apply any optimisers followed by relevant solution modifiers.
    //The order operators are wrapped in below is the order they are applied
    //during evaluation, so it is significant.
    switch (this._type)
    {
        case SparqlQueryType.Ask:
            //Apply Algebra Optimisation is enabled
            if (Options.AlgebraOptimisation)
            {
                pattern = this.ApplyAlgebraOptimisations(pattern);
            }
            return (new Ask(pattern));

        case SparqlQueryType.Construct:
        case SparqlQueryType.Describe:
        case SparqlQueryType.DescribeAll:
        case SparqlQueryType.Select:
        case SparqlQueryType.SelectAll:
        case SparqlQueryType.SelectAllDistinct:
        case SparqlQueryType.SelectAllReduced:
        case SparqlQueryType.SelectDistinct:
        case SparqlQueryType.SelectReduced:
            //Apply Algebra Optimisation if enabled
            if (Options.AlgebraOptimisation)
            {
                pattern = this.ApplyAlgebraOptimisations(pattern);
            }

            //GROUP BY is the first thing applied
            if (this._groupBy != null)
            {
                pattern = new GroupBy(pattern, this._groupBy);
            }

            //After grouping we do projection
            //This will generate the values for any Project Expressions and Aggregates
            pattern = new Project(pattern, this.Variables);

            //Add HAVING clause after the projection
            if (this._having != null)
            {
                pattern = new Having(pattern, this._having);
            }

            //We can then Order our results
            //We do ordering before we do Select but after Project so we can order by any of
            //the project expressions/aggregates and any variable in the results even if
            //it won't be output as a result variable
            if (this._orderBy != null)
            {
                pattern = new OrderBy(pattern, this._orderBy);
            }

            //After Ordering we apply Select
            //Select effectively trims the results so only result variables are left
            //This doesn't apply to CONSTRUCT since any variable may be used in the Construct Template
            //so we don't want to eliminate anything
            if (this._type != SparqlQueryType.Construct)
            {
                pattern = new Select(pattern, this.Variables);
            }

            //If we have a Distinct/Reduced then we'll apply those after Selection
            if (this._type == SparqlQueryType.SelectAllDistinct || this._type == SparqlQueryType.SelectDistinct)
            {
                pattern = new Distinct(pattern);
            }
            else if (this._type == SparqlQueryType.SelectAllReduced || this._type == SparqlQueryType.SelectReduced)
            {
                pattern = new Reduced(pattern);
            }

            //Finally we can apply any limit and/or offset
            if (this._limit >= 0 || this._offset > 0)
            {
                pattern = new Slice(pattern, this._limit, this._offset);
            }

            return (pattern);

        default:
            throw new RdfQueryException("Unable to convert unknown Query Types to SPARQL Algebra");
    }
}
/// <summary>
/// Converts the Query into it's SPARQL Algebra representation (as represented in the Leviathan API)
/// </summary>
/// <returns></returns>
public ISparqlAlgebra ToAlgebra()
{
    //Depending on how the query gets built we may not have had graph pattern optimization applied
    //which we should do here if query optimization is enabled
    if (!this.IsOptimised && Options.QueryOptimisation)
    {
        this.Optimise();
    }

    //Firstly Transform the Root Graph Pattern to SPARQL Algebra
    ISparqlAlgebra algebra;
    if (this._rootGraphPattern != null)
    {
        if (Options.AlgebraOptimisation)
        {
            //If using Algebra Optimisation may use a special algebra in some cases
            switch (this.SpecialType)
            {
                case SparqlSpecialQueryType.DistinctGraphs:
                    algebra = new SelectDistinctGraphs(this.Variables.First(v => v.IsResultVariable).Name);
                    break;
                case SparqlSpecialQueryType.AskAnyTriples:
                    algebra = new AskAnyTriples();
                    break;
                case SparqlSpecialQueryType.NotApplicable:
                default:
                    //If not just use the standard transform
                    algebra = this._rootGraphPattern.ToAlgebra();
                    break;
            }
        }
        else
        {
            //If not using Algebra Optimisation just use the standard transform
            algebra = this._rootGraphPattern.ToAlgebra();
        }
    }
    else
    {
        //No root graph pattern means empty BGP
        algebra = new Bgp();
    }

    //If we have a top level VALUES clause then we'll add it into the algebra here
    if (this._bindings != null)
    {
        algebra = Join.CreateJoin(algebra, new Bindings(this._bindings));
    }

    //Then we apply any optimisers followed by relevant solution modifiers.
    //The wrapping order below mirrors SPARQL evaluation order, so it matters.
    switch (this._type)
    {
        case SparqlQueryType.Ask:
            //Apply Algebra Optimisation is enabled
            if (Options.AlgebraOptimisation)
            {
                algebra = this.ApplyAlgebraOptimisations(algebra);
            }
            return (new Ask(algebra));

        case SparqlQueryType.Construct:
        case SparqlQueryType.Describe:
        case SparqlQueryType.DescribeAll:
        case SparqlQueryType.Select:
        case SparqlQueryType.SelectAll:
        case SparqlQueryType.SelectAllDistinct:
        case SparqlQueryType.SelectAllReduced:
        case SparqlQueryType.SelectDistinct:
        case SparqlQueryType.SelectReduced:
            //Apply Algebra Optimisation if enabled
            if (Options.AlgebraOptimisation)
            {
                algebra = this.ApplyAlgebraOptimisations(algebra);
            }

            //GROUP BY is the first thing applied
            //This applies if there is a GROUP BY or if there are aggregates
            //With no GROUP BY it produces a single group of all results
            if (this._groupBy != null || this._vars.Any(v => v.IsAggregate))
            {
                algebra = new GroupBy(algebra, this._groupBy, this._vars.Where(v => v.IsAggregate));
            }

            //After grouping we do projection
            //We introduce an Extend for each Project Expression
            foreach (SparqlVariable var in this._vars)
            {
                if (var.IsProjection)
                {
                    algebra = new Extend(algebra, var.Projection, var.Name);
                }
            }

            //Add HAVING clause after the projection
            if (this._having != null)
            {
                algebra = new Having(algebra, this._having);
            }

            //We can then Order our results
            //We do ordering before we do Select but after Project so we can order by any of
            //the project expressions/aggregates and any variable in the results even if
            //it won't be output as a result variable
            if (this._orderBy != null)
            {
                algebra = new OrderBy(algebra, this._orderBy);
            }

            //After Ordering we apply Select
            //Select effectively trims the results so only result variables are left
            //This doesn't apply to CONSTRUCT since any variable may be used in the Construct Template
            //so we don't want to eliminate anything
            if (this._type != SparqlQueryType.Construct)
            {
                algebra = new Select(algebra, this.Variables);
            }

            //If we have a Distinct/Reduced then we'll apply those after Selection
            if (this._type == SparqlQueryType.SelectAllDistinct || this._type == SparqlQueryType.SelectDistinct)
            {
                algebra = new Distinct(algebra);
            }
            else if (this._type == SparqlQueryType.SelectAllReduced || this._type == SparqlQueryType.SelectReduced)
            {
                algebra = new Reduced(algebra);
            }

            //Finally we can apply any limit and/or offset
            if (this._limit >= 0 || this._offset > 0)
            {
                algebra = new Slice(algebra, this._limit, this._offset);
            }

            return (algebra);

        default:
            throw new RdfQueryException("Unable to convert unknown Query Types to SPARQL Algebra");
    }
}
/// <summary>
/// Sorts the distinct X and Y dimension value lists of a crosstab according to
/// the request's sortXdimension / sortYdimension flags ("A" ascending,
/// "D" descending), writing the sorted results into XdistinctList /
/// YdistinctList and optionally dumping intermediate CSVs for debugging.
/// </summary>
/// <remarks>
/// NOTE(review): most reference-typed parameters (dictionaries/lists) are
/// mutated in place as outputs — confirm callers rely on that, since the
/// assignments to copyXdistinctList / copyYdistinctList replace only the local
/// parameter references.
/// </remarks>
public void sortingXY(ConcurrentDictionary<string, clientMachine.userPreference> userPreference, List<decimal> distinctListChecksum, List<decimal> distinctSet, Dictionary<decimal, decimal> unsorted2SortedCheksum, decimal requestID, string outputFolder, ConcurrentDictionary<string, clientMachine.clientSession> clientSessionVariable, ConcurrentDictionary<decimal, clientMachine.request> requestDict, ConcurrentDictionary<decimal, clientMachine.response> responseDict, Dictionary<int, Dictionary<double, string>> distinctXramKey2Value, Dictionary<int, Dictionary<double, string>> distinctYramKey2Value, Dictionary<int, Dictionary<double, string>> distinctRamKey2Value, Dictionary<int, List<double>> XdistinctList, Dictionary<int, List<double>> YdistinctList, List<int> sortedXdimension, List<int> sortedYdimension, Dictionary<int, Dictionary<double, double>> ramKey2Order, Dictionary<int, Dictionary<double, double>> ramOrder2Key, Dictionary<int, Dictionary<double, double>> distinctXramKey2Order, Dictionary<int, Dictionary<double, double>> distinctXramOrder2Key, Dictionary<int, Dictionary<double, double>> distinctYramKey2Order, Dictionary<int, Dictionary<double, double>> distinctYramOrder2Key, Dictionary<int, List<double>> copyXdistinctList, Dictionary<int, List<double>> copyYdistinctList, List<int> crosstabDimension, List<int> yDimension, Dictionary<int, List<double>> distinctList, List<int> revisedX, List<int> revisedY)
{
    // reorganize master (key to value) for X,Y distinctList by assigned "=" function
    for (int i = 0; i < crosstabDimension.Count; i++)
    {
        distinctXramKey2Order[i] = ramKey2Order[crosstabDimension[i]];
        distinctXramOrder2Key[i] = ramOrder2Key[crosstabDimension[i]];
    }
    for (int i = 0; i < yDimension.Count; i++)
    {
        // distinctYramKey2Value[i] = ramKey2Valuegz[yDimension[i]];
        distinctYramKey2Order[i] = ramKey2Order[yDimension[i]];
        distinctYramOrder2Key[i] = ramOrder2Key[yDimension[i]];
    }

    // select X, Y dimension from distinctList to output XdistinctList and YdistinctList
    Distinct currentdistinct = new Distinct();
    copyXdistinctList = currentdistinct.distinctDB(distinctList, distinctRamKey2Value, revisedX); // get distinct distinctList by selected X dimensions
    copyYdistinctList = currentdistinct.distinctDB(distinctList, distinctRamKey2Value, revisedY); // get distinct distinctList by selected Y dimensions

    Dictionary<int, List<double>> tempXdistinctList = new Dictionary<int, List<double>>();
    Dictionary<int, List<double>> tempYdistinctList = new Dictionary<int, List<double>>();
    List<decimal> XdimensionSortingChecksumList = new List<decimal>(); // ChecksumList for Sorting of X dimensions
    List<decimal> YdimensionSortingChecksumList = new List<decimal>(); // ChecksumList for Sorting Y dimensions
    List<Sorting> checksum2OrderX = new List<Sorting>();
    List<Sorting> checksum2OrderY = new List<Sorting>();
    int eachChecksum2OrderRow = 0;
    StringBuilder csvString = new StringBuilder();

    if (requestDict[requestID].sortXdimension == "A" || requestDict[requestID].sortXdimension == "D") // Sort X Dimension ////////////////////////////////////////////
    {
        var startSortXTime = DateTime.Now;
        if (requestDict[requestID].debugOutput == "Y")
        {
            // Dump the unsorted X distinct list to debug/XdistinctList.csv.
            if (!Directory.Exists(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash))
            {
                Directory.CreateDirectory(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash);
            }
            using (StreamWriter toDisk = new StreamWriter(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash + "XdistinctList.csv"))
            {
                csvString.Append("distinctXramKey2Value[0][i]" + "," + "distinctXramKey2Value[i][XdistinctList[i][j]]" + "," + "XdistinctList[i][j]" + Environment.NewLine);
                for (int i = 0; i < copyXdistinctList.Count; i++) // output XdistinctList
                {
                    for (int j = 0; j < copyXdistinctList[i].Count; j++)
                    {
                        csvString.Append(distinctXramKey2Value[i][0] + "," + distinctXramKey2Value[i][copyXdistinctList[i][j]] + "," + copyXdistinctList[i][j] + Environment.NewLine);
                    }
                }
                toDisk.Write(csvString);
                toDisk.Close();
                csvString.Clear();
            }
        }

        for (int i = 0; i < copyXdistinctList.Count; i++) // convert key to order and save as tempXdistinctList
        {
            tempXdistinctList.Add(i, new List<double>());
            for (int j = 0; j < copyXdistinctList[i].Count; j++)
            {
                tempXdistinctList[i].Add(distinctXramKey2Order[i][copyXdistinctList[i][j]]); // convert master key to sorting order of the key
            }
        }

        // return Y dimensionSortingChecksumList
        Distinct getXY = new Distinct();
        XdimensionSortingChecksumList = getXY.getXYcheckSumList(tempXdistinctList, distinctXramKey2Value, sortedXdimension);
        for (int i = 0; i < XdimensionSortingChecksumList.Count; i++)
        {
            var checksum2OrderXRow = new Sorting
            {
                sortingChecksum = XdimensionSortingChecksumList[i],
                sortingOrder = i
            };
            checksum2OrderX.Add(checksum2OrderXRow);
        }

        if (requestDict[requestID].sortXdimension == "A") // Sort checksum2OrderX by ascending or descending
        //if (dataSortingOrder[0] == "sortAscending")
        {
            var sortChecksum2OrderX = from eachChecksum2OrderX in checksum2OrderX
                                      orderby eachChecksum2OrderX.sortingChecksum ascending
                                      select eachChecksum2OrderX;
            outputsortChecksum2OrderX(sortChecksum2OrderX);
        }
        if (requestDict[requestID].sortXdimension == "D")
        {
            var sortChecksum2OrderX = from eachChecksum2OrderX in checksum2OrderX
                                      orderby eachChecksum2OrderX.sortingChecksum descending
                                      select eachChecksum2OrderX;
            outputsortChecksum2OrderX(sortChecksum2OrderX);
        }
    }

    // Local function: writes the sorted X order back into XdistinctList (and a
    // debug CSV when requested). Row 0 is kept in place as a header row.
    void outputsortChecksum2OrderX(IOrderedEnumerable<Sorting> sortChecksum2OrderX) // output sorting result to csv and to XdistinctList
    {
        if (requestDict[requestID].debugOutput == "Y")
        {
            if (!Directory.Exists(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash))
            {
                Directory.CreateDirectory(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash);
            }
            using (StreamWriter toDisk = new StreamWriter(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash + "sortChecksum2OrderX.csv"))
            {
                eachChecksum2OrderRow = 0;
                csvString.Append("column" + "," + "sortingChecksum" + "," + "sortingOrder" + "," + "XdistinctList[i][eachChecksum2OrderX.sortingOrder]" + "," + "distinctXramKey2Value[i][XdistinctList[i][eachChecksum2OrderX.sortingOrder]]" + Environment.NewLine);
                for (int i = 0; i < copyXdistinctList.Count; i++)
                {
                    csvString.Append("0" + "," + "0" + "," + distinctXramKey2Value[i][copyXdistinctList[i][0]] + "," + copyXdistinctList[i][0] + Environment.NewLine);
                    foreach (var eachChecksum2OrderX in sortChecksum2OrderX)
                    {
                        if (eachChecksum2OrderX.sortingOrder != 0)
                        {
                            csvString.Append(distinctXramKey2Value[i][0] + "," + eachChecksum2OrderX.sortingChecksum + "," + eachChecksum2OrderX.sortingOrder + "," + copyXdistinctList[i][eachChecksum2OrderX.sortingOrder] + "," + distinctXramKey2Value[i][copyXdistinctList[i][eachChecksum2OrderX.sortingOrder]] + Environment.NewLine);
                        }
                    }
                    eachChecksum2OrderRow++;
                }
                toDisk.Write(csvString);
                toDisk.Close();
                csvString.Clear();
            }
        }
        eachChecksum2OrderRow = 0;
        for (int i = 0; i < copyXdistinctList.Count; i++)
        {
            XdistinctList.Add(i, new List<double>());
            XdistinctList[i].Add(0);
            foreach (var eachChecksum2OrderX in sortChecksum2OrderX)
            {
                if (eachChecksum2OrderX.sortingOrder != 0)
                {
                    XdistinctList[i].Add(copyXdistinctList[i][eachChecksum2OrderX.sortingOrder]);
                }
                eachChecksum2OrderRow++;
            }
        }
    }

    if (requestDict[requestID].sortYdimension == "A" || requestDict[requestID].sortYdimension == "D") // Sort Y Dimension ////////////////////////////////////////////
    {
        var startSortYTime = DateTime.Now;
        if (requestDict[requestID].debugOutput == "Y")
        {
            // Dump the unsorted Y distinct list to debug/YdistinctList.csv.
            if (!Directory.Exists(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash))
            {
                Directory.CreateDirectory(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash);
            }
            using (StreamWriter toDisk = new StreamWriter(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash + "YdistinctList.csv"))
            {
                csvString.Append("distinctYramKey2Value[0][i]" + "," + "distinctYramKey2Value[i][copyYdistinctList[i][j]]" + "," + "YdistinctList[i][j]" + Environment.NewLine);
                for (int i = 0; i < copyYdistinctList.Count; i++) // output YdistinctList
                {
                    for (int j = 0; j < copyYdistinctList[i].Count; j++)
                    {
                        csvString.Append(distinctYramKey2Value[i][0] + "," + distinctYramKey2Value[i][copyYdistinctList[i][j]] + "," + copyYdistinctList[i][j] + Environment.NewLine);
                    }
                }
                toDisk.Write(csvString);
                toDisk.Close();
                csvString.Clear();
            }
        }

        for (int i = 0; i < copyYdistinctList.Count; i++) // convert key to order and save as tempYdistinctList
        {
            tempYdistinctList.Add(i, new List<double>());
            for (int j = 0; j < copyYdistinctList[i].Count; j++)
            {
                tempYdistinctList[i].Add(distinctYramKey2Order[i][copyYdistinctList[i][j]]); // convert master key to sorting order of the key
            }
        }

        // return Y dimensionSortingChecksumList
        Distinct getXY = new Distinct();
        YdimensionSortingChecksumList = getXY.getXYcheckSumList(tempYdistinctList, distinctYramKey2Value, sortedYdimension);
        for (int i = 0; i < YdimensionSortingChecksumList.Count; i++)
        {
            var checksum2OrderYRow = new Sorting
            {
                sortingChecksum = YdimensionSortingChecksumList[i],
                sortingOrder = i
            };
            checksum2OrderY.Add(checksum2OrderYRow);
        }

        if (requestDict[requestID].sortYdimension == "A")
        {
            var sortChecksum2OrderY = from eachChecksum2OrderY in checksum2OrderY
                                      orderby eachChecksum2OrderY.sortingChecksum ascending
                                      select eachChecksum2OrderY;
            outputsortChecksum2OrderY(sortChecksum2OrderY);
        }
        if (requestDict[requestID].sortYdimension == "D")
        {
            var sortChecksum2OrderY = from eachChecksum2OrderY in checksum2OrderY
                                      orderby eachChecksum2OrderY.sortingChecksum descending
                                      select eachChecksum2OrderY;
            outputsortChecksum2OrderY(sortChecksum2OrderY);
        }

        // Local function: writes the sorted Y order back into YdistinctList,
        // records the unsorted→sorted checksum mapping, and optionally dumps a
        // debug CSV. Row 0 is kept in place as a header row.
        void outputsortChecksum2OrderY(IOrderedEnumerable<Sorting> sortChecksum2OrderY) // output sorting result to csv and to YdistinctList
        {
            if (requestDict[requestID].debugOutput == "Y")
            {
                if (!Directory.Exists(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash))
                {
                    Directory.CreateDirectory(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash);
                }
                using (StreamWriter toDisk = new StreamWriter(outputFolder + userPreference["system"].slash + "debug" + userPreference["system"].slash + "sortChecksum2OrderY.csv"))
                {
                    eachChecksum2OrderRow = 0;
                    csvString.Append("column" + "," + "sortingChecksum" + "," + "sortingOrder" + "," + "YdistinctList[i][eachChecksum2OrderY.sortingOrder]" + "," + "distinctYramKey2Value[i][YdistinctList[i][eachChecksum2OrderY.sortingOrder]]" + Environment.NewLine);
                    for (int i = 0; i < copyYdistinctList.Count; i++)
                    {
                        csvString.Append("0" + "," + "0" + "," + distinctYramKey2Value[i][copyYdistinctList[i][0]] + "," + copyYdistinctList[i][0] + Environment.NewLine);
                        foreach (var eachChecksum2OrderY in sortChecksum2OrderY)
                        {
                            if (eachChecksum2OrderY.sortingOrder != 0)
                            {
                                csvString.Append(distinctYramKey2Value[i][0] + "," + eachChecksum2OrderY.sortingChecksum + "," + eachChecksum2OrderY.sortingOrder + "," + copyYdistinctList[i][eachChecksum2OrderY.sortingOrder] + "," + distinctYramKey2Value[i][copyYdistinctList[i][eachChecksum2OrderY.sortingOrder]] + Environment.NewLine);
                            }
                        }
                        eachChecksum2OrderRow++;
                    }
                    toDisk.Write(csvString);
                    toDisk.Close();
                    csvString.Clear();
                }
            }
            // Record the unsorted→sorted checksum mapping and collect the keys.
            for (int i = 0; i < copyYdistinctList.Count; i++)
            {
                foreach (var eachChecksum2OrderY in sortChecksum2OrderY)
                {
                    var key = distinctListChecksum[eachChecksum2OrderY.sortingOrder];
                    if (eachChecksum2OrderY.sortingOrder != 0 && !unsorted2SortedCheksum.ContainsKey(key))
                    {
                        unsorted2SortedCheksum.Add(key, eachChecksum2OrderY.sortingChecksum);
                        distinctSet.Add(key);
                    }
                }
                eachChecksum2OrderRow++;
            }
            eachChecksum2OrderRow = 0;
            for (int i = 0; i < copyYdistinctList.Count; i++)
            {
                YdistinctList.Add(i, new List<double>());
                YdistinctList[i].Add(0);
                foreach (var eachChecksum2OrderY in sortChecksum2OrderY)
                {
                    if (eachChecksum2OrderY.sortingOrder != 0)
                    {
                        YdistinctList[i].Add(copyYdistinctList[i][eachChecksum2OrderY.sortingOrder]);
                    }
                    eachChecksum2OrderRow++;
                }
            }
        }
    }
}