Example No. 1
        public ExecuteCommandSetAxis(IBasicActivateItems basicActivator, AggregateConfiguration aggregate, string column) : base(basicActivator)
        {
            this.aggregate = aggregate;
            this.column    = column;

            if (!string.IsNullOrWhiteSpace(column))
            {
                // don't let them try to set an axis on a cohort aggregate configuration but do let them clear it if it somehow ended up with one
                if (aggregate.IsCohortIdentificationAggregate)
                {
                    SetImpossible($"AggregateConfiguration {aggregate} is a cohort identification aggregate and so cannot have an axis");
                    return;
                }

                if (aggregate.GetAxisIfAny() != null)
                {
                    SetImpossible($"AggregateConfiguration {aggregate} already has an axis");
                    return;
                }
            }
            else
            {
                if (aggregate.GetAxisIfAny() == null)
                {
                    SetImpossible($"AggregateConfiguration {aggregate} does not have an axis to clear");
                }
            }
        }
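A minimal usage sketch for the command above (hypothetical: it assumes an IBasicActivateItems instance called activator, an existing AggregateConfiguration called aggregate, and that IsImpossible/Execute are inherited from the command base class):

        // Try to set "AdmissionDate" as the axis column; the constructor checks above decide feasibility
        var cmd = new ExecuteCommandSetAxis(activator, aggregate, "AdmissionDate");

        if (!cmd.IsImpossible)
        {
            cmd.Execute();
        }

        // Passing null (or whitespace) for the column instead asks the command to clear an existing axis
        var clear = new ExecuteCommandSetAxis(activator, aggregate, null);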
Example No. 2
        private CohortQueryBuilderDependency AddDependency(AggregateConfiguration cohortSet)
        {
            if (cohortSet.Catalogue.IsApiCall())
            {
                if (CacheManager == null)
                {
                    throw new Exception($"Caching must be enabled to execute API call '{cohortSet}'");
                }

                if (!PluginCohortCompilers.Any(c => c.ShouldRun(cohortSet)))
                {
                    throw new Exception($"No PluginCohortCompilers claimed to support '{cohortSet}' in their ShouldRun method");
                }
            }

            var join = ChildProvider.AllJoinUses.Where(j => j.AggregateConfiguration_ID == cohortSet.ID).ToArray();

            if (join.Length > 1)
            {
                throw new NotSupportedException($"There are {join.Length} joins configured to AggregateConfiguration {cohortSet}");
            }

            var d = new CohortQueryBuilderDependency(cohortSet, join.SingleOrDefault(), ChildProvider, PluginCohortCompilers);

            _dependencies.Add(d);

            return(d);
        }
Example No. 3
        public CohortQueryBuilderDependency(AggregateConfiguration cohortSet,
                                            JoinableCohortAggregateConfigurationUse patientIndexTableIfAny, ICoreChildProvider childProvider)
        {
            _childProvider         = childProvider;
            CohortSet              = cohortSet;
            PatientIndexTableIfAny = patientIndexTableIfAny;

            //record the IsExtractionIdentifier column for the log (helps with debugging count issues)
            var eis = cohortSet?.AggregateDimensions?.Where(d => d.IsExtractionIdentifier).ToArray();

            //Multiple IsExtractionIdentifier columns are a big problem but that is handled elsewhere
            if (eis != null && eis.Length == 1)
            {
                ExtractionIdentifierColumn = eis[0];
            }

            if (PatientIndexTableIfAny != null)
            {
                var join = _childProvider.AllJoinables.SingleOrDefault(j =>
                                                                       j.ID == PatientIndexTableIfAny.JoinableCohortAggregateConfiguration_ID);

                if (join == null)
                {
                    throw new Exception("ICoreChildProvider did not know about the provided patient index table");
                }

                JoinedTo = _childProvider.AllAggregateConfigurations.SingleOrDefault(ac =>
                                                                                     ac.ID == join.AggregateConfiguration_ID);

                if (JoinedTo == null)
                {
                    throw new Exception("ICoreChildProvider did not know about the provided patient index table AggregateConfiguration");
                }
            }
        }
Example No. 4
        public CacheCommitIdentifierList(AggregateConfiguration configuration, string sql, DataTable results, DatabaseColumnRequest identifierColumn, int timeout)
            : base(AggregateOperation.IndexedExtractionIdentifierList, configuration, sql, results, timeout, new [] { identifierColumn })
        {
            //advise them if they are trying to cache an identifier list but the DataTable has more than 1 column
            if (results.Columns.Count != 1)
            {
                throw new NotSupportedException("The DataTable did not have exactly 1 column (it had " + results.Columns.Count + " columns).  This makes it incompatible with committing to the Cache as an IdentifierList");
            }

            //advise them if they are trying to cache a cache query itself!
            if (sql.Trim().StartsWith(CachedAggregateConfigurationResultsManager.CachingPrefix))
            {
                throw new NotSupportedException("Sql for the query started with '" + CachedAggregateConfigurationResultsManager.CachingPrefix + "' which implies you ran some SQL code to fetch some stuff from the cache and then committed it back into the cache (obliterating the record of what the originally executed query was).  This is referred to as Inception Caching and isn't allowed.  Note to developers: this happens if user caches a query then runs the query again (fetching it from the cache) and somehow tries to commit the cache fetch request back into the cache as an overwrite");
            }

            //throw away nulls
            foreach (var r in results.Rows.Cast <DataRow>().ToArray())
            {
                if (r[0] == null || r[0] == DBNull.Value)
                {
                    results.Rows.Remove(r);
                }
            }

            if (identifierColumn == null)
            {
                throw new Exception("You must specify the data type of the identifier column, identifierColumn was null");
            }

            _identifierColumn            = identifierColumn;
            _identifierColumn.AllowNulls = false;
            _identifierColumn.ColumnName = results.Columns[0].ColumnName;
        }
        /// <summary>
        /// Defines a new use case in which the given <see cref="AggregateConfiguration"/> will be turned into an SQL query and used to generate rows
        /// that will be released into the pipeline.  The source is fixed; the destination and middle components are open.
        /// </summary>
        /// <param name="aggregateConfiguration">The aggregate query that will be run to generate the rows</param>
        /// <param name="constrainByCohort">Only applies if <see cref="AggregateConfiguration"/> is a patient index table, specifying a cohort will only commit rows
        /// in which the patient id appears in the cohort</param>
        /// <param name="table">The destination table in which to put the matched records.
        /// <para> (table does not have to exist yet, you can use <see cref="DiscoveredDatabase.ExpectTable"/> to obtain a reference to a non-existent table)</para></param>
        public CreateTableFromAggregateUseCase(AggregateConfiguration aggregateConfiguration, ExtractableCohort constrainByCohort, DiscoveredTable table)
        {
            if (constrainByCohort == null)
            {
                var src = new AggregateConfigurationTableSource();
                src.PreInitialize(aggregateConfiguration, new ThrowImmediatelyDataLoadEventListener());
                src.TableName  = table.GetRuntimeName();
                ExplicitSource = src;
            }
            else
            {
                AddInitializationObject(constrainByCohort);

                var src = new PatientIndexTableSource();
                src.PreInitialize(aggregateConfiguration, new ThrowImmediatelyDataLoadEventListener());
                src.PreInitialize(constrainByCohort, new ThrowImmediatelyDataLoadEventListener());
                src.TableName  = table.GetRuntimeName();
                ExplicitSource = src;
            }

            AddInitializationObject(aggregateConfiguration);
            AddInitializationObject(aggregateConfiguration.Repository);
            AddInitializationObject(table.Database);

            GenerateContext();
        }
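A construction sketch for the use case above (assuming an existing AggregateConfiguration aggregateConfiguration and a DiscoveredDatabase db; actually running the use case through a pipeline is not shown):

        // The destination table does not have to exist yet; ExpectTable just returns a reference to it
        var destination = db.ExpectTable("MyAggregateResults");

        // Passing null for constrainByCohort selects the plain AggregateConfigurationTableSource branch
        var useCase = new CreateTableFromAggregateUseCase(aggregateConfiguration, null, destination);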
Example No. 6
 /// <summary>
 /// Sets the default aggregate configuration.
 /// </summary>
 /// <param name="configuration">The default aggregate configuration..</param>
 public void SetDefaultConfiguration(AggregateConfiguration configuration)
 {
     lock (m_lock)
     {
         m_defaultConfiguration = configuration;
     }
 }
Example No. 7
        public void CacheSingleTask(ICacheableTask cacheableTask, ExternalDatabaseServer queryCachingServer)
        {
            //if it is already cached don't inception cache
            var sql = Tasks[cacheableTask].CountSQL;

            if (sql.Trim().StartsWith(CachedAggregateConfigurationResultsManager.CachingPrefix))
            {
                return;
            }

            var manager = new CachedAggregateConfigurationResultsManager(queryCachingServer);

            var explicitTypes = new List <DatabaseColumnRequest>();

            AggregateConfiguration configuration = cacheableTask.GetAggregateConfiguration();

            try
            {
                //the identifier column that we read from
                ColumnInfo identifierColumnInfo = configuration.AggregateDimensions.Single(c => c.IsExtractionIdentifier).ColumnInfo;
                var        destinationDataType  = GetDestinationType(identifierColumnInfo.Data_type, cacheableTask, queryCachingServer);

                explicitTypes.Add(new DatabaseColumnRequest(identifierColumnInfo.GetRuntimeName(), destinationDataType));
            }
            catch (Exception e)
            {
                throw new Exception("Error occurred trying to find the data type of the identifier column when attempting to submit the result data table to the cache", e);
            }

            CacheCommitArguments args = cacheableTask.GetCacheArguments(sql, Tasks[cacheableTask].Identifiers, explicitTypes.ToArray());

            manager.CommitResults(args);
        }
Example No. 8
        /// <summary>
        /// Build a query based on the current <see cref="AggregateConfiguration"/>
        /// </summary>
        /// <param name="limitationSQL">See <see cref="LimitationSQL"/></param>
        /// <param name="countSQL">
        /// Intended purpose: The line of SELECT SQL that is an 'Aggregate Function' e.g. count(*).
        /// <para>Other purposes: You can use this to ram arbitrary lines of code into the SELECT section of the query, e.g. see CohortQueryBuilder </para>
        /// </param>
        /// <param name="aggregateConfigurationIfAny"><see cref="AggregateConfiguration"/> containing columns, filters, parameters etc for the GROUP BY</param>
        public AggregateBuilder(string limitationSQL, string countSQL, AggregateConfiguration aggregateConfigurationIfAny)
        {
            if (limitationSQL != null && limitationSQL.Trim().StartsWith("top", StringComparison.CurrentCultureIgnoreCase))
            {
                throw new Exception("Use AggregateTopX property instead of limitation SQL to acheive this");
            }

            _aggregateConfigurationIfAny = aggregateConfigurationIfAny;
            LimitationSQL    = limitationSQL;
            ParameterManager = new ParameterManager();
            CustomLines      = new List <CustomLine>();
            SQLOutOfDate     = true;

            SelectColumns = new List <QueryTimeColumn>();

            if (!string.IsNullOrWhiteSpace(countSQL))
            {
                _countColumn       = new AggregateCountColumn(countSQL);
                _countColumn.Order = int.MaxValue;//order these last
                AddColumn(_countColumn);
            }

            LabelWithComment = aggregateConfigurationIfAny != null ? aggregateConfigurationIfAny.Name : "";

            _queryLevelParameterProvider = aggregateConfigurationIfAny;

            if (aggregateConfigurationIfAny != null)
            {
                HavingSQL     = aggregateConfigurationIfAny.HavingSQL;
                AggregateTopX = aggregateConfigurationIfAny.GetTopXIfAny();
            }
        }
Example No. 9
        public void GroupBy_CategoryWithSum_WHEREStatement(DatabaseType type)
        {
            var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo);

            //setup the aggregate
            var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase));
            var configuration     = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category");
            var dimension         = new AggregateDimension(CatalogueRepository, categoryDimension, configuration);

            configuration.CountSQL = "sum(NumberInTrouble)";
            configuration.SaveToDatabase();

            try
            {
                //get the result of the aggregate
                var builder = new AggregateBuilder(null, configuration.CountSQL, configuration);
                builder.AddColumn(dimension);

                AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type);

                var resultTable = GetResultForBuilder(builder, tbl);

                //T is matched on all records so they are summed
                VerifyRowExist(resultTable, "T", 139);
                //VerifyRowExist(resultTable, "F", 60); //F does not have any records over 42 and isn't T so shouldnt be matched
                VerifyRowExist(resultTable, "E&, %a' mp;E", 59); //E has 1 records over 42
                VerifyRowExist(resultTable, "G", 100);           //47 + 53
                Assert.AreEqual(3, resultTable.Rows.Count);
            }
            finally
            {
                Destroy(tbl, configuration, catalogue, tableInfo);
            }
        }
Example No. 10
        public CohortQueryBuilder(AggregateConfiguration config, IEnumerable <ISqlParameter> globals, ICoreChildProvider childProvider) : this(globals, childProvider)
        {
            //set ourselves up to run with the root container
            configuration = config;

            SetChildProviderIfNull();
        }
Example No. 11
        /// <inheritdoc />
        public async IAsyncEnumerable <TIdentity> AggregateIdsAsync <TIdentity, TAggregate>(
            AggregateConfiguration <TIdentity, TAggregate> configuration,
            [EnumeratorCancellation] CancellationToken cancellationToken = default
            ) where TAggregate : Aggregate <TIdentity, TAggregate>
        {
            var constructors = configuration.Constructors;

            foreach (var (eventType, _) in constructors)
            {
                var cursor = await _events
                             .Find(e => e.EventType == eventType && e.EventNumber == 0)
                             .ToCursorAsync(cancellationToken);

                while (await cursor.MoveNextAsync(cancellationToken))
                {
                    foreach (var recordedEvent in cursor.Current)
                    {
                        var eventStreamParts    = recordedEvent.Stream.Split("-", 2);
                        var lastEventStreamPart = eventStreamParts[1];
                        var id = configuration.IdentityParser(lastEventStreamPart);
                        yield return(id);
                    }
                }
            }
        }
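A consumption sketch for the enumerator above (the store variable and the OrderId/Order types are placeholders for whatever implements this method and satisfies the Aggregate<TIdentity, TAggregate> constraint):

        await foreach (var id in store.AggregateIdsAsync<OrderId, Order>(orderConfiguration, cancellationToken))
        {
            // Each yielded value is the identity parsed from the tail of the event stream name
            Console.WriteLine(id);
        }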
Example No. 12
        public void GroupBy_CategoryWithSum_Correct(DatabaseType type)
        {
            var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo);

            //setup the aggregate
            var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase));
            var configuration     = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category");
            var dimension         = new AggregateDimension(CatalogueRepository, categoryDimension, configuration);

            configuration.CountSQL = "sum(NumberInTrouble)";
            configuration.SaveToDatabase();
            try
            {
                //get the result of the aggregate
                var builder = new AggregateBuilder(null, configuration.CountSQL, configuration);
                builder.AddColumn(dimension);
                var resultTable = GetResultForBuilder(builder, tbl);

                VerifyRowExist(resultTable, "T", 139);
                VerifyRowExist(resultTable, "F", 60);
                VerifyRowExist(resultTable, "E&, %a' mp;E", 137);
                VerifyRowExist(resultTable, "G", 100);
                Assert.AreEqual(4, resultTable.Rows.Count);
            }
            finally
            {
                Destroy(tbl, configuration, catalogue, tableInfo);
            }
        }
Example No. 13
        public void TestStripZeroSeries_Nulls(bool includeZeroSeries)
        {
            var dt = new DataTable();

            dt.Columns.Add("date");
            dt.Columns.Add("col1");
            dt.Columns.Add("col2");

            dt.Rows.Add("2001", 0, 12);
            dt.Rows.Add("2002", null, 333);

            UserSettings.IncludeZeroSeriesInGraphs = includeZeroSeries;

            AggregateConfiguration.AdjustGraphDataTable(dt);

            if (includeZeroSeries)
            {
                Assert.AreEqual(3, dt.Columns.Count);
            }
            else
            {
                // col1 should have been removed
                Assert.AreEqual(2, dt.Columns.Count);
                Assert.IsTrue(dt.Columns.Contains("date"));
                Assert.IsTrue(dt.Columns.Contains("col2"));
            }

            dt.Dispose();
        }
Example No. 14
 public void SetOrder(AggregateConfiguration child, int newOrder)
 {
     CatalogueRepository.Update("UPDATE CohortAggregateContainer_AggregateConfiguration SET [Order] = " + newOrder + " WHERE AggregateConfiguration_ID = @AggregateConfiguration_ID", new Dictionary <string, object>
     {
         { "AggregateConfiguration_ID", child.ID }
     });
 }
Example No. 15
        public void GroupBy_CategoryWithCount_Correct(DatabaseType type)
        {
            Catalogue catalogue;

            ExtractionInformation[] extractionInformations;
            TableInfo tableInfo;
            var       tbl = UploadTestDataAsTableToServer(type, out catalogue, out extractionInformations, out tableInfo);

            //setup the aggregate
            var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase));
            var configuration     = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category");
            var dimension         = new AggregateDimension(CatalogueRepository, categoryDimension, configuration);

            try
            {
                //get the result of the aggregate
                var builder = new AggregateBuilder(null, configuration.CountSQL, configuration);
                builder.AddColumn(dimension);
                var resultTable = GetResultForBuilder(builder, tbl);

                VerifyRowExist(resultTable, "T", 7);
                VerifyRowExist(resultTable, "F", 2);
                VerifyRowExist(resultTable, "E&, %a' mp;E", 3);
                VerifyRowExist(resultTable, "G", 2);
            }
            finally
            {
                Destroy(tbl, configuration, catalogue, tableInfo);
            }
        }
Example No. 16
        /// <summary>
        /// Creates a new AggregateGraph for the given dataset (<paramref name="cata"/>)
        /// </summary>
        /// <param name="cata"></param>
        /// <param name="name">The name to give the graph</param>
        /// <param name="dimension1">The first dimension e.g. pass only one dimension to create a bar chart</param>
        /// <param name="isAxis">True if <paramref name="dimension1"/> should be created as a axis (creates a line chart)</param>
        /// <param name="dimension2">Optional second dimension to create (this will be the pivot column)</param>
        private AggregateConfiguration CreateGraph(Catalogue cata, string name, string dimension1, bool isAxis, string dimension2)
        {
            var ac = new AggregateConfiguration(_repos.CatalogueRepository, cata, name);

            ac.CountSQL = "count(*) as NumberOfRecords";
            ac.SaveToDatabase();
            ac.IsExtractable = true;

            var mainDimension  = ac.AddDimension(GetExtractionInformation(cata, dimension1));
            var otherDimension = string.IsNullOrWhiteSpace(dimension2) ? null : ac.AddDimension(GetExtractionInformation(cata, dimension2));

            if (isAxis)
            {
                var axis = new AggregateContinuousDateAxis(_repos.CatalogueRepository, mainDimension);
                axis.StartDate     = "'1970-01-01'";
                axis.AxisIncrement = FAnsi.Discovery.QuerySyntax.Aggregation.AxisIncrement.Year;
                axis.SaveToDatabase();
            }

            if (otherDimension != null)
            {
                ac.PivotOnDimensionID = otherDimension.ID;
                ac.SaveToDatabase();
            }

            return(ac);
        }
Example No. 17
        protected override void SetBindings(BinderWithErrorProviderFactory rules, AggregateConfiguration databaseObject)
        {
            base.SetBindings(rules, databaseObject);

            Bind(tbDescription, "Text", "Description", a => a.Description);
            Bind(cbExtractable, "Checked", "IsExtractable", a => a.IsExtractable);
        }
Example No. 18
        /// <summary>
        /// Returns the name of the query cache results table for <paramref name="configuration"/> if the <paramref name="currentSql"/> matches
        /// the SQL run when the cache result was generated.  Returns null if no cache result is found or there are changes in the <paramref name="currentSql"/>
        /// since the cache result was generated.
        /// </summary>
        /// <param name="configuration"></param>
        /// <param name="operation"></param>
        /// <param name="currentSql"></param>
        /// <returns></returns>
        public IHasFullyQualifiedNameToo GetLatestResultsTable(AggregateConfiguration configuration, AggregateOperation operation, string currentSql)
        {
            var syntax   = _database.Server.GetQuerySyntaxHelper();
            var mgrTable = _database.ExpectTable(ResultsManagerTable);

            using (var con = _server.GetConnection())
            {
                con.Open();

                using (var cmd = DatabaseCommandHelper.GetCommand(
                           $@"Select 
{syntax.EnsureWrapped("TableName")},
{syntax.EnsureWrapped("SqlExecuted")} 
from {mgrTable.GetFullyQualifiedName()} 
WHERE 
{syntax.EnsureWrapped("AggregateConfiguration_ID")} = {configuration.ID} AND
{syntax.EnsureWrapped("Operation")} = '{operation}'", con))
                {
                    using (var r = cmd.ExecuteReader())
                        if (r.Read())
                        {
                            if (IsMatchOnSqlExecuted(r, currentSql))
                            {
                                string tableName = r["TableName"].ToString();
                                return(_database.ExpectTable(tableName));
                            }

                            return(null); //this means that there was outdated SQL, we could show this to the user at some point
                        }
                }
            }

            return(null);
        }
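A lookup sketch for the method above (assuming a CachedAggregateConfigurationResultsManager called manager, an AggregateConfiguration configuration and the SQL about to be run held in currentSql):

        var cached = manager.GetLatestResultsTable(configuration, AggregateOperation.IndexedExtractionIdentifierList, currentSql);

        if (cached != null)
        {
            // The cache entry still matches the SQL, so query the cached table instead of re-running the aggregate
            Console.WriteLine("Using cached results table " + cached.GetFullyQualifiedName());
        }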
Example No. 19
        /// <summary>
        /// Deletes any cache entries for <paramref name="configuration"/> in its role as <paramref name="operation"/>
        /// </summary>
        /// <param name="configuration"></param>
        /// <param name="operation"></param>
        /// <returns>True if a cache entry was found and deleted otherwise false</returns>
        /// <exception cref="Exception"></exception>
        public bool DeleteCacheEntryIfAny(AggregateConfiguration configuration, AggregateOperation operation)
        {
            var table    = GetLatestResultsTableUnsafe(configuration, operation);
            var mgrTable = _database.ExpectTable(ResultsManagerTable);

            if (table != null)
            {
                using (var con = _server.GetConnection())
                {
                    con.Open();

                    //drop the data
                    _database.ExpectTable(table.GetRuntimeName()).Drop();

                    //delete the record!
                    using (var cmd = DatabaseCommandHelper.GetCommand(
                               $"DELETE FROM {mgrTable.GetFullyQualifiedName()} WHERE AggregateConfiguration_ID = " +
                               configuration.ID + " AND Operation = '" + operation + "'", con))
                    {
                        int deletedRows = cmd.ExecuteNonQuery();
                        if (deletedRows != 1)
                        {
                            throw new Exception("Expected exactly 1 record in CachedAggregateConfigurationResults to be deleted when erasing its record of operation " + operation + " but there were " + deletedRows + " affected records");
                        }
                    }

                    return(true);
                }
            }

            return(false);
        }
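A matching eviction sketch for the method above (same assumed manager and configuration as in the previous sketch):

        // True only if a cached results table existed, was dropped and its manager record deleted
        bool evicted = manager.DeleteCacheEntryIfAny(configuration, AggregateOperation.IndexedExtractionIdentifierList);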
        /// <summary>
        /// Creates a new patient index table based on Biochemistry which selects the distinct dates of "NA" test results
        /// for every patient
        /// </summary>
        /// <param name="db"></param>
        /// <param name="people"></param>
        /// <param name="r"></param>
        /// <param name="cic"></param>
        /// <returns></returns>
        private JoinableCohortAggregateConfiguration SetupPatientIndexTable(DiscoveredDatabase db, PersonCollection people, Random r, CohortIdentificationConfiguration cic)
        {
            var syntax = db.Server.GetQuerySyntaxHelper();

            var tbl  = CreateDataset <Biochemistry>(db, people, 10000, r);
            var cata = Import(tbl, out _, out _, out _, out ExtractionInformation[] eis);

            var chi  = eis.Single(ei => ei.GetRuntimeName().Equals("chi", StringComparison.CurrentCultureIgnoreCase));
            var code = eis.Single(ei => ei.GetRuntimeName().Equals("TestCode", StringComparison.CurrentCultureIgnoreCase));
            var date = eis.Single(ei => ei.GetRuntimeName().Equals("SampleDate", StringComparison.CurrentCultureIgnoreCase));

            chi.IsExtractionIdentifier = true;
            chi.SaveToDatabase();

            var ac = new AggregateConfiguration(CatalogueRepository, cata, "NA by date");

            ac.AddDimension(chi);
            ac.AddDimension(code);
            ac.AddDimension(date);
            ac.CountSQL = null;

            cic.EnsureNamingConvention(ac);

            var and    = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND);
            var filter = new AggregateFilter(CatalogueRepository, "TestCode is NA", and);

            filter.WhereSQL = syntax.EnsureWrapped("TestCode") + " = 'NA'";
            filter.SaveToDatabase();

            ac.RootFilterContainer_ID = and.ID;
            ac.SaveToDatabase();

            return(new JoinableCohortAggregateConfiguration(CatalogueRepository, cic, ac));
        }
Example No. 21
        private void SetupCohort(out DiscoveredDatabase db, out CohortIdentificationConfiguration cic, out DataTable dt)
        {
            dt = new DataTable();
            dt.Columns.Add("PK");

            //add lots of rows
            for (int i = 0; i < 100000; i++)
            {
                dt.Rows.Add(i);
            }

            db = GetCleanedServer(DatabaseType.MicrosoftSQLServer, true);
            var tbl = db.CreateTable("CohortCompilerRunnerTestsTable", dt);

            var cata = Import(tbl);

            var ei = cata.CatalogueItems[0].ExtractionInformation;

            ei.IsExtractionIdentifier = true;
            ei.SaveToDatabase();

            var agg = new AggregateConfiguration(CatalogueRepository, cata, "MyAgg");

            agg.CountSQL = null;
            agg.SaveToDatabase();
            var dimension = new AggregateDimension(CatalogueRepository, ei, agg);

            cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic");
            cic.CreateRootContainerIfNotExists();
            cic.RootCohortAggregateContainer.AddChild(agg, 0);
        }
Example No. 22
        public override void Execute()
        {
            base.Execute();

            var cic = _targetCohortAggregateContainer.GetCohortIdentificationConfiguration();

            AggregateConfiguration child = cic.ImportAggregateConfigurationAsIdentifierList(_aggregateConfigurationCommand.Aggregate, CohortCommandHelper.PickOneExtractionIdentifier);

            //current contents
            var contents = _targetCohortAggregateContainer.GetOrderedContents().ToArray();

            //insert it at the beginning of the contents
            int minimumOrder = 0;

            if (contents.Any())
            {
                minimumOrder = contents.Min(o => o.Order);
            }

            //bump everyone down to make room
            _targetCohortAggregateContainer.CreateInsertionPointAtOrder(child, minimumOrder, true);
            _targetCohortAggregateContainer.AddChild(child, minimumOrder);
            Publish(_targetCohortAggregateContainer);

            AggregateCreatedIfAny = child;
        }
        /// <summary>
        /// Asserts that the given <paramref name="task"/> (when run on its own) crashed with the given
        /// <paramref name="expectedErrorMessageToContain"/>
        /// </summary>
        private void AssertCrashed(CohortCompiler compiler, AggregateConfiguration task, string expectedErrorMessageToContain)
        {
            var acResult = compiler.Tasks.Single(t => t.Key is AggregationTask a && a.Aggregate.Equals(task));

            Assert.AreEqual(CompilationState.Crashed, acResult.Key.State);
            StringAssert.Contains(expectedErrorMessageToContain, acResult.Key.CrashMessage.Message);
        }
Example No. 24
        protected override void SetUp()
        {
            base.SetUp();

            _c         = new Catalogue(CatalogueRepository, "AggregateBuilderTests");
            _cataItem1 = new CatalogueItem(CatalogueRepository, _c, "Col1");
            _cataItem2 = new CatalogueItem(CatalogueRepository, _c, "Col2");

            _ti          = new TableInfo(CatalogueRepository, "T1");
            _columnInfo1 = new ColumnInfo(CatalogueRepository, "Col1", "varchar(100)", _ti);
            _columnInfo2 = new ColumnInfo(CatalogueRepository, "Col2", "date", _ti);

            _ei1 = new ExtractionInformation(CatalogueRepository, _cataItem1, _columnInfo1, _columnInfo1.Name);
            _ei2 = new ExtractionInformation(CatalogueRepository, _cataItem2, _columnInfo2, _columnInfo2.Name);

            _configuration = new AggregateConfiguration(CatalogueRepository, _c, "MyConfig");

            _dimension1 = new AggregateDimension(CatalogueRepository, _ei1, _configuration);
            _dimension2 = new AggregateDimension(CatalogueRepository, _ei2, _configuration);

            _dimension1.Order = 1;
            _dimension1.SaveToDatabase();
            _dimension2.Order = 2;
            _dimension2.SaveToDatabase();
        }
Example No. 25
        /// <summary>
        /// Creates a calculator for one of the standard aggregates.
        /// </summary>
        public static IAggregateCalculator CreateStandardCalculator(
            NodeId aggregateId,
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            bool stepped,
            AggregateConfiguration configuration)
        {
            for (int ii = 0; ii < s_Mappings.Length; ii++)
            {
                if (s_Mappings[ii].AggregateId == aggregateId)
                {
                    return((IAggregateCalculator)Activator.CreateInstance(
                               s_Mappings[ii].Calculator,
                               aggregateId,
                               startTime,
                               endTime,
                               processingInterval,
                               stepped,
                               configuration));
                }
            }

            return(null);
        }
Example No. 26
        public CacheCommitExtractableAggregate(AggregateConfiguration configuration, string sql, DataTable results, int timeout)
            : base(AggregateOperation.ExtractableAggregateResults, configuration, sql, results, timeout)
        {
            if (results.Columns.Count == 0)
            {
                throw new ArgumentException("The DataTable that you claimed was an " + Operation + " had zero columns and therefore cannot be cached");
            }

            string[] suspectDimensions =
                configuration.AggregateDimensions
                .Where(d => d.IsExtractionIdentifier || d.HashOnDataRelease)
                .Select(d => d.GetRuntimeName())
                .ToArray();
            if (suspectDimensions.Any())
            {
                throw new NotSupportedException("Aggregate " + configuration +
                                                " contains dimensions marked as IsExtractionIdentifier or HashOnDataRelease (" +
                                                string.Join(",", suspectDimensions) +
                                                ") so the aggregate cannot be cached.  This would/could result in private patient identifiers appearing on your website!");
            }

            if (!configuration.IsExtractable)
            {
                throw new NotSupportedException("Aggregate " + configuration + " is not marked as IsExtractable therefore cannot be cached for publication on website");
            }
        }
Example No. 27
 public IAsyncEnumerable <TIdentity> AggregateIdsAsync <TIdentity, TAggregate>(
     AggregateConfiguration <TIdentity, TAggregate> configuration,
     [EnumeratorCancellation] CancellationToken cancellationToken = default
     ) where TAggregate : Aggregate <TIdentity, TAggregate>
 {
     throw new NotImplementedException();
 }
Example No. 28
        /// <summary>
        /// All <see cref="AggregateConfiguration"/>s within a <see cref="CohortIdentificationConfiguration"/> must start with the appropriate prefix (and ID of the cic)
        /// (See <see cref="CICPrefix"/>).  This method will change the <see cref="AggregateConfiguration.Name"/> to match the expected prefix.
        /// <para>If the name change would result in a collision with an existing set in the configuration then (Copy X) will appear at the end of the name</para>
        /// </summary>
        /// <param name="aggregate"></param>
        public void EnsureNamingConvention(AggregateConfiguration aggregate)
        {
            //it is already valid
            if (IsValidNamedConfiguration(aggregate))
            {
                return;
            }

            //make it valid by sticking on the prefix
            aggregate.Name = GetNamingConventionPrefixForConfigurations() + aggregate.Name;

            int    copy     = 0;
            string origName = aggregate.Name;


            var otherConfigurations = Repository.GetAllObjects <AggregateConfiguration>().Except(new[] { aggregate }).ToArray();

            //if there is a conflict on the name
            if (otherConfigurations.Any(c => c.Name.Equals(origName)))
            {
                do
                {
                    //add Copy 1 then Copy 2 etc
                    copy++;
                    aggregate.Name = origName + " (Copy " + copy + ")";
                }while (otherConfigurations.Any(c => c.Name.Equals(aggregate.Name)));//until there are no more copies
            }

            aggregate.SaveToDatabase();
        }
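A small sketch of the renaming behaviour described above (assuming an existing CohortIdentificationConfiguration cic, a Catalogue catalogue and the CatalogueRepository used elsewhere in these examples):

        var ac = new AggregateConfiguration(CatalogueRepository, catalogue, "People with NA results");

        // Prepends the cic prefix to the name (adding "(Copy X)" on a collision) and saves the aggregate
        cic.EnsureNamingConvention(ac);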
Example No. 29
        /// <summary>
        /// Creates an adjusted copy of the <paramref name="toClone"/> to be used as a cohort identification <see cref="AggregateConfiguration"/>.  This could be
        /// an <see cref="AggregateConfiguration"/> graph or one that is acting as a patient index table / cohort set for another <see cref="CohortIdentificationConfiguration"/>.
        /// <para>IMPORTANT: It must be possible to select a single column from which to harvest the patient identifiers; when multiple columns are marked IsExtractionIdentifier, <paramref name="resolveMultipleExtractionIdentifiers"/> decides which one to use</para>
        /// </summary>
        /// <param name="toClone">The aggregate to import</param>
        /// <param name="resolveMultipleExtractionIdentifiers">What to do if there are multiple <see cref="ExtractionInformation"/>/<see cref="AggregateDimension"/>
        ///  marked IsExtractionIdentifier</param>
        /// <param name="useTransaction">True to run the import in a transaction</param>
        /// <returns></returns>
        public AggregateConfiguration ImportAggregateConfigurationAsIdentifierList(AggregateConfiguration toClone, ChooseWhichExtractionIdentifierToUseFromManyHandler resolveMultipleExtractionIdentifiers, bool useTransaction = true)
        {
            if (!useTransaction)
            {
                return(CreateCloneOfAggregateConfigurationPrivate(toClone, resolveMultipleExtractionIdentifiers));
            }

            var cataRepo = (CatalogueRepository)Repository;


            using (cataRepo.BeginNewTransactedConnection())
            {
                try
                {
                    var toReturn = CreateCloneOfAggregateConfigurationPrivate(toClone, resolveMultipleExtractionIdentifiers);
                    cataRepo.EndTransactedConnection(true);
                    return(toReturn);
                }
                catch (Exception)
                {
                    cataRepo.EndTransactedConnection(false);//abandon
                    throw;
                }
            }
        }
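A usage sketch for the import above (graphAggregate is assumed to exist; the CohortCommandHelper.PickOneExtractionIdentifier callback is borrowed from Example No. 22):

        // Clone the aggregate, adjust it to act as an identifier list for this cic and commit it in a transaction
        AggregateConfiguration cohortSet = cic.ImportAggregateConfigurationAsIdentifierList(
            graphAggregate,
            CohortCommandHelper.PickOneExtractionIdentifier);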
Example No. 30
        public IHasFullyQualifiedNameToo GetLatestResultsTableUnsafe(AggregateConfiguration configuration, AggregateOperation operation, out string sql)
        {
            var syntax   = _database.Server.GetQuerySyntaxHelper();
            var mgrTable = _database.ExpectTable(ResultsManagerTable);

            using (var con = _server.GetConnection())
            {
                con.Open();
                using (var cmd = DatabaseCommandHelper.GetCommand(
                           $@"Select 
{syntax.EnsureWrapped("TableName")},
{syntax.EnsureWrapped("SqlExecuted")} from {mgrTable.GetFullyQualifiedName()}
WHERE {syntax.EnsureWrapped("AggregateConfiguration_ID")} = {configuration.ID}
AND {syntax.EnsureWrapped("Operation")} = '{operation}'", con))
                {
                    using (var r = cmd.ExecuteReader())
                        if (r.Read())
                        {
                            string tableName = r["TableName"].ToString();
                            sql = r["SqlExecuted"] as string;
                            return(_database.ExpectTable(tableName));
                        }
                }
            }

            sql = null;
            return(null);
        }
 /// <summary>
 /// Creates a default aggregator.
 /// </summary>
 protected AggregateCalculator(NodeId aggregateId)
 {
     AggregateConfiguration configuration = new AggregateConfiguration();
     configuration.TreatUncertainAsBad = false;
     configuration.PercentDataBad = 100;
     configuration.PercentDataGood = 100;
     configuration.UseSlopedExtrapolation = false;
     Initialize(aggregateId, DateTime.UtcNow, DateTime.MaxValue, 1000, false, configuration);
 }
 /// <summary>
 /// Initializes the calculation stream.
 /// </summary>
 /// <param name="aggregateId">The aggregate function to apply.</param>
 /// <param name="startTime">The start time.</param>
 /// <param name="endTime">The end time.</param>
 /// <param name="processingInterval">The processing interval.</param>
 /// <param name="stepped">Whether to use stepped interpolation.</param>
 /// <param name="configuration">The aggregate configuration.</param>
 public AggregateCalculator(
     NodeId aggregateId,
     DateTime startTime,
     DateTime endTime,
     double processingInterval,
     bool stepped,
     AggregateConfiguration configuration)
 {
     Initialize(aggregateId, startTime, endTime, processingInterval, stepped, configuration);
 }
        public void ShouldAllowTwoDifferentContextsToBeLoadedInTheSameAppDomain()
        {
            //Arrange
            var container = new WindsorContainer();
            var configuration = new AggregateConfiguration("Test",
                                                           new IMappingConfiguration[]
                                                               {
                                                                   new FooMappingConfiguration(),
                                                                   new BarMappingConfiguration()
                                                               }, null, null,
                                                           new[] {typeof (Foo), typeof (Bar)});
            var secondConfiguration = new AggregateConfiguration("Test",
                                                                 new IMappingConfiguration[]
                                                                     {new FooMappingConfiguration()}, null, null,
                                                                 new[] {typeof (Foo)});

            container.Register(
                Component.For<IAggregateConfiguration>().Instance(configuration).Named(AggregateConfigurationKeyFactory.GenerateKey<Foo,Bar>()),
                Component.For<IAggregateConfiguration>().Instance(secondConfiguration).Named(AggregateConfigurationKeyFactory.GenerateKey<Foo>()));

            var windsorServiceLocator = new WindsorServiceLocator(container);
            ServiceLocator.SetLocatorProvider(() => windsorServiceLocator);

            IDataContext context = AggregateContextFactory.Create<Foo, Bar>();
            IDataContext contextTwo = AggregateContextFactory.Create<Foo>();
            var foo = new Foo();
            var bar = new Bar();

            // Act
            context.Add(foo);
            context.Add(bar);

            contextTwo.Add(foo);

            // Assert
            // peek under the covers and ensure that each add went
            // to the right DbContext.
            Assert.AreNotEqual(context.GetType().FullName, contextTwo.GetType().FullName);
            ObjectContext objectContext = ((IObjectContextAdapter) context).ObjectContext;
            Assert.IsTrue(objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Count() == 2);
            Assert.IsTrue(
                objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).First().EntitySet.Name ==
                "Foos");
            Assert.IsTrue(
                objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Last().EntitySet.Name ==
                "Bars");

            ObjectContext objectContextTwo = ((IObjectContextAdapter) contextTwo).ObjectContext;
            Assert.IsTrue(objectContextTwo.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Count() == 1);
            Assert.IsTrue(
                objectContextTwo.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Single().EntitySet.Name ==
                "Foos");
        }
 /// <summary>
 /// Initializes the aggregate calculator.
 /// </summary>
 /// <param name="aggregateId">The aggregate function to apply.</param>
 /// <param name="startTime">The start time.</param>
 /// <param name="endTime">The end time.</param>
 /// <param name="processingInterval">The processing interval.</param>
 /// <param name="stepped">Whether to use stepped interpolation.</param>
 /// <param name="configuration">The aggregate configuration.</param>
 public StartEndAggregateCalculator(
     NodeId aggregateId,
     DateTime startTime,
     DateTime endTime,
     double processingInterval,
     bool stepped,
     AggregateConfiguration configuration)
 : 
     base(aggregateId, startTime, endTime, processingInterval, stepped, configuration)
 {
     SetPartialBit = true;
 }
 /// <summary>
 /// Initializes the aggregate calculator.
 /// </summary>
 /// <param name="aggregateId">The aggregate function to apply.</param>
 /// <param name="startTime">The start time.</param>
 /// <param name="endTime">The end time.</param>
 /// <param name="processingInterval">The processing interval.</param>
 /// <param name="stepped">Whether to use stepped interpolation.</param>
 /// <param name="configuration">The aggregate configuration.</param>
 public AverageAggregateCalculator(
     NodeId aggregateId,
     DateTime startTime,
     DateTime endTime,
     double processingInterval,
     bool stepped,
     AggregateConfiguration configuration)
 : 
     base(aggregateId, startTime, endTime, processingInterval, stepped, configuration)
 {
     SetPartialBit = aggregateId != Opc.Ua.ObjectIds.AggregateFunction_Average;
 }
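A construction sketch combining the configuration flags and constructors shown above (the time window and 5000 ms interval are illustrative values only):

        var configuration = new AggregateConfiguration
        {
            TreatUncertainAsBad = false,
            PercentDataBad = 100,
            PercentDataGood = 100,
            UseSlopedExtrapolation = false
        };

        // Average over 5 second slices across the last hour using sloped (non-stepped) interpolation
        var calculator = new AverageAggregateCalculator(
            Opc.Ua.ObjectIds.AggregateFunction_Average,
            DateTime.UtcNow.AddHours(-1),
            DateTime.UtcNow,
            5000,
            false,
            configuration);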
Example No. 36
        /// <summary>
        /// Initializes the calculation stream.
        /// </summary>
        /// <param name="startTime">The start time.</param>
        /// <param name="endTime">The end time.</param>
        /// <param name="processingInterval">The processing interval.</param>
        /// <param name="configuration">The aggregate configuration.</param>
        public void Initialize(
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            AggregateConfiguration configuration)
        {
            m_startTime = startTime;
            m_endTime = endTime;
            m_configuration = configuration;
            m_processingInterval = processingInterval;
            m_timeFlowsBackward = (endTime < startTime);
            m_values = new LinkedList<DataValue>();
            m_lastRawTimestamp = (m_timeFlowsBackward) ? DateTime.MaxValue : DateTime.MinValue;

            TimeSlice slice = new TimeSlice();
            slice.StartTime = startTime;
            slice.EndTime = slice.StartTime.AddMilliseconds((m_timeFlowsBackward) ? -m_processingInterval : m_processingInterval);
            slice.EarlyBound = null;
            slice.LateBound = null;
            slice.Complete = false;
            m_nextSlice = slice;
        }
        public void ShouldAddToTheCorrectContext()
        {
            // Arrange
            var container = new WindsorContainer();
            var configuration = new AggregateConfiguration("Test",
                                                           new IMappingConfiguration[]
                                                               {
                                                                   new FooMappingConfiguration(),
                                                                   new BarMappingConfiguration()
                                                               }, null, null,
                                                           new[] {typeof (Foo), typeof (Bar)});
            container.Register(
                Component.For<IAggregateConfiguration>().Instance(configuration)
                    .Named(AggregateConfigurationKeyFactory.GenerateKey<Foo,Bar>()));
            string typeName = string.Format("{0},{1}", typeof (Foo).FullName, typeof (Bar).FullName);
            var windsorServiceLocator = new WindsorServiceLocator(container);
            ServiceLocator.SetLocatorProvider(() => windsorServiceLocator);

            IDataContext context = AggregateContextFactory.Create<Foo, Bar>();
            var foo = new Foo();
            var bar = new Bar();

            // Act
            context.Add(foo);
            context.Add(bar);

            // Assert
            // peek under the covers and ensure that each add went
            // to the right DbContext.
            ObjectContext objectContext = ((IObjectContextAdapter) context).ObjectContext;
            Assert.IsTrue(objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Count() == 2);
            Assert.IsTrue(
                objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).First().EntitySet.Name ==
                "Foos");
            Assert.IsTrue(
                objectContext.ObjectStateManager.GetObjectStateEntries(EntityState.Added).Last().EntitySet.Name ==
                "Bars");
        }
Example No. 38
        /// <summary>
        /// Creates a new aggregate calculator.
        /// </summary>
        /// <param name="aggregateId">The id of the aggregate function.</param>
        /// <param name="startTime">When to start processing.</param>
        /// <param name="endTime">When to stop processing.</param>
        /// <param name="processingInterval">The processing interval.</param>
        /// <param name="configuration">The configuaration to use.</param>
        /// <returns></returns>
        public IAggregateCalculator CreateCalculator(
            NodeId aggregateId,
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            AggregateConfiguration configuration)
        {
            if (NodeId.IsNull(aggregateId))
            {
                return null;
            }

            AggregatorFactory factory = null;

            lock (m_lock)
            {
                if (!m_factories.TryGetValue(aggregateId, out factory))
                {
                    return null;
                }
            }

            AggregateCalculatorImpl calculator = factory();

            if (calculator == null)
            {
                return null;
            }

            calculator.StartTime = startTime;
            calculator.EndTime = endTime;
            calculator.ProcessingInterval = processingInterval;
            calculator.Configuration = configuration;
            calculator.SteppedVariable = configuration.UseSlopedExtrapolation;

            return calculator;
        }
Example No. 39
        /// <summary>
        /// Revises the aggregate configuration.
        /// </summary>
        /// <param name="context"></param>
        /// <param name="item"></param>
        /// <param name="configurationToUse"></param>
        private void ReviseAggregateConfiguration(
            ServerSystemContext context,
            ArchiveItemState item,
            AggregateConfiguration configurationToUse)
        {
            // set configuration from defaults.
            if (configurationToUse.UseServerCapabilitiesDefaults)
            {
                AggregateConfiguration configuration = item.ArchiveItem.AggregateConfiguration;

                if (configuration == null || configuration.UseServerCapabilitiesDefaults)
                {
                    configuration = Server.AggregateManager.GetDefaultConfiguration(null);
                }

                configurationToUse.UseSlopedExtrapolation = configuration.UseSlopedExtrapolation;
                configurationToUse.TreatUncertainAsBad = configuration.TreatUncertainAsBad;
                configurationToUse.PercentDataBad = configuration.PercentDataBad;
                configurationToUse.PercentDataGood = configuration.PercentDataGood;
            }

            // override configuration when it does not make sense for the item.
            configurationToUse.UseServerCapabilitiesDefaults = false;

            if (item.ArchiveItem.Stepped)
            {
                configurationToUse.UseSlopedExtrapolation = false;
            }
        }
        /// <summary>
        /// Initializes the calculation stream.
        /// </summary>
        /// <param name="aggregateId">The aggregate function to apply.</param>
        /// <param name="startTime">The start time.</param>
        /// <param name="endTime">The end time.</param>
        /// <param name="processingInterval">The processing interval.</param>
        /// <param name="stepped">Whether to use stepped interpolation.</param>
        /// <param name="configuration">The aggregate configuration.</param>
        protected void Initialize(
            NodeId aggregateId,
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            bool stepped,
            AggregateConfiguration configuration)
        {
            AggregateId = aggregateId;
            StartTime = startTime;
            EndTime = endTime;
            ProcessingInterval = processingInterval;
            Stepped = stepped;
            Configuration = configuration;
            TimeFlowsBackward = (endTime < startTime);

            if (processingInterval == 0)
            {
                if (endTime == DateTime.MinValue || startTime == DateTime.MinValue)
                {
                    throw new ArgumentException("Non-zero processingInterval required.", "processingInterval");
                }

                ProcessingInterval = Math.Abs((endTime - startTime).TotalMilliseconds);
            }

            m_values = new LinkedList<DataValue>();
        }
Example No. 41
        private void GenerateData()
        {
            AggregateConfiguration configuration = new AggregateConfiguration();
            configuration.TreatUncertainAsBad = false;
            configuration.PercentDataBad = 100;
            configuration.PercentDataGood = 100;
            configuration.UseSlopedExtrapolation = false;
            configuration.UseServerCapabilitiesDefaults = false;

            GenerateData("Historian1", configuration, false, false);

            configuration.TreatUncertainAsBad = true;
            configuration.PercentDataBad = 100;
            configuration.PercentDataGood = 100;
            configuration.UseSlopedExtrapolation = true;
            configuration.UseServerCapabilitiesDefaults = false;

            GenerateData("Historian2", configuration, false, false);

            configuration.TreatUncertainAsBad = true;
            configuration.PercentDataBad = 50;
            configuration.PercentDataGood = 50;
            configuration.UseSlopedExtrapolation = false;
            configuration.UseServerCapabilitiesDefaults = false;

            GenerateData("Historian3", configuration, true, false);

            configuration.TreatUncertainAsBad = true;
            configuration.PercentDataBad = 100;
            configuration.PercentDataGood = 100;
            configuration.UseSlopedExtrapolation = false;
            configuration.UseServerCapabilitiesDefaults = false;

            GenerateData("Historian4", configuration, true, true);

            configuration.TreatUncertainAsBad = false;
            configuration.PercentDataBad = 100;
            configuration.PercentDataGood = 100;
            configuration.UseSlopedExtrapolation = false;
            configuration.UseServerCapabilitiesDefaults = false;

            GenerateData("Historian5", configuration, false, false);
        }
Example No. 42
        private void GenerateData(string historianName, AggregateConfiguration configuration, bool stepped, bool discrete)
        {
            if (!discrete)
            {
                GenerateData(historianName, BrowseNames.AggregateFunction_Interpolative, 5000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Average, 5000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_TimeAverage, 5000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_TimeAverage2, 5000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Total, 5000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Total2, 5000, configuration, stepped);

                GenerateData(historianName, BrowseNames.AggregateFunction_Minimum, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Maximum, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_MinimumActualTime, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_MaximumActualTime, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Range, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Minimum2, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Maximum2, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_MinimumActualTime2, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_MaximumActualTime2, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Range2, 16000, configuration, stepped);
            }

            GenerateData(historianName, BrowseNames.AggregateFunction_Count, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_AnnotationCount, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_DurationInStateZero, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_DurationInStateNonZero, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_NumberOfTransitions, 16000, configuration, stepped);

            if (!discrete)
            {
                GenerateData(historianName, BrowseNames.AggregateFunction_Start, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_End, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_Delta, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_StartBound, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_EndBound, 16000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_DeltaBounds, 16000, configuration, stepped);
            }

            GenerateData(historianName, BrowseNames.AggregateFunction_DurationGood, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_DurationBad, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_PercentGood, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_PercentBad, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_WorstQuality, 16000, configuration, stepped);
            GenerateData(historianName, BrowseNames.AggregateFunction_WorstQuality2, 16000, configuration, stepped);

            if (!discrete)
            {
                GenerateData(historianName, BrowseNames.AggregateFunction_StandardDeviationPopulation, 20000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_VariancePopulation, 20000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_StandardDeviationSample, 20000, configuration, stepped);
                GenerateData(historianName, BrowseNames.AggregateFunction_VarianceSample, 20000, configuration, stepped);
            }
        }
Example No. 43
        private void GenerateData(string historianName, string aggregateName, double processingInterval, AggregateConfiguration configuration, bool stepped)
        {
            DateTime startTime = new DateTime(DateTime.UtcNow.Year, 1, 1, 0, 0, 0, DateTimeKind.Utc);

            IAggregateCalculator calculator = Aggregators.CreateStandardCalculator(
                Aggregators.GetIdForStandardAggregate(aggregateName),
                startTime.AddSeconds(0),
                startTime.AddSeconds(100),
                processingInterval,
                stepped,
                configuration);

            SortedDictionary<DateTime, TestData.DataValue> rawValues = m_testData.GetRawValues(historianName);
            List<TestData.DataValue> processedValues = new List<TestData.DataValue>();

            foreach (TestData.DataValue rawValue in rawValues.Values)
            {
                if (!calculator.QueueRawValue((Opc.Ua.DataValue)rawValue))
                {
                    break;
                }

                DataValue processedValue = calculator.GetProcessedValue(false);

                if (processedValue != null)
                {
                    processedValues.Add(new TestData.DataValue(processedValue));
                }
            }

            for (DataValue processedValue = calculator.GetProcessedValue(true); processedValue != null; processedValue = calculator.GetProcessedValue(true))
            {
                processedValues.Add(new TestData.DataValue(processedValue));
            }

            ProcessedDataSetType dataset = new ProcessedDataSetType();

            dataset.DataSetName = historianName;
            dataset.AggregateName = aggregateName;
            dataset.Stepped = stepped;
            dataset.UseSlopedExtrapolation = configuration.UseSlopedExtrapolation;
            dataset.TreatUncertainAsBad = configuration.TreatUncertainAsBad;
            dataset.ProcessingInterval = (uint)processingInterval;
            dataset.PercentBad = configuration.PercentDataBad;
            dataset.PercentGood = configuration.PercentDataGood;

            m_testData.AddDataSet(dataset);
            m_testData.UpdateProcessedValues(dataset, processedValues.ToArray());
        }
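The queue-then-drain loop in the middle of this example reappears unchanged in the next two examples: feed each raw value, collect any interval that completes, then drain the remainder by calling GetProcessedValue(true) until it returns null. Extracted into a stand-alone helper (RunCalculator is a hypothetical name; only the IAggregateCalculator members used above are assumed), the pattern looks like this:

        // Hypothetical helper: runs one calculator over an ordered stream of raw values
        // and returns every processed (aggregated) value it produces.
        private static List<DataValue> RunCalculator(IAggregateCalculator calculator, IEnumerable<DataValue> rawValues)
        {
            List<DataValue> processedValues = new List<DataValue>();

            foreach (DataValue rawValue in rawValues)
            {
                // false means the calculator could not accept the value
                // (e.g. it falls outside the requested time range).
                if (!calculator.QueueRawValue(rawValue))
                {
                    break;
                }

                // returns a value only once enough raw data has arrived to close an interval.
                DataValue processedValue = calculator.GetProcessedValue(false);

                if (processedValue != null)
                {
                    processedValues.Add(processedValue);
                }
            }

            // passing true releases the remaining intervals, including any partial one at the end.
            for (DataValue processedValue = calculator.GetProcessedValue(true); processedValue != null; processedValue = calculator.GetProcessedValue(true))
            {
                processedValues.Add(processedValue);
            }

            return processedValues;
        }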
Example No. 44
        private void DoTest()
        {
            if (HistorianCB.SelectedItem == null)
            {
                return;
            }

            m_dataset.AcceptChanges();

            // reset row state.
            ResetRowState();

            AggregateConfiguration configuration = new AggregateConfiguration();
            configuration.TreatUncertainAsBad = TreatUncertainAsBadCK.Checked;
            configuration.PercentDataGood = (byte)PercentGoodNP.Value;
            configuration.PercentDataBad = (byte)PercentBadNP.Value;
            configuration.UseSlopedExtrapolation = UseSlopedExtrapolationCK.Checked;
            configuration.UseServerCapabilitiesDefaults = false;

            DateTime startTime = new DateTime(DateTime.UtcNow.Year, 1, 1, 0, 0, 0, DateTimeKind.Utc);

            IAggregateCalculator calculator = Aggregators.CreateStandardCalculator(
                Aggregators.GetIdForStandardAggregate(AggregateCB.SelectedItem as string),
                (!this.TimeFlowsBackwardsCK.Checked) ? startTime : startTime.AddSeconds(100),
                (this.TimeFlowsBackwardsCK.Checked) ? startTime : startTime.AddSeconds(100),
                (double)ProcessingIntervalNP.Value,
                SteppedCK.Checked,
                configuration);

            SortedDictionary<DateTime, TestData.DataValue> rawValues = m_testData.GetRawValues(HistorianCB.SelectedItem as string);
            List<DataValue> processedValues = new List<DataValue>();

            List<Opc.Ua.DataValue> valuesToProcess = new List<DataValue>();

            foreach (TestData.DataValue rawValue in rawValues.Values)
            {
                valuesToProcess.Add((Opc.Ua.DataValue)rawValue);
            }

            if (TimeFlowsBackwardsCK.Checked)
            {
                valuesToProcess.Reverse();
            }

            foreach (Opc.Ua.DataValue rawValue in valuesToProcess)
            {
                if (!calculator.QueueRawValue(rawValue))
                {
                    break;
                }

                DataValue processedValue = calculator.GetProcessedValue(false);

                if (processedValue != null)
                {
                    processedValues.Add(processedValue);
                }
            }

            for (DataValue processedValue = calculator.GetProcessedValue(true); processedValue != null; processedValue = calculator.GetProcessedValue(true))
            {
                processedValues.Add(processedValue);
            }

            string sort = "Timestamp";

            if (TimeFlowsBackwardsCK.Checked)
            {
                sort += " DESC";
            }

            DataView view = new DataView(m_dataset.Tables[0], "RowState = 'OK'", sort, DataViewRowState.CurrentRows);

            int index = 0;

            foreach (DataRowView row in view)
            {
                if (index >= processedValues.Count)
                {
                    UpdateActualValue(row.Row, null, RowState.Failed);
                    continue;
                }

                TestData.DataValue actualValue = new TestData.DataValue(processedValues[index++]);
                DateTime expectedTimestamp = TestData.ValidateTimestamp(row[0]);

                if (expectedTimestamp != actualValue.SourceTimestamp)
                {
                    AddActualValue(actualValue, RowState.Failed);

                    bool found = false;

                    while (TimeFlowsBackwardsCK.Checked && expectedTimestamp < actualValue.SourceTimestamp)
                    {
                        actualValue = new TestData.DataValue(processedValues[index++]);

                        if (expectedTimestamp == actualValue.SourceTimestamp)
                        {
                            found = true;
                            break;
                        }

                        AddActualValue(actualValue, RowState.Failed);
                    }

                    if (!found)
                    {
                        continue;
                    }
                }

                StatusCode expectedQuality = TestData.ValidateQuality(row[4]);

                if (expectedQuality != actualValue.StatusCode)
                {
                    UpdateActualValue(row.Row, actualValue, RowState.Failed);
                    continue;
                }

                if (StatusCode.IsNotBad(expectedQuality))
                {
                    Variant expectedValue = TestData.ValidateValue(row[3]);

                    StatusCode? statusValue1 = expectedValue.Value as StatusCode?;

                    if (statusValue1 != null)
                    {
                        StatusCode? statusValue2 = actualValue.Value as StatusCode?;

                        if (statusValue2 == null || statusValue2.Value != statusValue1.Value)
                        {
                            UpdateActualValue(row.Row, actualValue, RowState.Failed);
                            continue;
                        }
                    }

                    else
                    {
                        double value1 = Math.Round(Convert.ToDouble(expectedValue.Value), 4);
                        double value2 = Math.Round(Convert.ToDouble(actualValue.Value), 4);

                        if (value1 != value2)
                        {
                            UpdateActualValue(row.Row, actualValue, RowState.Failed);
                            continue;
                        }
                    }
                }

                UpdateActualValue(row.Row, actualValue, RowState.Success);
            }

            // add any unexpected data at the end.
            while (index < processedValues.Count)
            {
                TestData.DataValue actualValue = new TestData.DataValue(processedValues[index++]);

                DataRowView row = FindRowByTimestamp(actualValue.SourceTimestamp);

                if (row == null)
                {
                    AddActualValue(actualValue, RowState.Failed);
                }
                else
                {
                    UpdateActualValue(row.Row, actualValue, RowState.Failed);
                }
            }

            m_dataset.AcceptChanges();
            UpdatesComplete();
        }
Example No. 45
        static void DoTest(TestCase test, string filePath)
        {
            List<DataValue> expectedValues = GetExpectedResults(test.ExpectedResultsPath, test.TestId);

            ArchiveItem item = new ArchiveItem(test.DataPath, Assembly.GetExecutingAssembly(), test.DataPath);

            DataFileReader reader = new DataFileReader();
            reader.LoadConfiguration(null, item);
            reader.LoadHistoryData(null, item);

            AggregateConfiguration configuration = new AggregateConfiguration();
            configuration.PercentDataBad = 100;
            configuration.PercentDataGood = 100;
            configuration.TreatUncertainAsBad = test.TreatUncertainAsBad;
            configuration.UseSlopedExtrapolation = test.UseSlopedExtrapolation;
            configuration.UseServerCapabilitiesDefaults = false;

            DateTime startTime = DateTime.UtcNow;
            startTime = new DateTime(startTime.Year, startTime.Month, startTime.Day, startTime.Hour, 0, 0, DateTimeKind.Utc);

            AggregateCalculator calculator = new AggregateCalculator(
                test.AggregateId,
                startTime.AddSeconds(0),
                startTime.AddSeconds(100),
                5000,
                test.Stepped,
                configuration);

            StringBuilder buffer = new StringBuilder();
            List<DataValue> values = new List<DataValue>();

            foreach (DataRowView row in item.DataSet.Tables[0].DefaultView)
            {
                DataValue rawValue = (DataValue)row.Row[2];

                if (!calculator.QueueRawValue(rawValue))
                {
                    Utils.Trace("Oops!");
                    continue;
                }

                DataValue processedValue = calculator.GetProcessedValue(false);

                if (processedValue != null)
                {
                    values.Add(processedValue);
                }
            }

            for (DataValue processedValue = calculator.GetProcessedValue(true); processedValue != null; processedValue = calculator.GetProcessedValue(true))
            {
                values.Add(processedValue);
            }

            for (int ii = 0; ii < values.Count && ii < expectedValues.Count; ii++)
            {
                if (values[ii].SourceTimestamp != expectedValues[ii].SourceTimestamp)
                {
                    Utils.Trace("Wrong Status Timestamp");
                    continue;
                }

                if (values[ii].StatusCode != expectedValues[ii].StatusCode)
                {
                    Utils.Trace("Wrong Status Code");
                    continue;
                }

                if (StatusCode.IsNotBad(values[ii].StatusCode))
                {
                    double value1 = Math.Round(Convert.ToDouble(values[ii].Value), 4);
                    double value2 = Math.Round(Convert.ToDouble(expectedValues[ii].Value), 4);

                    if (value1 != value2)
                    {
                        Utils.Trace("Wrong Value");
                        continue;
                    }
                }
            }
            
            foreach (DataValue processedValue in values)
            {
                buffer.Append(processedValue.SourceTimestamp.ToString("HH:mm:ss"));
                buffer.Append(", ");
                buffer.Append(processedValue.WrappedValue);
                buffer.Append(", ");
                buffer.Append(new StatusCode(processedValue.StatusCode.CodeBits));
                buffer.Append(", ");
                buffer.Append(processedValue.StatusCode.AggregateBits);
                buffer.Append("\r\n");
            }

            // write to the file.
            using (StreamWriter writer = new StreamWriter(filePath))
            {
                writer.Write(buffer.ToString());
            }
        }
Example No. 46
        /// <summary>
        /// Creates a calculator for one of the standard aggregates.
        /// </summary>
        public static IAggregateCalculator CreateStandardCalculator(
            NodeId aggregateId,
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            bool stepped,
            AggregateConfiguration configuration) 
        {
            for (int ii = 0; ii < s_Mappings.Length; ii++)
            {
                if (s_Mappings[ii].AggregateId == aggregateId)
                {
                    return (IAggregateCalculator)Activator.CreateInstance(
                        s_Mappings[ii].Calculator,
                        aggregateId, 
                        startTime, 
                        endTime, 
                        processingInterval, 
                        stepped, 
                        configuration);
                }
            }

            return null;
        }
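A hypothetical call site for this factory, using the GetIdForStandardAggregate helper seen in the earlier examples (the one-hour window, the 5-minute interval and the choice of Average are illustrative, not taken from the samples):

        AggregateConfiguration configuration = new AggregateConfiguration();
        configuration.PercentDataBad = 100;
        configuration.PercentDataGood = 100;
        configuration.TreatUncertainAsBad = false;
        configuration.UseSlopedExtrapolation = false;
        configuration.UseServerCapabilitiesDefaults = false;

        DateTime endTime = DateTime.UtcNow;
        DateTime startTime = endTime.AddHours(-1);

        // null is returned when the aggregate id does not appear in s_Mappings.
        IAggregateCalculator calculator = Aggregators.CreateStandardCalculator(
            Aggregators.GetIdForStandardAggregate(BrowseNames.AggregateFunction_Average),
            startTime,
            endTime,
            300000,   // 5-minute processing interval, in milliseconds
            false,    // interpolated rather than stepped
            configuration);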
Example No. 47
        /// <summary>
        /// Returns the default configuration for the specified variable id.
        /// </summary>
        /// <param name="variableId">The id of history data node.</param>
        /// <returns>The configuration.</returns>
        public AggregateConfiguration GetDefaultConfiguration(NodeId variableId)
        {
            lock (m_lock)
            {
                if (m_defaultConfiguration == null)
                {
                    m_defaultConfiguration = new AggregateConfiguration();
                    m_defaultConfiguration.PercentDataBad = 100;
                    m_defaultConfiguration.PercentDataGood = 100;
                    m_defaultConfiguration.TreatUncertainAsBad = false;
                    m_defaultConfiguration.UseSlopedExtrapolation = false;
                    m_defaultConfiguration.UseServerCapabilitiesDefaults = false;
                }

                return m_defaultConfiguration;
            }
        }
Example No. 48
        /// <summary>
        /// Sets the default aggregate configuration.
        /// </summary>
        /// <param name="configuration">The default aggregate configuration.</param>
        public void SetDefaultConfiguration(AggregateConfiguration configuration)
        {
            lock (m_lock)
            {
                m_defaultConfiguration = configuration;
            }
        }
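A short sketch of how the two accessors might be used together; aggregateManager, variableId and the 75% thresholds are illustrative, not part of the sample:

        // install stricter server-wide defaults...
        AggregateConfiguration serverDefaults = new AggregateConfiguration();
        serverDefaults.PercentDataBad = 75;
        serverDefaults.PercentDataGood = 75;
        serverDefaults.TreatUncertainAsBad = true;
        serverDefaults.UseSlopedExtrapolation = false;
        serverDefaults.UseServerCapabilitiesDefaults = false;

        aggregateManager.SetDefaultConfiguration(serverDefaults);

        // ...and read them back; note that this implementation of GetDefaultConfiguration
        // ignores the variable id and always returns the server-wide defaults.
        AggregateConfiguration effective = aggregateManager.GetDefaultConfiguration(variableId);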
Example No. 49
        /// <summary>
        /// Creates a new aggregate calculator.
        /// </summary>
        /// <param name="aggregateId">The id of the aggregate function.</param>
        /// <param name="startTime">When to start processing.</param>
        /// <param name="endTime">When to stop processing.</param>
        /// <param name="processingInterval">The processing interval.</param>
        /// <param name="stepped">Whether stepped interpolation should be used.</param>
        /// <param name="configuration">The configuaration to use.</param>
        /// <returns></returns>
        public IAggregateCalculator CreateCalculator(
            NodeId aggregateId,
            DateTime startTime,
            DateTime endTime,
            double processingInterval,
            bool stepped,
            AggregateConfiguration configuration)
        {
            if (NodeId.IsNull(aggregateId))
            {
                return null;
            }

            AggregatorFactory factory = null;

            lock (m_lock)
            {
                if (!m_factories.TryGetValue(aggregateId, out factory))
                {
                    return null;
                }
            }

            if (configuration.UseServerCapabilitiesDefaults)
            {
                configuration = m_defaultConfiguration;
            }

            IAggregateCalculator calculator = factory(aggregateId, startTime, endTime, processingInterval, stepped, configuration);

            if (calculator == null)
            {
                return null;
            }

            return calculator;
        }
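To exercise the UseServerCapabilitiesDefaults branch above, a caller could pass a configuration that only sets that flag and let the factory substitute the defaults; aggregateManager and startTime are again illustrative names:

        // The caller asks for server defaults; CreateCalculator swaps in
        // m_defaultConfiguration before invoking the registered factory.
        AggregateConfiguration configuration = new AggregateConfiguration();
        configuration.UseServerCapabilitiesDefaults = true;

        IAggregateCalculator calculator = aggregateManager.CreateCalculator(
            Aggregators.GetIdForStandardAggregate(BrowseNames.AggregateFunction_Minimum),
            startTime,
            startTime.AddMinutes(10),
            60000,   // one-minute processing interval, in milliseconds
            false,   // stepped
            configuration);

        // CreateCalculator returns null when the aggregate id is null
        // or no factory has been registered for it.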