Ejemplo n.º 1
0
        /// <summary>
        /// Gets all data series that match the
        /// specified criteria and are within the geographic polygon.
        /// </summary>
        /// <param name="polygons">One or multiple polygons.</param>
        /// <param name="keywords">Array of keywords. If set to null,
        /// results will not be filtered by keyword.</param>
        /// <param name="startDate">Start date. If set to null, results will not be filtered by start date.</param>
        /// <param name="endDate">End date. If set to null, results will not be filtered by end date.</param>
        /// <param name="serviceIDs">Array of serviceIDs provided by GetServicesInBox.
        /// If set to null, results will not be filtered by web service.</param>
        /// <param name="bgWorker">The background worker (must not be null) used for cancellation checks and progress reporting.</param>
        /// <param name="e">If not null, receives the search result (a <c>SearchResult</c>) in its <c>Result</c> property.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="bgWorker"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when <paramref name="polygons"/> is null or empty.</exception>
        public void GetSeriesCatalogInPolygon(IList <IFeature> polygons, string[] keywords, DateTime startDate,
                                              DateTime endDate, int[] serviceIDs, IProgressHandler bgWorker, DoWorkEventArgs e)
        {
            // Guard clauses, kept consistent with GetSeriesCatalogInRectangle which
            // already validates bgWorker before first use.
            if (bgWorker == null)
            {
                throw new ArgumentNullException(nameof(bgWorker));
            }

            //(1): Validate the polygon input
            if (polygons == null || polygons.Count == 0)
            {
                throw new ArgumentException("The number of polygons must be greater than zero.");
            }

            double tileWidth  = 1.0; //the initial tile width is set to 1 degree
            double tileHeight = 1.0; //the initial tile height is set to 1 degree

            // Check for cancel
            bgWorker.CheckForCancel();

            if (polygons.Count > 1)
            {
                bgWorker.ReportProgress(0, "Processing Polygons");
            }

            //get the list of series
            var fullSeriesList = new List <SeriesDataCart>();

            foreach (IFeature polygon in polygons)
            {
                //Split the polygon area bounding box into 1x1 decimal degree tiles
                IEnvelope   env       = polygon.Envelope;
                Box         extentBox = new Box(env.Left(), env.Right(), env.Bottom(), env.Top());
                IList <Box> tiles     = SearchHelper.CreateTiles(extentBox, tileWidth, tileHeight);
                int         numTiles  = tiles.Count;

                for (int i = 0; i < numTiles; i++)
                {
                    Box tile = tiles[i];

                    bgWorker.CheckForCancel();

                    // Do the web service call for the current tile
                    IEnumerable <SeriesDataCart> tileSeriesList = GetSeriesCatalogForBox(tile.XMin, tile.XMax, tile.YMin, tile.YMax, keywords, startDate, endDate, serviceIDs);

                    // Clip the points by polygon: the tile covers the bounding box,
                    // so series outside the actual polygon must be discarded here.
                    IEnumerable <SeriesDataCart> seriesInPolygon = SearchHelper.ClipByPolygon(tileSeriesList, polygon);

                    fullSeriesList.AddRange(seriesInPolygon);

                    // Report progress (message carries the running series count)
                    {
                        string message         = fullSeriesList.Count.ToString();
                        int    percentProgress = (i * 100) / numTiles + 1;
                        bgWorker.ReportProgress(percentProgress, message);
                    }
                }
            }

            //(4) Create the Feature Set
            SearchResult resultFs = null;

            if (fullSeriesList.Count > 0)
            {
                bgWorker.ReportProgress(0, "Calculating Points");
                resultFs = SearchHelper.ToFeatureSetsByDataSource(fullSeriesList);
            }

            // (5) Final Background worker updates
            if (e != null)
            {
                bgWorker.CheckForCancel();

                // Report progress
                bgWorker.ReportProgress(100, "Search Finished");
                e.Result = resultFs;
            }
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Gets all search results that match the
        /// specified criteria and are within the specified rectangle.
        /// </summary>
        /// <param name="xMin">Western (minimum x) edge of the rectangle in decimal degrees.</param>
        /// <param name="XMax">Eastern (maximum x) edge of the rectangle in decimal degrees.</param>
        /// <param name="YMin">Southern (minimum y) edge of the rectangle in decimal degrees.</param>
        /// <param name="YMax">Northern (maximum y) edge of the rectangle in decimal degrees.</param>
        /// <param name="keywords">Array of keywords. If set to null,
        /// results will not be filtered by keyword.</param>
        /// <param name="startDate">Start date. If set to null, results will not be filtered by start date.</param>
        /// <param name="endDate">End date. If set to null, results will not be filtered by end date.</param>
        /// <param name="serviceIDs">Array of serviceIDs. If set to null, results will not be filtered by web service.</param>
        /// <param name="bgWorker">The background worker (must not be null) used for cancellation checks and progress reporting.</param>
        /// <param name="e">If not null, receives the search result (a <c>SearchResult</c>) in its <c>Result</c> property.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="bgWorker"/> is null.</exception>
        public void GetSeriesCatalogInRectangle(double xMin, double XMax, double YMin, double YMax, string[] keywords,
                                                DateTime startDate, DateTime endDate, int[] serviceIDs, IProgressHandler bgWorker, DoWorkEventArgs e)
        {
            if (bgWorker == null)
            {
                throw new ArgumentNullException(nameof(bgWorker));
            }

            double tileWidth  = 1.0; //the initial tile width is set to 1 degree
            double tileHeight = 1.0; //the initial tile height is set to 1 degree

            bgWorker.CheckForCancel();

            //get the list of series
            var fullSeriesList = new List <SeriesDataCart>();

            //Split the rectangle into 1x1 decimal degree tiles
            Box         extentBox = new Box(xMin, XMax, YMin, YMax);
            IList <Box> tiles     = SearchHelper.CreateTiles(extentBox, tileWidth, tileHeight);
            int         numTiles  = tiles.Count;

            for (int i = 0; i < numTiles; i++)
            {
                Box tile = tiles[i];

                bgWorker.CheckForCancel();

                // Do the web service call for the current tile
                IEnumerable <SeriesDataCart> tileSeriesList = GetSeriesCatalogForBox(tile.XMin, tile.XMax, tile.YMin, tile.YMax, keywords, startDate, endDate, serviceIDs);

                fullSeriesList.AddRange(tileSeriesList);

                // Report progress (message carries the running series count)
                {
                    string message         = fullSeriesList.Count.ToString();
                    int    percentProgress = (i * 100) / numTiles + 1;
                    bgWorker.ReportProgress(percentProgress, message);
                }
            }

            //(4) Create the Feature Set
            SearchResult resultFs = null;

            if (fullSeriesList.Count > 0)
            {
                bgWorker.ReportProgress(0, "Calculating Points");
                resultFs = SearchHelper.ToFeatureSetsByDataSource(fullSeriesList);
            }

            // (5) Final Background worker updates
            if (e != null)
            {
                bgWorker.CheckForCancel();

                // Report progress
                bgWorker.ReportProgress(100, "Search Finished");
                e.Result = resultFs;
            }
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Runs the configured batch macro job asynchronously over all files in the job scope.
        /// </summary>
        /// <param name="cancellationToken">Token used to cancel the batch run.</param>
        /// <returns>True when the run completes (failed files are tolerated only when 'Continue On Error' is set).</returns>
        /// <exception cref="JobCancelledException">Thrown when the run is cancelled.</exception>
        /// <exception cref="UserException">Thrown when the job scope is empty or a file fails without 'Continue On Error'.</exception>
        public async Task <bool> BatchRunAsync(CancellationToken cancellationToken = default)
        {
            m_JournalWriter.WriteLine($"Batch macro running started");

            var batchStartTime = DateTime.Now;

            // Optional per-file timeout configured on the job (<= 0 means no timeout).
            TimeSpan?timeout = null;

            if (m_Job.Timeout > 0)
            {
                timeout = TimeSpan.FromSeconds(m_Job.Timeout);
            }

            var worker = m_WorkerFact.Invoke(timeout);

            worker.Retry   += OnRetry;
            worker.Timeout += OnTimeout;

            m_CurrentContext = new BatchJobContext()
            {
                Job = m_Job
            };

            var jobResult = false;

            try
            {
                await TaskEx.Run(() =>
                {
                    m_JournalWriter.WriteLine($"Collecting files for processing");

                    var app = EnsureApplication(m_CurrentContext, cancellationToken);

                    var allFiles = PrepareJobScope(app, m_Job.Input, m_Job.Filters, m_Job.Macros);

                    m_JournalWriter.WriteLine($"Running batch processing for {allFiles.Length} file(s)");

                    m_ProgressHandler.SetJobScope(allFiles, batchStartTime);

                    if (!allFiles.Any())
                    {
                        throw new UserException("Empty job. No files matching specified filter");
                    }

                    // Number of files processed by the current application instance;
                    // used to recycle the application when BatchSize is reached.
                    var curBatchSize = 0;

                    for (int i = 0; i < allFiles.Length; i++)
                    {
                        // Remember the process id so we can detect an application restart below.
                        var curAppPrc = m_CurrentContext.CurrentApplicationProcess?.Id;

                        m_CurrentContext.CurrentJobItem = allFiles[i];
                        var res = TryProcessFile(m_CurrentContext, worker, cancellationToken);

                        TryCloseDocument(m_CurrentContext.CurrentDocument);

                        m_CurrentContext.CurrentDocument = null;

                        m_ProgressHandler?.ReportProgress(m_CurrentContext.CurrentJobItem, res);

                        if (!res && !m_Job.ContinueOnError)
                        {
                            throw new UserException("Cancelling the job. Set 'Continue On Error' option to continue job if file failed");
                        }

                        // If the process id changed, the application was restarted during
                        // processing: the current file is the first of a new batch.
                        if (m_CurrentContext.CurrentApplicationProcess?.Id != curAppPrc)
                        {
                            curBatchSize = 1;
                        }
                        else
                        {
                            curBatchSize++;
                        }

                        if (m_Job.BatchSize > 0 && curBatchSize >= m_Job.BatchSize)
                        {
                            m_JournalWriter.WriteLine("Closing application as batch size reached the limit");
                            TryShutDownApplication(m_CurrentContext.CurrentApplicationProcess);
                            curBatchSize = 0;
                        }
                    }
                }, new StaTaskScheduler(m_Logger)).ConfigureAwait(false);

                jobResult = true;
            }
            catch (OperationCanceledException)
            {
                throw new JobCancelledException();
            }
            finally
            {
                // FIX: detach the event handlers attached above. The original never
                // unsubscribed, so the worker kept this instance reachable and could
                // invoke handlers after the run completed.
                worker.Retry   -= OnRetry;
                worker.Timeout -= OnTimeout;

                TryShutDownApplication(m_CurrentContext.CurrentApplicationProcess);
            }

            var duration = DateTime.Now.Subtract(batchStartTime);

            m_ProgressHandler.ReportCompleted(duration);

            m_JournalWriter.WriteLine($"Batch running completed in {duration.ToString(@"hh\:mm\:ss")}");

            return(jobResult);
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Derives new data values from a source series and inserts them into the
        /// DataValues table in chunked transactions.
        /// </summary>
        /// <param name="A">Constant term of the derivation polynomial.</param>
        /// <param name="B">Linear coefficient.</param>
        /// <param name="C">Quadratic coefficient.</param>
        /// <param name="D">Cubic coefficient.</param>
        /// <param name="E">Quartic coefficient.</param>
        /// <param name="F">Quintic coefficient.</param>
        /// <param name="dt">Source data values (one row per value; may be empty).</param>
        /// <param name="newSeriesID">ID of the series the derived values belong to.</param>
        /// <param name="sourceSeriesID">ID of the source series (currently unused by this method).</param>
        /// <param name="isAlgebraic">When true, applies the polynomial to each value; otherwise values are copied.</param>
        /// <param name="progressHandler">Receives the number of rows written after each chunk.</param>
        public void DeriveInsertDataValues(double A, double B, double C, double D, double E, double F,
                                           DataTable dt,
                                           long newSeriesID, long sourceSeriesID, bool isAlgebraic, IProgressHandler progressHandler)
        {
            const int chunkLength = 400;
            var       nodatavalue = GetNoDataValueForSeriesVariable(newSeriesID);

            // NOTE(review): values are formatted directly into the SQL text instead of
            // being parameterized. Inputs come from a local DataTable, but this remains
            // fragile (quoting, locale-dependent number formatting) - consider using
            // SQL parameters if DbOperations supports them.
            const string insertQuery =
                "INSERT INTO DataValues(ValueID, SeriesID, DataValue, ValueAccuracy, LocalDateTime, UtcOffset, DateTimeUtc, OffsetValue, OffsetTypeID, CensorCode, QualifierID, SampleID, FileID) " +
                "VALUES ({0}, {1}, {2}, {3}, '{4}', {5}, '{6}', {7}, {8}, '{9}', {10}, {11}, {12});";

            var index     = 0;
            var totalRows = dt.Rows.Count;

            // FIX: the original condition `index != dt.Rows.Count - 1` entered the loop
            // on an empty table (0 != -1, then indexed row 0 and threw) and skipped a
            // single-row table entirely (0 != 0 is false). `index < totalRows` handles
            // both edge cases and terminates identically for larger tables.
            while (index < totalRows)
            {
                // Save values by chunks, each chunk inside its own transaction.
                var newValueID = DbOperations.GetNextID("DataValues", "ValueID");
                var query      = new StringBuilder("BEGIN TRANSACTION; ");

                for (int i = 0; i < chunkLength && index < totalRows; i++, index++)
                {
                    var row = dt.Rows[index];

                    // Calculate the derived value.
                    double newvalue;
                    if (isAlgebraic)
                    {
                        var currentvalue = Convert.ToDouble(row["DataValue"]);
                        if (currentvalue != nodatavalue)
                        {
                            //NOTE: Equation = Fx^5 + Ex^4 + Dx^3 + Cx^2 + Bx + A
                            newvalue = (F * Math.Pow(currentvalue, 5)) + (E * Math.Pow(currentvalue, 4)) +
                                       (D * Math.Pow(currentvalue, 3)) + (C * Math.Pow(currentvalue, 2)) +
                                       (B * currentvalue) +
                                       A;
                            newvalue = Math.Round(newvalue, 5);
                        }
                        else
                        {
                            // No-data markers pass through unchanged.
                            newvalue = nodatavalue;
                        }
                    }
                    else
                    {
                        newvalue = Convert.ToDouble(row["DataValue"]);
                    }

                    query.AppendFormat(insertQuery,
                                       newValueID + i,
                                       newSeriesID,
                                       newvalue,
                                       row["ValueAccuracy"].ToString() == "" ? "NULL" : row["ValueAccuracy"].ToString(),
                                       Convert.ToDateTime(row["LocalDateTime"]).ToString("yyyy-MM-dd HH:mm:ss"),
                                       row["UTCOffset"].ToString(),
                                       Convert.ToDateTime(row["DateTimeUTC"]).ToString("yyyy-MM-dd HH:mm:ss"),
                                       row["OffsetValue"].ToString() == "" ? "NULL" : row["OffsetValue"].ToString(),
                                       row["OffsetTypeID"].ToString() == "" ? "NULL" : row["OffsetTypeID"].ToString(),
                                       row["CensorCode"].ToString(),
                                       row["QualifierID"].ToString() == "" ? "NULL" : row["QualifierID"].ToString(),
                                       row["SampleID"].ToString() == "" ? "NULL" : row["SampleID"].ToString(),
                                       row["FileID"].ToString() == "" ? "NULL" : row["FileID"].ToString());
                    query.AppendLine();
                }

                query.AppendLine("COMMIT;");
                DbOperations.ExecuteNonQuery(query.ToString());

                // Report the number of rows written so far.
                progressHandler.ReportProgress(index, null);
            }
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Derives aggregate data values (daily/monthly/quarterly max, min, average or sum)
        /// from the source table and inserts them into the DataValues table in chunked
        /// transactions, advancing <paramref name="currentdate"/> one period per inserted row.
        /// </summary>
        /// <param name="dt">Source data values; aggregates are computed with <see cref="DataTable.Compute"/> over date-range filters.</param>
        /// <param name="newSeriesID">ID of the series the aggregate values belong to.</param>
        /// <param name="currentdate">Start of the first aggregation period (advanced in place as the loop runs).</param>
        /// <param name="lastdate">End of the aggregation range (inclusive).</param>
        /// <param name="mode">Aggregation period: daily, monthly or quarterly.</param>
        /// <param name="computeMode">Aggregate function: maximum, minimum, average or sum.</param>
        /// <param name="nodatavalue">Marker value excluded from aggregation and used when a period has no data.</param>
        /// <param name="progressHandler">Receives a running count of inserted rows.</param>
        public void DeriveInsertAggregateDataValues(DataTable dt,
                                                    long newSeriesID,
                                                    DateTime currentdate, DateTime lastdate, DeriveAggregateMode mode,
                                                    DeriveComputeMode computeMode,
                                                    double nodatavalue, IProgressHandler progressHandler)
        {
            const string insertQuery =
                "INSERT INTO DataValues(ValueID, SeriesID, DataValue, ValueAccuracy, LocalDateTime, UtcOffset, DateTimeUtc, OffsetValue, OffsetTypeID, CensorCode, QualifierID, SampleID, FileID) " +
                "VALUES ({0}, {1}, {2}, {3}, '{4}', {5}, '{6}', {7}, {8}, '{9}', {10}, {11}, {12});";

            const int chunkLength = 400;
            var       index       = 0;

            while (currentdate <= lastdate)
            {
                // Save values by chunks; each chunk runs inside its own transaction.

                var newValueID = DbOperations.GetNextID("DataValues", "ValueID");
                var query      = new StringBuilder("BEGIN TRANSACTION; ");


                for (int i = 0; i <= chunkLength - 1; i++)
                {
                    double newvalue  = 0.0;
                    string sqlString = string.Empty;
                    double UTC       = 0.0;

                    // Build the row filter for the current period:
                    // [currentdate, currentdate + period - 1ms], excluding no-data markers.
                    switch (mode)
                    {
                    case DeriveAggregateMode.Daily:
                        sqlString = "LocalDateTime >= '" + currentdate.ToString(CultureInfo.InvariantCulture) + "' AND LocalDateTime <= '" +
                                    currentdate.AddDays(1).AddMilliseconds(-1).ToString(CultureInfo.InvariantCulture) + "' AND DataValue <> " +
                                    nodatavalue.ToString(CultureInfo.InvariantCulture);
                        break;

                    case DeriveAggregateMode.Monthly:
                        sqlString = "LocalDateTime >= '" + currentdate.ToString(CultureInfo.InvariantCulture) + "' AND LocalDateTime <= '" +
                                    currentdate.AddMonths(1).AddMilliseconds(-1).ToString(CultureInfo.InvariantCulture) + "' AND DataValue <> " +
                                    nodatavalue.ToString(CultureInfo.InvariantCulture);
                        break;

                    case DeriveAggregateMode.Quarterly:
                        sqlString = "LocalDateTime >= '" + currentdate.ToString(CultureInfo.InvariantCulture) +
                                    "' AND LocalDateTime <= '" +
                                    currentdate.AddMonths(3).AddMilliseconds(-1).ToString(
                            CultureInfo.InvariantCulture) + "' AND DataValue <> " +
                                    nodatavalue.ToString(CultureInfo.InvariantCulture);
                        break;
                    }
                    try
                    {
                        // Compute the aggregate for the period; DataTable.Compute throws
                        // (e.g. when the filter matches no rows) and the catch below maps
                        // that to the no-data value.
                        switch (computeMode)
                        {
                        case DeriveComputeMode.Maximum:
                            newvalue = Convert.ToDouble(dt.Compute("Max(DataValue)", sqlString));
                            break;

                        case DeriveComputeMode.Minimum:
                            newvalue = Convert.ToDouble(dt.Compute("MIN(DataValue)", sqlString));
                            break;

                        case DeriveComputeMode.Average:
                            newvalue = Convert.ToDouble(dt.Compute("AVG(DataValue)", sqlString));
                            break;

                        case DeriveComputeMode.Sum:
                            newvalue = Convert.ToDouble(dt.Compute("Sum(DataValue)", sqlString));
                            break;
                        }

                        UTC = Convert.ToDouble(dt.Compute("AVG(UTCOffset)", sqlString));
                    }
                    catch (Exception)
                    {
                        // NOTE(review): this broad catch also leaves UTC at 0.0 for
                        // empty periods, so DateTimeUtc equals LocalDateTime there -
                        // confirm that is intended.
                        newvalue = nodatavalue;
                    }

                    // NOTE(review): dt.Rows[index] advances once per aggregation period,
                    // not per source row; if there are more periods than source rows this
                    // indexing looks like it can run past the table - verify with callers.
                    query.AppendFormat(insertQuery,
                                       newValueID + i,
                                       newSeriesID,
                                       newvalue,
                                       0,
                                       Convert.ToDateTime(dt.Rows[index]["LocalDateTime"]).ToString("yyyy-MM-dd HH:mm:ss"),
                                       UTC.ToString(CultureInfo.InvariantCulture),
                                       currentdate.AddHours(UTC).ToString("yyyy-MM-dd HH:mm:ss"),
                                       "NULL",
                                       "NULL",
                                       "nc",
                                       "NULL",
                                       "NULL",
                                       "NULL");
                    query.AppendLine();

                    // Advance to the start of the next aggregation period.
                    switch (mode)
                    {
                    case DeriveAggregateMode.Daily:
                        currentdate = currentdate.AddDays(1);
                        break;

                    case DeriveAggregateMode.Monthly:
                        currentdate = currentdate.AddMonths(1);
                        break;

                    case DeriveAggregateMode.Quarterly:
                        currentdate = currentdate.AddMonths(3);
                        break;
                    }

                    if (currentdate > lastdate)
                    {
                        break;
                    }
                    index = index + 1;

                    //Report progress
                    progressHandler.ReportProgress(index - 1, null);
                }

                query.AppendLine("COMMIT;");
                DbOperations.ExecuteNonQuery(query.ToString());

                progressHandler.ReportProgress(index - 1, null);
            }
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Runs the batch macro job asynchronously over all files matching the job options.
        /// </summary>
        /// <param name="opts">Batch job options (input, filters, macros, timeout, batch size).</param>
        /// <param name="cancellationToken">Token used to cancel the batch run.</param>
        /// <returns>True when the run completes (failed files are tolerated only when 'Continue On Error' is set).</returns>
        /// <exception cref="UserMessageException">Thrown when the job scope is empty or a file fails without 'Continue On Error'.</exception>
        public async Task <bool> BatchRun(BatchJob opts, CancellationToken cancellationToken = default)
        {
            m_UserLogger.WriteLine($"Batch macro running started");

            var batchStartTime = DateTime.Now;

            var allFiles = PrepareJobScope(opts.Input, opts.Filters, opts.Macros).ToArray();

            m_UserLogger.WriteLine($"Running batch processing for {allFiles.Length} file(s)");

            m_ProgressHandler.SetJobScope(allFiles, batchStartTime);

            if (!allFiles.Any())
            {
                throw new UserMessageException("Empty job. No files matching specified filter");
            }

            // Optional per-file timeout (<= 0 means no timeout).
            TimeSpan timeout = default;

            if (opts.Timeout > 0)
            {
                timeout = TimeSpan.FromSeconds(opts.Timeout);
            }

            IXApplication app    = null;
            Process       appPrc = null;

            // FIX: keep the registration returned by Register so it can be disposed in
            // the finally block. The original discarded it, leaving the callback attached
            // for the whole lifetime of the token (a leak when the token outlives this run).
            CancellationTokenRegistration cancellationRegistration = default;

            if (cancellationToken != default)
            {
                cancellationRegistration = cancellationToken.Register(() =>
                {
                    m_UserLogger.WriteLine($"Cancelled by the user");
                    TryShutDownApplication(appPrc);
                });
            }

            var jobResult = false;

            try
            {
                await Task.Run(() =>
                {
                    // Number of files processed by the current application instance;
                    // used to recycle the application when BatchSize is reached.
                    var curBatchSize = 0;

                    for (int i = 0; i < allFiles.Length; i++)
                    {
                        // Remember the process id so an application restart can be detected below.
                        var curAppPrc = appPrc?.Id;

                        var curFile = allFiles[i];
                        var res     = AttemptProcessFile(ref app, ref appPrc, curFile, opts, cancellationToken);
                        m_ProgressHandler?.ReportProgress(curFile, res);

                        if (!res && !opts.ContinueOnError)
                        {
                            throw new UserMessageException("Cancelling the job. Set 'Continue On Error' option to continue job if file failed");
                        }

                        // A changed process id means the application was restarted while
                        // processing: the current file starts a new batch.
                        if (appPrc?.Id != curAppPrc)
                        {
                            curBatchSize = 1;
                        }
                        else
                        {
                            curBatchSize++;
                        }

                        if (opts.BatchSize > 0 && curBatchSize >= opts.BatchSize && !cancellationToken.IsCancellationRequested)
                        {
                            m_UserLogger.WriteLine("Closing application as batch size reached the limit");
                            TryShutDownApplication(appPrc);
                            curBatchSize = 0;
                        }
                    }
                }).ConfigureAwait(false);

                jobResult = true;
            }
            finally
            {
                // Disposing a default registration is a no-op, so this is safe even
                // when no token was registered.
                cancellationRegistration.Dispose();
                TryShutDownApplication(appPrc);
            }

            var duration = DateTime.Now.Subtract(batchStartTime);

            m_ProgressHandler.ReportCompleted(duration);

            m_UserLogger.WriteLine($"Batch running completed in {duration.ToString(@"hh\:mm\:ss")}");

            return(jobResult);
        }