        /// <summary>
        /// Splits the query's point list into several smaller lists and performs a bulk call
        /// on each list in parallel.
        /// </summary>
        private void GetRecordedValuesBulkParrallel(DataQuery query, AFTimeRange timeRange, int bulkPageSize, int maxDegOfParallel, int bulkParallelChunkSize, CancellationToken cancelToken)
        {
            _logger.WarnFormat("QUERY (BULK-P) # {5} - TAGS: {6} - PERIOD: {3} to {4} - MAX DEG. PAR. {0}, TAG_CHUNK_SIZE {1}, TAG_PAGE_SIZE {2},", maxDegOfParallel, bulkParallelChunkSize, bulkPageSize, timeRange.StartTime,timeRange.EndTime, query.QueryId, query.PiPoints.Count);

            // Split the tag list into chunks; each chunk is handled by one bulk call, run in parallel.
            var pointListList = query.PiPoints.ToList().ChunkBy(bulkParallelChunkSize);
            Parallel.ForEach(pointListList, new ParallelOptions { MaxDegreeOfParallelism = maxDegOfParallel, CancellationToken = cancelToken },
                (pts, state, index) =>
                {
                    var stats = new StatisticsInfo();
                    stats.Stopwatch.Start();

                    PIPagingConfiguration pagingConfiguration = new PIPagingConfiguration(PIPageType.TagCount, bulkPageSize);
                    PIPointList pointList = new PIPointList(pts);

                    try
                    {
                       // _logger.InfoFormat("Bulk query");
                        IEnumerable<AFValues> bulkData = pointList.RecordedValues(timeRange,
                            AFBoundaryType.Inside, String.Empty, false, pagingConfiguration).ToList();

                        if (_enableWrite)
                        {
                            var writeInfo = new WriteInfo()
                            {
                                Data = bulkData,
                                StartTime = timeRange.StartTime,
                                EndTime = timeRange.EndTime,
                                ChunkId = query.ChunkId,
                                SubChunkId = index
                            };

                            _dataWriter.DataQueue.Add(writeInfo, cancelToken);
                        }

                        stats.EventsCount = bulkData.Sum(s => s.Count);
                        stats.Stopwatch.Stop();
                        stats.EventsInWritingQueue = _dataWriter.DataQueue.Count;
                        Statistics.StatisticsQueue.Add(stats, cancelToken);

                    }
                    catch (OperationCanceledException)
                    {
                        // When a bulk paged call fails, the AF SDK cancels the operation and
                        // surfaces the underlying error through the paging configuration.
                        _logger.Error(pagingConfiguration.Error);
                    }
                    catch (Exception ex)
                    {
                        _logger.Error(ex);
                    }

                });
        }
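
        // NOTE: ChunkBy is not a BCL method; it is assumed to be a project-level extension
        // that splits a list into consecutive sub-lists of at most chunkSize elements.
        // A minimal LINQ-based sketch of such a helper (hypothetical, for illustration only):
        //
        //     public static class ListExtensions
        //     {
        //         public static List<List<T>> ChunkBy<T>(this List<T> source, int chunkSize)
        //         {
        //             return source
        //                 .Select((item, index) => new { item, index })
        //                 .GroupBy(x => x.index / chunkSize)
        //                 .Select(g => g.Select(x => x.item).ToList())
        //                 .ToList();
        //         }
        //     }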