/// <summary>
/// Streams a modeled table as CSV to <paramref name="responseStream"/>, paging records through the
/// same SignalR record-operations hub that the active client context uses, so the export matches
/// the record set (filtering, sorting, soft-delete visibility) the user currently sees.
/// </summary>
/// <param name="securityPrincipal">Principal whose roles are validated against the hub's query-records operation.</param>
/// <param name="requestParameters">Expected keys: ModelName, HubName, ConnectionID, FilterText, SortField, SortAscending, ShowDeleted, ParentKeys.</param>
/// <param name="responseStream">Destination stream; note the writer task disposes it when the export completes.</param>
/// <param name="flushResponse">Optional callback invoked after writer flushes, e.g. to flush the HTTP response; may be null.</param>
/// <param name="cancellationToken">Client cancellation token polled by both the read and write tasks.</param>
/// <exception cref="ArgumentNullException">Model or hub name missing from request.</exception>
/// <exception cref="InvalidOperationException">Model or hub type not found in loaded assemblies.</exception>
/// <exception cref="SecurityException">Hub/record-operation access could not be validated, or user lacks required roles.</exception>
private void CopyModelAsCsvToStream(SecurityPrincipal securityPrincipal, NameValueCollection requestParameters, Stream responseStream, Action flushResponse, CompatibleCancellationToken cancellationToken)
{
    string modelName = requestParameters["ModelName"];
    string hubName = requestParameters["HubName"];
    string connectionID = requestParameters["ConnectionID"];
    string filterText = requestParameters["FilterText"];
    string sortField = requestParameters["SortField"];
    bool sortAscending = requestParameters["SortAscending"].ParseBoolean();
    bool showDeleted = requestParameters["ShowDeleted"].ParseBoolean();

    // Parent keys arrive comma-delimited; a regex-encoded comma inside a key is decoded back to a literal comma after the split
    string[] parentKeys = requestParameters["ParentKeys"].Split(',').Select(parentKey => parentKey.Replace(','.RegexEncode(), ",")).ToArray();

    if (string.IsNullOrEmpty(modelName))
    {
        throw new ArgumentNullException(nameof(modelName), "Cannot download CSV data: no model type name was specified.");
    }

    if (string.IsNullOrEmpty(hubName))
    {
        throw new ArgumentNullException(nameof(hubName), "Cannot download CSV data: no hub type name was specified.");
    }

    // Resolve model and hub types by name from currently loaded assemblies
    Type modelType = AssemblyInfo.FindType(modelName);

    if ((object)modelType == null)
    {
        throw new InvalidOperationException($"Cannot download CSV data: failed to find model type \"{modelName}\" in loaded assemblies.");
    }

    Type hubType = AssemblyInfo.FindType(hubName);

    if ((object)hubType == null)
    {
        throw new InvalidOperationException($"Cannot download CSV data: failed to find hub type \"{hubName}\" in loaded assemblies.");
    }

    IRecordOperationsHub hub;

    // Record operation tuple defines method name and allowed roles
    Tuple<string, string> queryRecordCountOperation;
    Tuple<string, string> queryRecordsOperation;
    string queryRoles;

    try
    {
        // Create a local record operations hub instance so that CSV export can query same record set that is visible in active hub context
        hub = Activator.CreateInstance(hubType) as IRecordOperationsHub;

        if ((object)hub == null)
        {
            throw new SecurityException($"Cannot download CSV data: hub type \"{hubName}\" is not a IRecordOperationsHub, access cannot be validated.");
        }

        // Assign provided connection ID from active hub context to our local hub instance so that any session based data will be available to query functions
        hub.ConnectionID = connectionID;

        Tuple<string, string>[] recordOperations;

        try
        {
            // Get any authorized query roles as defined in hub records operations for modeled table, default to read allowed for query
            recordOperations = hub.RecordOperationsCache.GetRecordOperations(modelType);

            if ((object)recordOperations == null)
            {
                throw new NullReferenceException();
            }
        }
        catch (KeyNotFoundException ex)
        {
            throw new SecurityException($"Cannot download CSV data: hub type \"{hubName}\" does not define record operations for \"{modelName}\", access cannot be validated.", ex);
        }

        // Get record operation for querying record count
        queryRecordCountOperation = recordOperations[(int)RecordOperation.QueryRecordCount];

        if ((object)queryRecordCountOperation == null)
        {
            throw new NullReferenceException();
        }

        // Get record operation for querying records
        queryRecordsOperation = recordOperations[(int)RecordOperation.QueryRecords];

        if ((object)queryRecordsOperation == null)
        {
            throw new NullReferenceException();
        }

        // Get any defined role restrictions for record query operation - access to CSV download will based on these roles.
        // Item1 is the hub method name, Item2 the role list: no defined query method means unrestricted ("*"); otherwise
        // use the operation's roles, defaulting to "*" when none are declared.
        queryRoles = string.IsNullOrEmpty(queryRecordsOperation.Item1) ? "*" : queryRecordsOperation.Item2 ?? "*";
    }
    catch (Exception ex)
    {
        throw new SecurityException($"Cannot download CSV data: failed to instantiate hub type \"{hubName}\" or access record operations, access cannot be validated.", ex);
    }

    DataContext dataContext = hub.DataContext;

    // Validate current user has access to requested data
    if (!dataContext.UserIsInRole(securityPrincipal, queryRoles))
    {
        throw new SecurityException($"Cannot download CSV data: access is denied for user \"{securityPrincipal?.Identity.Name ?? "Undefined"}\", minimum required roles = {queryRoles.ToDelimitedString(", ")}.");
    }

    // Producer/consumer handoff state: the read task accumulates CSV text in readBuffer and moves
    // ~512KB chunks into writeBuffer under writeBufferLock; bufferReady signals the write task.
    // NOTE(review): readComplete is a plain bool shared across the two tasks without volatile/lock --
    // presumably the event signaling provides sufficient ordering in practice; confirm before restructuring.
    const int TargetBufferSize = 524288;
    StringBuilder readBuffer = new StringBuilder(TargetBufferSize * 2);
    ManualResetEventSlim bufferReady = new ManualResetEventSlim(false);
    List<string> writeBuffer = new List<string>();
    object writeBufferLock = new object();
    bool readComplete = false;

    ITableOperations table;
    string[] fieldNames;
    bool hasDeletedField;

    table = dataContext.Table(modelType);

    // Exclude any fields marked with CSVExcludeFieldAttribute from the export
    fieldNames = table.GetFieldNames(false).Where(field => (!(table.FieldHasAttribute<GSF.Data.Model.CSVExcludeFieldAttribute>(field)))).ToArray();
    hasDeletedField = !string.IsNullOrEmpty(dataContext.GetIsDeletedFlag(modelType));

    // Read task: pages records out of the hub via reflection and produces CSV chunks
    Task readTask = Task.Factory.StartNew(() =>
    {
        try
        {
            const int PageSize = 250;

            // Get query operation methods
            MethodInfo queryRecordCount = hubType.GetMethod(queryRecordCountOperation.Item1);
            MethodInfo queryRecords = hubType.GetMethod(queryRecordsOperation.Item1);

            // Setup query parameters
            List<object> queryRecordCountParameters = new List<object>();
            List<object> queryRecordsParameters = new List<object>();

            // Add current show deleted state parameter, if model defines a show deleted field
            if (hasDeletedField)
            {
                queryRecordCountParameters.Add(showDeleted);
            }

            // Add any parent key restriction parameters, converting each string to the
            // positional parameter type declared by the hub's query-record-count method
            if (parentKeys.Length > 0 && parentKeys[0].Length > 0)
            {
                queryRecordCountParameters.AddRange(parentKeys.Select((value, i) =>
                {
                    Type type = queryRecordCount.GetParameters()[i].ParameterType;

                    if (type == typeof(string))
                    {
                        return((object)value);
                    }

                    if (type == typeof(Guid))
                    {
                        return((object)Guid.Parse(value));
                    }

                    return(Convert.ChangeType(value, type));
                }));
            }

            // Add parameters for query records from query record count parameters - they match up to this point
            queryRecordsParameters.AddRange(queryRecordCountParameters);

            // Add sort field parameter
            queryRecordsParameters.Add(sortField);

            // Add ascending sort order parameter
            queryRecordsParameters.Add(sortAscending);

            // Track parameter index for current page to query
            int pageParameterIndex = queryRecordsParameters.Count;

            // Add page index parameter
            queryRecordsParameters.Add(0);

            // Add page size parameter
            queryRecordsParameters.Add(PageSize);

            // Add filter text parameter
            queryRecordCountParameters.Add(filterText);
            queryRecordsParameters.Add(filterText);

            // Read queried records in page sets so there is not a memory burden and long initial query delay on very large data sets
            int recordCount = (int)queryRecordCount.Invoke(hub, queryRecordCountParameters.ToArray());
            int totalPages = Math.Max((int)Math.Ceiling(recordCount / (double)PageSize), 1);

            // Read data pages
            for (int page = 0; page < totalPages && !cancellationToken.IsCancelled; page++)
            {
                // Update desired page to query (hub pages are 1-based)
                queryRecordsParameters[pageParameterIndex] = page + 1;

                // Query page records
                IEnumerable records = queryRecords.Invoke(hub, queryRecordsParameters.ToArray()) as IEnumerable ?? Enumerable.Empty<object>();
                int exportCount = 0;

                // Export page records
                foreach (object record in records)
                {
                    // Periodically check for client cancellation
                    if (exportCount++ % (PageSize / 4) == 0 && cancellationToken.IsCancelled)
                    {
                        break;
                    }

                    // Every field value is double-quoted; NOTE(review): embedded quotes in values are
                    // not escaped here -- presumably upstream data excludes them; confirm if that matters
                    readBuffer.AppendLine(string.Join(",", fieldNames.Select(fieldName => $"\"{table.GetFieldValue(record, fieldName)}\"")));

                    if (readBuffer.Length < TargetBufferSize)
                    {
                        continue;
                    }

                    // Hand the accumulated chunk to the write task and signal it
                    lock (writeBufferLock)
                        writeBuffer.Add(readBuffer.ToString());

                    readBuffer.Clear();
                    bufferReady.Set();
                }
            }

            // Flush any partial final chunk
            if (readBuffer.Length > 0)
            {
                lock (writeBufferLock)
                    writeBuffer.Add(readBuffer.ToString());
            }
        }
        finally
        {
            // Always release the write task, even on failure/cancellation, so it can observe completion
            readComplete = true;
            bufferReady.Set();
        }
    }, cancellationToken);

    // Write task: drains queued CSV chunks to the response stream as they become available
    Task writeTask = Task.Factory.StartNew(() =>
    {
        using (StreamWriter writer = new StreamWriter(responseStream))
        {
            //Ticks exportStart = DateTime.UtcNow.Ticks;
            string[] localBuffer;

            Action flushStream = () =>
            {
                writer.Flush();

                if ((object)flushResponse != null)
                {
                    flushResponse();
                }
            };

            // Write column headers
            writer.WriteLine(string.Join(",", fieldNames.Select(fieldName => $"\"{fieldName}\"")));
            flushStream();

            while ((writeBuffer.Count > 0 || !readComplete) && !cancellationToken.IsCancelled)
            {
                // Wait for the read task to signal a ready chunk (or completion)
                bufferReady.Wait(cancellationToken);
                bufferReady.Reset();

                // Snapshot and clear the shared buffer under lock, then write outside the lock
                lock (writeBufferLock)
                {
                    localBuffer = writeBuffer.ToArray();
                    writeBuffer.Clear();
                }

                foreach (string buffer in localBuffer)
                {
                    writer.Write(buffer);
                }
            }

            // Flush stream
            flushStream();

            //Debug.WriteLine("Export time: " + (DateTime.UtcNow.Ticks - exportStart).ToElapsedTimeString(3));
        }
    }, cancellationToken);

    // Block until both producer and consumer finish
    Task.WaitAll(readTask, writeTask);
}
/// <summary>
/// Dispatches a table export request to the matching CSV exporter based on the
/// request's "type" parameter; unrecognized types produce no output.
/// </summary>
/// <param name="requestParameters">Request parameters; "type" selects the export flavor.</param>
/// <param name="responseStream">Stream the selected exporter writes CSV data to.</param>
/// <param name="flushResponse">Response flush callback (unused by this dispatcher).</param>
/// <param name="cancellationToken">Client cancellation token (unused by this dispatcher).</param>
private void WriteTableToStream(NameValueCollection requestParameters, Stream responseStream, Action flushResponse, CompatibleCancellationToken cancellationToken)
{
    string exportType = requestParameters["type"];

    switch (exportType)
    {
        case "csv":
            ExportToCSV(responseStream, requestParameters);
            break;

        case "stats":
            ExportStatsToCSV(responseStream, requestParameters);
            break;

        case "harmonics":
            ExportHarmonicsToCSV(responseStream, requestParameters);
            break;

        case "correlatedsags":
            ExportCorrelatedSagsToCSV(responseStream, requestParameters);
            break;
    }
}
/// <summary>
/// Reads historian query parameters from the request body (JSON), cancels any in-flight
/// query for this controller, then queries trend values and returns them with timestamps
/// converted to the requested representation.
/// </summary>
/// <param name="cancellationToken">Request-level cancellation token; linked into the per-controller token source.</param>
/// <returns>
/// Lazily-evaluated trend value sequence. NOTE(review): the Select projections below mutate
/// value.Timestamp as a side effect and are deferred -- the conversion runs when the caller
/// (presumably the serializer) enumerates the result.
/// </returns>
/// <exception cref="ObjectDisposedException">Controller was disposed before the query started.</exception>
public async Task<IEnumerable<TrendValue>> GetHistorianData(CancellationToken cancellationToken)
{
    QueryParameters queryParameters;

    // Deserialize the POSTed JSON body into the query parameter structure
    using (Stream contentStream = await Request.Content.ReadAsStreamAsync())
    using (StreamReader contentReader = new StreamReader(contentStream))
    using (JsonTextReader jsonReader = new JsonTextReader(contentReader))
    {
        JObject jsonObject = await JObject.LoadAsync(jsonReader);
        queryParameters = jsonObject.ToObject<QueryParameters>();
    }

    string instanceName = queryParameters.instanceName;
    DateTime startTime = queryParameters.startTime;
    DateTime stopTime = queryParameters.stopTime;
    ulong[] measurementIDs = queryParameters.measurementIDs;
    Resolution resolution = queryParameters.resolution;
    int seriesLimit = queryParameters.seriesLimit;
    bool forceLimit = queryParameters.forceLimit;

    // Try to ensure another linked cancellation token is not created after dispose,
    // because another call to Dispose() won't clean it up
    if (m_disposed)
    {
        throw new ObjectDisposedException(nameof(HistorianQueryController));
    }

    // Cancel any running operation: atomically install the new linked source, then
    // cancel and dispose whichever source (if any) was previously active
    CancellationTokenSource linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);

    using (CancellationTokenSource oldTokenSource = Interlocked.Exchange(ref m_linkedTokenSource, linkedTokenSource))
    {
        oldTokenSource?.Cancel();
    }

    SnapServer server = GetServer(instanceName)?.Host;
    ICancellationToken compatibleToken = new CompatibleCancellationToken(linkedTokenSource);
    IEnumerable<TrendValue> values = TrendValueAPI.GetHistorianData(server, instanceName, startTime, stopTime, measurementIDs, resolution, seriesLimit, forceLimit, compatibleToken);

    // Convert timestamps to the caller's requested representation.
    // Assumes TrendValue.Timestamp is Unix epoch milliseconds -- TODO confirm:
    // Ticks: ms * 10,000 ticks/ms, offset by 621355968000000000 (ticks at 1970-01-01)
    // UnixSeconds: ms / 1000
    switch (queryParameters.timestampType)
    {
        case TimestampType.Ticks:
            return(values.Select(value =>
            {
                value.Timestamp = value.Timestamp * 10000.0D + 621355968000000000.0D;
                return value;
            }));
        case TimestampType.UnixSeconds:
            return(values.Select(value =>
            {
                value.Timestamp = value.Timestamp / 1000.0D;
                return value;
            }));
        default:
            return(values);
    }
}
/// <summary>
/// Exports breaker data as CSV: breaker "0" means "all breakers"; any other
/// value exports that single breaker's data for the given date range.
/// </summary>
/// <param name="breaker">Breaker identifier, or "0" to export all breakers.</param>
/// <param name="fromDate">Inclusive range start.</param>
/// <param name="toDate">Inclusive range end.</param>
/// <param name="responseStream">Stream the exporter writes CSV data to.</param>
/// <param name="flushResponse">Response flush callback (unused by this dispatcher).</param>
/// <param name="cancellationToken">Client cancellation token (unused by this dispatcher).</param>
private void WriteTableToStream(string breaker, DateTime fromDate, DateTime toDate, Stream responseStream, Action flushResponse, CompatibleCancellationToken cancellationToken)
{
    // Sentinel "0" selects the all-breakers export
    if (breaker == "0")
    {
        ExportAllToCSV(responseStream, fromDate, toDate);
        return;
    }

    ExportIndividualToCSV(responseStream, breaker, fromDate, toDate);
}
/// <summary>
/// Writes one CSV row per channel (with normal limits) for the given meter, each row
/// prefixed with the meter's own CSV fields. The underlying stream is left open so
/// callers (e.g., SendMetersWithNormalLimitsToCSV) can continue writing to it.
/// </summary>
/// <param name="returnStream">Destination stream; NOT closed by this method.</param>
/// <param name="meterID">ID of the meter whose channels are exported.</param>
/// <param name="searchString">Search text converted to a record restriction on the channel table.</param>
/// <param name="cancellationToken">Client cancellation token; checked per row.</param>
/// <param name="sortField">Channel sort field, or null for no ordering.</param>
/// <param name="ascending">True for ascending sort, false for descending.</param>
public void SendChannelsWithNormalLimitsToCSV(Stream returnStream, int meterID, string searchString, CompatibleCancellationToken cancellationToken, string sortField = "Name", bool ascending = true)
{
    MetersWithNormalLimits meter;
    List<ChannelsWithNormalLimits> channels;

    using (var connection = new AdoDataConnection("systemSettings"))
    {
        TableOperations<MetersWithNormalLimits> meterTable = new TableOperations<MetersWithNormalLimits>(connection);
        TableOperations<ChannelsWithNormalLimits> channelTable = new TableOperations<ChannelsWithNormalLimits>(connection);

        meter = meterTable.QueryRecordWhere("ID = {0}", meterID);

        RecordRestriction searchRestriction = channelTable.GetSearchRestriction(searchString);
        string sortExpression = sortField != null ? sortField + (ascending ? " ASC" : " DESC") : null;

        // Materialize before the connection is disposed
        channels = channelTable.QueryRecords(sortExpression, new RecordRestriction("MeterID = {0}", meterID) + searchRestriction).ToList();
    }

    string meterInfo = meter.ToCSV();

    // BUG FIX: the original writer closed returnStream on dispose, breaking callers that
    // write multiple meters to the same stream -- leaveOpen keeps the stream usable
    using (StreamWriter writer = new StreamWriter(returnStream, new UTF8Encoding(false), 4096, true))
    {
        foreach (ChannelsWithNormalLimits limit in channels)
        {
            // BUG FIX: honor the cancellation token the original accepted but ignored
            if (cancellationToken.IsCancelled)
            {
                return;
            }

            writer.WriteLine(meterInfo + "," + limit.ToCSV());
        }
    }
}
/// <summary>
/// Exports channel rows (with normal limits) for every meter matching the search string,
/// delegating per-meter row generation to SendChannelsWithNormalLimitsToCSV.
/// </summary>
/// <param name="returnStream">Destination stream for the CSV rows.</param>
/// <param name="searchString">Search text converted to a record restriction on the meter table.</param>
/// <param name="cancellationToken">Client cancellation token; checked before each meter.</param>
/// <param name="sortField">Meter sort field, or null for no ordering.</param>
/// <param name="ascending">True for ascending sort, false for descending.</param>
public void SendMetersWithNormalLimitsToCSV(Stream returnStream, string searchString, CompatibleCancellationToken cancellationToken, string sortField = "Name", bool ascending = true)
{
    List<MetersWithNormalLimits> meterRecords;

    // Query the full meter list up front so the connection is released before streaming begins
    using (AdoDataConnection connection = new AdoDataConnection("systemSettings"))
    {
        TableOperations<MetersWithNormalLimits> meterTable = new TableOperations<MetersWithNormalLimits>(connection);
        RecordRestriction searchRestriction = meterTable.GetSearchRestriction(searchString);

        string orderBy = null;

        if (sortField != null)
            orderBy = ascending ? sortField + " ASC" : sortField + " DESC";

        meterRecords = meterTable.QueryRecords(orderBy, searchRestriction).ToList();
    }

    // Stream each meter's channels, bailing out as soon as the client cancels
    foreach (MetersWithNormalLimits meterRecord in meterRecords)
    {
        if (cancellationToken.IsCancelled)
            return;

        SendChannelsWithNormalLimitsToCSV(returnStream, meterRecord.ID, "", cancellationToken);
    }
}
/// <summary>
/// Writes one CSV row per hour-of-week limit for the given channel, each row prefixed with
/// the owning meter's and channel's CSV fields. The underlying stream is left open so
/// callers (e.g., SendChannelsWithHourlyLimitsToCSV, WriteTableToStream) can continue writing.
/// </summary>
/// <param name="returnStream">Destination stream; NOT closed by this method.</param>
/// <param name="channelID">ID of the channel whose hourly limits are exported.</param>
/// <param name="searchString">Search text converted to a record restriction on the limit view.</param>
/// <param name="cancellationToken">Client cancellation token; checked per row.</param>
/// <param name="sortField">Limit sort field, or null for no ordering.</param>
/// <param name="ascending">True for ascending sort, false for descending.</param>
public void SendHourOfWeekLimitTableToCSV(Stream returnStream, int channelID, string searchString, CompatibleCancellationToken cancellationToken, string sortField = "HourOfWeek", bool ascending = true)
{
    MetersWithHourlyLimits meter;
    ChannelsWithHourlyLimits channel;
    List<HourOfWeekLimitView> hourlyLimits;

    using (var connection = new AdoDataConnection("systemSettings"))
    {
        TableOperations<MetersWithHourlyLimits> meterTable = new TableOperations<MetersWithHourlyLimits>(connection);
        TableOperations<ChannelsWithHourlyLimits> channelTable = new TableOperations<ChannelsWithHourlyLimits>(connection);
        TableOperations<HourOfWeekLimitView> table = new TableOperations<HourOfWeekLimitView>(connection);

        channel = channelTable.QueryRecordWhere("ID = {0}", channelID);
        meter = meterTable.QueryRecordWhere("ID = {0}", channel.MeterID);

        RecordRestriction searchRestriction = table.GetSearchRestriction(searchString);
        RecordRestriction channelRestriction = new RecordRestriction("ChannelID = {0}", channel.ID);
        RecordRestriction finalRestriction = searchRestriction + channelRestriction;
        string sortExpression = sortField != null ? sortField + (ascending ? " ASC" : " DESC") : null;

        // Materialize before the connection is disposed
        hourlyLimits = table.QueryRecords(sortExpression, finalRestriction).ToList();
    }

    string meterInfo = meter.ToCSV();
    string channelInfo = channel.ToCSV();

    // BUG FIX: the original writer closed returnStream on dispose, breaking callers that
    // write multiple channels to the same stream -- leaveOpen keeps the stream usable
    using (StreamWriter writer = new StreamWriter(returnStream, new UTF8Encoding(false), 4096, true))
    {
        foreach (HourOfWeekLimitView limit in hourlyLimits)
        {
            // BUG FIX: honor the cancellation token the original accepted but ignored
            if (cancellationToken.IsCancelled)
            {
                return;
            }

            writer.WriteLine(meterInfo + "," + channelInfo + "," + limit.ToCSV());
        }
    }
}
/// <summary>
/// Exports hour-of-week limit rows for every channel of the given meter matching the search
/// string, delegating per-channel row generation to SendHourOfWeekLimitTableToCSV.
/// </summary>
/// <param name="returnStream">Destination stream for the CSV rows.</param>
/// <param name="meterID">ID of the meter whose channels are exported.</param>
/// <param name="searchString">Search text converted to a record restriction on the channel table.</param>
/// <param name="cancellationToken">Client cancellation token; checked before each channel.</param>
/// <param name="sortField">Channel sort field, or null for no ordering.</param>
/// <param name="ascending">True for ascending sort, false for descending.</param>
public void SendChannelsWithHourlyLimitsToCSV(Stream returnStream, int meterID, string searchString, CompatibleCancellationToken cancellationToken, string sortField = "Name", bool ascending = true)
{
    MetersWithHourlyLimits meterRecord;
    List<ChannelsWithHourlyLimits> channelRecords;

    // Query everything up front so the connection is released before streaming begins
    using (AdoDataConnection connection = new AdoDataConnection("systemSettings"))
    {
        TableOperations<MetersWithHourlyLimits> meterTable = new TableOperations<MetersWithHourlyLimits>(connection);
        TableOperations<ChannelsWithHourlyLimits> channelTable = new TableOperations<ChannelsWithHourlyLimits>(connection);

        meterRecord = meterTable.QueryRecordWhere("ID = {0}", meterID);

        string orderBy = sortField == null ? null : sortField + (ascending ? " ASC" : " DESC");
        RecordRestriction combinedRestriction = channelTable.GetSearchRestriction(searchString) + new RecordRestriction("MeterID = {0}", meterID);

        channelRecords = channelTable.QueryRecords(orderBy, combinedRestriction).ToList();
    }

    // Stream each channel's hourly limits, bailing out as soon as the client cancels
    foreach (ChannelsWithHourlyLimits channelRecord in channelRecords)
    {
        if (cancellationToken.IsCancelled)
            return;

        SendHourOfWeekLimitTableToCSV(returnStream, channelRecord.ID, "", cancellationToken);
    }
}
/// <summary>
/// Writes CSV headers for the requested view, then dispatches to the matching limits
/// exporter based on the request's "CallingFrom" parameter.
/// </summary>
/// <param name="requestParameters">Expected keys: CallingFrom, ID (where applicable), FilterText, SortField, SortAscending.</param>
/// <param name="responseStream">Destination stream; headers and rows are both written here.</param>
/// <param name="flushResponse">Response flush callback (unused by this dispatcher).</param>
/// <param name="cancellationToken">Client cancellation token, forwarded to the exporters.</param>
public void WriteTableToStream(NameValueCollection requestParameters, Stream responseStream, Action flushResponse, CompatibleCancellationToken cancellationToken)
{
    string callingFrom = requestParameters["CallingFrom"];
    string filterText = requestParameters["FilterText"];
    string sortField = requestParameters["SortField"];
    bool sortAscending = requestParameters["SortAscending"].ParseBoolean();

    // BUG FIX: tolerate a missing/invalid "ID" parameter -- two dispatch branches below
    // never use it, but the original int.Parse threw before any branch could run
    int.TryParse(requestParameters["ID"], out int ID);

    // BUG FIX: the original header writer closed responseStream on dispose, so every
    // Send*ToCSV delegate below received a closed stream -- leaveOpen keeps it usable
    using (StreamWriter writer = new StreamWriter(responseStream, new UTF8Encoding(false), 4096, true))
    {
        writer.WriteLine(GetCSVHeaders(callingFrom));
    }

    switch (callingFrom)
    {
        case "Alarms":
            SendHourOfWeekLimitTableToCSV(responseStream, ID, filterText, cancellationToken, sortField, sortAscending);
            break;

        case "Channels":
            SendChannelsWithHourlyLimitsToCSV(responseStream, ID, filterText, cancellationToken, sortField, sortAscending);
            break;

        case "MetersWithNormalLimits":
            SendMetersWithNormalLimitsToCSV(responseStream, filterText, cancellationToken, sortField, sortAscending);
            break;

        case "ChannelsWithNormalLimits":
            SendChannelsWithNormalLimitsToCSV(responseStream, ID, filterText, cancellationToken, sortField, sortAscending);
            break;

        default:
            SendMetersWithHourlyLimitsToCSV(responseStream, filterText, cancellationToken, sortField, sortAscending);
            break;
    }
}