/// <summary>
/// Creates a new instance of the <see cref="Timestamp"/> from an existing line image.
/// </summary>
/// <param name="lineImage">Line image to parse.</param>
public Timestamp(string lineImage)
{
    // Line image is colon-delimited; when four parts exist the trailing part is a
    // separate sub-second value that gets folded into the seconds field.
    string[] parts = lineImage.Split(':');
    double seconds;
    double milliseconds = 0.0D;

    if (parts.Length == 4)
    {
        // Parse trailing sub-second field using invariant culture, then drop it
        double.TryParse(parts[parts.Length - 1], NumberStyles.Float, CultureInfo.InvariantCulture, out milliseconds);
        parts = new[] { parts[0], parts[1], parts[2] };
    }

    double.TryParse(parts[parts.Length - 1], NumberStyles.Float, CultureInfo.InvariantCulture, out seconds);
    seconds += milliseconds;

    // Re-render combined seconds with invariant culture so the decimal separator is
    // always a period - a culture-specific separator (e.g., a comma under many European
    // locales) would break the invariant-culture TryParseExact below
    parts[parts.Length - 1] = seconds.ToString("00.000000", CultureInfo.InvariantCulture);
    lineImage = string.Join(":", parts).RemoveWhiteSpace();

    DateTime result;
    DateTime.TryParseExact(lineImage, new[] { "d/M/yyyy,H:mm:ss.ffffff", "M/d/yyyy,H:mm:ss.ffffff" }, CultureInfo.InvariantCulture, DateTimeStyles.None, out result);

    // On parse failure result stays at DateTime.MinValue, yielding a zero-tick timestamp
    Value = result.Ticks;
}
/// <summary>
/// Constructs a new <see cref="Frame"/> given the specified parameters.
/// </summary>
/// <param name="timestamp">Timestamp, in ticks, for this <see cref="Frame"/>.</param>
/// <param name="measurements">Initial set of measurements to load into the <see cref="Frame"/>, if any.</param>
public Frame(Ticks timestamp, IDictionary<MeasurementKey, IMeasurement> measurements)
{
    m_timestamp = timestamp;

    // Record wall-clock arrival time of the frame
    m_receivedTimestamp = DateTime.UtcNow.Ticks;

    // Copy the initial measurements into a thread-safe dictionary
    m_measurements = new ConcurrentDictionary<MeasurementKey, IMeasurement>(measurements);

    // -1 appears to be a sentinel for "no measurements sorted yet" - confirm against class usage
    m_sortedMeasurements = -1;
}
/// <summary>
/// Initializes a new instance of the <see cref="TimestampTest"/> class.
/// </summary>
public TimestampTest()
{
    // Measurements with bad timestamps, keyed by their (bad) timestamp
    m_badTimestampMeasurements = new Dictionary<Ticks, LinkedList<IMeasurement>>();

    // Defaults: purge tracked measurements after 1 second, warn every 4 seconds
    m_timeToPurge = Ticks.FromSeconds(1.0);
    m_warnInterval = Ticks.FromSeconds(4.0);

    // Timers are created here but presumably configured/started elsewhere - confirm
    m_purgeTimer = new Timer();
    m_warningTimer = new Timer();
}
/// <summary>
/// Constructs a new <see cref="Measurement"/> using default settings.
/// </summary>
public Measurement()
{
    // Stamp the arrival time; high-resolution timer is used when the build enables it
#if UseHighResolutionTime
    m_receivedTimestamp = PrecisionTimer.UtcNow.Ticks;
#else
    m_receivedTimestamp = DateTime.UtcNow.Ticks;
#endif

    // Default multiplier of 1.0 leaves the measurement value unadjusted
    m_multiplier = 1.0D;
}
/// <summary>
/// Creates a new <see cref="ConfigurationCell"/> from serialization parameters.
/// </summary>
/// <param name="info">The <see cref="SerializationInfo"/> with populated with data.</param>
/// <param name="context">The source <see cref="StreamingContext"/> for this deserialization.</param>
protected ConfigurationCell(SerializationInfo info, StreamingContext context)
    : base(info, context)
{
    // Deserialize configuration cell
    // Keys must match those written by the corresponding GetObjectData implementation
    m_timeOffset = info.GetInt64("timeOffset");
    m_longitude = info.GetDouble("longitude");
    m_latitude = info.GetDouble("latitude");
    m_numberOfSatellites = info.GetInt32("numberOfSatellites");
}
/// <summary>
/// Creates a new instance of the <see cref="FlatlineTest"/> class.
/// </summary>
public FlatlineTest()
{
    // Defaults: 4 s of unchanged value counts as a flatline, warn every 4 s,
    // email at most once per hour (3600 s)
    m_minFlatline = Ticks.FromSeconds(4);
    m_warnInterval = Ticks.FromSeconds(4);
    m_emailInterval = Ticks.FromSeconds(3600);
    m_smtpServer = Mail.DefaultSmtpServer;

    // Per-key tracking of last changed value and last notification time
    m_lastChange = new Dictionary<MeasurementKey, IMeasurement>();
    m_lastNotified = new Dictionary<MeasurementKey, Ticks>();
    m_warningTimer = new Timer();
}
/// <summary>
/// Constructs a new <see cref="TemporalMeasurement"/> given the specified parameters.
/// </summary>
/// <param name="id">Numeric ID of the <see cref="TemporalMeasurement"/>.</param>
/// <param name="source">Source of the <see cref="TemporalMeasurement"/>(e.g., name of archive).</param>
/// <param name="value">Value of the <see cref="TemporalMeasurement"/>.</param>
/// <param name="timestamp">Timestamp of the <see cref="TemporalMeasurement"/>.</param>
/// <param name="lagTime">Past time deviation tolerance, in seconds - this becomes the amount of time to wait before publishing begins.</param>
/// <param name="leadTime">Future time deviation tolerance, in seconds - this becomes the tolerated +/- accuracy of the local clock to real-time.</param>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="lagTime"/> or <paramref name="leadTime"/> is zero or negative.</exception>
public TemporalMeasurement(uint id, string source, double value, Ticks timestamp, double lagTime, double leadTime)
    : base(id, source, value, timestamp)
{
    // Both tolerances must be strictly positive; fractional-second values are allowed
    if (lagTime <= 0)
        throw new ArgumentOutOfRangeException("lagTime", "lagTime must be greater than zero, but it can be less than one");

    if (leadTime <= 0)
        throw new ArgumentOutOfRangeException("leadTime", "leadTime must be greater than zero, but it can be less than one");

    m_lagTime = lagTime;
    m_leadTime = leadTime;
}
private IMeasurement m_lastSortedMeasurement;   // Last measurement sorted into this frame

#endregion

#region [ Constructors ]

/// <summary>
/// Constructs a new <see cref="Frame"/> given the specified parameters.
/// </summary>
/// <param name="timestamp">Timestamp, in ticks, for this <see cref="Frame"/>.</param>
/// <param name="expectedMeasurements">Expected number of measurements for the <see cref="Frame"/>.</param>
public Frame(Ticks timestamp, int expectedMeasurements = -1)
{
    m_timestamp = timestamp;

    // Record wall-clock arrival time of the frame
    m_receivedTimestamp = DateTime.UtcNow.Ticks;

    // Presize the dictionary (2x expected count) when a positive hint is provided to
    // reduce resize churn; otherwise fall back to default capacity
    if (expectedMeasurements > 0)
        m_measurements = new ConcurrentDictionary<MeasurementKey, IMeasurement>(s_defaultConcurrencyLevel, expectedMeasurements * 2);
    else
        m_measurements = new ConcurrentDictionary<MeasurementKey, IMeasurement>();

    // -1 appears to be a sentinel for "no measurements sorted yet" - confirm against class usage
    m_sortedMeasurements = -1;
}
// Entry point: wires a Timer method into a Ticks delegate, then invokes the delegate
// every 60 ms forever. NOTE(review): Timer/Ticks here are user-defined types declared
// elsewhere in this project (Ticks is presumably a delegate type) - not System.Timers.
static void Main(string[] args)
{
    //Using delegates write a class Timer that has can execute certain method at each t seconds.
    Timer objct = new Timer();
    Ticks timer = new Ticks(objct.Ticksing);//the delegate calls the method

    // Infinite loop - program never exits on its own
    while (true)
    {
        Thread.Sleep(60);//every 60 ms a number is printed
        timer(10);
    }
}
/// <summary>
/// Constructs a new <see cref="Frame"/> given the specified parameters.
/// </summary>
/// <param name="timestamp">Timestamp, in ticks, for this <see cref="Frame"/>.</param>
/// <param name="expectedMeasurements">Expected number of measurements for the <see cref="Frame"/>.</param>
public Frame(Ticks timestamp, int expectedMeasurements)
{
    m_timestamp = timestamp;

    // Stamp arrival time; high-resolution timer is used when the build enables it
#if UseHighResolutionTime
    m_receivedTimestamp = PrecisionTimer.UtcNow.Ticks;
#else
    m_receivedTimestamp = DateTime.UtcNow.Ticks;
#endif

    // Presize the dictionary (2x expected count) when a positive hint is provided;
    // otherwise fall back to default capacity
    if (expectedMeasurements > 0)
        m_measurements = new ConcurrentDictionary<MeasurementKey, IMeasurement>(s_defaultConcurrencyLevel, expectedMeasurements * 2);
    else
        m_measurements = new ConcurrentDictionary<MeasurementKey, IMeasurement>();

    // -1 appears to be a sentinel for "no measurements sorted yet" - confirm against class usage
    m_sortedMeasurements = -1;
}
/// <summary>
/// Data arriving here will now be filtered.
/// </summary>
/// <param name="data">Ticks data array</param>
public void OnData(Ticks data)
{
    // Only act when SPY ticks arrived this second
    if (data.ContainsKey("SPY"))
    {
        // data["SPY"] yields the list of SPY ticks received this second;
        // log the exchange for each one
        foreach (var spyTick in data["SPY"])
        {
            Log(spyTick.Exchange);
        }

        // Take a full SPY position if we are not already invested
        if (!Portfolio.Invested)
        {
            SetHoldings("SPY", 1);
        }
    }
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrame"/> from specified parameters.
/// </summary>
/// <param name="idCode">The ID code of this <see cref="ConfigurationFrame"/>.</param>
/// <param name="timestamp">The exact timestamp, in <see cref="Ticks"/>, of the data represented by this <see cref="ConfigurationFrame"/>.</param>
/// <param name="frameRate">The defined frame rate of this <see cref="ConfigurationFrame"/>.</param>
/// <param name="nominalFrequency">The nominal <see cref="LineFrequency"/> of this <see cref="ConfigurationFrame"/>.</param>
/// <param name="timeOffset">The time offset of F-NET device in <see cref="Ticks"/>.</param>
/// <param name="stationName">The station name of the F-NET device.</param>
/// <remarks>
/// This constructor is used by a consumer to generate an F-NET configuration frame.
/// </remarks>
public ConfigurationFrame(ushort idCode, Ticks timestamp, ushort frameRate, LineFrequency nominalFrequency, Ticks timeOffset, string stationName)
    : base(idCode, new ConfigurationCellCollection(), timestamp, frameRate)
{
    // The F-NET protocol sends data for a single device, so exactly one cell is created
    ConfigurationCell cell = new ConfigurationCell(this, nominalFrequency, timeOffset);
    Cells.Add(cell);

    // Use the provided station name, or synthesize one from the ID code when absent
    cell.StationName = string.IsNullOrEmpty(stationName) ? "F-NET Unit-" + idCode : stationName;

    // Single frequency definition for the device
    cell.FrequencyDefinition = new FrequencyDefinition(cell, "Line frequency");

    // Single voltage phasor definition for the device
    cell.PhasorDefinitions.Add(new PhasorDefinition(cell, "120V Phasor", PhasorType.Voltage, null));
}
/// <summary>
/// Creates a new <see cref="TimeSlice"/> for the specified time using the specified data
/// </summary>
/// <param name="utcDateTime">The UTC frontier date time</param>
/// <param name="algorithmTimeZone">The algorithm's time zone, required for computing algorithm and slice time</param>
/// <param name="cashBook">The algorithm's cash book, required for generating cash update pairs</param>
/// <param name="data">The data in this <see cref="TimeSlice"/></param>
/// <param name="changes">The new changes that are seen in this time slice as a result of universe selection</param>
/// <returns>A new <see cref="TimeSlice"/> containing the specified data</returns>
public static TimeSlice Create(DateTime utcDateTime, DateTimeZone algorithmTimeZone, CashBook cashBook, List<KeyValuePair<Security, List<BaseData>>> data, SecurityChanges changes)
{
    int count = 0;
    var security = new List<KeyValuePair<Security, BaseData>>();
    var custom = new List<KeyValuePair<Security, List<BaseData>>>();
    var consolidator = new List<KeyValuePair<SubscriptionDataConfig, List<BaseData>>>();
    var allDataForAlgorithm = new List<BaseData>(data.Count);
    var cash = new List<KeyValuePair<Cash, BaseData>>(cashBook.Count);

    // Set of symbols whose prices drive cash conversion rates
    var cashSecurities = new HashSet<Symbol>();
    foreach (var cashItem in cashBook.Values)
    {
        cashSecurities.Add(cashItem.SecuritySymbol);
    }

    Split split;
    Dividend dividend;
    Delisting delisting;
    SymbolChangedEvent symbolChange;

    var algorithmTime = utcDateTime.ConvertFromUtc(algorithmTimeZone);
    var tradeBars = new TradeBars(algorithmTime);
    var ticks = new Ticks(algorithmTime);
    var splits = new Splits(algorithmTime);
    var dividends = new Dividends(algorithmTime);
    var delistings = new Delistings(algorithmTime);
    var symbolChanges = new SymbolChangedEvents(algorithmTime);

    foreach (var kvp in data)
    {
        var list = kvp.Value;
        var symbol = kvp.Key.Symbol;

        // keep count of all data points
        if (list.Count == 1 && list[0] is BaseDataCollection)
        {
            count += ((BaseDataCollection) list[0]).Data.Count;
        }
        else
        {
            count += list.Count;
        }

        BaseData update = null;
        var consolidatorUpdate = new List<BaseData>(list.Count);

        for (int i = 0; i < list.Count; i++)
        {
            var baseData = list[i];

            if (!kvp.Key.SubscriptionDataConfig.IsInternalFeed)
            {
                // this is all the data that goes into the algorithm
                allDataForAlgorithm.Add(baseData);

                if (kvp.Key.SubscriptionDataConfig.IsCustomData)
                {
                    // this is all the custom data
                    custom.Add(kvp);
                }
            }

            // don't add internal feed data to ticks/bars objects
            if (baseData.DataType != MarketDataType.Auxiliary)
            {
                if (!kvp.Key.SubscriptionDataConfig.IsInternalFeed)
                {
                    // populate ticks and tradebars dictionaries with no aux data
                    if (baseData.DataType == MarketDataType.Tick)
                    {
                        List<Tick> ticksList;

                        if (!ticks.TryGetValue(symbol, out ticksList))
                        {
                            // FIX: start with an empty list - the original seeded the new list
                            // with baseData AND then fell through to Add below, duplicating the
                            // first tick of each symbol
                            ticksList = new List<Tick>();
                            ticks[symbol] = ticksList;
                        }

                        ticksList.Add((Tick) baseData);
                    }
                    else if (baseData.DataType == MarketDataType.TradeBar)
                    {
                        tradeBars[symbol] = (TradeBar) baseData;
                    }

                    // this is data used to update consolidators
                    consolidatorUpdate.Add(baseData);
                }

                // this is the data used set market prices
                update = baseData;
            }
            // include checks for various aux types so we don't have to construct the dictionaries in Slice
            else if ((delisting = baseData as Delisting) != null)
            {
                delistings[symbol] = delisting;
            }
            else if ((dividend = baseData as Dividend) != null)
            {
                dividends[symbol] = dividend;
            }
            else if ((split = baseData as Split) != null)
            {
                splits[symbol] = split;
            }
            else if ((symbolChange = baseData as SymbolChangedEvent) != null)
            {
                // symbol changes is keyed by the requested symbol
                symbolChanges[kvp.Key.SubscriptionDataConfig.Symbol] = symbolChange;
            }
        }

        // check for 'cash securities' if we found valid update data for this symbol
        // and we need this data to update cash conversion rates, long term we should
        // have Cash hold onto it's security, then he can update himself, or rather, just
        // patch through calls to conversion rate to compue it on the fly using Security.Price
        if (update != null && cashSecurities.Contains(kvp.Key.Symbol))
        {
            foreach (var cashKvp in cashBook)
            {
                if (cashKvp.Value.SecuritySymbol == kvp.Key.Symbol)
                {
                    cash.Add(new KeyValuePair<Cash, BaseData>(cashKvp.Value, update));
                }
            }
        }

        security.Add(new KeyValuePair<Security, BaseData>(kvp.Key, update));
        consolidator.Add(new KeyValuePair<SubscriptionDataConfig, List<BaseData>>(kvp.Key.SubscriptionDataConfig, consolidatorUpdate));
    }

    var slice = new Slice(utcDateTime.ConvertFromUtc(algorithmTimeZone), allDataForAlgorithm, tradeBars, ticks, splits, dividends, delistings, symbolChanges, allDataForAlgorithm.Count > 0);

    return new TimeSlice(utcDateTime, count, slice, data, cash, security, consolidator, custom, changes);
}
/// <summary>
/// Initializes <see cref="FileExporter"/>.
/// </summary>
/// <exception cref="ArgumentException">A required setting (exportInterval, useReferenceAngle, referenceAngleMeasurement) is missing or invalid.</exception>
/// <exception cref="InvalidOperationException">No input measurements are defined, or the reference angle key is not a phase angle.</exception>
public override void Initialize()
{
    base.Initialize();

    Dictionary<string, string> settings = Settings;
    const string errorMessage = "{0} is missing from Settings - Example: exportInterval=5; useReferenceAngle=True; referenceAngleMeasurement=DEVARCHIVE:6; companyTagPrefix=TVA; useNumericQuality=True; inputMeasurementKeys={{FILTER ActiveMeasurements WHERE Device='SHELBY' AND SignalType='FREQ'}}";
    string setting;
    double seconds;

    // Load required parameters
    if (!settings.TryGetValue("exportInterval", out setting) || !double.TryParse(setting, out seconds))
    {
        throw new ArgumentException(string.Format(errorMessage, "exportInterval"));
    }

    // exportInterval setting is in seconds; stored internally in milliseconds
    m_exportInterval = (int)(seconds * 1000.0D);
    m_lastPublicationTime = 0;

    if (m_exportInterval <= 0)
    {
        throw new ArgumentException("exportInterval should not be 0 - Example: exportInterval=5.5");
    }

    if (InputMeasurementKeys == null || InputMeasurementKeys.Length == 0)
    {
        throw new InvalidOperationException("There are no input measurements defined. You must define \"inputMeasurementKeys\" to define which measurements to export.");
    }

    if (!settings.TryGetValue("useReferenceAngle", out setting))
    {
        throw new ArgumentException(string.Format(errorMessage, "useReferenceAngle"));
    }

    m_useReferenceAngle = setting.ParseBoolean();

    if (m_useReferenceAngle)
    {
        // Reference angle measurement has to be defined if using reference angle
        if (!settings.TryGetValue("referenceAngleMeasurement", out setting))
        {
            throw new ArgumentException(string.Format(errorMessage, "referenceAngleMeasurement"));
        }

        m_referenceAngleKey = MeasurementKey.Parse(setting);

        // Make sure reference angle is part of input measurement keys collection
        if (!InputMeasurementKeys.Contains(m_referenceAngleKey))
        {
            InputMeasurementKeys = InputMeasurementKeys.Concat(new[] { m_referenceAngleKey }).ToArray();
        }

        // Make sure sure reference angle key is actually an angle measurement
        SignalType signalType = InputMeasurementKeyTypes[InputMeasurementKeys.IndexOf(key => key == m_referenceAngleKey)];

        if (signalType != SignalType.IPHA && signalType != SignalType.VPHA)
        {
            throw new InvalidOperationException(string.Format("Specified reference angle measurement key is a {0} signal, not a phase angle.", signalType.GetFormattedName()));
        }
    }

    // Load optional parameters
    if (settings.TryGetValue("companyTagPrefix", out setting))
    {
        m_companyTagPrefix = setting.ToUpper().Trim();
    }
    else
    {
        m_companyTagPrefix = null;
    }

    if (settings.TryGetValue("useNumericQuality", out setting))
    {
        m_useNumericQuality = setting.ParseBoolean();
    }
    else
    {
        m_useNumericQuality = false;
    }

    // Suffix company tag prefix with an underscore if defined
    if (!string.IsNullOrWhiteSpace(m_companyTagPrefix))
    {
        m_companyTagPrefix = m_companyTagPrefix.EnsureEnd('_');
    }

    // Define a default export location - user can override and add multiple locations in config later...
    m_dataExporter = new MultipleDestinationExporter(ConfigurationSection, m_exportInterval);
    m_dataExporter.StatusMessage += m_dataExporter_StatusMessage;
    m_dataExporter.ProcessException += m_dataExporter_ProcessException;
    m_dataExporter.Initialize(new[] { new ExportDestination(FilePath.GetAbsolutePath(ConfigurationSection + ".txt"), false) });

    // Create new measurement tag name dictionary
    m_measurementTags = new ConcurrentDictionary<MeasurementKey, string>();
    string pointID = "undefined";

    // Lookup point tag name for input measurement in the ActiveMeasurements table
    foreach (MeasurementKey key in InputMeasurementKeys)
    {
        try
        {
            // Get measurement key as a string
            pointID = key.ToString();

            // Lookup measurement key in active measurements table
            DataRow row = DataSource.Tables["ActiveMeasurements"].Select(string.Format("ID='{0}'", pointID))[0];

            // Remove invalid symbols that may be in tag name
            string pointTag = row["PointTag"].ToNonNullString(pointID).Replace('-', '_').Replace(':', '_').ToUpper();

            // Prefix point tag with company prefix if defined
            if (!string.IsNullOrWhiteSpace(m_companyTagPrefix) && !pointTag.StartsWith(m_companyTagPrefix))
            {
                pointTag = m_companyTagPrefix + pointTag;
            }

            m_measurementTags.TryAdd(key, pointTag);
        }
        catch (ThreadAbortException)
        {
            // Never swallow thread aborts
            throw;
        }
        catch (Exception ex)
        {
            // A failed tag lookup for one key should not stop initialization of the others
            OnProcessException(new InvalidOperationException(string.Format("Failed to lookup point tag for measurement [{0}] due to exception: {1}", pointID, ex.Message)));
        }
    }

    // We enable tracking of latest measurements so we can use these values if points are missing - since we are using
    // latest measurement tracking, we sort all incoming points even though most of them will be thrown out...
    TrackLatestMeasurements = true;
}
/// <summary>
/// Process frame of time-aligned measurements that arrived within the defined lag time.
/// </summary>
/// <param name="frame"><see cref="IFrame"/> of measurements that arrived within lag time and are ready for processing.</param>
/// <param name="index">Index of <see cref="IFrame"/> within one second of data ranging from zero to frames per second - 1.</param>
protected override void PublishFrame(IFrame frame, int index)
{
    Ticks timestamp = frame.Timestamp;

    // Only publish when the export interval time has passed
    if ((timestamp - m_lastPublicationTime).ToMilliseconds() > m_exportInterval)
    {
        ConcurrentDictionary<MeasurementKey, IMeasurement> measurements = frame.Measurements;
        m_lastPublicationTime = timestamp;

        if (measurements.Count > 0)
        {
            StringBuilder fileData = new StringBuilder();
            IMeasurement measurement, referenceAngle;
            MeasurementKey inputMeasurementKey;
            SignalType signalType;
            DataQuality measurementQuality;
            double measurementValue, referenceAngleValue;
            string measurementTag;
            bool displayedWarning = false;

            // We need to get calculated reference angle value in order to export relative phase angles
            // If the value is not here, we don't export
            referenceAngle = null;

            // Make sure reference made it in this frame...
            if (m_useReferenceAngle && !measurements.TryGetValue(m_referenceAngleKey, out referenceAngle))
            {
                OnProcessException(new InvalidOperationException("Calculated reference angle was not found in this frame, possible reasons: system is initializing, receiving no data or lag time is too small. File creation was skipped."));
            }
            else
            {
                // Export all defined input measurements
                for (int i = 0; i < InputMeasurementKeys.Length; i++)
                {
                    inputMeasurementKey = InputMeasurementKeys[i];
                    signalType = InputMeasurementKeyTypes[i];

                    // Look up measurement's tag name
                    if (m_measurementTags.TryGetValue(inputMeasurementKey, out measurementTag))
                    {
                        // See if measurement exists in this frame
                        if (measurements.TryGetValue(inputMeasurementKey, out measurement))
                        {
                            // Get measurement's adjusted value (takes into account any adder and or multipler)
                            measurementValue = measurement.AdjustedValue;

                            // Interpret data quality flags
                            measurementQuality = (measurement.ValueQualityIsGood() ? (measurement.TimestampQualityIsGood() ? DataQuality.Good : DataQuality.Suspect) : DataQuality.Bad);
                        }
                        else
                        {
                            // Didn't find measurement in this frame, try using a recent value
                            measurementValue = LatestMeasurements[inputMeasurementKey];

                            // Interpret data quality flags - if no recent measurement is available, we mark it as bad
                            measurementQuality = (Double.IsNaN(measurementValue) ? DataQuality.Bad : DataQuality.Good);

                            // We'll export zero instead of NaN for bad data
                            if (measurementQuality == DataQuality.Bad)
                            {
                                measurementValue = 0.0D;
                            }
                        }

                        // Export tag name field
                        fileData.Append(measurementTag);
                        fileData.Append(",");

                        // Export measurement value making any needed adjustments based on signal type
                        if (signalType == SignalType.VPHA || signalType == SignalType.IPHA)
                        {
                            // This is a phase angle measurement, export the value relative to the reference angle (if available)
                            if (referenceAngle == null)
                            {
                                // No reference angle defined, export raw angle
                                fileData.Append(measurementValue);
                            }
                            else
                            {
                                // Get reference angle's adjusted value (takes into account any adder and or multipler)
                                referenceAngleValue = referenceAngle.AdjustedValue;

                                // Handle relative angle wrapping - pick the candidate (raw, +360, -360)
                                // with the smallest absolute difference from the reference
                                double dis0 = Math.Abs(measurementValue - referenceAngleValue);
                                double dis1 = Math.Abs(measurementValue - referenceAngleValue + 360);
                                double dis2 = Math.Abs(measurementValue - referenceAngleValue - 360);

                                if ((dis0 < dis1) && (dis0 < dis2))
                                {
                                    measurementValue = measurementValue - referenceAngleValue;
                                }
                                else if (dis1 < dis2)
                                {
                                    measurementValue = measurementValue - referenceAngleValue + 360;
                                }
                                else
                                {
                                    measurementValue = measurementValue - referenceAngleValue - 360;
                                }

                                fileData.Append(measurementValue);
                            }
                        }
                        else if (signalType == SignalType.VPHM)
                        {
                            // Typical voltages from PMU's are line-to-neutral volts so we convert them to line-to-line kilovolts
                            fileData.Append(measurementValue * SqrtOf3 / 1000.0D);
                        }
                        else
                        {
                            // Export all other types of measurements as their raw value
                            fileData.Append(measurementValue);
                        }

                        // Export interpreted measurement quality
                        fileData.Append(",");

                        if (m_useNumericQuality)
                        {
                            fileData.Append((int)measurementQuality);
                        }
                        else
                        {
                            fileData.Append(measurementQuality);
                        }

                        // Terminate line (ICCP file link expects these two terminating commas, weird...)
                        fileData.AppendLine(",,");
                    }
                    else
                    {
                        // We were unable to find measurement tag for this key - this is unexpected
                        OnProcessException(new InvalidOperationException(string.Format("Failed to find measurement tag for measurement {0}", inputMeasurementKey)));
                    }
                }
            }

            // Queue up measurement export to data exporter - this will only allow one export at a time
            try
            {
                m_dataExporter.ExportData(fileData.ToString());
            }
            catch (ThreadAbortException)
            {
                // Never swallow thread aborts
                throw;
            }
            catch (Exception ex)
            {
                m_skippedExports++;
                OnStatusMessage("WARNING: Skipped export due to exception: " + ex.Message);
                displayedWarning = true;
            }

            // We display export status every other minute
            if (new DateTime(timestamp).Minute % 2 == 0 && !displayedWarning)
            {
                //Make sure message is only displayed once during the minute
                if (!m_statusDisplayed)
                {
                    OnStatusMessage("{0} successful file based measurement exports...", m_dataExporter.TotalExports);
                    m_statusDisplayed = true;
                }
            }
            else
            {
                m_statusDisplayed = false;
            }
        }
        else
        {
            // No data was available in the frame, lag time set too tight?
            OnProcessException(new InvalidOperationException("No measurements were available for file based data export, possible reasons: system is initializing , receiving no data or lag time is too small. File creation was skipped."));
        }
    }
}
// Builds a Chart.js bar-chart configuration for monthly sales and returns it to the view
// both as an object model and as pre-serialized JSON.
public IViewComponentResult Invoke()
{
    // var chart = JsonConvert.DeserializeObject<ChartJs> (chartData);

    // Y axis starts at zero
    Ticks ticks = new Ticks { beginAtZero = true };
    Yax yax = new Yax { ticks = ticks };
    Yax[] y = new Yax[1];
    y[0] = yax;
    Scales scales = new Scales();
    scales.yAxes = y;

    Data data = new Data();
    MonthlySaleData saleData = GetMonthSaleData();

    // One dataset with per-month fill/border colors (12 entries, one per month)
    Dataset dataset = new Dataset
    {
        borderWidth = 1,
        label = "Monthly Sale",
        data = saleData.Amount.ToArray(),
        backgroundColor = new string[]
        {
            "rgba(255, 99, 132, 0.2)",
            "rgba(54, 162, 235, 0.2)",
            "rgba(255, 206, 86, 0.2)",
            "rgba(75, 192, 192, 0.2)",
            "rgba(153, 102, 255, 0.2)",
            "rgba(54, 162, 235, 0.2)",
            "rgba(255, 203, 83, 0.2)",
            "rgba(255, 159, 64, 0.2)",
            "rgba(255, 99, 132, 0.2)",
            "rgba(54, 162, 235, 0.2)",
            "rgba(255, 206, 86, 0.2)",
            "rgba(75, 192, 192, 0.2)",
        },
        borderColor = new string[]
        {
            "rgba(255, 99, 132, 1)",
            "rgba(54, 162, 235, 1)",
            "rgba(255, 206, 86, 1)",
            "rgba(75, 192, 192, 1)",
            "rgba(153, 102, 255, 1)",
            "rgba(255, 203, 83, 1)",
            "rgba(255, 159, 64, 1)",
            "rgba(255, 99, 132, 1)",
            "rgba(54, 162, 235, 1)",
            "rgba(255, 206, 86, 1)",
            "rgba(75, 192, 192, 1)",
            "rgba(153, 102, 255, 1)",
        }
    };

    // Assemble the full Chart.js config; month names become the x-axis labels
    ChartJs chart = new ChartJs
    {
        type = "bar",
        responsive = true,
        options = new Options { scales = scales },
        data = new Data
        {
            datasets = new Dataset[] { dataset },
            labels = saleData.MonthName.ToArray()
        }
    };

    // Serialize once here so the view can embed the JSON directly; nulls omitted
    var chartModel = new ChartJsViewModel
    {
        Chart = chart,
        ChartJson = JsonConvert.SerializeObject(chart, new JsonSerializerSettings
        {
            NullValueHandling = NullValueHandling.Ignore
        })
    };

    return(View(chartModel));
}
// Imports COM-port-to-ID-code mappings from a user-selected CSV file, then drives the
// existing scan workflow once per mapping on a background task. Original text-box values
// are saved up front and restored when the run finishes or is canceled.
private void buttonImport_Click(object sender, EventArgs e)
{
    if (openFileDialogSelectCSVImport.ShowDialog(this) != DialogResult.OK)
    {
        return;
    }

    string csvFile = FilePath.GetAbsolutePath(openFileDialogSelectCSVImport.FileName);

    UpdateProgressBar(0);
    ShowUpdateMessage("");

    if (!File.Exists(csvFile))
    {
        ShowUpdateMessage($"CSV import file \"{csvFile}\" was not found. Import canceled.");
        return;
    }

    // Run the whole import off the UI thread; UI updates go through the Set*/Show* helpers
    Task.Run(() =>
    {
        try
        {
            SetControlEnabledState(buttonImport, false);
            m_cancellationTokenSource = new CancellationTokenSource();

            Dictionary<int, int> comPortIDCodeMap = new();

            try
            {
                // Parse CSV: column 1 = COM port (optionally prefixed "COM"), column 2 = ID code
                using TextFieldParser parser = new(csvFile);

                parser.TextFieldType = FieldType.Delimited;
                parser.SetDelimiters(",");

                while (!parser.EndOfData)
                {
                    string[] fields = parser.ReadFields();

                    // Skip rows without at least two fields
                    if (!(fields?.Length > 1))
                    {
                        continue;
                    }

                    // Strip a leading "COM" prefix, e.g. "COM5" -> "5"
                    if (fields[0].StartsWith("COM", StringComparison.OrdinalIgnoreCase) && fields.Length > 3)
                    {
                        fields[0] = fields[0].Substring(3);
                    }

                    // Later rows overwrite earlier ones for the same COM port
                    if (ushort.TryParse(fields[0], out ushort comPort) && ushort.TryParse(fields[1], out ushort idCode))
                    {
                        comPortIDCodeMap[comPort] = idCode;
                    }
                }
            }
            catch (Exception ex)
            {
                ShowUpdateMessage($"ERROR: Failed while attempting to parse CSV file \"{Path.GetFileName(csvFile)}\": {ex.Message}");
                m_log.Publish(MessageLevel.Error, nameof(AutoConfigPortScanner), exception: ex);
                return;
            }

            if (comPortIDCodeMap.Count == 0)
            {
                ShowUpdateMessage($"No mappings found in CSV import file \"{csvFile}\".{Environment.NewLine} Verify format: column 1 should be COM port and column 2 should be ID code.");
                return;
            }

            ShowUpdateMessage($"Loaded {comPortIDCodeMap.Count:N0} mappings from CSV import file \"{csvFile}\", starting scan and import...");

            Ticks startTime = DateTime.UtcNow.Ticks;
            int mappings = 0;

            // Save original settings
            string orgStartComPort = textBoxStartComPort.Text;
            string orgEndComPort = textBoxEndComPort.Text;
            string orgStartIDCode = textBoxStartIDCode.Text;
            string orgEndIDCode = textBoxEndIDCode.Text;

            SetProgressBarMinMax(0, comPortIDCodeMap.Count);

            try
            {
                // Scan each row in import file
                foreach (KeyValuePair<int, int> kvp in comPortIDCodeMap)
                {
                    int comPort = kvp.Key;
                    int idCode = kvp.Value;

                    // Narrow the scan range to this single COM port / ID code pair,
                    // then reuse the existing scan button handler
                    SetTextBoxText(textBoxStartComPort, comPort.ToString());
                    SetTextBoxText(textBoxEndComPort, comPort.ToString());
                    SetTextBoxText(textBoxStartIDCode, idCode.ToString());
                    SetTextBoxText(textBoxEndIDCode, idCode.ToString());

                    buttonScan_Click(buttonScan, e);

                    // Block until the triggered scan signals completion
                    m_scanExecutionComplete.Wait();

                    if (m_cancellationTokenSource?.IsCancellationRequested ?? false)
                    {
                        break;
                    }

                    mappings++;
                    UpdateProgressBar(mappings);
                }
            }
            catch (Exception ex)
            {
                ShowUpdateMessage($"ERROR: Failed while attempting to scan COM port to ID code mappings in CSV file \"{Path.GetFileName(csvFile)}\": {ex.Message}");
                m_log.Publish(MessageLevel.Error, nameof(AutoConfigPortScanner), exception: ex);
            }
            finally
            {
                // Restore original settings
                SetTextBoxText(textBoxStartComPort, orgStartComPort);
                SetTextBoxText(textBoxEndComPort, orgEndComPort);
                SetTextBoxText(textBoxStartIDCode, orgStartIDCode);
                SetTextBoxText(textBoxEndIDCode, orgEndIDCode);

                if (m_cancellationTokenSource?.IsCancellationRequested ?? false)
                {
                    ShowUpdateMessage($"{Environment.NewLine}Import canceled after running for {(DateTime.UtcNow.Ticks - startTime).ToElapsedTimeString(3)}. Completed {mappings:N0} mappings from CSV file \"{Path.GetFileName(csvFile)}\" before cancel.");
                }
                else
                {
                    ShowUpdateMessage($"{Environment.NewLine}Import for {mappings:N0} mappings in CSV file \"{Path.GetFileName(csvFile)}\" completed in {(DateTime.UtcNow.Ticks - startTime).ToElapsedTimeString(3)}");
                }
            }
        }
        finally
        {
            SetControlEnabledState(buttonImport, true);
        }
    });
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrame3"/> from specified parameters.
/// </summary>
/// <param name="timebase">Timebase to use for fraction second resolution.</param>
/// <param name="idCode">The ID code of this <see cref="ConfigurationFrame3"/>.</param>
/// <param name="timestamp">The exact timestamp, in <see cref="Ticks"/>, of the data represented by this <see cref="ConfigurationFrame3"/>.</param>
/// <param name="frameRate">The defined frame rate of this <see cref="ConfigurationFrame3"/>.</param>
/// <remarks>
/// This constructor is used by a consumer to generate an IEEE C37.118 configuration frame, type 3.
/// </remarks>
public ConfigurationFrame3(uint timebase, ushort idCode, Ticks timestamp, ushort frameRate)
    : base(timebase, idCode, timestamp, frameRate)
{
    // All initialization is handled by the base configuration frame
}
// Indicates whether the given measurement's value has changed since the last call;
// when it has, the tracked last-changed timestamp and last value are updated.
private bool ClearIfNotFlatline(IMeasurement measurement)
{
    // Unchanged value -> still potentially flatlined
    if (measurement.Value == m_lastValue)
        return false;

    // Value moved: record when and what it changed to
    m_lastChanged = measurement.Timestamp;
    m_lastValue = measurement.Value;

    return true;
}
/// <summary>
/// Creates a new <see cref="TimeSlice"/> for the specified time using the specified data
/// </summary>
/// <param name="utcDateTime">The UTC frontier date time</param>
/// <param name="algorithmTimeZone">The algorithm's time zone, required for computing algorithm and slice time</param>
/// <param name="cashBook">The algorithm's cash book, required for generating cash update pairs</param>
/// <param name="data">The data in this <see cref="TimeSlice"/></param>
/// <param name="changes">The new changes that are seen in this time slice as a result of universe selection</param>
/// <param name="universeData"></param>
/// <returns>A new <see cref="TimeSlice"/> containing the specified data</returns>
public static TimeSlice Create(DateTime utcDateTime, DateTimeZone algorithmTimeZone, CashBook cashBook, List<DataFeedPacket> data, SecurityChanges changes, Dictionary<Universe, BaseDataCollection> universeData)
{
    int count = 0;
    var security = new List<UpdateData<Security>>();
    var custom = new List<UpdateData<Security>>();
    var consolidator = new List<UpdateData<SubscriptionDataConfig>>();
    var allDataForAlgorithm = new List<BaseData>(data.Count);
    var optionUnderlyingUpdates = new Dictionary<Symbol, BaseData>();

    Split split;
    Dividend dividend;
    Delisting delisting;
    SymbolChangedEvent symbolChange;

    // we need to be able to reference the slice being created in order to define the
    // evaluation of option price models, so we define a 'future' that can be referenced
    // in the option price model evaluation delegates for each contract
    Slice slice = null;
    var sliceFuture = new Lazy<Slice>(() => slice);

    var algorithmTime = utcDateTime.ConvertFromUtc(algorithmTimeZone);
    var tradeBars = new TradeBars(algorithmTime);
    var quoteBars = new QuoteBars(algorithmTime);
    var ticks = new Ticks(algorithmTime);
    var splits = new Splits(algorithmTime);
    var dividends = new Dividends(algorithmTime);
    var delistings = new Delistings(algorithmTime);
    var optionChains = new OptionChains(algorithmTime);
    var futuresChains = new FuturesChains(algorithmTime);
    var symbolChanges = new SymbolChangedEvents(algorithmTime);

    if (universeData.Count > 0)
    {
        // count universe data
        foreach (var kvp in universeData)
        {
            count += kvp.Value.Data.Count;
        }
    }

    // ensure we read equity data before option data, so we can set the current underlying price
    foreach (var packet in data)
    {
        var list = packet.Data;
        var symbol = packet.Security.Symbol;

        if (list.Count == 0)
        {
            continue;
        }

        // keep count of all data points
        if (list.Count == 1 && list[0] is BaseDataCollection)
        {
            var baseDataCollectionCount = ((BaseDataCollection)list[0]).Data.Count;

            if (baseDataCollectionCount == 0)
            {
                continue;
            }

            count += baseDataCollectionCount;
        }
        else
        {
            count += list.Count;
        }

        if (!packet.Configuration.IsInternalFeed && packet.Configuration.IsCustomData)
        {
            // This is all the custom data
            custom.Add(new UpdateData<Security>(packet.Security, packet.Configuration.Type, list));
        }

        var securityUpdate = new List<BaseData>(list.Count);
        var consolidatorUpdate = new List<BaseData>(list.Count);

        for (int i = 0; i < list.Count; i++)
        {
            var baseData = list[i];

            if (!packet.Configuration.IsInternalFeed)
            {
                // this is all the data that goes into the algorithm
                allDataForAlgorithm.Add(baseData);
            }

            // don't add internal feed data to ticks/bars objects
            if (baseData.DataType != MarketDataType.Auxiliary)
            {
                if (!packet.Configuration.IsInternalFeed)
                {
                    PopulateDataDictionaries(baseData, ticks, tradeBars, quoteBars, optionChains, futuresChains);

                    // special handling of options data to build the option chain
                    if (packet.Security.Type == SecurityType.Option)
                    {
                        if (baseData.DataType == MarketDataType.OptionChain)
                        {
                            optionChains[baseData.Symbol] = (OptionChain)baseData;
                        }
                        else if (!HandleOptionData(algorithmTime, baseData, optionChains, packet.Security, sliceFuture, optionUnderlyingUpdates))
                        {
                            continue;
                        }
                    }

                    // special handling of futures data to build the futures chain
                    if (packet.Security.Type == SecurityType.Future)
                    {
                        if (baseData.DataType == MarketDataType.FuturesChain)
                        {
                            futuresChains[baseData.Symbol] = (FuturesChain)baseData;
                        }
                        else if (!HandleFuturesData(algorithmTime, baseData, futuresChains, packet.Security))
                        {
                            continue;
                        }
                    }

                    // this is data used to update consolidators
                    consolidatorUpdate.Add(baseData);
                }

                // this is the data used set market prices
                // do not add it if it is a Suspicious tick
                var tick = baseData as Tick;

                if (tick != null && tick.Suspicious)
                {
                    continue;
                }

                securityUpdate.Add(baseData);

                // option underlying security update
                if (packet.Security.Symbol.SecurityType == SecurityType.Equity)
                {
                    optionUnderlyingUpdates[packet.Security.Symbol] = baseData;
                }
            }
            // include checks for various aux types so we don't have to construct the dictionaries in Slice
            else if ((delisting = baseData as Delisting) != null)
            {
                delistings[symbol] = delisting;
            }
            else if ((dividend = baseData as Dividend) != null)
            {
                dividends[symbol] = dividend;
            }
            else if ((split = baseData as Split) != null)
            {
                splits[symbol] = split;
            }
            else if ((symbolChange = baseData as SymbolChangedEvent) != null)
            {
                // symbol changes is keyed by the requested symbol
                symbolChanges[packet.Configuration.Symbol] = symbolChange;
            }
        }

        if (securityUpdate.Count > 0)
        {
            security.Add(new UpdateData<Security>(packet.Security, packet.Configuration.Type, securityUpdate));
        }

        if (consolidatorUpdate.Count > 0)
        {
            consolidator.Add(new UpdateData<SubscriptionDataConfig>(packet.Configuration, packet.Configuration.Type, consolidatorUpdate));
        }
    }

    slice = new Slice(algorithmTime, allDataForAlgorithm, tradeBars, quoteBars, ticks, optionChains, futuresChains, splits, dividends, delistings, symbolChanges, allDataForAlgorithm.Count > 0);

    return(new TimeSlice(utcDateTime, count, slice, data, security, consolidator, custom, changes, universeData));
}
/// <inheritdoc />
public override int GetHashCode()
{
    unchecked // Overflow is fine, just wrap
    {
        var hashCode = 41;

        // Folds one member into the running hash; unset (null) members are skipped
        // so they do not perturb the result, matching the member-by-member scheme
        // used by the generated equality members.
        void Fold(object component)
        {
            if (component != null)
            {
                hashCode = hashCode * 59 + component.GetHashCode();
            }
        }

        Fold(Visible);
        Fold(Type);
        Fold(CategoryOrder);
        Fold(CategoryArray);
        Fold(ThetaUnit);
        Fold(Period);
        Fold(Direction);
        Fold(Rotation);
        Fold(HoverFormat);
        Fold(UiRevision);
        Fold(Color);
        Fold(ShowLine);
        Fold(LineColor);
        Fold(LineWidth);
        Fold(ShowGrid);
        Fold(GridColor);
        Fold(GridWidth);
        Fold(TickMode);
        Fold(NTicks);
        Fold(Tick0);
        Fold(DTick);
        Fold(TickVals);
        Fold(TickText);
        Fold(Ticks);
        Fold(TickLen);
        Fold(TickWidth);
        Fold(TickColor);
        Fold(ShowTickLabels);
        Fold(ShowTickPrefix);
        Fold(TickPrefix);
        Fold(ShowTickSuffix);
        Fold(TickSuffix);
        Fold(ShowExponent);
        Fold(ExponentFormat);
        Fold(MinExponent);
        Fold(SeparateThousands);
        Fold(TickFont);
        Fold(TickAngle);
        Fold(TickFormat);
        Fold(TickFormatStops);
        Fold(Layer);
        Fold(CategoryArraySrc);
        Fold(TickValsSrc);
        Fold(TickTextSrc);

        return hashCode;
    }
}
/// <inheritdoc />
public bool Equals([AllowNull] AngularAxis other)
{
    if (other == null)
    {
        return false;
    }

    if (ReferenceEquals(this, other))
    {
        return true;
    }

    // Scalar members compare equal when both sides are the same reference (or both
    // null), or when the left side reports value equality against the right. This
    // mirrors the generated "A == other.A || A != null && A.Equals(other.A)" form.
    static bool MemberEquals(object left, object right)
        => left == right || left != null && left.Equals(right);

    // Sequence members compare equal when Object.Equals says so (covers both-null
    // and same-reference), or when both are non-null and element-wise equal.
    static bool SequenceEquals<T>(IEnumerable<T> left, IEnumerable<T> right)
        => Equals(left, right) || left != null && right != null && left.SequenceEqual(right);

    return MemberEquals(Visible, other.Visible)
        && MemberEquals(Type, other.Type)
        && MemberEquals(CategoryOrder, other.CategoryOrder)
        && SequenceEquals(CategoryArray, other.CategoryArray)
        && MemberEquals(ThetaUnit, other.ThetaUnit)
        && MemberEquals(Period, other.Period)
        && MemberEquals(Direction, other.Direction)
        && MemberEquals(Rotation, other.Rotation)
        && MemberEquals(HoverFormat, other.HoverFormat)
        && MemberEquals(UiRevision, other.UiRevision)
        && MemberEquals(Color, other.Color)
        && MemberEquals(ShowLine, other.ShowLine)
        && MemberEquals(LineColor, other.LineColor)
        && MemberEquals(LineWidth, other.LineWidth)
        && MemberEquals(ShowGrid, other.ShowGrid)
        && MemberEquals(GridColor, other.GridColor)
        && MemberEquals(GridWidth, other.GridWidth)
        && MemberEquals(TickMode, other.TickMode)
        && MemberEquals(NTicks, other.NTicks)
        && MemberEquals(Tick0, other.Tick0)
        && MemberEquals(DTick, other.DTick)
        && SequenceEquals(TickVals, other.TickVals)
        && SequenceEquals(TickText, other.TickText)
        && MemberEquals(Ticks, other.Ticks)
        && MemberEquals(TickLen, other.TickLen)
        && MemberEquals(TickWidth, other.TickWidth)
        && MemberEquals(TickColor, other.TickColor)
        && MemberEquals(ShowTickLabels, other.ShowTickLabels)
        && MemberEquals(ShowTickPrefix, other.ShowTickPrefix)
        && MemberEquals(TickPrefix, other.TickPrefix)
        && MemberEquals(ShowTickSuffix, other.ShowTickSuffix)
        && MemberEquals(TickSuffix, other.TickSuffix)
        && MemberEquals(ShowExponent, other.ShowExponent)
        && MemberEquals(ExponentFormat, other.ExponentFormat)
        && MemberEquals(MinExponent, other.MinExponent)
        && MemberEquals(SeparateThousands, other.SeparateThousands)
        && MemberEquals(TickFont, other.TickFont)
        && MemberEquals(TickAngle, other.TickAngle)
        && MemberEquals(TickFormat, other.TickFormat)
        && SequenceEquals(TickFormatStops, other.TickFormatStops)
        && MemberEquals(Layer, other.Layer)
        && MemberEquals(CategoryArraySrc, other.CategoryArraySrc)
        && MemberEquals(TickValsSrc, other.TickValsSrc)
        && MemberEquals(TickTextSrc, other.TickTextSrc);
}
// Any exceptions in this handler will be exposed through ProcessException event and cause OutageLogProcessor
// to requeue the data gap outage so it will be processed again (could be that remote system is offline).
private void ProcessDataGap(Outage dataGap)
{
    // Establish start and stop time for temporal session
    m_subscriptionInfo.StartTime = dataGap.Start.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture);
    m_subscriptionInfo.StopTime = dataGap.End.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture);

    OnStatusMessage(MessageLevel.Info, $"Starting data gap recovery for period \"{m_subscriptionInfo.StartTime}\" - \"{m_subscriptionInfo.StopTime}\"...");

    // Enable data monitor
    m_dataStreamMonitor.Enabled = true;

    // Reset measurement counters
    m_measurementsRecoveredForDataGap = 0;
    m_measurementsRecoveredOverLastInterval = 0;

    // Reset processing fields
    m_mostRecentRecoveredTime = dataGap.Start.Ticks;
    m_abnormalTermination = false;

    // Reset process completion wait handle
    m_dataGapRecoveryCompleted.Reset();

    // Start temporal data recovery session
    m_temporalSubscription.Subscribe(m_subscriptionInfo);

    // Save the currently processing data gap for reporting
    m_currentDataGap = dataGap;

    // Wait for process completion - success or fail
    // NOTE: this blocks the calling thread until the recovery session signals completion;
    // presumably the OutageLogProcessor dispatches this handler off any critical thread — confirm.
    m_dataGapRecoveryCompleted.Wait();

    // Clear the currently processing data gap
    m_currentDataGap = null;

    // If temporal session failed to connect, retry data recovery for this outage
    if (m_abnormalTermination)
    {
        // Make sure any data recovered so far doesn't get unnecessarily re-recovered, this requires that source historian report data in time-sorted order
        dataGap = new Outage(new DateTime(GSF.Common.Max((Ticks)dataGap.Start.Ticks, m_mostRecentRecoveredTime - (m_subscriptionInfo.UseMillisecondResolution ? Ticks.PerMillisecond : 1L)), DateTimeKind.Utc), dataGap.End);

        // Re-insert adjusted data gap at the top of the processing queue
        m_dataGapLog.Add(dataGap);

        // Distinguish "never connected" from "disconnected mid-recovery" in the operator message
        if (m_measurementsRecoveredForDataGap == 0)
        {
            OnStatusMessage(MessageLevel.Warning, $"Failed to establish temporal session. Data recovery for period \"{m_subscriptionInfo.StartTime}\" - \"{m_subscriptionInfo.StopTime}\" will be re-attempted.");
        }
        else
        {
            OnStatusMessage(MessageLevel.Warning, $"Temporal session was disconnected during recovery operation. Data recovery for adjusted period \"{dataGap.Start.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture)}\" - \"{m_subscriptionInfo.StopTime}\" will be re-attempted.");
        }
    }

    // Unsubscribe from temporal session
    m_temporalSubscription.Unsubscribe();

    // Disable data monitor
    m_dataStreamMonitor.Enabled = false;

    // Final summary: warn when nothing was recovered, otherwise report at info level
    OnStatusMessage(m_measurementsRecoveredForDataGap == 0 ? MessageLevel.Warning : MessageLevel.Info, $"Recovered {m_measurementsRecoveredForDataGap} measurements for period \"{m_subscriptionInfo.StartTime}\" - \"{m_subscriptionInfo.StopTime}\".");
}
/// <summary>Gets numeric value of this <see cref="TemporalMeasurement"/>, constrained within specified ticks.</summary>
/// <remarks>
/// <para>Operation will return NaN if ticks are outside of time deviation tolerances.</para>
/// </remarks>
/// <param name="timestamp">Timestamp, in ticks, used to constrain <see cref="TemporalMeasurement"/> (typically set to real-time, i.e. "now").</param>
/// <returns>Raw value of this measurement (i.e., value that is not offset by adder and multiplier).</returns>
public double GetValue(Ticks timestamp)
{
    // Report stale or future-skewed data as NaN rather than a misleading value
    if (Timestamp.TimeIsValid(timestamp, m_lagTime, m_leadTime))
    {
        return base.Value;
    }

    return double.NaN;
}
/// <summary>
/// Inserts the given measurements into the database table as a single multi-row
/// INSERT ... SELECT ... UNION ALL command, using parameterized values.
/// </summary>
/// <param name="measurements">Measurements to insert; an empty sequence is a no-op.</param>
private void BulkInsert(IEnumerable<IMeasurement> measurements)
{
    Type measurementType = typeof(IMeasurement);
    StringBuilder commandBuilder = new StringBuilder();
    string insertFormat = "INSERT INTO {0} ({1}) ";
    string selectFormat = "SELECT {0} ";
    string unionFormat = "UNION ALL ";
    string fields = m_fieldList.Aggregate((field1, field2) => field1 + "," + field2);
    char paramChar = m_isOracle ? ':' : '@';
    int paramCount = 0;
    int measurementCount = 0;

    // Hoist the reflection lookups out of the per-measurement loop: the property
    // resolved for each field depends only on the field list, not on the
    // individual measurement, so resolve each once up front.
    var fieldProperties = m_fieldList.ToDictionary(
        fieldName => fieldName,
        fieldName => GetAllProperties(measurementType).FirstOrDefault(prop => prop.Name == m_fieldNames[fieldName]));

    using (IDbCommand command = m_connection.CreateCommand())
    {
        commandBuilder.Append(string.Format(insertFormat, m_dbTableName, fields));

        foreach (IMeasurement measurement in measurements)
        {
            StringBuilder valuesBuilder = new StringBuilder();

            // Build the values list.
            foreach (string fieldName in m_fieldList)
            {
                string propertyName = m_fieldNames[fieldName];
                object value = fieldProperties[fieldName].GetValue(measurement, null);

                if (valuesBuilder.Length > 0)
                    valuesBuilder.Append(',');

                if ((object)value == null)
                {
                    valuesBuilder.Append("NULL");
                    continue;
                }

                valuesBuilder.Append(paramChar);
                valuesBuilder.Append('p');
                valuesBuilder.Append(paramCount);

                IDbDataParameter parameter = command.CreateParameter();
                parameter.ParameterName = paramChar + "p" + paramCount;
                parameter.Direction = ParameterDirection.Input;

                // Use invariant casing so field matching cannot break under
                // culture-sensitive casing rules (e.g., the Turkish "I" problem).
                switch (propertyName.ToLowerInvariant())
                {
                    case "id":
                        // Jet (Access) requires GUIDs to be brace-delimited
                        parameter.Value = m_isJetEngine ? "{" + value + "}" : value;
                        break;
                    case "key":
                        parameter.Value = value.ToString();
                        break;
                    case "timestamp":
                    case "publishedtimestamp":
                    case "receivedtimestamp":
                        Ticks timestamp = (Ticks)value;

                        // If the value is a timestamp, use the timestamp format
                        // specified by the user when inserting the timestamp.
                        if (m_timestampFormat == null)
                            parameter.Value = (long)timestamp;
                        else
                            parameter.Value = timestamp.ToString(m_timestampFormat);
                        break;
                    case "stateflags":
                        // IMeasurement.StateFlags field is an uint, cast this back to a
                        // signed integer to work with most database field types. An
                        // unchecked cast is used so values with the high bit set do not
                        // throw OverflowException (Convert.ToInt32 would).
                        parameter.Value = unchecked((int)Convert.ToUInt32(value));
                        break;
                    default:
                        parameter.Value = value;
                        break;
                }

                command.Parameters.Add(parameter);
                paramCount++;
            }

            commandBuilder.Append(string.Format(selectFormat, valuesBuilder));
            commandBuilder.Append(unionFormat);
            measurementCount++;
        }

        // Guard: with no measurements there is no trailing "UNION ALL" to trim and
        // nothing to insert — trimming here would otherwise corrupt the INSERT text.
        if (measurementCount == 0)
            return;

        // Remove "UNION ALL " from the end of the command text.
        commandBuilder.Remove(commandBuilder.Length - unionFormat.Length, unionFormat.Length);

        // Set the command text and execute the command.
        command.CommandText = commandBuilder.ToString();
        command.ExecuteNonQuery();

        // Count during iteration rather than re-enumerating the source sequence.
        m_measurementCount += measurementCount;
    }
}
// Any exceptions in this handler will be exposed through ProcessException event and cause OutageLogProcessor
// to requeue the data gap outage so it will be processed again (could be that remote system is offline).
private void ProcessDataGap(Outage dataGap)
{
    // Establish start and stop time for temporal session
    m_subscriptionInfo.StartTime = dataGap.Start.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture);
    m_subscriptionInfo.StopTime = dataGap.End.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture);

    OnStatusMessage("Starting data gap recovery for period \"{0}\" - \"{1}\"...", m_subscriptionInfo.StartTime, m_subscriptionInfo.StopTime);

    // Enable data monitor
    m_dataStreamMonitor.Enabled = true;

    // Reset measurement counters
    m_measurementsRecoveredForDataGap = 0;
    m_measurementsRecoveredOverLastInterval = 0;

    // Reset processing fields
    m_mostRecentRecoveredTime = dataGap.Start.Ticks;
    m_abnormalTermination = false;

    // Reset process completion wait handle
    m_dataGapRecoveryCompleted.Reset();

    // Start temporal data recovery session
    m_temporalSubscription.Subscribe(m_subscriptionInfo);

    // Wait for process completion - success or fail
    // NOTE: this blocks the calling thread until the recovery session signals completion;
    // presumably the OutageLogProcessor dispatches this handler off any critical thread — confirm.
    m_dataGapRecoveryCompleted.Wait();

    // If temporal session failed to connect, retry data recovery for this outage
    if (m_abnormalTermination)
    {
        // Make sure any data recovered so far doesn't get unnecessarily re-recovered, this requires that source historian report data in time-sorted order
        dataGap = new Outage(new DateTime(GSF.Common.Max((Ticks)dataGap.Start.Ticks, m_mostRecentRecoveredTime - (m_subscriptionInfo.UseMillisecondResolution ? Ticks.PerMillisecond : 1L)), DateTimeKind.Utc), dataGap.End);

        // Re-insert adjusted data gap at the top of the processing queue
        m_dataGapLog.Insert(0, dataGap);
        FlushLogAsync();

        // Distinguish "never connected" from "disconnected mid-recovery" in the operator message
        if (m_measurementsRecoveredForDataGap == 0)
            OnStatusMessage("WARNING: Failed to establish temporal session. Data recovery for period \"{0}\" - \"{1}\" will be re-attempted.", m_subscriptionInfo.StartTime, m_subscriptionInfo.StopTime);
        else
            OnStatusMessage("WARNING: Temporal session was disconnected during recovery operation. Data recovery for adjusted period \"{0}\" - \"{1}\" will be re-attempted.", dataGap.Start.ToString(OutageLog.DateTimeFormat, CultureInfo.InvariantCulture), m_subscriptionInfo.StopTime);
    }

    // Disconnect temporal session
    m_temporalSubscription.Stop();

    // Disable data monitor
    m_dataStreamMonitor.Enabled = false;

    // Final summary: prefix with WARNING when nothing was recovered
    OnStatusMessage("{0}Recovered {1} measurements for period \"{2}\" - \"{3}\".", m_measurementsRecoveredForDataGap == 0 ? "WARNING: " : "", m_measurementsRecoveredForDataGap, m_subscriptionInfo.StartTime, m_subscriptionInfo.StopTime);
}
/// <summary>
/// Creates a new BPA PDCstream specific <see cref="DataFrame"/> for the given <paramref name="timestamp"/>.
/// </summary>
/// <param name="timestamp">Timestamp for new <see cref="IFrame"/> in <see cref="Ticks"/>.</param>
/// <returns>New BPA PDCstream <see cref="DataFrame"/> at given <paramref name="timestamp"/>.</returns>
/// <remarks>
/// Note that the <see cref="ConcentratorBase"/> class (which the <see cref="ActionAdapterBase"/> is derived from)
/// is designed to sort <see cref="IMeasurement"/> implementations into an <see cref="IFrame"/> which represents
/// a collection of measurements at a given timestamp. The <c>CreateNewFrame</c> method allows consumers to create
/// their own <see cref="IFrame"/> implementations, in our case this will be a BPA PDCstream data frame.
/// </remarks>
protected override IFrame CreateNewFrame(Ticks timestamp)
{
    // Derive the frame's sample number from the timestamp's sub-second offset
    ushort sampleNumber = (ushort)((timestamp.DistanceBeyondSecond() + 1.0D) / base.TicksPerFrame);

    // Create a new BPA PDCstream data frame based on the current configuration frame
    DataFrame newFrame = new DataFrame(timestamp, m_configurationFrame, 1, sampleNumber);

    // Pre-populate the frame with one data cell (i.e., a PMU entry) per configured cell
    foreach (ConfigurationCell configurationCell in m_configurationFrame.Cells)
    {
        newFrame.Cells.Add(new DataCell(newFrame, configurationCell, true));
    }

    return newFrame;
}
/********************************************************
* CLASS METHODS
*********************************************************/

/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="setup">Setup handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <remarks>Modify with caution</remarks>
public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
{
    //Initialize:
    _dataPointCount = 0;
    var startingPortfolioValue = setup.StartingPortfolioValue;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();
    var marginCallFrequency = TimeSpan.FromMinutes(5);
    var nextMarginCallTime = DateTime.MinValue;

    //Initialize Properties:
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = setup.StartingDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    // Algorithm 1.0 data accessors
    var hasOnTradeBar = AddMethodInvoker<Dictionary<string, TradeBar>>(algorithm, methodInvokers, "OnTradeBar");
    var hasOnTick = AddMethodInvoker<Dictionary<string, List<Tick>>>(algorithm, methodInvokers, "OnTick");

    // Algorithm 2.0 data accessors
    var hasOnDataTradeBars = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
    var hasOnDataTicks = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

    // determine what mode we're in: v1 if neither of the v2 handlers exists
    var backwardsCompatibilityMode = !hasOnDataTradeBars && !hasOnDataTicks;

    // dividend and split events
    var hasOnDataDividends = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
    var hasOnDataSplits = AddMethodInvoker<Splits>(algorithm, methodInvokers);

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type))
            {
                continue;
            }

            //If we couldnt find the event handler, let the user know we can't fire that event.
            if (genericMethod == null)
            {
                algorithm.RunTimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }");
                _algorithmState = AlgorithmStatus.RuntimeError;
                return;
            }

            methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm.
    Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
    foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
    {
        //Check this backtest is still running:
        if (_algorithmState != AlgorithmStatus.Running)
        {
            break;
        }

        //Execute with TimeLimit Monitor:
        if (Isolator.IsCancellationRequested)
        {
            return;
        }

        // NOTE: "AlorithmTime" is a pre-existing typo in the DataStream API; cannot be renamed here.
        var time = DataStream.AlorithmTime;

        //If we're in backtest mode we need to capture the daily performance. We do this here directly
        //before updating the algorithm state with the new data from this time step, otherwise we'll
        //produce incorrect samples (they'll take into account this time step's new price values)
        if (backtestMode)
        {
            //Refresh the realtime event monitor:
            //in backtest mode use the algorithms clock as realtime.
            realtime.SetTime(time);

            //On day-change sample equity and daily performance for statistics calculations
            if (_previousTime.Date != time.Date)
            {
                //Sample the portfolio value over time for chart.
                results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                //Check for divide by zero
                if (startingPortfolioValue == 0m)
                {
                    results.SamplePerformance(_previousTime.Date, 0);
                }
                else
                {
                    results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
                }

                // Baseline for the next day's performance calculation
                startingPortfolioValue = algorithm.Portfolio.TotalPortfolioValue;
            }
        }

        //Update algorithm state after capturing performance from previous day

        //On each time step push the real time prices to the cashbook so we can have updated conversion rates
        algorithm.Portfolio.CashBook.Update(newData);

        //Update the securities properties: first before calling user code to avoid issues with data
        algorithm.Securities.Update(time, newData);

        // perform margin calls, in live mode we can also use realtime to emit these
        // NOTE(review): the live-mode condition "nextMarginCallTime > DateTime.Now" looks inverted — confirm intent.
        if (time >= nextMarginCallTime || (Engine.LiveMode && nextMarginCallTime > DateTime.Now))
        {
            // determine if there are possible margin call orders to be executed
            var marginCallOrders = algorithm.Portfolio.ScanForMarginCall();
            if (marginCallOrders.Count != 0)
            {
                // execute the margin call orders
                var executedOrders = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                foreach (var order in executedOrders)
                {
                    algorithm.Error(string.Format("Executed MarginCallOrder: {0} - Quantity: {1} @ {2}", order.Symbol, order.Quantity, order.Price));
                }
            }

            nextMarginCallTime = time + marginCallFrequency;
        }

        //Check if the user's signalled Quit: loop over data until day changes.
        if (algorithm.GetQuit())
        {
            _algorithmState = AlgorithmStatus.Quit;
            break;
        }

        // A handler may have set a runtime error on a previous iteration; stop processing if so
        if (algorithm.RunTimeError != null)
        {
            _algorithmState = AlgorithmStatus.RuntimeError;
            break;
        }

        //Pass in the new time first:
        algorithm.SetDateTime(time);

        //Trigger the data events: Invoke the types we have data for:
        var oldBars = new Dictionary<string, TradeBar>();
        var oldTicks = new Dictionary<string, List<Tick>>();
        var newBars = new TradeBars(time);
        var newTicks = new Ticks(time);
        var newDividends = new Dividends(time);
        var newSplits = new Splits(time);

        //Invoke all non-tradebars, non-ticks methods and build up the TradeBars and Ticks dictionaries
        // --> i == Subscription Configuration Index, so we don't need to compare types.
        foreach (var i in newData.Keys)
        {
            //Data point and config of this point:
            var dataPoints = newData[i];
            var config = feed.Subscriptions[i];

            //Keep track of how many data points we've processed
            _dataPointCount += dataPoints.Count;

            //We don't want to pump data that we added just for currency conversions
            if (config.IsInternalFeed)
            {
                continue;
            }

            //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
            // Aggregate Dividends and Splits -- invoke portfolio application methods
            foreach (var dataPoint in dataPoints)
            {
                var dividend = dataPoint as Dividend;
                if (dividend != null)
                {
                    Log.Trace("AlgorithmManager.Run(): Applying Dividend for " + dividend.Symbol);

                    // if this is a dividend apply to portfolio
                    algorithm.Portfolio.ApplyDividend(dividend);
                    if (hasOnDataDividends)
                    {
                        // and add to our data dictionary to pump into OnData(Dividends data)
                        newDividends.Add(dividend);
                    }

                    continue;
                }

                var split = dataPoint as Split;
                if (split != null)
                {
                    Log.Trace("AlgorithmManager.Run(): Applying Split for " + split.Symbol);

                    // if this is a split apply to portfolio
                    algorithm.Portfolio.ApplySplit(split);
                    if (hasOnDataSplits)
                    {
                        // and add to our data dictionary to pump into OnData(Splits data)
                        newSplits.Add(split);
                    }

                    continue;
                }

                //Update registered consolidators for this symbol index
                try
                {
                    for (var j = 0; j < config.Consolidators.Count; j++)
                    {
                        config.Consolidators[j].Update(dataPoint);
                    }
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Error("AlgorithmManager.Run(): RuntimeError: Consolidators update: " + err.Message);
                    return;
                }

                // TRADEBAR -- add to our dictionary
                var bar = dataPoint as TradeBar;
                if (bar != null)
                {
                    try
                    {
                        if (backwardsCompatibilityMode)
                        {
                            oldBars[bar.Symbol] = bar;
                        }
                        else
                        {
                            newBars[bar.Symbol] = bar;
                        }
                    }
                    catch (Exception err)
                    {
                        // A failed dictionary add is logged but deliberately non-fatal
                        Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                        Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                    }

                    continue;
                }

                // TICK -- add to our dictionary
                var tick = dataPoint as Tick;
                if (tick != null)
                {
                    if (backwardsCompatibilityMode)
                    {
                        List<Tick> ticks;
                        if (!oldTicks.TryGetValue(tick.Symbol, out ticks))
                        {
                            ticks = new List<Tick>(3);
                            oldTicks.Add(tick.Symbol, ticks);
                        }
                        ticks.Add(tick);
                    }
                    else
                    {
                        List<Tick> ticks;
                        if (!newTicks.TryGetValue(tick.Symbol, out ticks))
                        {
                            ticks = new List<Tick>(3);
                            newTicks.Add(tick.Symbol, ticks);
                        }
                        ticks.Add(tick);
                    }

                    continue;
                }

                // if it was nothing else then it must be custom data

                // CUSTOM DATA -- invoke on data method
                //Send data into the generic algorithm event handlers
                try
                {
                    methodInvokers[config.Type](algorithm, dataPoint);
                }
                catch (Exception err)
                {
                    algorithm.RunTimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }
            }
        }

        try
        {
            // fire off the dividend and split events before pricing events
            if (hasOnDataDividends && newDividends.Count != 0)
            {
                methodInvokers[typeof(Dividends)](algorithm, newDividends);
            }

            if (hasOnDataSplits && newSplits.Count != 0)
            {
                methodInvokers[typeof(Splits)](algorithm, newSplits);
            }
        }
        catch (Exception err)
        {
            algorithm.RunTimeError = err;
            _algorithmState = AlgorithmStatus.RuntimeError;
            Log.Debug("AlgorithmManager.Run(): RuntimeError: Dividends/Splits: " + err.Message + " STACK >>> " + err.StackTrace);
            return;
        }

        //After we've fired all other events in this second, fire the pricing events:
        if (backwardsCompatibilityMode)
        {
            //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers...");
            try
            {
                if (hasOnTradeBar && oldBars.Count > 0)
                {
                    methodInvokers[typeof(Dictionary<string, TradeBar>)](algorithm, oldBars);
                }

                if (hasOnTick && oldTicks.Count > 0)
                {
                    methodInvokers[typeof(Dictionary<string, List<Tick>>)](algorithm, oldTicks);
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithmState = AlgorithmStatus.RuntimeError;
                Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }
        }
        else
        {
            //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers...");
            try
            {
                if (hasOnDataTradeBars && newBars.Count > 0)
                {
                    methodInvokers[typeof(TradeBars)](algorithm, newBars);
                }

                if (hasOnDataTicks && newTicks.Count > 0)
                {
                    methodInvokers[typeof(Ticks)](algorithm, newTicks);
                }
            }
            catch (Exception err)
            {
                algorithm.RunTimeError = err;
                _algorithmState = AlgorithmStatus.RuntimeError;
                Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }
        }

        //If its the historical/paper trading models, wait until market orders have been "filled"
        // Manually trigger the event handler to prevent thread switch.
        transactions.ProcessSynchronousEvents();

        //Save the previous time for the sample calculations
        _previousTime = time;

        // Process any required events of the results handler such as sampling assets, equity, or stock prices.
        results.ProcessSynchronousEvents();
    } // End of ForEach DataStream

    //Stream over:: Send the final packet and fire final events:
    Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
    try
    {
        algorithm.OnEndOfAlgorithm();
    }
    catch (Exception err)
    {
        _algorithmState = AlgorithmStatus.RuntimeError;
        algorithm.RunTimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
        Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
        return;
    }

    // Process any required events of the results handler such as sampling assets, equity, or stock prices.
    results.ProcessSynchronousEvents(forceProcess: true);

    //Liquidate Holdings for Calculations:
    if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
    {
        // without this we can't liquidate equities since the exchange is 'technically' closed
        var hackedFrontier = algorithm.Time.AddMilliseconds(-1);
        algorithm.SetDateTime(hackedFrontier);
        foreach (var security in algorithm.Securities)
        {
            security.Value.SetMarketPrice(hackedFrontier, null);
        }

        Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
        algorithm.Liquidate();
        results.LogMessage("Algorithm Liquidated");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
    }

    //Manually stopped the algorithm
    if (_algorithmState == AlgorithmStatus.Stopped)
    {
        Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
        results.LogMessage("Algorithm Stopped");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
    }

    //Backtest deleted.
    if (_algorithmState == AlgorithmStatus.Deleted)
    {
        Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
        results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
    }

    //Algorithm finished, send regardless of commands:
    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

    //Take final samples:
    // NOTE(review): unlike the in-loop sampling, this final SamplePerformance does not guard
    // against startingPortfolioValue == 0 — potential divide-by-zero; confirm upstream invariant.
    results.SampleRange(algorithm.GetChartUpdates());
    results.SampleEquity(DataStream.AlorithmTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
    results.SamplePerformance(DataStream.AlorithmTime, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPortfolioValue) * 100 / startingPortfolioValue, 10));
} // End of Run();
private int m_numberOfSatellites = 1; // We'll initially assume synchronization is good until told otherwise

#endregion

#region [ Constructors ]

/// <summary>
/// Creates a new <see cref="ConfigurationCell"/> from specified parameters.
/// </summary>
/// <param name="parent">The reference to parent <see cref="ConfigurationFrame"/> of this <see cref="ConfigurationCell"/>.</param>
/// <param name="nominalFrequency">The nominal <see cref="LineFrequency"/> of the <see cref="FrequencyDefinition"/> of this <see cref="ConfigurationCell"/>.</param>
/// <param name="timeOffset">The time offset of F-NET device in <see cref="Ticks"/>.</param>
internal ConfigurationCell(ConfigurationFrame parent, LineFrequency nominalFrequency, Ticks timeOffset)
    : base(parent, parent.IDCode, Common.MaximumPhasorValues, Common.MaximumAnalogValues, Common.MaximumDigitalValues)
{
    NominalFrequency = nominalFrequency;

    // Cache the F-NET device time offset for later timestamp adjustments
    m_timeOffset = timeOffset;
}
/// <summary>
/// Creates a new <see cref="CommonFrameHeader"/> from specified parameters.
/// </summary>
/// <param name="frameSize">The <see cref="SelFastMessage.FrameSize"/> of this frame.</param>
/// <param name="timestamp">The timestamp of this frame.</param>
public CommonFrameHeader(FrameSize frameSize, Ticks timestamp)
{
    // Header construction only caches the frame size and timestamp; no parsing occurs here
    m_frameSize = frameSize;
    m_timestamp = timestamp;
}
// PI data updated handler.
// Translates a new PI AFValue into a time-series Measurement and publishes it,
// using the tag-to-key map to resolve the PI point ID to a MeasurementKey.
private void m_dataUpdateObserver_DataUpdated(object sender, EventArgs<AFValue> e)
{
    AFValue value = e.Argument;

    // Fix: the original code dereferenced value.Value and value.PIPoint.ID in the
    // status messages *before* performing its null check on value, which could
    // throw a NullReferenceException. Guard first, then log.
    if ((object)value == null)
    {
        OnStatusMessage("DEBUG: Data observer event handler called with a null value...");
        return;
    }

    OnStatusMessage("DEBUG: Data observer event handler called with a new value: {0:N3}...", Convert.ToDouble(value.Value));

    MeasurementKey key;

    OnStatusMessage("DEBUG: Data observer event handler looking up point ID {0:N0} in table...", value.PIPoint.ID);

    if (m_tagKeyMap.TryGetValue(value.PIPoint.ID, out key))
    {
        OnStatusMessage("DEBUG: Data observer event handler found point ID {0:N0} in table: {1}...", value.PIPoint.ID, key);

        Measurement measurement = new Measurement();

        measurement.Key = key;
        measurement.Value = Convert.ToDouble(value.Value);
        measurement.Timestamp = value.Timestamp.UtcTime;

        OnNewMeasurements(new[] { measurement });

        // Track latest received measurement for status reporting
        m_lastReceivedTimestamp = measurement.Timestamp;
        m_lastReceivedValue = measurement.Value;
    }
    else
    {
        OnStatusMessage("DEBUG: Data observer event handler did not find point ID {0:N0} in table...", value.PIPoint.ID);
    }
}
/// <summary>
/// Processes 1-second data window consisting of measurements.
/// </summary>
/// <param name="timestamp">Top of second window timestamp.</param>
/// <param name="dataWindow">1-second data window for input values.</param>
public void ProcessDataWindow(Ticks timestamp, IMeasurement[,] dataWindow)
{
    // Fix: assertion message previously reported dataWindow.Length (total element
    // count) rather than the per-input frame count actually being validated.
    Debug.Assert(dataWindow.GetLength(1) == FramesPerSecond, $"Expected {FramesPerSecond} data window inputs, received {dataWindow.GetLength(1)}.");

    int inputCount = InputTypes.Length;

    // Break measurement data window into parallel value, time and quality vectors.
    // The original switch on InputTypes[i] had byte-identical bodies for VPHM, VPHA,
    // IPHM, IPHA and the default case, so it has been collapsed into a single loop.
    double[,] values = new double[inputCount, FramesPerSecond];
    bool[,] valueQualities = new bool[inputCount, FramesPerSecond];
    bool[,] timeQualities = new bool[inputCount, FramesPerSecond];

    for (int i = 0; i < inputCount; i++)
    {
        for (int j = 0; j < FramesPerSecond; j++)
        {
            values[i, j] = dataWindow[i, j].AdjustedValue;
            valueQualities[i, j] = dataWindow[i, j].ValueQualityIsGood();
            timeQualities[i, j] = dataWindow[i, j].TimestampQualityIsGood();
        }
    }

    // Derive overall time result quality state based on all incoming states --
    // good only when every individual timestamp quality flag is good
    m_timeQualityIsGood = timeQualities.Cast<bool>().All(state => state);

    // Process vector based data window
    DetectorAPI.Load(timestamp, values, valueQualities);
}
/// <summary>
/// Creates a new <see cref="DataFrame"/> from specified parameters.
/// </summary>
/// <param name="timestamp">The exact timestamp, in <see cref="Ticks"/>, of the data represented by this <see cref="DataFrame"/>.</param>
/// <param name="configurationFrame">The <see cref="ConfigurationFrame"/> associated with this <see cref="DataFrame"/>.</param>
/// <param name="packetNumber">Packet number for this <see cref="DataFrame"/>.</param>
/// <param name="sampleNumber">Sample number for this <see cref="DataFrame"/>.</param>
/// <remarks>
/// This constructor is used by a consumer to generate a BPA PDCstream data frame.
/// </remarks>
public DataFrame(Ticks timestamp, ConfigurationFrame configurationFrame, byte packetNumber, ushort sampleNumber)
    : base(new DataCellCollection(), timestamp, configurationFrame)
{
    // Packet number is exposed through a property; sample number is kept in a private field
    PacketNumber = packetNumber;
    m_sampleNumber = sampleNumber;
}
/// <summary>
/// Initializes a new instance of the <see cref="Slice"/> class
/// </summary>
/// <param name="time">The timestamp for this slice of data</param>
/// <param name="data">The raw data in this slice</param>
/// <param name="tradeBars">The trade bars for this slice</param>
/// <param name="quoteBars">The quote bars for this slice</param>
/// <param name="ticks">This ticks for this slice</param>
/// <param name="optionChains">The option chains for this slice</param>
/// <param name="splits">The splits for this slice</param>
/// <param name="dividends">The dividends for this slice</param>
/// <param name="delistings">The delistings for this slice</param>
/// <param name="symbolChanges">The symbol changed events for this slice</param>
/// <param name="hasData">true if this slice contains data</param>
public Slice(DateTime time, IEnumerable<BaseData> data, TradeBars tradeBars, QuoteBars quoteBars, Ticks ticks, OptionChains optionChains, Splits splits, Dividends dividends, Delistings delistings, SymbolChangedEvents symbolChanges, bool? hasData = null)
{
    Time = time;

    _dataByType = new Dictionary<Type, Lazy<object>>();

    // Raw data dictionary is built lazily on first access
    _data = new Lazy<DataDictionary<SymbolData>>(() => CreateDynamicDataDictionary(data));

    // NOTE: when hasData is not supplied, this forces evaluation of the lazy data dictionary
    HasData = hasData ?? _data.Value.Count > 0;

    _ticks = CreateTicksCollection(ticks);
    _bars = CreateCollection<TradeBars, TradeBar>(tradeBars);
    _quoteBars = CreateCollection<QuoteBars, QuoteBar>(quoteBars);
    _optionChains = CreateCollection<OptionChains, OptionChain>(optionChains);
    _splits = CreateCollection<Splits, Split>(splits);
    _dividends = CreateCollection<Dividends, Dividend>(dividends);
    _delistings = CreateCollection<Delistings, Delisting>(delistings);
    _symbolChangedEvents = CreateCollection<SymbolChangedEvents, SymbolChangedEvent>(symbolChanges);
}
/// <summary>
/// Creates a new <see cref="TimeTagBase"/>, given standard .NET <see cref="DateTime"/>.
/// </summary>
/// <param name="baseDateOffsetTicks">Ticks of timetag base.</param>
/// <param name="timestamp">Timestamp in <see cref="Ticks"/> used to create timetag from.</param>
protected TimeTagBase(long baseDateOffsetTicks, Ticks timestamp)
{
    // Zero base 100-nanosecond ticks from 1/1/1970 and convert to seconds.
    m_baseDateOffsetTicks = baseDateOffsetTicks;

    // Decimal division preserves fractional-second precision of the 100ns tick offset
    m_seconds = (timestamp - m_baseDateOffsetTicks) / (decimal)Ticks.PerSecond;
}
/// <summary>
/// Gets a short one-line status of this <see cref="AdapterBase"/>.
/// </summary>
/// <param name="maxLength">Maximum number of available characters for display.</param>
/// <returns>A short one-line summary of the current status of this <see cref="AdapterBase"/>.</returns>
public override string GetShortStatus(int maxLength)
{
    // Current playback position derived from the number of samples streamed so far
    TimeSpan currentTime = Ticks.FromSeconds(m_dataIndex / (double)m_sampleRate);

    string fileName = Path.GetFileName(WavFileName);
    string position = currentTime.ToString(@"m\:ss");
    string totalLength = m_audioLength.ToString(@"m\:ss");
    double progress = currentTime.TotalSeconds / m_audioLength.TotalSeconds;

    return string.Format("Streaming {0} at time {1} / {2} - {3:0.00%}.", fileName, position, totalLength, progress);
}
// Generates new measurements since the last time this was called.
// Runs continuously while Enabled: paces WAV file samples against wall-clock time,
// publishing one measurement per channel per sample at its computed timestamp.
private void ProcessMeasurements()
{
    // Declare the variables used in this method.
    // List is presized for roughly one gap-threshold's worth of samples (plus 10% headroom)
    List<IMeasurement> measurements = new List<IMeasurement>((int)(Ticks.ToSeconds(GapThreshold) * m_sampleRate * m_channels * 1.1D));
    LittleBinaryValue[] sample;

    while (Enabled)
    {
        try
        {
            SpinWait spinner = new SpinWait();

            // Determine what time it is now.
            long now = DateTime.UtcNow.Ticks;

            // Assign a timestamp to the next sample based on its location
            // in the file relative to the other samples in the file.
            long timestamp = m_startTime + (m_dataIndex * Ticks.PerSecond / m_sampleRate);

            if (now - timestamp > GapThreshold)
            {
                // Reset the start time and delay next transmission in an attempt to catch up
                m_startTime = now - (m_dataIndex * Ticks.PerSecond / m_sampleRate) + Ticks.FromSeconds(RecoveryDelay);
                timestamp = now;
                OnStatusMessage("Start time reset.");
            }

            // Keep generating measurements until
            // we catch up to the current time.
            while (timestamp < now)
            {
                sample = m_data.GetNextSample();

                // If the sample is null, we've reached the end of the file.
                // Close and reopen it, resetting the data index and start time.
                if (sample == null)
                {
                    m_data.Close();
                    m_data.Dispose();
                    m_data = WaveDataReader.FromFile(WavFileName);
                    m_dataIndex = 0;
                    m_startTime = timestamp;
                    sample = m_data.GetNextSample();
                }

                // Create new measurements, one for each channel,
                // and add them to the measurements list.
                for (int i = 0; i < m_channels; i++)
                {
                    measurements.Add(Measurement.Clone(OutputMeasurements[i], sample[i].ConvertToType(TypeCode.Double), timestamp));
                }

                // Update the data index and recalculate
                // the assigned timestamp for the next sample.
                m_dataIndex++;
                timestamp = m_startTime + (m_dataIndex * Ticks.PerSecond / m_sampleRate);
            }

            // Publish this batch and reuse the list for the next pass
            OnNewMeasurements(measurements);
            measurements.Clear();

            // Spin until within 1% of the gap threshold of the target timestamp
            while (DateTime.UtcNow.Ticks - timestamp <= GapThreshold / 100)
            {
                // Ahead of schedule -- pause for a moment
                spinner.SpinOnce();
            }
        }
        catch (Exception ex)
        {
            // Report the failure and keep streaming; the outer loop continues while Enabled
            OnProcessException(ex);
        }
    }
}
// Exercises TickWatch sample averaging: minimum measurement overhead, averaged
// sleep probes, and Ticks.Since() arithmetic across unit conversions.
public void Ages()
{
    UT_INIT();
    Log.SetVerbosity(new ConsoleLogger(), Verbosity.Verbose, "/");
    Log.MapThreadName("UnitTest");
    Log.SetDomain("TickWatch", Scope.Method);

    TickWatch tt = new TickWatch();

    // minimum time measuring
    {
        tt.Start();
        tt.Sample();
        tt.Reset(); // we need to do this once before, otherwise C# might be
                    // very slow. Obviously the optimizer...
        tt.Start();
        tt.Sample();

        long ttAverageInNanos = tt.GetAverage().InNanos();
        long ttAverageInMicros = tt.GetAverage().InMicros();
        long ttAverageInMillis = tt.GetAverage().InMillis();

        Log.Info("TickWatch minimum measurement nanos: " + ttAverageInNanos);
        Log.Info("TickWatch minimum measurement micros: " + ttAverageInMicros);
        Log.Info("TickWatch minimum measurement millis: " + ttAverageInMillis);

        // A single start/sample cycle should cost well under 5 microseconds
        UT_TRUE(ttAverageInNanos < 5000);
        UT_TRUE(ttAverageInMicros <= 5);
        UT_TRUE(ttAverageInMillis == 0);
    }

    // minimum sleep time measuring
    {
        tt.Reset();

        for (int i = 0; i < 100; i++)
        {
            ALIB.SleepNanos(1);
            tt.Sample();
        }

        Ticks avg = tt.GetAverage();
        Log.Info("100 probes of 1 ns of sleep leads to average sleep time of " + avg.InNanos() + " ns");
    }

    // sleep two times 20 ms and probe it to an average
    {
        tt.Reset();
        tt.Start();
        ALIB.SleepMillis(20);
        tt.Sample();

        // gap between the two probes is intentionally not sampled
        ALIB.SleepMillis(80);

        tt.Start();
        ALIB.SleepMillis(20);
        tt.Sample();

        long cnt = tt.GetSampleCnt();
        long avg = tt.GetAverage().InMillis();
        double hertz = tt.GetAverage().InHertz(1);

        Log.Info("TickWatch sum is " + tt.GetCumulated().InMillis() + " after " + cnt + " times 20 ms sleep");
        Log.Info(" average is: " + avg + " ms");
        Log.Info(" in Hertz: " + hertz);

        UT_TRUE(hertz < 53);
        UT_TRUE(hertz > 30); // should work even on heavily loaded machines
        UT_EQ(2, cnt);
        UT_TRUE(avg >= 18);
        UT_TRUE(avg < 45); // should work even on heavily loaded machines
    }

    // Ticks Since
    {
        Ticks tt1 = new Ticks();
        tt1.FromSeconds(1000);
        Ticks tt2 = new Ticks();
        tt2.FromSeconds(1001);

        // 1 second difference expressed in each supported unit
        UT_TRUE(tt2.Since(tt1).InMillis() == 1000L);
        UT_TRUE(tt2.Since(tt1).InMicros() == 1000L * 1000L);
        UT_TRUE(tt2.Since(tt1).InNanos() == 1000L * 1000L * 1000L);
    }
}
// Verifies basic Ticks unit conversions (nanos/micros/millis/seconds/Hertz),
// Add/Sub arithmetic, the internal timer frequency, and creation-time bookkeeping.
public void Basics()
{
    UT_INIT();
    Log.SetVerbosity(new ConsoleLogger(), Verbosity.Verbose, "/");
    Log.MapThreadName("UnitTest");
    Log.SetDomain("TickWatch", Scope.Method);

    Log.Info("\n### TicksBasics ###");

    // check times
    {
        Ticks t = new Ticks();

        t.FromNanos(42); // beyond resolution in C#:
        UT_EQ(t.InNanos(), 42L);
        UT_EQ(t.InMicros(), 0L);
        UT_EQ(t.InMillis(), 0L);
        UT_EQ(t.InSeconds(), 0L);

        t.FromMicros(42);
        UT_EQ(t.InNanos(), 42000L);
        UT_EQ(t.InMicros(), 42L);
        UT_EQ(t.InMillis(), 0L);
        UT_EQ(t.InSeconds(), 0L);

        t.FromMillis(42);
        UT_EQ(t.InNanos(), 42000000L);
        UT_EQ(t.InMicros(), 42000L);
        UT_EQ(t.InMillis(), 42L);
        UT_EQ(t.InSeconds(), 0L);

        t.FromSeconds(42);
        UT_EQ(t.InNanos(), 42000000000L);
        UT_EQ(t.InMicros(), 42000000L);
        UT_EQ(t.InMillis(), 42000L);
        UT_EQ(t.InSeconds(), 42L);

        Ticks diff = new Ticks();
        diff.FromMillis(100);

        // Add and Sub must round-trip exactly
        t.Add(diff);
        UT_EQ(t.InNanos(), 42100000000L);
        UT_EQ(t.InMicros(), 42100000L);
        UT_EQ(t.InMillis(), 42100L);
        UT_EQ(t.InSeconds(), 42L);

        t.Sub(diff);
        UT_EQ(t.InNanos(), 42000000000L);
        UT_EQ(t.InMicros(), 42000000L);
        UT_EQ(t.InMillis(), 42000L);
        UT_EQ(t.InSeconds(), 42L);

        // InHertz rounds to the requested number of decimal places
        t.FromMillis(100);
        UT_EQ(t.InHertz(), 10.0);
        t.FromMillis(300);
        UT_EQ(t.InHertz(0), 3.0);
        UT_EQ(t.InHertz(1), 3.3);
        UT_EQ(t.InHertz(2), 3.33);
        UT_EQ(t.InHertz(5), 3.33333);
    }

    // check internal frequency
    {
        double freq = Ticks.InternalFrequency;
        Log.Info("TickWatch InternalFrequency: " + freq);
        UT_TRUE(freq >= 1000000.0);
    }

    // check TickWatch creation time
    {
        Ticks creationTimeDiff = new Ticks();
        creationTimeDiff.Sub(Ticks.CreationTime);

        Log.Info("TickWatch library creation was: " + creationTimeDiff.InNanos() + "ns ago");
        Log.Info("TickWatch library creation was: " + creationTimeDiff.InMicros() + "µs ago");
        Log.Info("TickWatch library creation was: " + creationTimeDiff.InMillis() + "ms ago");
        Log.Info("TickWatch library creation was: " + creationTimeDiff.InSeconds() + "s ago");

        UT_TRUE(creationTimeDiff.InNanos() > 100); // It should really take 100 nanoseconds to get here!
        UT_TRUE(creationTimeDiff.InSeconds() < 3600); // these tests will probably not last an hour
    }

    // check if we can sleep for ~30ms (NOTE: comment previously said 100ms; code sleeps 30ms)
    {
        Ticks start = new Ticks();
        ALIB.SleepMillis(30);
        Ticks sleepTime = new Ticks();
        sleepTime.Sub(start);

        Log.Info("TickWatch diff after 100ms sleep: " + sleepTime.InMillis() + " ms");

        UT_TRUE(sleepTime.InMillis() > 28);
        UT_TRUE(sleepTime.InMillis() < 150); // should work even on heavily loaded machines
    }
}
/// <summary>
/// Creates a new <see cref="UnixTimeTag"/>, given specified <see cref="Ticks"/>.
/// </summary>
/// <param name="timestamp">Timestamp in <see cref="Ticks"/> to create Unix timetag from (minimum valid date is 1/1/1970).</param>
/// <remarks>
/// This constructor will accept a <see cref="DateTime"/> parameter since <see cref="Ticks"/> is implicitly castable to a <see cref="DateTime"/>.
/// </remarks>
public UnixTimeTag(Ticks timestamp)
    : base(BaseTicks, timestamp)
{
    // All conversion work is performed by the base class constructor
}
/// <summary>
/// Process frame of time-aligned measurements that arrived within the defined lag time.
/// </summary>
/// <param name="frame"><see cref="IFrame"/> of measurements that arrived within lag time and are ready for processing.</param>
/// <param name="index">Index of <see cref="IFrame"/> within one second of data ranging from zero to frames per second - 1.</param>
protected override void PublishFrame(IFrame frame, int index)
{
    Ticks timestamp = frame.Timestamp;
    ConcurrentDictionary<MeasurementKey, IMeasurement> measurements = frame.Measurements;

    if (measurements.Count > 0)
    {
        // m_fileData is shared with the export task; guard all buffer access
        lock (m_fileDataLock)
        {
            IMeasurement measurement;
            MeasurementKey inputMeasurementKey;
            SignalType signalType;
            double measurementValue;

            // First frame after an export starts a fresh buffer and interval
            if ((object)m_fileData == null)
            {
                m_fileData = new StringBuilder();
                m_startTime = timestamp;
                m_rowCount = 0;
            }

            // Each row begins with the frame timestamp
            m_fileData.AppendFormat("{0}", timestamp.ToString("dd-MMM-yyyy HH:mm:ss.fff"));

            // Export all defined input measurements
            for (int i = 0; i < InputMeasurementKeys.Length; i++)
            {
                m_fileData.Append(',');
                inputMeasurementKey = InputMeasurementKeys[i];
                signalType = InputMeasurementKeyTypes[i];

                // Get measurement for this frame, falling back on latest value
                measurementValue = measurements.TryGetValue(inputMeasurementKey, out measurement) ? measurement.AdjustedValue : LatestMeasurements[inputMeasurementKey];

                // Export measurement value making any needed adjustments based on signal type
                if (signalType == SignalType.VPHM)
                {
                    // Convert voltages to base units
                    m_fileData.Append(measurementValue / SI.Kilo);
                }
                else
                {
                    // Export all other types of measurements as their raw value
                    m_fileData.Append(measurementValue);
                }
            }

            // Terminate line
            m_fileData.AppendLine();
            m_rowCount++;
        }
    }

    // Only publish when the export interval time has passed
    if ((timestamp - m_startTime).ToMilliseconds() > m_exportInterval)
        m_fileExport.TryRunOnceAsync();
}
// Handles measurements recovered by the temporal subscription: updates recovery
// statistics, republishes the measurements, and honors cancellation requests.
private void TemporalSubscription_NewMeasurements(object sender, EventArgs<ICollection<IMeasurement>> e)
{
    ICollection<IMeasurement> measurements = e.Argument;
    int count = measurements.Count;

    if (count > 0)
    {
        m_measurementsRecoveredForDataGap += count;
        m_measurementsRecoveredOverLastInterval += count;

        // Publish recovered measurements back to consumer
        OnRecoveredMeasurements(measurements);

        // Track latest reporting time
        long latestRecoveredTime = measurements.Max(m => (long)m.Timestamp);

        if (latestRecoveredTime > m_mostRecentRecoveredTime)
            m_mostRecentRecoveredTime = latestRecoveredTime;
    }

    // See if consumer has requested to stop recovery operations
    if (m_enabled)
        return;

    OnStatusMessage("Data gap recovery has been canceled.");
    m_abnormalTermination = true;

    if ((object)m_dataGapRecoveryCompleted != null)
        m_dataGapRecoveryCompleted.Set();

    m_dataStreamMonitor.Enabled = false;
}
/// <summary>
/// Writes next COMTRADE record in ASCII format.
/// </summary>
/// <param name="output">Destination stream.</param>
/// <param name="schema">Source schema.</param>
/// <param name="timestamp">Record timestamp (implicitly castable as <see cref="DateTime"/>).</param>
/// <param name="values">Values to write - 16-bit digitals should exist as a word in an individual double value, method will write out bits.</param>
/// <param name="sample">User incremented sample index.</param>
/// <param name="injectFracSecValue">Determines if FRACSEC value should be automatically injected into stream as first digital - defaults to <c>true</c>.</param>
/// <param name="fracSecValue">FRACSEC value to inject into output stream - defaults to 0x0000.</param>
/// <remarks>
/// This function is primarily intended to write COMTRADE ASCII data records based on synchrophasor data
/// (see Annex H: Schema for Phasor Data 2150 Using the COMTRADE File Standard in IEEE C37.111-2010),
/// it may be necessary to manually write records for other COMTRADE needs (e.g., non 16-bit digitals).
/// </remarks>
public static void WriteNextRecordAscii(StreamWriter output, Schema schema, Ticks timestamp, double[] values, uint sample, bool injectFracSecValue = true, ushort fracSecValue = 0x0000)
{
    // Make timestamp relative to beginning of file
    timestamp -= schema.StartTime.Value;

    // Record time column is in scaled microseconds per the schema's time factor
    uint microseconds = (uint)(timestamp.ToMicroseconds() / schema.TimeFactor);
    double value;
    StringBuilder line = new StringBuilder();
    bool isFirstDigital = true;

    line.Append(sample);
    line.Append(',');
    line.Append(microseconds);

    for (int i = 0; i < values.Length; i++)
    {
        value = values[i];

        // Leading indices are analog channels; remaining values are treated as 16-bit digital words
        if (i < schema.AnalogChannels.Length)
        {
            // Reverse the channel scaling (adder/multiplier) before writing
            value -= schema.AnalogChannels[i].Adder;
            value /= schema.AnalogChannels[i].Multiplier;

            line.Append(',');
            line.Append(value);
        }
        else
        {
            if (isFirstDigital)
            {
                // Handle automatic injection of IEEE C37.118 FRACSEC digital value if requested
                isFirstDigital = false;

                if (injectFracSecValue)
                {
                    for (int j = 0; j < 16; j++)
                    {
                        line.Append(',');
                        line.Append(fracSecValue.CheckBits(BitExtensions.BitVal(j)) ? 1 : 0);
                    }
                }
            }

            // Each digital word is expanded into its 16 individual bits
            ushort digitalWord = (ushort)value;

            for (int j = 0; j < 16; j++)
            {
                line.Append(',');
                line.Append(digitalWord.CheckBits(BitExtensions.BitVal(j)) ? 1 : 0);
            }
        }
    }

    // Make sure FRACSEC values are injected even when no digitals were present
    if (isFirstDigital && injectFracSecValue)
    {
        for (int j = 0; j < 16; j++)
        {
            line.Append(',');
            line.Append(fracSecValue.CheckBits(BitExtensions.BitVal(j)) ? 1 : 0);
        }
    }

    output.WriteLine(line.ToString());
}
// Indicates whether the given measurement has maintained the same
// value for at least a number of seconds defined by the delay.
private bool RaiseIfFlatline(IMeasurement measurement)
{
    // Any change in value restarts the flatline tracking window
    if (measurement.Value != m_lastValue)
    {
        m_lastChanged = measurement.Timestamp;
        m_lastValue = measurement.Value;
    }

    long threshold = Ticks.FromSeconds(Delay.GetValueOrDefault());
    long elapsed = measurement.Timestamp - m_lastChanged;

    // Flatlined when the value has been unchanged for at least the configured delay
    return elapsed >= threshold;
}
/// <summary>
/// Creates a new COMTRADE configuration <see cref="Schema"/>.
/// </summary>
/// <param name="metadata">Schema <see cref="ChannelMetadata"/> records.</param>
/// <param name="stationName">Station name for the schema.</param>
/// <param name="deviceID">Device ID for the schema.</param>
/// <param name="dataStartTime">Data start time.</param>
/// <param name="sampleCount">Total data samples (i.e., total number of rows).</param>
/// <param name="isBinary">Determines if data file should be binary or ASCII - defaults to <c>true</c> for binary.</param>
/// <param name="timeFactor">Time factor to use in schema - defaults to 1.</param>
/// <param name="samplingRate">Desired sampling rate - defaults to 30Hz.</param>
/// <param name="nominalFrequency">Nominal frequency - defaults to 60Hz.</param>
/// <param name="includeFracSecDefinition">Determines if the FRACSEC word digital definitions should be included - defaults to <c>true</c>.</param>
/// <returns>New COMTRADE configuration <see cref="Schema"/>.</returns>
/// <remarks>
/// This function is primarily intended to create a configuration based on synchrophasor data
/// (see Annex H: Schema for Phasor Data 2150 Using the COMTRADE File Standard in IEEE C37.111-2010),
/// it may be necessary to manually create a schema object for other COMTRADE needs. You can call
/// the <see cref="Schema.FileImage"/> property to return a string that that can be written to a file
/// that will be the contents of the configuration file.
/// </remarks>
public static Schema CreateSchema(IEnumerable<ChannelMetadata> metadata, string stationName, string deviceID, Ticks dataStartTime, int sampleCount, bool isBinary = true, double timeFactor = 1.0D, double samplingRate = 30.0D, double nominalFrequency = 60.0D, bool includeFracSecDefinition = true)
{
    Schema schema = new Schema();

    schema.StationName = stationName;
    schema.DeviceID = deviceID;

    // Single sample-rate entry covering all rows of the data file
    SampleRate samplingFrequency = new SampleRate();
    samplingFrequency.Rate = samplingRate;
    samplingFrequency.EndSample = sampleCount;

    schema.SampleRates = new[] { samplingFrequency };

    Timestamp startTime;
    startTime.Value = dataStartTime;

    schema.StartTime = startTime;
    schema.TriggerTime = startTime;

    schema.FileType = isBinary ? FileType.Binary : FileType.Ascii;
    schema.TimeFactor = timeFactor;

    List<AnalogChannel> analogChannels = new List<AnalogChannel>();
    List<DigitalChannel> digitalChannels = new List<DigitalChannel>();
    int analogIndex = 1;
    int digitalIndex = 1;

    if (includeFracSecDefinition)
    {
        // Add default time quality digitals for IEEE C37.118 FRACSEC word. Note that these flags, as
        // defined in Annex H of the IEEE C37.111-2010 standard, assume full export was all from one
        // source device. This a poor assumption since data can be exported from historical data for any
        // number of points which could have come from any number of devices all with different FRACSEC
        // values. Regardless there is only one FRACSEC definition defined and, if included, it must
        // come as the first set of digitals in the COMTRADE configuration.
        for (int i = 0; i < 4; i++)
        {
            digitalChannels.Add(new DigitalChannel
            {
                Index = digitalIndex,
                Name = "TQ_CNT" + i,
                PhaseID = "T" + digitalIndex++
            });
        }

        digitalChannels.Add(new DigitalChannel
        {
            Index = digitalIndex,
            Name = "TQ_LSPND",
            PhaseID = "T" + digitalIndex++
        });

        digitalChannels.Add(new DigitalChannel
        {
            Index = digitalIndex,
            Name = "TQ_LSOCC",
            PhaseID = "T" + digitalIndex++
        });

        digitalChannels.Add(new DigitalChannel
        {
            Index = digitalIndex,
            Name = "TQ_LSDIR",
            PhaseID = "T" + digitalIndex++
        });

        digitalChannels.Add(new DigitalChannel
        {
            Index = digitalIndex,
            Name = "RSV",
            PhaseID = "T" + digitalIndex++
        });

        for (int i = 1; i < 9; i++)
        {
            digitalChannels.Add(new DigitalChannel
            {
                Index = digitalIndex,
                Name = "RESV" + i,
                PhaseID = "T" + digitalIndex++
            });
        }
    }

    // Add meta data for selected points sorted analogs followed by status flags then digitals
    foreach (ChannelMetadata record in metadata.OrderBy(m => m, ChannelMetadataSorter.Default))
    {
        if (record.IsDigital)
        {
            // Every synchrophasor digital is 16-bits
            for (int i = 0; i < 16; i++)
            {
                digitalChannels.Add(new DigitalChannel
                {
                    Index = digitalIndex++,
                    Name = record.Name,
                    PhaseID = "B" + i.ToString("X")
                });
            }
        }
        else
        {
            switch (record.SignalType)
            {
                case SignalType.IPHM: // Current Magnitude
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = "Pm",
                        Units = "A",
                        Multiplier = 0.05D
                    });
                    break;
                case SignalType.VPHM: // Voltage Magnitude
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = "Pm",
                        Units = "V",
                        Multiplier = 5.77362D
                    });
                    break;
                case SignalType.IPHA: // Current Phase Angle
                case SignalType.VPHA: // Voltage Phase Angle
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = "Pa",
                        Units = "Rads",
                        Multiplier = 1.0E-4D
                    });
                    break;
                case SignalType.FREQ: // Frequency
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = "F",
                        Units = "Hz",
                        Adder = (double)nominalFrequency,
                        Multiplier = 0.001D
                    });
                    break;
                case SignalType.DFDT: // Frequency Delta (dF/dt)
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = "dF",
                        Units = "Hz/s",
                        Multiplier = 0.01D
                    });
                    break;
                case SignalType.FLAG: // Status flags
                    // Add synchrophasor status flag specific digitals
                    int statusIndex = 0;

                    for (int i = 1; i < 5; i++)
                    {
                        digitalChannels.Add(new DigitalChannel
                        {
                            Index = digitalIndex++,
                            Name = record.Name + ":TRG" + i,
                            PhaseID = "S" + statusIndex++.ToString("X")
                        });
                    }

                    for (int i = 1; i < 3; i++)
                    {
                        digitalChannels.Add(new DigitalChannel
                        {
                            Index = digitalIndex++,
                            Name = record.Name + ":UNLK" + i,
                            PhaseID = "S" + statusIndex++.ToString("X")
                        });
                    }

                    for (int i = 1; i < 5; i++)
                    {
                        digitalChannels.Add(new DigitalChannel
                        {
                            Index = digitalIndex++,
                            Name = record.Name + ":SEC" + i,
                            PhaseID = "S" + statusIndex++.ToString("X")
                        });
                    }

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":CFGCH",
                        PhaseID = "S" + statusIndex++.ToString("X")
                    });

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":PMUTR",
                        PhaseID = "S" + statusIndex++.ToString("X")
                    });

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":SORT",
                        PhaseID = "S" + statusIndex++.ToString("X")
                    });

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":SYNC",
                        PhaseID = "S" + statusIndex++.ToString("X")
                    });

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":PMUERR",
                        PhaseID = "S" + statusIndex++.ToString("X")
                    });

                    digitalChannels.Add(new DigitalChannel
                    {
                        Index = digitalIndex++,
                        Name = record.Name + ":DTVLD",
                        PhaseID = "S" + statusIndex.ToString("X")
                    });
                    break;
                default: // All other signals assumed to be analog values
                    analogChannels.Add(new AnalogChannel
                    {
                        Index = analogIndex++,
                        Name = record.Name,
                        PhaseID = ""
                    });
                    break;
            }
        }
    }

    schema.AnalogChannels = analogChannels.ToArray();
    schema.DigitalChannels = digitalChannels.ToArray();
    schema.NominalFrequency = nominalFrequency;

    return schema;
}
// Keeps track of the signal's timestamps to determine whether a given
// measurement is eligible to raise the alarm based on the delay.
private bool CheckDelay(IMeasurement measurement, bool raiseCondition)
{
    if (!raiseCondition)
    {
        // Remember the most recent time the signal failed the raise test
        m_lastNegative = measurement.Timestamp;
        return false;
    }

    // Elapsed time since the raise test last failed
    Ticks elapsed = measurement.Timestamp - m_lastNegative;

    // Eligible only after the condition has held continuously for the configured delay
    return elapsed >= Ticks.FromSeconds(m_delay.GetValueOrDefault());
}
/// <summary>
/// Creates a new <see cref="ConfigurationFrameBase"/> from specified parameters.
/// </summary>
/// <param name="idCode">The ID code of this <see cref="ConfigurationFrameBase"/>.</param>
/// <param name="cells">The reference to the collection of cells for this <see cref="ConfigurationFrameBase"/>.</param>
/// <param name="timestamp">The exact timestamp, in <see cref="Ticks"/>, of the data represented by this <see cref="ConfigurationFrameBase"/>.</param>
/// <param name="frameRate">The defined frame rate of this <see cref="ConfigurationFrameBase"/>.</param>
protected ConfigurationFrameBase(ushort idCode, ConfigurationCellCollection cells, Ticks timestamp, ushort frameRate)
    : base(idCode, cells, timestamp)
{
    // ID code, cells and timestamp are handled by the base class; only frame rate is assigned here
    FrameRate = frameRate;
}
/// <inheritdoc />
public override int GetHashCode()
{
    unchecked // Overflow is fine, just wrap
    {
        // Members participate in declaration order; null members are skipped,
        // exactly matching the per-property if-chain this replaces.
        object[] members =
        {
            ThicknessMode, Thickness, LenMode, Len, X, XAnchor, XPad, Y, YAnchor, YPad,
            OutlineColor, OutlineWidth, BorderColor, BorderWidth, BgColor, TickMode,
            NTicks, Tick0, DTick, TickVals, TickText, Ticks, TickLabelPosition, TickLen,
            TickWidth, TickColor, ShowTickLabels, TickFont, TickAngle, TickFormat,
            TickFormatStops, TickPrefix, ShowTickPrefix, TickSuffix, ShowTickSuffix,
            SeparateThousands, ExponentFormat, MinExponent, ShowExponent, Title,
            TickValsSrc, TickTextSrc
        };

        var hashCode = 41;

        foreach (object member in members)
        {
            if (member != null)
            {
                hashCode = hashCode * 59 + member.GetHashCode();
            }
        }

        return hashCode;
    }
}
public void OnData(Ticks ticks) { }
/// <summary> /// Creates a new <see cref="UnixTimeTag"/>, given specified <see cref="Ticks"/>. /// </summary> /// <param name="timestamp">Timestamp in <see cref="Ticks"/> to create Unix time-tag from (minimum valid date is 1/1/1970).</param> /// <remarks> /// This constructor will accept a <see cref="DateTime"/> parameter since <see cref="Ticks"/> is implicitly castable to a <see cref="DateTime"/>. /// </remarks> public UnixTimeTag(Ticks timestamp) : base(BaseTicks, timestamp) { }
/// <summary> /// Initializes a new instance of the <see cref="Slice"/> class /// </summary> /// <param name="time">The timestamp for this slice of data</param> /// <param name="data">The raw data in this slice</param> /// <param name="tradeBars">The trade bars for this slice</param> /// <param name="quoteBars">The quote bars for this slice</param> /// <param name="ticks">This ticks for this slice</param> /// <param name="optionChains">The option chains for this slice</param> /// <param name="futuresChains">The futures chains for this slice</param> /// <param name="splits">The splits for this slice</param> /// <param name="dividends">The dividends for this slice</param> /// <param name="delistings">The delistings for this slice</param> /// <param name="symbolChanges">The symbol changed events for this slice</param> /// <param name="hasData">true if this slice contains data</param> public Slice(DateTime time, IEnumerable <BaseData> data, TradeBars tradeBars, QuoteBars quoteBars, Ticks ticks, OptionChains optionChains, FuturesChains futuresChains, Splits splits, Dividends dividends, Delistings delistings, SymbolChangedEvents symbolChanges, bool?hasData = null) { Time = time; _dataByType = new Dictionary <Type, object>(); // market data _data = new Lazy <DataDictionary <SymbolData> >(() => CreateDynamicDataDictionary(data)); HasData = hasData ?? _data.Value.Count > 0; _ticks = ticks; _bars = tradeBars; _quoteBars = quoteBars; _optionChains = optionChains; _futuresChains = futuresChains; // auxiliary data _splits = splits; _dividends = dividends; _delistings = delistings; _symbolChangedEvents = symbolChanges; }
/// <summary>
/// Calculates MW, MVAR and MVA for each configured calculation then publishes those measurements.
/// </summary>
/// <param name="frame">Input values for calculation.</param>
/// <param name="index">Index of frame within second.</param>
protected override void PublishFrame(IFrame frame, int index)
{
    ConcurrentDictionary<MeasurementKey, IMeasurement> measurements = frame.Measurements;
    Ticks totalCalculationTime = DateTime.UtcNow.Ticks;
    Ticks lastCalculationTime = DateTime.UtcNow.Ticks;
    List<IMeasurement> outputMeasurements = new List<IMeasurement>();
    IMeasurement measurement;
    int calculations = 0;

    foreach (PowerCalculation powerCalculation in m_configuredCalculations)
    {
        // Results default to NaN so a failed/incomplete calculation is distinguishable below
        double activePower = double.NaN, reactivePower = double.NaN, apparentPower = double.NaN;

        try
        {
            double voltageMagnitude = 0.0D, voltageAngle = 0.0D, currentMagnitude = 0.0D, currentAngle = 0.0D;
            bool allValuesReceivedWithGoodQuality = false;

            lastCalculationTime = DateTime.UtcNow.Ticks;

            // All four inputs (V magnitude/angle, I magnitude/angle) must be present
            // in this frame AND pass the quality check, otherwise no power is computed.
            if (measurements.TryGetValue(powerCalculation.VoltageMagnitudeMeasurementKey, out measurement) && measurement.ValueQualityIsGood())
            {
                voltageMagnitude = measurement.AdjustedValue;

                // Fall back to the adapter-level strategy when no per-key strategy is configured
                if (!m_adjustmentStrategies.TryGetValue(powerCalculation.VoltageMagnitudeMeasurementKey, out VoltageAdjustmentStrategy adjustmentStrategy))
                {
                    adjustmentStrategy = AdjustmentStrategy;
                }

                // Scale single-phase voltage up to an equivalent three-phase magnitude
                switch (adjustmentStrategy)
                {
                    case VoltageAdjustmentStrategy.LineToNeutral:
                        voltageMagnitude *= 3;
                        break;
                    case VoltageAdjustmentStrategy.LineToLine:
                        voltageMagnitude *= SqrtOf3;
                        break;
                }

                if (measurements.TryGetValue(powerCalculation.VoltageAngleMeasurementKey, out measurement) && measurement.ValueQualityIsGood())
                {
                    voltageAngle = measurement.AdjustedValue;

                    if (measurements.TryGetValue(powerCalculation.CurrentMagnitudeMeasurementKey, out measurement) && measurement.ValueQualityIsGood())
                    {
                        currentMagnitude = measurement.AdjustedValue;

                        if (measurements.TryGetValue(powerCalculation.CurrentAngleMeasurementKey, out measurement) && measurement.ValueQualityIsGood())
                        {
                            currentAngle = measurement.AdjustedValue;
                            allValuesReceivedWithGoodQuality = true;
                        }
                    }
                }
            }

            if (allValuesReceivedWithGoodQuality)
            {
                // Calculate power (P), reactive power (Q) and apparent power (|S|), scaled to mega-units
                Phasor voltage = new Phasor(PhasorType.Voltage, Angle.FromDegrees(voltageAngle), voltageMagnitude);
                Phasor current = new Phasor(PhasorType.Current, Angle.FromDegrees(currentAngle), currentMagnitude);

                activePower = Phasor.CalculateActivePower(voltage, current) / SI.Mega;
                reactivePower = Phasor.CalculateReactivePower(voltage, current) / SI.Mega;
                apparentPower = Phasor.CalculateApparentPower(voltage, current) / SI.Mega;
            }
        }
        catch (Exception ex)
        {
            OnProcessException(MessageLevel.Warning, ex);
        }
        finally
        {
            // Publishing happens in finally so that, when AlwaysProduceResult is set,
            // a NaN result is still emitted even after an exception above.
            if ((object)powerCalculation.ActivePowerOutputMeasurement != null)
            {
                Measurement activePowerMeasurement = Measurement.Clone(powerCalculation.ActivePowerOutputMeasurement, activePower, frame.Timestamp);

                if (AlwaysProduceResult || !double.IsNaN(activePowerMeasurement.Value))
                {
                    outputMeasurements.Add(activePowerMeasurement);
                    calculations++;

                    // Keep only the most recent ValuesToTrack results for diagnostics
                    m_lastActivePowerCalculations.Enqueue(activePowerMeasurement);

                    while (m_lastActivePowerCalculations.Count > ValuesToTrack)
                    {
                        m_lastActivePowerCalculations.TryDequeue(out measurement);
                    }
                }
            }

            if ((object)powerCalculation.ReactivePowerOutputMeasurement != null)
            {
                Measurement reactivePowerMeasurement = Measurement.Clone(powerCalculation.ReactivePowerOutputMeasurement, reactivePower, frame.Timestamp);

                if (AlwaysProduceResult || !double.IsNaN(reactivePowerMeasurement.Value))
                {
                    outputMeasurements.Add(reactivePowerMeasurement);
                    calculations++;

                    m_lastReactivePowerCalculations.Enqueue(reactivePowerMeasurement);

                    while (m_lastReactivePowerCalculations.Count > ValuesToTrack)
                    {
                        m_lastReactivePowerCalculations.TryDequeue(out measurement);
                    }
                }
            }

            if ((object)powerCalculation.ApparentPowerOutputMeasurement != null)
            {
                Measurement apparentPowerMeasurement = Measurement.Clone(powerCalculation.ApparentPowerOutputMeasurement, apparentPower, frame.Timestamp);

                if (AlwaysProduceResult || !double.IsNaN(apparentPowerMeasurement.Value))
                {
                    outputMeasurements.Add(apparentPowerMeasurement);
                    calculations++;

                    m_lastApparentPowerCalculations.Enqueue(apparentPowerMeasurement);

                    while (m_lastApparentPowerCalculations.Count > ValuesToTrack)
                    {
                        m_lastApparentPowerCalculations.TryDequeue(out measurement);
                    }
                }
            }

            // Per-calculation timing statistic
            m_averageCalculationTime.AddValue((DateTime.UtcNow.Ticks - lastCalculationTime).ToMilliseconds());
        }
    }

    // Frame-level timing / count statistics
    m_lastTotalCalculationTime = (DateTime.UtcNow.Ticks - totalCalculationTime).ToMilliseconds();
    m_averageTotalCalculationTime.AddValue(m_lastTotalCalculationTime);
    m_lastTotalCalculations = calculations;
    m_averageCalculationsPerFrame.AddValue(calculations);

    OnNewMeasurements(outputMeasurements);
}
/********************************************************
* CLASS METHODS
*********************************************************/
/// <summary>
/// Launch the algorithm manager to run this strategy
/// </summary>
/// <param name="job">Algorithm job</param>
/// <param name="algorithm">Algorithm instance</param>
/// <param name="feed">Datafeed object</param>
/// <param name="transactions">Transaction manager object</param>
/// <param name="results">Result handler object</param>
/// <param name="setup">Setup handler object</param>
/// <param name="realtime">Realtime processing object</param>
/// <remarks>Modify with caution</remarks>
public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
{
    //Initialize:
    var backwardsCompatibilityMode = false;
    var tradebarsType = typeof(TradeBars);
    var ticksType = typeof(Ticks);
    var startingPerformance = setup.StartingCapital;
    var backtestMode = (job.Type == PacketType.BacktestNode);
    var methodInvokers = new Dictionary<Type, MethodInvoker>();

    //Initialize Properties:
    _frontier = setup.StartingDate;
    _runtimeError = null;
    _algorithmId = job.AlgorithmId;
    _algorithmState = AlgorithmStatus.Running;
    _previousTime = setup.StartingDate.Date;

    //Create the method accessors to push generic types into algorithm: Find all OnData events:

    //Algorithm 1.0 Data Accessors.
    //If the users defined these methods, add them in manually. This allows keeping backwards compatibility to algorithm 1.0.
    var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar", new[] { typeof(Dictionary<string, TradeBar>) });
    var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

    //Algorithm 2.0 Data Generics Accessors.
    //New hidden access to tradebars with custom type.
    var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
    var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

    if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
    {
        backwardsCompatibilityMode = true;
        if (oldTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
        // BUG FIX: this previously tested oldTradeBarsMethodInfo, so an algorithm defining
        // OnTradeBar but not OnTick would call DelegateForCallMethod() on a null
        // oldTicksMethodInfo and throw a NullReferenceException.
        if (oldTicksMethodInfo != null) methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
    }
    else
    {
        backwardsCompatibilityMode = false;
        if (newTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
        if (newTicksMethodInfo != null) methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
    }

    //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
    foreach (var config in feed.Subscriptions)
    {
        //If type is a tradebar, combine tradebars and ticks into unified array:
        if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
        {
            //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
            var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

            //If we already have this Type-handler then don't add it to invokers again.
            if (methodInvokers.ContainsKey(config.Type)) continue;

            //If we couldn't find the event handler, let the user know we can't fire that event.
            if (genericMethod == null)
            {
                _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) { }");
                _algorithmState = AlgorithmStatus.RuntimeError;
                return;
            }
            methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
        }
    }

    //Loop over the queues: get a data collection, then pass them all into relevent methods in the algorithm.
    Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
    foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
    {
        //Check this backtest is still running:
        if (_algorithmState != AlgorithmStatus.Running) break;

        //Go over each time stamp we've collected, pass it into the algorithm in order:
        foreach (var time in newData.Keys)
        {
            //Set the time frontier:
            _frontier = time;

            //Execute with TimeLimit Monitor:
            if (Isolator.IsCancellationRequested) return;

            //Refresh the realtime event monitor:
            realtime.SetTime(time);

            //Fire EOD if the time packet we just processed is greater
            if (backtestMode && _previousTime.Date != time.Date)
            {
                //Sample the portfolio value over time for chart.
                results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
                if (startingPerformance == 0)
                {
                    results.SamplePerformance(_previousTime.Date, 0);
                }
                else
                {
                    results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
                }
                startingPerformance = algorithm.Portfolio.TotalPortfolioValue;
            }

            //Check if the user's signalled Quit: loop over data until day changes.
            if (algorithm.GetQuit())
            {
                _algorithmState = AlgorithmStatus.Quit;
                break;
            }

            //Pass in the new time first:
            algorithm.SetDateTime(time);

            //Trigger the data events: Invoke the types we have data for:
            var oldBars = new Dictionary<string, TradeBar>();
            var oldTicks = new Dictionary<string, List<Tick>>();
            var newBars = new TradeBars(time);
            var newTicks = new Ticks(time);

            //Invoke all non-tradebars, non-ticks methods:
            // --> i == Subscription Configuration Index, so we don't need to compare types.
            foreach (var i in newData[time].Keys)
            {
                //Data point and config of this point:
                var dataPoints = newData[time][i];
                var config = feed.Subscriptions[i];

                //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                foreach (var dataPoint in dataPoints)
                {
                    //Update the securities properties: first before calling user code to avoid issues with data
                    algorithm.Securities.Update(time, dataPoint);

                    //Update registered consolidators for this symbol index
                    for (var j = 0; j < config.Consolidators.Count; j++)
                    {
                        config.Consolidators[j].Update(dataPoint);
                    }

                    switch (config.Type.Name)
                    {
                        case "TradeBar":
                            var bar = dataPoint as TradeBar;
                            try
                            {
                                if (bar != null)
                                {
                                    if (backwardsCompatibilityMode)
                                    {
                                        if (!oldBars.ContainsKey(bar.Symbol)) oldBars.Add(bar.Symbol, bar);
                                    }
                                    else
                                    {
                                        if (!newBars.ContainsKey(bar.Symbol)) newBars.Add(bar.Symbol, bar);
                                    }
                                }
                            }
                            catch (Exception err)
                            {
                                Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                                Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                            }
                            break;

                        case "Tick":
                            var tick = dataPoint as Tick;
                            if (tick != null)
                            {
                                if (backwardsCompatibilityMode)
                                {
                                    if (!oldTicks.ContainsKey(tick.Symbol)) { oldTicks.Add(tick.Symbol, new List<Tick>()); }
                                    oldTicks[tick.Symbol].Add(tick);
                                }
                                else
                                {
                                    if (!newTicks.ContainsKey(tick.Symbol)) { newTicks.Add(tick.Symbol, new List<Tick>()); }
                                    newTicks[tick.Symbol].Add(tick);
                                }
                            }
                            break;

                        default:
                            //Send data into the generic algorithm event handlers
                            try
                            {
                                methodInvokers[config.Type](algorithm, dataPoint);
                            }
                            catch (Exception err)
                            {
                                _runtimeError = err;
                                _algorithmState = AlgorithmStatus.RuntimeError;
                                Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                                return;
                            }
                            break;
                    }
                }
            }

            //After we've fired all other events in this second, fire the pricing events:
            if (backwardsCompatibilityMode)
            {
                try
                {
                    if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) methodInvokers[tradebarsType](algorithm, oldBars);
                    if (oldTicksMethodInfo != null && oldTicks.Count > 0) methodInvokers[ticksType](algorithm, oldTicks);
                }
                catch (Exception err)
                {
                    _runtimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }
            }
            else
            {
                try
                {
                    if (newTradeBarsMethodInfo != null && newBars.Count > 0) methodInvokers[tradebarsType](algorithm, newBars);
                    if (newTicksMethodInfo != null && newTicks.Count > 0) methodInvokers[ticksType](algorithm, newTicks);
                }
                catch (Exception err)
                {
                    _runtimeError = err;
                    _algorithmState = AlgorithmStatus.RuntimeError;
                    Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                    return;
                }
            }

            //If its the historical/paper trading models, wait until market orders have been "filled"
            // Manually trigger the event handler to prevent thread switch.
            transactions.ProcessSynchronousEvents();

            //Save the previous time for the sample calculations
            _previousTime = time;

        } // End of Time Loop

        // Process any required events of the results handler such as sampling assets, equity, or stock prices.
        results.ProcessSynchronousEvents();
    } // End of ForEach DataStream

    //Stream over:: Send the final packet and fire final events:
    Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
    try
    {
        algorithm.OnEndOfAlgorithm();
    }
    catch (Exception err)
    {
        _algorithmState = AlgorithmStatus.RuntimeError;
        _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
        Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
        return;
    }

    // Process any required events of the results handler such as sampling assets, equity, or stock prices.
    results.ProcessSynchronousEvents(forceProcess: true);

    //Liquidate Holdings for Calculations:
    if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
    {
        Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
        algorithm.Liquidate();
        results.LogMessage("Algorithm Liquidated");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
    }

    //Manually stopped the algorithm
    if (_algorithmState == AlgorithmStatus.Stopped)
    {
        Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
        results.LogMessage("Algorithm Stopped");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
    }

    //Backtest deleted.
    if (_algorithmState == AlgorithmStatus.Deleted)
    {
        Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
        results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
        results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
    }

    //Algorithm finished, send regardless of commands:
    results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

    //Take final samples:
    results.SampleRange(algorithm.GetChartUpdates());
    results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
    // BUG FIX: guard against divide-by-zero when starting capital is zero,
    // mirroring the same guard used inside the time loop above.
    if (startingPerformance == 0)
    {
        results.SamplePerformance(_frontier, 0);
    }
    else
    {
        results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
    }
}
/// <summary>
/// Handles an update request for an existing raid: refreshes the raid's attributes,
/// reconciles raid ticks (removed / added / updated), replaces its item awards, and
/// commits everything in a single database transaction before auditing and refreshing caches.
/// Any exception is converted to an HTTP 500 response rather than propagated.
/// </summary>
/// <param name="pRequest">Incoming API Gateway request; body is the raid JSON model, "clientid" header selects the tenant.</param>
/// <param name="pContext">Lambda execution context (unused here).</param>
/// <param name="pCognitoUser">Authenticated user; username is recorded in the audit trail.</param>
/// <param name="pDatabase">EF database context for the tenant data.</param>
/// <returns>200 response echoing the model on success; 500 response with the error message on failure.</returns>
private APIGatewayProxyResponse HandleUpdate(APIGatewayProxyRequest pRequest, ILambdaContext pContext, CognitoUser pCognitoUser, opendkpContext pDatabase)
{
    // Default response if something fails before an explicit error is captured
    var vResponse = HttpHelper.HandleError("[InsertOrUpdateAdjustment] Unknown error backend...", 500);
    try
    {
        //Populate Model
        dynamic vModel = JsonConvert.DeserializeObject(pRequest.Body);
        int vIdRaid = vModel.IdRaid;

        //We need to retrieve the ClientId for multitenancy purposes
        var vClientId = pRequest.Headers["clientid"];

        Dictionary<string, Characters> vCharacterModels = RaidHelper.GetCharacterModels(pDatabase, vClientId, vModel);

        using (var dbContextTransaction = pDatabase.Database.BeginTransaction())
        {
            // Load the raid with its ticks, item awards and pool eagerly so the
            // reconciliation below operates on fully populated navigation collections.
            Raids vRaid = pDatabase.Raids.
                Include("Ticks.TicksXCharacters").
                Include("ItemsXCharacters.Item").
                Include("ItemsXCharacters.Character").
                Include("IdPoolNavigation").
                FirstOrDefault(x => x.ClientId.Equals(vClientId) && x.IdRaid == vIdRaid);

            // NOTE(review): vRaid can be null if the raid id/tenant doesn't match; the
            // resulting NullReferenceException is only surfaced via the generic catch below.
            dynamic vOldModel = GetAuditModel(vRaid);

            //Update some attributes of the raid
            vRaid.Name = vModel.Name;
            vRaid.Timestamp = vModel.Timestamp;
            // NOTE(review): server-local time is stored here; confirm whether UTC was intended.
            vRaid.UpdatedTimestamp = DateTime.Now;
            vRaid.UpdatedBy = vModel.UpdatedBy;
            vRaid.IdPool = vModel.Pool.IdPool;
            vRaid.ClientId = vClientId;

            //Three Cases to handle: Raid Tick Added, Raid Tick Removed, Raid Tick Updated
            #region Handle Raid Tick Removed here
            // Any persisted tick whose id no longer appears in the incoming model is removed.
            SimpleTick[] vSimpleTicks = vModel.Ticks.ToObject<SimpleTick[]>();
            List<Ticks> vRemoveTicks = new List<Ticks>();
            foreach (Ticks vIndex in vRaid.Ticks)
            {
                var vFound = vSimpleTicks.FirstOrDefault(x => x.TickId == vIndex.TickId);
                if (vFound == null)
                {
                    vRemoveTicks.Add(vIndex);
                }
            }
            foreach (Ticks vTick in vRemoveTicks)
            {
                vRaid.Ticks.Remove(vTick);
            }
            #endregion

            #region Handle Raid Tick Added & Raid Tick Updated Here
            foreach (var vTick in vModel.Ticks)
            {
                int? vTickId = vTick.TickId;

                //If tickId is null, we have to create and insert a new one
                if (vTickId == null)
                {
                    RaidHelper.CreateTick(pDatabase, vClientId, vRaid.IdRaid, vTick, vCharacterModels);
                }
                else
                {
                    Ticks vFoundTick = vRaid.Ticks.FirstOrDefault(x => x.TickId == vTickId);
                    if (vFoundTick != null)
                    {
                        vFoundTick.Description = vTick.Description;
                        vFoundTick.Value = vTick.Value;
                        vFoundTick.ClientId = vClientId;

                        //Now that I've found the tick, rebuild its attendee list from scratch
                        vFoundTick.TicksXCharacters = new List<TicksXCharacters>();
                        foreach (string vAttendee in vTick.Attendees)
                        {
                            vFoundTick.TicksXCharacters.Add(new TicksXCharacters
                            {
                                IdCharacterNavigation = vCharacterModels[vAttendee.ToLower()],
                                IdTickNavigation = vFoundTick,
                                ClientId = vClientId
                            });
                        }
                    }
                    else
                    {
                        // Client referenced a tick id we don't have — abort the whole update
                        throw new Exception(string.Format("The tick id {0} does not exist, will not save raid", vTickId));
                    }
                }
            }
            #endregion

            //Handle Items: existing awards are replaced wholesale by the incoming model
            vRaid.ItemsXCharacters = new List<ItemsXCharacters>();
            RaidHelper.InsertRaidItems(pDatabase, vClientId, vModel, vCharacterModels);

            //Save
            pDatabase.SaveChanges();
            dbContextTransaction.Commit();

            //Respond
            vResponse = HttpHelper.HandleResponse(vModel, 200);

            //Audit
            AuditHelper.InsertAudit(pDatabase, vClientId, vOldModel, vModel, pCognitoUser.Username, Audit.ACTION_RAID_UPDATE);

            //Update Caches (synchronously blocking on the async cache refresh)
            int vStatus = CacheManager.UpdateSummaryCacheAsync(vClientId).GetAwaiter().GetResult();
            Console.WriteLine("SummaryCacheResponse=" + vStatus);
            vStatus = CacheManager.UpdateItemCacheAsync(vClientId).GetAwaiter().GetResult();
            Console.WriteLine("ItemCacheResponse=" + vStatus);
        }
    }
    catch (Exception vException)
    {
        // All failures become a 500 with the raw exception message
        vResponse = HttpHelper.HandleError(vException.Message, 500);
    }
    return (vResponse);
}
/// <summary>
/// Writes the next COMTRADE record in binary format.
/// </summary>
/// <param name="output">Destination stream.</param>
/// <param name="schema">Source schema.</param>
/// <param name="timestamp">Record timestamp (implicitly castable as <see cref="DateTime"/>).</param>
/// <param name="values">Values to write - 16-bit digitals should exist as a word in an individual double value.</param>
/// <param name="sample">User incremented sample index.</param>
/// <param name="injectFracSecValue">Determines if FRACSEC value should be automatically injected into stream as first digital - defaults to <c>true</c>.</param>
/// <param name="fracSecValue">FRACSEC value to inject into output stream - defaults to 0x0000.</param>
/// <remarks>
/// This function is primarily intended to write COMTRADE binary data records based on synchrophasor data
/// (see Annex H: Schema for Phasor Data 2150 Using the COMTRADE File Standard in IEEE C37.111-2010),
/// it may be necessary to manually write records for other COMTRADE needs (e.g., non 16-bit digitals).
/// </remarks>
public static void WriteNextRecordBinary(Stream output, Schema schema, Ticks timestamp, double[] values, uint sample, bool injectFracSecValue = true, ushort fracSecValue = 0x0000)
{
    // Record time is stored relative to the schema's start time, scaled by the schema time factor
    timestamp -= schema.StartTime.Value;
    uint elapsedMicroseconds = (uint)(timestamp.ToMicroseconds() / schema.TimeFactor);

    // Record header: 32-bit sample number followed by 32-bit elapsed microseconds
    output.Write(LittleEndian.GetBytes(sample), 0, 4);
    output.Write(LittleEndian.GetBytes(elapsedMicroseconds), 0, 4);

    // FRACSEC is injected immediately before the first digital word, if any
    bool fracSecPending = true;

    for (int index = 0; index < values.Length; index++)
    {
        double channelValue = values[index];

        if (index < schema.AnalogChannels.Length)
        {
            // Undo the channel's adder/multiplier so the raw value is persisted
            channelValue -= schema.AnalogChannels[index].Adder;
            channelValue /= schema.AnalogChannels[index].Multiplier;
        }
        else if (fracSecPending)
        {
            fracSecPending = false;

            if (injectFracSecValue)
                output.Write(LittleEndian.GetBytes(fracSecValue), 0, 2);
        }

        output.Write(LittleEndian.GetBytes((ushort)channelValue), 0, 2);
    }

    // No digital channels were encountered — still emit FRACSEC when requested
    if (fracSecPending && injectFracSecValue)
        output.Write(LittleEndian.GetBytes(fracSecValue), 0, 2);
}
/// <summary>
/// Console test-harness entry point: optionally spins up a measurement concentrator,
/// configures a <see cref="MultiProtocolFrameParser"/> from a hard-coded connection string,
/// streams frames until the user presses enter, then shuts everything down.
/// </summary>
/// <param name="args">Command line arguments (unused).</param>
public static void Main(string[] args)
{
    m_definedMeasurements = new ConcurrentDictionary<string, MeasurementMetadata>();
    m_definedDevices = new ConcurrentDictionary<ushort, ConfigurationCell>();

    // Optional CSV log of input timestamps
    if (WriteLogs)
    {
        m_exportFile = new StreamWriter(FilePath.GetAbsolutePath("InputTimestamps.csv"));
    }

    if (TestConcentrator)
    {
        // Create a new concentrator
        concentrator = new Concentrator(WriteLogs, FilePath.GetAbsolutePath("OutputTimestamps.csv"));
        concentrator.TimeResolution = 333000;
        concentrator.FramesPerSecond = 30;
        concentrator.LagTime = 3.0D;
        concentrator.LeadTime = 9.0D;
        concentrator.PerformTimestampReasonabilityCheck = false;
        concentrator.ProcessByReceivedTimestamp = true;
        concentrator.Start();
    }

    // Create a new protocol parser
    parser = new MultiProtocolFrameParser();
    parser.AllowedParsingExceptions = 500;
    parser.ParsingExceptionWindow = 5;

    // Attach to desired events
    parser.ConnectionAttempt += parser_ConnectionAttempt;
    parser.ConnectionEstablished += parser_ConnectionEstablished;
    parser.ConnectionException += parser_ConnectionException;
    parser.ParsingException += parser_ParsingException;
    parser.ReceivedConfigurationFrame += parser_ReceivedConfigurationFrame;
    parser.ReceivedDataFrame += parser_ReceivedDataFrame;
    parser.ReceivedFrameBufferImage += parser_ReceivedFrameBufferImage;
    parser.ConnectionTerminated += parser_ConnectionTerminated;

    // Define the connection string — alternatives below kept as samples for other protocols/transports
    //parser.ConnectionString = @"phasorProtocol=IeeeC37_118V1; transportProtocol=UDP; localport=5000; server=233.123.123.123:5000; interface=0.0.0.0";
    //parser.ConnectionString = @"phasorProtocol=Ieee1344; transportProtocol=File; file=D:\Projects\Applications\openPDC\Synchrophasor\Current Version\Build\Output\Debug\Applications\openPDC\Sample1344.PmuCapture";
    //parser.ConnectionString = @"phasorProtocol=Macrodyne; accessID=1; transportProtocol=File; skipDisableRealTimeData = true; file=C:\Users\Ritchie\Desktop\Development\Macrodyne\ING.out; iniFileName=C:\Users\Ritchie\Desktop\Development\Macrodyne\BCH18Aug2011.ini; deviceLabel=ING1; protocolVersion=G";
    //parser.ConnectionString = @"phasorProtocol=Iec61850_90_5; accessID=1; transportProtocol=UDP; skipDisableRealTimeData = true; localPort=102; interface=0.0.0.0; commandChannel={transportProtocol=TCP; server=172.21.1.201:4712; interface=0.0.0.0}";
    //parser.ConnectionString = @"phasorProtocol=FNET; transportProtocol=TCP; server=172.21.4.100:4001; interface=0.0.0.0; isListener=false";
    //parser.ConnectionString = @"phasorProtocol=Macrodyne; transportProtocol=Serial; port=COM6; baudrate=38400; parity=None; stopbits=One; databits=8; dtrenable=False; rtsenable=False;";
    //parser.ConnectionString = @"phasorProtocol=SelFastMessage; transportProtocol=Serial; port=COM5; baudrate=57600; parity=None; stopbits=One; databits=8; dtrenable=False; rtsenable=False;";
    //parser.ConnectionString = @"phasorProtocol=IEEEC37_118v1; transportProtocol=File; file=C:\Users\Ritchie\Desktop\MTI_Test_3phase.PmuCapture; checkSumValidationFrameTypes=DataFrame,HeaderFrame,CommandFrame";

    parser.ConnectionString = @"phasorProtocol=IEEEC37_118V1; transportProtocol=tcp; accessID=105; server=172.31.105.135:4712; interface=0.0.0.0; isListener=false";

    Dictionary<string, string> settings = parser.ConnectionString.ParseKeyValuePairs();
    string setting;

    // TODO: These should be optionally picked up from connection string inside of MPFP
    // Apply other settings as needed
    if (settings.TryGetValue("accessID", out setting))
    {
        parser.DeviceID = ushort.Parse(setting);
    }
    if (settings.TryGetValue("simulateTimestamp", out setting))
    {
        parser.InjectSimulatedTimestamp = setting.ParseBoolean();
    }
    if (settings.TryGetValue("allowedParsingExceptions", out setting))
    {
        parser.AllowedParsingExceptions = int.Parse(setting);
    }
    if (settings.TryGetValue("parsingExceptionWindow", out setting))
    {
        parser.ParsingExceptionWindow = Ticks.FromSeconds(double.Parse(setting));
    }
    if (settings.TryGetValue("autoStartDataParsingSequence", out setting))
    {
        parser.AutoStartDataParsingSequence = setting.ParseBoolean();
    }
    if (settings.TryGetValue("skipDisableRealTimeData", out setting))
    {
        parser.SkipDisableRealTimeData = setting.ParseBoolean();
    }

    // When connecting to a file based resource you may want to loop the data
    parser.AutoRepeatCapturedPlayback = true;

    // Start frame parser
    // NOTE(review): this overrides any autoStartDataParsingSequence value parsed above
    parser.AutoStartDataParsingSequence = true;

    Console.WriteLine("ConnectionString: {0}", parser.ConnectionString);

    parser.Start();

    // To keep the console open while receiving live data with AutoRepeatCapturedPlayback = false, uncomment the following line of code:
    Console.WriteLine("Press <enter> to terminate application...");
    Console.ReadLine();

    parser.Stop();

    // Stop concentrator
    if (TestConcentrator)
    {
        concentrator.Stop();
    }

    if (WriteLogs)
    {
        m_exportFile.Close();
    }
}
/// <summary> /// Creates a new <see cref="ConfigurationFrame"/> from specified parameters. /// </summary> /// <param name="idCode">The ID code of this <see cref="ConfigurationFrame"/>.</param> /// <param name="timestamp">The exact timestamp, in <see cref="Ticks"/>, of the data represented by this <see cref="ConfigurationFrame"/>.</param> /// <param name="frameRate">The defined frame rate of this <see cref="ConfigurationFrame"/>.</param> public ConfigurationFrame(ushort idCode, Ticks timestamp, ushort frameRate) : base(idCode, new ConfigurationCellCollection(), timestamp, frameRate) { }
/// <inheritdoc />
public bool Equals([AllowNull] ColorBar other)
{
    if (other == null)
    {
        return false;
    }

    if (ReferenceEquals(this, other))
    {
        return true;
    }

    // object.Equals(a, b) returns true when both operands are null, false when
    // exactly one is null, and otherwise defers to a.Equals(b) — the same outcome
    // as the expanded "a == other.a || a != null && a.Equals(other.a)" pattern
    // this method previously repeated for every property.
    static bool Eq(object a, object b) => Equals(a, b);

    // Sequences are equal when they are the same reference (or both null), or when
    // both are non-null and element-wise equal.
    static bool SeqEq<T>(IEnumerable<T> a, IEnumerable<T> b) =>
        Equals(a, b) || (a != null && b != null && a.SequenceEqual(b));

    // Property order (and therefore short-circuit behavior) matches the original
    // hand-expanded comparison chain.
    return Eq(ThicknessMode, other.ThicknessMode)
        && Eq(Thickness, other.Thickness)
        && Eq(LenMode, other.LenMode)
        && Eq(Len, other.Len)
        && Eq(X, other.X)
        && Eq(XAnchor, other.XAnchor)
        && Eq(XPad, other.XPad)
        && Eq(Y, other.Y)
        && Eq(YAnchor, other.YAnchor)
        && Eq(YPad, other.YPad)
        && Eq(OutlineColor, other.OutlineColor)
        && Eq(OutlineWidth, other.OutlineWidth)
        && Eq(BorderColor, other.BorderColor)
        && Eq(BorderWidth, other.BorderWidth)
        && Eq(BgColor, other.BgColor)
        && Eq(TickMode, other.TickMode)
        && Eq(NTicks, other.NTicks)
        && Eq(Tick0, other.Tick0)
        && Eq(DTick, other.DTick)
        && SeqEq(TickVals, other.TickVals)
        && SeqEq(TickText, other.TickText)
        && Eq(Ticks, other.Ticks)
        && Eq(TickLabelPosition, other.TickLabelPosition)
        && Eq(TickLen, other.TickLen)
        && Eq(TickWidth, other.TickWidth)
        && Eq(TickColor, other.TickColor)
        && Eq(ShowTickLabels, other.ShowTickLabels)
        && Eq(TickFont, other.TickFont)
        && Eq(TickAngle, other.TickAngle)
        && Eq(TickFormat, other.TickFormat)
        && SeqEq(TickFormatStops, other.TickFormatStops)
        && Eq(TickPrefix, other.TickPrefix)
        && Eq(ShowTickPrefix, other.ShowTickPrefix)
        && Eq(TickSuffix, other.TickSuffix)
        && Eq(ShowTickSuffix, other.ShowTickSuffix)
        && Eq(SeparateThousands, other.SeparateThousands)
        && Eq(ExponentFormat, other.ExponentFormat)
        && Eq(MinExponent, other.MinExponent)
        && Eq(ShowExponent, other.ShowExponent)
        && Eq(Title, other.Title)
        && Eq(TickValsSrc, other.TickValsSrc)
        && Eq(TickTextSrc, other.TickTextSrc);
}