public ListedSensor(Sensor s, Dataset d) { if (s == null) throw new ArgumentNullException("s", "The specified sensor cannot be null."); if (d == null) throw new ArgumentNullException("d", "The specified dataset cannot be null."); _sensor = s; _ds = d; }
/// <summary> /// Calculates the densities for the dataset /// </summary> /// <param name="dataset">The dataset to calculate from</param> /// <returns>The list of densities</returns> public static IEnumerable<DensitySeries> CalculateDensity(Dataset dataset) { var densitySeries = new List<DensitySeries>(); foreach (var sensor in dataset.Sensors.Where(x => x.SensorType == "Water_Temperature")) { var series = new DensitySeries(sensor.Elevation); foreach (var value in sensor.CurrentState.Values) { var density = (1 - (((value.Value + 288.9414) / (508929.2 * (value.Value + 68.12963))) * Math.Pow((value.Value - 3.9863), 2))) * 1000; series.AddValue(value.Key, density); } densitySeries.Add(series); } return densitySeries.ToArray(); }
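// Worked example (illustrative sketch, not part of the original code): the polynomial above is a
// standard fresh-water density approximation, so a reading of 20 degrees C should come out at roughly
// 998.2 kg/m^3. A hypothetical helper like this can be used to sanity-check CalculateDensity.
public static double DensityFromTemperature(double temperatureCelsius)
{
    // Same expression as in CalculateDensity, applied to a single temperature reading.
    return (1 - (((temperatureCelsius + 288.9414) / (508929.2 * (temperatureCelsius + 68.12963))) * Math.Pow(temperatureCelsius - 3.9863, 2))) * 1000;
}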
public static string DatasetExportRootFolder(Dataset dataset) { var directory = Path.Combine(AppDataPath, "Backups", "Exports", dataset.Site.Name); if (!Directory.Exists(directory)) Directory.CreateDirectory(directory); return directory; }
/// <summary> /// Calculates the metalimnion boundaries /// </summary> /// <param name="dataset">The dataset to use</param> /// <param name="thermoclineDepths">The precalculated thermocline depths for the dataset</param> /// <param name="minimumMetalimionSlope">The minimum metalimnion slope</param> /// <returns></returns> public static Dictionary<DateTime, MetalimnionBoundariesDetails> CalculateMetalimnionBoundaries(Dataset dataset, Dictionary<DateTime, ThermoclineDepthDetails> thermoclineDepths, float minimumMetalimionSlope = 0.1f) { var metalimnionBoundaries = new Dictionary<DateTime, MetalimnionBoundariesDetails>(); foreach (var timestamp in thermoclineDepths.Keys) { var depths = dataset.Sensors.Where(x => x.SensorType == "Water_Temperature" && x.CurrentState.Values.ContainsKey(timestamp)).Select(x => x.Elevation).Distinct().OrderBy(x => x).ToArray(); var meanDepths = new float[depths.Length - 1]; for (var i = 0; i < depths.Length - 1; i++) { meanDepths[i] = (depths[i] + depths[i + 1]) / 2; } var metalimnionBoundary = new MetalimnionBoundariesDetails(); var sortedDepths = meanDepths.Union(new[] { thermoclineDepths[timestamp].ThermoclineDepth }).OrderBy(x => x).ToArray(); var sortedDepthsParent = meanDepths.Union(new[] { thermoclineDepths[timestamp].SeasonallyAdjustedThermoclineDepth }).OrderBy(x => x).ToArray(); var points = new Point[meanDepths.Length]; for (var i = 0; i < points.Length; i++) { points[i] = new Point(meanDepths[i], thermoclineDepths[timestamp].DrhoDz[i]); } var slopes = Interpolate(points, sortedDepths).ToArray(); var slopesParent = Interpolate(points, sortedDepthsParent).ToArray(); var thermoclineIndex = Array.IndexOf(slopes.Select(x => x.X).ToArray(), thermoclineDepths[timestamp].ThermoclineDepth); var thermoclineIndexParent = Array.IndexOf(slopesParent.Select(x => x.X).ToArray(), thermoclineDepths[timestamp].SeasonallyAdjustedThermoclineDepth); #region Top metalimnionBoundary.Top = meanDepths[0]; int k; for (k = thermoclineIndex; k > -1; k--) { if (slopes[k].Y < minimumMetalimionSlope) { metalimnionBoundary.Top = sortedDepths[k]; break; } } if (k == -1) k = 0; if (thermoclineIndex - k > 1 && slopes[thermoclineIndex].Y > minimumMetalimionSlope) { var outsidePoints = new List<Point>(); for (var j = k; j <= thermoclineIndex; j++) { outsidePoints.Add(new Point(slopes[j].Y, sortedDepths[j])); } metalimnionBoundary.Top = (float)Interpolate(outsidePoints.ToArray(), new[] { minimumMetalimionSlope })[0].Y; } #endregion #region Bottom metalimnionBoundary.Bottom = meanDepths.Last(); for (k = thermoclineIndex; k < slopes.Length; k++) { if (slopes[k].Y < minimumMetalimionSlope) { metalimnionBoundary.Bottom = sortedDepths[k]; break; } } if (k == slopes.Length) k--; if (k - thermoclineIndex > 1 && slopes[thermoclineIndex].Y > minimumMetalimionSlope) { var outsidePoints = new List<Point>(); for (var j = thermoclineIndex; j <= k; j++) { outsidePoints.Add(new Point(slopes[j].Y, sortedDepths[j])); } metalimnionBoundary.Bottom = (float)Interpolate(outsidePoints.ToArray(), new[] { minimumMetalimionSlope })[0].Y; } #endregion #region IfParent if (thermoclineDepths[timestamp].HasSeaonallyAdjusted) { #region Top metalimnionBoundary.SeasonallyAdjustedTop = meanDepths[0]; int m; for (m = thermoclineIndexParent; m > -1; m--) { if (slopesParent[m].Y < minimumMetalimionSlope) { metalimnionBoundary.SeasonallyAdjustedTop = sortedDepthsParent[m]; break; } } if (m == -1) m = 0; if (thermoclineIndexParent - m > 0 && slopesParent[thermoclineIndexParent].Y > minimumMetalimionSlope) { var outsidePoints 
= new List<Point>(); for (var j = m; j <= thermoclineIndexParent; j++) { outsidePoints.Add(new Point(slopesParent[j].Y, sortedDepthsParent[j])); } metalimnionBoundary.SeasonallyAdjustedTop = (float)Interpolate(outsidePoints.ToArray(), new[] { minimumMetalimionSlope })[0].Y; } #endregion #region Bottom metalimnionBoundary.SeasonallyAdjustedBottom = meanDepths.Last(); for (m = thermoclineIndexParent; m < slopesParent.Length; m++) { if (slopesParent[m].Y < minimumMetalimionSlope) { metalimnionBoundary.SeasonallyAdjustedBottom = sortedDepthsParent[m]; break; } } if (m == slopesParent.Length) m--; if (m - thermoclineIndexParent > 0 && slopesParent[thermoclineIndexParent].Y > minimumMetalimionSlope) { var outsidePoints = new List<Point>(); for (var j = thermoclineIndexParent; j <= m; j++) { outsidePoints.Add(new Point(slopesParent[j].Y, sortedDepthsParent[j])); } metalimnionBoundary.SeasonallyAdjustedBottom = (float)Interpolate(outsidePoints.ToArray(), new[] { minimumMetalimionSlope })[0].Y; } #endregion } else { metalimnionBoundary.NoSeasonalFound(); } #endregion metalimnionBoundaries[timestamp] = metalimnionBoundary; } return metalimnionBoundaries; }
/// <summary> /// Gets a value indicating whether or not this sensor shows signs of physical failure. /// </summary> /// <param name="dataset">The dataset, containing information about the data interval for this sensor.</param> /// <returns>A value indicating whether or not this sensor is failing.</returns> public bool IsFailing(Dataset dataset) { if (Properties.Settings.Default.IgnoreSensorErrorDetection) return false; if (dataset == null) throw new NullReferenceException("You must provide a non-null dataset."); if (CurrentState == null) throw new NullReferenceException("No active sensor state exists for this sensor, so you can't detect whether it is failing or not."); if (CurrentState.Values.Count == 0) return false; var baseTime = CurrentState.Values.ElementAt(0).Key; var incidence = 0; var time = 0; for (int i = 0; i < dataset.ExpectedDataPointCount; i++) { var key = baseTime.AddMinutes(time); if (CurrentState.Values.ContainsKey(key)) incidence = 0; else incidence++; if (incidence == ErrorThreshold) return true; time += dataset.DataInterval; } return false; }
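// Usage sketch (illustrative, not part of the original class): list the sensors in a dataset that
// currently show signs of physical failure. Assumes the Dataset.Sensors collection used elsewhere
// in this file and that System.Linq is available.
public static IEnumerable<Sensor> GetFailingSensors(Dataset dataset)
{
    // IsFailing honours the IgnoreSensorErrorDetection setting, so this may legitimately return nothing.
    return dataset.Sensors.Where(sensor => sensor.IsFailing(dataset));
}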
/// <summary> /// Creates a new sensor. /// </summary> /// <param name="name">The name of the sensor.</param> /// <param name="description">A description of the sensor's function or purpose.</param> /// <param name="upperLimit">The upper limit for values reported by this sensor.</param> /// <param name="lowerLimit">The lower limit for values reported by this sensor.</param> /// <param name="unit">The unit used to report values given by this sensor.</param> /// <param name="maxRateOfChange">The maximum rate of change allowed by this sensor.</param> /// <param name="undoStack">A stack containing previous sensor states.</param> /// <param name="redoStack">A stack containing sensor states created after the modifications of the current state.</param> /// <param name="calibrations">A list of dates, on which calibration was performed.</param> /// <param name="errorThreshold">The number of times a failure-indicating value can occur before this sensor is flagged as failing.</param> /// <param name="owner">The dataset that owns the sensor</param> public Sensor(string name, string description, float upperLimit, float lowerLimit, string unit, float maxRateOfChange, Stack<SensorState> undoStack, Stack<SensorState> redoStack, List<Calibration> calibrations, int errorThreshold, Dataset owner) : this(name, description, upperLimit, lowerLimit, unit, maxRateOfChange, undoStack, redoStack, calibrations, errorThreshold, owner, SummaryType.Average) { }
/// <summary> /// Creates a new sensor, using default values for the undo/redo stacks, calibration list and error threshold. /// </summary> /// <param name="name">The name of the sensor.</param> /// <param name="description">A description of the sensor's function or purpose.</param> /// <param name="upperLimit">The upper limit for values reported by this sensor.</param> /// <param name="lowerLimit">The lower limit for values reported by this sensor.</param> /// <param name="unit">The unit used to report values given by this sensor.</param> /// <param name="maxRateOfChange">The maximum rate of change allowed by this sensor.</param> /// <param name="owner">The dataset that owns the sensor</param> public Sensor(string name, string description, float upperLimit, float lowerLimit, string unit, float maxRateOfChange, Dataset owner) : this(name, description, upperLimit, lowerLimit, unit, maxRateOfChange, new Stack<SensorState>(), new Stack<SensorState>(), new List<Calibration>(), owner) { }
/// <summary> /// Loads a meta file and attaches its site information to a dataset /// </summary> /// <param name="filename">The file path of the meta file</param> /// <param name="attachedDataset">The dataset the site information is to be attached to</param> /// <returns>True if the site editor view was obtained and the meta file loaded; otherwise false</returns> public Boolean LoadSiteFromMeta(string filename, Dataset attachedDataset) { var view = _container.GetInstance(typeof(EditSiteDataViewModel), "EditSiteDataViewModel") as EditSiteDataViewModel; if (view == null) { EventLogger.LogError(null, "Loading Site Editor", "Critical! Failed to get a View!!"); return false; } view.DataSet = attachedDataset; if (attachedDataset.Site.PrimaryContact == null) view.IsNewSite = true; view.LoadFromMeta(filename); view.Deactivated += (o, e) => { _dataSetFiles = null; NotifyOfPropertyChange(() => SiteNames); }; view.BtnSiteDone(); NotifyOfPropertyChange(() => EditingNotes); return true; }
/// <summary> /// Exports a data set to a CSV file. /// The file is saved in the same format as the original CSV files. /// </summary> /// <param name="data">The dataset to export</param> /// <param name="filePath">The desired path and file name of the file to be saved. Do not include an extension.</param> /// <param name="format">The format to save the file in.</param> /// <param name="includeEmptyLines">Whether to export the file with empty lines or not.</param> /// <param name="addMetaDataFile">Whether to export the file with embedded site meta data.</param> /// <param name="includeChangeLog">Whether to include a separate log file that details the changes made to the data.</param> /// <param name="exportedPoints">What points to export.</param> /// <param name="dateColumnFormat">Whether to split the two date/time columns into five separate columns</param> /// <param name="exportRaw">Whether to export the raw data or the current state.</param> /// <param name="loadInUnloadedValues">Whether or not to load in any unloaded values</param> /// <param name="copyLogFiles">Whether or not to copy the log files</param> public static void Export(Dataset data, string filePath, ExportFormat format, bool includeEmptyLines, bool addMetaDataFile, bool includeChangeLog, ExportedPoints exportedPoints, DateColumnFormat dateColumnFormat, bool exportRaw, bool loadInUnloadedValues, bool copyLogFiles = false) { if (data == null) throw new ArgumentNullException("data", "Dataset cannot be null"); //LOAD IN ALL THE VALUES var firstYearLoaded = data.LowestYearLoaded; var lastYearLoaded = data.HighestYearLoaded; if (loadInUnloadedValues) { if (firstYearLoaded != 0) { for (var i = 0; i < firstYearLoaded; i++) { data.LoadInSensorData(i, true); } } if (data.EndYear > data.StartYear.AddYears(lastYearLoaded + 1)) { for (var i = lastYearLoaded + 1; data.EndYear >= data.StartYear.AddYears(i + 1); i++) { data.LoadInSensorData(i, true); } } } EventLogger.LogInfo(data, "EXPORTER", "Data export started."); if (String.IsNullOrWhiteSpace(filePath)) throw new ArgumentNullException("filePath", "filePath cannot be null"); if (format == null) throw new ArgumentNullException("format", "Export format cannot be null"); //Place the export inside a new folder named after the file; refuse to overwrite an existing export filePath = Path.Combine(Path.GetDirectoryName(filePath), Path.GetFileNameWithoutExtension(filePath), Path.GetFileName(filePath)); if (!Directory.Exists(Path.GetDirectoryName(filePath))) Directory.CreateDirectory(Path.GetDirectoryName(filePath)); else { throw new Exception("Cannot Overwrite Existing Data"); } string metaDataFilePath = Path.ChangeExtension(filePath, "_") + "Metadata.txt"; string changeMatrixFilePath = Path.ChangeExtension(filePath, "_") + "ChangesMatrix.txt"; var changesFilePath = Path.ChangeExtension(filePath, "_") + "Changes Log.txt"; var numOfPointsToSummarise = 1; if (exportedPoints.NumberOfMinutes != 0) numOfPointsToSummarise = exportedPoints.NumberOfMinutes / data.DataInterval; if (format.Equals(ExportFormat.CSV)) { ExportCSV(data, filePath, includeEmptyLines, dateColumnFormat, false, numOfPointsToSummarise); if (exportRaw) ExportCSV(data, Path.ChangeExtension(filePath, "_") + "Raw.txt", includeEmptyLines, dateColumnFormat, true, numOfPointsToSummarise); if (addMetaDataFile && data.Site != null) ExportMetaData(data, filePath, metaDataFilePath); if (includeChangeLog) ExportChangesFile(data, filePath, changeMatrixFilePath, changesFilePath, dateColumnFormat); EventLogger.LogInfo(data, "EXPORTER", "Data export complete. File saved to: " + filePath); } else if (format.Equals(ExportFormat.XLSX)) { throw new NotImplementedException("Cannot export as XLSX yet."); } else { throw new NotImplementedException("File format not supported."); } if (copyLogFiles && data.Site != null && Directory.Exists(Path.Combine(Common.AppDataPath, "Logs", data.Site.Name, "SensorLogs"))) { var sourcePath = Path.Combine(Common.AppDataPath, "Logs", data.Site.Name, "SensorLogs"); using (TextWriter tw = new StreamWriter(changesFilePath, true)) { tw.WriteLine("Log of change reasons for associated file " + Path.GetFileNameWithoutExtension(filePath)); //Copy all the files into one File Log foreach (string newPath in Directory.GetFiles(sourcePath, "*.*", SearchOption.AllDirectories)) { tw.WriteLine(""); tw.WriteLine("Change reasons for sensor " + Path.GetFileNameWithoutExtension(newPath)); tw.WriteLine(""); using (TextReader tr = new StreamReader(newPath)) { tw.WriteLine(tr.ReadToEnd()); tr.Close(); } Console.WriteLine("File Processed : " + newPath); } tw.Close(); } } if (loadInUnloadedValues) { //Unload all values not in our time range foreach (var sensor in data.Sensors) { var currentValuesToRemove = sensor.CurrentState.Values.Where( x => x.Key < data.StartYear.AddYears(firstYearLoaded) || x.Key >= data.StartYear.AddYears(lastYearLoaded + 1)).ToArray(); foreach (var keyValuePair in currentValuesToRemove) { sensor.CurrentState.Values.Remove(keyValuePair.Key); } var rawValuesToRemove = sensor.RawData.Values.Where( x => x.Key < data.StartYear.AddYears(firstYearLoaded) || x.Key >= data.StartYear.AddYears(lastYearLoaded + 1)).ToArray(); foreach (var keyValuePair in rawValuesToRemove) { sensor.RawData.Values.Remove(keyValuePair.Key); } } data.LowestYearLoaded = firstYearLoaded; data.HighestYearLoaded = lastYearLoaded; } }
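// Usage sketch (illustrative): a typical call that exports the current (QA/QC) state as delimited text
// together with a metadata file and change log. It assumes this helper lives in the same class as
// Export, and uses the Common.DatasetExportLocation helper defined later in this file.
public static void ExportWithMetadata(Dataset data)
{
    Export(data,
           Common.DatasetExportLocation(data),   // timestamped folder under Backups/Exports, no extension
           ExportFormat.CSV,
           includeEmptyLines: false,
           addMetaDataFile: true,
           includeChangeLog: true,
           exportedPoints: ExportedPoints.AllPoints,
           dateColumnFormat: DateColumnFormat.TwoDateColumn,
           exportRaw: false,
           loadInUnloadedValues: true);
}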
/// <summary> /// Exports a data set to a CSV file. /// The file is saved in the same format as the original CSV files. /// </summary> /// <param name="data">The dataset to export</param> /// <param name="filePath">The desired path and file name of the file to be saved. Do not include an extension.</param> /// <param name="format">The format to save the file in.</param> /// <param name="includeEmptyLines">Whether to export the file with empty lines or not.</param> /// <param name="addMetaDataFile">Whether to export the file with embedded site meta data.</param> /// <param name="includeChangeLog">Whether to include a separate log file that details the changes made to the data.</param> /// <param name="exportedPoints">What points to export.</param> /// <param name="dateColumnFormat">Whether to split the two date/time columns into five separate columns</param> /// <param name="loadInUnloadedValues">Whether or not to load in any unloaded values</param> public static void Export(Dataset data, string filePath, ExportFormat format, bool includeEmptyLines, bool addMetaDataFile, bool includeChangeLog, ExportedPoints exportedPoints, DateColumnFormat dateColumnFormat, bool loadInUnloadedValues = true) { Export(data, filePath, format, includeEmptyLines, addMetaDataFile, includeChangeLog, exportedPoints, dateColumnFormat, false, loadInUnloadedValues); }
private static int GetArrayRowFromTime(Dataset data, DateTime startDate, DateTime currentDate, int numOfPointsToAverage) { if (currentDate < startDate) throw new ArgumentException("currentDate must be larger than or equal to startDate\nYou supplied startDate=" + startDate.ToString() + " currentDate=" + currentDate.ToString()); return (int)Math.Floor(currentDate.Subtract(startDate).TotalMinutes / data.DataInterval / numOfPointsToAverage); }
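// Worked example (illustrative): with data.DataInterval == 15 minutes and numOfPointsToAverage == 4,
// a reading 120 minutes after startDate maps to row floor(120 / 15 / 4) == 2, i.e. each output row
// covers one hour of readings.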
/// <summary> /// Exports the metadata /// </summary> /// <param name="data"></param> /// <param name="filePath"></param> /// <param name="metaDataFilePath"></param> private static void ExportMetaData(Dataset data, string filePath, string metaDataFilePath) { using (StreamWriter writer = File.CreateText(metaDataFilePath)) { writer.WriteLine("Associated File:: " + Path.GetFileName(filePath)); writer.WriteLine("Site Name: " + data.Site.Name); writer.WriteLine("Owner: " + data.Site.Owner); writer.WriteLine("Latitude/Northing: " + data.Site.GpsLocation.DecimalDegreesLatitude); writer.WriteLine("Longitude/Easting: " + data.Site.GpsLocation.DecimalDegreesLongitude); writer.WriteLine("GPS Grid System: " + data.Site.GpsLocation.GridSystem); writer.WriteLine("Elevation (MASL): " + data.Site.Elevation); writer.WriteLine("Country: " + data.Site.CountryName); if (data.Site.PrimaryContact != null) { writer.WriteLine("Contact"); writer.WriteLine("Name: " + data.Site.PrimaryContact.FirstName + " " + data.Site.PrimaryContact.LastName); writer.WriteLine("Organisation: " + data.Site.PrimaryContact.Business); writer.WriteLine("Phone: " + data.Site.PrimaryContact.Phone); writer.WriteLine("Email: " + data.Site.PrimaryContact.Email); } else { writer.WriteLine("Contact:"); writer.WriteLine("Name: "); writer.WriteLine("Organisation: "); writer.WriteLine("Phone: "); writer.WriteLine("Email: "); } writer.WriteLine("Number of Sensors: " + data.Sensors.Count ); writer.WriteLine(); if (data.Sensors != null && data.Sensors.Count > 0) { foreach (var sensor in data.Sensors.OrderBy(x => x.SortIndex)) { writer.WriteLine(sensor.Name); writer.WriteLine("Description: " + sensor.Description); foreach (var metaData in sensor.MetaData) { writer.WriteLine("Serial Number: " + metaData.SerialNumber); writer.WriteLine("Manufacturer: " + metaData.Manufacturer); writer.WriteLine("Date Installed: " + metaData.DateOfInstallation); writer.WriteLine("Calibration Frequency (Days): " + metaData.IdealCalibrationFrequency.Days); } foreach (var calibration in sensor.Calibrations) { writer.WriteLine(calibration); } writer.WriteLine(); } } writer.WriteLine("Dataset Notes\r\n"); if (data.Site.DataEditingNotes != null) { foreach (var note in data.Site.DataEditingNotes) { writer.WriteLine(note); } } writer.WriteLine(); writer.WriteLine("Site Notes\r\n" + data.Site.SiteNotes); Debug.WriteLine(metaDataFilePath); writer.Close(); } }
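// For reference, the layout written by ExportMetaData (and parsed back by readMeta via fixed prefix
// lengths) looks like the sketch below; the values shown are illustrative only.
//
//   Associated File:: Lake_Example.csv
//   Site Name: Example Lake
//   Owner: Example Regional Council
//   Latitude/Northing: -37.78
//   Longitude/Easting: 175.28
//   GPS Grid System: WGS84
//   Elevation (MASL): 40
//   Country: New Zealand
//   Contact
//   Name: Jane Doe
//   Organisation: Example University
//   Phone: 000 000 0000
//   Email: jane.doe@example.org
//   Number of Sensors: 2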
/// <summary> /// Exports as a CSV /// </summary> /// <param name="data"></param> /// <param name="filePath"></param> /// <param name="includeEmptyLines"></param> /// <param name="dateColumnFormat"></param> /// <param name="exportRaw"></param> /// <param name="numOfPointsToSummarise"></param> private static void ExportCSV(Dataset data, string filePath, bool includeEmptyLines, DateColumnFormat dateColumnFormat, bool exportRaw, int numOfPointsToSummarise) { using (StreamWriter writer = File.CreateText(filePath)) { const char del = '\t'; var columnHeadings = dateColumnFormat.Equals(DateColumnFormat.OneDateColumn) ? "DateTime" : "Date" + del + "Time"; var currentSensorIndex = 0; var outputData = new string[data.Sensors.Count, (data.ExpectedDataPointCount / numOfPointsToSummarise) + 1]; var rowDate = data.StartTimeStamp; foreach (var sensor in data.Sensors.OrderBy(x => x.SortIndex)) { var stateToUse = (exportRaw) ? sensor.RawData : sensor.CurrentState; //Construct the column headings (Sensor siteNames) columnHeadings += del + ConstructHeader(sensor); var i = data.StartTimeStamp; while (i <= data.EndTimeStamp) { var sum = float.MinValue; for (var j = 0; j < numOfPointsToSummarise; j++, i = i.AddMinutes(data.DataInterval)) { float value; if (stateToUse.Values.TryGetValue(i, out value)) if (sum.Equals(float.MinValue)) sum = value; else sum += value; } if (!sum.Equals(float.MinValue)) { if (sensor.SummaryType == SummaryType.Average) outputData[ currentSensorIndex, GetArrayRowFromTime(data, data.StartTimeStamp, i.AddMinutes((-data.DataInterval) * numOfPointsToSummarise), numOfPointsToSummarise)] = Math.Round((sum / numOfPointsToSummarise), 2).ToString(); else outputData[ currentSensorIndex, GetArrayRowFromTime(data, data.StartTimeStamp, i.AddMinutes((-data.DataInterval) * numOfPointsToSummarise), numOfPointsToSummarise)] = Math.Round((sum), 2).ToString(); } } currentSensorIndex++; } //Strip the last delimiter from the headings and write the line writer.WriteLine(columnHeadings); //write the data here... for (int row = 0; row < data.ExpectedDataPointCount / numOfPointsToSummarise; row++) { string line = ""; for (int col = 0; col < data.Sensors.Count; col++) line += del + outputData[col, row]; if (includeEmptyLines || line.Length != data.Sensors.Count) { line = dateColumnFormat.Equals(DateColumnFormat.OneDateColumn) ? rowDate.ToString("yyyy-MM-dd HH:mm") + line : rowDate.ToString("yyyy-MM-dd") + del + rowDate.ToString("HH:mm") + line; writer.WriteLine(line); } rowDate = rowDate.AddMinutes(data.DataInterval * numOfPointsToSummarise); } writer.Close(); } }
/// <summary> /// Exports the changes file /// </summary> /// <param name="data">The dataset whose changes are being exported</param> /// <param name="filePath">The path of the exported data file</param> /// <param name="changeMatrixFilePath">The path to write the change matrix to</param> /// <param name="changesFilePath">The path to write the change reason log to</param> /// <param name="dateColumnFormat">Whether to use one or two date/time columns</param> private static void ExportChangesFile(Dataset data, string filePath, string changeMatrixFilePath, string changesFilePath, DateColumnFormat dateColumnFormat) { var changesUsed = new List<int>(); using (var writer = File.CreateText(changeMatrixFilePath)) { writer.WriteLine("Change matrix for file: " + Path.GetFileName(filePath)); writer.WriteLine("Cell format: QA/QC value [Raw value] (Change reason number)"); var line = dateColumnFormat.Equals(DateColumnFormat.OneDateColumn) ? "Day,Month,Year,Hours,Minutes" + '\t' : "Date,Time" + '\t'; line = data.Sensors.OrderBy(x => x.SortIndex).Aggregate(line, (current, sensor) => current + (sensor.Name + "\t")); line = line.Remove(line.Length - 1); writer.WriteLine(line); for (var time = data.StartTimeStamp; time <= data.EndTimeStamp; time = time.AddMinutes(data.DataInterval)) { line = dateColumnFormat.Equals(DateColumnFormat.OneDateColumn) ? time.ToString("yyyy-MM-dd HH:mm") + '\t' : time.ToString("yyyy-MM-dd") + '\t' + time.ToString("HH:mm") + '\t'; foreach (var sensor in data.Sensors.OrderBy(x => x.SortIndex)) { LinkedList<int> vals; float valsRaw; float currentValue; if (sensor.CurrentState.Changes.TryGetValue(time, out vals)) { if (sensor.CurrentState.Values.TryGetValue(time, out currentValue)) { line = line + currentValue + " "; if (sensor.RawData.Values.TryGetValue(time, out valsRaw)) { line = line + "[" + valsRaw + "] ("; } changesUsed.AddRange(vals.Where(x => !changesUsed.Contains(x))); line = vals.Aggregate(line, (current, val) => current + (val + " ")) + ")"; } } line += "\t"; } line = line.Remove(line.Length - 1); writer.WriteLine(line); } } using (var writer = File.CreateText(changesFilePath)) { writer.WriteLine("Change log for file " + Path.GetFileName(filePath)); foreach (var i in changesUsed.OrderBy(i => i)) { Debug.Print("Change number " + i); writer.WriteLine(i == -1 ? new ChangeReason(-1, "Reason not specified") : ChangeReason.ChangeReasons.FirstOrDefault(x => x.ID == i)); } } }
/// <summary> /// Imports from a meta file and a data file /// </summary> public void ImportDataMeta() { var importWindow = (LoadInDataMetaViewModel)_container.GetInstance(typeof(LoadInDataMetaViewModel), "LoadInDataMetaViewModel"); if (importWindow == null) return; _windowManager.ShowDialog(importWindow); if (importWindow.Success) { _sensorsToGraph.Clear(); SensorsToCheckMethodsAgainst.Clear(); UpdateGraph(true); var saveFirst = false; if (CurrentDataset != null) { saveFirst = Common.Confirm("Save before closing?", string.Format("Before we close '{0}' should we save it first?", CurrentDataset.Site.Name)); } var bw = new BackgroundWorker(); bw.DoWork += (o, e) => { ProgressIndeterminate = true; ShowProgressArea = true; if (!saveFirst) return; EventLogger.LogInfo(CurrentDataset, "Closing Save", "Saving to file before close"); WaitEventString = string.Format("Saving {0} to file", CurrentDataset.Site.Name); CurrentDataset.SaveToFile(); }; bw.RunWorkerCompleted += (o, e) => { ShowProgressArea = false; EnableFeatures(); }; DisableFeatures(); bw.RunWorkerAsync(); var newDataset = new Dataset(new Site(Site.NextID, "New Site", "", null, null, null)); CurrentDataset = newDataset; Import(importWindow.DataPath); LoadSiteFromMeta(importWindow.MetaPath, CurrentDataset); ClearDetectedValues(); NotifyOfPropertyChange(() => SiteNames); ChosenSelectedIndex = CurrentDataset.Site.Id + 1; } }
/// <summary> /// The log path for a sensor belonging to a site /// </summary> /// <param name="sensorName">The name of the sensor</param> /// <param name="dataSet">The dataset</param> /// <returns>The path to the sensor's log file for the site</returns> public static string GetSensorLogPathForSensorBelongingToSite(string sensorName, Dataset dataSet) { return Path.Combine(Common.AppDataPath, "Logs", dataSet.IdentifiableName, "SensorLogs", sensorName + ".txt"); }
/// <summary> /// Shows the site editing view for a given site /// </summary> /// <param name="dataSetToShow">The dataset that owns the site to view</param> /// <returns>Whether or not the view was completed</returns> private bool ShowSiteInformation(Dataset dataSetToShow) { if (dataSetToShow == null) { Common.ShowMessageBox("No Site Selected", "To view site information you must first select or create a site", false, false); return false; } var view = _container.GetInstance(typeof(EditSiteDataViewModel), "EditSiteDataViewModel") as EditSiteDataViewModel; if (view == null) { EventLogger.LogError(null, "Loading Site Editor", "Critical! Failed to get a View!!"); return false; } view.DataSet = dataSetToShow; if (dataSetToShow.Site.PrimaryContact == null) view.IsNewSite = true; view.Deactivated += (o, e) => { _dataSetFiles = null; NotifyOfPropertyChange(() => SiteNames); }; _windowManager.ShowDialog(view); NotifyOfPropertyChange(() => EditingNotes); return view.WasCompleted; }
/// <summary> /// The site log path /// </summary> /// <param name="dataSet">The dataset</param> /// <returns>The dataset's site log path</returns> public static string GetSiteLogPath(Dataset dataSet) { return Path.Combine(Common.AppDataPath, "Logs", dataSet.IdentifiableName, "log.txt"); }
/// <summary> /// Creates a new sensor, using the default error threshold. /// </summary> /// <param name="name">The name of the sensor.</param> /// <param name="description">A description of the sensor's function or purpose.</param> /// <param name="upperLimit">The upper limit for values reported by this sensor.</param> /// <param name="lowerLimit">The lower limit for values reported by this sensor.</param> /// <param name="unit">The unit used to report values given by this sensor.</param> /// <param name="maxRateOfChange">The maximum rate of change allowed by this sensor.</param> /// <param name="undoStack">A stack containing previous sensor states.</param> /// <param name="redoStack">A stack containing sensor states created after the modifications of the current state.</param> /// <param name="calibrations">A list of dates, on which calibration was performed.</param> /// <param name="owner">The dataset that owns the sensor</param> public Sensor(string name, string description, float upperLimit, float lowerLimit, string unit, float maxRateOfChange, Stack<SensorState> undoStack, Stack<SensorState> redoStack, List<Calibration> calibrations, Dataset owner) : this(name, description, upperLimit, lowerLimit, unit, maxRateOfChange, undoStack, redoStack, calibrations, Properties.Settings.Default.DefaultErrorThreshold, owner) { }
/// <summary> /// Logs a change to a sensor, to an individual file for each sensor. /// </summary> /// <param name="site">The site this log belongs to</param> /// <param name="sensorName">The name of the sensor the change applies to</param> /// <param name="eventDetails">The details of the change to be written to file</param> /// <returns>The logged message</returns> public static string LogSensorInfo(Dataset site, string sensorName, string eventDetails) { return site == null ? LogBase(Info, sensorName, eventDetails, GetSensorLogPath(sensorName)) : LogBase(Info, sensorName, eventDetails, GetSensorLogPathForSensorBelongingToSite(sensorName, site)); }
/// <summary> /// Creates a new sensor. /// </summary> /// <param name="name">The name of the sensor.</param> /// <param name="description">A description of the sensor's function or purpose.</param> /// <param name="upperLimit">The upper limit for values reported by this sensor.</param> /// <param name="lowerLimit">The lower limit for values reported by this sensor.</param> /// <param name="unit">The unit used to report values given by this sensor.</param> /// <param name="maxRateOfChange">The maximum rate of change allowed by this sensor.</param> /// <param name="undoStack">A stack containing previous sensor states.</param> /// <param name="redoStack">A stack containing sensor states created after the modifications of the current state.</param> /// <param name="calibrations">A list of dates, on which calibration was performed.</param> /// <param name="errorThreshold">The number of times a failure-indicating value can occur before this sensor is flagged as failing.</param> /// <param name="owner">The dataset that owns the sensor</param> /// <param name="sType">Indicates whether the sensor's values should be averaged or summed when summarised</param> public Sensor(string name, string description, float upperLimit, float lowerLimit, string unit, float maxRateOfChange, Stack<SensorState> undoStack, Stack<SensorState> redoStack, List<Calibration> calibrations, int errorThreshold, Dataset owner, SummaryType sType) { if (name == "") throw new ArgumentNullException("Name"); // if (unit == "") // throw new ArgumentNullException("Unit"); if (calibrations == null) throw new ArgumentNullException("Calibrations"); if (undoStack == null) throw new ArgumentNullException("UndoStack"); if (redoStack == null) throw new ArgumentNullException("RedoStack"); if (upperLimit <= lowerLimit) throw new ArgumentOutOfRangeException("UpperLimit"); _name = name; RawName = name; Description = description; UpperLimit = upperLimit; LowerLimit = lowerLimit; _unit = unit; MaxRateOfChange = maxRateOfChange; _undoStack = undoStack; _redoStack = redoStack; _calibrations = calibrations; ErrorThreshold = errorThreshold; Owner = owner; _summaryType = sType; _metaData = new ObservableCollection<SensorMetaData>(); CurrentMetaData = new SensorMetaData(""); Colour = Color.FromRgb((byte)(Common.Generator.Next()), (byte)(Common.Generator.Next()), (byte)(Common.Generator.Next())); }
/// <summary> /// Logs a warning event to the log file, containing the current time, thread name and details of the event /// </summary> /// <param name="site">The site this log belongs to</param> /// <param name="threadName">The name of the thread calling this method. For background workers, supply a brief description of the thread's purpose</param> /// <param name="eventDetails">The specific details of the event that are to be written to file</param> /// <returns>The logged message</returns> public static string LogWarning(Dataset site, string threadName, string eventDetails) { return site == null ? LogBase(Warning, threadName, eventDetails, null) : LogBase(Warning, threadName, eventDetails, GetSiteLogPath(site)); }
/// <summary> /// Creates a new sensor, with the specified sensor name and measurement unit. /// </summary> /// <param name="name">The name of the sensor.</param> /// <param name="unit">The unit used to report values given by this sensor.</param> /// <param name="owner">The owner of the sensor</param> public Sensor(string name, string unit, Dataset owner) : this(name, "", 100, 0, unit, 0, owner) { }
/// <summary> /// Given a timestamp which represents a missing value, interpolates the dataset using the first known point before the given point, and the first known point after the given point in the list of keys. /// </summary> /// <param name="valuesToInterpolate">A list of data point 'keys' where values are missing.</param> /// <param name="ds">A dataset to use, which indicates the length of time that elapses between readings.</param> /// <returns>A sensor state with the interpolated data.</returns> public SensorState Interpolate(List<DateTime> valuesToInterpolate, Dataset ds, ChangeReason reason) { EventLogger.LogInfo(_owner.Owner, GetType().ToString(), "Starting extrapolation process"); if (valuesToInterpolate == null) throw new ArgumentNullException("valuesToInterpolate"); if (valuesToInterpolate.Count == 0) throw new ArgumentException("You must specify at least one value to use for extrapolation."); if (ds == null) throw new ArgumentNullException("ds"); //First remove values var newState = RemoveValues(valuesToInterpolate, reason); newState.Reason = reason; foreach (var time in valuesToInterpolate) { if (newState.Values.ContainsKey(time)) continue; DateTime startValue; try { startValue = newState.FindPrevValue(time); } catch (Exception) { Debug.WriteLine("Failed to find start value continuing"); continue; } DateTime endValue; try { endValue = newState.FindNextValue(time); } catch (Exception) { Debug.WriteLine("Failed to find end value continuing"); continue; } var timeDiff = endValue.Subtract(startValue).TotalMinutes; var valDiff = newState.Values[endValue] - newState.Values[startValue]; var step = valDiff / (timeDiff / ds.DataInterval); var value = newState.Values[startValue] + step; for (var i = ds.DataInterval; i < timeDiff; i += ds.DataInterval) { newState.Values[startValue.AddMinutes(i)] = (float)Math.Round(value, 2); newState.AddToChanges(startValue.AddMinutes(i), reason.ID); value += step; } } /* ==OLD METHOD== var first = valuesToInterpolate[0]; DateTime startValue; try { startValue = FindPrevValue(first, ds); } catch(Exception e) { throw new DataException("No start value"); } var endValue = DateTime.MinValue; var time = 0; try { while (endValue == DateTime.MinValue) { endValue = (Values.ContainsKey(first.AddMinutes(time)) ? first.AddMinutes(time) : DateTime.MinValue); time += ds.DataInterval; } } catch(Exception e) { throw new DataException("No end value"); } var timeDiff = endValue.Subtract(startValue).TotalMinutes; var valDiff = Values[endValue] - Values[startValue]; var step = valDiff / (timeDiff / ds.DataInterval); var value = Values[startValue] + step; var newState = Clone(); for (var i = ds.DataInterval; i < timeDiff; i += ds.DataInterval) { newState.Values.Add(startValue.AddMinutes(i), (float)Math.Round(value, 2)); value += step; } EventLogger.LogInfo(GetType().ToString(), "Completing extrapolation process");*/ return newState; }
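// Usage sketch (illustrative, not part of the original class): produce an interpolated state for one
// sensor. The caller remains responsible for installing the returned state (for example via the
// sensor's undo/redo stacks); the ChangeReason instance is assumed to come from ChangeReason.ChangeReasons.
public static SensorState InterpolateMissingValues(Sensor sensor, Dataset dataset, List<DateTime> missingTimestamps, ChangeReason reason)
{
    // Interpolate removes the listed timestamps first, then refills them by linear interpolation
    // between the nearest surrounding readings, stepping by dataset.DataInterval minutes.
    return sensor.CurrentState.Interpolate(missingTimestamps, dataset, reason);
}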
/// <summary> /// Calculate the thermocline depth /// </summary> /// <param name="dataset">The dataset to use</param> /// <param name="mixedTempDifferential">The minimum mixed temperature differential</param> /// <param name="preCalculatedDensities">The set of precalculated densities</param> /// <param name="seasonal">Whether or not to also look for a seasonally adjusted thermocline</param> /// <param name="minimumMetalimionSlope">The minimum metalimnion slope</param> /// <returns>The thermocline depth details for each timestamp</returns> public static Dictionary<DateTime, ThermoclineDepthDetails> CalculateThermoclineDepth(Dataset dataset, double mixedTempDifferential = 0, IEnumerable<DensitySeries> preCalculatedDensities = null, bool seasonal = false, float minimumMetalimionSlope = 0.1f) { var thermocline = new Dictionary<DateTime, ThermoclineDepthDetails>(); var densities = (preCalculatedDensities == null) ? CalculateDensity(dataset).OrderBy(x => x.Depth).ToArray() : preCalculatedDensities.OrderBy(x => x.Depth).ToArray(); var densityColumns = GenerateDensityColumns(densities); var timeStamps = densityColumns.Keys.ToArray(); foreach (var t in timeStamps) { var thermoclineDetails = new ThermoclineDepthDetails(); var depths = densityColumns[t].Keys.OrderBy(x => x).ToArray(); if (depths.Length < 3) //We need at least 3 depths to calculate continue; if (mixedTempDifferential > 0) { var orderedSensors = dataset.Sensors.Where(x => x.SensorType == "Water_Temperature").OrderBy(x => x.Elevation).ToArray(); if (orderedSensors.Any()) { var first = orderedSensors.First(x => x.CurrentState.Values.ContainsKey(t)); var last = orderedSensors.Last(x => x.CurrentState.Values.ContainsKey(t)); if (first != null && last != null) { if (first.CurrentState.Values[t] - last.CurrentState.Values[t] <= mixedTempDifferential) continue; } } } var slopes = new double[depths.Length]; for (var i = 1; i < depths.Length - 1; i++) { slopes[i] = (densityColumns[t][depths[i + 1]] - densityColumns[t][depths[i]]) / (depths[i + 1] - depths[i]); } thermoclineDetails.DrhoDz = slopes; var maxSlope = slopes.Max(); var indexOfMaxium = Array.IndexOf(slopes, maxSlope); thermoclineDetails.ThermoclineIndex = indexOfMaxium; thermoclineDetails.ThermoclineDepth = (depths[indexOfMaxium] + depths[indexOfMaxium + 1]) / 2; if (indexOfMaxium > 1 && indexOfMaxium < depths.Length - 2) { var sdn = -(depths[indexOfMaxium + 1] - depths[indexOfMaxium]) / (slopes[indexOfMaxium + 1] - slopes[indexOfMaxium]); var sup = (depths[indexOfMaxium] - depths[indexOfMaxium - 1]) / (slopes[indexOfMaxium] - slopes[indexOfMaxium - 1]); var upD = depths[indexOfMaxium]; var dnD = depths[indexOfMaxium + 1]; if (!(double.IsInfinity(sdn) || double.IsInfinity(sup) || double.IsNaN(sdn) || double.IsNaN(sup))) { thermoclineDetails.ThermoclineDepth = (float)(dnD * (sdn / (sdn + sup)) + upD * (sup / (sdn + sup))); } } if (seasonal) { const float minPercentageForUniqueTheroclineStep = 0.15f; var minCutPoint = Math.Max(minPercentageForUniqueTheroclineStep * maxSlope, minimumMetalimionSlope); var localPeaks = LocalPeaks(slopes, minCutPoint); if (localPeaks.Any()) { var indexOfSeasonallyAdjustedMaximum = Array.IndexOf(slopes, localPeaks.Last()); if (indexOfSeasonallyAdjustedMaximum > indexOfMaxium + 1) { thermoclineDetails.SeasonallyAdjustedThermoclineIndex = indexOfSeasonallyAdjustedMaximum; thermoclineDetails.SeasonallyAdjustedThermoclineDepth = (depths[indexOfSeasonallyAdjustedMaximum] + depths[indexOfSeasonallyAdjustedMaximum + 1]) / 2; if (indexOfSeasonallyAdjustedMaximum > 1 && indexOfSeasonallyAdjustedMaximum < depths.Length - 2) { var
sdn = -(depths[indexOfSeasonallyAdjustedMaximum + 1] - depths[indexOfSeasonallyAdjustedMaximum]) / (slopes[indexOfSeasonallyAdjustedMaximum + 1] - slopes[indexOfSeasonallyAdjustedMaximum]); var sup = (depths[indexOfSeasonallyAdjustedMaximum] - depths[indexOfSeasonallyAdjustedMaximum - 1]) / (slopes[indexOfSeasonallyAdjustedMaximum] - slopes[indexOfSeasonallyAdjustedMaximum - 1]); var upD = depths[indexOfSeasonallyAdjustedMaximum]; var dnD = depths[indexOfSeasonallyAdjustedMaximum + 1]; if (!(double.IsInfinity(sdn) || double.IsInfinity(sup) || double.IsNaN(sdn) || double.IsNaN(sup))) { thermoclineDetails.SeasonallyAdjustedThermoclineDepth = (float)(dnD * (sdn / (sdn + sup)) + upD * (sup / (sdn + sup))); } } } else { thermoclineDetails.NoSeasonalFound(); } } else { thermoclineDetails.NoSeasonalFound(); } } else { thermoclineDetails.NoSeasonalFound(); } thermocline[t] = thermoclineDetails; } return thermocline; }
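// Usage sketch (illustrative): one density calculation can be shared between the thermocline and
// metalimnion passes; the property names follow the methods defined above.
public static void PrintStratificationSummary(Dataset dataset)
{
    var densities = CalculateDensity(dataset);
    var thermoclines = CalculateThermoclineDepth(dataset, mixedTempDifferential: 0.5, preCalculatedDensities: densities, seasonal: true);
    var boundaries = CalculateMetalimnionBoundaries(dataset, thermoclines);
    foreach (var pair in thermoclines)
    {
        var bounds = boundaries[pair.Key];
        Console.WriteLine("{0:yyyy-MM-dd HH:mm}: thermocline {1:F2} m, metalimnion {2:F2} m to {3:F2} m",
                          pair.Key, pair.Value.ThermoclineDepth, bounds.Top, bounds.Bottom);
    }
}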
public Dataset readMeta(Dataset input, string filename) { string siteOwner, siteName, siteCountry, siteGPSLat, siteGPSLong, siteGPSGrid, siteElevation, siteContactName, siteContactNumber, siteContactEmail, siteContactOrginisation; try { int iss; string numSensors, loopStr; StreamReader reader = new StreamReader(filename); reader.ReadLine(); // Throwing away asscoiated file siteName = CleanMetaIn(reader.ReadLine(), 11); siteOwner = CleanMetaIn(reader.ReadLine(), 7); siteGPSLat = CleanMetaIn(reader.ReadLine(), 19); siteGPSLong = CleanMetaIn(reader.ReadLine(), 19); siteGPSGrid = CleanMetaIn(reader.ReadLine(), 17); siteElevation = CleanMetaIn(reader.ReadLine(), 18); siteCountry = CleanMetaIn(reader.ReadLine(), 9); reader.ReadLine(); // Throwing away contact header siteContactName = CleanMetaIn(reader.ReadLine(), 6); siteContactOrginisation = CleanMetaIn(reader.ReadLine(), 14); siteContactNumber = CleanMetaIn(reader.ReadLine(), 7); siteContactEmail = CleanMetaIn(reader.ReadLine(), 7); numSensors = reader.ReadLine(); if (String.IsNullOrWhiteSpace(siteContactName)) { siteContactName = ". ."; } if (String.IsNullOrWhiteSpace(siteContactNumber)) { siteContactNumber = " "; } if (String.IsNullOrWhiteSpace(siteContactEmail)) { siteContactEmail = " "; } if (String.IsNullOrWhiteSpace(siteContactOrginisation)) { siteContactOrginisation = " "; } iss = Int32.Parse(CleanMetaIn(numSensors, 19)); if (iss == input.Sensors.Count) // check to see if the number of sensors matches whats in the meta file { string header = reader.ReadLine(); string[] arr4 = new string[iss + 1]; for (int i = 0; i < iss; i++) { header = reader.ReadLine(); int ndx = input.Sensors.FindIndex(delegate(Sensor toFind) { return toFind.Name == header; } ); loopStr = reader.ReadLine(); if (ndx >= 0) { do // Works out what meta data is attached to each sensor //and adds it into the correct place { if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Desc") { input.Sensors[ndx].Description = CleanMetaIn(loopStr, 13); loopStr = reader.ReadLine(); } if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Seri") { input.Sensors[ndx].CurrentMetaData.SerialNumber = CleanMetaIn(loopStr, 15); loopStr = reader.ReadLine(); } if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Manu") { input.Sensors[ndx].CurrentMetaData.Manufacturer = CleanMetaIn(loopStr, 14); loopStr = reader.ReadLine(); } if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Date") { input.Sensors[ndx].CurrentMetaData.DateOfInstallation = DateTime.Parse(CleanMetaIn(loopStr, 16)); loopStr = reader.ReadLine(); } if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Cali") { if (loopStr.Substring(10, 2) == "n ") { input.Sensors[ndx].CurrentMetaData.IdealCalibrationFrequency = TimeSpan.FromDays(Double.Parse(CleanMetaIn(loopStr, 29))); loopStr = reader.ReadLine(); } if (!string.IsNullOrEmpty(loopStr) && loopStr.Substring(0, 4) == "Cali" && loopStr.Substring(10, 2) == "n:") { var calibStr = CleanMetaIn(loopStr, 12); DateTime calibTime = new DateTime(int.Parse(calibStr.Substring(0, 4)), int.Parse(calibStr.Substring(5, 2)), int.Parse(calibStr.Substring(8, 2))); string[] first = calibStr.Substring(16).Split(' '); string[] preNum = first[0].Split('-'); string[] postNum = first[2].Split('-'); postNum[1].Remove(0, 6); input.Sensors[ndx].Calibrations.Add(new Calibration(calibTime, float.Parse(preNum[0].TrimStart('[')), float.Parse(preNum[1]), float.Parse(preNum[2].TrimEnd(']')), float.Parse(postNum[0].TrimStart('[')), 
float.Parse(postNum[1]), float.Parse(postNum[2].TrimEnd(']')))); loopStr = reader.ReadLine(); } } } while (!string.IsNullOrEmpty(loopStr)); } } } else { Microsoft.Windows.Controls.MessageBox.Show("Could not load sensor data as meta file did not match actual number of sensors"); //Skip through the sensors until the notes do { reader.ReadLine(); } while (reader.Peek() != 'D'); } string checkNext = reader.ReadLine(); if (checkNext.Equals("Dataset Notes")) // Reads and adds the dataset notes { if (input.Site.DataEditingNotes == null) input.Site.DataEditingNotes = new Dictionary<DateTime, string>(); loopStr = reader.ReadLine(); while (!string.IsNullOrEmpty(loopStr)) { input.Site.DataEditingNotes.Add(DateTime.Now, loopStr); loopStr = reader.ReadLine(); } } checkNext = reader.ReadLine(); if (checkNext.Equals("Site Notes")) // Reads and adds the site notes { if (input.Site.SiteNotes == null) input.Site.SiteNotes = " "; loopStr = reader.ReadLine(); while (!string.IsNullOrEmpty(loopStr)) { input.Site.SiteNotes = input.Site.SiteNotes + loopStr; loopStr = reader.ReadLine(); } } var oldFile = input.SaveLocation; File.Delete(oldFile); var names = siteContactName.Split(' '); Contact siteContact = new Contact(names[0], names[1], siteContactEmail, siteContactOrginisation, siteContactNumber, 12); OwnerHelper.Add(siteOwner); ObservableCollection<Contact> contactList = Contact.ImportAll(); if (!contactList.Contains(siteContact)) { contactList.Add(siteContact); Contact.ExportAll(contactList); } input.Site.PrimaryContact = siteContact; input.Site.GpsLocation = new GPSCoords(decimal.Parse(siteGPSLat), decimal.Parse(siteGPSLong), siteGPSGrid); input.Site.Name = siteName; input.Site.Elevation = float.Parse(siteElevation); input.Site.Owner = siteOwner; input.Site.CountryName = siteCountry; } catch (Exception) { System.Windows.MessageBox.Show("There was an error importing the meta file, please make sure that it is correctly formatted and all parts are filled in"); } return input; }
public static string DatasetExportLocation(Dataset dataset) { var timestamp = DateTime.Now; var directory = Path.Combine(AppDataPath, "Backups", "Exports", dataset.Site.Name); if (!Directory.Exists(directory)) Directory.CreateDirectory(directory); return Path.Combine(directory, timestamp.ToString("ddhhmmss")); }
/// <summary> /// Creates a new site /// </summary> public void CreateNewSite() { _sensorsToGraph.Clear(); SensorsToCheckMethodsAgainst.Clear(); UpdateGraph(true); var saveFirst = false; if (CurrentDataset != null) { saveFirst = Common.Confirm("Save before closing?", string.Format("Before we close '{0}' should we save it first?", CurrentDataset.Site.Name)); } var bw = new BackgroundWorker(); bw.DoWork += (o, e) => { ProgressIndeterminate = true; ShowProgressArea = true; if (!saveFirst) return; EventLogger.LogInfo(CurrentDataset, "Closing Save", "Saving to file before close"); WaitEventString = string.Format("Saving {0} to file", CurrentDataset.Site.Name); CurrentDataset.SaveToFile(); }; bw.RunWorkerCompleted += (o, e) => { ShowProgressArea = false; EnableFeatures(); var newDataset = new Dataset(new Site(Site.NextID, "New Site", "", null, null, null)); if (ShowSiteInformation(newDataset)) { CurrentDataset = newDataset; ClearDetectedValues(); } else { _graphableSensors = null; NotifyOfPropertyChange(() => GraphableSensors); } NotifyOfPropertyChange(() => SiteNames); }; DisableFeatures(); bw.RunWorkerAsync(); }
public static void SaveSession(BackgroundWorker delegatedBackgroundWorker, Dataset sessionToSave) { EventLogger.LogInfo(sessionToSave, "Save daemon", "Session save started."); if (delegatedBackgroundWorker == null) delegatedBackgroundWorker = new BackgroundWorker(); if (!string.IsNullOrWhiteSpace(sessionToSave.SaveLocation)) { delegatedBackgroundWorker.DoWork += (o, e) => { using (var stream = new FileStream(sessionToSave.SaveLocation, FileMode.Create)) new BinaryFormatter().Serialize(stream, sessionToSave); EventLogger.LogInfo(sessionToSave, "Save [Background Worker]", string.Format("Session save complete. File saved to: {0}", sessionToSave.SaveLocation)); }; } else EventLogger.LogInfo(sessionToSave, "Save daemon", "Session save aborted"); delegatedBackgroundWorker.RunWorkerAsync(); }
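// Usage sketch (illustrative): passing null lets SaveSession create its own BackgroundWorker; the
// dataset's SaveLocation must already be set, otherwise the save is logged as aborted.
// e.g. SaveSession(null, CurrentDataset);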
/// <summary> /// Exports a data set to a CSV file. /// The file is saved in the same format as the original CSV files. /// </summary> /// <param name="data">The dataset to export</param> /// <param name="filePath">The desired path and file name of the file to be saved. Do not include an extension.</param> /// <param name="format">The format to save the file in.</param> /// <param name="includeEmptyLines">Whether to export the file with empty lines or not.</param> /// <param name="loadInUnloadedValues">Whether or not to load in any unloaded values</param> public static void Export(Dataset data, string filePath, ExportFormat format, bool includeEmptyLines, bool loadInUnloadedValues = true) { Export(data, filePath, format, includeEmptyLines, false, false, ExportedPoints.AllPoints, DateColumnFormat.TwoDateColumn, false, loadInUnloadedValues); }