/// <summary>
/// Appends the most recently inserted AggregatedData row to the live heat chart,
/// creating the HeatSeries on first use and appending to it afterwards.
/// </summary>
internal void AddHeatChartFromCurrent()
{
    // Last aggregated row that was added to the live panel's intensity data.
    AggregatedData lastInserted = this.livePanel.Item2.intensityData[this.livePanel.Item2.intensityData.Count - 1];

    // Y coordinate of the new heat-map row: one row per aggregated data set.
    int row = livePanel.Item2.intensityData.Count;

    if (liveheatchart.Series.Count == 0)
    {
        // First row: build the point collection and create the series with it.
        ChartValues<HeatPoint> temporalvalues = new ChartValues<HeatPoint>();
        for (int j = 0; j < lastInserted.aggregatedData.Length; j++)
        {
            temporalvalues.Add(new HeatPoint(j, row, lastInserted.aggregatedData[j]));
        }
        liveheat = new HeatSeries { Values = temporalvalues, GradientStopCollection = gradient };
        liveheatchart.Series.Add(liveheat);
    }
    else
    {
        // Subsequent rows: append points directly to the existing series.
        // (The original also built a throw-away ChartValues collection here and
        // carried commented-out dead code; both removed.)
        for (int j = 0; j < lastInserted.aggregatedData.Length; j++)
        {
            liveheat.Values.Add(new HeatPoint(j, row, lastInserted.aggregatedData[j]));
        }
    }
}
/// <summary>
/// Aggregates GDP and population figures per continent from raw CSV content.
/// Rows whose country is missing from the country->continent mapper (or maps to
/// a blank continent) are skipped.
/// </summary>
/// <param name="csvString">Raw CSV content; quotes are stripped and rows split on '\n'.</param>
/// <param name="aggregatedData">Accumulator receiving one AddOrUpdateData call per kept row.</param>
private void AggregateContinentData(string csvString, AggregatedData aggregatedData)
{
    string[] csvRows = csvString.Replace("\"", String.Empty).Trim().Split('\n');
    string[] headers = csvRows[0].Split(',');
    int indexCountryName = Array.IndexOf(headers, fieldCountryName);
    int indexGDP2012 = Array.IndexOf(headers, fieldGDP2012);
    int indexPopulation2012 = Array.IndexOf(headers, fieldPopulation2012);

    // Start at 1 to skip the header row.
    for (int i = 1; i < csvRows.Length; i++)
    {
        string[] rowData = csvRows[i].Split(',');

        // Single TryGetValue replaces ContainsKey + two indexer reads; the old
        // " " sentinel initialisation is gone.
        string continentName;
        if (countryContinentMapper.TryGetValue(rowData[indexCountryName], out continentName)
            && !String.IsNullOrWhiteSpace(continentName))
        {
            // InvariantCulture: the CSV carries machine-formatted numbers and must not
            // depend on the host locale's decimal separator.
            aggregatedData.AddOrUpdateData(
                continentName,
                float.Parse(rowData[indexGDP2012], System.Globalization.CultureInfo.InvariantCulture),
                float.Parse(rowData[indexPopulation2012], System.Globalization.CultureInfo.InvariantCulture));
        }
    }
}
/// <summary>
/// End-to-end check: runs the aggregation over the sample CSV, then compares the
/// produced output JSON against the checked-in expected-output JSON.
/// </summary>
public async Task ShouldBeSameAsExpectedOutput()
{
    var aggregateData = new AggregateGDP();
    var actualData = new AggregatedData();
    var expectedData = new AggregatedData();

    await aggregateData.AggregatePopulationAndGDPData(@"data/datafile.csv");

    string actualContent;
    using (var reader = new StreamReader(Path.Combine(Environment.CurrentDirectory, "output", "output.json")))
    {
        actualContent = await reader.ReadToEndAsync();
    }
    actualData.DeserializeData(actualContent);

    string expectedContent;
    // BUG FIX: the original concatenated @"../../../../expected-output.json" directly onto
    // CurrentDirectory with no separator, yielding e.g. "C:\work../../..", which resolves
    // to the wrong file. Path.Combine inserts the separator correctly.
    using (var reader = new StreamReader(Path.Combine(Environment.CurrentDirectory, @"../../../../expected-output.json")))
    {
        expectedContent = await reader.ReadToEndAsync();
    }
    expectedData.DeserializeData(expectedContent);

    // xUnit convention: expected value first, actual second (the original had them swapped,
    // which produces misleading failure messages).
    Assert.Equal(expectedData, actualData);
}
/// <summary>
/// Writes an AggregatedData instance to a timestamped CSV file: a "#HEADER" block
/// followed by "X(ms); Y1" rows, one per aggregated sample.
/// </summary>
/// <param name="aggregatedData">Data to persist; its aggregatedData array must be non-null.</param>
/// <returns>The generated file name (without path).</returns>
/// <exception cref="Exception">Wraps any I/O failure.</exception>
public string saveAggregatedData(AggregatedData aggregatedData)
{
    string fileName = Settings.projectName + "_" + new DateTimeOffset(DateTime.UtcNow).ToUnixTimeSeconds() + ".csv";
    // Compute the target folder once instead of repeating the concatenation.
    string folder = AppDomain.CurrentDomain.BaseDirectory + AGGREGATED_DATA_PATH;
    try
    {
        this.createFolderIfNotExists(folder);
        using (StreamWriter sw = File.CreateText(folder + fileName))
        {
            sw.WriteLine("#HEADER");
            sw.WriteLine("numberOfMeasurements :" + aggregatedData.numberOfMeasurements);
            sw.WriteLine("sampling :" + aggregatedData.sampling);
            sw.WriteLine("gate :" + aggregatedData.gate);

            // Time axis starts at 0 and advances by sampling/1000 per row
            // (presumably a µs -> ms conversion — TODO confirm units).
            double sampling = 0;
            sw.WriteLine("X(ms); Y1");
            for (int i = 0; i < aggregatedData.aggregatedData.Length; i++)
            {
                sw.WriteLine(String.Format("{0:0.000}", sampling) + ";" + aggregatedData.aggregatedData[i]);
                sampling += ((double)aggregatedData.sampling / 1000);
            }
            // Redundant sw.Close() removed: the using block flushes and disposes the writer.
        }
    }
    catch (Exception e)
    {
        throw new Exception("Could not save AggregatedData, exception : " + e.Message);
    }
    return (fileName);
}
/// <summary>
/// Builds a fixed IntensityData fixture of three AggregatedData rows (9 samples each)
/// for use in tests.
/// </summary>
private IntensityData constructIntensityData()
{
    // Array initializers replace the original 27 element-by-element assignments;
    // the values are identical.
    AggregatedData aggregated = new AggregatedData();
    aggregated.numberOfMeasurements = 5;
    aggregated.aggregatedData = new int[] { 5, 6, 5, 5, 10, 11, 15, 6, 5 };

    AggregatedData aggregated1 = new AggregatedData();
    aggregated1.numberOfMeasurements = 2;
    aggregated1.aggregatedData = new int[] { 2, 2, 5, 5, 5, 2, 2, 2, 2 };

    AggregatedData aggregated2 = new AggregatedData();
    aggregated2.numberOfMeasurements = 4;
    aggregated2.aggregatedData = new int[] { 4, 4, 4, 4, 10, 10, 10, 4, 4 };

    IntensityData currentIntensityData = new IntensityData();
    currentIntensityData.intensityData.Add(aggregated);
    currentIntensityData.intensityData.Add(aggregated1);
    currentIntensityData.intensityData.Add(aggregated2);
    return (currentIntensityData);
}
/// <summary>
/// Verifies that aggregating one Arduino life cycle yields a non-empty data set,
/// a positive measurement count, and strictly positive samples throughout.
/// </summary>
public async Task testGetAggregatedData()
{
    ArduinoConnectionService.Instance.start();

    AggregatedData aggregated = await DataManagementService.Instance.getAggregatedData();

    Assert.IsTrue(aggregated.aggregatedData.Length > 0, "container for agregated data is empty");
    Assert.IsTrue(aggregated.numberOfMeasurements > 0, "number of measurements for agregated data is 0");

    // Every aggregated sample must be strictly positive.
    foreach (int sample in aggregated.aggregatedData)
    {
        Assert.IsTrue(sample > 0);
    }
}
/// <summary>
/// Continuously acquires aggregated data (real or simulated) and redraws the main
/// cartesian chart until isStarted is cleared; also feeds the live heat map when enabled.
/// NOTE(review): kept as async void because it is fired-and-forgotten from UI code and
/// handles its own exceptions inside the loop.
/// </summary>
internal async void DrawGraph()
{
    // Hoisted out of the loop: one RNG for the whole session instead of a new
    // Random per iteration.
    Random rnd = new Random();

    while (isStarted)
    {
        try
        {
            cartesianChartMain.AxisY.Clear();
            cartesianChartMain.AxisX.Clear();

            // Disable animation and hover handling for faster rendering.
            cartesianChartMain.DisableAnimations = true;
            cartesianChartMain.Hoverable = false;

            // MAIN CHART
            if (this.isTesting)
            {
                // Simulated signal: a peak between indices 250 and 300, low noise elsewhere.
                this.aggData = new AggregatedData();
                int[] aggregatedData = new int[500];
                for (int i = 0; i < 500; i++)
                {
                    aggregatedData[i] = (i > 250 && i < 300) ? rnd.Next(100, 180) : rnd.Next(52);
                }
                this.aggData.aggregatedData = aggregatedData;
                // Task.Delay instead of Task.Run(() => Thread.Sleep(2000)):
                // same pause without blocking a thread-pool thread.
                await Task.Delay(2000);
            }
            else
            {
                // getAggregatedData is already async; awaiting it directly replaces the
                // redundant Task.Run wrapper.
                this.aggData = await DataManagementService.Instance.getAggregatedData();
            }

            // HEAT MAP - only when the user enabled heat-map rendering.
            if (heatIsStarted)
            {
                this.livePanel.Item2.intensityData.Add(this.aggData);
                this.AddHeatChartFromCurrent();
            }

            cartesianChartMain.Zoom = ZoomingOptions.X;
            ValuesFill();
        }
        catch (Exception err)
        {
            MessageBox.Show("Connection not found : " + err.Message);
            isStarted = !isStarted;
        }
    }
}
/// <summary>
/// Reads the country->continent mapper and the GDP/population CSV concurrently,
/// aggregates the CSV rows per continent, and writes the serialized result to disk.
/// </summary>
/// <param name="filePath">Path of the CSV data file to aggregate.</param>
public async Task AggregatePopulationAndGDPData(string filePath)
{
    // Start both reads up front so they run concurrently.
    Task<string> mapperTask = _fileUtils.ReadFile(mapperFilePath);
    Task<string> csvParserTask = _fileUtils.ReadFile(filePath);

    // Await the tasks directly instead of the "await task; use task.Result" pattern —
    // same values, clearer intent, no accidental blocking if a task is refactored later.
    countryContinentMapper = ParseMapper(await mapperTask);

    AggregatedData aggregatedData = new AggregatedData();
    AggregateContinentData(await csvParserTask, aggregatedData);

    await _fileUtils.WriteFile(outputFilePath, aggregatedData.SerializeData());
}
/// <summary>
/// Writes an IntensityData instance to a timestamped CSV file: a "#HEADER" block,
/// a column-title row ("t(ms);Y1;Y2;..."), then one row per sample index containing
/// the value of every contained AggregatedData series. All series are assumed to
/// share the sampling/gate/length of the first one.
/// </summary>
/// <param name="intensityData">Collection of aggregated series to persist; must be non-empty.</param>
/// <returns>The generated file name (without path).</returns>
/// <exception cref="Exception">When the collection is empty or on any I/O failure.</exception>
public string saveIntensityData(IntensityData intensityData)
{
    int numberofElements = intensityData.intensityData.Count;
    if (numberofElements == 0)
    {
        throw new Exception("Could not save IntensityData, does not contains any data ");
    }

    string fileName = Settings.projectName + "_" + new DateTimeOffset(DateTime.UtcNow).ToUnixTimeSeconds() + ".csv";
    try
    {
        this.createFolderIfNotExists(AppDomain.CurrentDomain.BaseDirectory + INTENSITY_PATH);
        using (StreamWriter sw = File.CreateText(AppDomain.CurrentDomain.BaseDirectory + INTENSITY_PATH + fileName))
        {
            AggregatedData firstAggregatedData = intensityData.intensityData.First();
            sw.WriteLine("#HEADER");
            sw.WriteLine("numberOfAggregatedData :" + numberofElements);
            sw.WriteLine("lengthOfOneAggregatedData :" + firstAggregatedData.aggregatedData.Length);
            sw.WriteLine("numberOfMeasurements :" + firstAggregatedData.numberOfMeasurements);
            sw.WriteLine("sampling :" + firstAggregatedData.sampling);
            sw.WriteLine("gate :" + firstAggregatedData.gate);

            // Column titles "t(ms);Y1;...;Yn": string.Join replaces the manual
            // append-then-trim-last-separator loop.
            sw.WriteLine("t(ms);" + String.Join(";", Enumerable.Range(1, numberofElements).Select(n => "Y" + n)));

            // Data rows: column 0 is the time axis, then one column per series.
            double sampling = 0;
            for (int i = 0; i < firstAggregatedData.aggregatedData.Length; i++)
            {
                int sampleIndex = i; // capture for the lambda below
                sw.WriteLine(String.Format("{0:0.000}", sampling) + ";" +
                             String.Join(";", intensityData.intensityData.Select(a => a.aggregatedData[sampleIndex].ToString())));
                // sampling/1000 per row — presumably a µs -> ms conversion; TODO confirm units.
                sampling += ((double)firstAggregatedData.sampling / 1000);
            }
            // Redundant sw.Close() removed: the using block flushes and disposes the writer.
        }
    }
    catch (Exception e)
    {
        throw new Exception("Could not save IntensityData, exception : " + e.Message);
    }
    return (fileName);
}
/*
 * The mobility view is better suited for ion identification. The relation between
 * drift time and mobility is a simple formula with a few measurement parameters:
 *   drift-tube length L (cm), gas pressure p (Pa), gas temperature T (K),
 *   drift-tube voltage U (kV), and of course the drift time t (ms).
 * The reduced ion mobility is then:
 *   K0 = (L^2 / (U * t)) * ((p * T0) / (p0 * T))
 * where p0 is standard pressure (101325 Pa) and T0 is 293.15 K.
 * (Translated from the original Slovak comment.)
 */
/// <summary>
/// Computes the reduced mobility K0 for every sample of the aggregated spectrum.
/// </summary>
/// <param name="data">Aggregated measurement whose sample count defines the output length.</param>
/// <returns>One mobility value per aggregated sample.</returns>
public double[] calculateMobilities(AggregatedData data)
{
    // BUG FIX: the output length was hard-coded to 10; the original inline comment
    // ("// data.aggregatedData.Length") already pointed at the intended size. Now the
    // method produces one mobility value per aggregated sample.
    int length = data.aggregatedData.Length;
    double[] mobilityData = new double[length];
    double Pa = 100.0 * Mobility.p; //Pa <- mbar
    double kV = 1000.0 * Mobility.U; //V <- kV
    for (int i = 0; i < length; i++)
    {
        // Drift time at the centre of sample i; the /1000.0e3 scale suggests the
        // sampling period is stored in µs — TODO confirm units.
        double t = (i + 0.5) * Settings.sampling / 1000.0e3;
        var K0 = (Mobility.L * Mobility.L / (kV * t)) * (Pa * 293.15 / (101325.0 * Mobility.T));
        mobilityData[i] = K0;
    }
    return (mobilityData);
}
/// <summary>
/// Collects one life cycle of Arduino measurements and averages them element-wise
/// (integer division) into a single AggregatedData instance.
/// </summary>
/// <returns>The averaged measurement data, trimmed to the longest measurement seen.</returns>
/// <exception cref="ArithmeticException">When the Arduino returned no measurements.</exception>
public async Task<AggregatedData> getAggregatedData()
{
    // Synchronized waiting for the Arduino to send back its measurements.
    List<Measurement> measurements = await getOneLifeCycleOfArduinoData();
    if (!measurements.Any())
    {
        throw new ArithmeticException("There were no measurements found to aggregate data.");
    }

    AggregatedData result = new AggregatedData();
    result.numberOfMeasurements = measurements.Count;

    // Element-wise sum over all measurements. Individual measurements may differ in
    // length, so track the longest one; assumes no measurement exceeds
    // Measurement.BUFFER_SIZE — TODO confirm against the Arduino protocol.
    int[] totals = new int[Measurement.BUFFER_SIZE];
    int longest = 0;
    foreach (Measurement measurement in measurements)
    {
        int length = measurement.measurement.Length;
        longest = Math.Max(longest, length);
        for (int i = 0; i < length; i++)
        {
            totals[i] += measurement.measurement[i];
        }
    }

    // Integer average per position, copied into a right-sized output array.
    result.aggregatedData = new int[longest];
    for (int i = 0; i < longest; i++)
    {
        result.aggregatedData[i] = totals[i] / result.numberOfMeasurements;
    }
    return (result);
}
// Hands the current data slice to the writer queue (if one exists), starts a fresh
// slice, and signals the file-writer thread to wake up.
void flushData()
{
    if (dataSlice != null)
    {
        var serializer = new AggregatedDataSerializer(exportFields) { Data = dataSlice };
        dataToWrite.Enqueue(serializer);
    }

    dataSlice = new AggregatedData(writeAtDataCount + 1);

    bool signalled = threadWaitHandle.Set();
    if (!signalled)
    {
        Debug.LogError("Error setting the event to wake up the file writer thread on application quit");
    }
}
/// <summary>
/// AgregateTrades should stamp the trading date and session start onto the result
/// and always produce a 24-slot volume array.
/// </summary>
public void TestMethod2()
{
    // Arrange
    var da = new DataAcquisition();
    SessionInfo si = new SessionInfo();
    si.SessionStart = TimeSpan.FromHours(15);
    DateTime now = new DateTime(2000, 5, 6, 15, 0, 0);
    List<Trade> trades = new List<Trade>();
    trades.Add(Trade.Create(now, 1));
    trades.Add(Trade.Create(now, 2));
    trades.Add(Trade.Create(now, 3));

    // Act
    AggregatedData at = da.AgregateTrades(now, si, trades);

    // Assert — MSTest's Assert.AreEqual takes (expected, actual); the original had
    // the arguments reversed in all three asserts, producing misleading failure output.
    Assert.AreEqual(now, at.TradingDate);
    Assert.AreEqual(si.SessionStart, at.SessionStart);
    Assert.AreEqual(24, at.Volumes.Length);
}
/// <summary>
/// Smoke test: persists a small fixed AggregatedData fixture through the FileService.
/// </summary>
public void saveAggregatedData()
{
    AggregatedData aggregatedTest = new AggregatedData();
    aggregatedTest.numberOfMeasurements = 5;
    // Array initializer replaces nine element-by-element assignments; values unchanged.
    aggregatedTest.aggregatedData = new int[] { 5, 6, 5, 5, 10, 11, 15, 6, 5 };

    FileService.Instance.saveAggregatedData(aggregatedTest);
}
/// <summary>
/// Verifies that a shorter sampling period yields more aggregated samples:
/// sampling 1 (fast) > sampling 5 (normal) > sampling 30 (slow).
/// </summary>
public async Task testChangingSettings()
{
    ArduinoConnectionService.Instance.start();

    // Normal sampling period.
    Settings.sampling = 5;
    ArduinoConnectionService.Instance.sendSettingsToArduino();
    AggregatedData measurementNormal = await DataManagementService.Instance.getAggregatedData();

    // Slow sampling period.
    Settings.sampling = 30;
    ArduinoConnectionService.Instance.sendSettingsToArduino();
    AggregatedData measurementSlow = await DataManagementService.Instance.getAggregatedData();

    // Fast sampling period.
    Settings.sampling = 1;
    ArduinoConnectionService.Instance.sendSettingsToArduino();
    AggregatedData measurementFast = await DataManagementService.Instance.getAggregatedData();

    int fastLength = measurementFast.aggregatedData.Length;
    int normalLength = measurementNormal.aggregatedData.Length;
    int slowLength = measurementSlow.aggregatedData.Length;

    Console.WriteLine(fastLength);
    Console.WriteLine(normalLength);
    Console.WriteLine(slowLength);

    Assert.IsTrue(fastLength > normalLength);
    Assert.IsTrue(fastLength > slowLength);
    Assert.IsTrue(normalLength > slowLength);
}
// Wall-clock timer for frame timestamps; Unity's Time.time cannot be used outside the main thread.
private Stopwatch stopwatch = new Stopwatch();

// Unity initialization: starts the clock, validates the FOVE interface reference,
// registers the eye-tracking capabilities implied by the selected export fields,
// prepares the first data slice and a non-clobbering output CSV name, then launches
// the background writer thread and the frame-sampling coroutine.
void Start()
{
    stopwatch.Start();
    if (!Stopwatch.IsHighResolution)
    {
        Debug.LogWarning("High precision stopwatch is not supported on this machine. Recorded frame times may not be highly accurate.");
    }

    // Check to make sure that the FOVE interface variable is assigned. This prevents a ton of errors
    // from filling your log if you forget to assign the interface through the inspector.
    if (fove == null)
    {
        Debug.LogWarning("Forgot to assign a Fove interface to the FOVERecorder object.");
        enabled = false;
        return;
    }

    // Build the capability mask: eye tracking is always requested; each optional
    // export field adds the capability it depends on.
    var caps = ClientCapabilities.EyeTracking;
    if (exportFields.GazeDepth) { caps |= ClientCapabilities.GazeDepth; }
    if (exportFields.PupilsRadius) { caps |= ClientCapabilities.PupilRadius; }
    if (exportFields.GazedObject) { caps |= ClientCapabilities.GazedObjectDetection; }
    if (exportFields.EyeTorsion) { caps |= ClientCapabilities.EyeTorsion; }
    if (exportFields.UserPresence) { caps |= ClientCapabilities.UserPresence; }
    if (exportFields.UserAttentionShift) { caps |= ClientCapabilities.UserAttentionShift; }
    if (exportFields.IPD) { caps |= ClientCapabilities.UserIPD; }
    if (exportFields.IOD) { caps |= ClientCapabilities.UserIOD; }
    if (exportFields.EyeballRadius) { caps |= ClientCapabilities.EyeballRadius; }
    if (exportFields.EyeShape) { caps |= ClientCapabilities.EyeShape; }
    if (exportFields.PupilShape) { caps |= ClientCapabilities.PupilShape; }
    FoveManager.RegisterCapabilities(caps);

    // We set the initial data slice capacity to the expected size + 1 so that we never waste time reallocating and
    // copying data under the hood. If the system ever requires more than a single extra entry, there is likely
    // a severe problem causing delays which should be addressed.
    dataSlice = new AggregatedData(writeAtDataCount + 1);

    // If overwrite is not set, then we need to make sure our selected file name is valid before proceeding.
    if (!Directory.Exists(OutputFolder))
    {
        Directory.CreateDirectory(OutputFolder);
    }
    {
        string testFileName = Path.Combine(OutputFolder, outputFileName + ".csv");
        if (!overwriteExistingFile)
        {
            int counter = 1;
            // Probe "name_1.csv", "name_2.csv", ... until an unused name is found.
            while (File.Exists(testFileName))
            {
                testFileName = Path.Combine(OutputFolder, outputFileName + "_" + (counter++) + ".csv"); // e.g., "results_12.csv"
            }
        }
        outputFileName = testFileName;
        Debug.Log("Writing data to " + outputFileName);
    }

    // The header row is queued first so it is written before any data slices.
    dataToWrite.Enqueue(new DataHeaderSerializer(exportFields));

    // Create the write thread to call "WriteThreadFunc", and then start it.
    writeThread = new Thread(WriteThreadFunc);
    writeThread.Start();

    StartCoroutine(JobsSpawnerCoroutine());
}
/// <summary>
/// Loads an IntensityData instance back from a CSV file previously written by
/// saveIntensityData: six header lines, one column-title line, then one
/// semicolon-separated row per sample index.
/// </summary>
/// <param name="projectPath">Path of the CSV file to parse.</param>
/// <returns>The reconstructed IntensityData.</returns>
/// <exception cref="FileLoadException">When the file cannot be opened or parsed.</exception>
public IntensityData loadIntensityData(string projectPath)
{
    IntensityData intensityData = new IntensityData();
    try
    {
        using (var streamReader = tryToOpenFile(projectPath))
        {
            string line = "";
            int countline = 0;
            int sampling = 0;
            int gate = 0;
            int numberOfMeasurements = 0;
            int lengthOfOneMeasuremet = 0;
            int numberOfAggregatedData = 0;
            List<string[]> saveMeasurements = new List<string[]>();

            // Expected layout (mirrors saveIntensityData):
            //   line 0: "#HEADER" marker (skipped)
            //   line 1: numberOfAggregatedData   line 2: lengthOfOneAggregatedData
            //   line 3: numberOfMeasurements     line 4: sampling   line 5: gate
            //   line 6: column titles (skipped)  line 7+: data rows.
            while ((line = streamReader.ReadLine()) != null)
            {
                if (countline == 1)
                {
                    numberOfAggregatedData = Int32.Parse(line.Split(':')[1]);
                }
                else if (countline == 2)
                {
                    lengthOfOneMeasuremet = Int32.Parse(line.Split(':')[1]);
                }
                else if (countline == 3)
                {
                    numberOfMeasurements = Int32.Parse(line.Split(':')[1]);
                }
                else if (countline == 4)
                {
                    sampling = Int32.Parse(line.Split(':')[1]);
                }
                else if (countline == 5)
                {
                    gate = Int32.Parse(line.Split(':')[1]);
                }
                else if (countline == 0 || countline == 6)
                {
                    // Marker / title lines carry no data: advance the counter here and
                    // skip the shared increment at the bottom of the loop.
                    countline++;
                    continue;
                }
                else
                {
                    saveMeasurements.Add(line.Split(';'));
                }
                countline++;
            }

            // Pre-create one AggregatedData per series; all share the header's
            // sampling/gate/measurement-count and a zeroed sample array.
            for (int i = 0; i < numberOfAggregatedData; i++)
            {
                AggregatedData aggregated = new AggregatedData();
                aggregated.sampling = sampling;
                aggregated.gate = gate;
                aggregated.numberOfMeasurements = numberOfMeasurements;
                aggregated.aggregatedData = new int[lengthOfOneMeasuremet];
                intensityData.intensityData.Add(aggregated);
            }

            // Copy the CSV rows into the aggregated series. Column 0 of each row is
            // the time axis, so data columns start at index i + 1.
            int positionCounter = 0;
            foreach (string[] measurement in saveMeasurements)
            {
                for (int i = 0; i < numberOfAggregatedData; i++)
                {
                    intensityData.intensityData[i].aggregatedData[positionCounter] = Int32.Parse(measurement[i + 1]);
                }
                positionCounter++;
            }
        }
    }
    catch (Exception e)
    {
        throw new FileLoadException("Could not parse file content into IntensityData, got error : " + e.Message);
    }
    return (intensityData);
}
/// <summary>
/// Reads aggregated sensor data (SUM for "EnergyUsages", AVG otherwise) for a device,
/// grouped hourly, daily or monthly over the trailing day, month or year respectively.
/// </summary>
/// <param name="table">Source table name. NOTE(review): interpolated into the SQL text —
/// callers must only pass trusted, hard-coded table names, never user input.</param>
/// <param name="deviceId">Device to query.</param>
/// <param name="now">Reference time at which the trailing window ends.</param>
/// <param name="groupType">Grouping granularity (default hourly).</param>
/// <returns>One AggregatedData per group in the window.</returns>
public async Task<IEnumerable<AggregatedData>> GetAggregatedData(string table, int deviceId, DateTime now, DateGrouping groupType = DateGrouping.Hourly)
{
    var result = new List<AggregatedData>();
    string query, minDate, maxDate;
    var aggregator = table == "EnergyUsages" ? "SUM" : "AVG";

    // Pick the query template and the [minDate, maxDate) window for the grouping.
    switch (groupType)
    {
        case DateGrouping.Monthly:
            query = string.Format(_monthlyQuery, aggregator, table);
            minDate = (new DateTime(now.Year, now.Month, 1)).AddYears(-1).ToString("yyyy-MM-dd");
            maxDate = (new DateTime(now.Year, now.Month, 1)).ToString("yyyy-MM-dd");
            break;
        case DateGrouping.Daily:
            query = string.Format(_dailyQuery, aggregator, table);
            minDate = (new DateTime(now.Year, now.Month, now.Day)).AddMonths(-1).ToString("yyyy-MM-dd");
            maxDate = (new DateTime(now.Year, now.Month, now.Day)).ToString("yyyy-MM-dd");
            break;
        default:
            query = string.Format(_hourlyQuery, aggregator, table);
            minDate = (new DateTime(now.Year, now.Month, now.Day, now.Hour, 0, 0)).AddDays(-1).ToString("yyyy-MM-dd HH:mm:ss");
            maxDate = (new DateTime(now.Year, now.Month, now.Day, now.Hour, 0, 0)).ToString("yyyy-MM-dd HH:mm:ss");
            break;
    }

    await _connection.OpenAsync();
    try
    {
        using (var cmd = _connection.CreateCommand())
        {
            cmd.CommandText = query;
            // All values are parameterized; only the trusted table name is formatted in.
            cmd.Parameters.AddWithValue("@devId", deviceId);
            cmd.Parameters.AddWithValue("@mindate", minDate);
            cmd.Parameters.AddWithValue("@maxdate", maxDate);

            // Redundant "reader != null" guard and reader.Close() removed:
            // ExecuteReaderAsync never returns null and the using block disposes the reader.
            using (var reader = await cmd.ExecuteReaderAsync())
            {
                while (await reader.ReadAsync())
                {
                    var datapoint = new AggregatedData();
                    datapoint.NumSamples = (long)reader["numSamples"];
                    datapoint.AvgValue = (double)reader["average"];
                    datapoint.DeviceId = deviceId;
                    datapoint.GroupedType = groupType;
                    datapoint.Type = table;
                    switch (groupType)
                    {
                        case DateGrouping.Monthly:
                            datapoint.GroupedDate = new DateTime((int)reader["year"], (int)reader["month"], 1);
                            break;
                        case DateGrouping.Daily:
                            datapoint.GroupedDate = (DateTime)reader["date"];
                            break;
                        default:
                            datapoint.GroupedDate = ((DateTime)reader["date"]).AddHours((int)reader["hour"]);
                            break;
                    }
                    result.Add(datapoint);
                }
            }
        }
    }
    finally
    {
        // BUG FIX: previously the connection stayed open when the query threw;
        // the finally block guarantees it is closed on every path.
        await _connection.CloseAsync();
    }
    return result;
}