// Extracts the "Wsymb2" (weather symbol) parameter from the time serie and
// returns it as an integer category code. Yields 0 when the parameter is
// absent (decimal default from FirstOrDefault).
private int GetWeatherCategory(TimeSerie index)
{
    var rawSymbol = index.Parameters
        .Where(p => p.Name == "Wsymb2")
        .Select(ws => ws.Values[0])
        .FirstOrDefault();
    return Decimal.ToInt32(rawSymbol);
}
// Two float growth rates at float.MaxValue compound past the representable
// range, so the first cumulative percentage must already be +infinity.
public void AccumulateCompoundInterest_IEnumerableToIEnumberable_FloatProperty_SourceCumulativePercentageToOverflow_InfinityReturned()
{
    var referenceDate = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<float>> series = new[]
    {
        new TimeSerie<float>(referenceDate, 1) { GrowthRate = float.MaxValue },
        new TimeSerie<float>(referenceDate, 1) { GrowthRate = float.MaxValue }
    };

    var accumulated = series
        .OrderBy(s => s.ReferenceDate)
        .AccumulateCompoundInterest(
            s => s.GrowthRate,
            (s, cumulative) => new { Date = s.ReferenceDate, CumulativePercentage = cumulative })
        .ToList();

    float.IsPositiveInfinity(accumulated[0].CumulativePercentage).Should().BeTrue();
}
// Reads the "wd" (wind direction) parameter and truncates it to whole
// degrees; returns 0 when the parameter is missing.
private int GetWindDegree(TimeSerie index)
{
    var degrees = index.Parameters
        .Where(p => p.Name == "wd")
        .Select(wd => wd.Values[0])
        .FirstOrDefault();
    return (int)degrees;
}
// Returns the "t" (temperature) parameter value, or 0 when absent.
private decimal GetTemperature(TimeSerie index)
{
    return index.Parameters
        .Where(p => p.Name == "t")
        .Select(t => t.Values[0])
        .FirstOrDefault();
}
// Returns the "ws" (wind speed) parameter value, or 0 when absent.
private decimal GetWind(TimeSerie index)
{
    return index.Parameters
        .Where(p => p.Name == "ws")
        .Select(ws => ws.Values[0])
        .FirstOrDefault();
}
// Returns the "pmedian" (precipitation median) parameter value, or 0 when absent.
private decimal GetPrecipitationMedian(TimeSerie index)
{
    return index.Parameters
        .Where(p => p.Name == "pmedian")
        .Select(pm => pm.Values[0])
        .FirstOrDefault();
}
// DELETE endpoint: removes the time serie with the given id and all of its
// child values. Returns 404 when the id is unknown, otherwise 200 with the
// deleted entity.
public async Task<IHttpActionResult> Delete(int Id)
{
    TimeSerie timeSerie = await _db.TimeSeries.FindAsync(Id);
    if (timeSerie == null)
    {
        return NotFound();
    }

    // Snapshot the child collection first: removing entities while
    // enumerating the live navigation property would mutate it mid-iteration.
    List<TimeSerieValue> timeSerieValues = new List<TimeSerieValue>();
    timeSerieValues.AddRange(timeSerie.TimeSerieValues);
    foreach (TimeSerieValue tsv in timeSerieValues)
    {
        _db.TimeSerieValues.Remove(tsv);
    }
    _db.TimeSeries.Remove(timeSerie);

    // FIX: the original wrapped this in `catch (Exception exp) { throw; }`,
    // a no-op catch with an unused variable; exceptions now propagate directly
    // with identical observable behaviour and no dead code.
    await _db.SaveChangesAsync();

    return Ok(timeSerie);
}
// Unlike float, decimal arithmetic does not saturate to infinity: compounding
// two decimal.MaxValue growth rates must raise OverflowException.
public void AccumulateCompoundInterest_IEnumerableToIEnumberable_DecimalProperty_SourceCumulativePercentageToOverflow_OverflowExceptionThrown()
{
    var referenceDate = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<decimal>> series = new[]
    {
        new TimeSerie<decimal>(referenceDate, 1M) { GrowthRate = decimal.MaxValue },
        new TimeSerie<decimal>(referenceDate, 1M) { GrowthRate = decimal.MaxValue }
    };

    Action accumulate = () =>
    {
        series
            .OrderBy(s => s.ReferenceDate)
            .AccumulateCompoundInterest(
                s => s.GrowthRate,
                (s, cumulative) => new { Date = s.ReferenceDate, CumulativePercentage = cumulative })
            .ToList();
    };

    accumulate.ShouldThrow<OverflowException>();
}
// Round-trip test: serializes a 20 000-point TimeSerie through
// CloudEntity -> FatEntity -> CloudEntity -> FatEntity and asserts that
// keys, every time/value pair, and the raw serialized bytes survive both
// directions unchanged. Left byte-identical: the assertion sequence pins
// exact serializer behaviour and is order-dependent.
public void Convert() { var timevalues = new TimeValue[20000]; for (int i = 0; i < timevalues.Length; i++) { timevalues[i] = new TimeValue { Time = new DateTime(2001, 1, 1).AddMinutes(i), Value = i }; } var serie = new TimeSerie { TimeValues = timevalues }; var cloudEntity = new CloudEntity <TimeSerie> { PartitionKey = "part", RowKey = "key", Value = serie }; var fatEntity = FatEntity.Convert(cloudEntity, _serializer); var cloudEntity2 = FatEntity.Convert <TimeSerie>(fatEntity, _serializer, null); var fatEntity2 = FatEntity.Convert(cloudEntity2, _serializer); Assert.IsNotNull(cloudEntity2); Assert.IsNotNull(fatEntity2); Assert.AreEqual(cloudEntity.PartitionKey, fatEntity.PartitionKey); Assert.AreEqual(cloudEntity.RowKey, fatEntity.RowKey); Assert.AreEqual(cloudEntity.PartitionKey, fatEntity2.PartitionKey); Assert.AreEqual(cloudEntity.RowKey, fatEntity2.RowKey); Assert.IsNotNull(cloudEntity2.Value); Assert.AreEqual(cloudEntity.Value.TimeValues.Length, cloudEntity2.Value.TimeValues.Length); for (int i = 0; i < timevalues.Length; i++) { Assert.AreEqual(cloudEntity.Value.TimeValues[i].Time, cloudEntity2.Value.TimeValues[i].Time); Assert.AreEqual(cloudEntity.Value.TimeValues[i].Value, cloudEntity2.Value.TimeValues[i].Value); } var data1 = fatEntity.GetData(); var data2 = fatEntity2.GetData(); Assert.AreEqual(data1.Length, data2.Length); for (int i = 0; i < data2.Length; i++) { Assert.AreEqual(data1[i], data2[i]); } }
// PUT endpoint: validates the model, checks the route id matches the body,
// then persists the detached TimeSerie graph via GraphDiff's UpdateGraph with
// explicit mappings for associated (Currency, Unit) and owned (Formulae,
// CommodityHedges, TimeSerieValues) relations. Returns 400 on invalid input,
// 404 on a concurrency miss for a deleted row, otherwise Updated(timeSerie).
// Left byte-identical: the GraphDiff mapping is order/shape sensitive.
// NOTE(review): the `errors` list built in the DbEntityValidationException
// handler is never logged or attached to the rethrown exception — the
// formatted messages are discarded; presumably it was meant for a logger.
// Confirm and either log it or delete the dead formatting loop.
public async Task <IHttpActionResult> Put(int Id, [FromBody] TimeSerie timeSerie) { if (!ModelState.IsValid) { return(BadRequest(ModelState)); } if (Id != timeSerie.Id) { return(BadRequest()); } try { _db.UpdateGraph(timeSerie, map => map .AssociatedEntity(ts => ts.Currency) .AssociatedEntity(ts => ts.Unit) .OwnedCollection(ts => ts.Formulae, with => with.OwnedEntity(f => f.TimeSerie)) .OwnedCollection(ts => ts.CommodityHedges, with => with.OwnedEntity(ch => ch.TimeSerie)) .OwnedCollection(ts => ts.TimeSerieValues, with => with.OwnedEntity(tsv => tsv.TimeSerie)) ); await _db.SaveChangesAsync(); } catch (DbEntityValidationException e) { List <string> errors = new List <string>(); foreach (var eve in e.EntityValidationErrors) { errors.Add(string.Format("Entity of type \"{0}\" in state \"{1}\" has the following validation errors:", eve.Entry.Entity.GetType().Name, eve.Entry.State)); foreach (var ve in eve.ValidationErrors) { errors.Add(string.Format("- Property: \"{0}\", Error: \"{1}\"", ve.PropertyName, ve.ErrorMessage)); } } throw; } catch (DbUpdateConcurrencyException) { if (!TimeSerieExists(Id)) { return(NotFound()); } else { throw; } } return(Updated(timeSerie)); }
// Standard deviation of two identical float.MaxValue samples must not
// overflow to +infinity (the deviation of equal values is zero).
public void StandardDeviationOfFloat_SourceCumulativePercentageToOverflow_NotInfinityReturned()
{
    var date = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<float>> samples = new[]
    {
        new TimeSerie<float>(date, float.MaxValue),
        new TimeSerie<float>(date, float.MaxValue)
    };

    var deviation = samples.OrderBy(s => s.ReferenceDate).StandardDeviation(s => s.Value);

    float.IsPositiveInfinity(deviation).Should().BeFalse();
}
// Variance of two identical double.MaxValue samples must not overflow to
// +infinity (the variance of equal values is zero).
public void VarianceOfDouble_SourceCumulativePercentageToOverflow_NotInfinityReturned()
{
    var date = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<double>> samples = new[]
    {
        new TimeSerie<double>(date, double.MaxValue),
        new TimeSerie<double>(date, double.MaxValue)
    };

    var variance = samples.OrderBy(s => s.ReferenceDate).Variance(s => s.Value);

    double.IsPositiveInfinity(variance).Should().BeFalse();
}
// Round-trip test (duplicate of the other Convert test in this repo):
// serializes a 20 000-point TimeSerie CloudEntity -> FatEntity ->
// CloudEntity -> FatEntity and asserts keys, every time/value pair, and the
// raw serialized byte payload are preserved in both directions. Left
// byte-identical: the long exact-assertion sequence is order-dependent.
public void Convert() { var timevalues = new TimeValue[20000]; for(int i = 0; i < timevalues.Length; i++) { timevalues[i] = new TimeValue {Time = new DateTime(2001, 1, 1).AddMinutes(i), Value = i}; } var serie = new TimeSerie {TimeValues = timevalues}; var cloudEntity = new CloudEntity<TimeSerie> { PartitionKey = "part", RowKey = "key", Value = serie }; var fatEntity = FatEntity.Convert(cloudEntity, _serializer); var cloudEntity2 = FatEntity.Convert<TimeSerie>(fatEntity, _serializer, null); var fatEntity2 = FatEntity.Convert(cloudEntity2, _serializer); Assert.IsNotNull(cloudEntity2); Assert.IsNotNull(fatEntity2); Assert.AreEqual(cloudEntity.PartitionKey, fatEntity.PartitionKey); Assert.AreEqual(cloudEntity.RowKey, fatEntity.RowKey); Assert.AreEqual(cloudEntity.PartitionKey, fatEntity2.PartitionKey); Assert.AreEqual(cloudEntity.RowKey, fatEntity2.RowKey); Assert.IsNotNull(cloudEntity2.Value); Assert.AreEqual(cloudEntity.Value.TimeValues.Length, cloudEntity2.Value.TimeValues.Length); for(int i = 0; i < timevalues.Length; i++) { Assert.AreEqual(cloudEntity.Value.TimeValues[i].Time, cloudEntity2.Value.TimeValues[i].Time); Assert.AreEqual(cloudEntity.Value.TimeValues[i].Value, cloudEntity2.Value.TimeValues[i].Value); } var data1 = fatEntity.GetData(); var data2 = fatEntity2.GetData(); Assert.AreEqual(data1.Length, data2.Length); for(int i = 0; i < data2.Length; i++) { Assert.AreEqual(data1[i], data2[i]); } }
// Decimal arithmetic does not saturate: computing the standard deviation of
// two decimal.MaxValue samples must raise OverflowException.
public void StandardDeviationOfDecimal_SourceCumulativePercentageToOverflow_OverflowExceptionThrown()
{
    var date = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<decimal>> samples = new[]
    {
        new TimeSerie<decimal>(date, decimal.MaxValue),
        new TimeSerie<decimal>(date, decimal.MaxValue)
    };

    Action computeDeviation = () =>
    {
        samples.OrderBy(s => s.ReferenceDate).StandardDeviation(s => s.Value);
    };

    computeDeviation.ShouldThrow<OverflowException>();
}
// Builds a history navigator from the in-memory tick source: all records
// from (nearest before) historyStartTime up to the source's current time.
public virtual Task<TimeSerie<ITradeBar>> GetHistoryNavigator(DateTime historyStartTime)
{
    //todo fetch history from database
    var history = new TimeSerie<ITradeBar>();

    // Consolidate the currently available data.
    using (var cursor = new TimeSerieNavigator<ITradeBar>(this.DataSource.Ticks))
    {
        // Copy every record from the seek point up to the present.
        cursor.SeekNearestBefore(historyStartTime);
        while (cursor.MoveNext() && cursor.Time <= this.DataSource.Ticks.Time)
        {
            history.AddRecord(cursor.Current);
        }
    }

    return Task.FromResult(history);
}
// Compounding two double.MaxValue growth rates overflows to +infinity.
// NOTE(review): the method name says "NotInfinityReturned" but the assertion
// expects infinity (BeTrue) — the name looks stale; confirm and rename.
public void AccumulateCompoundInterest_IEnumerableToScalar_DoubleProperty_SourceCumulativePercentageToOverflow_NotInfinityReturned()
{
    var date = new DateTime(2012, 01, 01);
    IEnumerable<TimeSerie<double>> series = new[]
    {
        new TimeSerie<double>(date, 1) { GrowthRate = double.MaxValue },
        new TimeSerie<double>(date, 1) { GrowthRate = double.MaxValue }
    };

    var cumulative = series
        .OrderBy(s => s.ReferenceDate)
        .AccumulateCompoundInterest(s => s.GrowthRate);

    double.IsPositiveInfinity(cumulative).Should().BeTrue();
}
// Fetches 1-minute history for this symbol from the local history database
// (after AssureData backfills the range) and copies every tick into a fresh
// TimeSerie navigator. Rejects future start times and any resolution other
// than one minute. A semaphore serializes concurrent access to the history
// db for this symbol. Left byte-identical: the await/semaphore/try-finally
// ordering is easy to break in a rewrite.
// NOTE(review): the catch logs only ex.Message — the stack trace and inner
// exceptions are lost, and the caller receives a silently partial/empty
// history; consider logging the full exception object.
public async Task <TimeSerie <ITradeBar> > GetHistoryNavigator(TimeSpan resolution, DateTime historyStartTime) { if (historyStartTime > this.Time) { throw new InvalidOperationException("Requested future quotes"); } if (resolution != TimeSpan.FromMinutes(1)) { throw new NotSupportedException("Binance symbol history only supports resolution 1 minute"); } var history = new TimeSerie <ITradeBar>(); var sem = GetSemaphore(); await sem.WaitAsync(); try { //--- get the data from db await HistoryDb.AssureData(HistoryId, historyStartTime, this.Time); ISymbolHistory symbolHistory = HistoryDb.GetSymbolHistory(HistoryId, historyStartTime, DateTime.MaxValue); //HistoryDb.CloseFile(this.Market, Symbol.Key, TimeSpan.FromSeconds(60)); while (symbolHistory.Ticks.MoveNext()) { history.AddRecord(symbolHistory.Ticks.Current, true); } } catch (Exception ex) { Logger.Error("Exception during SymbolFeed.GetHistoryNavigator: {0}", ex.Message); } finally { sem.Release(); } return(history); }
// A serie carrying 150 distinct tags ("t0".."t149") must pass validation.
public void IsValidTimeSerie6()
{
    var manyTags = new string[150];
    for (int n = 0; n < manyTags.Length; n++)
    {
        manyTags[n] = "t" + n;
    }

    var taggedSerie = new TimeSerie { Name = "validname", Tags = manyTags };
    taggedSerie.Validate();
}
// Validation of a serie whose event carries the tag "invalid-tag"
// (hyphenated, like the other invalid-identifier cases in this suite).
public void IsValidTimeSerie4()
{
    var stamp = new DateTime(2001, 1, 1);
    var serieWithBadEventTag = new TimeSerie
    {
        Name = "validname",
        Events = new[]
        {
            new EventValue { KnownSince = stamp, Time = stamp, Tags = new[] { "invalid-tag" } }
        },
    };
    serieWithBadEventTag.Validate();
}
// Validation of a serie with a hyphenated (invalid-identifier) tag.
public void IsValidTimeSerie3()
{
    var serieWithBadTag = new TimeSerie
    {
        Name = "validname",
        Tags = new[] { "invalid-tag" },
    };
    serieWithBadTag.Validate();
}
// Validation of a serie with a null name.
public void IsValidTimeSerie2Bis()
{
    var unnamedSerie = new TimeSerie { Name = null, };
    unnamedSerie.Validate();
}
// Validation of a serie whose name contains a hyphen.
public void IsValidTimeSerie2()
{
    var serieWithBadName = new TimeSerie { Name = "invalid-name", };
    serieWithBadName.Validate();
}
// Fully populated, fully valid serie (name, tag, event, value) — Validate
// must accept it without complaint.
public void IsValidTimeSerie1()
{
    var stamp = new DateTime(2001, 1, 1);
    var fullyPopulatedSerie = new TimeSerie
    {
        Name = "validname",
        Tags = new[] { "validtag" },
        Events = new[]
        {
            new EventValue { KnownSince = stamp, Time = stamp, Tags = new[] { "validtag" } }
        },
        Values = new[] { new TimeValue { Time = stamp, Value = 13 } }
    };
    fullyPopulatedSerie.Validate();
}
// Empty (but non-null) tag/event/value collections must validate cleanly.
public void IsValidTimeSerie0Bis()
{
    var emptyCollectionsSerie = new TimeSerie
    {
        Name = "validname",
        Tags = new string[0],
        Events = new EventValue[0],
        Values = new TimeValue[0],
    };
    emptyCollectionsSerie.Validate();
}
// Builds `count` minimal series named "t0".."t{count-1}", each with a single
// value of 1.0 at 2001-01-01.
TimeSerie[] GetTimeSeries(int count)
{
    var series = new TimeSerie[count];
    for (int n = 0; n < series.Length; n++)
    {
        var singleValue = new TimeValue { Time = new DateTime(2001, 1, 1), Value = 1.0 };
        series[n] = new TimeSerie
        {
            Name = "t" + n,
            Values = new[] { singleValue }
        };
    }
    return series;
}
// Validation of a serie whose tag list contains the same tag twice.
public void IsValidTimeSerie7()
{
    var serieWithDuplicateTags = new TimeSerie
    {
        Name = "validname",
        Tags = new[] { "duplicatetag", "duplicatetag" }
    };
    serieWithDuplicateTags.Validate();
}
// Parses the provider's CSV file into TimeSerie records, one per time-serie
// type column per row. Every row is security-checked against the share-class
// list for the file's creation date; if ANY row fails the check the whole
// import is rejected and null is returned. IO/parse exceptions are logged and
// the rows accumulated so far are returned (best-effort, as before).
public static List<TimeSerie> ProcessData(Provider provider, string csv_file_path, Logger logger)
{
    var records = new List<TimeSerie>();
    // True once any row fails the security check; checked after the loop.
    bool securityFailed = false;
    try
    {
        var queryDate = File.GetCreationTime(csv_file_path);
        // Test date
        //var queryDate = new DateTime(2020, 07, 23);
        // Retrieve shareclass sql table by date of today to perform security checks
        var actualShareClassList = SqlService.GetShareClassList(queryDate, logger);
        using (var streamReader = new StreamReader(csv_file_path))
        using (CsvReader reader = new CsvReader(streamReader, CultureInfo.InvariantCulture))
        {
            reader.Configuration.Delimiter = GlobalConstants.RequiredDelimiter;
            reader.Read();
            reader.ReadHeader();
            while (reader.Read())
            {
                var headers = provider.Headers.ToArray();
                //Expected result coming from official file - isin, currencyShare, dateReport
                ExpectedResultDto dto = new ExpectedResultDto();
                dto.ProviderId = provider.Id;
                dto.ProviderName = provider.Title;
                // Date column is a compact yyyyMMdd string.
                var dateField = reader.GetField(headers[IndexNavDate].Name);
                dto.DateReport = new DateTime(
                    Convert.ToInt32(dateField.Substring(0, 4)),  // Year
                    Convert.ToInt32(dateField.Substring(4, 2)),  // Month
                    Convert.ToInt32(dateField.Substring(6, 2))); // Day
                dto.CurrencyShare = reader.GetField(headers[IndexCurrency].Name);
                dto.Isin = reader.GetField(headers[IndexIsin].Name);

                // Map time series types between xml and source
                var types = new List<TimeSerieType>();
                for (int i = CountHeadersToSkip; i < headers.Length; i++)
                {
                    types.Add(new TimeSerieType
                    {
                        Id = headers[i].Id_TS,
                        Value = reader.GetField<decimal>(headers[i].Name),
                    });
                }

                // BUG FIX: the original set `records = null; continue;` on a
                // security failure, so the next row that PASSED the check
                // called records.Add(...) on null and threw
                // NullReferenceException. It also based the final null-return
                // decision on the LAST row's check only. Track the failure in
                // a sticky flag and keep the list intact instead.
                if (!Controller.SecurityCheck(actualShareClassList, dto, logger))
                {
                    securityFailed = true;
                    continue;
                }

                // Different time series type creates new entry in DB
                foreach (var type in types)
                {
                    records.Add(new TimeSerie
                    {
                        date_ts = dto.DateReport,
                        id_ts = type.Id,
                        value_ts = type.Value,
                        currency_ts = dto.CurrencyShare,
                        provider_ts = dto.ProviderId,
                        id_shareclass = dto.Id,
                        file_name = Path.GetFileName(csv_file_path),
                    });
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Parse/IO failure: log and fall through with whatever was read
        // (matches the original's forgiving behaviour on exceptions).
        logger.Error(ex.Message);
    }

    // Reject the whole file if any row failed the security check.
    return securityFailed ? null : records;
}
// Downloads daily quotes from Alpha Vantage (FX endpoint for USD/GBP,
// otherwise the time-serie function mapped from `timeSerie`) and returns up
// to `resultsCount` entries of date -> rounded closing price.
// Throws InvalidOperationException when the response has no "Time Series" node.
public Dictionary<string, double> ConvertStockDataToHighChartJson(TimeSerie timeSerie, string symbolName, int resultsCount)
{
    Dictionary<string, double> jsonForStockComponent = new Dictionary<string, double>();
    string timeSerieValue = TimeSeries.FirstOrDefault(o => o.Key == timeSerie).Value;

    // SECURITY(review): the API key is hard-coded in source; move it to
    // configuration / a secret store.
    // FIX: the two branches differed only in the URL, so build the URL once
    // and share a single WebClient block instead of duplicating it.
    string requestUrl = (symbolName == "USD" || symbolName == "GBP")
        ? $"https://www.alphavantage.co/query?function=FX_DAILY&from_symbol=EUR&to_symbol={symbolName}&apikey=YFBDJX9GUMLZZ2YP"
        : $"https://www.alphavantage.co/query?function={timeSerieValue}&symbol={symbolName}&apikey=YFBDJX9GUMLZZ2YP";

    string stockJson;
    using (WebClient wc = new WebClient())
    {
        stockJson = wc.DownloadString(requestUrl);
    }

    // Locate the property whose name contains "Time Series" (exact name
    // varies per endpoint).
    dynamic jsonObj = JObject.Parse(stockJson);
    dynamic stockData = null;
    foreach (JProperty prop in jsonObj)
    {
        if (prop.Name.Contains("Time Series"))
        {
            stockData = prop.Value;
        }
    }
    if (stockData == null)
    {
        // FIX: throw a specific exception type instead of bare Exception;
        // existing `catch (Exception)` callers still catch it.
        throw new InvalidOperationException("error requesting time series");
    }

    int count = 0;
    foreach (JProperty serie in stockData)
    {
        if (count >= resultsCount)
        {
            break;
        }
        string date = serie.Name;
        double stockValue = 0;
        foreach (JProperty value in serie.Value)
        {
            if (value.Name.Contains("close"))
            {
                stockValue = Math.Round((double)value.Value, 2);
            }
        }
        // FIX: indexer instead of Add — a duplicate date in the feed would
        // otherwise throw ArgumentException and abort the whole conversion.
        jsonForStockComponent[date] = stockValue;
        count++;
    }
    return jsonForStockComponent;
}
// Flattens a TimeSerie tree: recursively follows the non-null `A` references
// found in each node's ListData and yields every reachable serie.
public static IEnumerable<TimeSerie> GetDeepTimeSeries(this TimeSerie serie)
{
    return serie.RecursiveSelect<TimeSerie>(
        node => node.ListData
            .Select(entry => entry.A)
            .Where(child => child != null));
}
// Pruning a serie with no Values must return a non-null serie whose Values
// is still null (nothing to prune).
public void PruneIntermediateZeroes_WorksWithEmptyValues()
{
    var emptySerie = new TimeSerie();

    var pruned = ForecastingClient.PruneIntermediateZeroes(emptySerie);

    Assert.IsNotNull(pruned, "#A00");
    Assert.IsNull(pruned.Values, "#A01");
}
// Three consecutive zero values: the middle zero is pruned, the first and
// last survive with their original timestamps.
public void PruneIntermediateZeroes_WorksWithMinySeries()
{
    var serie = new TimeSerie
    {
        Values = new[]
        {
            new TimeValue { Time = new DateTime(2001, 1, 1), Value = 0.0 },
            new TimeValue { Time = new DateTime(2001, 1, 2), Value = 0.0 },
            new TimeValue { Time = new DateTime(2001, 1, 3), Value = 0.0 }
        }
    };

    var pruned = ForecastingClient.PruneIntermediateZeroes(serie);

    Assert.IsNotNull(pruned, "#A00");
    Assert.IsNotNull(pruned.Values, "#A01");
    Assert.AreEqual(2, pruned.Values.Length, "#A02");
    Assert.AreEqual(serie.Values[0].Time, pruned.Values[0].Time, "#A03");
    Assert.AreEqual(serie.Values[2].Time, pruned.Values[1].Time, "#A04");
}
// A two-value serie has no "intermediate" entries, so both zeroes survive.
public void PruneIntermediateZeroes_WorksWithSmallSeries()
{
    var serie = new TimeSerie
    {
        Values = new[]
        {
            new TimeValue { Time = DateTime.UtcNow, Value = 0.0 },
            new TimeValue { Time = DateTime.UtcNow.AddHours(1), Value = 0.0 }
        }
    };

    var pruned = ForecastingClient.PruneIntermediateZeroes(serie);

    Assert.IsNotNull(pruned, "#A00");
    Assert.IsNotNull(pruned.Values, "#A01");
    Assert.AreEqual(2, pruned.Values.Length, "#A02");
}
// Builds `count` test series "t0".."t{count-1}", each with 20 values, one
// tag, and one event dated i days after 2001-01-01; three out of every four
// series additionally get explicit Lambda/Tau smoothing parameters.
private static TimeSerie[] GetTimeSeries(int count)
{
    var series = new TimeSerie[count];
    for (int n = 0; n < series.Length; n++)
    {
        var eventDay = new DateTime(2001, 1, 1).AddDays(n);
        series[n] = new TimeSerie
        {
            Name = "t" + n,
            Values = GetTimeValues(20, n, 0.3 * n),
            Tags = new[] { "T" + n },
            Events = new[]
            {
                new EventValue { Tags = new[] { "foo" + n }, KnownSince = eventDay, Time = eventDay },
            }
        };
        if (n % 4 > 0)
        {
            series[n].Lambda = 14f;
            series[n].Tau = 0.95f;
        }
    }
    return series;
}
// The minimal valid serie: just a well-formed name, nothing else.
public void IsValidTimeSerie0()
{
    var minimalSerie = new TimeSerie { Name = "validname", };
    minimalSerie.Validate();
}