/// <summary>
/// Gets the diagnostics files for reporting to the HMS Cloud service.
/// </summary>
/// <param name="timer">The timer (unused; required by the timer-callback signature).</param>
/// <param name="obj">The state object (unused).</param>
private void GetDiagnosticsFiles(Timer timer, object obj)
{
    var diagnosticData = DataAggregator.GetCasinoDiagnosticData();
    if (diagnosticData == null || 0 >= diagnosticData.Count)
    {
        return;
    }

    try
    {
        DataExporter.SendCasinoDiagnosticData(diagnosticData);
    }
    catch (Exception ex)
    {
        // Fixed: the message previously referenced "DataReporter", but the call above
        // goes through DataExporter - point the operator at the right component.
        Logger.Warn(
            $"An unexpected error occurred while calling DataExporter.SendCasinoDiagnosticData. Check WCF HMS configuration: [{ex.Message}]");

        // Walk the inner-exception chain so the root cause is not lost.
        var innerEx = ex.InnerException;
        while (null != innerEx)
        {
            Logger.Warn($"[{innerEx.Message}]");
            innerEx = innerEx.InnerException;
        }

        Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");
    }
}
/// <summary>
/// Verifies that concurrent MergeData calls from multiple threads
/// produce the expected merged dictionary.
/// </summary>
public void TestMultithreadedMergeWorksProperly()
{
    var aggregator = new DataAggregator();
    var workers = new Thread[ThreadsCount];

    for (int index = 0; index < workers.Length; index++)
    {
        // Capture the loop variable so each thread merges its own dictionary.
        int capturedIndex = index;
        workers[index] = new Thread(() =>
        {
            for (int iteration = 0; iteration < MergeOperationsCount; iteration++)
            {
                aggregator.MergeData(multiThreadedDictionaries[capturedIndex]);
            }
        });
    }

    foreach (var worker in workers)
    {
        worker.Start();
    }

    foreach (var worker in workers)
    {
        worker.Join();
    }

    IDictionary<string, long> actualData = aggregator.GetData();
    TestHelpers.AssertEqualsDictionaries(expectedMultiThreadedResult, actualData);
}
static void Main(string[] args)
{
    // Wire up the processing pipeline components.
    var dataLoader = new CsvDataLoader();
    var dateProcessor = new DateProcessor();
    var dataAggregator = new DataAggregator();
    var dataExporter = new CsvDataExporter();
    var excelReader = new ExcelDataLoader();
    var csvFinancialDataLoader = new CsvFinancialDataLoader();

    var desktopFolder = Environment.GetFolderPath(Environment.SpecialFolder.DesktopDirectory);
    var result = dataLoader.LoadDataFromFile(desktopFolder + "/Diplomovka_ESF/transformed_10000.csv");

    // Split each loaded record into one item per calendar year.
    var parsedBusinessData = new List<BusinessDataItem>();
    foreach (var res in result)
    {
        parsedBusinessData.AddRange(dateProcessor.SplitBusinessDataByYear(res));
    }

    var allLoadedIcos = parsedBusinessData
        .Select(businessData => businessData.ICO)
        .Distinct()
        .OrderBy(ico => ico)
        .ToList();
    var allIcosWithFinancialData = excelReader.LoadFinancialDataOfCompany(allLoadedIcos, desktopFolder + "/Data_DP/financial_data.xlsx");
    Console.WriteLine($"AllLoadedIcos size {allLoadedIcos.Count} vs. icos with financialData size {allIcosWithFinancialData.Count}");

    List<CompanyOutputData> ownersInfo = dataAggregator.AggregateDataByCompany(parsedBusinessData);
    dataExporter.ExportDataToCsv(ownersInfo);
    Console.WriteLine("Transformation finished");
}
/// <summary>
/// Builds month, area and code aggregators and returns the aggregated incident data.
/// </summary>
public object RunGetAggregatedDataTest()
{
    var monthAggregator = new MonthAggregator<IncidentData>
    {
        Member = typeof(IncidentData).GetProperty("Time")
    };

    var areaAggregator = new ValueDelegateAggregator<IncidentData>
    {
        Name = "Area",
        ValueDelegate = new DataKeyValueDelegate("Area").Delegate
    };

    var codeAggregator = new ValueDelegateAggregator<IncidentData>
    {
        Name = "Code",
        ValueDelegate = new DataKeyValueDelegate("Code").Delegate
    };

    var aggregators = new List<DataAggregator<IncidentData>>
    {
        monthAggregator,
        areaAggregator,
        codeAggregator
    };

    return DataAggregator<IncidentData>.GetAggregatedData(aggregators, Incidents);
}
/// <summary>
/// Add a new data interval.
/// </summary>
/// <param name="security">The security.</param>
/// <param name="resolution">The resolution.</param>
/// <param name="aggregation">The aggregation.</param>
/// <returns>The registered data aggregator.</returns>
public DataAggregator AddInterval(Security security, Resolution resolution, AggregationType aggregation = AggregationType.QuoteBar)
{
    // Select the aggregator implementation matching the requested aggregation type.
    DataAggregator aggregator = null;
    if (aggregation == AggregationType.QuoteBar)
    {
        aggregator = resolution.IsTick
            ? new QuoteBarAggregator(Convert.ToInt32(resolution.Ticks))
            : new QuoteBarAggregator(resolution.TimeSpan.Value);
    }
    else if (aggregation == AggregationType.RenkoBar)
    {
        // Renko aggregation is not implemented yet; callers currently receive null.
        //aggregator = new RenkoAggregator(Convert.ToInt32(resolution.Ticks), null); //TODO: set renko type
        aggregator = null;
    }
    else if (aggregation == AggregationType.TradeBar)
    {
        aggregator = resolution.IsTick
            ? new TradeAggregator(Convert.ToInt32(resolution.Ticks))
            : new TradeAggregator(resolution.TimeSpan.Value);
    }

    // Register the aggregator for this security and hand it back.
    return AddInterval(security, aggregator);
}
/// <summary>
/// Logs in to the given data view, renders the cube for the requested variables to
/// standard output, and logs out. Returns 0 on success, -1 on login or query failure.
/// </summary>
private static async Task<int> OutputCube(string baseUrl, string dataViewName, string username, string password, string systemName, string queryFilePath, List<string> variableNames)
{
    using (var loggingHandler = new LoggingHandler())
    {
        var logger = loggingHandler.CreateLogger<Program>();
        var connectorFactory = new ApiConnectorFactory(baseUrl);
        var loginService = new LoginService(connectorFactory, dataViewName);

        var sessionDetails = await loginService.Login(username, password);
        if (sessionDetails == null)
        {
            logger.LogError($"Couldn't log in to data view {dataViewName} as user {username}");
            return -1;
        }

        try
        {
            var dataExplorer = new DataAggregator(connectorFactory, dataViewName, loggingHandler.CreateLogger<DataAggregator>());
            var succeeded = await dataExplorer.TryShowCubeForVariables(sessionDetails, systemName, queryFilePath, variableNames, System.Console.Out);
            return succeeded ? 0 : -1;
        }
        finally
        {
            // Always release the session, even when the cube query fails.
            await loginService.Logout(sessionDetails);
        }
    }
}
/// <summary>
/// Verifies that running the aggregator over both sources stores every
/// expected apartment complex in the repository, in order.
/// </summary>
public async Task Run()
{
    // Arrange
    var apartComplexRepositoryMock = MockApartComplexRepository();
    var lunUaApartComplexes = CreateRandomApartComplexes();
    var lunUaAggregator = MockLunUaAggregator(lunUaApartComplexes);
    var domRiaApartComplexes = CreateRandomApartComplexes();
    var domRiaAggregator = MockDomRiaAggregator(domRiaApartComplexes);
    var expectedResult = CreateExpectedResult(lunUaApartComplexes, domRiaApartComplexes).ToList();
    var aggregators = new List<IAggregator> { lunUaAggregator.Object, domRiaAggregator.Object };
    var dataAggregator = new DataAggregator(apartComplexRepositoryMock, aggregators);

    // Act
    await dataAggregator.Run();

    // Assert
    var storedComplexes = ((TestRepository)apartComplexRepositoryMock)._apartComplexes.ToList();
    Assert.Equal(expectedResult.Count, storedComplexes.Count);
    for (var position = 0; position < expectedResult.Count; position++)
    {
        Assert.True(CompareApartComplexes(expectedResult[position], storedComplexes[position]));
    }
}
/// <summary>
/// Gets the weekly data backup files.
/// </summary>
/// <param name="timer">The timer (unused; required by the timer-callback signature).</param>
/// <param name="o">The state object (unused).</param>
private void GetWeeklyDataBackupFiles(Timer timer, object o)
{
    // Get last week's pg_dump files and send them (via WCF/MSMQ message payload)
    // up to HMS cloud server
    var backupData = DataAggregator.GetDataBackup();
    if (null == backupData || 0 >= backupData.Count)
    {
        return;
    }

    try
    {
        DataExporter.SendDataBackup(backupData);
    }
    catch (Exception ex)
    {
        // Fixed: the message previously referenced "DataReporter", but the call above
        // goes through DataExporter - point the operator at the right component.
        Logger.Warn(
            $"An unexpected error occurred while calling DataExporter.SendDataBackup. Check WCF HMS configuration: [{ex.Message}]");

        // Walk the inner-exception chain so the root cause is not lost.
        var innerEx = ex.InnerException;
        while (null != innerEx)
        {
            Logger.Warn($"[{innerEx.Message}]");
            innerEx = innerEx.InnerException;
        }

        Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");
    }
}
/// <summary>
/// Collects the current income/spending snapshot plus all custom properties
/// and persists them to the database, then re-checks spending limits.
/// </summary>
public void saveToDb()
{
    decimal income = props.totalIncome;
    decimal spending = getTotalSpending();

    // Use the picker's date when one is set; otherwise fall back to "now".
    DateTime timeStamp = DateTimerPicker.Value != null
        ? (DateTime)DateTimerPicker.Value
        : DateTime.Now;

    var da = new DataAggregator(income, spending, timeStamp);

    var caList = new List<CustomPropAggregator>();
    foreach (CustomProperty prop in myProperties.GetAllProps())
    {
        caList.Add(new CustomPropAggregator(timeStamp, prop.Value, prop.Name));
    }

    saveDatabase(da, caList);
    searchForOverLimit();
}
/// <summary>
/// Verifies that CurrentValue reflects the most recently aggregated reading.
/// </summary>
public void DataAggregator_ShouldShowLatestValue()
{
    DataAggregator.AggregateData("5.05\r");
    Thread.Sleep(1000);
    DataAggregator.AggregateData("18.55\r");

    // Fixed argument order: Assert.AreEqual takes (expected, actual) - the original
    // passed them swapped, unlike the sibling MinValue test in this file.
    Assert.AreEqual("18,55", DataAggregator.CurrentValue());
}
/// <summary>
/// Verifies that MinValue returns the smallest reading aggregated so far.
/// </summary>
public void DataAggregator_ShouldShowMinValue()
{
    DataAggregator.AggregateData("5.05\r");
    Thread.Sleep(1000);
    DataAggregator.AggregateData("18.55\r");
    Thread.Sleep(1000);

    var minimum = DataAggregator.MinValue();
    Assert.AreEqual("5,05", minimum);
}
/// <summary>
/// Initializes the connector: an embedded in-process server when no servers are
/// configured, a single TCP client for one server, or a sharded aggregator for several.
/// </summary>
/// <param name="config">Client configuration (server list, persistence flag).</param>
public Connector(ClientConfig config)
{
    if (Client != null)
    {
        return;
    }

    var serverCount = config.Servers?.Count ?? 0;

    if (serverCount == 0)
    {
        // No servers configured: spin up an embedded server over an in-process channel.
        var channel = new InProcessChannel();
        Client = new DataClient { Channel = channel };
        _server = new Server.Server(new NodeConfig { IsPersistent = config.IsPersistent, DataPath = "." })
        {
            Channel = channel
        };
        _server.Start();
    }
    else if (serverCount == 1)
    {
        var serverCfg = config.Servers[0];
        var channel = new TcpClientChannel(new TcpClientPool(4, 1, serverCfg.Host, serverCfg.Port));
        Client = new DataClient { Channel = channel };
    }
    else // multiple servers
    {
        // Fan out through a sharded aggregator, one client per server node.
        var aggregator = new DataAggregator();
        var shardIndex = 0;
        foreach (var serverConfig in config.Servers)
        {
            var channel = new TcpClientChannel(new TcpClientPool(4, 1, serverConfig.Host, serverConfig.Port));
            aggregator.CacheClients.Add(new DataClient
            {
                Channel = channel,
                ShardIndex = shardIndex,
                ShardsCount = config.Servers.Count
            });
            shardIndex++;
        }

        Client = aggregator;
    }
}
/// <inheritdoc />
/// <exception cref="T:System.Exception">
/// ServiceConsumerFactory.Create returned null/invalid IHmsCloudService reference.
/// Check WCF HMS configuration.
/// </exception>
public void SendCasinoDataReport(IReportable reportable)
{
    // Only casino data reports are handled here; other reportables are silently ignored.
    var dataReport = reportable as CasinoDataReport;
    if (null == dataReport)
    {
        return;
    }

    using (
        var cloudServiceProxy = ServiceConsumerFactory.Create <IHmsCloudService>(() => new HmsCloudServerProxy())
    )
    {
        try
        {
            if (cloudServiceProxy?.Operations == null)
            {
                throw new Exception(
                    "ServiceConsumerFactory.Create returned null/invalid IHmsCloudService reference. Check WCF HMS configuration.");
            }

            // The TransactionScope here will provide behavior such that - if/when the call to
            // DataAggregator.SuccessfulCasinoDataReport fails for some reason (e.g., problem
            // writing to the database) - the transactional WCF/MSMQ message delivery will be
            // rolled back (i.e., the messages will be discarded from the client-side MQ)
            var txnOptions = new TransactionOptions { IsolationLevel = IsolationLevel.RepeatableRead };
            using (var txnScope = new TransactionScope(TransactionScopeOption.Required, txnOptions))
            {
                cloudServiceProxy.Operations.ReportCasinoData(dataReport);
                DataAggregator.SuccessfulCasinoDataReport(reportable.ReportGuid);
                txnScope.Complete();
            }
        }
        catch (FaultException fe)
        {
            // A service fault is an expected failure mode: log it and mark the report
            // unsuccessful so it can be retried later.
            Logger.Warn($"Service operation ReportCasinoData threw a fault: [{fe.Message}]");
            DataAggregator.UnsuccessfulCasinoDataReport(reportable.ReportGuid);
        }
        catch (Exception ex)
        {
            Logger.Warn(
                $"An unexpected error occurred while calling the ReportCasinoData service operation: [{ex.Message}]");

            // Log the full inner-exception chain so the root cause is preserved.
            var innerEx = ex.InnerException;
            while (null != innerEx)
            {
                Logger.Warn($"[{innerEx.Message}]");
                innerEx = innerEx.InnerException;
            }

            DataAggregator.UnsuccessfulCasinoDataReport(reportable.ReportGuid);
            Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");
        }
    }
}
/// <summary>
/// Verifies that two dictionaries merged sequentially produce the expected combined data.
/// </summary>
public void TestDictionariesAreProperlyMerged()
{
    var aggregator = new DataAggregator();

    aggregator.MergeData(firstDictionary);
    aggregator.MergeData(secondDictionary);

    IDictionary<string, long> actualData = aggregator.GetData();
    TestHelpers.AssertEqualsDictionaries(expectedData, actualData);
}
/// <summary>
/// Prepares a fresh substitute data provider and the DataAggregator under test,
/// capturing every executed SQL statement for later inspection.
/// </summary>
public void SetUp()
{
    _executedQueries.Clear();

    _dataGenProvider = Substitute.For<IDataGenProvider>();
    _dataGenProvider.GetTableName(StockType.Stock, StockDataRange.Daily, 0).Returns(TblDaily);
    _dataGenProvider.GetTableName(StockType.Stock, StockDataRange.Weekly, 0).Returns(TblWeekly);
    _dataGenProvider.GetTableName(StockType.Stock, StockDataRange.Monthly, 0).Returns(TblMonthly);

    // Record every SQL statement the aggregator executes.
    _dataGenProvider.ExecuteSQL(Arg.Compat.Do<string>(s => _executedQueries.Add(s)));

    TestObj = new DataAggregator(_dataGenProvider);
}
/// <summary>
/// Exports ICasinoDataReport data as encrypted JSON to a per-day local export directory.
/// (The previous summary said "as CSV"; the method serializes to JSON.)
/// </summary>
/// <param name="casinoDataReport">The casino data report.</param>
private void ExportCasinoDataReportAsJson(ICasinoDataReport casinoDataReport)
{
    string casinoDataReportFile = null;
    try
    {
        // One subdirectory per UTC day.
        var exportDirectory = Directory.CreateDirectory(Path.Combine(ExportLocation, DateTime.UtcNow.ToString("yyyy-MM-dd")));
        // Strip whitespace from the casino code so it is file-name safe.
        casinoDataReportFile = Path.Combine(exportDirectory.FullName,
            $"casinoDataReport-{Regex.Replace(casinoDataReport.CasinoCode, @"\s+", "")}-{casinoDataReport.ReportedAt:yyyy-MM-dd-HHmmss}-{casinoDataReport.ReportGuid.ToString()}.txt");

        var casinoDataReportJson = JsonConvert.SerializeObject(casinoDataReport, Formatting.None,
            new JsonSerializerSettings { TypeNameHandling = TypeNameHandling.Auto });
        if (!string.IsNullOrWhiteSpace(casinoDataReportJson))
        {
            // Encrypt before writing; the on-disk file content is not plain JSON.
            File.WriteAllText(casinoDataReportFile, StringEncryption.EncryptString(casinoDataReportJson));
        }

        DataAggregator.SuccessfulCasinoDataReport(casinoDataReport.ReportGuid);
    }
    catch (Exception ex)
    {
        Logger.Warn(
            $"An unexpected error occurred in LocalDataExporter.ExportCasinoDataReportAsJson method: [{ex.Message}]");

        // Log the full inner-exception chain so the root cause is preserved.
        var innerEx = ex.InnerException;
        while (null != innerEx)
        {
            Logger.Warn($"[{innerEx.Message}]");
            innerEx = innerEx.InnerException;
        }

        DataAggregator.UnsuccessfulCasinoDataReport(casinoDataReport.ReportGuid);
        Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");

        // clean up any files created during this failed export
        try
        {
            if (!string.IsNullOrWhiteSpace(casinoDataReportFile))
            {
                File.Delete(casinoDataReportFile);
            }
        }
        catch (Exception exception)
        {
            // just log the issue, nothing else needed
            Logger.Warn(
                $"LocalDataExporter.ExportCasinoDataReportAsJson: problem deleting [{casinoDataReportFile}] file in catch block [{exception.Message}]");
        }
    }
}
/// <summary>
/// Gets the egm data for report and sends each available report to the cloud,
/// stopping early when the next timer event is approaching.
/// </summary>
/// <param name="timer">The timer (unused; required by the timer-callback signature).</param>
/// <param name="o">The state object (unused).</param>
private void GetEgmDataForReport(Timer timer, object o)
{
    var casinoDataReport = DataAggregator.GetCasinoDataReport();
    if (null == casinoDataReport)
    {
        return;
    }

    // Guard against Timer event re-entrancy by breaking out of
    // data processing do...while loop (below) when we are approaching
    // next Timer event (even if there is more data to process).
    var startAt = DateTime.UtcNow;
    var reentrantGuardMinutes = Settings.Default.CloudReportInterval * 0.10d;
    if (ReentrantGuardMaxMinutes < reentrantGuardMinutes)
    {
        reentrantGuardMinutes = ReentrantGuardMaxMinutes;
    }

    var reentrantGuardTimeSpan = TimeSpan.FromMinutes(Settings.Default.CloudReportInterval)
        .Subtract(TimeSpan.FromMinutes(reentrantGuardMinutes));

    do
    {
        Logger.Debug($"Casino Data Report {casinoDataReport}");
        try
        {
            DataExporter.SendCasinoDataReport(casinoDataReport);
        }
        catch (Exception ex)
        {
            // Fixed: the message previously referenced "DataReporter", but the call above
            // goes through DataExporter - point the operator at the right component.
            Logger.Warn(
                $"An unexpected error occurred while calling DataExporter.SendCasinoDataReport. Check WCF HMS configuration: [{ex.Message}]");

            // Walk the inner-exception chain so the root cause is not lost.
            var innerEx = ex.InnerException;
            while (null != innerEx)
            {
                Logger.Warn($"[{innerEx.Message}]");
                innerEx = innerEx.InnerException;
            }

            DataAggregator.UnsuccessfulCasinoDataReport(casinoDataReport.ReportGuid);
            Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");
            break;
        }

        casinoDataReport = DataAggregator.GetCasinoDataReport();
    } while (null != casinoDataReport && DateTime.UtcNow.Subtract(startAt) < reentrantGuardTimeSpan);
}
/// <summary>
/// Inserts or updates the monthly aggregate record and the custom-property records.
/// Only one aggregate entry is kept per calendar month.
/// </summary>
/// <param name="da">The monthly income/spending aggregate to store.</param>
/// <param name="caList">Custom property aggregates for the same time stamp.</param>
public void saveDatabase(DataAggregator da, List<CustomPropAggregator> caList)
{
    using (var db = new LiteDatabase(@"AdatBazis.db"))
    {
        var col = db.GetCollection<DataAggregator>("data");
        var results = col.Find(Query.All());
        bool updated = false;
        foreach (var result in results)
        {
            // Only one entry per month (Minden honapban csak egy bejegyzes legyen).
            if ((result.TimeStamp.Month == da.TimeStamp.Month) && (result.TimeStamp.Year == da.TimeStamp.Year))
            {
                result.Income = da.Income;
                result.Spending = da.Spending;
                result.TimeStamp = da.TimeStamp;
                col.Update(result);
                updated = true;
            }
        }
        if (!updated)
        {
            col.Insert(da);
        }

        var list = db.GetCollection<CustomPropAggregator>("customList");
        // If the name matches AND the month matches, refresh the stored value.
        var customResults = list.Find(Query.All());
        foreach (var member in caList)
        {
            bool shouldInsert = true;
            foreach (var result in customResults)
            {
                // Fixed: also compare the year, matching the uniqueness rule used for
                // the aggregate records above. Previously entries from the same month
                // of a DIFFERENT year were overwritten instead of inserted.
                if (member.TimeStamp.Month == result.TimeStamp.Month
                    && member.TimeStamp.Year == result.TimeStamp.Year
                    && member.Title == result.Title)
                {
                    result.Spending = member.Spending; // update existing record
                    list.Update(result);
                    shouldInsert = false;
                }
            }
            if (shouldInsert)
            {
                list.Insert(member);
            }
        }
    }
}
/// <summary>
/// Reads the raw file, aggregates and sorts its per-country data, writes the result
/// to a new file and returns that file's path.
/// </summary>
/// <param name="filePath">Path of the raw input file.</param>
/// <returns>Path of the file containing the aggregated data.</returns>
private string createAggregatedFileAndReturnItsDirectory(string filePath)
{
    var rawFileContent = FileReaderWriter.readFromFile(filePath);
    var countryPersonPair = DictionaryFactory.createCountryPersonDictionary(rawFileContent);
    var sortedDict = DictionaryFactory.sortCountriesByAverageScore(countryPersonPair);
    var aggregatedData = DataAggregator.collectAndSortData(countryPersonPair, sortedDict);
    return FileReaderWriter.writeAggregatedDataToFile(aggregatedData);
}
/// <summary>
/// Adds the signal.
/// </summary>
/// <param name="name">The name.</param>
/// <param name="action">The action.</param>
/// <param name="trigger">Aggregator whose output drives the signal evaluation.</param>
/// <returns>The registered trading signal.</returns>
public TradingSignal AddSignal(string name, Func<Security, bool> action, DataAggregator trigger)
{
    var signal = new TradingSignal(Universe, name, action);

    // Evaluate the signal each time the trigger aggregator emits, but only while running.
    trigger.DataAggregated += (sender, aggregate) =>
    {
        if (IsRunning)
        {
            signal.Execute();
        }
    };

    signal.SignalFired += OnSignal;
    Signals[signal.Name] = signal;
    return signal;
}
/// <summary>
/// Starts two server nodes on TCP channels and wires up a two-shard
/// aggregator client pointing at them.
/// </summary>
public void Init()
{
    // First shard server.
    _serverChannel1 = new TcpServerChannel();
    _server1 = new Server.Server(new NodeConfig { DataPath = "server1" }) { Channel = _serverChannel1 };
    _serverPort1 = _serverChannel1.Init();
    _serverChannel1.Start();
    _server1.Start();

    // Second shard server.
    _serverChannel2 = new TcpServerChannel();
    _server2 = new Server.Server(new NodeConfig { DataPath = "server2" }) { Channel = _serverChannel2 };
    _serverPort2 = _serverChannel2.Init();
    _serverChannel2.Start();
    _server2.Start();

    Thread.Sleep(500); // be sure the server nodes are started

    _client1 = new DataClient
    {
        Channel = new TcpClientChannel(new TcpClientPool(4, 1, "localhost", _serverPort1)),
        ShardIndex = 0,
        ShardsCount = 2
    };
    _client2 = new DataClient
    {
        Channel = new TcpClientChannel(new TcpClientPool(4, 1, "localhost", _serverPort2)),
        ShardIndex = 1,
        ShardsCount = 2
    };

    _aggregator = new DataAggregator { CacheClients = { _client1, _client2 } };
    _aggregator.DeclareCollection<CacheableTypeOk>();
}
/// <summary>
/// Verifies the aggregator keeps at most one hundred records: after more than a
/// hundred insertions the oldest reading is evicted while the newest is retained.
/// </summary>
public void DataAggregator_NotMoreThanOneHunderedRecords()
{
    DataAggregator.AggregateData("11.11\r");
    Thread.Sleep(1000);
    DataAggregator.ShowAllOfTheRecords();

    // Push enough readings to overflow the 100-record window.
    for (int i = 0; i < 110; i++)
    {
        DataAggregator.AggregateData("22.22\r");
        Thread.Sleep(1000);
    }

    DataAggregator.AggregateData("66.66\r");
    DataAggregator.ShowAllOfTheRecords();

    Assert.AreEqual("66,66", DataAggregator.CurrentValue());
    // The very first reading must have been evicted, so it can no longer be the minimum.
    Assert.AreNotEqual("11,11", DataAggregator.MinValue());
}
/// <summary>
/// Aggregates incidents by location and month-of-year and prints the result as JSON.
/// </summary>
public void RunComboAggregatorTest()
{
    var moyAggTest = new MonthOfYearAggregator<IncidentData>
    {
        Member = typeof(IncidentData).GetProperty("Time")
    };
    var locAggTest = new CategoryAggregator<IncidentData>
    {
        Member = typeof(IncidentData).GetProperty("Location")
    };

    var aggregators = new List<DataAggregator<IncidentData>> { locAggTest, moyAggTest };

    Dictionary<object, object> dictionary = DataAggregator<IncidentData>.GetData(aggregators, Incidents);
    Console.WriteLine(JsonConvert.SerializeObject(dictionary));
}
/// <summary>
/// Aggregates incidents by month/area/code, then groups the result by area with
/// per-area code counts.
/// </summary>
public object RunGroupByTest()
{
    var aggregators = new List<DataAggregator<IncidentData>>
    {
        new MonthAggregator<IncidentData> { Member = typeof(IncidentData).GetProperty("Time") },
        new ValueDelegateAggregator<IncidentData>
        {
            Name = "Area",
            ValueDelegate = new DataKeyValueDelegate("Area").Delegate
        },
        new ValueDelegateAggregator<IncidentData>
        {
            Name = "Code",
            ValueDelegate = new DataKeyValueDelegate("Code").Delegate
        }
    };

    var data = DataAggregator<IncidentData>.GetAggregatedData(aggregators, Incidents);

    // Group by area, counting entries and collecting code counts within each area.
    var grouped = data.GroupBy(
        x => x.AggregatorValues.GetValueOrDefault("Area"),
        x => x.AggregatorValues.GetValueOrDefault("Code"),
        (area, codes) => new
        {
            Area = area,
            Count = codes.Count(),
            CodeCount = getCodeCounts(codes.ToList())
        });

    return grouped;
}
// Rebuilds the aggregator from the current game reader, including a fresh
// PlayerService bound to the same reader, discarding any previous instance.
private void RecreateAggregators()
{
    _aggregator = new DataAggregator(_gameReader, new PlayerService(_gameReader));
}
/// <summary>
/// Initializes a new instance of the <see cref="DataSubscription"/> class.
/// </summary>
/// <param name="fundid">The fund identifier.</param>
/// <param name="request">The subscription request.</param>
/// <param name="security">The security.</param>
/// <param name="timezone">The timezone.</param>
/// <param name="aggregator">Initial aggregator; ignored when null.</param>
public DataSubscription(string fundid, DataSubscriptionRequest request, Security security, TimeZone timezone, DataAggregator aggregator)
{
    Resolution = new Resolution(request.Aggregation);
    Request = request;
    FundId = fundid;
    ExchangeTimeZone = security.Exchange.TimeZone;
    DateTimeZone = timezone;

    // Seed the aggregator set with the initial aggregator when one is provided.
    Aggregators = new HashSet<DataAggregator>();
    if (aggregator != null)
    {
        Aggregators.Add(aggregator);
    }
}
/// <inheritdoc />
/// <exception cref="T:System.Exception">
/// ServiceConsumerFactory.Create returned null/invalid IHmsCloudService reference.
/// Check WCF HMS configuration.
/// </exception>
public void SendCasinoDiagnosticData(IDictionary <string, IList <byte[]> > diagnosticData)
{
    // Nothing to send.
    if (null == diagnosticData || 0 >= diagnosticData.Count)
    {
        return;
    }

    var casinoCode = Settings.Default.CasinoCode;

    using (
        var cloudServiceProxy = ServiceConsumerFactory.Create <IHmsCloudService>(() => new HmsCloudServerProxy())
    )
    {
        try
        {
            if (cloudServiceProxy?.Operations == null)
            {
                throw new Exception(
                    "ServiceConsumerFactory.Create returned null/invalid IHmsCloudService reference. Check WCF HMS configuration.");
            }

            foreach (var diagnosticFilename in diagnosticData.Keys)
            {
                // Skip files with no chunks to send.
                var diagnosticFileChunks = diagnosticData[diagnosticFilename];
                if (null == diagnosticFileChunks || 0 >= diagnosticFileChunks.Count)
                {
                    continue;
                }

                // create a new ReportGuid - one for each filename being sent (in either one or
                // multiple messages/chunks - based on size of diagnosticFileChunks IList)
                var reportGuid = Guid.NewGuid();
                var reportedAt = DateTime.UtcNow;

                // The TransactionScope here will provide behavior such that - if/when the call to
                // DataAggregator.SuccessfulCasinoDiagnosticReport fails for some reason - the transactional
                // WCF/MSMQ message delivery will be rolled back (i.e., the messages will be discarded
                // from the client-side MQ).
                //
                // Additionally, we bundle all the chunks for each diagnostic file into a single
                // transaction. Thus, we will roll back if not all chunks are successfully
                // processed/sent.
                var txnOptions = new TransactionOptions { IsolationLevel = IsolationLevel.RepeatableRead };
                using (var txnScope = new TransactionScope(TransactionScopeOption.Required, txnOptions))
                {
                    for (var iChunk = 0; iChunk < diagnosticFileChunks.Count; ++iChunk)
                    {
                        // Empty chunks carry no payload; skip them.
                        var chunk = diagnosticFileChunks[iChunk];
                        if (null == chunk || 0 >= chunk.LongLength)
                        {
                            continue;
                        }

                        var casinoDiagnosticData = new CasinoDiagnosticData
                        {
                            Filename = diagnosticFilename,
                            Chunk = chunk,
                            ChunkIndex = iChunk,
                            NumChunks = diagnosticFileChunks.Count,
                            CasinoCode = casinoCode,
                            ReportGuid = reportGuid,
                            ReportedAt = reportedAt
                        };
                        cloudServiceProxy.Operations.ReportCasinoDiagnostics(casinoDiagnosticData);
                    }

                    // Record success only after every chunk was handed to the service;
                    // completing the scope commits the queued messages.
                    DataAggregator.SuccessfulCasinoDiagnosticReport(diagnosticFilename, reportGuid);
                    txnScope.Complete();
                }
            }
        }
        catch (FaultException fe)
        {
            Logger.Warn($"Service operation ReportCasinoDiagnostics threw a fault: [{fe.Message}]");
        }
        catch (Exception ex)
        {
            Logger.Warn(
                $"An unexpected error occurred while calling the ReportCasinoDiagnostics service operation: [{ex.Message}]");

            // Log the full inner-exception chain so the root cause is preserved.
            var innerEx = ex.InnerException;
            while (null != innerEx)
            {
                Logger.Warn($"[{innerEx.Message}]");
                innerEx = innerEx.InnerException;
            }

            Logger.Warn($"Stack Trace: [{Environment.StackTrace}]");
        }
    }
}
/// <summary>
/// Adds a datasubscription which is derived from the requested data aggregator instance
/// Force tick will force the data to contain the highest granularity (otherwise it might be based on 1-minute data)
/// TODO: add unit test, if we request 1 minute data and than request tick data we should keep the tick data request and replace all 1 minute request with the tick data request? (so that we only keep the tick data request)
/// TODO: we will only do ticks or tradebars! (where a trade bar is based on any data)
/// </summary>
/// <param name="quantfund">The quant fund requesting the subscription.</param>
/// <param name="security">The security.</param>
/// <param name="aggregator">The aggregator.</param>
/// <param name="forcetick">if set to <c>true</c> [forcetick].</param>
/// <returns>Can be a different dataggregator due to change in data requested</returns>
public DataAggregator AddSubscription(IQuantFund quantfund, Security security, DataAggregator aggregator, bool forcetick = false)
{
    //Initial values
    TimeSpan? aggregationneeded = null;
    DataType datatypeneeded = DataType.Tick;
    TimeSpan preaggregated = TimeSpan.FromMinutes(1);

    if (!forcetick)
    {
        //TradeBar -> TradeBar
        // Whole-minute periods can be served from pre-aggregated 1-minute data
        // instead of raw ticks, so the aggregator is swapped accordingly.
        if (aggregator is TimeSerieAggregator <TradeBar, TradeBar> tradetotrade && tradetotrade.IsTimeBased)
        {
            if (tradetotrade.Period.Value.TotalSeconds % 60 == 0D)
            {
                aggregator = new TradeAggregator(tradetotrade.Period.Value);
                aggregationneeded = preaggregated;
                datatypeneeded = DataType.TradeBar;
            }
        }

        //Tick -> TradeBar
        if (aggregator is TimeSerieAggregator <Tick, TradeBar> ticktobar && ticktobar.IsTimeBased)
        {
            if (ticktobar.Period.Value.TotalSeconds % 60 == 0D)
            {
                // NOTE(review): a TickQuoteBarAggregator is substituted here while the
                // requested data type is TradeBar - confirm this mismatch is intentional.
                aggregator = new TickQuoteBarAggregator(ticktobar.Period.Value);
                aggregationneeded = TimeSpan.FromMinutes(1);
                datatypeneeded = DataType.TradeBar;
            }
        }
    }

    //get and add subscription
    var subscription = DataSubscriptionRequest.CreateSubscriptionRequest(security.Ticker, _datafeed.DataSource, aggregationneeded, datatypeneeded);
    subscription = AddSubscription(subscription);

    //Add base currency conversion
    AddBaseCurrencyConversionFeed(security);

    //Check if we already have a similar data aggregator, reuse the existing version if possible
    if (_registeredsubscriptions.ContainsKey(quantfund.FundId))
    {
        var found = _registeredsubscriptions[quantfund.FundId].FirstOrDefault(x => x.Request.GetSubscriptionName() == subscription.GetSubscriptionName());
        var existing = found?.Aggregators.FirstOrDefault(x => x.Name == aggregator.Name);
        if (existing != null)
        {
            // Same subscription and same-named aggregator already registered: reuse it.
            return(existing);
        }
        else if (found == null)
        {
            // Known fund, but new subscription: register a fresh DataSubscription.
            _registeredsubscriptions[quantfund.FundId].Add(new DataSubscription(quantfund.FundId, subscription, security, security.Exchange.TimeZone, aggregator));
        }
        else
        {
            // Existing subscription, new aggregator: attach it.
            found.Aggregators.Add(aggregator);
        }
    }
    else
    {
        //Add new
        _registeredsubscriptions.Add(quantfund.FundId, new List <DataSubscription>());
        _registeredsubscriptions[quantfund.FundId].Add(new DataSubscription(quantfund.FundId, subscription, security, security.Exchange.TimeZone, aggregator));
    }

    //Return our current aggregator
    return(aggregator);
}
/// <summary>
/// Add a new data interval.
/// </summary>
/// <param name="aggregation">The aggregation.</param>
/// <param name="security"></param>
/// <returns>The (possibly reused) data aggregator registered for this fund.</returns>
public DataAggregator AddInterval(Security security, DataAggregator aggregation)
{
    // Delegate to the portfolio-level subscription manager, honoring the fund's force-tick setting.
    return QuantFund.Portfolio.Subscription.AddSubscription(QuantFund, security, aggregation, QuantFund.IsForceTick);
}
/// <summary>
/// Aggregating three ownership records (two owners in 2020, one of them again in 2021)
/// must yield a single company with two owners for 2020 and one for 2021.
/// </summary>
public void AggregateTwoOwnersOfOneYear_OneOfSecondYear()
{
    // Local factory: all records share the same company; only the owner and period vary.
    BusinessDataItem MakeItem(string ownerId, string country, string countrySign, string fromDate, string toDate) =>
        new BusinessDataItem
        {
            OwnerId = ownerId,
            ICO = "123",
            Name = "Test company",
            LegalFormOfOwner = "s.r.o",
            CountryOfOwner = country,
            OwnerCountrySign = countrySign,
            OwnerType = "FO",
            OwnerShare = "50",
            FromTime = DateTime.ParseExact(fromDate, "dd.MM.yyyy", CultureInfo.CurrentCulture),
            ToTime = DateTime.ParseExact(toDate, "dd.MM.yyyy", CultureInfo.CurrentCulture),
            IsValid = "1",
        };

    const string czechOwnerId = "21442131";
    const string usaOwnerId = "534523";

    var inputBusinessItems = new List<BusinessDataItem>
    {
        MakeItem(czechOwnerId, "Czech republic", "DOM", "01.01.2020", "31.12.2020"),
        MakeItem(czechOwnerId, "Czech republic", "DOM", "01.01.2021", "31.12.2021"),
        MakeItem(usaOwnerId, "USA", "FOR", "01.01.2020", "31.12.2020"),
    };

    var dataAggregator = new DataAggregator();
    var result = dataAggregator.AggregateDataByCompany(inputBusinessItems);

    Assert.AreEqual(1, result.Count);
    Assert.AreEqual("123", result[0].ICO);
    Assert.AreEqual("Test company", result[0].Name);

    var ownersByYears = result[0].OwnersByYears;
    Assert.AreEqual(2, ownersByYears.Keys.Count);
    Assert.IsTrue(ownersByYears.ContainsKey(2020));
    Assert.IsTrue(ownersByYears.ContainsKey(2021));
    Assert.AreEqual(2, ownersByYears[2020].Count);
    Assert.AreEqual(1, ownersByYears[2021].Count);
    Assert.AreEqual(czechOwnerId, ownersByYears[2020][0].OwnerId);
    Assert.AreEqual(usaOwnerId, ownersByYears[2020][1].OwnerId);
    Assert.AreEqual(czechOwnerId, ownersByYears[2021][0].OwnerId);
}