/// <summary>
/// Runs the registered <see cref="DataProcessor"/> for <paramref name="type"/> (if any)
/// against <paramref name="cmd"/> and stores the produced <see cref="ProcessedData"/>
/// in the package, keyed by its concrete type — unless that key is already present.
/// </summary>
/// <param name="type">Key used to look up the processor in <c>processors</c>.</param>
/// <param name="cmd">Package the processor reads from and writes into.</param>
/// <returns>The same <paramref name="cmd"/> instance, for chaining.</returns>
private static ProcessedDataPackage CollectFor(Type type, ProcessedDataPackage cmd)
{
    // Single TryGetValue instead of ContainsKey + indexer (avoids a double lookup).
    if (!processors.TryGetValue(type, out DataProcessor dp))
    {
        return cmd;
    }

    if (dp.IsDataExists(cmd))
    {
        ProcessedData t = dp.ProcessData(cmd);
        // First result of a given concrete type wins; later ones are ignored.
        if (t != null && !cmd.ContainsKey(t.GetType()))
        {
            cmd.Add(t.GetType(), t);
        }
    }

    return cmd;
}
// Unity lifecycle entry point: builds the node hierarchy, resolves the compute
// shader kernels, and uploads all per-node data into GPU buffers.
void Start()
{
    // Build the tree of nodes from the test data source.
    DataProcessor processor = DataProcessor.GetTestProcessor();
    Node rootNode = processor.ProcessData();

    // Kernel indices for the three passes of the branch-layout compute shader.
    _computeFinalPositionsKernel = BranchCompute.FindKernel("ComputeFinalPositions");
    _computeSiblingPressureKernel = BranchCompute.FindKernel("ComputeSiblingPressure");
    _computeChildrenPositionsKernel = BranchCompute.FindKernel("ComputeChildrenPositions");

    // +1 accounts for the root itself, which TotalChildCount does not include.
    _nodeCount = rootNode.TotalChildCount + 1;

    // Flattened node list plus a node -> index lookup used by the buffer builders.
    Node[] nodeList = GetNodeList(rootNode);
    Dictionary <Node, int> lookupTable = GetLookupTable(nodeList);
    int[] layerOffsetAccumulator = GetLayerOffsetAccumulator(nodeList, rootNode);

    // GPU-side buffers: per-node mutable state and immutable node metadata.
    _variableDataBuffer = GetVariableDataBuffer(rootNode, lookupTable, layerOffsetAccumulator);
    _fixedDataBuffer = GetFixedDataBuffer(nodeList, lookupTable);

    // Sibling-pair data drives the sibling-pressure pass; counts and buffers
    // come paired from the same setup call.
    SiblingsSetupData siblingSetup = GetSibblingSetup(nodeList, lookupTable);
    _siblingPairCounts = siblingSetup.PairCounts;
    _siblingPairsBuffers = siblingSetup.Buffers;

    // Dispatch sizes: ceil(count / BatchSize) thread groups per pass.
    _nodeBatchSize = Mathf.CeilToInt((float)_nodeCount / BatchSize);
    _siblingBatchSizes = _siblingPairCounts.Select(item => Mathf.CeilToInt((float)item / BatchSize)).ToArray();
}
/// <summary>
/// ProcessData must report failure when no input directory is supplied.
/// </summary>
public void Given_InputDirectory_Null_Then_ProcessData_Returns_False()
{
    // Arrange
    var sut = new DataProcessor();

    // Act
    var result = sut.ProcessData(null, null, string.Empty);

    // Assert
    Assert.IsFalse(result);
}
/// <summary>
/// A valid OIB must result in the printer receiving every field of the person
/// returned by the repository (stub-based variant).
/// </summary>
public void ProcessData_validID_callsReportPrinter()
{
    // Arrange: a stub repository holding one known person.
    const string oib = "12345678912";
    var dateOfBirth = new DateTime(1988, 6, 6);
    var person = new Person(oib, "Ana", "Anic", dateOfBirth, "Ilica 6", "*****@*****.**");

    var repository = new PersonRepositoryStub_OK();
    repository.Person = person;
    var printer = new ReportPrinterMock();

    var processor = new DataProcessor
    {
        Repository = repository,
        Printer = printer
    };

    // Act
    processor.ProcessData(oib);

    // Assert: every field of the person reached the printer.
    Assert.AreEqual(oib, printer.Oib);
    Assert.AreEqual("Ana", printer.Name);
    Assert.AreEqual("Anic", printer.Surname);
    Assert.AreEqual(dateOfBirth, printer.DateOfBirth);
    Assert.AreEqual("Ilica 6", printer.Adress);
    Assert.AreEqual("*****@*****.**", printer.EMail);
}
/// <summary>
/// NMock variant: a valid OIB triggers exactly one repository lookup and one
/// report print with the returned person.
/// </summary>
public void NMock_ProcessData_validID_callsReportPrinter()
{
    // Arrange
    const string oib = "12345678912";
    Person person = new Person(
        oib, "Ana", "Anic", new DateTime(1988, 6, 6), "Ilica 6", "*****@*****.**");

    MockFactory factory = new MockFactory();
    var mockRepository = factory.CreateMock <IPersonRepository>();
    var mockPrinter = factory.CreateMock <IReportPrinter>();

    DataProcessor processor = new DataProcessor
    {
        Repository = mockRepository.MockObject,
        Printer = mockPrinter.MockObject
    };

    // Expectations: one lookup returning the person, one print of that person.
    mockRepository.Expects.One.MethodWith(_ => _.GetPersonByOib(oib)).WillReturn(person);
    mockPrinter.Expects.One.MethodWith(_ => _.PrintReport(person));

    // Act
    processor.ProcessData(oib);

    // Assert
    factory.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// Moq variant: a valid OIB makes the processor print the person returned by
/// the repository exactly once.
/// </summary>
public void Moq_ProcessData_validID_callsReportPrinter()
{
    // Arrange
    string oib = "12345678912";
    string name = "Ana";
    string surname = "Anic";
    DateTime dateOfBirth = new DateTime(1988, 6, 6);
    string adress = "Ilica 6";
    string eMail = "*****@*****.**";
    Person person = new Person(oib, name, surname, dateOfBirth, adress, eMail);

    var repository = new Mock<IPersonRepository>();
    var printer = new Mock<IReportPrinter>();
    repository.Setup(x => x.GetPersonByOib(oib)).Returns(person);

    // Mock<T>.Object is already typed as T, so the explicit interface casts
    // in the original were redundant and have been removed.
    DataProcessor processor = new DataProcessor();
    processor.Repository = repository.Object;
    processor.Printer = printer.Object;

    // Act
    processor.ProcessData(oib);

    // Assert
    printer.Verify(x => x.PrintReport(person), Times.Once);
}
/// <summary>
/// NSubstitute variant: a valid OIB makes the processor print the person
/// returned by the repository exactly once.
/// </summary>
public void NSubstitute_ProcessData_validID_callsReportPrinter()
{
    // Arrange
    string oib = "12345678912";
    string name = "Ana";
    string surname = "Anic";
    DateTime dateOfBirth = new DateTime(1988, 6, 6);
    string adress = "Ilica 6";
    string eMail = "*****@*****.**";
    Person person = new Person(oib, name, surname, dateOfBirth, adress, eMail);

    // Substitute.For<T>() already returns T, so the explicit interface casts
    // in the original were redundant and have been removed.
    var repository = Substitute.For <IPersonRepository>();
    var printer = Substitute.For <IReportPrinter>();
    repository.GetPersonByOib(oib).Returns(person);

    DataProcessor processor = new DataProcessor();
    processor.Repository = repository;
    processor.Printer = printer;

    // Act
    processor.ProcessData(oib);

    // Assert
    printer.Received(1).PrintReport(person);
}
/// <summary>
/// An unknown OIB propagates PersonDoesNotExistException out of ProcessData.
/// </summary>
public void ProcessData_invalidID_throwsException()
{
    // Arrange: repository stub configured to throw on any lookup.
    var repository = new PersonRepositoryStub_Exception();
    repository.ToThrow = new PersonDoesNotExistException();
    IReportPrinter printer = new ReportPrinterMock();
    var processor = new DataProcessor(repository, printer);

    // Act — expected to throw; presumably verified by an expected-exception
    // attribute on this test (NOTE(review): confirm the attribute is present).
    processor.ProcessData("12345678913");
}
/// <summary>
/// Prompts the user for input/output directories and runs the CSV processing
/// step, reporting the outcome on the console.
/// </summary>
/// <param name="instanceDataProcessor">Processor that performs the actual work.</param>
/// <returns>True when processing succeeded; false otherwise.</returns>
private bool ProcessCsvFile(DataProcessor instanceDataProcessor)
{
    Console.WriteLine("Enter the input directory: ");
    string inputDirectory = Console.ReadLine();

    Console.WriteLine("Enter the output directory: ");
    string outputDirectory = Console.ReadLine();

    bool isProcessed = instanceDataProcessor.ProcessData(inputDirectory, outputDirectory, "A");

    string message = isProcessed
        ? "Please see inside the output directory for result csv File "
        : "Could not process the data.... ";
    Console.WriteLine(message);

    return isProcessed;
}
/// <summary>
/// Reads integers from stdin and forwards each to the uploader; stops at the
/// first line that does not parse as an Int32 (including end of input).
/// </summary>
static void Main(string[] args)
{
    var uploader = new DataProcessor(
        new TimeService(),
        new UploadService(new TimeService()));

    // The loop condition folds the read + parse; a failed parse (or null at
    // EOF) terminates, exactly as the original explicit break did.
    while (Int32.TryParse(Console.ReadLine(), out var value))
    {
        uploader.ProcessData(value);
    }
}
/// <summary>
/// Converts the first argument to its string representation and wraps it in an
/// <see cref="EvaluationResult"/>. Strings, paths, path atoms, relative paths
/// and ints convert directly; anything else (effectively object literals)
/// takes the slow <see cref="DataProcessor"/> path.
/// </summary>
private EvaluationResult DumpData(Context context, ModuleLiteral env, EvaluationStackFrame args)
{
    var data = Args.AsIs(args, 0);

    // Fix: the original declared an unused local for context.FrontEndContext.PathTable
    // and redundantly initialized dataAsString to null even though every branch assigns it.
    string dataAsString;
    switch (data)
    {
        case string s:
            dataAsString = s;
            break;
        case IImplicitPath pathData:
            dataAsString = pathData.Path.ToString(context.PathTable);
            break;
        case PathAtom pathAtom:
            dataAsString = pathAtom.ToString(context.StringTable);
            break;
        case RelativePath relativePath:
            dataAsString = relativePath.ToString(context.StringTable);
            break;
        case int n:
            // Invariant culture keeps the output culture-independent.
            dataAsString = n.ToString(CultureInfo.InvariantCulture);
            break;
        default:
            // This is effectively only for object literals.
            // Slow path.
            dataAsString = DataProcessor.ProcessData(
                context.StringTable,
                m_dataSeparator,
                m_dataContents,
                context.FrontEndContext.PipDataBuilderPool,
                EvaluationResult.Create(data),
                new ConversionContext(pos: 1)).ToString(context.PathTable);
            break;
    }

    return EvaluationResult.Create(dataAsString);
}
// Integration test: builds an RRD with a COUNTER datasource stepped every
// minute, fills it with 3 days of linearly increasing samples, then derives a
// per-hour figure via an RPN datasource and checks every hourly value.
public void TestDataProcessor()
{
    DateTime start = new DateTime(2012, 08, 22);

    RrdDef def = new RrdDef(SAMPLE, start.GetTimestamp(), 60); // Step : every minute
    def.AddDatasource("speed",DataSourceType.COUNTER,120,double.NaN,double.NaN);
    def.AddArchive(ConsolidationFunction.AVERAGE, 0, 5, 12 * 24 * 30); // Archive average every 5 minutes during 30 days
    def.AddArchive(ConsolidationFunction.AVERAGE, 0, 5 * 12, 24 * 30); // Archive average every hour during 30 days

    start = start.AddSeconds(40);

    using (RrdDb db = RrdDb.Create(def))
    {
        Sample sample = db.CreateSample();
        // Counter grows by 100 per minute, so the per-hour figure asserted
        // below works out to 100 * 60 = 6000.
        for (int i = 1; i < 60 * 24 * 3; i++) // add 3 days of samples
        {
            sample.Set(start.AddMinutes(i), 100 * i);
            sample.Update();
        }
    }

    // Query a 10-hour window at one-hour (3600 s) resolution.
    DataProcessor dataProcessor = new DataProcessor(start.AddHours(3), start.AddHours(13));
    dataProcessor.FetchRequestResolution = 3600;
    dataProcessor.AddDatasource("speed", SAMPLE, "speed", ConsolidationFunction.AVERAGE);
    // RPN: multiply the per-second rate by the step to get the hourly value.
    dataProcessor.AddDatasource("speedByHour", "speed, STEP, *");
    dataProcessor.ProcessData();

    double[] vals = dataProcessor.GetValues("speedByHour");
    Assert.AreEqual(12, vals.Length);
    for (int i = 0; i < vals.Length; i++)
    {
        Assert.AreEqual(6000,((int)vals[i]));
    }
}
/// <summary>
/// Builds a DataProcessor over the request's fetch window, registering this
/// object as the backing source for every datasource name. When an RPN
/// expression is supplied it is added as an extra datasource and the data is
/// processed eagerly.
/// </summary>
/// <param name="rpnExpression">Optional RPN expression; may be null.</param>
/// <returns>The configured (and possibly processed) DataProcessor.</returns>
private DataProcessor CreateDataProcessor(String rpnExpression)
{
    var processor = new DataProcessor(request.FetchStart, request.FetchEnd);

    foreach (String name in dsNames)
    {
        processor.AddDatasource(name, this);
    }

    // Guard clause: without an RPN expression there is nothing to process yet.
    if (rpnExpression == null)
    {
        return processor;
    }

    processor.AddDatasource(RPN_SOURCE_NAME, rpnExpression);
    try
    {
        processor.ProcessData();
    }
    catch (IOException ioe)
    {
        // highly unlikely, since all datasources have already calculated values
        throw new InvalidOperationException("Impossible error: " + ioe);
    }

    return processor;
}
// Collects data for every graph source over [gdef.startTime, gdef.endTime]
// and runs the processor; the image time range mirrors the graph definition.
private void fetchData()
{
    dproc = new DataProcessor(gdef.startTime, gdef.endTime);
    dproc.IsPoolUsed = gdef.poolUsed;
    // Only override the processor's own step when the graph explicitly sets one.
    if (gdef.step > 0)
    {
        dproc.Step = gdef.step;
    }
    // Each source registers the datasources it needs on the processor.
    foreach (Source src in gdef.sources)
    {
        src.requestData(dproc);
    }
    dproc.ProcessData();
    //long[] t = dproc.getTimestamps();
    //im.start = t[0];
    //im.end = t[t.length - 1];
    // NOTE(review): the commented-out code above derived the image range from
    // the processor's actual timestamps; the current code pins it to the
    // requested range instead.
    im.start = gdef.startTime;
    im.end = gdef.endTime;
}