/// <summary>
/// Kicks off background processing of data against the configured remote host.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void button1_Click(object sender, EventArgs e)
{
    string host = ConfigurationManager.AppSettings["RemoteHost"];

    // Task.Run replaces the original `new Task(...)` + `Start()` pair: it
    // schedules directly on the thread pool and returns an already-started
    // ("hot") task, which is the recommended idiom for fire-and-forget work.
    // NOTE(review): exceptions thrown by Process are unobserved here —
    // consider awaiting or attaching a continuation that logs failures.
    Task.Run(() => ProcessingData.Process(host));
}
/// <summary>
/// Fetches the "Values" and "Values2" channels from the processing data and
/// hands them to FindSum.
/// </summary>
/// <param name="data">Container providing the named float channels.</param>
public void Execute(ProcessingData data)
{
    var first = data.GetAs<float>("Values");
    var second = data.GetAs<float>("Values2");

    FindSum(first, second);
}
/// <summary>
/// Processing burnable garbage (name "a", volume 1, weight 1) must yield an
/// energy balance of 0.8 and a capital balance of 0.
/// </summary>
public void ProcessWaste_WithValidWasteAndContainedStrategy_ShouldReturnCorrectProcessingData()
{
    // Arrange & Act
    var actual = this.garbageProcessor.ProcessWaste(new BurnableGarbage("a", 1, 1));
    var expected = new ProcessingData(0.8, 0);

    // Assert
    Assert.AreEqual(expected.EnergyBalance, actual.EnergyBalance);
    Assert.AreEqual(expected.CapitalBalance, actual.CapitalBalance);
}
/// <summary>
/// Processes the given garbage against the running totals: energy is spent in
/// proportion to the garbage volume, capital is earned per kilogram.
/// </summary>
/// <param name="garbage">Waste item supplying weight and volume-per-kg.</param>
/// <param name="processingData">Accumulator updated in place and returned.</param>
/// <returns>The same <paramref name="processingData"/> instance, mutated.</returns>
public IProcessingData ProcessGarbage(IWaste garbage, ProcessingData processingData)
{
    // Named constants replace the original magic numbers 0.5 and 400.
    const double EnergyCostPerVolume = 0.5; // half the total volume is spent as energy
    const double CapitalEarnedPerKg = 400;  // flat payout per kilogram processed

    var energyUsed = garbage.Weight * garbage.VolumePerKg * EnergyCostPerVolume;
    var capitalEarned = CapitalEarnedPerKg * garbage.Weight;

    processingData.EnergyBalance -= energyUsed;
    processingData.CapitalBalance += capitalEarned;

    return processingData;
}
/// <summary>
/// Burns the garbage, recovering 80% of its total volume as energy; capital
/// is untouched.
/// </summary>
/// <param name="garbage">Waste item supplying volume-per-kg and weight.</param>
/// <returns>A fresh ProcessingData with the recovered energy credited.</returns>
public IProcessingData ProcessGarbage(IWaste garbage)
{
    var totalVolume = garbage.VolumePerKg * garbage.Weight;
    var recoveredEnergy = totalVolume * 80 / 100;

    var result = new ProcessingData();
    result.IncreaseEnergyBalance(recoveredEnergy);
    return result;
}
/// <summary>
/// Processing storable garbage must produce an energy balance of -26 and a
/// capital balance of -130 for the standard test fixture values.
/// </summary>
public void ProcessWaste_WithStorableAttribute_ShouldReturnCorrectData()
{
    // Arrange
    var garbage = new StorableGarbage(TestName, TestVolumePerKg, TestWeight);
    IProcessingData expected = new ProcessingData(-26, -130);

    // Act
    var actual = this.garbageProcessor.ProcessWaste(garbage);

    // Assert
    Assert.AreEqual(expected.CapitalBalance, actual.CapitalBalance);
    Assert.AreEqual(expected.EnergyBalance, actual.EnergyBalance);
}
/// <summary>
/// Processes garbage by spending energy equal to half the garbage volume and
/// earning 400 capital per kilogram.
/// </summary>
/// <param name="garbage">Waste item supplying volume-per-kg and weight.</param>
/// <returns>A new ProcessingData holding the resulting balances.</returns>
public override IProcessingData ProcessGarbage(IWaste garbage)
{
    var volume = garbage.VolumePerKg * garbage.Weight;
    var energy = -(volume / 2);
    var capital = 400 * garbage.Weight;

    IProcessingData result = new ProcessingData(energy, capital);
    return result;
}
/// <summary>
/// Integrates force into velocity element-wise: v[i] += f[i] / m[i].
/// </summary>
/// <param name="data">Container providing the "Velocity", "Force" and "Mass"
/// float channels plus the element count.</param>
public void Execute(ProcessingData data)
{
    var length = data.Length;

    // Hoisted out of the loop: the original resolved all three channels via
    // GetAs<float> on every iteration. Assumes GetAs returns a live view of
    // the channel (the original's in-place += relies on the same assumption).
    var velocity = data.GetAs<float>("Velocity");
    var force = data.GetAs<float>("Force");
    var mass = data.GetAs<float>("Mass");

    for (var i = 0; i < length; i++)
    {
        velocity[i] += force[i] / mass[i];
    }
}
/// <summary>
/// Adds a source data file for parsing (menu handler).
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event arguments (unused).</param>
private void addeddatafileToolStripMenuItem_Click(object sender, EventArgs e)
{
    // OpenFileDialog is IDisposable; the original leaked it.
    using (OpenFileDialog dialogAddedFile = new OpenFileDialog())
    {
        dialogAddedFile.Multiselect = false;
        dialogAddedFile.Filter = "Бинарный файл данных |*.bin";

        if (dialogAddedFile.ShowDialog() == DialogResult.OK)
        {
            // NOTE(review): the constructed ProcessingData is never stored or
            // read afterwards — presumably the constructor parses the file as
            // a side effect; confirm this is intentional.
            ProcessingData ProcData = new ProcessingData(dialogAddedFile.FileName);
        }
    }
}
/// <summary>
/// Processing costs 13% of the garbage volume in energy and 65% in capital;
/// both balances are decreased by those amounts.
/// </summary>
/// <param name="garbage">Waste item supplying volume-per-kg and weight.</param>
/// <returns>A fresh ProcessingData carrying the (negative) deltas.</returns>
public IProcessingData ProcessGarbage(IWaste garbage)
{
    var volume = garbage.VolumePerKg * garbage.Weight;
    var energyCost = -(volume * 13 / 100);
    var capitalCost = -(volume * 65 / 100);

    var result = new ProcessingData();
    result.IncreaseEnergyBalance(energyCost);
    result.IncreaseCapitalBalance(capitalCost);
    return result;
}
/// <summary>
/// Burns the garbage: 80% of its total volume becomes energy, capital stays
/// at zero, and the result is tagged as burnable.
/// </summary>
/// <param name="garbage">Waste item supplying weight and volume-per-kg.</param>
/// <returns>A ProcessingData tagged with GarbageType.Burnable.</returns>
public IProcessingData ProcessGarbage(IWaste garbage)
{
    double volume = garbage.Weight * garbage.VolumePerKg;
    double energy = (1 - 0.2) * volume;
    double capital = 0;

    // NOTE(review): this ProcessingData overload takes capital before energy,
    // unlike other call sites in the codebase — verify against the overload.
    return new ProcessingData(capital, energy, GarbageType.Burnable);
}
/// <summary>
/// Initializes the whole sfc editor
/// </summary>
/// <param name="mainNode">Root node stored for later use by the editor.</param>
/// <param name="openedLesson">Lesson whose plant, diagram and simulation are initialised.</param>
public override void InitialiseWith(IMainNode mainNode, ILessonEntity openedLesson)
{
    _mainNode = mainNode;

    // Order matters: InitialisePlant produces the _processingData that the
    // diagram/simulation steps below run against.
    _processingData = InitialisePlant(openedLesson);
    InitialiseDiagram(openedLesson);
    InitialiseSimulation(openedLesson);

    // Breakpoints need both the simulation master and the 2D editor node,
    // so this must come after the initialisation calls above.
    _breakpoints = new BreakpointManager(_simulationMaster, _sfc2dEditorNode);

    // If the lesson is not executable (flag set during the steps above),
    // surface the error label to the user.
    if (!_isExecutable)
    {
        GetNode <Label>(_errorLabelPath).Visible = true;
    }
}
/// <summary>
/// Processes garbage: energy produced equals the full garbage volume, energy
/// used is that volume scaled by EnergyUsedModifier; no capital flows either way.
/// </summary>
/// <param name="garbage">Waste item supplying weight and volume-per-kg.</param>
/// <returns>A ProcessingData holding the net energy and zero net capital.</returns>
public IProcessingData ProcessGarbage(IWaste garbage)
{
    double volume = garbage.Weight * garbage.VolumePerKg;
    double netEnergy = volume - volume * EnergyUsedModifier;
    double netCapital = 0;

    return new ProcessingData(netEnergy, netCapital);
}
/// <summary>
/// Application entry point: wires up the factory, processor and interpreter,
/// then runs the engine.
/// </summary>
public static void Main()
{
    IGarbageFactory factory = new GarbageFactory();
    IGarbageProcessor processor = new GarbageProcessor();
    IProcessingData data = new ProcessingData();

    ICommandInterpreter interpreter = new CommandInterpreter(factory, processor, data);
    IRunnable engine = new Engine(interpreter);

    engine.Run();
}
/// <summary>
/// Loads the plant node and links the I/O Table.
/// </summary>
/// <param name="openedLesson">Lesson whose info and plant view are shown.</param>
/// <returns>ProcessingData whose registers are bound to the simulation node.</returns>
private ProcessingData InitialisePlant(ILessonEntity openedLesson)
{
    _lessonView = GetNode<LessonView>("HscRelative/LessonView");
    _lessonView.LoadAndShowInfo(openedLesson);
    _loadedSimulationNode = _lessonView.PlantView.LoadedSimulationNode;

    // Cross-wired on purpose: the simulation's outputs become our input
    // registers, and its inputs become our output registers.
    return new ProcessingData
    {
        InputRegisters = new StateTable(_loadedSimulationNode.SimulationOutput),
        OutputRegisters = new StateTable(_loadedSimulationNode.SimulationInput)
    };
}
/// <summary>
/// Application entry point: composes the garbage-processing engine from its
/// collaborators and runs it.
/// </summary>
public static void Main()
{
    var data = new ProcessingData();
    var strategies = new StrategyHolder();
    var processor = new GarbageProcessor(data, strategies);

    var engine = new Engine(
        new CommandInterpreter(),
        processor,
        new ConsoleReader(),
        new ConsoleWriter());

    engine.Run();
}
/// <summary>
/// Constructing a ProcessingData from valid id/value arguments must not throw
/// and must expose both values unchanged.
/// </summary>
public void OutboxMessageShouldBeCreatedWithValidParams()
{
    // Arrange
    const int id = 42;
    const int value = 5;

    // Act
    ProcessingData res = null !;
    var exception = Record.Exception(() => res = new ProcessingData(id, value));

    // Assert
    exception.Should().BeNull();
    res.Id.Should().Be(id);
    res.Value.Should().Be(value);
}
/// <summary>
/// When processing data whose Id already exists in the store, the unit of work
/// must roll back: no exception escapes, the stored row keeps its original
/// Value (42), and no outbox message is written.
/// </summary>
public async Task CanProcessExistsDataRollBackAsync()
{
    // Arrange: seed the context with an existing row (Id = 1, Value = 42).
    var ctx = _ctx;
    var oldData = new ProcessingData { Id = 1, Value = 42 };
    _ctx.ProcessingData.Add(oldData);
    _ctx.SaveChanges();

    var logger = new Mock <ILogger <ProcessingDataUnitOfWork> >();
    var serializer = new Mock <ISerializer <IProcessingData> >();

    // Incoming data collides on Id = 1 but carries a different Value.
    var data = new TestData { Id = 1, Value = 2 };
    var testJson = "test";
    serializer.Setup(x => x.Serialize(data)).Returns(testJson);

    // Act: process the colliding data inside a fresh unit of work.
    var exception = await Record.ExceptionAsync(async() =>
    {
        using var uow = new ProcessingDataUnitOfWork(ctx, logger.Object, serializer.Object);
        await uow.ProcessDataAsync(data, CancellationToken.None);
    });

    // Assert: processing must not throw ...
    exception.Should().BeNull();

    // AsNoTracking to check real not cached data
    // ... the original row is untouched (rollback) ...
    ctx.ProcessingData.AsNoTracking().Should().HaveCount(1);
    ctx.ProcessingData.AsNoTracking().Single().Id.Should().Be(1);
    ctx.ProcessingData.AsNoTracking().Single().Value.Should().Be(42);

    // ... and nothing was queued to the outbox.
    ctx.OutboxMessages.Should().HaveCount(0);
}
/// <summary>
/// Restores UI state (line width, colour, grid step) from the given
/// preparation data and redraws the picture.
/// </summary>
/// <param name="data">Serialized preparation data to load from.</param>
public void Loading(PreparationData data)
{
    ProcessingData deserializeData = new ProcessingData(data);
    deserializeData.LoadingData(deserializeData);

    TrackBarOfWidth.Value = _coreUML.DefaultWidth;
    ButtonColor.BackColor = _coreUML.DefaultColor;

    // Map the stored step (expected multiples of 5) onto track-bar positions
    // 1..5; any unexpected value falls back to position 1, as before.
    trackBarOfStep.Value = _coreUML.DefaultStep.X switch
    {
        5 => 1,
        10 => 2,
        15 => 3,
        20 => 4,
        25 => 5,
        _ => 1,
    };

    _coreUML.UpdPicture();
}
/// <summary>
/// Loads the legacy XML format at <paramref name="path"/> into a static data
/// source, reading the Positive, Negative and Neutral record sections.
/// </summary>
/// <param name="path">Path to the legacy XML document.</param>
/// <returns>A StaticDataSource wrapping the loaded records.</returns>
public IDataSource LoadOldXml(string path)
{
    logger.LogInformation("Loading {0}", path);
    var doc = XDocument.Load(path);
    var data = new ProcessingData();

    // The original repeated the same foreach three times; the sections differ
    // only in element name and positivity label, so drive them from a table.
    var sections = new (string Element, PositivityType Type)[]
    {
        ("Positive", PositivityType.Positive),
        ("Negative", PositivityType.Negative),
        ("Neutral", PositivityType.Neutral),
    };

    foreach (var (element, type) in sections)
    {
        foreach (var item in GetRecords(doc.Descendants(element)))
        {
            data.Add(type, item);
        }
    }

    return new StaticDataSource(data);
}
/// <summary>
/// No-op strategy: returns the supplied processing data unchanged; the
/// garbage argument is ignored.
/// </summary>
public IProcessingData ProcessGarbage(IWaste garbage, ProcessingData processingData) => processingData;
/// <summary>
/// Implements muticore initialization of pooler.
/// </summary>
/// <param name="c">Connections holding the HTM configuration and columns.</param>
protected override void ConnectAndConfigureInputs(Connections c)
{
    List <KeyPair> colList = new List <KeyPair>();
    // Thread-safe collector for the per-column results of the parallel loop.
    ConcurrentDictionary <int, KeyPair> colList2 = new ConcurrentDictionary <int, KeyPair>();

    int numColumns = c.HtmConfig.NumColumns;

    // Parallel implementation of initialization
    ParallelOptions opts = new ParallelOptions();
    //int synapseCounter = 0;

    Parallel.For(0, numColumns, opts, (indx) =>
    {
        // NOTE(review): a fixed seed (42) per column makes initialization
        // deterministic across runs and identical for every column's RF
        // mapping — confirm this is intentional rather than a debug leftover.
        Random rnd = new Random(42);

        int colIndex = (int)indx;
        var data = new ProcessingData
        {
            // Gets RF
            Potential = HtmCompute.MapPotential(c.HtmConfig, colIndex, rnd /*(c.getRandom()*/),
            Column = c.GetColumn(colIndex)
        };

        // This line initializes all synases in the potential pool of synapses.
        // It creates the pool on proximal dendrite segment of the column.
        // After initialization permancences are set to zero.
        data.Column.CreatePotentialPool(c.HtmConfig, data.Potential, -1);
        //connectColumnToInputRF(c.HtmConfig, data.Potential, data.Column);

        //Interlocked.Add(ref synapseCounter, data.Column.ProximalDendrite.Synapses.Count);
        //colList.Add(new KeyPair() { Key = i, Value = column });

        data.Perm = HtmCompute.InitSynapsePermanences(c.HtmConfig, data.Potential, c.HtmConfig.Random);
        data.AvgConnected = GetAvgSpanOfConnectedSynapses(c, colIndex);

        HtmCompute.UpdatePermanencesForColumn(c.HtmConfig, data.Perm, data.Column, data.Potential, true);

        // NOTE(review): a failed TryAdd is silently ignored; colIndex keys are
        // unique per iteration, so a failure here should be impossible.
        if (!colList2.TryAdd(colIndex, new KeyPair() { Key = colIndex, Value = data }))
        {
        }
    });

    //c.setProximalSynapseCount(synapseCounter);

    List <double> avgSynapsesConnected = new List <double>();

    // Sequential pass: collect averages and build the column list for the
    // (possibly remote) sparse matrix update below.
    foreach (var item in colList2.Values)
    //for (int i = 0; i < numColumns; i++)
    {
        int i = (int)item.Key;
        ProcessingData data = (ProcessingData)item.Value;
        //ProcessingData data = new ProcessingData();

        // Debug.WriteLine(i);
        //data.Potential = mapPotential(c, i, c.isWrapAround());
        //var st = string.Join(",", data.Potential);
        //Debug.WriteLine($"{i} - [{st}]");

        //var counts = c.getConnectedCounts();
        //for (int h = 0; h < counts.getDimensions()[0]; h++)
        //{
        //    // Gets the synapse mapping between column-i with input vector.
        //    int[] slice = (int[])counts.getSlice(h);
        //    Debug.Write($"{slice.Count(y => y == 1)} - ");
        //}
        //Debug.WriteLine(" --- ");
        // Console.WriteLine($"{i} - [{String.Join(",", ((ProcessingData)item.Value).Potential)}]");

        // This line initializes all synases in the potential pool of synapses.
        // It creates the pool on proximal dendrite segment of the column.
        // After initialization permancences are set to zero.
        //var potPool = data.Column.createPotentialPool(c, data.Potential);
        //connectColumnToInputRF(c, data.Potential, data.Column);

        //data.Perm = initPermanence(c.getSynPermConnected(), c.getSynPermMax(),
        //      c.getRandom(), c.getSynPermTrimThreshold(), c, data.Potential, data.Column, c.getInitConnectedPct());
        //updatePermanencesForColumn(c, data.Perm, data.Column, data.Potential, true);

        avgSynapsesConnected.Add(data.AvgConnected);
        colList.Add(new KeyPair() { Key = i, Value = data.Column });
    }

    SparseObjectMatrix <Column> mem = (SparseObjectMatrix <Column>)c.HtmConfig.Memory;

    if (mem.IsRemotelyDistributed)
    {
        // Pool is created and attached to the local instance of Column.
        // Here we need to update the pool on remote Column instance.
        mem.set(colList);
    }

    // The inhibition radius determines the size of a column's local
    // neighborhood. A cortical column must overcome the overlap score of
    // columns in its neighborhood in order to become active. This radius is
    // updated every learning round. It grows and shrinks with the average
    // number of connected synapses per column.
    UpdateInhibitionRadius(c, avgSynapsesConnected);
}
/// <summary>
/// Base strategy: captures the garbage to process and the shared processing
/// data the strategy accumulates into.
/// </summary>
/// <param name="garbage">The garbage this strategy will process.</param>
/// <param name="processingData">Shared accumulator for processing results.</param>
protected Strategy(IGarbage garbage, ProcessingData processingData)
{
    this.Garbage = garbage;

    // BUG FIX: the original discarded the injected instance and assigned a
    // fresh `new ProcessingData()`, so results never reached the caller's
    // shared accumulator. Store the injected instance instead.
    this.processingData = processingData;
}
/// <summary>
/// Strategy for recyclable garbage; all state wiring is delegated to the
/// base Strategy constructor.
/// </summary>
/// <param name="garbage">The garbage this strategy will process.</param>
/// <param name="processingData">Shared accumulator for processing results.</param>
public RecyclableStrategy(IGarbage garbage, ProcessingData processingData)
    : base(garbage, processingData)
{
}
/// <summary>
/// Builds the INSERT statement for a ProcessingData row.
/// WARNING(review): this builds SQL by string interpolation. Prefer a
/// parameterized command (SqlParameter) so values are never spliced into the
/// SQL text at all; the escaping below is only a stop-gap.
/// </summary>
/// <param name="fileDetailsId">FK value for the FileDetailsID column.</param>
/// <param name="processingData">Row values (UKPRN, step, execution time).</param>
/// <returns>The complete INSERT statement.</returns>
private string BuildInsertProcessingDataSql(long fileDetailsId, ProcessingData processingData)
{
    // Double up embedded single quotes so string values cannot break out of
    // their SQL literals (the original interpolated them raw).
    string step = $"{processingData.ProcessingStep}".Replace("'", "''");
    string time = $"{processingData.ExecutionTime}".Replace("'", "''");

    return $"INSERT INTO [dbo].[ProcessingData] ([UKPRN], [FileDetailsID], [ProcessingStep], [ExecutionTime]) VALUES ({processingData.UKPRN}, {fileDetailsId}, '{step}', '{time}')";
}
/// <summary>
/// No-op implementation of Execute: this component performs no processing on
/// <paramref name="data"/>.
/// </summary>
public void Execute(ProcessingData data) { }
/// <summary>
/// Wraps an already-materialized ProcessingData instance as a data source.
/// </summary>
/// <param name="data">The data this source will serve.</param>
public StaticDataSource(ProcessingData data) => this.data = data;
/// <summary>
/// Fetches the "Values" channel from the processing data and scans it for
/// its extrema via FindMinMax.
/// </summary>
/// <param name="data">Container providing the named float channel.</param>
public void Execute(ProcessingData data)
{
    FindMinMax(data.GetAs<float>("Values"));
}
/// <summary>
/// Not yet implemented.
/// </summary>
/// <exception cref="System.NotImplementedException">Always thrown.</exception>
public IProcessingData ProcessGarbage(IWaste garbage, ProcessingData processingData) =>
    throw new System.NotImplementedException();