/// <summary>
/// AND gate: the single output becomes true only when every input is known
/// (non-null) and true. With fewer than two inputs the gate outputs false.
/// </summary>
public override void Calculate()
{
    // Skip evaluation unless there is exactly one output and no input is indeterminate.
    if (Outputs.Count != 1 || !Inputs.All(i => i.State.HasValue))
    {
        return;
    }

    bool allHigh = Inputs.Count >= 2 && Inputs.All(i => i.State == true);
    Outputs.First().State = allHigh;
}
/// <summary>
/// OR gate: the single output becomes true when at least one input is true,
/// provided every input is known (non-null). With fewer than two inputs the
/// gate outputs false.
/// </summary>
public override void Calculate()
{
    // Skip evaluation unless there is exactly one output and no input is indeterminate.
    if (Outputs.Count != 1 || !Inputs.All(i => i.State.HasValue))
    {
        return;
    }

    // "not all false" is equivalent to "any true" once every state is known.
    Outputs.First().State = Inputs.Count >= 2 && Inputs.Any(i => i.State == true);
}
/// <summary>
/// One-shot pulse gate: when the single input reads true, raises the single
/// output immediately and schedules it to drop back to false after
/// <c>delay</c> seconds via an Rx timer.
/// </summary>
public override void Calculate()
{
    if (Inputs.Count == 1 && Outputs.Count == 1)
    {
        if (Inputs.First().State == true)
        {
            // Only start a new pulse when no timer is currently pending.
            if (disposable == null)
            {
                // create timer; fall back to the default scheduler when none was injected
                var observable = Observable.Timer(DateTimeOffset.Now.AddSeconds(delay), scheduler == null ? Scheduler.Default : scheduler);
                // start pulse
                Outputs.First().State = true;
                // subscribe to timer
                disposable = observable.Subscribe(x =>
                {
                    // update output
                    if (Outputs.Count == 1)
                    {
                        // end pulse after time 'delay'
                        Outputs.First().State = false;
                    }
                    // dispose timer so a later input edge can start a new pulse
                    disposable.Dispose();
                    disposable = null;
                });
            }
        }
    }
}
/// <summary>
/// Selects the output whose device name matches <paramref name="output"/>
/// and stores it in <c>SelectedOutput</c>.
/// </summary>
/// <param name="output">Device name of the output to select.</param>
/// <exception cref="InvalidOperationException">No output matches the given name (thrown by <c>First</c>).</exception>
/// <exception cref="NullReferenceException">The matched output entry is null.</exception>
public void SelectOutput(string output)
{
    SelectedOutput = Outputs.First(tmp => tmp.Description.DeviceName == output);

    // BUG FIX: the original guarded `SelectedAdapter is null` — a copy/paste
    // slip, since the property assigned above is SelectedOutput.
    if (SelectedOutput is null)
    {
        throw new NullReferenceException();
    }
}
/// <summary>
/// Writes <paramref name="outputValue"/> to the neuron's first output synapse,
/// creating an output synapse on demand when none exists yet.
/// </summary>
/// <param name="outputValue">Value to push onto the output.</param>
public void PushValueOnOutput(double outputValue)
{
    if (!Outputs.Any())
    {
        // Lazily create the first output synapse.
        AddOutputSynapse(outputValue);
    }

    var firstSynapse = (InputSynapse)Outputs.First();
    firstSynapse.Output = outputValue;
}
/// <summary>
/// Wires one parsed output statement to its source pin.
/// Expected identifier layout after filtering: nameTokens[0] = output pin name,
/// nameTokens[1] = circuit input name (flat form) or gate name (nested form),
/// nameTokens[2] = gate output pin name (nested form only).
/// A "!" token inserts a Negation gate between the source and the output.
/// </summary>
/// <param name="tokens">Tokens of a single output statement.</param>
private void ProcessOutput(List<Token> tokens)
{
    Pin pin = new Pin();
    bool nested = false;   // saw "." -> source is another gate's output pin
    bool negation = false; // saw "!" -> source must pass through a Negation gate
    List<Token> nameTokens = new List<Token>();

    // Single pass: set flags for "!" and ".", collect identifiers in order.
    for (int i = 0; i < tokens.Count; i++)
    {
        if (tokens[i].Value == "!")
        {
            negation = true;
            continue;
        }
        if (tokens[i].Value == ".")
        {
            nested = true;
            continue;
        }
        if (tokens[i].Type == Token.TokenType.Identifier)
        {
            nameTokens.Add(tokens[i]);
        }
    }

    if (nested)
    {
        // Source is <gate>.<outputPin>.
        var gate = Gates.First(x => x.Name == nameTokens[1].Value);
        pin = gate.Outputs.First(x => x.Name == nameTokens[2].Value);
    }
    else
    {
        // Source is a top-level circuit input pin.
        pin = Inputs.First(x => x.Name == nameTokens[1].Value);
    }

    if (negation)
    {
        // Route the source through a new Negation gate and use its output instead.
        var negationPin1 = new Pin(pin.Name);
        var negationGate = new Negation(negationPin1);
        Links.Add(new Link(negationPin1, pin));
        Gates.Add(negationGate);
        pin = negationGate.Outputs[0];
    }

    // Finally link the named circuit output to the resolved source pin.
    Links.Add(new Link(Outputs.First(x => nameTokens[0].Value == x.Name), pin));
}
/// <summary>
/// Test action: asserts that the single input mark carries value 5, then
/// emits mark 123 on output "B" and mark 321 on output "C".
/// </summary>
public override void ActionCaller()
{
    var firstMark = Inputs.Values.SelectMany(list => list).Cast<Mark>().First();
    if (firstMark.value != 5)
    {
        throw new Exception("Bad test inputs");
    }

    var outputB = Outputs.First(p => p.Key.Name == "B").Value;
    outputB.Add(MarkType.Create<Mark>(123));

    var outputC = Outputs.First(p => p.Key.Name == "C").Value;
    outputC.Add(MarkType.Create<Mark>(321));
}
/// <summary>
/// Loads the view context: fills the "Block.Genre" output into the map,
/// default-selects the first genre in the combo box, and writes the series
/// info query result for the resolved genre into the "Data" output.
/// </summary>
/// <param name="map">IO map this view reads from and writes to.</param>
protected override void LoadMyViewContext(IoMap map)
{
    // Fill the genre output (presumably this also populates cbGenre — TODO confirm against Fill()).
    Outputs.First(X => X.Name == "Block.Genre").Fill(map);

    // Default-select the first combo entry when there is one.
    cbGenre.SelectedIndex = cbGenre.Items.Count > 0 ? 0 : -1;

    // Empty combo box -> fall back to the first genre in the database.
    Genre genre = cbGenre.Items.Count == 0
        ? DataAccess.SelectAll<Genre>().FirstOrDefault()
        : cbGenre.SelectedItem as Genre;

    if (genre != null)
    {
        map.SetOutput("Data", DataAccess.Execute(Resources.SelectSeriesInfoQuery,
            new[] { new QueryParam("genreId", QueryParamType.Integer) },
            new object[] { genre.GenreId }));
    }
}
/// <summary>
/// Resolves a call on this structure: namespace member access, class
/// construction / bound-method lookup, or plain output compilation with an
/// output-type satisfaction check.
/// </summary>
/// <param name="arguments">Compiled argument functions for the call.</param>
/// <param name="output">Name of the requested output port or member.</param>
/// <param name="context">Compilation context used for errors and call-site tracking.</param>
public override IFunction CallInternal(IFunction[] arguments, string output, CompilationContext context)
{
    // Namespace member access with no arguments: compile the member directly.
    if (IsNamespace && arguments.Length == 0)
    {
        return(CompileIntermediate(arguments, output, context));
    }
    else if (IsNamespace && !IsClass)
    {
        // A pure namespace cannot be called with arguments.
        return(context.LogError($"This is a Namespace, so it has no constructor"));
    }

    // Class called with a full argument list where `output` names a namespace
    // member rather than an output port: treat as bound-method lookup.
    if (IsClass && arguments.Length == Inputs.Length && Outputs.All(p => p.Name != output) && NamespaceMembers.Contains(output))
    {
        var memberFunction = CompileIntermediate(Array.Empty<IFunction>(), output, context);
        // Only functions whose first input is `this` can be bound to an instance.
        if (memberFunction.Inputs.Length > 0 && memberFunction.Inputs[0].Name == "this")
        {
            var classInstance = this.Call(arguments, context);
            return(classInstance.AsMethod(memberFunction, MakeCallSite(Ast), context));
        }
    }

    // Validate the argument list; any non-null result is an error sentinel.
    context.Push(MakeCallSite(Ast));
    if (this.CheckArguments(arguments, output, context) != null)
    {
        context.Pop();
        return(Error.Instance);
    }
    context.Pop();

    // Class construction: the requested output is the positionally matching argument.
    if (IsClass)
    {
        return(arguments[Array.FindIndex(Outputs, p => p.Name == output)]);
    }

    // Plain function: compile the output value and verify it satisfies the port type.
    var outputPort = Outputs.First(p => p.Name == output);
    var outValue = CompileIntermediate(arguments, output, context);
    var success = outputPort.Type.SatisfiedBy(outValue, context);
    return(success switch
    {
        false => context.LogError("ELE0008", $"Output `{outputPort}` was not satisfied by its value `{outValue}` (See previous errors)"),
        null => Error.Instance,
        _ => outValue
    });
    // NOTE(review): the method's closing brace appears to be truncated in this
    // view of the file — the code above is reproduced exactly as found.
/// <summary>
/// Perform a Forward operation of Operator.
/// After this operation, user can get the result by using function head.
/// </summary>
/// <param name="isTrain">True when running in training mode; passed to the native executor as 1/0.</param>
public void Forward(bool isTrain)
{
    // NOTE(review): the return code of MXExecutorForward is not checked here,
    // unlike MXExecutorOutputs below — consider wrapping it in Util.CallCheck.
    NativeMethods.MXExecutorForward(_handle, isTrain ? 1 : 0);

    uint outSize;
    NDArrayHandle outArrayPtr;
    Util.CallCheck(NativeMethods.MXExecutorOutputs(_handle, out outSize, out outArrayPtr));

    // Copy the native array of output handles and wrap each one as an NdArray.
    var outArray = new NDArrayHandle[outSize];
    Marshal.Copy(outArrayPtr, outArray, 0, (int)outSize);
    for (var i = 0; i < outSize; ++i)
    {
        Outputs[i] = new NdArray(outArray[i]);
    }
    // FIX: removed the unused local `shape` (result of Outputs.First().GetShape()
    // was never read; GetShape() is presumed side-effect free — TODO confirm).
}
/// <summary>
/// Initializes the view model with the fixed GoXLR routing tables, selects
/// the first entry of each table, and hooks the server's client-update callback.
/// </summary>
/// <param name="logger">Logger for this view model.</param>
/// <param name="server">GoXLR server whose client updates this view model observes.</param>
public MainViewModel(ILogger<MainViewModel> logger, GoXLRServer server)
{
    _logger = logger;

    Clients = new List<ClientIdentifier>();

    // Fixed routing tables exposed to the UI.
    Inputs = new[] { "Mic", "Chat", "Music", "Game", "Console", "Line In", "System", "Samples" };
    Outputs = new[] { "Headphones", "Broadcast Mix", "Line Out", "Chat Mic", "Sampler" };
    Actions = new[] { "Turn On", "Turn Off", "Toggle" };

    // Default every selection to the first available entry.
    SelectedInput = Inputs.First();
    SelectedOutput = Outputs.First();
    SelectedAction = Actions.First();

    _server = server;
    _server.UpdateConnectedClientsEvent = UpdateClientState;
}
/// <summary>
/// Initializes the view model from the static routing tables and starts the
/// local WebSocket server on 127.0.0.1:6805.
/// </summary>
/// <param name="logger">Logger for this view model.</param>
public MainViewModel(ILogger<MainViewModel> logger)
{
    _logger = logger;
    _allProfiles = Enumerable.Empty<ProfileModel>();
    Clients = new List<string>();

    // Routing tables come from the shared static definition; default each
    // selection to the first entry.
    Inputs = Routing.Inputs;
    SelectedInput = Inputs.First();
    Outputs = Routing.Outputs;
    SelectedOutput = Outputs.First();
    Actions = Routing.Actions;
    SelectedAction = Actions.First();

    // Wire up the WebSocket server before starting it.
    var server = new WatsonWsServer("127.0.0.1", 6805, false);
    server.ClientConnected += ServerOnClientConnected;
    server.ClientDisconnected += ServerOnClientDisconnected;
    server.MessageReceived += ServerOnMessageReceived;
    server.ServerStopped += (sender, args) => _logger.LogInformation("Server Stopped");
    _server = server;
    _server.Start();
}
/// <summary>
/// Validates the call arguments and, when they are valid, returns the type
/// of the requested output port.
/// </summary>
/// <param name="arguments">Compiled argument functions for the call.</param>
/// <param name="output">Name of the requested output port.</param>
/// <param name="context">Compilation context used for error reporting.</param>
public override IFunction CallInternal(IFunction[] arguments, string output, CompilationContext context)
{
    // A non-null check result is an error sentinel and short-circuits the call.
    var argumentError = this.CheckArguments(arguments, output, context);
    if (argumentError != null)
    {
        return argumentError;
    }

    return Outputs.First(p => p.Name == output).Type;
}
/// <summary>
/// Runs the learning algorithm.
/// </summary>
///
/// <param name="token">A token to stop processing when requested.</param>
/// <param name="c">The complexity for each sample.</param>
protected override void Run(CancellationToken token, double[] c)
{
    // The SMO algorithm chooses to solve the smallest possible optimization problem
    // at every step. At every step, SMO chooses two Lagrange multipliers to jointly
    // optimize, finds the optimal values for these multipliers, and updates the SVM
    // to reflect the new optimal values.
    //
    // Reference: http://research.microsoft.com/en-us/um/people/jplatt/smoTR.pdf

    // The algorithm has been updated to implement the improvements suggested
    // by Keerthi et al. The code has been based on the pseudo-code available
    // on the author's technical report.
    //
    // Reference: http://www.cs.iastate.edu/~honavar/keerthi-svm.pdf

    // Initialize variables
    int samples = Inputs.Length;
    int dimension = Inputs[0].Length; // NOTE(review): not used in this method.
    this.c = c;

    // Lagrange multipliers
    Array.Clear(alpha, 0, alpha.Length);

    if (IsLinear) // Hyperplane weights
    {
        Array.Clear(weights, 0, weights.Length);
    }

    // Error cache
    Array.Clear(errors, 0, errors.Length);

    // Kernel evaluations cache
    this.kernelCache = new KernelFunctionCache(Kernel, Inputs, cacheSize);

    // [Keerthi] Initialize b_up to -1 and
    //   i_up to any one index of class 1:
    // NOTE(review): First(x => x > 0) returns the label VALUE (+1), not the
    // index of a positive example — verify this is intended.
    this.b_upper = -1;
    this.i_upper = Outputs.First(x => x > 0);

    // [Keerthi] Initialize b_low to +1 and
    //   i_low to any one index of class 2:
    this.b_lower = +1;
    this.i_lower = Outputs.First(x => x < 0);

    // [Keerthi] Set error cache for i_low and i_up:
    this.errors[i_lower] = +1;
    this.errors[i_upper] = -1;

    // Prepare indices sets
    activeExamples.Clear();
    nonBoundExamples.Clear();
    atBoundsExamples.Clear();

    // Algorithm:
    int numChanged = 0;
    int wholeSetChecks = 0;
    bool examineAll = true;
    bool diverged = false;
    bool shouldStop = false;

    while ((numChanged > 0 || examineAll) && !shouldStop)
    {
        numChanged = 0;

        if (examineAll)
        {
            // loop I over all training examples
            for (int i = 0; i < samples; i++)
            {
                if (examineExample(i))
                {
                    numChanged++;
                }
            }
            wholeSetChecks++;
        }
        else
        {
            if (strategy == SelectionStrategy.Sequential)
            {
                // loop I over examples not at bounds
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] != 0 && alpha[i] != c[i])
                    {
                        if (examineExample(i))
                        {
                            numChanged++;
                        }

                        // [Keerthi] early exit once the bounds are within tolerance
                        if (b_upper > b_lower - 2.0 * tolerance)
                        {
                            numChanged = 0;
                            break;
                        }
                    }
                }
            }
            else // strategy == Strategy.WorstPair
            {
                // Repeatedly optimize the worst violating pair (i_up, i_low).
                int attempts = 0;
                do
                {
                    attempts++;

                    if (!takeStep(i_upper, i_lower))
                    {
                        break;
                    }

                    if (attempts > samples * maxChecks)
                    {
                        break;
                    }
                }while ((b_upper <= b_lower - 2.0 * tolerance));

                numChanged = 0;
            }
        }

        // Alternate between full sweeps and non-bound sweeps, per Platt's SMO.
        if (examineAll)
        {
            examineAll = false;
        }
        else if (numChanged == 0)
        {
            examineAll = true;
        }

        if (wholeSetChecks > maxChecks)
        {
            shouldStop = diverged = true;
        }

        if (token.IsCancellationRequested)
        {
            shouldStop = true;
        }
    }

    // Store information about bounded examples
    for (int i = 0; i < alpha.Length; i++)
    {
        if (alpha[i] == c[i])
        {
            atBoundsExamples.Add(i);
        }
    }

    if (isCompact)
    {
        // Store the hyperplane directly
        Machine.SupportVectors = null;
        Machine.Weights = weights;
        Machine.Threshold = -(b_lower + b_upper) / 2.0;
    }
    else
    {
        // Store Support Vectors in the SV Machine. Only vectors which have Lagrange multipliers
        // greater than zero will be stored as only those are actually required during evaluation.
        int activeCount = activeExamples.Count;
        int[] idx = new int[activeCount];
        activeExamples.CopyTo(idx);

        Machine.SupportVectors = new double[activeCount][];
        Machine.Weights = new double[activeCount];
        for (int i = 0; i < idx.Length; i++)
        {
            int j = idx[i];
            Machine.SupportVectors[i] = Inputs[j];
            Machine.Weights[i] = alpha[j] * Outputs[j];
        }
        Machine.Threshold = -(b_lower + b_upper) / 2;
    }

    // Clear function cache
    this.kernelCache.Clear();
    this.kernelCache = null;

    if (diverged)
    {
        throw new ConvergenceException("Convergence could not be attained. " +
            "Please reduce the cost of misclassification errors by reducing " +
            "the complexity parameter C or try a different kernel function.");
    }
}
/// <summary>
/// Executes this build task exactly once: runs all dependencies first, skips
/// the body when the outputs are already up-to-date, and funnels concurrent
/// callers into awaiting the same completion task.
/// </summary>
/// <param name="build_tasks">Shared build context providing the execution semaphore.</param>
public async Task Execute(BuildTasks build_tasks)
{
    // First caller wins the race; every later caller awaits completed_task below.
    if (started_task.TrySetResult(true))
    {
        var watch = new System.Diagnostics.Stopwatch();
        try
        {
            Log("Launching task");
            var deps = Dependencies.ToArray();
            var dep_tasks = new Task[deps.Length];
            for (int i = 0; i < deps.Length; i++)
            {
                dep_tasks[i] = deps[i].Execute(build_tasks);
            }

            Log("Waiting for dependencies to complete.");
            await Task.WhenAll(dep_tasks);
            Log("Done waiting for dependencies.");

            // We can only check if we're up-to-date after executing dependencies.
            if (IsUptodate)
            {
                if (Outputs.Count() > 1)
                {
                    Driver.Log(3, "Targets '{0}' are up-to-date.", string.Join("', '", Outputs.ToArray()));
                }
                else
                {
                    Driver.Log(3, "Target '{0}' is up-to-date.", Outputs.First());
                }
                // false = nothing needed rebuilding.
                completed_task.SetResult(false);
            }
            else
            {
                Driver.Log(3, "Target(s) {0} must be rebuilt.", string.Join(", ", Outputs.ToArray()));
                Log("Dependencies are complete.");
                // Throttle concurrent task bodies through the shared semaphore.
                await build_tasks.AcquireSemaphore();
                try
                {
                    Log("Executing task");
                    watch.Start();
                    await ExecuteAsync();
                    watch.Stop();
                    Log("Completed task {0} s", watch.Elapsed.TotalSeconds);
                    // true = work was performed.
                    completed_task.SetResult(true);
                }
                finally
                {
                    build_tasks.ReleaseSemaphore();
                }
            }
        }
        catch (Exception e)
        {
            Log("Completed task in {0} s with exception: {1}", watch.Elapsed.TotalSeconds, e.Message);
            // Propagate the failure to all awaiting callers, then rethrow for this one.
            completed_task.SetException(e);
            throw;
        }
    }
    else
    {
        // Another caller already started execution; just await its outcome.
        Log("Waiting for started task");
        await completed_task.Task;
        Log("Waited for started task");
    }
}
// Start learning.
/// <summary>
/// Trains the deep belief network: converts flow samples to training data,
/// runs layer-wise unsupervised pre-training (contrastive divergence), then
/// supervised backpropagation, saving the network to <c>Path</c> along the way.
/// </summary>
/// <returns>True when training completed without an exception; false otherwise.</returns>
public bool Run()
{
    bool IsDone = false;
    try
    {
        FlowDatas db = new FlowDatas();
        // Convert flow samples with a non-zero behavior number into input/output vectors.
        (double[][] Inputs, double[][] Outputs) = DeepLearningTools.FlowSampleToLearningData(db.FlowSampleStatistics.Where(c => c.BehaviorNumber != 0).ToArray());
        db.Dispose();

        // Build the DBN: input layer, two shrinking hidden layers, output layer.
        DBNetwork = new DeepBeliefNetwork(Inputs.First().Length,
            (int)((Inputs.First().Length + Outputs.First().Length) / 1.5),
            (int)((Inputs.First().Length + Outputs.First().Length) / 2),
            Outputs.First().Length);

        // Randomize all network weights with Gaussian noise.
        new GaussianWeights(DBNetwork, 0.1).Randomize();
        DBNetwork.UpdateVisibleWeights();

        // Configure unsupervised learning (contrastive divergence per layer).
        DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(DBNetwork)
        {
            Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
            {
                LearningRate = 0.01,
                Momentum = 0.5,
                Decay = 0.001,
            }
        };

        // Set up batched input learning.
        int batchCount1 = Math.Max(1, Inputs.Length / 10);
        // Create mini-batches to speed up learning.
        int[] groups1 = Accord.Statistics.Classes.Random(Inputs.Length, batchCount1);
        double[][][] batches = Inputs.Subgroups(groups1);
        // Input data for the layer currently being trained.
        double[][][] layerData;

        // Run unsupervised pre-training layer by layer.
        // NOTE(review): the last machine (index Machines.Count - 1) is never
        // pre-trained by this loop — confirm that is intended.
        for (int layerIndex = 0; layerIndex < DBNetwork.Machines.Count - 1; layerIndex++)
        {
            teacher.LayerIndex = layerIndex;
            layerData = teacher.GetLayerInput(batches);
            for (int i = 0; i < 200; i++)
            {
                double error = teacher.RunEpoch(layerData) / Inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }
        }

        // Supervised learning over the whole network to produce output classification.
        var teacher2 = new ParallelResilientBackpropagationLearning(DBNetwork);
        double error1 = double.MaxValue;
        // Run supervised learning; the network is saved after every epoch.
        for (int i = 0; i < 500; i++)
        {
            error1 = teacher2.RunEpoch(Inputs, Outputs) / Inputs.Length;
            Console.WriteLine(i + ", Error = " + error1);
            DBNetwork.Save(Path);
            Console.WriteLine("Save Done");
        }

        DBNetwork.Save(Path);
        Console.WriteLine("Save Done");
        IsDone = true;
    }
    catch (Exception ex)
    {
        // Best-effort: failures are logged and reported only via the return value.
        Debug.Write(ex.ToString());
    }
    return(IsDone);
}
/// <summary>
/// Returns the current value of the node's first output.
/// </summary>
public override double CalculateOutput() => Outputs.First().GetOutput();