/// <summary>
/// Enables or disables IIS directory browsing for the application and sets the
/// metadata columns (date, time, size, extension) shown in listings.
/// </summary>
/// <param name="app">The IIS application whose web.config is updated.</param>
/// <param name="activation">Enable turns browsing on; anything else turns it off.</param>
public static void DirectoryBrowsing(this Application app, Activation activation)
{
    var section = app.GetWebConfiguration().GetSection("system.webServer/directoryBrowse");
    section["enabled"] = activation == Activation.Enable;
    section["showFlags"] = @"Date, Time, Size, Extension";
}
/// <summary>
/// Builds an interactive shape from <paramref name="Segments"/> and wires up its
/// gesture handlers. Unless <paramref name="Real"/> is true, also constructs a
/// second, visible "Real" twin that shares this shape's transform.
/// </summary>
/// <remarks>
/// NOTE(review): the twin is created with Real = true, which skips this whole body
/// — that is what stops the constructor from recursing forever. Confirm that a
/// "Real" instance intentionally gets no data/handlers of its own.
/// </remarks>
public Avalon(GeometryFactory Segments, Activation Attach, Activation Detach, ManipulationDeltaEventHandler Router, byte definition, bool Real = false)
{
    if (!Real)
    {
        this.definition = definition;
        Data = Segments.Pattern();
        ManipulationMode = Windows.UI.Xaml.Input.ManipulationModes.All;
        RenderTransform = new CompositeTransform();
        RenderTransformOrigin = new Windows.Foundation.Point(0.5, 0.5);
        // Fully transparent fill: the interactive copy is invisible but still hit-testable.
        Fill = new SolidColorBrush(new Color { A = 0, R = 0, G = 0, B = 0 });
        this.Attach = Attach;
        this.Detach = Detach;
        this.Router = Router;
        Tapped += Avalon_Tapped;
        Holding += Avalon_Holding;
        Loaded += Avalon_Loaded;
        Unloaded += Avalon_Unloaded;
        ManipulationDelta += Avalon_ManipulationDelta;
        // Visible twin; shares the SAME transform instance so it moves with this shape.
        this.Real = new Avalon(Segments, Attach, Detach, null, definition, true);
        this.Real.RenderTransform = RenderTransform;
        this.Real.RenderTransformOrigin = RenderTransformOrigin;
        this.Real.Data = Segments.Pattern();
        this.Real.Fill = new SolidColorBrush(Colors.RosyBrown);
        // Interactive (transparent) copy sits above the visible twin.
        Canvas.SetZIndex(this, 2);
        Canvas.SetZIndex(this.Real, 0);
        Avalon_Holding(null, null);
    }
}
/// <summary>
/// Dispatches <paramref name="message"/> to the factory registered for its runtime
/// type. Null messages are silently ignored.
/// </summary>
public void Create(object message, Activation activation)
{
    if (message is null)
    {
        return;
    }

    var factory = _typeFactoryCache[message.GetType()];
    factory.Create(message, activation);
}
/// <summary>
/// Captures the geometry data, activation callbacks, manipulation router and
/// default placement used when producing shapes.
/// </summary>
public AFactory(List<GeometryFactory> Data, Activation Attach, Activation Detach, ManipulationDeltaEventHandler Router, Point DefaultLocation)
{
    this.Data = Data;
    this.DefaultLocation = DefaultLocation;
    this.Attach = Attach;
    this.Detach = Detach;
    this.Router = Router;
}
/// <summary>
/// Creates a fully connected layer with the given neuron count and optional
/// activation. Decay multipliers default to no L1 and full L2 regularization.
/// </summary>
public FullyConnLayer(int neuronCount, Activation activation = Activation.Undefined)
{
    NeuronCount = neuronCount;
    Activation = activation;
    L1DecayMul = 0.0;
    L2DecayMul = 1.0;
}
/// <summary>
/// Stores the full description of a component registration: lookup key, exposed
/// service, concrete implementation, lifestyle, activation mode and configuration.
/// </summary>
public DefaultComponentProfile(
    String key,
    Type service,
    Type implementation,
    Lifestyle lifestyle,
    Activation activation,
    IConfiguration configuration)
{
    m_key = key;
    m_service = service;
    m_implementation = implementation;
    m_lifestyle = lifestyle;
    m_activation = activation;
    m_configuration = configuration;
}
/// <summary>
/// Upserts the activation: inserts when no record with its id exists yet,
/// otherwise updates the existing row.
/// </summary>
public void Save(Activation activation)
{
    var db = UnitOfWork.Current;
    var existing = FindById(activation.Id);
    if (existing == null)
    {
        db.Insert(activation);
    }
    else
    {
        db.Update(activation);
    }
}
/// <summary>
/// Queues an activation at the given priority, honouring refraction: a
/// non-repeatable rule that has already fired is dropped, and an activation
/// already pending is not queued twice.
/// </summary>
public void Enqueue(int priority, Activation activation)
{
    // Add returns false when the activation was already refracted (fired before).
    bool firedBefore = !_refractions.Add(activation);
    if (firedBefore && activation.Rule.Repeatability == RuleRepeatability.NonRepeatable)
    {
        return;
    }

    // Only enqueue activations not already pending.
    if (_activations.Add(activation))
    {
        _queue.Enqueue(priority, activation);
    }
}
/// <summary>
///   Checks the <see cref="faults" /> for criticality using the <see cref="activation" /> mode.
/// </summary>
/// <param name="faults">The fault set that should be checked for criticality.</param>
/// <param name="activation">The activation mode of the fault set.</param>
internal override AnalysisResult CheckCriticality(FaultSet faults, Activation activation)
{
    // Collect every fault whose effective activation resolves to Suppressed;
    // FaultSet is immutable, so Add returns a new set.
    var suppressedFaults = new FaultSet();
    foreach (var fault in Model.Faults)
    {
        if (GetEffectiveActivation(fault, faults, activation) == Activation.Suppressed)
            suppressedFaults = suppressedFaults.Add(fault);
    }

    // Replace any previously installed transition modifiers with one that
    // suppresses exactly the faults computed above, then run the check.
    _checker.Context.TraversalParameters.TransitionModifiers.Clear();
    _checker.Context.TraversalParameters.TransitionModifiers.Add(() => new FaultSuppressionModifier(suppressedFaults));
    return _checker.Check();
}
// Deactivating with an equivalent activation (same rule/tuple) must drain the queue,
// even though it is a different Activation instance.
public void Deactivate_CalledAfterActivation_ActivationQueueEmpty()
{
    // Arrange
    var compiledRule = new Mock<ICompiledRule>();
    var fact = new FactObject { Value = "Test" };
    var factTuple = CreateTuple(fact);
    var target = CreateTarget();
    target.Activate(new Activation(compiledRule.Object, factTuple, null));

    // Act
    target.Deactivate(new Activation(compiledRule.Object, factTuple, null));

    // Assert
    Assert.False(target.HasActiveRules());
}
/// <summary>
/// Determines whether an explicit (non-implicit) binding for the target's type
/// exists in this kernel or any ancestor kernel; targets with a default value
/// always count as satisfiable.
/// </summary>
protected override bool BindingExists(Activation.IContext context, Planning.Targets.ITarget target)
{
    if (target.HasDefaultValue)
        return true;

    var targetType = GetTargetType(target);

    // Walk the kernel chain from the resolving kernel up through parents.
    for (var kernel = context.Kernel; kernel != null;)
    {
        if (kernel.GetBindings(targetType).Any(b => !b.IsImplicit))
            return true;

        if (kernel is IChildKernel child)
            kernel = child.ParentResolutionRoot as IKernel;
        else
            break;
    }

    return false;
}
// Activating a rule puts exactly one activation in the queue; draining it
// returns the same rule/fact and leaves the queue empty.
public void Activate_Called_ActivationEndsUpInQueue()
{
    // Arrange
    var compiledRule = new Mock<ICompiledRule>();
    var fact = new FactObject { Value = "Test" };
    var activation = new Activation(compiledRule.Object, CreateTuple(fact), null);
    var target = CreateTarget();

    // Act
    target.Activate(activation);

    // Assert
    Assert.True(target.HasActiveRules());
    var dequeued = target.NextActivation();
    Assert.AreEqual(compiledRule.Object, dequeued.Rule);
    Assert.AreEqual(fact, dequeued.Tuple.RightFact.Object);
    Assert.False(target.HasActiveRules());
}
// Two activations must come back out of the queue in activation order (FIFO).
public void Activate_CalledWithMultipleRules_RulesAreQueuedInOrder()
{
    // Arrange
    var firstRule = new Mock<ICompiledRule>();
    var secondRule = new Mock<ICompiledRule>();
    var target = CreateTarget();

    // Act
    target.Activate(new Activation(firstRule.Object, new Tuple(), null));
    target.Activate(new Activation(secondRule.Object, new Tuple(), null));

    // Assert
    Assert.True(target.HasActiveRules());
    Assert.AreEqual(firstRule.Object, target.NextActivation().Rule);
    Assert.True(target.HasActiveRules());
    Assert.AreEqual(secondRule.Object, target.NextActivation().Rule);
}
/// <summary>
/// Computes the activation function value and its derivative for neuron
/// <paramref name="n"/>.
/// </summary>
/// <param name="n">Neuron index.</param>
/// <param name="net">Net (weighted) input of the neuron.</param>
/// <param name="acct">Activation matrix selecting the function per neuron.</param>
/// <param name="gain">Gain matrix scaling the function per neuron.</param>
/// <returns>FunctionRD holding the function value and its derivative.</returns>
/// <remarks>Values are passed by ref to speed up the hot training loop.</remarks>
public static FunctionRD computeFunctionDervative(ref int n, ref double net, ref Activation acct, ref Gain gain)
{
    var rd = new FunctionRD();
    double g = gain.Data[0][n]; // per-neuron gain, used by every branch below
    var function = (FunctionChoice)(int)acct.Data[0][n];

    switch (function)
    {
        case FunctionChoice.LinearNeuron: // typically the output layer
            rd.FunctionResult = g * net;
            rd.FunctionDerivative = g;
            break;
        case FunctionChoice.UnipolarNeuron: // logistic sigmoid
            rd.FunctionResult = 1 / (1 + System.Math.Exp(-g * net));
            rd.FunctionDerivative = g * (1 - rd.FunctionResult) * rd.FunctionResult;
            break;
        case FunctionChoice.BipolarNeuron: // tanh, typically hidden layers
            rd.FunctionResult = System.Math.Tanh(g * net);
            rd.FunctionDerivative = g * (1 - System.Math.Pow(rd.FunctionResult, 2));
            break;
        case FunctionChoice.BipolarElliotNeuron:
            rd.FunctionResult = g * net / (1 + g * System.Math.Abs(net));
            rd.FunctionDerivative = 1 / System.Math.Pow(g * System.Math.Abs(net) + 1, 2);
            break;
        case FunctionChoice.UnipolarElliotNeuron:
            rd.FunctionResult = 2 * g * net / (1 + g * System.Math.Abs(net)) - 1;
            rd.FunctionDerivative = 2 * g / System.Math.Pow(g * System.Math.Abs(net) + 1, 2);
            break;
        default:
            throw new NeuralNetworkError(Properties.Settings.Default.FE1);
    }

    return rd;
}
/// <summary>
/// Creates an encrypted forms-authentication ticket for the email, stores it
/// under a fresh random hash, and returns that hash as the lookup token.
/// </summary>
private string StoreAuthenticationTicket(TimeSpan expiry, string email, string tag)
{
    var issuedAt = DateTime.UtcNow;
    var ticket = _authenticationService.Encrypt(
        new FormsAuthenticationTicket(1, email, issuedAt, issuedAt.Add(expiry), false, tag));

    // Random GUID hashed to MD5 acts as the public handle for this ticket.
    var hash = Guid.NewGuid().ToString().MD5();
    _activationRepository.Save(new Activation { Hash = hash, Ticket = ticket });

    return hash;
}
/// <summary>
/// Computes the pseudo-Hessian matrix and its gradient (Levenberg–Marquardt style)
/// over all training patterns, accumulating into HessianMat and GradientMat.
/// </summary>
/// <param name="info">NetworkInfo - information about neural network</param>
/// <param name="inp">Input - input data patterns used for learn</param>
/// <param name="dout">Output - output data patterns used for learn</param>
/// <param name="topo">Topography - neural network topography</param>
/// <param name="ww">Weights - weights</param>
/// <param name="act">Activation - activation function selection</param>
/// <param name="gain">Gain - neuron gain</param>
/// <param name="iw">Index - topography indexes</param>
public void Compute(ref NetworkInfo info, ref Input inp, ref Output dout, ref Topography topo, Weights ww, ref Activation act, ref Gain gain, ref Index iw)
{
    GradientMat = MatrixMB.Zeros(info.nw, 1);
    HessianMat = MatrixMB.Zeros(info.nw, info.nw);

    // Cache the network dimensions in instance fields used by the helpers below.
    np = info.np;//number of patterns
    ni = info.ni;//number of inputs
    no = info.no;//number of outputs
    nw = info.nw;//number of weights
    nn = info.nn;//number of neurons
    nio = nn + ni - no; // index of the first output node in the flat node list
    zeros = ni.Zeros();
    delo = MatrixMB.Zeros(1, nio + 1);
    J = MatrixMB.Zeros(1, nw); // Jacobian row for one (pattern, output) pair

    for (p = 0; p < np; p++)
    {
        // Seed the node list with this pattern's inputs, then run a forward pass
        // that fills node values and their derivatives.
        node.Clear();
        node.AddRange(inp.Data[p]);
        CalculateFunctionValuesAndDerivates(ref ww, ref iw, ref topo, ref act, ref gain);

        for (k = 0; k < no; k++)
        {
            o = nio + k; // flat index of output neuron k
            error = dout.Data[p][k] - node[o];
            J.ClearWithZeros();
            s = iw.Pos(o - ni);
            J.Data[0][s] = -derivates[o];
            delo.ClearWithZeros();
            CalculateJacobian(ref ww, ref iw, ref topo);
            CalculateForHiddenLayer(ref iw, ref topo, ref ww);
            // NOTE(review): patterns whose first desired output exceeds 0.5 get their
            // Jacobian row scaled by `ratio` — presumably class re-weighting; confirm.
            if (dout[p, 0] > 0.5) J = J * ratio;
            var JT = J.Transposed;
            // Accumulate gradient (J^T * e) and pseudo-Hessian (J^T * J).
            GradientMat = GradientMat + JT * error;
            HessianMat = HessianMat + JT * J;
        }
    }
}
/// <summary>
/// Computes the activation function value for neuron <paramref name="n"/>.
/// </summary>
/// <param name="n">Neuron index.</param>
/// <param name="net">Net (weighted) input of the neuron.</param>
/// <param name="acct">Activation matrix selecting the function per neuron.</param>
/// <param name="gain">Gain matrix scaling the function per neuron.</param>
/// <returns>The activation function result.</returns>
/// <remarks>Values are passed by ref to speed up the hot training loop.</remarks>
public static double computeFunction(ref int n, ref double net, ref Activation acct, ref Gain gain)
{
    try
    {
        double g = gain.Data[0][n]; // per-neuron gain, used by every branch below
        var function = (FunctionChoice)(int)acct.Data[0][n];

        switch (function)
        {
            case FunctionChoice.LinearNeuron:
                return g * net;
            case FunctionChoice.UnipolarNeuron: // logistic sigmoid
                return 1 / (1 + System.Math.Exp(-g * net));
            case FunctionChoice.BipolarNeuron: // tanh
                return System.Math.Tanh(g * net);
            case FunctionChoice.BipolarElliotNeuron:
                return g * net / (1 + g * System.Math.Abs(net));
            case FunctionChoice.UnipolarElliotNeuron:
                return 2 * g * net / (1 + g * System.Math.Abs(net)) - 1;
            default:
                throw new System.Exception(Properties.Settings.Default.FE1);
        }
    }
    catch (System.Exception ex)
    {
        throw new NeuralNetworkError("Błąd obliczania funkcji aktywacji.", ex);
    }
}
// When the license server rejects the key, no activation may be saved and the
// user must see an "activation failed" error message.
public void OnlineActivationCommand_CurrentActivationIsValid_LicenseCheckerActivationIsNotValid_DoNotSaveNewActivationAndInformUser()
{
    // Arrange
    _savedActivation = null;
    _expectedLicenseKey = "not empty";
    CreateLicenseKeyEnteredInteraction(_expectedLicenseKey);
    var viewModel = BuildViewModel();

    // Act
    viewModel.OnlineActivationCommand.Execute(null);
    viewModel.LicenseCheckFinishedEvent.WaitOne(_timeout);

    // Assert
    _licenseChecker.DidNotReceive().SaveActivation(Arg.Any<Activation>());
    var messageInteraction = _interactionRequest.AssertWasRaised<MessageInteraction>();
    Assert.AreEqual(_translation.ActivationFailed, messageInteraction.Title);
    Assert.AreEqual(MessageOptions.OK, messageInteraction.Buttons);
    Assert.AreEqual(MessageIcon.Error, messageInteraction.Icon);
}
// A rejecting filter registered for a DIFFERENT rule must not block this rule's
// activation from entering the agenda.
public void Add_RejectingRuleFilterForDifferentRule_ActivationInAgenda()
{
    // Arrange
    var filteredRule = MockRule();
    var otherRule = MockRule();
    var fact = new FactObject { Value = "Test" };
    var activation = new Activation(filteredRule, CreateTuple(fact));
    var target = CreateTarget();
    target.AddFilter(otherRule.Definition, new RejectingFilter());

    // Act
    target.Add(activation);

    // Assert
    Assert.False(target.IsEmpty);
}
// Global filters combine restrictively: one rejecting filter vetoes the
// activation even when another global filter accepts it.
public void Add_AcceptingAndRejectingGlobalFilter_ActivationNotInAgenda()
{
    // Arrange
    var rule = MockRule();
    var fact = new FactObject { Value = "Test" };
    var activation = new Activation(rule, CreateTuple(fact));
    var target = CreateTarget();
    target.AddFilter(new AcceptingFilter());
    target.AddFilter(new RejectingFilter());

    // Act
    target.Add(activation);

    // Assert
    Assert.True(target.IsEmpty);
}
/// <summary>
/// Creates a layer of <paramref name="NeuronCount"/> neurons fully connected to
/// <paramref name="PreviousLayer"/>, with weights initialized uniformly in
/// (-1, 1) and biases/momenta left at zero.
/// </summary>
public Layer(int NeuronCount, Layer PreviousLayer, Activation function, double L1R = 0, double L2R = 0)
{
    int inputCount = PreviousLayer.Weights.GetLength(0);

    Weights = new double[NeuronCount, inputCount];
    Biases = new double[NeuronCount];
    WMomentum = new double[NeuronCount, inputCount];
    BMomentum = new double[NeuronCount];
    L1Regularization = L1R;
    L2Regularizattion = L2R;
    Function = function;

    // Random sign, random magnitude in [0, 1) for each incoming weight.
    Random rng = new Random();
    for (int neuron = 0; neuron < NeuronCount; neuron++)
    {
        for (int input = 0; input < inputCount; input++)
        {
            Weights[neuron, input] = rng.NextDouble() > 0.5 ? rng.NextDouble() : (-1 * rng.NextDouble());
        }
    }
}
// The offline-activation interaction's license-server answer must be forwarded
// verbatim to the offline activator for validation.
public void OfflineActivationCommandExecute_LicenseCheckerOfflineActivationStringFromLicenseServer_GetsCalledWithExpectedOfflineLsa()
{
    // Arrange
    const string expectedOfflineLsa = "OfflineLsa";
    _savedActivation = null;
    var viewModel = BuildViewModel();

    _interactionInvoker
        .When(x => x.Invoke(Arg.Any<OfflineActivationInteraction>()))
        .Do(callInfo =>
        {
            var interaction = callInfo.Arg<OfflineActivationInteraction>();
            interaction.Success = true;
            interaction.LicenseServerAnswer = expectedOfflineLsa;
        });

    // Act
    viewModel.OfflineActivationCommand.Execute(null);

    // Assert
    _offlineActivator.Received().ValidateOfflineActivationString(expectedOfflineLsa);
}
/// <summary>
/// Registers an activation under <paramref name="value"/>. The first activation
/// is stored directly; a second one promotes the entry to an AlphaNode that
/// fans out to all activations for that value.
/// </summary>
protected void Add(string value, Activation<TContext> activation, Func<long> generateId)
{
    Activation<TContext> existing;
    if (!_values.TryGetValue(value, out existing))
    {
        // First activation for this value: store it directly.
        _values.Add(value, activation);
        return;
    }

    var alphaNode = existing as AlphaNode<TContext>;
    if (alphaNode == null)
    {
        // Promote the single stored activation into an alpha node.
        alphaNode = new AlphaNode<TContext>(generateId());
        alphaNode.Add(existing);
        _values[value] = alphaNode;
    }

    alphaNode.Add(activation);
}
// One backprop iteration with cross-entropy/softmax loss: the analytic gradient
// at every layer/parameter must match the numeric check in AssertNetGradient.
public void SimpleNet_CrossEntropySoftMax_OneIter()
{
    // arrange
    var net = Mocks.SimpleLinearNetwork2(Activation.ReLU);
    net[2].ActivationFunction = Activation.Logistic(1);

    var positivePoint = new double[1][,] { new[,] { { 1.0D } } };
    var negativePoint = new double[1][,] { new[,] { { -1.0D } } };
    var classA = new Class("a", 0);
    var classB = new Class("b", 1);

    var sample = new ClassifiedSample<double[][,]>
    {
        [positivePoint] = classA,
        [negativePoint] = classB
    };

    var alg = new BackpropAlgorithm(sample, net)
    {
        LearningRate = 2.0D,
        LossFunction = Loss.CrossEntropySoftMax
    };
    alg.Build();

    // act
    alg.RunIteration(positivePoint, classA);

    // assert
    AssertNetGradient(alg, positivePoint, 2, 1);
    AssertNetGradient(alg, positivePoint, 1, 0);
    AssertNetGradient(alg, positivePoint, 1, 1);
    AssertNetGradient(alg, positivePoint, 0, 0);
    AssertNetGradient(alg, positivePoint, 0, 1);
}
/// <summary>
/// Builds a single dense layer with optional activation and, when a loss is
/// configured, attaches a target input and loss node. Returns the loss output
/// when one was built, otherwise the (activated) layer output.
/// </summary>
protected override Function BuildNetwork(Variable input, DeviceDescriptor device, string name)
{
    var c1 = UnityCNTK.Layers.Dense(input, HiddenSize, device, HasBias, name + ".Dense", InitialWeightScale);
    if (Activation != null)
    {
        resultOutput = Activation.BuildNew(c1, Device, name + ".Activation");
    }
    else
    {
        resultOutput = c1;
    }
    // Target input mirrors the output's shape/type; only needed when training with a loss.
    if (loss != LossFunction.None)
    {
        targetInput = CNTKLib.InputVariable(resultOutput.Shape, resultOutput.DataType, name + ".TargetInput");
    }
    else
    {
        targetInput = null;
    }
    lossOutput = null;
    if (Loss == LossFunction.CrossEntropy)
    {
        // NOTE(review): cross-entropy is built on the pre-activation output c1, not
        // resultOutput — consistent with softmax being applied inside the loss, but
        // confirm this is intended when a non-softmax Activation is configured.
        lossOutput = CNTKLib.CrossEntropyWithSoftmax(c1, targetInput);
    }
    else if (Loss == LossFunction.Square)
    {
        lossOutput = CNTKLib.SquaredError(resultOutput, targetInput);
    }
    else
    {
        lossOutput = null;
    }
    //add parameters to list
    ParameterNames.Add(ParamTypeToName(DenseParamType.Weight));
    ParameterNames.Add(ParamTypeToName(DenseParamType.Bias));
    return(lossOutput != null?lossOutput:resultOutput);
}
// Unity coroutine that builds a small dense network and trains it on generated
// data until the epoch error drops below 10 (or training is cancelled),
// yielding once per epoch so the UI stays responsive.
IEnumerator TrainNetwork()
{
    var inputlayer = new DenseLayer(10, 4, Activation.ReLU());
    var hiddenlayer1 = new DenseLayer(inputlayer, Activation.ReLU(), 30, LayerType.Hidden);
    // NOTE(review): hiddenlayer2 is constructed but never connected — the output
    // layer is fed from hiddenlayer1. Confirm whether the 500-unit layer was
    // meant to be in the chain or is intentionally dead.
    var hiddenlayer2 = new DenseLayer(hiddenlayer1, Activation.ReLU(), 500, LayerType.Hidden);
    var outputlayer = new DenseLayer(hiddenlayer1, Activation.TangesHyperbolic(), 2, LayerType.Output);
    Network = outputlayer;
    outputlayer.Initilize();
    var trainingdata = GenerateTrainingData();
    int epoch = 0;
    int epochsize = trainingdata.GetLength(0);
    var epocherror = float.MaxValue;
    yield return(0);
    while (epocherror > 10 && _Training)
    {
        yield return(0); // yield once per epoch so Unity can render
        epocherror = 0f;
        epoch++;
        for (var t = 0; t < epochsize; t++)
        {
            var truth = trainingdata[t, 1];
            var input = trainingdata[t, 0];
            var output = (Tensor1D)outputlayer.Forward(input);
            var dif = output - truth;
            var sq = dif * dif;
            // NOTE(review): sq is already element-wise squared, so Pow(sum, 2)
            // squares the error again — verify this double-squaring is intended.
            epocherror += (float)Math.Pow(sq.ElementSum(), 2);
            outputlayer.Backward(dif);
        }
        ErrorText.text = epocherror.ToString();
    }
    ErrorText.text = ("Finished!");
    _Training = false;
}
/// <summary>
/// Converts this value to <typeparamref name="TValue"/>, trying in order: the
/// raw string itself, a registered conversion method, the DOM value directly,
/// a typed DOM wrapper, and finally text-based activation parsing.
/// </summary>
public TValue GetValue<TValue>()
{
    // Strings need no conversion — hand back the raw value.
    if (typeof(TValue) == typeof(string))
    {
        object raw = Value;
        return (TValue)raw;
    }

    // A registered converter for the target type wins next.
    if (CONVERSION_TYPES.TryGetValue(typeof(TValue), out var method))
    {
        return (TValue)method.InvokeWithUnwrap(null, new[] { this });
    }

    // The DOM value may already be (or wrap) the requested type.
    if (DomValue is TValue typed)
    {
        return typed;
    }
    if (DomValue is IDomValue<TValue> wrapper)
    {
        return wrapper.TypedValue;
    }

    // Last resort: parse the textual representation.
    return Activation.FromText<TValue>(Value);
}
// Removing with an equivalent activation (same rule/tuple, different instance)
// must empty the agenda.
public void Remove_CalledAfterAdd_AgendaEmpty()
{
    // Arrange
    var compiledRule = new Mock<ICompiledRule>();
    var fact = new FactObject { Value = "Test" };
    var factTuple = CreateTuple(fact);
    var target = CreateTarget();
    target.Add(new Activation(compiledRule.Object, factTuple, null));

    // Act
    target.Remove(new Activation(compiledRule.Object, factTuple, null));

    // Assert
    Assert.True(target.IsEmpty());
}
/// <summary>
/// Activation layer. Adds activation functions to a neural net.
/// </summary>
/// <param name="activation">The activation function to apply; must not be Undefined.</param>
/// <exception cref="ArgumentException">Thrown for Undefined or unsupported activations.</exception>
public ActivationLayer(Activation activation)
{
    ActivationFunc = activation;

    switch (activation)
    {
        case Activation.Relu:
            m_activation = new ReluActivation();
            break;
        case Activation.Sigmoid:
            m_activation = new SigmoidActivation();
            break;
        case Activation.Undefined:
            throw new ArgumentException("ActivationLayer must have a defined activation function. Provided with: " + activation);
        default:
            throw new ArgumentException("Unsupported activation type: " + activation);
    }
}
/// <summary>
/// Reports whether the stored activation key matches the key computed for this
/// computer.
/// </summary>
/// <returns>True when the saved key equals this machine's key; otherwise false.</returns>
public bool ActivationStatus()
{
    Activation activation = new Activation();
    string key = SettingsManager.ReadSetting("ActivationKey");
    // NOTE(review): `code` is never read; kept in case GetActivationCode has side
    // effects — confirm and delete if it is pure.
    string code = activation.GetActivationCode();
    string correctkey = activation.ThisComputerKey();

    // Fix: collapse the redundant if/else that returned the comparison result.
    return correctkey == key;
}
/// <summary>
/// Builds a stub license-server helper whose HKCU and HKLM license checkers
/// return activations with the given activation timestamps.
/// </summary>
private ILicenseServerHelper BuildLicenseServerHelperWithActivationTimes(DateTime hkcuActivationTime, DateTime hklmActivationTime)
{
    var hkcuActivation = new Activation { TimeOfActivation = hkcuActivationTime };
    var hklmActivation = new Activation { TimeOfActivation = hklmActivationTime };

    var hkcuLicenseChecker = Substitute.For<ILicenseChecker>();
    hkcuLicenseChecker.GetSavedActivation().Returns(hkcuActivation);

    var hklmLicenseChecker = Substitute.For<ILicenseChecker>();
    hklmLicenseChecker.GetSavedActivation().Returns(hklmActivation);

    var licenseServerHelper = Substitute.For<ILicenseServerHelper>();
    licenseServerHelper.BuildLicenseChecker(RegistryHive.CurrentUser).Returns(hkcuLicenseChecker);
    licenseServerHelper.BuildLicenseChecker(RegistryHive.LocalMachine).Returns(hklmLicenseChecker);
    return licenseServerHelper;
}
/// <summary>
/// Returns the derivative of the given activation function, evaluated from the
/// already-activated output <paramref name="value"/>.
/// </summary>
/// <param name="activation">Which activation function to differentiate.</param>
/// <param name="value">The activation's OUTPUT (e.g. sigmoid(x)), not the raw input.</param>
/// <returns>The derivative, or 0 for activations with no derivative defined.</returns>
public static double Derivative(Activation activation, double value)
{
    switch (activation)
    {
        case Activation.Sigmoid:
            // s'(x) = s(x) * (1 - s(x)), expressed via the activated value.
            return value * (1.0 - value);
        case Activation.ReLu:
            // Fix: previously fell through and returned 0 for every input, which
            // zeroes all ReLU gradients and stalls learning entirely.
            return value > 0 ? 1.0 : 0.0;
        case Activation.Adam:
        default:
            // Adam is an optimizer, not an activation — no derivative defined.
            return 0;
    }
}
// - Element is an ElementFragment
// Converts a DOM element by initializing it from its attribute values, with the
// element's inner text additionally bound to whichever property carries [Value].
protected override DomNode Convert(DomDocument document, DomElement element, IScriptGenerator gen)
{
    // TODO This will not work if expressions are in non-string types
    var myValues = element.Attributes.Select(t => new KeyValuePair<string, object>(t.Name, t.Value));

    // Locate the property handling inner text
    // TODO Using inner text (but it could contain markup, which would technically require special handling logic)
    // TODO Memoize this lookup (performance)
    foreach (PropertyInfo p in Utility.ReflectGetProperties(element.GetType()))
    {
        if (p.IsDefined(typeof(ValueAttribute)))
        {
            // Prepend the [Value]-marked property bound to the element's inner text.
            var kvp = new KeyValuePair<string, object>(p.Name, element.InnerText);
            myValues = Utility.Cons(kvp, myValues);
        }
    }

    // Apply all collected name/value pairs to the element instance.
    Activation.Initialize(element, myValues);
    return(element);
}
/// <summary>
/// Computes this neurone's backprop delta: from the target value for output
/// neurones, or from the weighted deltas of the next layer for hidden neurones.
/// </summary>
/// <param name="isOutput">True when this neurone is in the output layer.</param>
/// <param name="requestedValue">The desired output (only used when isOutput).</param>
/// <param name="nextL">The following layer (only used when not isOutput).</param>
public void CalculateDelta(bool isOutput, double requestedValue, Layer nextL)
{
    // Fix: replaced the switch-on-bool anti-pattern with a plain if/else.
    if (isOutput)
    {
        Delta = Activation.Derivative(WeigtedInput + Bias) * (requestedValue - Get());
    }
    else
    {
        double sum = .0d;
        // NOTE(review): stops at Length - 1 — presumably the last neurone is a
        // bias node with no incoming connection; confirm.
        for (int i = 0; i < nextL.Neurones.Length - 1; i++)
        {
            sum += nextL.Neurones[i].Delta * Connections.Find(p => p.Axon.Equals(nextL.Neurones[i])).Weight;
        }
        Delta = Activation.Derivative(WeigtedInput + Bias) * sum;
    }
}
// Updates the professional-tools details for a user via the
// spProfessionalForm_UpdateUserTools stored procedure; always reports success.
public async Task<bool> UpdateTools(Activation model, Guid userId)
{
    const string procedure = "spProfessionalForm_UpdateUserTools";

    using IDbConnection dbConnection = new SqlConnection(_config.GetConnectionString("TazzerCleanCs"));
    dbConnection.Open();

    var parameters = new
    {
        userId,
        model.OwnMaterialTools,
        model.Tools,
        model.AreasAroundPostcode
    };
    await dbConnection.ExecuteAsync(
        procedure,
        parameters,
        commandType: CommandType.StoredProcedure,
        commandTimeout: 10
    );
    return true;
}
// Removing the activation that was previously added (with linked keys stubbed
// empty) must leave the agenda empty.
public void Remove_CalledAfterAdd_AgendaEmpty()
{
    // Arrange
    var rule = MockRule();
    var fact = new FactObject { Value = "Test" };
    var activation = new Activation(rule, CreateTuple(fact), null);
    var target = CreateTarget();
    target.Add(_context.Object, activation);
    _session.Setup(x => x.GetLinkedKeys(activation)).Returns(new object[0]);

    // Act
    target.Remove(_context.Object, activation);

    // Assert
    Assert.True(target.IsEmpty());
}
// A successful online activation must be saved and must trigger the
// store-license-for-all-users interaction.
public void OnlineActivationCommand_CurrentActivationIsValid_LicenseCheckerActivationIsValid_SaveNewActivationAndStoreLicenseForAllUsersQuery()
{
    // Arrange
    _expectedLicenseKey = "not empty";
    CreateLicenseKeyEnteredInteraction(_expectedLicenseKey);
    _activationFromServer = BuildValidActivation(_expectedLicenseKey);
    var viewModel = BuildViewModel();
    var propertyChangedEvents = new List<string>();
    viewModel.PropertyChanged += (sender, args) => propertyChangedEvents.Add(args.PropertyName);

    // Act
    viewModel.OnlineActivationCommand.Execute(null);
    var success = viewModel.LicenseCheckFinishedEvent.WaitOne(_timeout);

    // Assert
    Assert.IsTrue(success);
    _licenseChecker.Received().SaveActivation(_activationFromServer);
    _interactionRequest.AssertWasRaised<StoreLicenseForAllUsersInteraction>();
}
// A valid online activation must raise CloseLicenseWindowEvent exactly once the
// async command completes.
public async Task OnlineActivationCommand_LicenseCheckerActivationIsValid_CallsCloseLicenseWindowEvent()
{
    // Arrange
    _savedActivation = null;
    _expectedLicenseKey = "given-key";
    CreateLicenseKeyEnteredInteraction(_expectedLicenseKey);
    _activationFromServer = BuildValidActivation(_expectedLicenseKey);

    var closeEventRaised = false;
    var viewModel = BuildViewModel();
    viewModel.CloseLicenseWindowEvent += (s, e) => closeEventRaised = true;

    // Act
    await viewModel.OnlineActivationAsyncCommand.ExecuteAsync(null);

    // Assert
    Assert.IsTrue(closeEventRaised);
}
/// <summary>
///   Determines the effective <see cref="Activation" /> of the <paramref name="fault" /> when <paramref name="faults" /> should
///   be checked for criticality.
/// </summary>
protected Activation GetEffectiveActivation(Fault fault, FaultSet faults, Activation activation)
{
    Assert.That(fault != null && fault.IsUsed, "Invalid fault.");

    // Explicit suppression wins over everything; explicit forcing is next.
    if (SuppressedFaults.Contains(fault))
        return Activation.Suppressed;
    if (ForcedFaults.Contains(fault))
        return Activation.Forced;

    // Faults in the checked set use the requested mode; all others are suppressed.
    return faults.Contains(fault) ? activation : Activation.Suppressed;
}
/// <summary>
/// Applies the given activation function to <paramref name="value"/>.
/// </summary>
/// <param name="activation">Which activation function to apply.</param>
/// <param name="value">The neuron's raw (pre-activation) input.</param>
/// <returns>The activated value, or 0 for activations with no function defined.</returns>
public static double Activate(Activation activation, double value)
{
    switch (activation)
    {
        case Activation.Sigmoid:
            return 1.0 / (1.0 + Math.Exp(-value));
        case Activation.ReLu:
            // Fix: previously fell through and returned 0 for every input, making
            // ReLu a constant zero function.
            return Math.Max(0.0, value);
        case Activation.Adam:
        default:
            // Adam is an optimizer, not an activation — no function defined.
            return 0;
    }
}
/// <summary>
/// Parses a darknet "shortcut" (residual) section: resolves the source layer
/// from the "from" option (negative values are relative to the current index),
/// builds the shortcut layer and applies the configured activation.
/// </summary>
private static Layer parse_shortcut(KeyValuePair[] options, SizeParams parameters, Network net)
{
    int index = int.Parse(OptionList.option_find(options, "from"));
    if (index < 0)
    {
        // Negative offsets are relative to the layer currently being built.
        index = parameters.Index + index;
    }

    Layer from = net.Layers[index];
    Layer shortcut = Layer.make_shortcut_layer(
        parameters.Batch, index,
        parameters.W, parameters.H, parameters.C,
        from.OutW, from.OutH, from.OutC);

    string activationName = OptionList.option_find_str(options, "activation", "linear");
    shortcut.Activation = ActivationsHelper.Get_activation(activationName);
    return shortcut;
}
// Verifies LoadActivation survives a FormatException thrown while reading the
// saved activation (e.g. corrupt base64 in the registry).
// NOTE(review): this test contains no assertions — it only passes if
// LoadActivation does not throw, and the trailing newActivation is never used.
// Consider asserting on the helper's resulting state and removing the dead code.
public void WhenSavedActivationIsNotValidBase64_HandlesFormatExceptionProperly()
{
    // HKCU checker throws on read; the sibling checker returns a valid activation.
    var licenseCheckerWithException = Substitute.For<ILicenseChecker>();
    licenseCheckerWithException
        .When(x => x.GetSavedActivation())
        .Do(x => { throw new FormatException(); });
    var licenseChecker = Substitute.For<ILicenseChecker>();
    licenseChecker.GetSavedActivation().Returns(_savedActivationCurrentUser);
    _licenseServerHelper.BuildLicenseChecker(RegistryHive.CurrentUser)
        .Returns(licenseCheckerWithException);

    var activationHelper = BuildActivationHelper(Product.PdfCreatorBusiness);
    activationHelper.LoadActivation();

    var newActivation = new Activation();
    newActivation.Product = Product.PdfCreatorBusiness;
}
// Deactivation must match by rule/tuple equivalence, not instance identity:
// a second Activation built from the same inputs empties the queue.
public void Deactivate_CalledAfterActivation_ActivationQueueEmpty()
{
    // Arrange
    var ruleMock = new Mock<ICompiledRule>();
    var tuple = CreateTuple(new FactObject { Value = "Test" });
    var target = CreateTarget();
    var added = new Activation(ruleMock.Object, tuple, null);
    target.Activate(added);

    // Act
    var equivalent = new Activation(ruleMock.Object, tuple, null);
    target.Deactivate(equivalent);

    // Assert
    Assert.False(target.HasActiveRules());
}
/// <summary>
/// Applying the activation function to the layer.
/// </summary>
/// <param name="x">Input variable</param>
/// <param name="activation">Activation function name.</param>
/// <param name="name">Name of the output function.</param>
/// <returns>The activated function, or <paramref name="x"/> unchanged for None.</returns>
public Function AFunction(Function x, Activation activation, string name = "")
{
    switch (activation)
    {
        case Activation.ReLU:
            return CNTKLib.ReLU(x, name);
        case Activation.Sigmoid:
            return CNTKLib.Sigmoid(x, name);
        case Activation.Softmax:
            return CNTKLib.Softmax(x, name);
        case Activation.TanH:
            return CNTKLib.Tanh(x, name);
        case Activation.None:
        default:
            // None (and anything unrecognised) passes the input through unchanged.
            return x;
    }
}
/// <summary>
/// Computes the states of the layer's neurons
/// </summary>
/// <param name="inputs">The inputs for this layer</param>
/// <param name="flatWeights">Network's weights in a flat structure</param>
/// <param name="flatDerivatives">Network's neuron state derivatives in a flat structure (optional; filled when provided)</param>
/// <returns>The layer's neurons states</returns>
internal double[] Compute(double[] inputs, double[] flatWeights, double[] flatDerivatives = null)
{
    double[] result = new double[NumOfLayerNeurons];
    // weightFlatIdx runs continuously across neurons: each neuron consumes
    // NumOfInputNodes consecutive weights; biasFlatIdx advances one per neuron.
    int weightFlatIdx = WeightsStartFlatIdx;
    int biasFlatIdx = BiasesStartFlatIdx;
    for (int neuronIdx = 0; neuronIdx < NumOfLayerNeurons; neuronIdx++, biasFlatIdx++)
    {
        // Start the weighted sum with the bias contribution.
        double sum = flatWeights[biasFlatIdx] * BiasValue;
        for (int inputIdx = 0; inputIdx < NumOfInputNodes; inputIdx++, weightFlatIdx++)
        {
            sum += flatWeights[weightFlatIdx] * inputs[inputIdx];
        }
        result[neuronIdx] = Activation.Compute(sum);
        if (flatDerivatives != null)
        {
            // Derivative is computed from both the activated value and the raw sum.
            flatDerivatives[NeuronsStartFlatIdx + neuronIdx] = Activation.ComputeDerivative(result[neuronIdx], sum);
        }
    }
    return(result);
}
/// <summary>
/// Total error calculation (sum of squared output errors over all patterns).
/// </summary>
/// <param name="info">NetworkInfo</param>
/// <param name="inp">ref Input - input data patterns</param>
/// <param name="dout">ref Output - output data</param>
/// <param name="topo">ref Topography - topo is network topology in the form of one vector</param>
/// <param name="ww">ref Weights weights</param>
/// <param name="act">ref Activation - type of activation function</param>
/// <param name="gain">ref Gain - strengthening the activation function</param>
/// <param name="iw">ref WeightsPointers - index pointers used for network topology stored in top in the form of one vector</param>
/// <remarks>Network error will be overriden so please save it</remarks>
public double CalculateError(ref NetworkInfo info, ref Input inp, ref Output dout, ref Topography topo, Weights ww, ref Activation act, ref Gain gain, ref Index iw)
{
    try
    {
        Error = 0;
        for (p = 0; p < info.np; p++)
        {
            // Forward pass: node list starts with the pattern's inputs; each
            // neuron appends its activation.
            node.Clear();
            node.AddRange(inp.Data[p]);
            for (n = 0; n < info.nn; n++)
            {
                // Bias weight sits at the neuron's topology start position.
                net = ww[iw.Pos(n)];
                int from = iw.Pos(n) + 1;
                int to = iw.Pos(n + 1) - 1;
                // Accumulate weighted inputs from the neurons listed in the
                // flat topology vector.
                for (i = from; i <= to; i++)
                {
                    net += node[(int)topo[i]] * ww[i];
                }
                node.Add(ActivationFunction.computeFunction(ref n, ref net, ref act, ref gain));
            }
            // Sum squared error over the output nodes (offset info.nio).
            for (k = 0; k < info.no; k++)
            {
                Error += System.Math.Pow((dout.Data[p][k] - node[info.nio + k]), 2);
            }
        }
        return Error;
    }
    catch (System.Exception ex)
    {
        throw new NeuralNetworkError("Błąd uaktualnienia błędu sieci neuronowej. " + ex.Message, ex);
    }
}
/// <summary>
/// Creates the recurrence network based on LSTM cell
/// </summary>
/// <param name="input">Input variable.</param>
/// <param name="outputDim">Placeholder for previous output.</param>
/// <param name="cellDim">Dimension of the LSTM cell.</param>
/// <param name="dataType">Type of data.</param>
/// <param name="device">Device where computing will happen.</param>
/// <param name="returnSequence">Determines if the return value full sequence or the last element of sequence</param>
/// <param name="actFun">Type of activation function for update cell state.</param>
/// <param name="usePeephole">Include peephole connection in the gate.</param>
/// <param name="useStabilizer">Use self stabilization for output.</param>
/// <param name="seed">Random seed.</param>
/// <returns></returns>
public static Function RecurrenceLSTM(Variable input, int outputDim, int cellDim, DataType dataType, DeviceDescriptor device, bool returnSequence = false, Activation actFun = Activation.TanH, bool usePeephole = true, bool useStabilizer = true, uint seed = 1)
{
    if (outputDim <= 0 || cellDim <= 0)
    {
        throw new Exception("Dimension of LSTM cell cannot be zero.");
    }
    //prepare output and cell dimensions
    NDShape hShape = new int[] { outputDim };
    NDShape cShape = new int[] { cellDim };

    //create placeholders
    //Define previous output and previous cell state as placeholder which will be replace with past values later
    var dh = Variable.PlaceholderVariable(hShape, input.DynamicAxes);
    var dc = Variable.PlaceholderVariable(cShape, input.DynamicAxes);

    //create lstm cell
    var lstmCell = new LSTM(input, dh, dc, dataType, actFun, usePeephole, useStabilizer, seed, device);

    //get actual values of output and cell state
    var actualDh = CNTKLib.PastValue(lstmCell.H);
    var actualDc = CNTKLib.PastValue(lstmCell.C);

    // Form the recurrence loop by replacing the dh and dc placeholders with the actualDh and actualDc
    lstmCell.H.ReplacePlaceholders(new Dictionary<Variable, Variable> { { dh, actualDh }, { dc, actualDc } });

    //return value depending of type of LSTM layer
    //For Stacked LSTM (with more than one LSTM layer in the network), the last LSTM must return last Sequence element,
    // otherwise full sequence is returned
    if (returnSequence)
    {
        return(lstmCell.H);
    }
    else
    {
        return(CNTKLib.SequenceLast(lstmCell.H));
    }
}
/// <summary>
/// Loads the current activation for the selected question and either creates
/// (POST) or updates (PUT) it with the values entered in the form.
/// </summary>
private void SaveActivation()
{
    HttpClient client = InitializeClient();
    Question question = questions[cboQuestions.SelectedIndex];

    // NOTE(review): .Result blocks the UI thread; consider converting this
    // handler to async/await.
    HttpResponseMessage responseRetrieved = client.GetAsync("Activation?questionId=" + question.Id).Result;
    string result = responseRetrieved.Content.ReadAsStringAsync().Result;

    // Fix: deserialize directly (the old pre-allocated instance was discarded)
    // and fall back to a fresh record when the service returned a null payload,
    // which previously caused a NullReferenceException below.
    Activation activation = JsonConvert.DeserializeObject<Activation>(result) ?? new Activation();

    // Set the properties on the activation from the form controls.
    activation.QuestionId = question.Id;
    activation.StartDate = dtpStartDate.SelectedDate.Value;
    activation.EndDate = dtpEndDate.SelectedDate.Value;
    activation.ActivationCode = txtActivationCode.Text.ToLower();

    // Serialize and set headers.
    string serializedActivation = JsonConvert.SerializeObject(activation);
    var activationContent = new StringContent(serializedActivation);
    activationContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");

    if (activation.Id == Guid.Empty)
    {
        // No activation record retrieved for this question, create one.
        HttpResponseMessage response = client.PostAsync("Activation/", activationContent).Result;
        lblStatus.Content = "Activation Added to the Question";
    }
    else
    {
        // Activation retrieved, update it.
        HttpResponseMessage response = client.PutAsync("Activation/" + activation.Id, activationContent).Result;
        lblStatus.Content = "Activation Updated on the Question";
    }
}
/// <summary>
/// Drops the given activation from this container: it is taken out of both the
/// refraction set and the activation set.
/// </summary>
/// <param name="activation">The activation to remove.</param>
public void Remove(Activation activation)
{
    // The two collections are independent, so removal order does not matter.
    _refractions.Remove(activation);
    _activations.Remove(activation);
}
/// <summary>
/// Deactivates the given activation by removing it from the pending activation queue.
/// </summary>
/// <param name="activation">The activation to take out of the queue.</param>
public void Deactivate(Activation activation) => _activationQueue.Remove(activation);
/// <summary>
/// Builds an override model by copying the activation and lifestyle settings
/// from the supplied component profile.
/// </summary>
/// <param name="componentProfile">Profile whose settings are captured.</param>
public ComponentModelOverride( IComponentProfile componentProfile )
{
    // Snapshot both settings; the assignments are independent of each other.
    m_activation = componentProfile.Activation;
    m_lifestyle = componentProfile.Lifestyle;
}
/// <summary>
/// Applies the requested activation state to every slider/label pair in
/// <c>_slidersAndLabels</c> by delegating to <c>ToggleSlider</c>.
/// </summary>
/// <param name="i_toggle">Whether to activate or deactivate the sliders.</param>
private void ToggleAllSlidersActive(Activation i_toggle)
{
    // Fix: removed the unused local counter 'proIndex' the original incremented
    // but never read.
    foreach (Tuple<Slider, Label> sl in _slidersAndLabels)
    {
        ToggleSlider(sl.Item1, sl.Item2, i_toggle);
    }
}
/// <summary>
/// Configures the given profile and repository from the dialog state: the profile is
/// made active by default, the repository gets its URL from the text box, the fixed id
/// "NPanday.id", and release/snapshot policies driven by the two checkboxes.
/// </summary>
/// <param name="profile">Profile to mark active by default.</param>
/// <param name="repository">Repository whose url, id and policies are set.</param>
private void UpdateRepositoryFor(Profile profile, Repository repository)
{
    // Object initializers replace the original construct-then-assign sequences;
    // the resulting field values are identical.
    profile.activation = new Activation { activeByDefault = true };

    repository.url = textBox1.Text;
    repository.id = "NPanday.id";
    repository.releases = new RepositoryPolicy { enabled = checkBoxRelease.Checked };
    repository.snapshots = new RepositoryPolicy { enabled = checkBoxSnapshot.Checked };
}
/// <summary>
/// Enables or disables a single slider and recolors its label to match:
/// the normal color when activated, the disabled color when deactivated.
/// Any other activation value leaves the pair untouched.
/// </summary>
/// <param name="i_slider">Slider whose enabled state is set.</param>
/// <param name="i_label">Label whose foreground brush is updated.</param>
/// <param name="i_act">Requested activation state.</param>
private void ToggleSlider(Slider i_slider, Label i_label, Activation i_act)
{
    if (i_act == Activation.Activate)
    {
        i_slider.IsEnabled = true;
        i_label.Foreground = new SolidColorBrush(_labelColorNormal);
    }
    else if (i_act == Activation.Deactivate)
    {
        i_slider.IsEnabled = false;
        i_label.Foreground = new SolidColorBrush(_labelColorDisabled);
    }
}
/// <summary>
/// Convenience overload: validates the activation is not null using the standard
/// "Cannot find activation info" message template.
/// </summary>
/// <param name="value">Activation to check.</param>
/// <param name="email">Email substituted into the error message.</param>
public static void CheckIfNotNull(Activation value, string email) =>
    CheckIfNotNull(value, email, "Cannot find activation info for '{0}'");
/// <summary>
/// Turns IIS basic authentication on or off for the application by writing the
/// "enabled" flag of the basicAuthentication configuration section.
/// </summary>
/// <param name="app">Application whose web configuration is modified.</param>
/// <param name="activation">Enable turns the feature on; anything else turns it off.</param>
public static void BasicAuthentication(this Application app, Activation activation)
{
    ConfigurationSection section = app
        .GetWebConfiguration()
        .GetSection("system.webServer/security/authentication/basicAuthentication");
    bool enable = activation == Activation.Enable;
    section["enabled"] = enable;
}
/// <summary>
/// Counts each activation before delegating to the base implementation.
/// </summary>
/// <param name="context">Activation context passed through to the base class.</param>
/// <param name="reference">Instance reference passed through to the base class.</param>
public override void Activate(Activation.IContext context, Activation.InstanceReference reference)
{
    // Increment first so the count reflects this activation even if the base call throws.
    this.activationCount++;
    base.Activate(context, reference);
}
// Stores this instance in the static 'main' field for global access (singleton-style;
// no guard against a second instance overwriting the first).
// NOTE(review): presumably Unity's Awake lifecycle callback — confirm this class derives
// from MonoBehaviour.
void Awake()
{
    main = this;
}
/// <summary>
/// Checks the order of <see cref="firstFault" /> and <see cref="secondFault" /> for the
/// <see cref="minimalCriticalFaultSet" /> using the <see cref="activation" /> mode.
/// </summary>
/// <param name="firstFault">The first fault that should be checked.</param>
/// <param name="secondFault">The second fault that should be checked.</param>
/// <param name="minimalCriticalFaultSet">The minimal critical fault set that should be checked.</param>
/// <param name="activation">The activation mode of the fault set.</param>
/// <param name="forceSimultaneous">Indicates whether both faults must occur simultaneously.</param>
/// <exception cref="NotImplementedException">Always thrown; fault-order checking is not supported by this implementation.</exception>
internal override AnalysisResult CheckOrder(Fault firstFault, Fault secondFault, FaultSet minimalCriticalFaultSet,
    Activation activation, bool forceSimultaneous)
{
    throw new NotImplementedException();
}
/// <summary>
/// Handles a registration request: on a valid model it creates the user as an AuthUser,
/// records a new Activation keyed by a fresh GUID for the username, and redirects home.
/// On failure the form is redisplayed with a UserRejected model error.
/// </summary>
/// <param name="model">The submitted registration form data.</param>
/// <returns>A redirect to Home/Index on success; otherwise the registration view.</returns>
public ActionResult Register( RegisterModel model )
{
    // Guard clause: invalid form state goes straight back to the view.
    if ( !ModelState.IsValid )
    {
        return View( model );
    }

    // Attempt to register the user
    if ( userRepo.CreateUser( model.UserName, model.Password, model.Email, Role.AuthUser ) )
    {
        Guid guid = Guid.NewGuid();
        Repo.Add( new Activation { Id = guid, Username = model.UserName } );

        // TODO: SEND EMAIL
        System.Diagnostics.Debug.WriteLine( "Guid: {0}", guid );
        return RedirectToAction( "Index", "Home" );
    }

    // Creation failed — surface the rejection and redisplay the form.
    ModelState.AddModelError( "", ErrorCodeToString( MembershipCreateStatus.UserRejected ) );
    return View( model );
}