private static async Task CreateDataArrayByFillTest(ComputationContext ctx)
{
    int count = 10;
    using (var array = ctx.DataArrayFactory.Create(count, 1.1f))
    {
        var sw = new Stopwatch();
        sw.Start();

        var values = new float[count];
        await array.Read(values);
        foreach (var v in values) Assert.AreEqual(1.1f, v);

        var valuesToWrite = new float[count];
        for (int i = 0; i < valuesToWrite.Length; i++) valuesToWrite[i] = 5.5f;
        await array.Write(valuesToWrite);

        await array.Read(values);
        foreach (var v in values) Assert.AreEqual(5.5f, v);

        sw.Stop();
        Console.WriteLine("CreateDataArrayByFillTest: " + sw.ElapsedMilliseconds + " ms");
    }
}
public override int Compute(ComputationContext<int> task)
{
    int first = firstOperand.Compute(task);
    int second = secondOperand.Compute(task);
    return Math.Max(first, second);
}
public override MoveDirection Compute(ComputationContext<MoveDirection> task)
{
    if (task is AntNavigationContext)
        return Compute((AntNavigationContext)task);
    else
        throw new ArgumentException("ComputationContext should be an AntNavigationContext!");
}
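// The cast above relies on a strongly typed overload defined on the same node class.
// A minimal sketch of such an overload; the AntNavigationContext member (IsFoodAhead)
// and the MoveDirection values (Forward, TurnLeft) are assumptions for illustration,
// not taken from the source:
public MoveDirection Compute(AntNavigationContext task)
{
    // Hypothetical decision logic only, to show the shape of the typed overload.
    return task.IsFoodAhead ? MoveDirection.Forward : MoveDirection.TurnLeft;
}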
public override int Compute(ComputationContext<int> task)
{
    // Protected division: return 0 instead of dividing by zero.
    int possiblyZero = secondOperand.Compute(task);
    if (possiblyZero == 0) return 0;
    return firstOperand.Compute(task) / possiblyZero;
}
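// The arithmetic nodes in this section (Max and protected division above, addition
// further down) all override Compute on a common generic base. A minimal sketch of
// what such a base might look like; the type and member names are assumptions:
public abstract class Operation<T>
{
    // Evaluate this expression node against the shared context.
    public abstract T Compute(ComputationContext<T> task);
}

public abstract class BinaryOperation<T> : Operation<T>
{
    // Child expressions evaluated recursively; leaf nodes (constants, sensors)
    // would derive directly from Operation<T>.
    protected Operation<T> firstOperand;
    protected Operation<T> secondOperand;
}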
private async Task DoTest(ComputationContext ctx, int size)
{
    float[] notZeros = Enumerable.Repeat(1.1f, size).ToArray();
    float[] values = new float[size];
    using (var data = ctx.DataArrayFactory.Create(notZeros))
    {
        await data.Read(values);
        Assert.IsTrue(values.All(v => v == 1.1f));

        ctx.VectorUtils.Zero(data);

        await data.Read(values);
        Assert.IsTrue(values.All(v => v == 0.0f));
    }
}
private async Task DoTest(ComputationContext ctx, int fromSize, int toSize)
{
    for (int size = fromSize; size <= toSize; size++) await DoTest(ctx, size);
}
private async Task DoTest(ComputationContext ctx)
{
    await DoTest(ctx, 4096, 4911);
}
private async Task DoTest(ComputationContext ctx)
{
    await DoTest(ctx, 64, 128);
}
private static async Task MLPComputeTest(ComputationContext ctx)
{
    const int inputSize = 15;
    const int hiddenSize = 30;
    const int outputSize = 12;

    var inputValues = RandomGenerator.NextFloats(-1.0f, 1.0f, inputSize).ToArray();

    using (var inputDataArray = ctx.DataArrayFactory.Create(inputValues))
    using (var outputDataArray = ctx.DataArrayFactory.Create(outputSize))
    {
        var layers = new[]
        {
            new Layer(inputSize),
            new Layer(hiddenSize) { Descriptions = { new ActivationDescription(ActivationFunction.Sigmoid) } },
            new Layer(outputSize) { Descriptions = { new ActivationDescription(ActivationFunction.Linear) } },
        };
        layers[0].OutputConnections.AddOneWay(layers[1]);
        layers[1].OutputConnections.AddOneWay(layers[2]);

        using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers))
        {
            int numWeights = nn.NumberOfWeights;
            Assert.AreEqual(
                layers[1].Size * layers[0].Size + layers[1].Size * layers[2].Size + layers[1].Size + layers[2].Size,
                numWeights);

            var rndWeights = RandomGenerator.NextFloats(-1.0f, 1.0f, numWeights).ToArray();
            using (var tmpWeights = ctx.DataArrayFactory.Create(rndWeights))
            {
                nn.SetWeights(tmpWeights);
            }

            var readWeights = new float[numWeights];
            using (var tmpWeights = ctx.DataArrayFactory.Create(numWeights))
            {
                nn.GetWeights(tmpWeights);
                await tmpWeights.Read(readWeights);
            }

            for (int i = 0; i < rndWeights.Length; i++)
            {
                Assert.AreEqual(rndWeights[i], readWeights[i]);
            }

            var readOutputs = new float[outputSize];
            await outputDataArray.Read(readOutputs);
            for (int i = 0; i < outputSize; i++)
            {
                Assert.AreEqual(0.0f, readOutputs[i]);
            }

            var sw = new Stopwatch();
            sw.Start();

            nn.Compute(inputDataArray, outputDataArray);
            await outputDataArray.Read(readOutputs);

            sw.Stop();
            Console.WriteLine("Elapsed: {0} ms", sw.ElapsedMilliseconds);

            Assert.IsTrue(readOutputs.Any(o => o != 0.0f));

            var readInputs = new float[inputSize];
            await inputDataArray.Read(readInputs);
            for (int i = 0; i < inputSize; i++)
            {
                Assert.AreEqual(inputValues[i], readInputs[i]);
            }
        }
    }
}
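// The weight-count assertion above expands to a concrete number. With inputSize = 15,
// hiddenSize = 30, and outputSize = 12, the layout is input-to-hidden weights plus
// hidden-to-output weights plus one bias per hidden and per output neuron:
// 30 * 15 + 30 * 12 + 30 + 12 = 450 + 360 + 42 = 852 weights in total.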
public override void Validate(ComputationContext ctx)
{
    base.Validate(ctx);
}
private async Task MLPTrainRecTest(ComputationContext ctx, GradientComputationMethod method, params LayerBehavior[] rules)
{
    var trainingData = new[]
    {
        new[] { Tuple.Create(-1.0f, (float[])null), Tuple.Create(-1.0f, (float[])null), Tuple.Create(-1.0f, new[] { -1.0f, -1.0f, -1.0f }) },
        new[] { Tuple.Create(-1.0f, (float[])null), Tuple.Create(-1.0f, (float[])null), Tuple.Create(1.0f, new[] { -1.0f, -1.0f, 1.0f }) },
        new[] { Tuple.Create(-1.0f, (float[])null), Tuple.Create(1.0f, (float[])null), Tuple.Create(-1.0f, new[] { -1.0f, 1.0f, -1.0f }) },
        new[] { Tuple.Create(-1.0f, (float[])null), Tuple.Create(1.0f, (float[])null), Tuple.Create(1.0f, new[] { -1.0f, 1.0f, 1.0f }) },
        new[] { Tuple.Create(1.0f, (float[])null), Tuple.Create(-1.0f, (float[])null), Tuple.Create(-1.0f, new[] { 1.0f, -1.0f, -1.0f }) },
        new[] { Tuple.Create(1.0f, (float[])null), Tuple.Create(-1.0f, (float[])null), Tuple.Create(1.0f, new[] { 1.0f, -1.0f, 1.0f }) },
        new[] { Tuple.Create(1.0f, (float[])null), Tuple.Create(1.0f, (float[])null), Tuple.Create(-1.0f, new[] { 1.0f, 1.0f, -1.0f }) },
        new[] { Tuple.Create(1.0f, (float[])null), Tuple.Create(1.0f, (float[])null), Tuple.Create(1.0f, new[] { 1.0f, 1.0f, 1.0f }) }
    };

    const int inputSize = 1;
    const int hiddenSize = 8;
    const int outputSize = 3;
    const int maxIterations = 1000;

    var layers = NNTestHelpers.CreateGDMLPLayers(false, inputSize, hiddenSize, outputSize, rules);

    using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers, new MultilayerPerceptronProperties { GradientComputationMethod = method }))
    using (var batch = new SupervisedBatch())
    using (var errors = ctx.DataArrayFactory.Create(maxIterations))
    {
        foreach (var dataEntry in trainingData)
        {
            var sample = new SupervisedSample();
            foreach (var sampleEntry in dataEntry)
            {
                if (sampleEntry.Item2 == null)
                {
                    // Input-only step of the sequence: no desired output yet.
                    sample.Add(ctx.DataArrayFactory.CreateConst(new[] { sampleEntry.Item1 }));
                }
                else
                {
                    sample.Add(
                        ctx.DataArrayFactory.CreateConst(new[] { sampleEntry.Item1 }),
                        ctx.DataArrayFactory.CreateConst(sampleEntry.Item2),
                        ctx.DataArrayFactory.Create(sampleEntry.Item2.Length));
                }
            }
            batch.Add(sample);
        }

        bool first = true;
        var sw = new Stopwatch();
        for (int it = 0; it < maxIterations; it++)
        {
            nn.Train(batch);
            ctx.VectorUtils.CalculateMSE(batch, errors, it);

            if (first)
            {
                using (var weights = ctx.DataArrayFactory.Create(nn.NumberOfWeights))
                {
                    nn.GetWeights(weights);
                    float[] wa = new float[weights.Size];
                    await weights.Read(wa);

                    // It must be randomized:
                    Assert.IsTrue(wa.Sum() != 0.0f);
                }
                first = false;
                sw.Start();
            }
        }

        float[] mses = new float[maxIterations];
        await errors.Read(mses);
        sw.Stop();

        foreach (var mse in mses) Console.WriteLine("Error: {0}", mse.ToString("0.00000000"));
        Console.WriteLine("Elapsed: {0} ms", sw.Elapsed.TotalMilliseconds);
    }
}
private async Task CalculateMSETest(ComputationContext ctx)
{
    const int valuesCount = 1024;
    const int repeat = 10000;

    float[][][] desired =
    {
        new[] { RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() },
        new[] { RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() }
    };
    float[][][] current =
    {
        new[] { RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() },
        new[] { RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray(), RandomGenerator.NextFloats(-1.0f, 1.0f, valuesCount).ToArray() }
    };

    float mse = CalcMSE(desired, current);

    using (var batch = new SupervisedBatch())
    using (var resultValues = ctx.DataArrayFactory.Create(2))
    {
        Assert.AreEqual(desired.Length, current.Length);
        for (int i1 = 0; i1 < desired.Length; i1++)
        {
            float[][] d1 = desired[i1];
            float[][] c1 = current[i1];
            var sample = new SupervisedSample();
            batch.Add(sample);
            Assert.AreEqual(d1.Length, c1.Length);
            for (int i2 = 0; i2 < d1.Length; i2++)
            {
                float[] d2 = d1[i2];
                float[] c2 = c1[i2];
                Assert.AreEqual(d2.Length, c2.Length);
                var da = ctx.DataArrayFactory.CreateConst(d2);
                var ca = ctx.DataArrayFactory.CreateConst(c2);
                // da doubles as input and desired output; ca holds the "computed" output.
                sample.Add(da, da, ca);
            }
        }

        float[] result = new float[2];
        var sw = new Stopwatch();
        sw.Start();
        for (int i = 0; i < repeat; i++)
        {
            ctx.VectorUtils.CalculateMSE(batch, resultValues, 1);
            await resultValues.Read(result);
            Assert.AreEqual(0.0f, result[0]);
            Assert.AreEqual(Math.Round(mse, 4), Math.Round(result[1], 4));
        }
        sw.Stop();
        Console.WriteLine("Elapsed: " + sw.ElapsedMilliseconds + " ms");
    }
}
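// CalcMSE, the CPU-side reference the test compares against, is not shown in this
// section. A plausible sketch consistent with how the test uses it (mean of squared
// differences over every sample and element); the exact averaging and any scaling
// are assumptions:
private static float CalcMSE(float[][][] desired, float[][][] current)
{
    double sum = 0.0;
    int count = 0;
    for (int i1 = 0; i1 < desired.Length; i1++)
    {
        for (int i2 = 0; i2 < desired[i1].Length; i2++)
        {
            for (int i = 0; i < desired[i1][i2].Length; i++)
            {
                double diff = desired[i1][i2][i] - current[i1][i2][i];
                sum += diff * diff;
                count++;
            }
        }
    }
    return (float)(sum / count);
}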
/// <summary>
/// Handles the computation internally, i.e. calls dependencies, creates the output, manages delays, etc.
/// </summary>
/// <param name="transformationRule">The transformation rule</param>
/// <param name="input">The input elements for this computation</param>
/// <param name="context">The transformation context</param>
/// <param name="computations">The computations for the input</param>
/// <param name="originalTransformationRule">The transformation rule of the original call</param>
/// <param name="comp">The computation</param>
/// <param name="compCon">The computation context</param>
private void HandleComputation(GeneralTransformationRule transformationRule, object[] input, IEnumerable context, List<ITraceEntry> computations, GeneralTransformationRule originalTransformationRule, Computation comp, ComputationContext compCon)
{
    // The transformation output is only generated when we are handling the base transformation rule,
    // because this is always required
    if (compCon.IsDelayed)
    {
        Stack<Computation> dependantComputes = new Stack<Computation>();
        var ruleStack = Transformation.ComputeInstantiatingTransformationRulePath(comp);
        if (transformationRule != originalTransformationRule)
        {
            ReorderStack(originalTransformationRule, comp, ruleStack);
        }
        var delayLevel = comp.Context.MinOutputDelayLevel;
        var computes = new List<Computation>();
        Computation lastComp = null;
        while (ruleStack.Count > 0)
        {
            var rule = ruleStack.Pop();
            var comp2 = FindOrCreateDependentComputation(input, computations, comp, dependantComputes, rule);

            // In case comp2 is not yet handled, a delay does not yet exist and thus
            // DelayLevel < minDelayLevel
            delayLevel = Math.Max(delayLevel, Math.Max(comp2.OutputDelayLevel, comp2.Context.MinOutputDelayLevel));

            if (lastComp != null)
            {
                lastComp.SetBaseComputation(comp2);
            }
            lastComp = comp2;
            computes.Add(comp2);
        }

        // Delay the call of dependencies;
        // this prevents the issue arising from computations calling their parents that come later in the stack
        foreach (var comp2 in dependantComputes)
        {
            CallDependencies(comp2, true);
        }

        if (delayLevel <= currentOutputDelay)
        {
            var createRule = computes[0];

            // Generate the output
            var output = createRule.CreateOutput(context);
            for (int i = computes.Count - 1; i >= 0; i--)
            {
                computes[i].InitializeOutput(output);
            }
            if (callTransformations)
            {
                for (int i = computes.Count - 1; i >= 0; i--)
                {
                    computes[i].Transform();
                }
            }
        }
        else
        {
            // Save the computations into a delay
            Delay(delayLevel, computes, context);
        }

        if (!callTransformations)
        {
            for (int i = computes.Count - 1; i >= 0; i--)
            {
                AddToComputationOrder(computes[i], currentTransformationDelay);
            }
        }
        for (int i = computes.Count - 1; i >= 0; i--)
        {
            dependencyCallQueue.Enqueue(computes[i]);
        }
    }
}
public static IEnumerable<TypeDefinition> AvailableTraits(this EntityInstance instance, ComputationContext ctx)
{
    IEntityScope scope = instance.Target.Cast<TemplateDefinition>();
    if (scope is TypeDefinition typedef)
    {
        foreach (TypeDefinition trait in typedef.AssociatedTraits)
        {
            // todo: once we know which traits fit, maybe we could cache them within the given instance?
            ConstraintMatch match = TypeMatcher.ArgumentsMatchConstraintsOf(ctx, trait.Name.Parameters, instance);
            if (match != ConstraintMatch.Yes)
            {
                continue;
            }
            yield return trait;
        }
    }
}
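// The todo above suggests caching the matched traits per instance. A minimal sketch of
// one way to do that, assuming a hypothetical static cache; the field name, the use of
// a plain Dictionary, and the lack of locking are illustrative only:
private static readonly Dictionary<EntityInstance, List<TypeDefinition>> traitCache
    = new Dictionary<EntityInstance, List<TypeDefinition>>();

public static IEnumerable<TypeDefinition> AvailableTraitsCached(this EntityInstance instance, ComputationContext ctx)
{
    if (!traitCache.TryGetValue(instance, out List<TypeDefinition> traits))
    {
        // Materialize once, then serve subsequent queries from the cache.
        traits = instance.AvailableTraits(ctx).ToList();
        traitCache[instance] = traits;
    }
    return traits;
}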
public static IEnumerable<EntityInstance> PrimaryAncestors(this EntityInstance instance, ComputationContext ctx)
{
    EntityInstance primary_parent = instance.Inheritance(ctx).MinimalParentsIncludingObject.FirstOrDefault();
    if (primary_parent == null)
    {
        return Enumerable.Empty<EntityInstance>();
    }
    else
    {
        // Recursively walk the chain of primary parents up to the root.
        return new[] { primary_parent }.Concat(primary_parent.PrimaryAncestors(ctx));
    }
}
public void Surf(ComputationContext ctx)
{
    this.ChildrenNodes.WhereType<ISurfable>().ForEach(it => it.Surf(ctx));
}
public override void Validate(ComputationContext ctx)
{
    this.ValidateRestrictedMember(ctx);

    TypeDefinition type_owner = this.ContainingType();
    if (type_owner != null && type_owner.IsTrait && this.IsAnyConstructor())
    {
        ctx.AddError(ErrorCode.TraitConstructor, this);
    }

    if (this.Name.Name == NameFactory.ConvertFunctionName && type_owner != null)
    {
        if (this.Parameters.Any())
        {
            ctx.AddError(ErrorCode.ConverterWithParameters, this);
        }
        if (!this.Modifier.HasPinned && !type_owner.Modifier.HasEnum && !type_owner.Modifier.IsSealed)
        {
            ctx.AddError(ErrorCode.ConverterNotPinned, this);
        }
        if (this.CallMode != ExpressionReadMode.ReadRequired)
        {
            ctx.AddError(ErrorCode.ConverterDeclaredWithIgnoredOutput, this);
        }
    }

    if (!this.IsAnyConstructor())
    {
        foreach (INameReference typename in this.Parameters.Select(it => it.TypeName))
        {
            typename.ValidateTypeNameVariance(ctx, VarianceMode.In);
        }
        this.ResultTypeName.ValidateTypeNameVariance(ctx, VarianceMode.Out);
    }

    if (!ctx.Env.Options.AllowInvalidMainResult
        && this == ctx.Env.MainFunction(ctx)
        && !ctx.Env.Nat8Type.InstanceOf.IsIdentical(this.ResultTypeName.Evaluation.Components))
    {
        ctx.AddError(ErrorCode.MainFunctionInvalidResultType, this.ResultTypeName);
    }

    if (this.Modifier.HasOverride && !this.Modifier.HasUnchainBase)
    {
        if (!this.IsDeclaration
            && this.ContainingType().DerivationTable.TryGetSuper(this, out FunctionDefinition dummy)
            && !this.DescendantNodes().WhereType<FunctionCall>().Any(it => it.Name.IsSuperReference))
        {
            ctx.AddError(ErrorCode.DerivationWithoutSuperCall, this);
        }
    }

    if (!this.IsDeclaration && !this.Modifier.HasNative)
    {
        if (!ctx.Env.IsOfUnitType(this.ResultTypeName) && !this.UserBody.Validation.IsTerminated)
        {
            ctx.AddError(ErrorCode.MissingReturn, this.UserBody);
        }
    }

    FunctionParameter tail_anon_variadic = this.Parameters
        .Where(it => it.IsVariadic)
        .Skip(1) // first variadic can be anonymous
        .FirstOrDefault(it => !it.IsNameRequired);
    if (tail_anon_variadic != null)
    {
        ctx.AddError(ErrorCode.AnonymousTailVariadicParameter, tail_anon_variadic);
    }

    // extensions
    {
        foreach (FunctionParameter param in this.Parameters.Skip(1).Where(it => it.Modifier.HasThis))
        {
            ctx.AddError(ErrorCode.NonPrimaryThisParameter, param);
        }

        if (this.IsExtension)
        {
            FunctionParameter param = this.Parameters.First();
            if (param.IsVariadic)
            {
                ctx.AddError(ErrorCode.VariadicThisParameter, param);
            }
            else if (param.IsOptional)
            {
                ctx.AddError(ErrorCode.OptionalThisParameter, param);
            }
            if (!ctx.Env.IsReferenceOfType(param.Evaluation.Components))
            {
                ctx.AddError(ErrorCode.NonReferenceThisParameter, param);
            }
        }
    }
}
public override int Compute(ComputationContext<int> task)
{
    return firstOperand.Compute(task) + secondOperand.Compute(task);
}
bool IExpression.IsLValue(ComputationContext ctx)
{
    throw new NotImplementedException();
}
private async Task MLPTrainFFTest(ComputationContext ctx, params LayerBehavior[] rules)
{
    // Samples of the x -> x^2 function to learn:
    var trainingData = new[,]
    {
        { -4.0f, 16.0f },
        { -3.0f, 9.0f },
        { -2.0f, 4.0f },
        { -1.0f, 1.0f },
        { 0.0f, 0.0f },
        { 1.0f, 1.0f },
        { 2.0f, 4.0f },
        { 3.0f, 9.0f },
        { 4.0f, 16.0f },
    };

    const float maxInput = 4.0f;
    const float minInput = -4.0f;
    const float maxOutput = 16.0f;
    const float minOutput = 0.0f;

    const int inputSize = 1;
    const int hiddenSize = 16;
    const int outputSize = 1;
    const int maxIterations = 1000;

    var layers = NNTestHelpers.CreateGDMLPLayers(true, inputSize, hiddenSize, outputSize, rules);

    using (var nn = ctx.NeuralNetworkFactory.CreateMultilayerPerceptron(layers, new MultilayerPerceptronProperties { GradientComputationMethod = GradientComputationMethod.FeedForward }))
    using (var batch = new SupervisedBatch())
    using (var errors = ctx.DataArrayFactory.Create(maxIterations))
    {
        for (int i = 0; i < trainingData.GetLength(0); i++)
        {
            batch.Add(
                ctx.DataArrayFactory.Create(new[] { NNTestHelpers.Normalize(trainingData[i, 0], minInput, maxInput) }),
                ctx.DataArrayFactory.Create(new[] { NNTestHelpers.Normalize(trainingData[i, 1], minOutput, maxOutput) }),
                ctx.DataArrayFactory.Create(1));
        }

        bool first = true;
        var sw = new Stopwatch();
        sw.Start();
        for (int it = 0; it < maxIterations; it++)
        {
            nn.Train(batch);

            if (first)
            {
                using (var weights = ctx.DataArrayFactory.Create(nn.NumberOfWeights))
                {
                    nn.GetWeights(weights);
                    float[] wa = new float[weights.Size];
                    await weights.Read(wa);

                    // It must be randomized:
                    Assert.IsTrue(wa.Sum() != 0.0f);
                }
                first = false;
            }

            ctx.VectorUtils.CalculateMSE(batch, errors, it);
        }

        float[] mses = new float[maxIterations];
        await errors.Read(mses);
        sw.Stop();

        foreach (var mse in mses) Console.WriteLine("Error: {0}", mse.ToString("0.00000000"));
        Console.WriteLine("Elapsed: {0} ms", sw.Elapsed.TotalMilliseconds);
    }
}
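// NNTestHelpers.Normalize is used above to squash the raw samples into the network's
// working range. A minimal sketch, assuming plain min-max scaling onto [-1, 1]; the
// actual helper may scale differently:
internal static float Normalize(float value, float min, float max)
{
    // Map [min, max] linearly onto [-1, 1].
    return ((value - min) / (max - min)) * 2.0f - 1.0f;
}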
public override void Evaluate(ComputationContext ctx)
{
    base.Evaluate(ctx);
    this.IsComputed = true;
}