private void Parse(RawDataset dataset)
{
    Stroke currentStroke = null;

    foreach (RawDatasetEntry entry in dataset.Entries)
    {
        if (entry.TouchSample.ButtonTouch == Sample.TOUCH_DOWN)
        {
            // TOUCH_DOWN starts a new stroke.
            currentStroke = new Stroke();
            currentStroke.RawDatasetEntries.Add(entry);
        }
        else if (entry.TouchSample.ButtonTouch == Sample.TOUCH_UP)
        {
            // The last entry has no actual features since it is TOUCH_UP, so it is not added.
            //currentStroke.RawDatasetEntries.Add(entry);
            Strokes.Add(currentStroke);
            currentStroke = null;
        }
        else
        {
            // Regular sample within the current stroke.
            currentStroke.RawDatasetEntries.Add(entry);
        }
    }
}
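// Minimal consumption sketch (hedged): assumes Strokes is the IList<Stroke> that Parse populates above;
// the surrounding class is not shown in this file. Prints how many raw entries each stroke collected.
private void PrintStrokeSummary()
{
    foreach (Stroke stroke in Strokes)
    {
        Console.WriteLine($"Stroke with {stroke.RawDatasetEntries.Count} entries");
    }
}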
private static void SampleXor()
{
    SigmaEnvironment sigma = SigmaEnvironment.Create("logical");
    sigma.SetRandomSeed(0);
    sigma.Prepare();

    // Raw in-memory XOR dataset: (0, 0 => 0), (0, 1 => 1), (1, 0 => 1), (1, 1 => 0).
    RawDataset dataset = new RawDataset("xor");
    dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
    dataset.AddRecords("targets", new[] { 0 }, new[] { 1 }, new[] { 1 }, new[] { 0 });

    ITrainer trainer = sigma.CreateTrainer("xor-trainer");

    trainer.Network.Architecture = InputLayer.Construct(2)
        + FullyConnectedLayer.Construct(2)
        + FullyConnectedLayer.Construct(1)
        + OutputLayer.Construct(1)
        + SquaredDifferenceCostLayer.Construct();
    trainer.TrainingDataIterator = new MinibatchIterator(1, dataset);
    trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
    trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.1);
    trainer.Operator = new CudaSinglethreadedOperator();

    trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

    trainer.AddLocalHook(new StopTrainingHook(atEpoch: 10000));
    trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), averageValues: true));
    trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Stop), averageValues: true));
    trainer.AddLocalHook(new ValueReporter("network.layers.*<external_output>._outputs.default.activations", TimeStep.Every(1, TimeScale.Stop)));
    trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.weights", TimeStep.Every(1, TimeScale.Stop)));
    trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.biases", TimeStep.Every(1, TimeScale.Stop)));

    sigma.Run();
}
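// Note (assumption): CudaSinglethreadedOperator above requires a CUDA-capable device. On a CPU-only
// machine the same sample should work with the CPU backend used elsewhere in this codebase, e.g.
// trainer.Operator = new CpuSinglethreadedOperator();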
/// <summary>
/// Create a raw logical XOR dataset.
/// The samples are (0, 0 => 0), (0, 1 => 1), (1, 0 => 1), (1, 1 => 0).
/// </summary>
/// <param name="name">The optional name.</param>
/// <returns>The XOR dataset.</returns>
public static IDataset Xor(string name = "xor")
{
    RawDataset dataset = new RawDataset(name);
    dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
    dataset.AddRecords("targets", new[] { 0 }, new[] { 1 }, new[] { 1 }, new[] { 0 });

    return dataset;
}
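// Usage sketch (hedged): wiring the XOR dataset into the data iterators used by the trainers in this
// codebase. Assumes this helper sits in the same class as Xor(...); the method itself is illustrative only.
private static void ConfigureXorIterators(ITrainer trainer)
{
    IDataset xor = Xor();

    trainer.TrainingDataIterator = new MinibatchIterator(1, xor);
    trainer.AddNamedDataIterator("validation", new UndividedIterator(xor));
}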
public static IDataset TicTacToe(string name = "tictactoe")
{
    int[] board = new int[3 * 3];
    int[] states = new int[] { 0, 1 }; // player o, empty, player x (only { 0, 1 } appear in the hard-coded boards below)
    IDictionary<int[], int[]> scoredBoards = new Dictionary<int[], int[]>();

    //_InternalScoreBoardsRec(0, board, states, scoredBoards);

    // One-hot targets: { 0, 0, 1 } = completed line, { 0, 1, 0 } = two adjacent marks in a line, { 1, 0, 0 } = otherwise.
    // Top row
    scoredBoards.Add(new[] { 1, 1, 1, 0, 0, 0, 0, 0, 0 }, new[] { 0, 0, 1 });
    scoredBoards.Add(new[] { 0, 1, 1, 0, 0, 0, 0, 0, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 1, 1, 0, 0, 0, 0, 0, 0, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 1, 0, 1, 0, 0, 0, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 1, 0, 0, 0, 0, 0, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 1, 0, 0, 0, 0, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 1, 0, 0, 0, 0, 0, 0 }, new[] { 1, 0, 0 });

    // Middle row
    scoredBoards.Add(new[] { 0, 0, 0, 1, 1, 1, 0, 0, 0 }, new[] { 0, 0, 1 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 1, 1, 0, 0, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 1, 1, 0, 0, 0, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 1, 0, 1, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 1, 0, 0, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 1, 0, 0, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 1, 0, 0, 0 }, new[] { 1, 0, 0 });

    // Bottom row
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 1, 1, 1 }, new[] { 0, 0, 1 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 0, 1, 1 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 1, 1, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 1, 0, 1 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 1, 0, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 0, 1, 0 }, new[] { 1, 0, 0 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 0, 0, 0, 0, 1 }, new[] { 1, 0, 0 });

    // Main diagonal
    scoredBoards.Add(new[] { 1, 0, 0, 0, 1, 0, 0, 0, 1 }, new[] { 0, 0, 1 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 1, 0, 0, 0, 1 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 1, 0, 0, 0, 1, 0, 0, 0, 0 }, new[] { 0, 1, 0 });

    // Anti-diagonal
    scoredBoards.Add(new[] { 0, 0, 1, 0, 1, 0, 1, 0, 0 }, new[] { 0, 0, 1 });
    scoredBoards.Add(new[] { 0, 0, 0, 0, 1, 0, 1, 0, 0 }, new[] { 0, 1, 0 });
    scoredBoards.Add(new[] { 0, 0, 1, 0, 1, 0, 0, 0, 0 }, new[] { 0, 1, 0 });

    // Shuffle once and materialise so inputs and targets stay aligned across both Select calls.
    Random rng = new Random();
    KeyValuePair<int[], int[]>[] scoredBoardsAsArray = scoredBoards.OrderBy(x => rng.Next()).ToArray();

    RawDataset dataset = new RawDataset(name);
    dataset.AddRecords("inputs", scoredBoardsAsArray.Select(x => x.Key).ToArray());
    dataset.AddRecords("targets", scoredBoardsAsArray.Select(x => x.Value).ToArray());

    return dataset;
}
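// Usage sketch (hedged): a trainer over the tic-tac-toe dataset, mirroring the XOR trainer below.
// 9 inputs = flattened 3x3 board, 3 outputs = one-hot score; the hidden layer sizes and the learning
// rate are illustrative placeholders, not values taken from this codebase.
private static ITrainer CreateTicTacToeTrainer(SigmaEnvironment sigma)
{
    ITrainer trainer = sigma.CreateTrainer("tictactoe-trainer");

    trainer.Network = new Network();
    trainer.Network.Architecture = InputLayer.Construct(9)
        + FullyConnectedLayer.Construct(9)
        + FullyConnectedLayer.Construct(3)
        + OutputLayer.Construct(3)
        + SquaredDifferenceCostLayer.Construct();
    trainer.TrainingDataIterator = new MinibatchIterator(1, TicTacToe());
    trainer.Operator = new CpuSinglethreadedOperator();
    trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);
    trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

    return trainer;
}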
private static ITrainer CreateXorTrainer(SigmaEnvironment sigma)
{
    RawDataset dataset = new RawDataset("xor");
    dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
    dataset.AddRecords("targets", new[] { 0 }, new[] { 1 }, new[] { 1 }, new[] { 0 });

    ITrainer trainer = sigma.CreateTrainer("xor-trainer");

    trainer.Network = new Network();
    trainer.Network.Architecture = InputLayer.Construct(2)
        + FullyConnectedLayer.Construct(1)
        + OutputLayer.Construct(1)
        + SquaredDifferenceCostLayer.Construct();
    trainer.TrainingDataIterator = new UndividedIterator(dataset);
    trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
    trainer.Operator = new CpuSinglethreadedOperator();
    trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.01);

    trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

    trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
    trainer.AddLocalHook(new ValueReporter("network.layers.1-fullyconnected._outputs.default.activations", TimeStep.Every(1, TimeScale.Epoch)));

    return trainer;
}
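// Usage sketch (hedged), following the pattern of SampleXor above: create and prepare the environment,
// build the trainer, then let the environment run it.
private static void RunXorTrainer()
{
    SigmaEnvironment sigma = SigmaEnvironment.Create("logical");
    sigma.SetRandomSeed(0);
    sigma.Prepare();

    ITrainer trainer = CreateXorTrainer(sigma);
    trainer.AddLocalHook(new StopTrainingHook(atEpoch: 10000)); // bound the run, as in SampleXor

    sigma.Run();
}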
static async Task Main(string[] args)
{
    AppDomain.CurrentDomain.UnhandledException += (s, e) =>
    {
        if (e.ExceptionObject is Exception ex)
        {
            Console.WriteLine("Fatal: " + ex.Message);
            Console.WriteLine(ex.ToString());
        }
        else
        {
            Console.WriteLine("Fatal: Unexpected error occurred.");
        }

        Environment.Exit(-1);
    };

    Options options = null;
    Parser.Default.ParseArguments<Options>(args)
        .WithParsed(o => options = o);
    if (options == null)
    {
        return;
    }

    // Stage 1: import the source model into the intermediate graph.
    Graph graph;
    switch (options.InputFormat.ToLowerInvariant())
    {
        case "caffe":
            {
                var file = File.ReadAllBytes(options.Input);
                var model = Caffe.NetParameter.Parser.ParseFrom(file);
                var tfc = new CaffeToGraphConverter(model);
                tfc.Convert();
                graph = tfc.Graph;
                break;
            }

        case "paddle":
            {
                var tfc = new PaddleToGraphConverter(options.Input);
                tfc.Convert(0);
                graph = tfc.Graph;
                break;
            }

        case "tflite":
            {
                var file = File.ReadAllBytes(options.Input);
                var model = tflite.Model.GetRootAsModel(new FlatBuffers.ByteBuffer(file));
                var tfc = new TfLiteToGraphConverter(model, model.Subgraphs(0).Value);
                tfc.Convert();
                graph = tfc.Graph;
                break;
            }

        case "k210model":
            graph = null;
            break;

        default:
            throw new ArgumentException("input-format");
    }

    // Stage 2: emit the requested output format.
    var outputFormat = options.OutputFormat.ToLowerInvariant();
    switch (outputFormat)
    {
        case "tf":
            {
                var ctx = new GraphPlanContext();
                graph.Plan(ctx);

                using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                    await ctx.SaveAsync(f);
                break;
            }

        case "addpad":
            {
                Transform.Process(graph, new Transform[] { new Conv2dAddSpaceToBatchNdTransform() });

                var ctx = new GraphPlanContext();
                graph.Plan(ctx);

                using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                    await ctx.SaveAsync(f);
                break;
            }

        case "tflite":
            {
                await ConvertToTFLite(graph, options.Output);
                break;
            }

        case "k210model":
        case "k210pb":
            {
                float? mean = null, std = null;
                PostprocessMethods pm = PostprocessMethods.Normalize0To1;

                if (options.Postprocess == "n1to1")
                {
                    pm = PostprocessMethods.NormalizeMinus1To1;
                }
                else if (!string.IsNullOrWhiteSpace(options.Postprocess))
                {
                    var match = Regex.Match(options.Postprocess, @"mean:(?<mean>(-?\d+)(\.\d+)?),std:(?<std>(-?\d+)(\.\d+)?)");
                    if (match.Success)
                    {
                        mean = float.Parse(match.Groups["mean"].Value);
                        std = float.Parse(match.Groups["std"].Value);
                    }
                    else
                    {
                        throw new ArgumentOutOfRangeException("Invalid postprocess method");
                    }
                }

                if (options.InputFormat.ToLowerInvariant() != "tflite")
                {
                    // Non-tflite inputs go through an intermediate tflite model, which is then re-imported.
                    var tmpTflite = Path.GetTempFileName();
                    await ConvertToTFLite(graph, tmpTflite);

                    var file = File.ReadAllBytes(tmpTflite);
                    File.Delete(tmpTflite);
                    var model = tflite.Model.GetRootAsModel(new FlatBuffers.ByteBuffer(file));
                    var tfc = new TfLiteToGraphConverter(model, model.Subgraphs(0).Value);
                    tfc.Convert();
                    graph = tfc.Graph;
                }

                Transform.Process(graph, new Transform[] {
                    new EliminateReshapeTransform(),
                    new EliminateTwoReshapeTransform(),
                    new EliminateTensorflowReshapeTransform(),
                    new TensorflowReshapeToFlattenTransform(),
                    new K210SeparableConv2dTransform(),
                    new K210SpaceToBatchNdAndValidConv2dTransform(),
                    new K210SameConv2dTransform(),
                    new K210Stride2Conv2dTransform(),
                    new GlobalAveragePoolTransform(),
                    options.FloatFc ? (Transform)new DummyTransform() : new K210FullyConnectedTransform(),
                    new LeakyReluTransform(),
                    new K210Conv2dWithNonTrivialActTransform(),
                    new K210Conv2dWithMaxAvgPoolTransform(),
                    new Conv2d1x1ToFullyConnectedTransform(),
                    new K210EliminateAddRemovePaddingTransform(),
                    new QuantizedAddTransform(),
                    new QuantizedMaxPool2dTransform(),
                    new QuantizedResizeNearestNeighborTransform(),
                    new ExclusiveConcatenationTransform(),
                    new QuantizedExclusiveConcatenationTransform(),
                    new QuantizedConcatenationTransform(),
                    new EliminateQuantizeDequantizeTransform(),
                    new EliminateInputQuantizeTransform(),
                    new K210EliminateInputUploadTransform(),
                    new K210EliminateConv2dUploadTransform(),
                    new K210EliminateUploadAddPaddingTransform(),
                    new K210EliminateConv2dRequantizeTransform(),
                    options.ChannelwiseOutput ? (Transform)new K210Conv2dToChannelwiseTransform() : new DummyTransform(),
                    //new EliminateDequantizeOutputTransform()
                });

                {
                    var ctx = new GraphPlanContext();
                    graph.Plan(ctx);

                    if (outputFormat == "k210model")
                    {
                        var dim = graph.Inputs.First().Output.Dimensions.ToArray();
                        Dataset dataset;
                        if (options.DatasetFormat == "image")
                        {
                            dataset = new ImageDataset(
                                options.Dataset,
                                dim.Skip(1).ToArray(),
                                1,
                                PreprocessMethods.None,
                                pm,
                                mean,
                                std);
                        }
                        else if (options.DatasetFormat == "raw")
                        {
                            dataset = new RawDataset(
                                options.Dataset,
                                dim.Skip(1).ToArray(),
                                1,
                                PreprocessMethods.None,
                                pm,
                                mean,
                                std);
                        }
                        else
                        {
                            throw new ArgumentException("Invalid dataset format");
                        }

                        var k210c = new GraphToK210Converter(graph, options.WeightsBits);
                        await k210c.ConvertAsync(
                            dataset,
                            ctx,
                            Path.GetDirectoryName(options.Output),
                            Path.GetFileNameWithoutExtension(options.Output),
                            options.ChannelwiseOutput);
                    }
                    else
                    {
                        using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                            await ctx.SaveAsync(f);
                    }
                }

                break;
            }

        case "k210script":
            {
                {
                    var dim = graph.Inputs.First().Output.Dimensions.ToArray();
                    var k210c = new GraphToScriptConverter(graph);
                    await k210c.ConvertAsync(
                        Path.GetDirectoryName(options.Output),
                        Path.GetFileNameWithoutExtension(options.Output));
                }

                break;
            }

        case "inference":
            {
                if (options.InputFormat.ToLowerInvariant() != "k210model")
                {
                    throw new ArgumentException("Inference mode only supports k210model input.");
                }

                var emulator = new NnCase.Converter.K210.Emulator.K210Emulator(
                    File.ReadAllBytes(options.Input));
                await emulator.RunAsync(options.Dataset, options.Output);
                break;
            }

        default:
            throw new ArgumentException("output-format");
    }
}
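// Sketch (hedged) of the Options class this Main expects, reconstructed from the properties it reads.
// The CommandLine [Option] flag names, short names, and defaults are assumptions for illustration only;
// they are not taken from the actual project.
class Options
{
    [Option('i', "input-format", Required = true)]
    public string InputFormat { get; set; }

    [Option('o', "output-format", Required = true)]
    public string OutputFormat { get; set; }

    [Option("input", Required = true)]
    public string Input { get; set; }

    [Option("output", Required = true)]
    public string Output { get; set; }

    [Option("dataset")]
    public string Dataset { get; set; }

    [Option("dataset-format", Default = "image")]
    public string DatasetFormat { get; set; }

    [Option("postprocess")]
    public string Postprocess { get; set; }

    [Option("weights-bits", Default = 8)]
    public int WeightsBits { get; set; }

    [Option("float-fc", Default = false)]
    public bool FloatFc { get; set; }

    [Option("channelwise-output", Default = false)]
    public bool ChannelwiseOutput { get; set; }
}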