public void TestUnifiedIteratorYield()
{
	// A unified iterator hands out the complete dataset as one single block.
	string filename = ".unittestfile" + nameof(TestUnifiedIteratorYield);
	CreateCsvTempFile(filename);
	SigmaEnvironment.Clear();

	FileSource fileSource = new FileSource(filename, Path.GetTempPath());
	CsvRecordExtractor csvExtractor = (CsvRecordExtractor) new CsvRecordReader(fileSource).Extractor(
		new CsvRecordExtractor(new Dictionary<string, int[][]> { ["inputs"] = new[] { new[] { 0 } } }));
	ExtractedDataset testDataset = new ExtractedDataset("test", 2,
		new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestUnifiedIteratorYield)), true, csvExtractor);
	UnifiedIterator unifiedIterator = new UnifiedIterator(testDataset);

	SigmaEnvironment sigma = SigmaEnvironment.Create("test");
	IComputationHandler cpuHandler = new CpuFloat32Handler();

	foreach (var yieldedBlock in unifiedIterator.Yield(cpuHandler, sigma))
	{
		// All three records from the temp CSV arrive in the one yielded block.
		Assert.AreEqual(new[] { 5.1f, 4.9f, 4.7f }, yieldedBlock["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 3));
	}

	testDataset.Dispose();
	DeleteTempFile(filename);
}
public void TestOneHotPreprocessorExtract()
{
	// One-hot encode section "test" over the value range [0, 2].
	OneHotPreprocessor oneHot = new OneHotPreprocessor("test", minValue: 0, maxValue: 2);
	IComputationHandler cpuHandler = new CpuFloat32Handler();

	Dictionary<string, INDArray> processed = oneHot.ExtractDirectFrom(GetNamedArrayTestData(), 2, cpuHandler);

	// Two records of three one-hot slots each — the expected layout implies raw
	// values 0 and 2 ([1,0,0] and [0,0,1]) in the shared test data.
	Assert.AreEqual(new float[] { 1, 0, 0, 0, 0, 1 }, processed["test"].GetDataAs<float>().GetValuesArrayAs<float>(0, 6).ToArray());
}
public void TestNormalisingPreprocessorExtractDirect()
{
	// Map section "test" linearly from the input range [1, 9] onto [0, 1].
	NormalisingPreprocessor normaliser = new NormalisingPreprocessor(1, 9, 0, 1, "test");
	IComputationHandler cpuHandler = new CpuFloat32Handler();

	Dictionary<string, INDArray> processed = normaliser.ExtractDirectFrom(GetNamedArrayTestData(), 1, cpuHandler);

	// Nine evenly spaced outputs, step 0.125, so inputs were presumably 1..9.
	Assert.AreEqual(new[] { 0.0f, 0.125f, 0.25f, 0.375f, 0.5f, 0.625f, 0.75f, 0.875f, 1.0f },
		processed["test"].GetDataAs<float>().GetValuesArrayAs<float>(0, 9).ToArray());
}
public void TestByteRecordExtractorExtract()
{
	// Extract a single-byte "inputs" section at offset 0 with shape [1].
	ArrayRecordExtractor<byte> byteExtractor = new ArrayRecordExtractor<byte>(
		ArrayRecordExtractor<byte>.ParseExtractorParameters("inputs", new[] { 0L }, new[] { 1L }));
	IComputationHandler cpuHandler = new CpuFloat32Handler();

	// No record reader was attached, so direct extraction has no source and must fail.
	Assert.Throws<InvalidOperationException>(() => byteExtractor.ExtractDirect(10, cpuHandler));

	byte[][] rawRecords = { new byte[] { 0 }, new byte[] { 1 } };

	Assert.AreEqual(new float[] { 0, 1 },
		byteExtractor.ExtractDirectFrom(rawRecords, 2, cpuHandler)["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0L, 2L));
}
private static void SampleLoadExtractIterate()
{
	// Demo: full data pipeline — load (download/decompress) -> extract -> preprocess -> iterate.
	SigmaEnvironment sigma = SigmaEnvironment.Create("test");
	sigma.Prepare();

	//var irisReader = new CsvRecordReader(new MultiSource(new FileSource("iris.data"), new UrlSource("http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data")));
	//IRecordExtractor irisExtractor = irisReader.Extractor("inputs2", new[] { 0, 3 }, "targets2", 4).AddValueMapping(4, "Iris-setosa", "Iris-versicolor", "Iris-virginica");
	//irisExtractor = irisExtractor.Preprocess(new OneHotPreprocessor(sectionName: "targets2", minValue: 0, maxValue: 2), new NormalisingPreprocessor(sectionNames: "inputs2", minInputValue: 0, maxInputValue: 6));

	// MNIST training images: 16-byte header, one 28*28-byte record per image,
	// read from a local file or downloaded and decompressed on demand.
	ByteRecordReader mnistImageReader = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))));
	// Normalise raw pixel bytes from [0, 255].
	IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

	// MNIST training labels: 8-byte header, one byte per label, one-hot encoded over 0..9.
	ByteRecordReader mnistTargetReader = new ByteRecordReader(headerLengthBytes: 8, recordSizeBytes: 1, source: new CompressedSource(new MultiSource(new FileSource("train-labels-idx1-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"))));
	IRecordExtractor mnistTargetExtractor = mnistTargetReader.Extractor("targets", new[] { 0L }, new[] { 1L }).Preprocess(new OneHotPreprocessor(minValue: 0, maxValue: 9));

	IComputationHandler handler = new CpuFloat32Handler();

	// Combine image and label extractors into one dataset; block size chosen automatically.
	ExtractedDataset dataset = new ExtractedDataset("mnist-training", ExtractedDataset.BlockSizeAuto, mnistImageExtractor, mnistTargetExtractor);

	// 80/20 record-wise split into training and validation slices.
	IDataset[] slices = dataset.SplitRecordwise(0.8, 0.2);
	IDataset trainingData = slices[0];
	IDataset validationData = slices[1];

	MinibatchIterator trainingIterator = new MinibatchIterator(1, trainingData);
	// NOTE(review): the validation iterator is created but never consumed in this demo.
	MinibatchIterator validationIterator = new MinibatchIterator(1, validationData);

	// Deliberate endless demo loop: repeatedly print each minibatch as ASCII art;
	// the sleeps only slow console output to a human-readable pace.
	while (true)
	{
		foreach (var block in trainingIterator.Yield(handler, sigma))
		{
			Thread.Sleep(100);

			PrintFormattedBlock(block, PrintUtils.AsciiGreyscalePalette);

			Thread.Sleep(1000);
		}
	}

	//Random random = new Random();
	//INDArray array = new ADNDArray<float>(3, 1, 2, 2);

	//new GaussianInitialiser(0.05, 0.05).Initialise(array, Handler, random);

	//Console.WriteLine(array);

	//new ConstantValueInitialiser(1).Initialise(array, Handler, random);

	//Console.WriteLine(array);

	//dataset.InvalidateAndClearCaches();
}
private static void SampleNetworkArchitecture()
{
	// Demo: compose a layered network architecture, initialise its parameters,
	// and inspect the resulting registry via wildcard resolution.
	SigmaEnvironment sigma = SigmaEnvironment.Create("test");
	IComputationHandler handler = new CpuFloat32Handler();

	ITrainer trainer = sigma.CreateTrainer("test_trainer");
	trainer.Network = new Network();
	// Architecture composition via operator overloads: '+' appends layers,
	// 'n * (...)' presumably repeats the bracketed group n times — confirm semantics.
	trainer.Network.Architecture = InputLayer.Construct(2, 2) + ElementwiseLayer.Construct(2 * 2) + FullyConnectedLayer.Construct(2) + 2 * (FullyConnectedLayer.Construct(4) + FullyConnectedLayer.Construct(2)) + OutputLayer.Construct(2);
	// NOTE(review): DeepCopy is taken both before and after Initialise — presumably
	// exercising copy semantics of uninitialised vs. initialised networks; confirm intent.
	trainer.Network = (INetwork)trainer.Network.DeepCopy();

	trainer.Operator = new CpuMultithreadedOperator(10);

	// Wildcard-matched initialisers: weights get zero-mean gaussian noise,
	// bias parameters a small positive mean.
	trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1f));
	trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));
	trainer.Initialise(handler);

	trainer.Network = (INetwork)trainer.Network.DeepCopy();

	Console.WriteLine(trainer.Network.Registry);

	IRegistryResolver resolver = new RegistryResolver(trainer.Network.Registry);

	Console.WriteLine("===============");
	// Resolve the weights of every layer with one wildcard query.
	object[] weights = resolver.ResolveGet<object>("layers.*.weights");
	Console.WriteLine(string.Join("\n", weights));
	Console.WriteLine("===============");

	//foreach (ILayerBuffer buffer in trainer.Network.YieldLayerBuffersOrdered())
	//{
	//	Console.WriteLine(buffer.Layer.Name + ": ");
	//	Console.WriteLine("inputs:");
	//	foreach (string input in buffer.Inputs.Keys)
	//	{
	//		Console.WriteLine($"\t{input}: {buffer.Inputs[input].GetHashCode()}");
	//	}
	//	Console.WriteLine("outputs:");
	//	foreach (string output in buffer.Outputs.Keys)
	//	{
	//		Console.WriteLine($"\t{output}: {buffer.Outputs[output].GetHashCode()}");
	//	}
	//}
}
private static void SampleDotProduct()
{
	// Demo: matrix product of a 3x2 and a 2x3 ndarray, both filled with 1..6.
	IComputationHandler handler = new CpuFloat32Handler();

	INDArray left = handler.NDArray(ArrayUtils.Range(1, 6), 3, 2);
	INDArray right = handler.NDArray(ArrayUtils.Range(1, 6), 2, 3);

	Console.WriteLine("a = " + ArrayUtils.ToString(left, (ADNDArray<float>.ToStringElement)null, 0, true));
	Console.WriteLine("b = " + ArrayUtils.ToString(right, (ADNDArray<float>.ToStringElement)null, 0, true));

	INDArray product = handler.Dot(left, right);

	Console.WriteLine("c = " + ArrayUtils.ToString(product, (ADNDArray<float>.ToStringElement)null, 0, true));
}
public void TestGaussianInitialiserInitialise()
{
	// Fix: this test previously instantiated a ConstantValueInitialiser even though
	// it is named (and meant) to test the GaussianInitialiser — use the type under test.
	GaussianInitialiser initialiser = new GaussianInitialiser(standardDeviation: 0.05f);
	INDArray array = new ADNDArray<float>(2, 1, 2, 2);
	IComputationHandler handler = new CpuFloat32Handler();
	Random random = new Random();

	// Argument validation: array/number, handler and random are all mandatory.
	Assert.Throws<ArgumentNullException>(() => initialiser.Initialise((INDArray)null, handler, random));
	Assert.Throws<ArgumentNullException>(() => initialiser.Initialise((INumber)null, handler, random));
	Assert.Throws<ArgumentNullException>(() => initialiser.Initialise(array, null, random));
	Assert.Throws<ArgumentNullException>(() => initialiser.Initialise(array, handler, null));

	// TODO verify statistical properties (mean / standard deviation) of the initialised values
}
public void TestConstantValueInitialiserInitialise()
{
	// A constant-value initialiser must reject null arguments and fill every element.
	ConstantValueInitialiser constantInitialiser = new ConstantValueInitialiser(2.0);
	IComputationHandler cpuHandler = new CpuFloat32Handler();
	INDArray targetArray = cpuHandler.NDArray(2L, 1L, 2L, 2L);
	Random rng = new Random();

	// All arguments are mandatory.
	Assert.Throws<ArgumentNullException>(() => constantInitialiser.Initialise((INDArray)null, cpuHandler, rng));
	Assert.Throws<ArgumentNullException>(() => constantInitialiser.Initialise((INumber)null, cpuHandler, rng));
	Assert.Throws<ArgumentNullException>(() => constantInitialiser.Initialise(targetArray, null, rng));
	Assert.Throws<ArgumentNullException>(() => constantInitialiser.Initialise(targetArray, cpuHandler, null));

	constantInitialiser.Initialise(targetArray, cpuHandler, new Random());

	// Every one of the 2*1*2*2 = 8 elements is set to the constant 2.
	Assert.AreEqual(new float[] { 2, 2, 2, 2, 2, 2, 2, 2 }, targetArray.GetDataAs<float>().GetValuesArrayAs<float>(0, 8));
}
public void TestUndividedIteratorYield()
{
	// Fix: use this test's own name for the temp file and disk-cache directory.
	// It previously reused nameof(TestUndividedIteratorCreate), so the two tests
	// shared a file and cache path and could collide when run together.
	string filename = ".unittestfile" + nameof(TestUndividedIteratorYield);
	CreateCsvTempFile(filename);
	SigmaEnvironment.Clear();

	FileSource source = new FileSource(filename, Path.GetTempPath());
	CsvRecordExtractor extractor = (CsvRecordExtractor) new CsvRecordReader(source).Extractor(
		new CsvRecordExtractor(new Dictionary<string, int[][]> { ["inputs"] = new[] { new[] { 0 } } }));
	ExtractedDataset dataset = new ExtractedDataset("test", 2,
		new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestUndividedIteratorYield)), true, extractor);
	UndividedIterator iterator = new UndividedIterator(dataset);
	SigmaEnvironment sigma = SigmaEnvironment.Create("test");
	IComputationHandler handler = new CpuFloat32Handler();

	// Block size 2 over 3 records: the undivided iterator yields exactly two blocks.
	int index = 0;
	foreach (var block in iterator.Yield(handler, sigma))
	{
		if (index == 0)
		{
			Assert.AreEqual(new float[] { 5.1f, 4.9f }, block["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));
		}
		else if (index == 1)
		{
			Assert.AreEqual(new float[] { 4.7f }, block["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 1));
		}
		else
		{
			Assert.Fail("There can be a maximum of two iterations, but this is yield iteration 3 (index 2).");
		}

		index++;
	}

	dataset.Dispose();
	DeleteTempFile(filename);
}
public void TestDatasetFetchBlockSequential()
{
	// Single-record blocks must be fetchable in any order and re-fetchable.
	RedirectGlobalsToTempPath();
	string filename = $"test{nameof(TestDatasetFetchBlockSequential)}.dat";
	CreateCsvTempFile(filename);

	CsvRecordExtractor extractor = new CsvRecordReader(new FileSource(filename, Path.GetTempPath())).Extractor("inputs", 1, 2, "targets", 3);
	ExtractedDataset dataset = new ExtractedDataset(name: "name", blockSizeRecords: 1, recordExtractors: extractor);
	CpuFloat32Handler handler = new CpuFloat32Handler();

	IDictionary<string, INDArray> fetched = dataset.FetchBlock(0, handler, false);
	Assert.AreEqual(new[] { 3.5f, 1.4f }, fetched["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	// Fetching the same index again must return the same block contents.
	fetched = dataset.FetchBlock(0, handler, false);
	Assert.AreEqual(new[] { 3.5f, 1.4f }, fetched["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	// Out-of-order access: request block 2 before block 1.
	fetched = dataset.FetchBlock(2, handler, false);
	Assert.AreEqual(new[] { 3.2f, 1.3f }, fetched["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	fetched = dataset.FetchBlock(1, handler, false);
	Assert.AreEqual(new[] { 3.0f, 1.4f }, fetched["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	// Past the end of the data there is no block to fetch.
	fetched = dataset.FetchBlock(3, handler, false);
	Assert.IsNull(fetched);

	dataset.Dispose();
	DeleteTempFile(filename);
}
public void TestDatasetFreeBlockSequential()
{
	RedirectGlobalsToTempPath();
	// Fix: the temp file is now named after this test. It previously used
	// nameof(TestDatasetFetchBlockSequential), so the two dataset tests shared
	// (and could race on) the same file in the temp directory.
	string filename = $"test{nameof(TestDatasetFreeBlockSequential)}.dat";
	CreateCsvTempFile(filename);

	CsvRecordExtractor extractor = new CsvRecordReader(new FileSource(filename, Path.GetTempPath())).Extractor("inputs", 1, 2, "targets", 3);
	ExtractedDataset dataset = new ExtractedDataset(name: "name", blockSizeRecords: 1, recordExtractors: extractor);
	CpuFloat32Handler handler = new CpuFloat32Handler();

	dataset.FetchBlock(0, handler, false);
	dataset.FetchBlock(1, handler, false);
	dataset.FetchBlock(2, handler, false);

	Assert.AreEqual(3, dataset.ActiveBlockRegionCount);

	// Freeing two of the three blocks leaves a single active region.
	dataset.FreeBlock(1, handler);
	dataset.FreeBlock(2, handler);

	Assert.AreEqual(1, dataset.ActiveBlockRegionCount);

	// Freed blocks must be re-fetchable with their original contents.
	var namedArrays = dataset.FetchBlock(0, handler, false);
	Assert.AreEqual(new[] { 3.5f, 1.4f }, namedArrays["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	namedArrays = dataset.FetchBlock(1, handler, false);
	Assert.AreEqual(new[] { 3.0f, 1.4f }, namedArrays["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	namedArrays = dataset.FetchBlock(2, handler, false);
	Assert.AreEqual(new[] { 3.2f, 1.3f }, namedArrays["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	dataset.Dispose();
	DeleteTempFile(filename);
}
private static void SampleAutomaticDifferentiation()
{
	// Demo: automatic differentiation through a small traced expression graph.
	IComputationHandler handler = new CpuFloat32Handler();

	uint traceTag = handler.BeginTrace();

	INDArray array = handler.NDArray(ArrayUtils.Range(1, 6), 2, 3);
	INumber a = handler.Number(-1.0f), b = handler.Number(3.0f);

	// c = a + b, traced so derivatives with respect to it can be queried later.
	INumber c = handler.Trace(handler.Add(a, b), traceTag);
	INumber d = handler.Multiply(c, 2);            // d = 2c
	INumber e = handler.Add(d, handler.Add(c, 3)); // e = d + (c + 3)
	INumber f = handler.SquareRoot(e);             // f = sqrt(e)

	array = handler.Multiply(array, f);

	// Scalar "cost": mean of all array elements.
	INumber cost = handler.Divide(handler.Sum(array), array.Length);

	Console.WriteLine("cost: " + cost);

	// Differentiate everything with respect to the cost; each GetDerivative below
	// reads d(cost)/d(x) for the given intermediate.
	handler.ComputeDerivativesTo(cost);

	Console.WriteLine(array);
	Console.WriteLine("f: " + handler.GetDerivative(f));
	Console.WriteLine("e: " + handler.GetDerivative(e));
	Console.WriteLine("d: " + handler.GetDerivative(d));
	Console.WriteLine("c: " + handler.GetDerivative(c));
	// NOTE(review): the "a: " label actually prints the derivative of 'array', not of 'a'.
	Console.WriteLine("a: " + handler.GetDerivative(array));

	// Second pass: differentiate with respect to the intermediate f instead of the cost.
	handler.ComputeDerivativesTo(f);

	Console.WriteLine("f: " + handler.GetDerivative(f));
	Console.WriteLine("e: " + handler.GetDerivative(e));
	Console.WriteLine("d: " + handler.GetDerivative(d));
	Console.WriteLine("c: " + handler.GetDerivative(c));
	Console.WriteLine("a: " + handler.GetDerivative(array));
}
private static void SampleNetworkMerging()
{
	// Demo: initialise three identical architectures with different constant weights,
	// then merge two of the networks and print the registries before and after.
	SigmaEnvironment sigma = SigmaEnvironment.Create("merge_test");

	ITrainer[] trainers = new ITrainer[3];
	int[] constantValues = { 2, 10, 70 };

	//INetworkMerger merger = new WeightedNetworkMerger(10d, 10d, 1d);
	INetworkMerger merger = new AverageNetworkMerger();
	IComputationHandler handler = new CpuFloat32Handler();

	for (int i = 0; i < trainers.Length; i++)
	{
		ITrainer current = sigma.CreateTrainer($"MergeTrainer{i}");
		current.Network = new Network($"{i}");
		current.Network.Architecture = InputLayer.Construct(2, 2) + ElementwiseLayer.Construct(2 * 2) + OutputLayer.Construct(2);
		current.AddInitialiser("*.weights", new ConstantValueInitialiser(constantValues[i]));
		current.Operator = new CpuMultithreadedOperator(5);
		current.Initialise(handler);
		trainers[i] = current;
	}

	foreach (ITrainer trainer in trainers)
	{
		Console.WriteLine(trainer.Network.Registry);
	}

	// Merge the weight entries of the second and third network.
	merger.AddMergeEntry("layers.*.weights");
	merger.Merge(trainers[1].Network, trainers[2].Network, handler);

	Console.WriteLine("*******************");

	foreach (ITrainer trainer in trainers)
	{
		Console.WriteLine(trainer.Network.Registry);
	}
}
public void TestMinibatchIteratorYield(int minibatchSize)
{
	// Minibatch iteration over a 3-record CSV with the given minibatch size.
	string filename = ".unittestfile" + nameof(TestMinibatchIteratorYield);
	CreateCsvTempFile(filename);
	SigmaEnvironment.Clear();

	FileSource fileSource = new FileSource(filename, Path.GetTempPath());
	CsvRecordExtractor csvExtractor = (CsvRecordExtractor) new CsvRecordReader(fileSource).Extractor(
		new CsvRecordExtractor(new Dictionary<string, int[][]> { ["inputs"] = new[] { new[] { 0 } } }));
	ExtractedDataset dataset = new ExtractedDataset("test", 1,
		new DiskCacheProvider(Path.GetTempPath() + "/" + nameof(TestMinibatchIteratorYield)), true, csvExtractor);
	MinibatchIterator minibatchIterator = new MinibatchIterator(minibatchSize, dataset);
	IComputationHandler handler = new CpuFloat32Handler();
	SigmaEnvironment sigma = SigmaEnvironment.Create("test");

	// Yield must validate its arguments before producing anything.
	Assert.Throws<ArgumentNullException>(() => minibatchIterator.Yield(null, null).GetEnumerator().MoveNext());
	Assert.Throws<ArgumentNullException>(() => minibatchIterator.Yield(handler, null).GetEnumerator().MoveNext());
	Assert.Throws<ArgumentNullException>(() => minibatchIterator.Yield(null, sigma).GetEnumerator().MoveNext());

	// Iterate well past one epoch (>5 passes over the records) to ensure consistency.
	int iteration = 0;
	foreach (var block in minibatchIterator.Yield(handler, sigma))
	{
		if (iteration++ > 20)
		{
			break;
		}

		// Every yielded minibatch must begin with one of the known record values.
		Assert.Contains(block["inputs"].GetValue<float>(0, 0, 0), new float[] { 5.1f, 4.9f, 4.7f });
	}

	dataset.Dispose();
	DeleteTempFile(filename);
}
public async Task TestDatasetFetchAsync()
{
	// Overlapping asynchronous block fetches must all complete with correct data.
	RedirectGlobalsToTempPath();
	string filename = $"test{nameof(TestDatasetFetchAsync)}.dat";
	CreateCsvTempFile(filename);

	CsvRecordExtractor extractor = new CsvRecordReader(new FileSource(filename, Path.GetTempPath())).Extractor("inputs", 1, 2, "targets", 3);
	ExtractedDataset dataset = new ExtractedDataset(name: "name", blockSizeRecords: 1, recordExtractors: extractor);
	CpuFloat32Handler handler = new CpuFloat32Handler();

	// Kick off three fetches out of order before awaiting any of them.
	var fetch0 = dataset.FetchBlockAsync(0, handler);
	var fetch2 = dataset.FetchBlockAsync(2, handler);
	var fetch1 = dataset.FetchBlockAsync(1, handler);

	//mock a free block request to freak out the dataset controller
	dataset.FreeBlock(1, handler);

	IDictionary<string, INDArray> block0 = await fetch0;
	IDictionary<string, INDArray> block1 = await fetch1;
	IDictionary<string, INDArray> block2 = await fetch2;

	// Despite the interleaved free, every block must arrive with the right contents.
	Assert.IsNotNull(block1);
	Assert.AreEqual(new[] { 3.0f, 1.4f }, block1["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	Assert.IsNotNull(block2);
	Assert.AreEqual(new[] { 3.2f, 1.3f }, block2["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	Assert.IsNotNull(block0);
	Assert.AreEqual(new[] { 3.5f, 1.4f }, block0["inputs"].GetDataAs<float>().GetValuesArrayAs<float>(0, 2));

	dataset.Dispose();
	DeleteTempFile(filename);
}