public void SplitFullExample()
{
    // Split "1,2,,,4" on the separator; the two adjacent separators produce
    // empty (zero-length) middle parts.
    byte[] text = Encoding.ASCII.GetBytes("1,2,,,4");
    using var parts = SpanUtils.SplitList(text, SEP, out var count);

    Assert.AreEqual(5, count);

    // Verify the sub-range reported for each part.
    var ranges = parts.Memory.Span;
    Assert.AreEqual(0..1, ranges[0]);
    Assert.AreEqual(2..3, ranges[1]);
    Assert.AreEqual(4..4, ranges[2]);
    Assert.AreEqual(5..5, ranges[3]);
    Assert.AreEqual(6..7, ranges[4]);
}
public void TestMeasureOverflow()
{
    // MeasureWriteSize must reject UInt32.MaxValue — presumably the size-prefix
    // encoding cannot represent values that large (TODO confirm the exact limit).
    // Note: the previous version allocated an unused byte[1]/Span<byte> pair that
    // played no part in the assertion; removed.
    Assert.Throws<ArgumentException>(() => _ = SpanUtils.MeasureWriteSize(UInt32.MaxValue));
}
/// <summary>
/// Ensure the species containing the population's best genome has a target size of at least one,
/// so that the best genome can survive to the next generation. If its target size is incremented,
/// the target size of one randomly selected other species is decremented to compensate, keeping
/// the overall target size total unchanged.
/// </summary>
/// <param name="pop">The population to operate on.</param>
/// <param name="rng">Random source, used to pick which other species gives up one allocation slot.</param>
private static void AdjustSpeciesTargetSizes_AccommodateBestGenomeSpecies(
    NeatPopulation<T> pop, IRandomSource rng)
{
    // Test if the best genome is in a species with a zero target size allocation.
    int bestGenomeSpeciesIdx = pop.NeatPopulationStats.BestGenomeSpeciesIdx;
    Species<T>[] speciesArr = pop.SpeciesArray!;
    if(speciesArr[bestGenomeSpeciesIdx].Stats.TargetSizeInt > 0)
    {
        // Nothing to do. The best genome is in a species with a non-zero allocation.
        return;
    }

    // Set the target size of the best genome species to allow the best genome to survive to the next generation.
    speciesArr[bestGenomeSpeciesIdx].Stats.TargetSizeInt++;

    // Adjust down the target size of one of the other species to compensate.
    // Pick a species at random (but not the champ species). Note that this may result in a species with a zero
    // target size, this is OK at this stage. We handle allocations of zero elsewhere.

    // Create an array of shuffled indexes to select from, i.e. all of the species except for the one
    // with the best genome in it.
    int speciesCount = speciesArr.Length;
    int[] speciesIdxArr = new int[speciesCount - 1];

    for(int i = 0; i < bestGenomeSpeciesIdx; i++)
        speciesIdxArr[i] = i;

    for(int i = bestGenomeSpeciesIdx + 1; i < speciesCount; i++)
        speciesIdxArr[i - 1] = i;

    SpanUtils.Shuffle(speciesIdxArr.AsSpan(), rng);

    // Loop the shuffled species indexes; decrement the target size of the first species found with a
    // non-zero target size.
    foreach(int speciesIdx in speciesIdxArr)
    {
        if(speciesArr[speciesIdx].Stats.TargetSizeInt > 0)
        {
            speciesArr[speciesIdx].Stats.TargetSizeInt--;
            return;
        }
    }

    // No other species had a non-zero target size to give up.
    // Changed from bare 'Exception' to the more specific 'InvalidOperationException'
    // (still caught by any caller catching Exception).
    throw new InvalidOperationException("All species have a zero target size.");
}
public void Sort_ShortArray()
{
    // Keys plus two secondary arrays that must be co-sorted alongside the keys.
    int[] keys = { 5, 8, 2, 16, 32, 12, 7 };
    int[] v = { 45, 42, 48, 24, 8, 28, 43 };
    int[] w = { 0, 1, 2, 3, 4, 5, 6 };

    IntroSort<int, int, int>.Sort(keys, v, w);

    // Keys are sorted ascending; both secondary arrays follow the same permutation.
    int[] expectedKeys = { 2, 5, 7, 8, 12, 16, 32 };
    int[] expectedV = { 48, 45, 43, 42, 28, 24, 8 };
    int[] expectedW = { 2, 0, 6, 1, 5, 3, 4 };

    Assert.True(SpanUtils.Equal<int>(expectedKeys, keys));
    Assert.True(SpanUtils.Equal<int>(expectedV, v));
    Assert.True(SpanUtils.Equal<int>(expectedW, w));
}
public void CalculateEuclideanCentroid()
{
    // Build three connection gene arrays to take a centroid over.
    var genes1 = new ConnectionGenes<double>(6);
    genes1[0] = (0, 1, 1.0);
    genes1[1] = (0, 2, 2.0);
    genes1[2] = (2, 2, 3.0);
    genes1[3] = (2, 4, 4.0);
    genes1[4] = (2, 5, 5.0);
    genes1[5] = (3, 0, 6.0);

    var genes2 = new ConnectionGenes<double>(8);
    genes2[0] = (0, 1, 10.0);
    genes2[1] = (0, 3, 20.0);
    genes2[2] = (2, 2, 30.0);
    genes2[3] = (2, 3, 40.0);
    genes2[4] = (2, 5, 50.0);
    genes2[5] = (2, 6, 60.0);
    genes2[6] = (3, 0, 70.0);
    genes2[7] = (4, 5, 80.0);

    var genes3 = new ConnectionGenes<double>(2);
    genes3[0] = (2, 5, 100.0);
    genes3[1] = (10, 20, 200.0);

    var geneArrays = new ConnectionGenes<double>[] { genes1, genes2, genes3 };

    // Calc centroid.
    ConnectionGenes<double> centroid = DistanceMetricUtils.CalculateEuclideanCentroid(geneArrays);

    // Expected centroid: the union of all connections, each weight being the sum of that
    // connection's weights across the three inputs, divided by three.
    var expected = new ConnectionGenes<double>(11);
    expected[0] = (0, 1, 11 / 3.0);
    expected[1] = (0, 2, 2 / 3.0);
    expected[2] = (0, 3, 20 / 3.0);
    expected[3] = (2, 2, 33 / 3.0);
    expected[4] = (2, 3, 40 / 3.0);
    expected[5] = (2, 4, 4 / 3.0);
    expected[6] = (2, 5, 155 / 3.0);
    expected[7] = (2, 6, 60 / 3.0);
    expected[8] = (3, 0, 76 / 3.0);
    expected[9] = (4, 5, 80 / 3.0);
    expected[10] = (10, 20, 200 / 3.0);

    Assert.True(SpanUtils.Equal<DirectedConnection>(expected._connArr, centroid._connArr));
    Assert.True(ArrayTestUtils.ConponentwiseEqual(expected._weightArr, centroid._weightArr, 1e-6));
}
public void TestSize()
{
    var rnd = new System.Random();

    for (var c = 0; c < count; c++)
    {
        var mem = new byte[sizeof(UInt32) * writeRepeatCount];
        var span1 = new Span<byte>(mem);
        var span2 = new ReadOnlySpan<byte>(mem);
        var a = new UInt32[writeRepeatCount];

        // Write a batch of values, each drawn at random from one of four magnitude
        // brackets so that all encoded sizes get exercised.
        for (var i = 0; i < writeRepeatCount; i++)
        {
            var bracket = rnd.Next(0, 4);
            uint n = bracket switch
            {
                0 => (UInt32)rnd.Next(0, 0b00111111),
                1 => (UInt32)rnd.Next(0b01000000, 0b00111111_11111111),
                2 => (UInt32)rnd.Next(0b01000000_00000000, 0b00111111_11111111_11111111),
                _ => (UInt32)rnd.Next(0b01000000_00000000_00000000, 0b00111111_11111111_11111111_11111111),
            };

            a[i] = n;
            span1.MoveWriteSize(n);
        }

        // Ensure span is not zero
        Assert.NotEqual(0, span2.ToArray().Select(b => (int)b).Sum());

        // Read everything back; each value and its measured encoded length must round-trip.
        for (var i = 0; i < writeRepeatCount; i++)
        {
            var r = span2.MoveReadSize(out var len);
            Assert.Equal(SpanUtils.MeasureWriteSize(a[i]), len);
            Assert.Equal(a[i], r);
        }
    }
}
private static void Clip_Inner(UniformDistributionSampler sampler, int len)
{
    // Fill an array of the requested length with uniform random noise.
    float[] source = new float[len];
    sampler.Sample(source);

    // Produce the expected result using the simple/safe reference routine.
    float[] expected = (float[])source.Clone();
    PointwiseClip(expected, -1.1f, 18.8f);

    // Produce the actual result using the routine under test.
    float[] actual = (float[])source.Clone();
    MathSpan.Clip(actual, -1.1f, 18.8f);

    // The two results must match element-for-element.
    Assert.True(SpanUtils.Equal<float>(expected, actual));
}
private static void Clip_Inner(ISampler<int> sampler, int len)
{
    // Fill an array of the requested length with uniform random noise.
    int[] source = new int[len];
    sampler.Sample(source);

    // Produce the expected result using the simple/safe reference routine.
    int[] expected = (int[])source.Clone();
    PointwiseClip(expected, -1, 18);

    // Produce the actual result using the routine under test.
    int[] actual = (int[])source.Clone();
    MathSpan.Clip(actual, -1, 18);

    // The two results must match element-for-element.
    Assert.True(SpanUtils.Equal<int>(expected, actual));
}
/// <summary>
/// Validation tests on an array of hidden node IDs and an associated array of connections.
/// </summary>
/// <param name="hiddenNodeIdArr">Array of hidden node IDs.</param>
/// <param name="connArr">Array of connections.</param>
/// <param name="inputOutputCount">The total number of input and output nodes.</param>
/// <returns>true if the provided data is valid; otherwise false.</returns>
public static bool ValidateHiddenNodeIds(
    int[] hiddenNodeIdArr,
    DirectedConnection[] connArr,
    int inputOutputCount)
{
    // The IDs must be sorted ascending (required to allow for efficient searching of IDs using a binary search).
    if(!SortUtils.IsSortedAscending<int>(hiddenNodeIdArr))
        return false;

    // The set of hidden node IDs described by the connections must match the supplied hiddenNodeIdArr.
    int[] idArr = CreateHiddenNodeIdArray(connArr, inputOutputCount, new HashSet<int>());
    return SpanUtils.Equal<int>(idArr, hiddenNodeIdArr);
}
public void TestVLQUInt16FullRange()
{
    // Round-trip every UInt16 value through the VLQ encoder/decoder, checking the
    // decoded value and that both reported lengths agree with MeasureVLQ.
    // Fixes: (a) loop previously used 'c < UInt16.MaxValue', silently skipping 65535
    // despite the test's "FullRange" name; (b) removed an unused Random instance.
    var mem = new byte[sizeof(UInt16) + 1];

    for (int c = UInt16.MinValue; c <= UInt16.MaxValue; c++)
    {
        var answer = (UInt16)c;
        var span1 = new Span<byte>(mem);
        var span2 = new ReadOnlySpan<byte>(mem);

        span1.Fill(0);
        span1.WriteVLQ(answer, out var len);
        var back = span2.ReadVLQUInt16(out var len2);

        Assert.Equal(answer, back);
        Assert.Equal(SpanUtils.MeasureVLQ(answer), len);
        Assert.Equal(SpanUtils.MeasureVLQ(answer), len2);
    }
}
public void TestVLQInt32()
{
    var rnd = new Random();

    for (var c = 0; c < count; c++)
    {
        // Size the buffer for the worst-case VLQ encoding of every value, plus one spare byte.
        var valueCount = rnd.Next(1, 10_000);
        var mem = new byte[valueCount * SpanUtils.MeasureVLQ(UInt32.MaxValue) + 1];
        var writer = new SpanStream(mem);
        var reader = new SpanStream(mem);

        var data = new Int32[valueCount];
        for (var i = 0; i < valueCount; i++)
            data[i] = rnd.NextInt32();

        // Encode all values, then decode and verify each value and its encoded length.
        for (var i = 0; i < valueCount; i++)
            writer.WriteVLQ(data[i]);

        for (var i = 0; i < valueCount; i++)
        {
            Assert.Equal(data[i], reader.ReadVLQInt32(out var len));
            Assert.Equal(SpanUtils.MeasureVLQ(data[i]), len);
        }

        // Check overflow: a buffer of 0xFF bytes encodes a value too large for Int32.
        new Span<byte>(mem).Fill(0xFF);
        Assert.Throws<OverflowException>(() =>
        {
            var corrupt = new SpanStream(mem);
            corrupt.ReadVLQInt32(out _);
        });
    }
}
/// <summary>
/// Create a <see cref="DirectedGraphAcyclic"/> from the provided directed graph and its associated
/// node depth info, i.e. with node IDs re-assigned and connections re-ordered by node depth.
/// </summary>
/// <param name="digraph">The directed graph to convert.</param>
/// <param name="depthInfo">Node depth info for <paramref name="digraph"/>.</param>
/// <param name="newIdByOldId">Returns a mapping from old node IDs to the new depth-based node IDs.</param>
/// <param name="connectionIndexMap">Returns, for each connection in its new (sorted) position, the
/// connection's index prior to sorting.</param>
/// <param name="timsortWorkArr">Re-usable working array for the sort routine; allocated on demand.</param>
/// <param name="timsortWorkVArr">Re-usable secondary working array for the sort routine.</param>
/// <returns>A new <see cref="DirectedGraphAcyclic"/>.</returns>
public static DirectedGraphAcyclic CreateDirectedGraphAcyclic(
    DirectedGraph digraph,
    GraphDepthInfo depthInfo,
    out int[] newIdByOldId,
    out int[] connectionIndexMap,
    ref int[]? timsortWorkArr,
    ref int[]? timsortWorkVArr)
{
    int inputCount = digraph.InputCount;
    int outputCount = digraph.OutputCount;

    // Assert that all input nodes are at depth zero.
    // Any input node with a non-zero depth must have an input connection, and this is not supported.
    // NOTE(review): sibling code in this file calls SpanUtils.Equal; confirm SpanUtils.Equals is the
    // intended member here and not an accidental resolution to Object.Equals.
    Debug.Assert(SpanUtils.Equals(depthInfo._nodeDepthArr.AsSpan(0, inputCount), 0));

    // Compile a mapping from current node IDs to new IDs (based on node depth in the graph).
    newIdByOldId = CompileNodeIdMap(depthInfo, digraph.TotalNodeCount, inputCount, ref timsortWorkArr, ref timsortWorkVArr);

    // Map the connection node IDs.
    ConnectionIdArrays connIdArrays = digraph.ConnectionIdArrays;
    MapIds(connIdArrays, newIdByOldId);

    // Init connection index map (identity mapping at this point; the sort below permutes it).
    int connCount = connIdArrays.Length;
    connectionIndexMap = new int[connCount];
    for (int i = 0; i < connCount; i++)
    {
        connectionIndexMap[i] = i;
    }

    // Sort the connections based on sourceID, targetId; this will arrange the connections based on the depth
    // of the source nodes.
    // Note. This sort routine will also sort a secondary array, i.e. keep the items in both arrays aligned;
    // here we use this to create connectionIndexMap.
    ConnectionSorter<int>.Sort(connIdArrays, connectionIndexMap);

    // Make a copy of the sub-range of newIdMap that represents the output nodes.
    // This is required later to be able to locate the output nodes now that they have been sorted by depth.
    int[] outputNodeIdxArr = new int[outputCount];
    Array.Copy(newIdByOldId, inputCount, outputNodeIdxArr, 0, outputCount);

    // Create an array of LayerInfo(s).
    // Each LayerInfo contains the index + 1 of both the last node and last connection in that layer.
    //
    // The array is in order of depth, from layer zero (inputs nodes) to the last layer (usually output nodes,
    // but not necessarily if there is a dead end pathway with a high number of hops).
    //
    // Note. There is guaranteed to be at least one connection with a source at a given depth level, this is
    // because for there to be a layer N there must necessarily be a connection from a node in layer N-1
    // to a node in layer N.
    int graphDepth = depthInfo._graphDepth;
    LayerInfo[] layerInfoArr = new LayerInfo[graphDepth];

    // Note. Scanning over nodes can start at inputCount instead of zero, because all nodes prior to that index
    // are input nodes and are therefore at depth zero. (input nodes are never the target of a connection,
    // therefore are always guaranteed to be at the start of a connectivity graph, and thus at depth zero).
    int nodeCount = digraph.TotalNodeCount;
    int nodeIdx = inputCount;
    int connIdx = 0;

    int[] nodeDepthArr = depthInfo._nodeDepthArr;
    int[] srcIdArr = connIdArrays._sourceIdArr;

    for (int currDepth = 0; currDepth < graphDepth; currDepth++)
    {
        // Scan for last node at the current depth. (Relies on nodes being ordered by depth after the re-mapping.)
        for (; nodeIdx < nodeCount && nodeDepthArr[nodeIdx] == currDepth; nodeIdx++)
        {
            ;
        }

        // Scan for last connection at the current depth. (Relies on the source-ID sort above.)
        for (; connIdx < srcIdArr.Length && nodeDepthArr[srcIdArr[connIdx]] == currDepth; connIdx++)
        {
            ;
        }

        // Store node and connection end indexes for the layer.
        layerInfoArr[currDepth] = new LayerInfo(nodeIdx, connIdx);
    }

    // Construct and return.
    return(new DirectedGraphAcyclic(
        inputCount, outputCount, nodeCount,
        connIdArrays,
        layerInfoArr,
        outputNodeIdxArr));
}
private void PerformMutationOp()
{
    // Select one mutation/verification operation at random, then apply it to
    // both streams so that stream A and stream B stay in lock-step.
    int outcome = DiscreteDistribution.Sample(_rng, _opDistribution);

    switch (outcome)
    {
        case 0: // Write.
            PerformMutationOp_Write();
            break;

        case 1: // Write byte.
        {
            byte value = (byte)_rng.Next();
            _strmA.WriteByte(value);
            _strmB.WriteByte(value);
            Debug.WriteLine("WriteByte");
            break;
        }
        case 2: // Change read/write head position.
            PerformMutationOp_Position();
            break;

        case 3: // SetLength
            PerformMutationOp_SetLength();
            break;

        case 4: // Seek
            PerformMutationOp_Seek();
            break;

        case 5: // Trim
            _strmB.Trim();
            Debug.WriteLine("Trim");
            break;

        case 6: // Read byte.
        {
            int byteA = _strmA.ReadByte();
            int byteB = _strmB.ReadByte();
            if (byteA != byteB)
                throw new Exception("ReadByte mismatch");

            Debug.WriteLine("ReadByte");
            break;
        }
        case 7: // Read
        {
            int len = _rng.Next(20_000);
            byte[] bufA = new byte[len];
            byte[] bufB = new byte[len];
            int lenA = _strmA.Read(bufA);
            int lenB = _strmB.Read(bufB);

            // Both the number of bytes read and the bytes themselves must match.
            if (lenA != lenB)
                throw new Exception("Read mismatch");
            if (!SpanUtils.Equal<byte>(bufA, bufB))
                throw new Exception("Read mismatch");

            Debug.WriteLine("Read");
            break;
        }
    }
}
public void SplitWhitespace()
{
    // A single whitespace character with no separators yields zero parts.
    byte[] text = Encoding.ASCII.GetBytes(" ");
    using var parts = SpanUtils.SplitList(text, SEP, out var count);

    Assert.AreEqual(0, count);
}
public void SplitEmptyText()
{
    // Empty input yields zero parts.
    using var parts = SpanUtils.SplitList(ReadOnlySpan<byte>.Empty, SEP, out var count);

    Assert.AreEqual(0, count);

    // The backing list still exposes a ..0 range at index 0 even though count is zero.
    // Fix: arguments were previously passed as (actual, expected); swapped to the
    // (expected, actual) order used by the sibling Split* tests so that a failure
    // message reports the values the right way round.
    Assert.AreEqual(..0, parts.Memory.Span[0]);
}