/// <summary>
/// Neural network.
/// </summary>
/// <param name="neuronsCounts">Number of neurons in each layer of the network.</param>
public Network(int[] neuronsCounts)
{
    this.LayersCount = neuronsCounts.Length;
    this.NeuronsCounts = neuronsCounts;
    // Gaussian (normal) distribution random number generator
    var pyRandom = new NumPyRandom();
    // One bias column vector per non-input layer
    this.Biases = neuronsCounts.Skip(1)
        .Select(count => pyRandom.randn(count, 1))
        .ToArray();
    // One weight matrix per non-input layer: (neurons in layer) x (neurons in previous layer)
    this.Weights = neuronsCounts.Skip(1)
        .Zip(neuronsCounts.SkipLast(1))
        .Select(counts => pyRandom.randn(counts.First, counts.Second))
        .ToArray();
}
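// A minimal usage sketch (not part of the original source): constructing a 784-30-10 network
// as in the classic MNIST example. It assumes the Network class above exposes LayersCount,
// Biases and Weights publicly; adjust to the real class definition.
public static void BuildNetworkExample()
{
    var net = new Network(new[] { 784, 30, 10 });
    // Expected: 3 layers, 2 bias vectors (30x1 and 10x1), 2 weight matrices (30x784 and 10x30).
    Console.WriteLine($"Layers: {net.LayersCount}, bias sets: {net.Biases.Length}, weight sets: {net.Weights.Length}");
}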
public Box(float low, float high, Shape shape, Type dType = null, int seed = -1)
    : base(shape, (dType = dType ?? np.float32))
{
    if (Equals(shape, null))
    {
        throw new ArgumentNullException(nameof(shape));
    }

    Low = (low + np.zeros(shape, dType)).astype(dType);
    High = (high + np.zeros(shape, dType)).astype(dType);
    RandomState = seed != -1 ? np.random.RandomState(seed) : np.random;
}
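// A minimal usage sketch (not part of the original source): a 4-dimensional box with the same
// scalar bounds in every dimension, as used for simple continuous spaces.
public static void ScalarBoxExample()
{
    var box = new Box(-1.0f, 1.0f, new Shape(4), np.float32, seed: 7);
    // Low and High are NDArrays of shape (4), filled with -1 and 1 respectively.
    Console.WriteLine(box.Low);
    Console.WriteLine(box.High);
}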
public CartPoleEnv(IEnvironmentViewerFactoryDelegate viewerFactory, NumPyRandom randomState)
{
    _viewerFactory = viewerFactory;
    // Angle limit set to 2 * theta_threshold_radians so failing observation is still within bounds
    var high = np.array(x_threshold * 2, float.MaxValue, theta_threshold_radians * 2, float.MaxValue);
    ActionSpace = new Discrete(2);
    ObservationSpace = new Box(-high, high, np.float32);
    random = randomState ?? np.random.RandomState();
    Metadata = new Dict("render.modes", new[] { "human", "rgb_array" }, "video.frames_per_second", 50);
}
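// A hedged usage sketch (not from the original source): constructing the environment without a
// renderer. Passing null for the viewer factory is assumed to be acceptable here because the
// constructor only stores it; rendering would need a real IEnvironmentViewerFactoryDelegate.
public static void CreateCartPoleExample()
{
    var env = new CartPoleEnv(viewerFactory: null, randomState: np.random.RandomState(42));
    // ActionSpace is Discrete(2): push the cart left or right.
    Console.WriteLine(env.ActionSpace);
}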
public static void Shuffle(this NumPyRandom rand, NDArrayGeneric<double> list)
{
    // Fisher-Yates shuffle along the first dimension.
    // Note: a System.Random instance generates the swap indices; the NumPyRandom
    // receiver only serves as the extension-method target.
    var rng = new Random();
    var count = list.Shape.Shapes[0];
    while (count > 1)
    {
        count--;
        var k = rng.Next(count + 1);
        var value = list[k];
        list[k] = list[count];
        list[count] = value;
    }
}
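// A hedged usage sketch (not part of the original source): shuffling a one-dimensional
// NDArrayGeneric<double> in place via the extension method above. Building the array by
// assigning Data and Shape directly is an assumption about the NDArrayGeneric API; adjust
// to however instances are actually created in the real code.
public static void ShuffleExample()
{
    var rand = new NumPyRandom();
    var data = new NDArrayGeneric<double>();
    data.Data = new double[] { 1, 2, 3, 4, 5 };   // assumed construction path
    data.Shape = new Shape(5);                    // assumed construction path
    rand.Shuffle(data);
    // The same five values, now in a random order.
    Console.WriteLine(string.Join(", ", data.Data));
}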
/// <summary>Initializes a new instance of the <see cref="Box"/> space from per-dimension lower and upper bounds.</summary>
public Box(NDArray low, NDArray high, Type dType = null, int seed = -1)
    : base(null, (dType = dType ?? np.float32))
{
    if (Equals(low, null))
    {
        throw new ArgumentNullException(nameof(low));
    }

    if (Equals(high, null))
    {
        throw new ArgumentNullException(nameof(high));
    }

    Debug.Assert(low.shape.SequenceEqual(high.shape));
    Shape = low.shape;
    Low = low.astype(dType);
    High = high.astype(dType);
    RandomState = seed != -1 ? np.random.RandomState(seed) : np.random;
}
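// A minimal usage sketch (not from the original source): per-dimension bounds, mirroring how
// CartPoleEnv above builds its observation space from a "high" vector and its negation.
// The numeric values are illustrative only.
public static void VectorBoxExample()
{
    var high = np.array(4.8f, float.MaxValue, 0.418f, float.MaxValue);
    var space = new Box(-high, high, np.float32);
    Console.WriteLine(space.Low);
    Console.WriteLine(space.High);
}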
public void testZeroFraction()
{
    var x_shape = new Shape(5, 17);
    // astype returns a new array, so the converted result has to be kept.
    var x_np = new NumPyRandom().randint(0, 2, x_shape).astype(np.float32);
    var y_np = this._ZeroFraction(x_np);

    var x_tf = constant_op.constant(x_np);
    x_tf.setShape(x_shape);
    var y_tf = nn_impl.zero_fraction(x_tf);
    var y_tf_np = self.evaluate<NDArray>(y_tf);

    var eps = 1e-8;
    self.assertAllClose(y_tf_np, y_np, eps);
}
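// A hedged sketch of the reference helper used above; the original _ZeroFraction body is not
// shown here, so this is an assumption about what it computes: the fraction of elements that
// are exactly zero, which is what nn_impl.zero_fraction is tested against.
private NDArray _ZeroFraction(NDArray x)
{
    var values = x.Storage.GetData<float>().ToArray();
    var zeroCount = values.Count(v => v == 0f);
    return np.array(new[] { (float)zeroCount / values.Length });
}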
public static NDArrayGeneric<int> Permutation(this NumPyRandom rand, int max)
{
    // Build 0..max-1, then shuffle it with a Fisher-Yates pass so every
    // permutation is equally likely.
    var random = new Random();
    var result = new NDArrayGeneric<int>().arange(max);
    for (int i = max - 1; i > 0; i--)
    {
        var pos = random.Next(0, i + 1);
        var tmp = result.Data[i];
        result.Data[i] = result.Data[pos];
        result.Data[pos] = tmp;
    }
    return result;
}
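// A minimal usage sketch (not part of the original source): drawing a random permutation of
// the indices 0..9 from the extension method above.
public static void PermutationExample()
{
    var rand = new NumPyRandom();
    var order = rand.Permutation(10);
    // Prints the ten indices in a shuffled order.
    Console.WriteLine(string.Join(", ", order.Data));
}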
/// <summary>
/// Initialize the neural network.
/// </summary>
/// <param name="neuronCounts">Collection of neuron counts, one entry per layer of the network.</param>
public Network(IEnumerable<int> neuronCounts)
{
    // Initialize the pool of neurons for every layer
    this.NeuronPool = new List<List<TNeuron>>(
        neuronCounts.Select(
            count => Enumerable.Range(0, count)
                .Select(index => Activator.CreateInstance<TNeuron>())
                .ToList())
    );

    // Gaussian (normal) distribution random number generator
    var pyRandom = new NumPyRandom();

    // Initialize biases for the non-input layers
    _ = this.NeuronPool.Skip(1).All(neurons =>
    {
        foreach (var (neuron, bias) in neurons.Zip(pyRandom.randn(new[] { neurons.Count, 1 }).Array as double[]))
        {
            neuron.Bias = bias;
        }
        return true;
    });

    // For the non-input layers, initialize the weights on the inputs coming from the previous layer's neurons
    _ = this.NeuronPool.SkipLast(1).Zip(this.NeuronPool.Skip(1)).All(neurons =>
    {
        var weights = pyRandom.randn(new int[] { neurons.Second.Count, neurons.First.Count });
        for (int index = 0; index < neurons.Second.Count; index++)
        {
            neurons.Second[index].Weight = weights[index].Array as double[];
        }
        return true;
    });
}
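// A hedged usage sketch (not from the original source). It assumes the containing class is
// declared as Network<TNeuron> and that TNeuron has a parameterless constructor with the Bias
// and Weight members assigned above; SimpleNeuron below is a hypothetical stand-in.
public class SimpleNeuron
{
    public double Bias { get; set; }
    public double[] Weight { get; set; }
}

public static void BuildGenericNetworkExample()
{
    // Three layers with 2, 3 and 1 neurons respectively.
    var net = new Network<SimpleNeuron>(new[] { 2, 3, 1 });
}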
public override void Seed(int seed)
{
    RandomState = np.random.RandomState(seed);
}
public static void NumPyTest()
{
    //int[] size = { 1, 2 };
    //var nd = new NumPyRandom().normal(0.0, 20, size);
    ////var shepherd = Function.Vec3ToNDArray(new Vector3(1, 1, 0));
    ////var res = Function.Duplicate(shepherd, 10);
    //NDArray res = Function.Duplicate(nd, 10);
    //int[] size2 = { 10, 2 };
    //var nc = new NumPyRandom().normal(0.0, 20, size2);
    //Debug.Log(res.ToString());
    //Debug.Log(nc.ToString());
    //var end = res - nc;
    //Debug.Log(end.ToString());

    //int[] size = { 1, 2 };
    //var nd = new NumPyRandom().normal(0.0, 20, size);
    //Function.Normalize(nd);
    //Debug.Log(nd.ToString());

    ///////////////////////////// Test conversion between NDArray and Vector ////////////////////////////////
    /// NDArray to Vector3
    //int[] size = { 2, 2 };
    //var nd = new NumPyRandom().normal(0.0, 20, size);
    //Vector3 vec = Function.NDArrayToVec3(nd);
    //Debug.Log(vec.ToString());
    //Debug.Log(nd);
    //Function.Normalize(nd);
    //Debug.Log(nd);
    // Vector3 to NDArray
    //var res = Function.Vec3ToNDArray(vec);
    ///////////////////////////////////////////////////////////////////////////////////////

    //////////////////////////////////// Test GetS() ////////////////////////////////////////
    //int[] size2 = { Config.N, 2 };
    //var sheeps = new NumPyRandom().normal(0.0, 20, size2);
    ////if (Generator.Instance.shepherd == null)
    ////{
    ////    return matS;
    ////}
    //var arr = Function.Vec3ToNDArray(new Vector3(60, 60, 0));
    //int[] size = { 1, 2 };
    //var temp = new NDArray(arr, size);
    //var matShepherd = Function.Duplicate(temp, sheeps.shape[0]);
    //Debug.Log(sheeps);
    //Debug.Log(matShepherd);
    //var sub = sheeps - matShepherd;
    //var matS = Function.Adjust(sub, Config.R_s);
    //Debug.Log(matS);
    //////////////////////////////////// Test GetS() ////////////////////////////////////////

    //////////////////////////////////// Test Mean() ////////////////////////////////////////
    int[] size = { 3, 2 };
    var nd = new NumPyRandom().normal(0.0, 20, size);
    Debug.Log(nd);
    var mean = Global.Mean(nd);
    Debug.Log(mean);
    //////////////////////////////////// Test Mean() ////////////////////////////////////////

    Vector3 GCM = Global.NDArrayToVec3(mean);
    Debug.Log(GCM);
}
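// A hedged sketch of one helper exercised above; the real body of Global.NDArrayToVec3 is not
// shown here, so this is an assumption about its behavior: it reads the two components of a
// mean position array into a Unity Vector3 with z left at 0.
public static Vector3 NDArrayToVec3(NDArray nd)
{
    var data = nd.Storage.GetData<double>();
    return new Vector3((float)data[0], (float)data[1], 0f);
}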
public void normal()
{
    var n = new NumPyRandom().normal(0, 1, 5);
}
public void randn()
{
    var n = new NumPyRandom().randn(5, 2);
}
/// <summary>Initializes a new instance of the <see cref="Discrete"/> space with <paramref name="n"/> possible actions.</summary>
public Discrete(int n, Type dType = null, int seed = -1)
    : base(new Shape(n), (dType = dType ?? np.float32))
{
    N = n;
    RandomState = seed != -1 ? np.random.RandomState(seed) : np.random;
}
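// A minimal usage sketch (not part of the original source): the two-action space used by
// CartPoleEnv above. The explicit seed is an optional assumption for reproducible sampling.
public static void DiscreteExample()
{
    var actions = new Discrete(2, seed: 123);
    // N is the number of discrete actions; RandomState drives any sampling the space performs.
    Console.WriteLine(actions.N);
}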
public void StartTest()
{
    int training_epochs = 1000;
    // Parameters
    float learning_rate = 0.01f;
    int display_step = 50;

    NumPyRandom rng = np.random;
    NDArray train_X, train_Y;
    int n_samples;

    train_X = np.array(3.3f, 4.4f, 5.5f, 6.71f, 6.93f, 4.168f, 9.779f, 6.182f, 7.59f, 2.167f,
                       7.042f, 10.791f, 5.313f, 7.997f, 5.654f, 9.27f, 3.1f);
    train_Y = np.array(1.7f, 2.76f, 2.09f, 3.19f, 1.694f, 1.573f, 3.366f, 2.596f, 2.53f, 1.221f,
                       2.827f, 3.465f, 1.65f, 2.904f, 2.42f, 2.94f, 1.3f);
    n_samples = train_X.shape[0];

    // tf Graph Input
    var X = tf.placeholder(tf.float32);
    var Y = tf.placeholder(tf.float32);

    // Set model weights
    // We can set a fixed init value in order to debug
    // var rnd1 = rng.randn<float>();
    // var rnd2 = rng.randn<float>();
    var W = tf.Variable(-0.06f, name: "weight");
    var b = tf.Variable(-0.73f, name: "bias");

    // Construct a linear model
    var pred = tf.add(tf.multiply(X, W), b);

    // Mean squared error
    var cost = tf.reduce_sum(tf.pow(pred - Y, 2.0f)) / (2.0f * n_samples);

    // Gradient descent
    // Note, minimize() knows to modify W and b because Variable objects are trainable=True by default
    var optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost);

    // Initialize the variables (i.e. assign their default value)
    var init = tf.global_variables_initializer();

    //var config = tf.ConfigProto('CPU':0);
    // Start training
    //var config = ConfigProto.IntraOpParallelismThreadsFieldNumber;
    using (var sess = tf.Session())
    {
        // Run the initializer
        sess.run(init);

        // Fit all training data
        for (int epoch = 0; epoch < training_epochs; epoch++)
        {
            foreach (var (x, y) in zip<float>(train_X, train_Y))
            {
                sess.run(optimizer, new FeedItem(X, x), new FeedItem(Y, y));
            }

            // Display logs per epoch step
            if ((epoch + 1) % display_step == 0)
            {
                var c = sess.run(cost, new FeedItem(X, train_X), new FeedItem(Y, train_Y));
                Console.WriteLine($"Epoch: {epoch + 1} cost={c} " +
                                  $"W={sess.run(W)} b={sess.run(b)}");
            }
        }

        Console.WriteLine("Optimization Finished!");
        var training_cost = sess.run(cost, new FeedItem(X, train_X), new FeedItem(Y, train_Y));

        //var plotter = new Plotter();
        //plotter.Plot(
        //    train_X,
        //    train_Y,
        //    "Original data", ChartType.Scatter, "markers");
        //plotter.Plot(
        //    train_X,
        //    sess.run(W) * train_X + sess.run(b),
        //    "Fitted line", ChartType.Scatter, "Fitted line");
        //plotter.Show();

        // Testing example
        var test_X = np.array(6.83f, 4.668f, 8.9f, 7.91f, 5.7f, 8.7f, 3.1f, 2.1f);
        var test_Y = np.array(1.84f, 2.273f, 3.2f, 2.831f, 2.92f, 3.24f, 1.35f, 1.03f);

        Console.WriteLine("Testing... (Mean square loss Comparison)");
        var testing_cost = sess.run(tf.reduce_sum(tf.pow(pred - Y, 2.0f)) / (2.0f * test_X.shape[0]),
                                    new FeedItem(X, test_X), new FeedItem(Y, test_Y));
        Console.WriteLine($"Testing cost={testing_cost}");
        var diff = Math.Abs((float)training_cost - (float)testing_cost);
        Console.WriteLine($"Absolute mean square loss difference: {diff}");

        //plotter.Plot(
        //    test_X,
        //    test_Y,
        //    "Testing data", ChartType.Scatter, "markers");
        //plotter.Plot(
        //    train_X,
        //    sess.run(W) * train_X + sess.run(b),
        //    "Fitted line", ChartType.Scatter);
        //plotter.Show();

        //return diff < 0.01;
    }
}
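// A hedged companion sketch (not part of the original example): the closed-form least-squares
// line for the same training data, computed with plain arithmetic as a sanity check on the
// W and b values that gradient descent converges to. The method name and the local arrays are
// illustrative assumptions, not part of the real code.
public static void ClosedFormFitExample()
{
    float[] xs = { 3.3f, 4.4f, 5.5f, 6.71f, 6.93f, 4.168f, 9.779f, 6.182f, 7.59f, 2.167f,
                   7.042f, 10.791f, 5.313f, 7.997f, 5.654f, 9.27f, 3.1f };
    float[] ys = { 1.7f, 2.76f, 2.09f, 3.19f, 1.694f, 1.573f, 3.366f, 2.596f, 2.53f, 1.221f,
                   2.827f, 3.465f, 1.65f, 2.904f, 2.42f, 2.94f, 1.3f };

    float meanX = xs.Average();
    float meanY = ys.Average();
    // Slope = cov(x, y) / var(x); the intercept follows from the means.
    float cov = xs.Zip(ys, (x, y) => (x - meanX) * (y - meanY)).Sum();
    float varX = xs.Select(x => (x - meanX) * (x - meanX)).Sum();
    float w = cov / varX;
    float intercept = meanY - w * meanX;
    Console.WriteLine($"closed-form W={w} b={intercept}");
}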
public void randint()
{
    var a = new NumPyRandom().randint(low: 0, high: 10, shape: new Shape(5, 5));
    // All 25 sampled values fall in [0, 10).
    Assert.IsTrue(a.Storage.GetData<int>().Count(x => x < 10) == 25);
}