} // Main

/// <summary>
/// Generates a synthetic dataset by pushing random inputs through a
/// randomly weighted deep neural net (tanh hidden activation, softmax output).
/// Each result row holds the numInput input values followed by a one-hot
/// class label of length numOutput derived from the net's output probabilities.
/// </summary>
/// <param name="numItems">Number of data rows to generate.</param>
/// <param name="numInput">Number of input values per row.</param>
/// <param name="numHidden">Nodes in each hidden layer of the generator net.</param>
/// <param name="numOutput">Number of output classes (one-hot columns).</param>
/// <param name="seed">Seed for the Random object, for reproducibility.</param>
/// <returns>A numItems x (numInput + numOutput) matrix of inputs + labels.</returns>
static double[][] MakeData(int numItems, int numInput, int[] numHidden, int numOutput, int seed)
{
  DeepNet generator = new DeepNet(numInput, numHidden, numOutput); // DNN used purely as a data generator
  Random rrnd = new Random(seed); // drives both the random weights and the random input values

  // Install random weights & biases in [-9.0, +9.0) into the generator net.
  double wtLo = -9.0;
  double wtHi = 9.0;
  int totalWts = DeepNet.NumWeights(numInput, numHidden, numOutput);
  double[] randomWts = new double[totalWts];
  for (int w = 0; w < totalWts; ++w)
  {
    randomWts[w] = (wtHi - wtLo) * rrnd.NextDouble() + wtLo;
  }
  generator.SetWeights(randomWts);

  // Allocate the result matrix: one row per item, input columns then label columns.
  double[][] result = new double[numItems][];
  for (int row = 0; row < numItems; ++row)
  {
    result[row] = new double[numInput + numOutput];
  }

  double inLo = -4.0; // pseudo-Gaussian scaling
  double inHi = 4.0;

  for (int row = 0; row < numItems; ++row) // each row
  {
    // Random input values in [-4.0, +4.0).
    double[] inputs = new double[numInput];
    for (int i = 0; i < numInput; ++i)
    {
      inputs[i] = (inHi - inLo) * rrnd.NextDouble() + inLo;
    }

    // Softmax probabilities like [0.10, 0.15, 0.55, 0.20] ...
    double[] probs = generator.ComputeOutputs(inputs);
    // ... converted to a one-hot label like [0, 0, 1, 0].
    double[] outputs = ProbsToClasses(probs);

    // Pack inputs then outputs into the current row.
    int col = 0;
    for (int i = 0; i < numInput; ++i)
    {
      result[row][col++] = inputs[i];
    }
    for (int i = 0; i < numOutput; ++i)
    {
      result[row][col++] = outputs[i];
    }
  }

  return result;
} // MakeData
} // ctor

/// <summary>
/// Resets every weight and bias to a small random value in [-0.10, +0.10),
/// drawn from this object's rnd field, via SetWeights.
/// </summary>
public void InitializeWeights()
{
  const double lo = -0.10;
  const double hi = +0.10;
  int numWts = DeepNet.NumWeights(this.nInput, this.nHidden, this.nOutput);
  double[] initialWts = new double[numWts];
  for (int i = 0; i < numWts; ++i)
  {
    initialWts[i] = (hi - lo) * rnd.NextDouble() + lo;
  }
  this.SetWeights(initialWts);
}