/// <summary>
/// Downloads the housing-regression sample dataset, loads its training CSV,
/// separates features from the label column, and stores a train/test split
/// (25% held out) in the <c>traintest</c> field.
/// </summary>
public static void LoadData()
{
    var frame = new DataFrame();

    // Fetch the sample data (no-op if cached) and resolve its on-disk path.
    Downloader.DownloadSample(SampleDataset.HousingRegression);
    var samplePath = Downloader.GetSamplePath(SampleDataset.HousingRegression);

    frame.LoadFromCsv(samplePath.Train);

    // Column 14 is the label; { 1, 13 } selects the feature columns
    // (presumably an inclusive range per SiaNet's SplitXY convention — TODO confirm).
    var xy = frame.SplitXY(14, new[] { 1, 13 });

    // Hold out 25% of the rows for evaluation.
    traintest = xy.SplitTrainTest(0.25);
}
/// <summary>
/// Smoke test: synthesizes a 100x100 random feature frame with a fixed fake
/// one-hot label per row, builds a Reshape -> LSTM -> Dense model, and trains
/// it for 200 epochs against a 30% hold-out split.
/// </summary>
private static void RunTest()
{
    var rnd = new Random();

    // 100 rows of 100 random features in [0, 1).
    var trainX = new DataFrame();
    for (int row = 0; row < 100; row++)
    {
        var features = new float[100];
        for (int col = 0; col < 100; col++)
        {
            features[col] = (float)rnd.NextDouble();
        }

        trainX.Add(features);
    }

    // fake one hot just for check: every row gets label class 1 of 3.
    var trainY = new DataFrame();
    for (int row = 0; row < 100; row++)
    {
        trainY.Add(new float[] { 0, 1, 0 });
    }

    var xy = new XYFrame
    {
        XFrame = trainX,
        YFrame = trainY,
    };

    // 30% of the rows go to the test partition.
    TrainTestFrame trainTestFrame = xy.SplitTrainTest(0.3);

    int shapeOfInput = xy.XFrame.Shape[1];
    int embeddingDim = 100;
    int seed = 2;

    // Reshape flat features to (1, embeddingDim) so the LSTM sees a sequence,
    // then a 3-unit sigmoid head to match the fake one-hot labels.
    var model = new Sequential();
    model.Add(new Reshape(Shape.Create(1, embeddingDim), Shape.Create(shapeOfInput)));
    model.Add(new LSTM(
        64,
        returnSequence: false,
        cellDim: 4,
        weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
        recurrentInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed),
        biasInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));
    model.Add(new Dense(
        3,
        act: "sigmoid",
        useBias: true,
        weightInitializer: new SiaNet.Model.Initializers.GlorotUniform(0.05, seed)));

    model.Compile(OptOptimizers.Adam, OptLosses.MeanSquaredError, OptMetrics.Accuracy);

    // 200 epochs, batch size 8, validating on the hold-out partition.
    model.Train(trainTestFrame.Train, 200, 8, trainTestFrame.Test);
}