public Form1()
{
    InitializeComponent();

    // Restore the previously trained network from the bundled JSON model file.
    var serializedNet = File.ReadAllText(@"ConvNetModel.json");
    _net = SerializationExtensions.FromJson<double>(serializedNet);
}
public void NetSerialization()
{
    // Assemble a network that exercises every layer type we serialize.
    var network = new Net<double>();
    network.AddLayer(new InputLayer(28, 28, 1));
    network.AddLayer(new ConvLayer(5, 5, 8) { Stride = 1, Pad = 2, BiasPref = 0.1 });
    network.AddLayer(new ReluLayer());
    network.AddLayer(new PoolLayer(2, 2) { Stride = 2 });
    network.AddLayer(new SigmoidLayer());
    network.AddLayer(new TanhLayer());
    network.AddLayer(new FullyConnLayer(10) { BiasPref = 0.2 });
    network.AddLayer(new SoftmaxLayer(10));

    // Round-trip the network through its JSON representation.
    var serialized = network.ToJson();
    var roundTripped = SerializationExtensions.FromJson<double>(serialized);
}
/// <summary>
/// Deserializes a posted Contact row, gives it a random name, persists the change,
/// then re-selects the row (joined to its Account) and returns it as JSON.
/// </summary>
/// <param name="rowData">JSON-serialized <see cref="RowData"/> for a Contact row.</param>
/// <returns>The refreshed row as "application/json", or a placeholder message on failure.</returns>
public ContentResult Update(string rowData)
{
    RowData row = SerializationExtensions.FromJson<RowData>(rowData);
    row.LoadMetadata();
    // Overwrite the Name column with a random value before saving.
    row.Column("Name", "Random " + Guid.NewGuid().ToString());
    // Update(false, true): presumably (output all columns?, only changed columns?) — TODO confirm against TinySql docs.
    SqlBuilder builder = row.Update(false, true);
    ResultTable result = builder.Execute(30, false);
    if (result.Count == 1)
    {
        // Re-select the updated contact, joining the parent account for its name.
        // NOTE(review): the second .From("Contact") after InnerJoin looks like it returns
        // scope to the base table — verify against the builder API.
        builder = SqlBuilder.Select()
            .From("Contact")
            .Column("ContactID")
            .Column("Name")
            .Column("Telephone")
            .Column("WorkEmail")
            .Column("ModifiedOn")
            .WithMetadata().InnerJoin("AccountID")
            .Column("Name", "AccountName")
            .From("Contact")
            .Where<decimal>("Contact", "ContactID", SqlOperators.Equal, result.First().Column<decimal>("ContactID"))
            .Builder();
        result = builder.Execute(30, false, ResultTable.DateHandlingEnum.ConvertToDate);
        row = result.First();
        return (Content(SerializationExtensions.ToJson<dynamic>(row), "application/json"));
    }
    return (Content("Hmmmm...?", "application/text"));
}
public void LoadNetworkFromFile(string filePath)
{
    // Rebuild the network from its serialized form, then attach a matching trainer.
    string serialized = File.ReadAllText(filePath);
    network = SerializationExtensions.FromJson<double>(serialized);
    trainer = GetTrainerForNetwork(network);
}
public MnistService(INetRepository netRepository)
{
    _netRepository = netRepository;

    // NOTE(review): net id 7 is hard-coded — presumably the id of the trained MNIST
    // model in the repository; confirm and consider making it configurable.
    var serialized = _netRepository.GetSingle(7).NetText;
    _net = SerializationExtensions.FromJson<double>(serialized);
}
public static MetadataDatabase CacheMetadata(string MetadataKey)
{
    // Look up the serialized metadata in the default memory cache.
    CacheItem cached = MemoryCache.Default.GetCacheItem(MetadataKey);
    if (cached == null)
    {
        // Cache miss: the caller is expected to handle null.
        return null;
    }

    return SerializationExtensions.FromJson<MetadataDatabase>(cached.Value.ToString());
}
double[] net_output = new double[8]; // buffer for the emotion network's raw outputs

public MainWindow()
{
    InitializeComponent();

    // Load the Haar cascade used for face detection.
    cascade = new CascadeClassifier(@"D:\TA171801038\Expression Recognition\Alpha5\Alpha5\haarcascade_frontalface_default.xml");

    // Load the serialized expression network and the face-recognition network.
    var expressionJson = File.ReadAllText(@"D:\TA171801038\Expression Recognition\Alpha5\Alpha5\mynetwork.json");
    fernet = SerializationExtensions.FromJson<double>(expressionJson);
    var recognitionJson = File.ReadAllText(@"D:\TA171801038\Expression Recognition\Alpha5\Alpha5\frnetwork.json");
    frnet = SerializationExtensions.FromJson<double>(recognitionJson);

    // Mark every slot of the history buffers as "no sample yet" (-1).
    for (int row = 0; row < 101; row++)
    {
        for (int col = 0; col < 5000; col++)
        {
            val_net[row, col] = -1;
            val_ang[row, col] = -1;
            val_dis[row, col] = -1;
            val_fea[row, col] = -1;
            val_hap[row, col] = -1;
            val_sad[row, col] = -1;
            val_sur[row, col] = -1;
        }
    }

    // Fresh chart series, one per tracked emotion plus the aggregate.
    cv_net1 = new ChartValues<double>();
    cv_ang1 = new ChartValues<double>();
    cv_dis1 = new ChartValues<double>();
    cv_fea1 = new ChartValues<double>();
    cv_hap1 = new ChartValues<double>();
    cv_sad1 = new ChartValues<double>();
    cv_sur1 = new ChartValues<double>();

    DataContext = this;

    // X axis maps one unit to a 15-minute bucket.
    // NOTE(review): dayConfig is never used after creation — presumably it was meant
    // to be registered with the chart (e.g. Charting.For) — confirm.
    var dayConfig = Mappers.Xy<DateModel>()
        .X(dateModel => dateModel.DateTime.Ticks / TimeSpan.FromMinutes(15).Ticks);

    // Format axis tick values back into short time strings.
    Formatter = value => new DateTime((long)(value * TimeSpan.FromMinutes(15).Ticks)).ToString("t");
}
/// <summary>
/// Lets the user pick a serialized network file and loads it into <c>_net</c>.
/// Shows an error dialog when the file cannot be read or deserialized.
/// </summary>
private void openButton_Click(object sender, EventArgs e)
{
    try
    {
        if (openFileDialog1.ShowDialog() != DialogResult.OK)
        {
            return;
        }

        var jsonText = File.ReadAllText(openFileDialog1.FileName);
        _net = SerializationExtensions.FromJson<double>(jsonText);
    }
    catch (Exception ex)
    {
        // Surface the failure reason instead of discarding the caught exception
        // (the original left 'ex' unused and gave the user no detail).
        MessageBox.Show("Invalid file: " + ex.Message);
    }
}
/// <summary>
/// This sample shows how to serialize and deserialize a ConvNetSharp.Core network
/// 1) Network creation
/// 2) Dummy Training (only use a single data point)
/// 3) Serialization
/// 4) Deserialization
/// </summary>
private static void Main()
{
    // 1) Network creation: 2 inputs -> 20 hidden (relu) -> 10-way softmax.
    var network = new Net<double>();
    network.AddLayer(new InputLayer(1, 1, 2));
    network.AddLayer(new FullyConnLayer(20));
    network.AddLayer(new ReluLayer());
    network.AddLayer(new FullyConnLayer(10));
    network.AddLayer(new SoftmaxLayer(10));

    // 2) Dummy training on a single data point labelled as class zero.
    var input = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
    var expected = BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(10));

    var iterations = 0;
    var sgd = new SgdTrainer(network) { LearningRate = 0.01 };
    for (;;)
    {
        sgd.Train(input, expected); // train the network, specifying that input is class zero
        Console.WriteLine($"Loss: {sgd.Loss}");
        iterations++;
        if (sgd.Loss <= 1e-2)
        {
            break;
        }
    }

    Console.WriteLine($"{iterations}");

    // Forward pass with the original network.
    var probBefore = network.Forward(input);
    Console.WriteLine("probability that x is class 0: " + probBefore.Get(0));

    // 3) Serialization / 4) Deserialization round trip.
    var serialized = network.ToJson();
    var restored = SerializationExtensions.FromJson<double>(serialized);

    // Forward pass with the deserialized network — must match the original exactly.
    var probAfter = restored.Forward(input);
    Console.WriteLine("probability that x is class 0: " + probAfter.Get(0));

    Console.ReadLine();
}
public void ExecuteAndSerialize()
{
    // Time the query execution + serialization round.
    Guid timer = StopWatch.Start();
    SqlBuilder builder = SqlBuilder.Select(5)
        .From("Account")
        .AllColumns(false)
        .Builder;
    Console.WriteLine(builder.ToSql());
    ResultTable rows = builder.Execute();
    string json = SerializationExtensions.ToJson<ResultTable>(rows, true);
    Console.WriteLine("{0} rows executed and serialized in {1}ms", rows.Count, StopWatch.Stop(timer, StopWatch.WatchTypes.Milliseconds));

    // Time the deserialization of the same payload.
    timer = StopWatch.Start();
    rows = SerializationExtensions.FromJson<ResultTable>(json);
    Console.WriteLine("{0} rows de-serialized in {1}ms", rows.Count, StopWatch.Stop(timer, StopWatch.WatchTypes.Milliseconds));
    Console.WriteLine(json);
}
public void ExecuteDeepAndSerialize()
{
    ResultTable result = ExecuteDeepInternal();
    string tempFile = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName() + ".json");

    // Serialize the result set to a temp file and time it.
    Guid timer = StopWatch.Start();
    File.WriteAllText(tempFile, TinySql.Serialization.SerializationExtensions.ToJson<ResultTable>(result));
    Console.WriteLine(StopWatch.Stop(timer, StopWatch.WatchTypes.Milliseconds, "Results serialized in {0}ms"));

    // Read it back and time the deserialization.
    timer = StopWatch.Start();
    ResultTable deserialized = SerializationExtensions.FromJson<ResultTable>(File.ReadAllText(tempFile));
    Console.WriteLine(StopWatch.Stop(timer, StopWatch.WatchTypes.Milliseconds, "Results deserialized in {0}ms"));

    FileInfo info = new FileInfo(tempFile);
    Console.WriteLine("The File is {0:0.00}MB in size", (double)info.Length / (double)(1024 * 1024));
    info.Delete();

    // The temp file must be gone and the round trip must preserve the row count.
    Assert.IsFalse(File.Exists(tempFile));
    Assert.IsTrue(result.Count == deserialized.Count);
}
/// <summary>
/// Loads a serialized CNN from <paramref name="path"/> and makes it the active network.
/// </summary>
/// <param name="path">Path to the JSON model file.</param>
/// <returns>The number of layers in the loaded network, or -1 when the path is empty/missing.</returns>
public int LoadCNN(string path)
{
    // Guard clause: also rejects null paths (the original 'path != ""' let null
    // fall through to File.Exists, which happened to return false).
    if (string.IsNullOrEmpty(path) || !File.Exists(path))
    {
        return -1;
    }

    // (Removed the redundant 'loadedJson = "";' pre-assignment.)
    loadedJson = File.ReadAllText(path);
    var deserialized = SerializationExtensions.FromJson<double>(loadedJson);
    this.net = deserialized;
    isNetLearned = true;
    this.path = path;

    // The last layer's OutputDepth is used as the class count.
    classes = net.Layers[GetLayersCount() - 1].OutputDepth;
    return net.Layers.Count;
}
public void FluentNetSerialization()
{
    // Build a network with the fluent API, round-trip it through JSON,
    // and check that all nine layers survive deserialization.
    var network = FluentNet<double>.Create(24, 24, 1)
        .Conv(5, 5, 8).Stride(1).Pad(2)
        .Relu()
        .Pool(2, 2).Stride(2)
        .Conv(5, 5, 16).Stride(1).Pad(2)
        .Relu()
        .Pool(3, 3).Stride(3)
        .FullyConn(10)
        .Softmax(10)
        .Build();

    var serialized = network.ToJson();
    var restored = SerializationExtensions.FromJson<double>(serialized);

    Assert.AreEqual(9, restored.Layers.Count);
}
// DQN-style trading trainer: prompts for a CSV history file, builds a main and a
// target network, then loops forever over epochs. Each step forwards a sliding
// window of market data through the main net, records the chosen action as a
// State transition, trains via experience replay, and periodically syncs the
// target net, logs progress, and checkpoints both models to disk.
// NOTE(review): the loop never terminates on its own (do/while(true)) — presumably
// stopped externally; confirm.
private void Main()
{
    BuilderInstance<double>.Volume = new VolumeBuilder();
    modelPath = projectPath + "/model/";
    dataPath = projectPath + "/History/";

    // Prompt until a readable CSV file name is entered. (Prompts are Korean:
    // "enter the data to train on" / "enter a valid file" / "file does not exist".)
    while (true)
    {
        Console.WriteLine("학습할 데이터를 입력.");
        var line = Console.ReadLine();
        if (File.Exists(dataPath + line))
        {
            try
            {
                data = CSVToList.Read(dataPath + line);
                parms = line.Replace(".csv", "_");
                break;
            }
            catch (Exception e)
            {
                Console.WriteLine("올바른 파일을 입력.");
            }
        }
        else
        {
            Console.WriteLine("파일이 존재하지 않습니다.");
        }
    }

    dataSize = data[0].Count;
    // +1 accounts for the inventory-count feature appended to each input below.
    screenDepth = dataSize * range + 1;
    mainNet = new Net<double>();
    targetNet = new Net<double>();
    BuildNetwork(mainNet);
    BuildNetwork(targetNet);
    trainer = new SgdTrainer(mainNet) { LearningRate = 0.01, BatchSize = batchSize };
    StringBuilder log = new StringBuilder();
    // Timestamp the run so its logs/models don't collide with earlier runs.
    parms += DateTime.Now.ToString("yyyy_MM_dd_HH_mm_ss");

    do
    {
        epoch++;
        double totalProfit = 0.0, totalGain = 0.0, totalLoss = 0.0, totalSpent = 0.0;
        // Slide a window of 'range' ticks over the history.
        for (int fromTime = 0; data.Count > fromTime + range; fromTime++)
        {
            episode++;
            var toTime = fromTime + range;
            var input = GetData(fromTime, toTime);
            // Append current holdings count as an extra feature.
            input.Add(inventory.Count);
            var inputVol = ToVolume(input);
            var result = mainNet.Forward(inputVol);
            var state = new State();
            state.state = input;
            state.action = GetAction(result, false);
            state.done = toTime < data.Count ? false : true;
            if (state.action == BUY)
            {
                // Buy at the period LOW; position is held in 'inventory'.
                state.price = data[toTime][LOW];
                inventory.Add(state);
                totalSpent += state.price;
            }
            else if (state.action == SELL)
            {
                // Sell everything at the period HIGH; reward is the summed spread.
                // NOTE(review): reward is buy price minus sell price (i.price - state.price),
                // which looks inverted for a profit measure — confirm intent.
                state.price = data[toTime][HIGH];
                var reward = 0.0;
                inventory.ForEach(i => reward += (i.price - state.price));
                inventory.Clear();
                state.reward = reward;
                totalProfit += reward;
                if (reward > 0) { totalGain += reward; } else { totalLoss += reward; }
            }
            else if (state.action == HOLD)
            {
                state.price = data[toTime][LOW];
            }
            // Next state: one tick forward, or the same input on the terminal step.
            var nextInput = state.done ? input : GetData(fromTime + 1, toTime + 1);
            nextInput.Add(inventory.Count);
            state.next_state = nextInput;
            stateMemory.Add(state);
            // Train once enough transitions are buffered.
            if (stateMemory.Count > batchSize) { ExperienceReplay(); }
            // Sync the target network from the main network every 10 episodes.
            if (episode % 10 == 0 && age > 0)
            {
                targetNet = SerializationExtensions.FromJson<double>(mainNet.ToJson());
            }
            var _loss = averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0;
            log.AppendLine($"{epoch},{episode},{state.action},{state.price},{totalSpent},{totalGain},{totalLoss},{totalProfit},{totalProfit / fromTime},{age},{_loss},{_loss / batchSize}");
            // Every 1000 episodes: print progress, flush logs, checkpoint both models.
            if (episode % 1000 == 0)
            {
                Console.WriteLine($"{parms}\n" + $"epoch: {epoch}\n" + $"episode: {episode}\n" + $"totalGain: {totalGain}\n" + $"totalLoss: {totalLoss}\n" + $"totalSpent: {totalSpent}\n" + $"totalProfit: {totalProfit}\n" + $"av.Profit: {totalProfit / fromTime}\n" + $"av.Loss: {(averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0)}");
                File.AppendAllText(projectPath + "/log/log" + parms + ".log", log.ToString());
                log.Clear();
                File.AppendAllText(projectPath + "/log/loss" + parms + ".log", lossWriter.ToString());
                lossWriter.Clear();
                File.WriteAllText(projectPath + "/model/main" + parms + ".model", mainNet.ToJson());
                File.WriteAllText(projectPath + "/model/target" + parms + ".model", targetNet.ToJson());
            }
        }
        // End of epoch: summary, final log flush, reset per-epoch buffers, mail the results.
        Console.WriteLine("------------------------------");
        Console.WriteLine($"epoch: {epoch}\n" + $"episode: {episode}\n" + $"totalGain: {totalGain}\n" + $"totalLoss: {totalLoss}\n" + $"totalSpent: {totalSpent}\n" + $"profit: {totalProfit}\n" + $"av.Profit: {totalProfit/dataSize}\n" + $"av.Loss: {(averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0)}\n");
        Console.WriteLine("------------------------------");
        File.AppendAllText(projectPath + "/log/log" + parms + ".log", log.ToString());
        log.Clear();
        File.AppendAllText(projectPath + "/log/loss" + parms + ".log", lossWriter.ToString());
        lossWriter.Clear();
        inventory.Clear();
        stateMemory.Clear();
        averageLossMemory.Clear();
        qda.Net.SendMail.Send(subject: $"{parms}", body: $"epoch: {epoch}\n" + $"episode: {episode}\n" + $"totalGain: {totalGain}\n" + $"totalLoss: {totalLoss}\n" + $"totalSpent: {totalSpent}\n" + $"av.Profit: {totalProfit}\n" + $"av.Loss: {(averageLossMemory.Count > 0 ? averageLossMemory.Average() : 0)}\n");
    } while (true);
}
/// <summary>
/// Trains a convolutional network on MNIST until a key is pressed, then uploads
/// the serialized network to the service.
/// </summary>
/// <param name="creatNew">When true, builds a fresh network; otherwise resumes from the service.</param>
/// <param name="trainId">Id of the stored network to resume from when <paramref name="creatNew"/> is false.</param>
private void MnistDemo(bool creatNew = true, int trainId = 1)
{
    var datasets = new DataSets();
    if (!datasets.Load(100))
    {
        return;
    }

    // Create network
    if (creatNew)
    {
        this._net = new Net<double>();
        this._net.AddLayer(new InputLayer(28, 28, 1));
        this._net.AddLayer(new ConvLayer(5, 5, 8) { Stride = 1, Pad = 2 });
        this._net.AddLayer(new ReluLayer());
        this._net.AddLayer(new PoolLayer(2, 2) { Stride = 2 });
        this._net.AddLayer(new ConvLayer(5, 5, 16) { Stride = 1, Pad = 2 });
        this._net.AddLayer(new ReluLayer());
        this._net.AddLayer(new PoolLayer(3, 3) { Stride = 3 });
        this._net.AddLayer(new FullyConnLayer(10));
        this._net.AddLayer(new SoftmaxLayer(10));
    }
    else
    {
        // Resume from a network previously uploaded to the service.
        HttpClient httpClient = new HttpClient();
        var res = httpClient.GetStringAsync($"{url}/api/nets/net/{trainId}").Result;
        var net = JsonConvert.DeserializeObject<Net>(res);
        this._net = SerializationExtensions.FromJson<double>(net.NetText);
    }

    this._trainer = new SgdTrainer<double>(this._net)
    {
        LearningRate = 0.01,
        BatchSize = 20,
        L2Decay = 0.001,
        Momentum = 0.9
    };

    Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
    do
    {
        var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
        Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

        var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
        Test(testSample.Item1, testSample.Item3, this._testAccWindow);

        Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
            Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
            Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));
        Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
            Math.Round(this._trainer.ForwardTimeMs, 2),
            Math.Round(this._trainer.BackwardTimeMs, 2));
    } while (!Console.KeyAvailable);

    // Upload the training result to the service (up to 3 attempts).
    Task.Run(() =>
    {
        var step = 3;
        var json = _net.ToJson();
        // BUG FIX: the URL was the verbatim string @"{url}/api/nets/AddNet", so the
        // literal text "{url}" was posted to instead of the service address — it must
        // be an interpolated string. Also reuse a single HttpClient across retries.
        var client = new HttpClient();
        while (step > 0)
        {
            var x = client.PostAsync($"{url}/api/nets/AddNet",
                new StringContent(JsonConvert.SerializeObject(new { NetText = json }), Encoding.UTF8, "application/json"));
            x.Wait();
            if (x.Result.StatusCode == System.Net.HttpStatusCode.OK)
            {
                break;
            }
            step--;
        }
    }).Wait();
}
/// <summary>
/// Back-tests a previously trained trading network against the BTC_USD test history,
/// replaying BUY/SELL/HOLD decisions and printing the aggregate profit figures.
/// </summary>
private void Main()
{
    modelPath = projectPath + "/model/";
    data = CSVToList.Read(projectPath + "/History/BTC_USD_TEST.csv");
    dataSize = data[0].Count;
    screenDepth = dataSize * range;

    // Load the serialized trading network checkpoint.
    var json = File.ReadAllText(modelPath + "main2020_03_06_04_52_44.model");
    var net = SerializationExtensions.FromJson<double>(json);

    double totalProfit = 0.0, totalGain = 0.0, totalLoss = 0.0, totalSpent = 0.0;
    for (int i = 0; i < data.Count; i++)
    {
        for (int fromTime = 0; fromTime < dataSize; fromTime++)
        {
            var toTime = fromTime + range;
            var input = GetData(fromTime, toTime);
            var inputVol = ToVolume(input);
            var result = net.Forward(inputVol);

            var state = new State();
            state.state = input;
            state.action = GetAction(result, false);
            state.done = toTime >= data.Count; // terminal once the window passes the data end
            state.next_state = state.done ? input : GetData(fromTime + 1, toTime + 1);

            if (state.action == BUY)
            {
                // Buy at the period LOW and hold it in inventory.
                state.price = data[toTime][LOW];
                inventory.Add(state);
                totalSpent += state.price;
            }
            else if (state.action == SELL && inventory.Count > 0)
            {
                // Liquidate all holdings at the period HIGH.
                state.price = data[toTime][HIGH];
                var reward = 0.0;
                // BUG FIX: the lambda parameter was named 'i', colliding with the outer
                // loop variable 'i' — that does not compile (CS0136). Renamed to 'held'.
                inventory.ForEach(held => reward += (held.price - state.price));
                inventory.Clear();
                state.reward = reward;
                totalProfit += reward;
                if (reward > 0)
                {
                    totalGain += reward;
                }
                else
                {
                    totalLoss += reward;
                }
            }
            else if (state.action == HOLD)
            {
                state.price = data[toTime][LOW];
            }

            stateMemory.Add(state);
            Console.WriteLine($"{ActionToString(state.action)}: {state.price} reward: {state.reward}\n");
        }
    }

    Console.WriteLine($"totalGain: {totalGain}\n" + $"totalLoss: {totalLoss}\n" + $"totalSpent: {totalSpent}\n" + $"totalProfit: {totalProfit}\n" + $"av.Profit: {totalProfit / episode}\n");
    Console.WriteLine(data.Count);
}
/// <summary>
/// Main training module: runs the DQN loop against the game environment,
/// choosing actions epsilon-greedily and storing transitions in replay memory.
/// </summary>
/// <param name="cfg">Model configurations.</param>
/// <param name="gameState">Game State module with access to game environment and dino.</param>
/// <param name="justPlay">Flag to indicate whether the model should only play (no weight updates); when false the model is trained.</param>
public void TrainModel(GameModelConfig cfg, GameState gameState, bool justPlay = false)
{
    CacheUtils.InitCache(
        ("epsilon", cfg.InitialEpsilon),
        ("time", 0),
        ("D", new Queue<(Volume<double>, int, double, Volume<double>, bool)>())); //initial variable caching, done only once
    var model = BuildModel(cfg);
    var lastTime = DateTime.Now;
    //store the previous observations in replay memory
    var D = CacheUtils.LoadObj<Queue<(Volume<double>, int, double, Volume<double>, bool)>>("D"); //load from file system
    // get the first state by doing nothing
    var do_nothing = new double[cfg.Actions];
    do_nothing[0] = 1; //0 => do nothing, //1 => jump
    var (x_t, r_0, terminal) = gameState.GetState(do_nothing, cfg.InputWidth, cfg.InputHeight); //get next step after performing the action
    // Stack the first frame 4 times to form the initial input volume.
    var s_t = BuilderInstance.Volume.From(x_t.Repeat(4), new Shape(cfg.InputWidth, cfg.InputHeight, cfg.ImageChannels));
    //s_t.ReShape(1, cfg.ImageRows, cfg.ImageCols, cfg.ImageChannels);
    var initial_state = x_t;
    double observe;
    double epsilon;
    // NOTE(review): this immediately overwrites the network from BuildModel(cfg) with
    // the serialized model on disk — presumably intentional resume behavior, but it
    // makes BuildModel's result dead; confirm.
    model = SerializationExtensions.FromJson<double>(File.ReadAllText(modelPath));
    var trainer = new AdamTrainer(model) { LearningRate = cfg.LearningRate };
    if (justPlay)
    {
        observe = 999999999; //We keep observe, never train
        epsilon = cfg.FinalEpsilon;
    }
    else //We go to training mode
    {
        observe = cfg.Observation;
        epsilon = CacheUtils.LoadObj<double>("epsilon");
    }
    int t = CacheUtils.LoadObj<int>("time"); // resume from the previous time step stored in file system
    while (true) //endless running
    {
        double loss = 0;
        double Q_sa = 0;
        int action_index = 0;
        double r_t = 0; //reward at t
        var a_t = new double[cfg.Actions]; //action at t
        //choose an action epsilon greedy
        if (t % cfg.FramePerAction == 0) //parameter to skip frames for actions
        {
            if (_random.NextDouble() <= epsilon) //randomly explore an action
            {
                _logger.LogInformation("----------Random Action----------");
                action_index = _random.Next(cfg.Actions);
                a_t[action_index] = 1;
            }
            else //predict the output
            {
                model.Forward(s_t); //input a stack of 4 images, get the prediction
                var q = model.GetPrediction();
                action_index = q[0]; //chosing index with maximum q value
                a_t[action_index] = 1; //0 => do nothing, 1 => jump
            }
        }
        //We reduced the epsilon (exploration parameter) gradually
        if (epsilon > cfg.FinalEpsilon && t > observe)
        {
            epsilon -= (cfg.InitialEpsilon - cfg.FinalEpsilon) / cfg.Explore;
        }
        //run the selected action and observed next state and reward
        double[] x_t1;
        (x_t1, r_t, terminal) = gameState.GetState(a_t, cfg.InputWidth, cfg.InputHeight);
        _logger.LogInformation($"fps: { 1 / (DateTime.Now - lastTime).TotalSeconds }"); //helpful for measuring frame rate
        lastTime = DateTime.Now;
        var s_t1 = BuilderInstance.Volume.From(s_t.ToArray().StackAndShift(x_t1), s_t.Shape); //append the new image to input stack and remove the first one
        //store the transition in D
        D.Enqueue((s_t, action_index, r_t, s_t1, terminal));
        if (D.Count > cfg.ReplayMemory)
        {
            // Bound the replay memory: drop the oldest transition.
            D.Dequeue();
        }
        //only train if done observing
        if (t > observe)
        {
            // NOTE(review): the minibatch training step is not implemented yet —
            // the loop currently only collects transitions.
            //var minibatch
        }
    }
}
/// <summary>
/// Deserializes a posted row, persists the changes, then re-selects the row using
/// the requested list layout and returns it as JSON.
/// </summary>
/// <param name="rowData">JSON-serialized <see cref="RowData"/>.</param>
/// <param name="Table">Table name (kept for the action signature; the row carries its own metadata).</param>
/// <param name="ListType">Which predefined list layout to re-select with.</param>
/// <param name="ListName">Custom list name, used when <paramref name="ListType"/> is <c>Custom</c>.</param>
/// <returns>The refreshed row as "application/json", or empty content on failure.</returns>
public ContentResult Save(string rowData, string Table, ListTypes ListType, string ListName)
{
    RowData row = SerializationExtensions.FromJson<RowData>(rowData);
    row.LoadMetadata();
    row.LoadMissingColumns<bool>();

    // Update(false, true): presumably (all columns?, changed only?) — TODO confirm against TinySql docs.
    SqlBuilder builder = row.Update(false, true);
    ResultTable result = builder.Execute();
    if (result.Count == 1)
    {
        // Re-select with the list layout so the client receives server-computed values.
        builder = row.Select(ListType != ListTypes.Custom ? ListType.ToString() : ListName);
        result = builder.Execute(30, false, ResultTable.DateHandlingEnum.ConvertToDate);
        if (result.Count == 1)
        {
            return Content(SerializationExtensions.ToJson<dynamic>(result.First()), "application/json");
        }
    }

    // (Removed ~40 lines of dead commented-out code: two superseded overload
    // signatures and an old FormCollection-based update path.)
    return Content("");
}
public static Network FromFile(string fileName)
{
    // Read the serialized net from disk and wrap it in a Network instance.
    var serialized = File.ReadAllText(fileName);
    return new Network(SerializationExtensions.FromJson<double>(serialized));
}