/// <summary>
/// Builds an action that appends <paramref name="s"/> to the text area,
/// discarding the oldest line once roughly 15 lines have accumulated.
/// </summary>
private Action _AddText(string s)
{
    return () =>
    {
        // Split keeps empty entries, so text ending in "\n" yields a trailing "".
        string[] buffered = _textArea.Text.Split(new[] { "\n" }, StringSplitOptions.None);

        if (buffered.Length < 16)
        {
            // Still room: just append the new line.
            _textArea.Append($"{s}\n");
            return;
        }

        // Full: rebuild the control's content without the oldest entry.
        _textArea.Text = "";

        // Shift the first 16 slots left by one, dropping entry 0.
        for (int j = 0; j + 1 < 16; j++)
        {
            buffered[j] = buffered[j + 1];
        }

        // Re-emit every non-empty entry, then the new line.
        foreach (string entry in buffered)
        {
            if (entry != "")
            {
                _textArea.Append($"{entry}\n");
            }
        }
        _textArea.Append($"{s}\n");
    };
}
/// <summary>
/// Timer callback that flushes any buffered log text to the output area in a
/// single append, which keeps the UI responsive under a high volume of logs.
/// </summary>
private static void PostLogAsync(object source, System.Timers.ElapsedEventArgs e)
{
    // Controls must only be touched on the UI thread, so marshal the flush.
    Application.Instance.Invoke(() =>
    {
        if (buffer.Length != 0)
        {
            outputArea.Append(buffer, true);
            buffer = "";
        }
        // Disarm the timer; presumably it is re-armed when new text arrives — TODO confirm.
        timer.AutoReset = false;
    });
}
/// <summary>
/// Builds the view, wires UI events and data bindings, and forwards view-model
/// log events into the log text area.
/// </summary>
public MainView()
{
    Content = BuildContent();
    ConfigureEvents();
    ConfigureDataBindings();

    // Give the log area keyboard focus once the view has loaded.
    this.Load += (_, e) => _textLog.Focus();

    this.DataContextChanged += (_, e) =>
    {
        if (this.DataContext is MainViewModel vm)
        {
            // NOTE(review): a new handler is attached every time the DataContext
            // changes and is never detached — confirm the context is only set once.
            vm.LogReceived += (__, ev) =>
            {
                // Log events may arrive off the UI thread; marshal the append.
                Application.Instance.Invoke(() =>
                {
                    // Interpolation formats TimeStamp implicitly; the redundant
                    // explicit ToString() call has been dropped.
                    _textLog.Append($"[{ev.TimeStamp}] {ev.LogLevel} {ev.Message.TrimEnd('\r', '\n')}{Environment.NewLine}", true);
                });
            };
        }
    };
}
/// <summary>
/// Builds the calculator window: an input text area, a Calculate button with a
/// read-only result box, and a read-only log area, plus menu and data-context wiring.
/// </summary>
public MainForm()
{
    Title = "Instant Calculator";
    ClientSize = new Size(400, 350);

    // Create the controls up front so the layout below stays declarative.
    _textInput = new TextArea { SpellCheck = false, Wrap = true, ReadOnly = false };
    _buttonCalculate = new Button { Text = "Calculate" };
    _textOutput = new TextBox { ReadOnly = true };
    _textLog = new TextArea { SpellCheck = false, Wrap = true, ReadOnly = true };

    // Middle row: button on the left, stretching result box on the right.
    var calculateRow = new StackLayout
    {
        Padding = 0,
        Spacing = 5,
        Orientation = Orientation.Horizontal,
        VerticalContentAlignment = VerticalAlignment.Center,
        HorizontalContentAlignment = HorizontalAlignment.Stretch,
        Items =
        {
            _buttonCalculate,
            new StackLayoutItem { Control = _textOutput, Expand = true },
        },
    };

    // Main content, top to bottom: input, calculate row, log (log takes the slack).
    Content = new StackLayout
    {
        Padding = 10,
        Spacing = 10,
        Orientation = Orientation.Vertical,
        HorizontalContentAlignment = HorizontalAlignment.Stretch,
        VerticalContentAlignment = VerticalAlignment.Stretch,
        Items =
        {
            new StackLayoutItem { Control = _textInput, Expand = false },
            calculateRow,
            new StackLayoutItem { Control = _textLog, Expand = true },
        },
    };

    // Quit application.
    var quitCommand = new Command { MenuText = "Quit", Shortcut = Application.Instance.CommonModifier | Keys.Q };
    quitCommand.Executed += (_, e) => Application.Instance.Quit();

    // Show about dialog.
    var aboutCommand = new Command { MenuText = "About..." };
    aboutCommand.Executed += (_, e) => new AboutDialog().ShowDialog(this);

    // Menu with just the standard Quit/About entries; toolbar left empty for now.
    Menu = new MenuBar { Items = { }, ApplicationItems = { }, QuitItem = quitCommand, AboutItem = aboutCommand };
    ToolBar = new ToolBar { Items = { } };

    DataContextChanged += (_, e) =>
    {
        if (this.DataContext is MainViewModel vm)
        {
            // Let the view model push log lines straight into the log area.
            vm.AppendLogAction = (log) => _textLog.Append(log, true);
            ConfigureDataBinding();
        }
    };
    DataContext = new MainViewModel();
}
// Builds the emulator window: scaled screen drawable, log area, "Basics" and
// "Registers" debug groups, run controls, and a ROM hex dump; prompts for a
// ROM file before any of the UI is assembled.
// NOTE(review): showing the file dialog inside the constructor and silently
// returning on cancel leaves a half-initialised form — confirm intended.
public MainForm(Cpu cpu, int scale)
{
    _cpu = cpu;

    // Ask for a ROM up front; abandon the rest of the setup if the user cancels.
    var dia = new OpenFileDialog { MultiSelect = false };
    var result = dia.ShowDialog(this);
    if (result == DialogResult.Ok || result == DialogResult.Yes)
    {
        _cpu.LoadRom(dia.FileName);
    }
    else
    {
        return;
    }

    var scaleFactor = scale;
    Title = "StonerAte";
    ClientSize = new Size(1000, 600);

    // Screen: one filled rectangle per emulator pixel. The 64x32 base size
    // suggests a CHIP-8 framebuffer — TODO confirm Gfx dimensions match.
    _drawable.Size = new Size(64 * scaleFactor, 32 * scaleFactor);
    _drawable.Paint += (sender, e) =>
    {
        // NOTE(review): Eto's Color float constructor expects 0..1 components,
        // so (255, 255, 255) may not render as white — verify against Colors.White.
        var black = new Color(0, 0, 0);
        var white = new Color(255, 255, 255);
        for (var x = 0; x < _cpu.Gfx.GetLength(0); x++)
        {
            for (var y = 0; y < _cpu.Gfx.GetLength(1); y++)
            {
                e.Graphics.FillRectangle(_cpu.Gfx[x, y] == 1 ? white : black, x * scaleFactor, y * scaleFactor, scaleFactor, scaleFactor);
            }
        }
    };

    // Text area fills the horizontal space to the right of the screen.
    _textArea.ReadOnly = true;
    _textArea.Size = new Size(1000 - ((64 * scaleFactor) + 30), 32 * scaleFactor);

    // "Basics" group: label/box rows for opcode, I, PC, SP and the draw flag,
    // followed by two extra rows (presumably sound/delay timers — confirm).
    var basicsLayout = new DynamicLayout();
    basicsLayout.BeginVertical();
    _basicsLabels[0] = new Label { Text = "Opcode:" };
    _basicsBoxs[0] = new TextBox { ReadOnly = true };
    _basicsLabels[1] = new Label { Text = "I:" };
    _basicsBoxs[1] = new TextBox { ReadOnly = true };
    _basicsLabels[2] = new Label { Text = "PC:" };
    _basicsBoxs[2] = new TextBox { ReadOnly = true };
    _basicsLabels[3] = new Label { Text = "SP:" };
    _basicsBoxs[3] = new TextBox { ReadOnly = true };
    _basicsLabels[4] = new Label { Text = "DrawFlag:" };
    _basicsBoxs[4] = new TextBox { ReadOnly = true };
    for (var i = 0; i < 5; i++)
    {
        basicsLayout.BeginHorizontal();
        basicsLayout.Add(_basicsLabels[i]);
        basicsLayout.Add(_basicsBoxs[i]);
        basicsLayout.EndHorizontal();
    }
    basicsLayout.BeginHorizontal();
    basicsLayout.Add(_sTLabel);
    basicsLayout.Add(_sTBox);
    basicsLayout.EndHorizontal();
    basicsLayout.BeginHorizontal();
    basicsLayout.Add(_dTLabel);
    basicsLayout.Add(_dTBox);
    basicsLayout.EndHorizontal();
    basicsLayout.EndVertical();
    var basicsBox = new GroupBox { Text = "Basics", Content = basicsLayout };

    // "Registers" group: sixteen V registers laid out as two columns of eight.
    var regLayout = new DynamicLayout();
    regLayout.BeginVertical();
    for (var i = 0; i < 8; i++)
    {
        _regBoxs[i] = new TextBox { ReadOnly = true };
        _regLabels[i] = new Label { Text = $"V[{i:X}]" };
        _regBoxs[i + 8] = new TextBox { ReadOnly = true };
        _regLabels[i + 8] = new Label { Text = $"V[{i+8:X}]" };
        regLayout.BeginHorizontal();
        regLayout.Add(_regLabels[i]);
        regLayout.Add(_regBoxs[i]);
        regLayout.Add(_regLabels[i + 8]);
        regLayout.Add(_regBoxs[i + 8]);
        regLayout.EndHorizontal();
    }
    regLayout.EndVertical();
    var registerBox = new GroupBox { Text = "Registers", Content = regLayout };

    // "Controls" group: frequency box plus Start/Pause buttons that toggle the
    // _isRunning flag read by the CPU loop.
    var controlLayout = new DynamicLayout();
    controlLayout.BeginVertical();
    controlLayout.BeginHorizontal();
    controlLayout.Add(new Label { Text = "Freq: ", });
    controlLayout.Add(_freqBox);
    controlLayout.EndHorizontal();
    controlLayout.BeginHorizontal();
    var startButton = new Button { Text = "Start" };
    startButton.Click += (sender, args) => { _isRunning = true; };
    var pauseButton = new Button { Text = "Pause" };
    pauseButton.Click += (sender, args) => { _isRunning = false; };
    controlLayout.Add(startButton);
    controlLayout.Add(pauseButton);
    controlLayout.EndHorizontal();
    controlLayout.EndVertical();
    var controlBox = new GroupBox { Text = "Controls", Content = controlLayout, };

    // Hex dump of memory from 0x200 to the end (0x200 is presumably where the
    // ROM was loaded — verify against Cpu.LoadRom).
    var rom = new TextArea { Size = new Size(200, 500) };
    for (var i = 0x200; i < 4096; i++)
    {
        rom.Append($"{i:X4} - {_cpu.Memory[i]:X2}\n");
    }

    // Absolute-positioned composition of all the pieces.
    var layout = new PixelLayout();
    layout.Add(_drawable, 10, 10);
    layout.Add(_textArea, (64 * scaleFactor) + 20, 10);
    layout.Add(basicsBox, 10, (32 * scaleFactor) + 20);
    layout.Add(registerBox, 260, (32 * scaleFactor) + 20);
    layout.Add(controlBox, 10, (32 * scaleFactor) + 240);
    layout.Add(rom, 750, (32 * scaleFactor) + 20);
    Content = layout;

    // Forward raw key events to the CPU, and start the CPU loop on a background
    // thread once the window has finished loading.
    KeyDown += (sender, args) => { _cpu.Key = args; };
    LoadComplete += (sender, args) =>
    {
        var thread = new Thread(RunCpu);
        thread.Start();
    };
}
/// <summary>
/// Scrolls the text control to the end of its content once the control loads.
/// </summary>
protected override void OnLoad(EventArgs e)
{
    base.OnLoad(e);
    // Appending an empty string with the flag set presumably just scrolls the
    // control to its end without changing the text — TODO confirm against Eto docs.
    text.Append(string.Empty, true);
}
public void Train(IFlowsheet flowsheet, TextArea ta = null, Eto.OxyPlot.Plot plot = null) { var nl = Environment.NewLine; var g = tf.Graph(); g.as_default(); if (session != null) { session.Dispose(); session = null; } session = tf.Session(graph: g); tf_with(tf.variable_scope("Train"), delegate { if (flowsheet != null) { flowsheet.ShowMessage("Training Started...", IFlowsheet.MessageType.Information); } else { Application.Instance.Invoke(() => { ta.Append("Training Started..." + nl, true); }); } // tf Graph Input var X = tf.placeholder(tf.float32, shape: (-1, n_x), name: "X"); var Y = tf.placeholder(tf.float32, shape: (-1, n_y), name: "Y"); Tensor outlayer = null; var sigma = 1.0f; var weight_initializer = tf.variance_scaling_initializer(mode: "FAN_AVG", uniform: true, factor: sigma); var bias_initializer = tf.zeros_initializer; var n_neurons_1 = Parameters.NumberOfNeuronsOnFirstLayer; var n_neurons_2 = n_neurons_1 / 2; var n_neurons_3 = n_neurons_2 / 2; var n_neurons_4 = n_neurons_3 / 2; RefVariable W_hidden_1, W_hidden_2, W_hidden_3, W_hidden_4, W_out; RefVariable bias_hidden_1, bias_hidden_2, bias_hidden_3, bias_hidden_4, bias_out; Tensor hidden_1, hidden_2, hidden_3, hidden_4; switch (Parameters.NumberOfLayers) { case 2: // Hidden weights W_hidden_1 = tf.Variable(weight_initializer.call(new int[] { n_x, n_neurons_1 }, dtype: TF_DataType.TF_FLOAT), name: "W1"); bias_hidden_1 = tf.Variable(bias_initializer.call(n_neurons_1, dtype: TF_DataType.TF_FLOAT), name: "b1"); W_hidden_2 = tf.Variable(weight_initializer.call(new int[] { n_neurons_1, n_neurons_2 }, dtype: TF_DataType.TF_FLOAT), name: "W2"); bias_hidden_2 = tf.Variable(bias_initializer.call(n_neurons_2, dtype: TF_DataType.TF_FLOAT), name: "b2"); // Output weights W_out = tf.Variable(weight_initializer.call(new int[] { n_neurons_2, n_y }, dtype: TF_DataType.TF_FLOAT), name: "Wout"); bias_out = tf.Variable(bias_initializer.call(n_y, dtype: TF_DataType.TF_FLOAT), name: "bout"); // Hidden layer hidden_1 = 
tf.nn.relu(tf.add(tf.matmul(X, W_hidden_1), bias_hidden_1), name: "h1"); hidden_2 = tf.nn.relu(tf.add(tf.matmul(hidden_1, W_hidden_2), bias_hidden_2), name: "h2"); // Output layer outlayer = tf.add(tf.matmul(hidden_2, W_out), bias_out, name: "out"); break; case 3: // Hidden weights W_hidden_1 = tf.Variable(weight_initializer.call(new int[] { n_x, n_neurons_1 }, dtype: TF_DataType.TF_FLOAT), name: "W1"); bias_hidden_1 = tf.Variable(bias_initializer.call(n_neurons_1, dtype: TF_DataType.TF_FLOAT), name: "b1"); W_hidden_2 = tf.Variable(weight_initializer.call(new int[] { n_neurons_1, n_neurons_2 }, dtype: TF_DataType.TF_FLOAT), name: "W2"); bias_hidden_2 = tf.Variable(bias_initializer.call(n_neurons_2, dtype: TF_DataType.TF_FLOAT), name: "b2"); W_hidden_3 = tf.Variable(weight_initializer.call(new int[] { n_neurons_2, n_neurons_3 }, dtype: TF_DataType.TF_FLOAT), name: "W3"); bias_hidden_3 = tf.Variable(bias_initializer.call(n_neurons_3, dtype: TF_DataType.TF_FLOAT), name: "b3"); // Output weights W_out = tf.Variable(weight_initializer.call(new int[] { n_neurons_3, n_y }, dtype: TF_DataType.TF_FLOAT), name: "Wout"); bias_out = tf.Variable(bias_initializer.call(n_y, dtype: TF_DataType.TF_FLOAT), name: "bout"); // Hidden layer hidden_1 = tf.nn.relu(tf.add(tf.matmul(X, W_hidden_1), bias_hidden_1), name: "h1"); hidden_2 = tf.nn.relu(tf.add(tf.matmul(hidden_1, W_hidden_2), bias_hidden_2), name: "h2"); hidden_3 = tf.nn.relu(tf.add(tf.matmul(hidden_2, W_hidden_3), bias_hidden_3), name: "h3"); // Output layer outlayer = tf.add(tf.matmul(hidden_3, W_out), bias_out, name: "out"); break; case 4: // Hidden weights W_hidden_1 = tf.Variable(weight_initializer.call(new int[] { n_x, n_neurons_1 }, dtype: TF_DataType.TF_FLOAT), name: "W1"); bias_hidden_1 = tf.Variable(bias_initializer.call(n_neurons_1, dtype: TF_DataType.TF_FLOAT), name: "b1"); W_hidden_2 = tf.Variable(weight_initializer.call(new int[] { n_neurons_1, n_neurons_2 }, dtype: TF_DataType.TF_FLOAT), name: "W2"); bias_hidden_2 
= tf.Variable(bias_initializer.call(n_neurons_2, dtype: TF_DataType.TF_FLOAT), name: "b2"); W_hidden_3 = tf.Variable(weight_initializer.call(new int[] { n_neurons_2, n_neurons_3 }, dtype: TF_DataType.TF_FLOAT), name: "W3"); bias_hidden_3 = tf.Variable(bias_initializer.call(n_neurons_3, dtype: TF_DataType.TF_FLOAT), name: "b3"); W_hidden_4 = tf.Variable(weight_initializer.call(new int[] { n_neurons_3, n_neurons_4 }, dtype: TF_DataType.TF_FLOAT), name: "W4"); bias_hidden_4 = tf.Variable(bias_initializer.call(n_neurons_4, dtype: TF_DataType.TF_FLOAT), name: "b4"); // Output weights W_out = tf.Variable(weight_initializer.call(new int[] { n_neurons_4, n_y }, dtype: TF_DataType.TF_FLOAT), name: "Wout"); bias_out = tf.Variable(bias_initializer.call(n_y, dtype: TF_DataType.TF_FLOAT), name: "bout"); // Hidden layer hidden_1 = tf.nn.relu(tf.add(tf.matmul(X, W_hidden_1), bias_hidden_1), name: "h1"); hidden_2 = tf.nn.relu(tf.add(tf.matmul(hidden_1, W_hidden_2), bias_hidden_2), name: "h2"); hidden_3 = tf.nn.relu(tf.add(tf.matmul(hidden_2, W_hidden_3), bias_hidden_3), name: "h3"); hidden_4 = tf.nn.relu(tf.add(tf.matmul(hidden_3, W_hidden_4), bias_hidden_4), name: "h4"); // Output layer outlayer = tf.add(tf.matmul(hidden_4, W_out), bias_out, name: "out"); break; } // Mean squared error var mse = tf.reduce_sum(tf.pow(outlayer - Y, 2.0f), name: "mse"); var learn_rate = tf.constant(Parameters.LearningRate); var opt = tf.train.AdamOptimizer(learn_rate).minimize(mse); // Fit neural net var batch_size = Parameters.BatchSize; var mse_train = new List <float>(); var mse_test = new List <float>(); // Initialize the variables (i.e. 
assign their default value) var init = tf.global_variables_initializer(); // Run the initializer session.run(init); // Start training var epochs = Parameters.NumberOfEpochs; foreach (var e in range(epochs)) { // Shuffle training data var shuffle_indices = np.random.permutation(np.arange(len(x_train))); var shuffled_x = new NDArray(np.float32, x_train.shape); var shuffled_y = new NDArray(np.float32, y_train.shape); int i0 = 0; foreach (var idx0 in shuffle_indices) { shuffled_x[i0] = x_train[idx0]; shuffled_y[i0] = y_train[idx0]; i0 += 1; } // Minibatch training foreach (var i in range(0, len(y_train) / batch_size)) { var start = i * batch_size; var batch_x = shuffled_x[start.ToString() + ":" + (start + batch_size).ToString(), Slice.All]; var batch_y = shuffled_y[start.ToString() + ":" + (start + batch_size).ToString(), Slice.All]; // Run optimizer with batch session.run(opt, (X, batch_x), (Y, batch_y)); // Show progress var divrem = 0; Math.DivRem(e, 5, out divrem); if (divrem == 0) { // MSE train and test mse_train.Add(session.run(mse, (X, x_train), (Y, y_train))); mse_test.Add(session.run(mse, (X, x_test), (Y, y_test))); if (flowsheet != null) { flowsheet.ShowMessage("Epoch: " + e.ToString(), IFlowsheet.MessageType.Information); flowsheet.ShowMessage("MSE (training): " + mse_train.Last().ToString(), IFlowsheet.MessageType.Information); flowsheet.ShowMessage("MSE (testing): " + mse_test.Last().ToString(), IFlowsheet.MessageType.Information); } else { Application.Instance.Invoke(() => { ta.Append("Epoch: " + e.ToString() + nl, true); ta.Append("MSE (training): " + mse_train.Last().ToString() + nl, true); ta.Append("MSE (testing): " + mse_test.Last().ToString() + nl, true); (plot.Model.Series[0] as OxyPlot.Series.LineSeries).Points.Add(new DataPoint(e, mse_train.Last())); (plot.Model.Series[1] as OxyPlot.Series.LineSeries).Points.Add(new DataPoint(e, mse_test.Last())); plot.Model.InvalidatePlot(true); }); } if (e > 10 && (Math.Abs(mse_train.Last() - 
mse_train[mse_train.Count - 2]) / mse_train[mse_train.Count - 2] < Parameters.RelativeMSETolerance)) { break; } } } } if (flowsheet != null) { flowsheet.ShowMessage("Training Finished!", IFlowsheet.MessageType.Information); } else { Application.Instance.Invoke(() => { ta.Append("Training Finished!" + nl, true); }); } x_test_unscaled = new NDArray(np.float32, x_test.shape); x_train_unscaled = new NDArray(np.float32, x_train.shape); for (var i = 0; i < x_test.shape[0]; i++) { for (var j = 0; j < x_test.shape[1]; j++) { x_test_unscaled[i][j] = Classes.Utils.UnScale(x_test[i][j], Parameters.MinValues[j], Parameters.MaxValues[j], Parameters.MinScale, Parameters.MaxScale); } } for (var i = 0; i < x_train.shape[0]; i++) { for (var j = 0; j < x_train.shape[1]; j++) { x_train_unscaled[i][j] = Classes.Utils.UnScale(x_train[i][j], Parameters.MinValues[j], Parameters.MaxValues[j], Parameters.MinScale, Parameters.MaxScale); } } var idx = Parameters.Labels.IndexOf(Parameters.Labels_Outputs.First()); y_test_unscaled = new NDArray(np.float32, y_test.shape); y_train_unscaled = new NDArray(np.float32, y_train.shape); for (var i = 0; i < y_test.shape[0]; i++) { for (var j = 0; j < y_test.shape[1]; j++) { y_test_unscaled[i][j] = Classes.Utils.UnScale(y_test[i][j], Parameters.MinValues[idx + j], Parameters.MaxValues[idx + j], Parameters.MinScale, Parameters.MaxScale); } } for (var i = 0; i < y_train.shape[0]; i++) { for (var j = 0; j < y_train.shape[1]; j++) { y_train_unscaled[i][j] = Classes.Utils.UnScale(y_train[i][j], Parameters.MinValues[idx + j], Parameters.MaxValues[idx + j], Parameters.MinScale, Parameters.MaxScale); } } yp_test = session.run(outlayer, (X, x_test)); yp_train = session.run(outlayer, (X, x_train)); yp_test_unscaled = new NDArray(np.float32, yp_test.shape); yp_train_unscaled = new NDArray(np.float32, yp_train.shape); for (var i = 0; i < yp_test.shape[0]; i++) { for (var j = 0; j < yp_test.shape[1]; j++) { yp_test_unscaled[i][j] = 
Classes.Utils.UnScale(yp_test[i][j], Parameters.MinValues[idx + j], Parameters.MaxValues[idx + j], Parameters.MinScale, Parameters.MaxScale); } } for (var i = 0; i < yp_train.shape[0]; i++) { for (var j = 0; j < yp_train.shape[1]; j++) { yp_train_unscaled[i][j] = Classes.Utils.UnScale(yp_train[i][j], Parameters.MinValues[idx + j], Parameters.MaxValues[idx + j], Parameters.MinScale, Parameters.MaxScale); } } // Testing example var training_cost = session.run(mse, (X, x_train), (Y, y_train)); var testing_cost = session.run(mse, (X, x_test), (Y, y_test)); var diff = Math.Abs((float)training_cost - (float)testing_cost); if (flowsheet != null) { flowsheet.ShowMessage($"Training Cost = {testing_cost}", IFlowsheet.MessageType.Information); flowsheet.ShowMessage($"Testing Cost = {testing_cost}", IFlowsheet.MessageType.Information); flowsheet.ShowMessage($"Absolute MSE = {diff}", IFlowsheet.MessageType.Information); } else { Application.Instance.Invoke(() => { ta.Append($"Training Cost = {testing_cost}" + nl, true); ta.Append($"Testing Cost = {testing_cost}" + nl, true); ta.Append($"Absolute MSE = {diff}" + nl, true); }); } });