public void TanhLayer_Forward()
        {
            var layer = new TanhLayer();
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int count = bottom.Count;

                for (int i = 0; i < bottom.Num; i++)
                {
                    for (int j = 0; j < bottom.Channels; j++)
                    {
                        for (int k = 0; k < bottom.Height; k++)
                        {
                            for (int l = 0; l < bottom.Width; l++)
                            {
                                var x = bottomCpu.DataAt(i, j, k, l);
                                var v = (Math.Exp(2 * x) - 1) / (Math.Exp(2 * x) + 1); // tanh(x)
                                Assert.True(MathHelpers.Equality(topCpu.DataAt(i, j, k, l), v, 1e-4f));
                            }
                        }
                    }
                }
            }
        }
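For reference, the expected value computed in the loop is the exponential form of the hyperbolic tangent, tanh(x) = (e^(2x) - 1) / (e^(2x) + 1), which is what Math.Tanh evaluates directly. A minimal standalone check of that identity:

        double x = 0.5;
        double viaExp = (Math.Exp(2 * x) - 1) / (Math.Exp(2 * x) + 1);
        // The two forms agree to machine precision, well inside the test's 1e-4 tolerance.
        System.Diagnostics.Debug.Assert(Math.Abs(viaExp - Math.Tanh(x)) < 1e-12);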
        public void SerializationTest()
        {
            // Create a TanhLayer
            var layer = new TanhLayer();

            layer.Init(10, 10, 3);

            TanhLayer deserialized;

            using (var ms = new MemoryStream())
            {
                // Serialize
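                // Note: BinaryFormatter is obsolete as of .NET 5 (SYSLIB0011), so this pattern only works on older runtimes; SerializeTest below round-trips through JSON instead.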
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize
                ms.Position  = 0;
                deserialized = formatter.Deserialize(ms) as TanhLayer;
            }

            Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
            Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
            Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
        }
        public void ComputeTwiceGradientShouldYieldTheSameResult()
        {
            const int inputWidth  = 20;
            const int inputHeight = 20;
            const int inputDepth  = 2;

            var layer = new TanhLayer <double>();

            layer.Init(inputWidth, inputHeight, inputDepth);

            // Forward pass
            var input = BuilderInstance <double> .Volume.Random(new Shape(inputWidth, inputHeight, inputDepth));

            var output = layer.DoForward(input, true);

            // Set output gradients to 1
            var outputGradient = BuilderInstance <double> .Volume.SameAs(new double[output.Shape.TotalLength].Populate(1.0), output.Shape);

            // Backward pass to retrieve gradients
            layer.Backward(outputGradient);
            var step1 = ((Volume <double>)layer.InputActivationGradients.Clone()).ToArray();

            layer.Backward(outputGradient);
            var step2 = ((Volume <double>)layer.InputActivationGradients.Clone()).ToArray();

            Assert.IsTrue(step1.SequenceEqual(step2));
        }
        public void TanhLayer_BackwardGradient()
        {
            var layer = new TanhLayer();

            var checker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f); // presumably (stepsize, threshold, seed, kink, kink_range), mirroring Caffe's GradientChecker
            checker.CheckEltwise(layer, bottom, top);
        }
Example #5
        public void CopyConstructorTest1()
        {
            TanhLayer layer1 = new TanhLayer(new Shape(new int[] { 2 }));
            TanhLayer layer2 = new TanhLayer(layer1);

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Example #6
        public void CloneTest()
        {
            TanhLayer layer1 = new TanhLayer(new Shape(new int[] { 2 }));
            TanhLayer layer2 = layer1.Clone() as TanhLayer;

            Assert.AreEqual(JsonConvert.SerializeObject(layer1), JsonConvert.SerializeObject(layer2));
        }
Example #8
        public void ForwardBackwardTest()
        {
            Shape     shape = new Shape(new int[] { 2 });
            TanhLayer layer = new TanhLayer(shape);

            Session session = new Session();

            Tensor source = new Tensor(null, shape);

            source.Set(new float[] { 2, -3 });

            Tensor x = source.Clone() as Tensor;
            Tensor y = layer.Forward(session, new[] { x })[0];

            float[] expected = source.Weights.Take(source.Length).Select(w => TanhLayerTest.activation(w)).ToArray();
            Helpers.AreArraysEqual(x.Length, expected, y.Weights);

            // unroll the graph
            float[] dy = Enumerable.Range(1, x.Length).Select(w => (float)w).ToArray();
            y.SetGradient(dy);
            session.Unroll();

            Helpers.AreArraysEqual(
                expected.Length,
                expected.Zip(dy, (w, dw) => TanhLayerTest.derivative(w) * dw).ToArray(),
                x.Gradient);
        }
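The helpers this test relies on follow from the standard tanh derivative written in terms of the output: if y = tanh(x), then dy/dx = 1 - y^2. A minimal sketch of what TanhLayerTest.activation and TanhLayerTest.derivative presumably look like (they are not shown in the source):

        private static float activation(float x) => (float)Math.Tanh(x);

        // Takes the output y = tanh(x); the test applies it to the expected activations, not the raw inputs.
        private static float derivative(float y) => 1.0f - (y * y);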
Example #9
        public void LabTest1()
        {
            var inputLayer  = new LinearLayer(5);
            var hiddenLayer = new TanhLayer(neuronCount);
            var outputLayer = new TanhLayer(2);

            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _xorNetwork.SetLearningRate(learningRate);

            var trainingSet = new TrainingSet(5, 2);

            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));
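            // Note: the output layer is a TanhLayer, whose range is (-1, 1), so target values of 2 and 3 above can never actually be reached.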

            _errorList = new double[cycles];

            //_xorNetwork.EndEpochEvent += EndEpochEvent;
            _xorNetwork.Learn(trainingSet, cycles);

            var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
        }
Example #10
        public static TanhLayer<T> Tanh<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            var tanh = new TanhLayer<T>();
            tanh.AcceptParent(layer);

            return tanh;
        }
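A hedged usage sketch for the fluent extension above; InputLayer<double> is a hypothetical starting layer standing in for whatever concrete LayerBase<T> the library provides:

        // Chain a tanh activation onto an existing layer; AcceptParent wires tanh to its parent.
        var input = new InputLayer<double>(28, 28, 1); // hypothetical constructor
        var tanh  = input.Tanh();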
Example #11
        public void ArchitectureConstructorTest1()
        {
            Shape     shape = new Shape(new int[] { 2 });
            TanhLayer layer = new TanhLayer(shape, "TH", null);

            CollectionAssert.AreEqual(shape.Axes, layer.OutputShape.Axes);
            Assert.AreEqual("TH", layer.Architecture);
        }
Example #12
        public static TanhLayer <T> Tanh <T>(this LayerBase <T> layer) where T : struct, IEquatable <T>, IFormattable
        {
            var tanh = new TanhLayer <T>();

            layer.ConnectTo(tanh);

            return(tanh);
        }
Example #14
        public static TanhLayer Tanh(this LayerBase layer)
        {
            var tanh = new TanhLayer();

            layer.ConnectTo(tanh);

            return(tanh);
        }
Example #15
        public void SerializeTest()
        {
            TanhLayer layer1 = new TanhLayer(new Shape(new int[] { 2 }));
            string    s1     = JsonConvert.SerializeObject(layer1);
            TanhLayer layer2 = JsonConvert.DeserializeObject <TanhLayer>(s1);
            string    s2     = JsonConvert.SerializeObject(layer2);

            Assert.AreEqual(s1, s2);
        }
        public void TanhLayer_Setup()
        {
            var layer = new TanhLayer();
            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
        public void GradientWrtInputCheck()
        {
            const int inputWidth = 20;
            const int inputHeight = 20;
            const int inputDepth = 2;

            // Create layer
            var layer = new TanhLayer();

            GradientCheckTools.GradientCheck(layer, inputWidth, inputHeight, inputDepth);
        }
        public void GradientWrtInputCheck()
        {
            const int inputWidth  = 20;
            const int inputHeight = 20;
            const int inputDepth  = 2;

            const int batchSize = 3;

            // Create layer
            var layer = new TanhLayer <double>();

            GradientCheckTools.GradientCheck(layer, inputWidth, inputHeight, inputDepth, batchSize, 1e-6);
        }
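For context, a gradient check of this kind typically compares the layer's analytic input gradients against a central-difference estimate (the trailing 1e-6 in the call above is presumably that step size). A minimal, library-agnostic sketch of the estimator:

        // Central difference: df/dx_i ~= (f(x + eps*e_i) - f(x - eps*e_i)) / (2*eps).
        static double NumericGradient(Func<double[], double> f, double[] x, int i, double eps = 1e-6)
        {
            double saved = x[i];
            x[i] = saved + eps; double fPlus  = f(x);
            x[i] = saved - eps; double fMinus = f(x);
            x[i] = saved;
            return (fPlus - fMinus) / (2 * eps);
        }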
Example #21
        public void ArchitectureConstructorTest2()
        {
            string architecture = "THH";

            try
            {
                TanhLayer layer = new TanhLayer(new Shape(new int[] { 2 }), architecture, null);
            }
            catch (ArgumentException e)
            {
                Assert.AreEqual(
                    new ArgumentException(string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture), nameof(architecture)).Message,
                    e.Message);
                throw;
            }
        }
Example #22
        public void TanhLayerSerialization()
        {
            var layer = new TanhLayer();

            layer.Init(28, 24, 1);
            var data = layer.GetData();

            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var deserialized = LayerBase <double> .FromData(data) as TanhLayer;

            Assert.IsNotNull(deserialized);
            Assert.AreEqual(28, deserialized.InputWidth);
            Assert.AreEqual(24, deserialized.InputHeight);
            Assert.AreEqual(1, deserialized.InputDepth);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
        }
Example #23
        public List <ActivationLayer> HiddenLayerList()
        {
            List <ActivationLayer> ActivationLayerList = new List <ActivationLayer>();

            for (int i = 0; i < neuronCount.Count; i++)
            {
                if (neuronCount[i] < 1)
                {
                    neuronCount[i] = 1;
                }

                if (layerStructure[i] == 0)
                {
                    ActivationLayerList.Add(new SigmoidLayer(neuronCount[i]));
                }
                else if (layerStructure[i] == 1)
                {
                    ActivationLayerList.Add(new LinearLayer(neuronCount[i]));
                }
                else if (layerStructure[i] == 2)
                {
                    ActivationLayerList.Add(new LogarithmLayer(neuronCount[i]));
                }
                else if (layerStructure[i] == 3)
                {
                    ActivationLayerList.Add(new SineLayer(neuronCount[i]));
                }
                else if (layerStructure[i] == 4)
                {
                    ActivationLayerList.Add(new TanhLayer(neuronCount[i]));
                }
                else
                {
                    // Unknown layer code: bail out with an empty list.
                    return(new List <ActivationLayer>());
                }
            }

            return(ActivationLayerList);
        }
        public void Forward()
        {
            const int inputWidth     = 2;
            const int inputHeight    = 2;
            const int inputDepth     = 2;
            const int inputBatchSize = 2;

            var layer = new TanhLayer <double>();

            layer.Init(inputWidth, inputHeight, inputDepth);

            var input = new Volume.Double.Volume(new[]
            {
                1.0, 2.0,
                3.0, 4.0,
                5.0, 6.0,
                7.0, 8.0,
                9.0, 10.0,
                11.0, 12.0,
                13.0, 14.0,
                15.0, 16.0
            }, new Shape(inputWidth, inputHeight, inputDepth, inputBatchSize));

            layer.DoForward(input);

            for (int n = 0; n < 2; n++)
            {
                for (int c = 0; c < 2; c++)
                {
                    for (int y = 0; y < 2; y++)
                    {
                        for (int x = 0; x < 2; x++)
                        {
                            Assert.AreEqual(Math.Tanh(input.Get(x, y, c, n)), layer.OutputActivation.Get(x, y, c, n));
                        }
                    }
                }
            }
        }
Example #25
        private static void AddActivationLayers(NetworkGraph graph)
        {
            foreach (Layer layer in graph.Vertices.Where(x => ((x as TrainableLayer)?.NeedsActivation).GetValueOrDefault()).ToList())
            {
                Layer source = layer;
                if (graph.OutDegree(source) == 1)
                {
                    // Optimization: if this layer feeds a max-pooling layer, insert the activation after the pooling layer instead.
                    Edge <Layer> edge = graph.OutEdges(source)[0];
                    if (edge.Target is MaxPoolingLayer)
                    {
                        source = edge.Target;
                    }
                }

                if (graph.OutDegree(source) == 1)
                {
                    Edge <Layer> edge = graph.OutEdges(source)[0];
                    if (!(edge.Target is ActivationLayer) && !(edge.Target is LossLayer))
                    {
                        Layer activationLayer = new TanhLayer(edge.Source.OutputShape);
                        graph.AddVertex(activationLayer);

                        Edge <Layer> newEdge = new Edge <Layer>(edge.Source, activationLayer);
                        graph.OutEdges(source)[0] = newEdge;
                        graph.InEdges(activationLayer).Add(newEdge);

                        if (edge.Target != null)
                        {
                            IList <Edge <Layer> > inedges = graph.InEdges(edge.Target);
                            int index = inedges.IndexOf(edge);
                            newEdge        = new Edge <Layer>(activationLayer, edge.Target);
                            inedges[index] = newEdge;
                            graph.OutEdges(activationLayer).Add(newEdge);
                        }
                    }
                }
            }
        }
Example #26
        public void shrit(bool all = true)
        {
            List <int> referee = new List <int>();

            foreach (var item in layers)
            {
                referee.Add(item);
            }
            referee.Add(opt);
            LinearLayer inputLayer = new LinearLayer(npu);

            ActivationLayer last = null;

            for (int i = 0; i < layers.Count; i++)
            {
                ActivationLayer hiddenLayer = null;
                // Size each hidden layer from its own entry in layers.
                switch (valf)
                {
                case acti.Sigmoid:
                    hiddenLayer = new SigmoidLayer(layers[i]);
                    break;

                case acti.tanh:
                    hiddenLayer = new TanhLayer(layers[i]);
                    break;

                case acti.Logarith:
                    hiddenLayer = new LogarithmLayer(layers[i]);
                    break;

                case acti.Sine:
                    hiddenLayer = new SineLayer(layers[i]);
                    break;

                case acti.Linear:
                    hiddenLayer = new LinearLayer(layers[i]);
                    break;

                default:
                    break;
                }
                if (last == null)
                {
                    new BackpropagationConnector(inputLayer, hiddenLayer);
                }
                else
                {
                    new BackpropagationConnector(last, hiddenLayer);
                }

                last = hiddenLayer;
            }
            ActivationLayer outputLayer = new SigmoidLayer(opt);

            if (last != null)
            {
                new BackpropagationConnector(last, outputLayer);
            }
            else
            {
                new BackpropagationConnector(inputLayer, outputLayer);
            }

            netamp = new BackpropagationNetwork(inputLayer, outputLayer);
        }
Example #27
        private void buttonOK_Click(object sender, EventArgs e)
        {
            this.Cursor = Cursors.WaitCursor;
            try
            {
                //
                // Parse the data files
                //
                int inputCount  = 0;
                int outputCount = 1;
                if (textInputCount.Text.Length > 0)
                {
                    inputCount = int.Parse(textInputCount.Text);
                }
                if (textOutputCount.Text.Length > 0)
                {
                    outputCount = int.Parse(textOutputCount.Text);
                }
                TrainingSet trainingSet   = DataFile.CsvFileToTrainingSet(textTrainingSet.Text, ref inputCount, ref outputCount);
                TrainingSet crossvalidSet = null;
                if (textCvSet.Text.Length > 0)
                {
                    crossvalidSet = DataFile.CsvFileToTrainingSet(textCvSet.Text, ref inputCount, ref outputCount);
                }


                //
                // Create the new network
                //

                // The input layer is always linear, sized to the input count
                LinearLayer inputLayer = new LinearLayer(inputCount);

                // Create the hidden layers
                ActivationLayer hiddenLayer1 = null;
                ActivationLayer hiddenLayer2 = null;
                ActivationLayer outputLayer  = null;
                if (comboActFunction1.SelectedIndex < 0)
                {
                    MessageBox.Show("请选择激活函数!"); return;
                }
                switch ((HiddenLayerType)comboActFunction1.SelectedItem)
                {
                case HiddenLayerType.Linear: hiddenLayer1 = new LinearLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Logarithmic: hiddenLayer1 = new LogarithmLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Sigmoid: hiddenLayer1 = new SigmoidLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Sine: hiddenLayer1 = new SineLayer(int.Parse(textNeuronCount1.Text)); break;

                case HiddenLayerType.Tanh: hiddenLayer1 = new TanhLayer(int.Parse(textNeuronCount1.Text)); break;
                }
                if (textNeuronCount2.Text.Length > 0 && int.Parse(textNeuronCount2.Text) > 0)
                {
                    switch ((HiddenLayerType)comboActFunction2.SelectedItem)
                    {
                    case HiddenLayerType.Linear: hiddenLayer2 = new LinearLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Logarithmic: hiddenLayer2 = new LogarithmLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Sigmoid: hiddenLayer2 = new SigmoidLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Sine: hiddenLayer2 = new SineLayer(int.Parse(textNeuronCount2.Text)); break;

                    case HiddenLayerType.Tanh: hiddenLayer2 = new TanhLayer(int.Parse(textNeuronCount2.Text)); break;
                    }
                }

                if (comboOutputFunction.SelectedIndex < 0)
                {
                    MessageBox.Show("请选择输出函数!"); return;
                }
                switch ((HiddenLayerType)comboOutputFunction.SelectedItem)
                {
                case HiddenLayerType.Linear: outputLayer = new LinearLayer(outputCount); break;

                case HiddenLayerType.Logarithmic: outputLayer = new LogarithmLayer(outputCount); break;

                case HiddenLayerType.Sigmoid: outputLayer = new SigmoidLayer(outputCount); break;

                case HiddenLayerType.Sine: outputLayer = new SineLayer(outputCount); break;

                case HiddenLayerType.Tanh: outputLayer = new TanhLayer(outputCount); break;
                }

                // Connect the layers; hiddenLayer2 is optional
                new BackpropagationConnector(inputLayer, hiddenLayer1);
                if (hiddenLayer2 != null)
                {
                    new BackpropagationConnector(hiddenLayer1, hiddenLayer2);
                    new BackpropagationConnector(hiddenLayer2, outputLayer);
                }
                else
                {
                    new BackpropagationConnector(hiddenLayer1, outputLayer);
                }
                BackpropagationNetwork backpropNetwork = new BackpropagationNetwork(inputLayer, outputLayer);


                //
                // Set the learning and termination parameters
                //

                double startLearningRate = double.Parse(textStartLearningRate.Text);
                double? finalLearningRate = null;
                if (textFinalLearningRate.Text.Length > 0)
                {
                    finalLearningRate = double.Parse(textFinalLearningRate.Text);
                }

                // Use the selected learning rate function, if one was chosen
                LearningRateFunction? lrf = null;
                if (comboLRFunction.SelectedIndex > 0)
                {
                    lrf = (LearningRateFunction)comboLRFunction.SelectedItem;
                    backpropNetwork.SetLearningRate(
                        LearningRateFactory.GetLearningRateFunction(lrf.Value, startLearningRate, finalLearningRate.Value));
                }
                else
                {
                    // Otherwise use a plain learning rate, possibly with start and end values
                    if (finalLearningRate.HasValue)
                    {
                        backpropNetwork.SetLearningRate(startLearningRate, finalLearningRate.Value);
                    }
                    else
                    {
                        backpropNetwork.SetLearningRate(startLearningRate);
                    }
                }

                // If momentum was given, set it on every connector
                double?momentum = null;
                if (textMomentum.Text.Length > 0)
                {
                    momentum = double.Parse(textMomentum.Text);
                    foreach (ILayer layer in backpropNetwork.Layers)
                    {
                        foreach (BackpropagationConnector conn in layer.SourceConnectors)
                        {
                            conn.Momentum = momentum.Value;
                        }
                        foreach (BackpropagationConnector conn in layer.TargetConnectors)
                        {
                            conn.Momentum = momentum.Value;
                        }
                    }
                }


                //
                // Create and save the new project
                //
                int tmpInt;
                NewProject         = new NnProject();
                NewProject.Network = backpropNetwork;
                // Make sure weights are initialized for the new network; by default a new training run will not reinitialize them
                NewProject.Network.Initialize();
                NewProject.ProjectName        = textProjectName.Text.Trim();
                NewProject.SaveFolder         = textSaveFolder.Text;
                NewProject.TrainingSet        = trainingSet;
                NewProject.CrossValidationSet = crossvalidSet;
                NewProject.LearningParameters = new NnProject.NnLearningParameters();
                NewProject.LearningParameters.InitialLearningRate  = startLearningRate;
                NewProject.LearningParameters.FinalLearningRate    = finalLearningRate;
                NewProject.LearningParameters.LearningRateFunction = lrf;
                NewProject.LearningParameters.Momentum             = momentum;
                if (int.TryParse(textTrainingCycles.Text, out tmpInt))
                {
                    NewProject.LearningParameters.MaxTrainingCycles = tmpInt;
                }

                NnProject.Save(NewProject, textSaveFolder.Text);

                this.Close();
            }
            catch (Exception ex)
            {
                this.Cursor = Cursors.Default;
                MessageBox.Show("Error creating network - " + ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            { this.Cursor = Cursors.Default; }
        }