/// <summary>
/// Determine if the network is valid. If invalid, throw an error.
/// </summary>
/// <param name="network">The network to check.</param>
/// <exception cref="ValidateMachineLearningError">
/// Thrown when the network fails validation; the exception message is the
/// error text produced by <see cref="IsValid"/>.</exception>
public virtual void Validate(IEngineMachineLearning network)
{
    // IsValid already returns a String (see the abstract declaration), so the
    // original redundant (String) cast is removed. A null result means valid.
    String msg = IsValid(network);

    if (msg != null)
    {
        throw new ValidateMachineLearningError(msg);
    }
}
/// <summary>
/// Determine if the specified neural network can be flattened. If it can,
/// null is returned; otherwise an error message is returned describing why
/// the network cannot be flattened.
/// </summary>
/// <param name="eml">The network to check.</param>
/// <returns>Null if the network CAN be flattened, an error message
/// otherwise. (The original doc stated the opposite; the code below clearly
/// returns null only when every check passes.)</returns>
public override String IsValid(IEngineMachineLearning eml)
{
    if (!(eml is BasicNetwork))
    {
        return "Only a BasicNetwork can be converted to a flat network.";
    }

    BasicNetwork network = (BasicNetwork)eml;

    ILayer inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (inputLayer == null)
    {
        return "To convert to a flat network, there must be an input layer.";
    }

    if (outputLayer == null)
    {
        return "To convert to a flat network, there must be an output layer.";
    }

    // NOTE(review): ThermalLogic is excluded explicitly even when it passes
    // the FeedforwardLogic test — presumably it derives from it; confirm.
    if (!(network.Logic is FeedforwardLogic)
        || (network.Logic is ThermalLogic))
    {
        return "To convert to flat, must be using FeedforwardLogic or SimpleRecurrentLogic.";
    }

    foreach (ILayer layer in network.Structure.Layers)
    {
        if (layer.Next.Count > 2)
        {
            return "To convert to flat a network must have at most two outbound synapses.";
        }

        // Exact type comparison (not 'is'): subclasses of these layers are
        // rejected as well.
        if (layer.GetType() != typeof(ContextLayer)
            && layer.GetType() != typeof(BasicLayer)
            && layer.GetType() != typeof(RadialBasisFunctionLayer))
        {
            return "To convert to flat a network must have only BasicLayer and ContextLayer layers.";
        }
    }

    foreach (ISynapse synapse in network.Structure.Synapses)
    {
        if (synapse is NEATSynapse)
        {
            return "A NEAT synapse cannot be flattened.";
        }
    }

    // All checks passed: the network can be flattened.
    return null;
}
/// <summary>
/// Determine if the specified neural network can be flattened. If it can,
/// null is returned; otherwise an error message is returned describing why
/// the network cannot be flattened.
/// </summary>
/// <param name="eml">The network to check.</param>
/// <returns>Null if the network CAN be flattened, an error message
/// otherwise. (The original doc stated the opposite; the code below clearly
/// returns null only when every check passes.)</returns>
public override String IsValid(IEngineMachineLearning eml)
{
    // Parenthesized return("...") from the machine translation is normalized
    // to idiomatic C#; behavior is unchanged.
    if (!(eml is BasicNetwork))
    {
        return "Only a BasicNetwork can be converted to a flat network.";
    }

    BasicNetwork network = (BasicNetwork)eml;

    ILayer inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (inputLayer == null)
    {
        return "To convert to a flat network, there must be an input layer.";
    }

    if (outputLayer == null)
    {
        return "To convert to a flat network, there must be an output layer.";
    }

    // NOTE(review): ThermalLogic is excluded explicitly even when it passes
    // the FeedforwardLogic test — presumably it derives from it; confirm.
    if (!(network.Logic is FeedforwardLogic)
        || (network.Logic is ThermalLogic))
    {
        return "To convert to flat, must be using FeedforwardLogic or SimpleRecurrentLogic.";
    }

    foreach (ILayer layer in network.Structure.Layers)
    {
        if (layer.Next.Count > 2)
        {
            return "To convert to flat a network must have at most two outbound synapses.";
        }

        // Exact type comparison (not 'is'): subclasses of these layers are
        // rejected as well.
        if (layer.GetType() != typeof(ContextLayer)
            && layer.GetType() != typeof(BasicLayer)
            && layer.GetType() != typeof(RadialBasisFunctionLayer))
        {
            return "To convert to flat a network must have only BasicLayer and ContextLayer layers.";
        }
    }

    foreach (ISynapse synapse in network.Structure.Synapses)
    {
        if (synapse is NEATSynapse)
        {
            return "A NEAT synapse cannot be flattened.";
        }
    }

    // All checks passed: the network can be flattened.
    return null;
}
/// <summary>
/// Determine if the network is valid for OpenCL.
/// </summary>
/// <param name="network">The network to check.</param>
/// <returns>The string indicating the error that prevents OpenCL from using
/// the network, or null if the network is fine for OpenCL.</returns>
public override String IsValid(IEngineMachineLearning network)
{
    if (!(network is FlatNetwork))
    {
        return "Only flat networks are valid to be used for OpenCL";
    }

    FlatNetwork flat = (FlatNetwork)network;

    // Every activation function must provide an OpenCL kernel expression.
    foreach (IActivationFunction activation in flat.ActivationFunctions)
    {
        if (activation.GetOpenCLExpression(true) == null)
        {
            return "Can't use OpenCL if activation function does not have an OpenCL expression.";
        }
    }

    // NOTE(review): comparing the result of HasSameActivationFunction() to
    // null looks suspicious — if that method returns a bool, this condition
    // is always false and the multiple-activation-function check never
    // fires. Confirm the method's return type; behavior is preserved here.
    if (flat.HasSameActivationFunction() == null)
    {
        return "Can't use OpenCL training on a neural network that uses multiple activation functions.";
    }

    // Context neurons exist when any layer has a nonzero context target
    // offset or size.
    bool hasContext = false;
    for (int i = 0; i < flat.LayerCounts.Length; i++)
    {
        if (flat.ContextTargetOffset[i] != 0)
        {
            hasContext = true;
        }

        if (flat.ContextTargetSize[i] != 0)
        {
            hasContext = true;
        }
    }

    if (hasContext)
    {
        return "Can't use OpenCL if context neurons are present.";
    }

    // All checks passed: OpenCL can be used with this network.
    return null;
}
/// <summary>
/// Determine if the network is valid for OpenCL.
/// </summary>
/// <param name="network">The network to check.</param>
/// <returns>The string indicating the error that prevents OpenCL from using
/// the network, or null if the network is fine for OpenCL.</returns>
public override String IsValid(IEngineMachineLearning network)
{
    // Parenthesized return("...") from the machine translation is normalized
    // to idiomatic C#; behavior is unchanged.
    if (!(network is FlatNetwork))
    {
        return "Only flat networks are valid to be used for OpenCL";
    }

    FlatNetwork flat = (FlatNetwork)network;

    // Every activation function must provide an OpenCL kernel expression.
    foreach (IActivationFunction activation in flat.ActivationFunctions)
    {
        if (activation.GetOpenCLExpression(true) == null)
        {
            return "Can't use OpenCL if activation function does not have an OpenCL expression.";
        }
    }

    // NOTE(review): comparing the result of HasSameActivationFunction() to
    // null looks suspicious — if that method returns a bool, this condition
    // is always false and the multiple-activation-function check never
    // fires. Confirm the method's return type; behavior is preserved here.
    if (flat.HasSameActivationFunction() == null)
    {
        return "Can't use OpenCL training on a neural network that uses multiple activation functions.";
    }

    // Context neurons exist when any layer has a nonzero context target
    // offset or size.
    bool hasContext = false;
    for (int i = 0; i < flat.LayerCounts.Length; i++)
    {
        if (flat.ContextTargetOffset[i] != 0)
        {
            hasContext = true;
        }

        if (flat.ContextTargetSize[i] != 0)
        {
            hasContext = true;
        }
    }

    if (hasContext)
    {
        return "Can't use OpenCL if context neurons are present.";
    }

    // All checks passed: OpenCL can be used with this network.
    return null;
}
/// <summary>
/// Determine if the specified machine-learning method is valid.
/// Ported from org.encog.engine.validate.ValidateMachineLearning.
/// </summary>
/// <param name="network">The network to check.</param>
/// <returns>Null if the network is valid; otherwise an error message
/// describing the problem (the convention followed by the concrete
/// implementations of this method).</returns>
public abstract System.String IsValid(
    IEngineMachineLearning network);