/// <summary>
/// Generate the RSOM network.
/// </summary>
/// <returns>The neural network.</returns>
public BasicNetwork Generate()
{
    // Two linear layers plus a context layer that is fed one-to-one by the
    // output layer and feeds back into the input layer (recurrent wiring).
    ILayer inputLayer = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
    ILayer outputLayer = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
    ILayer contextLayer = new ContextLayer(this.outputNeurons);

    BasicNetwork network = new BasicNetwork();
    network.AddLayer(inputLayer);
    network.AddLayer(outputLayer);
    outputLayer.AddNext(contextLayer, SynapseType.OneToOne);
    contextLayer.AddNext(inputLayer);

    // Diagram layout: input and context share the first row, output below.
    int row = PatternConst.START_Y;
    inputLayer.X = PatternConst.START_X;
    inputLayer.Y = row;
    contextLayer.X = PatternConst.INDENT_X;
    contextLayer.Y = row;

    row += PatternConst.INC_Y;
    outputLayer.X = PatternConst.START_X;
    outputLayer.Y = row;

    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}
/// <summary>
/// Unflatten the weights: copy the flat network's weights back into the
/// neural network's weight matrixes, then refresh each context layer's
/// state from the flat network's layer output buffer.
/// </summary>
public void UnflattenWeights()
{
    if (flat == null)
    {
        return;
    }

    NetworkCODEC.ArrayToNetwork(flat.Weights, network);
    this.flatUpdate = FlatUpdateNeeded.None;

    // Restore every context layer that has a slot in the flat layer output.
    foreach (ILayer layer in this.layers)
    {
        ContextLayer contextLayer = layer as ContextLayer;
        if (contextLayer == null || contextLayer.FlatContextIndex == -1)
        {
            continue;
        }

        EngineArray.ArrayCopy(
            this.flat.LayerOutput,
            contextLayer.FlatContextIndex,
            contextLayer.Context.Data,
            0,
            contextLayer.Context.Count);
    }
}
/// <summary>
/// Save the specified Encog object to an XML writer.
/// </summary>
/// <param name="obj">The object to save.</param>
/// <param name="xmlOut">The XML writer to save to.</param>
public void Save(IEncogPersistedObject obj, WriteXML xmlOut)
{
    PersistorUtil.BeginEncogObject(
        EncogPersistedCollection.TYPE_CONTEXT_LAYER, xmlOut, obj, false);

    ContextLayer contextLayer = (ContextLayer)obj;

    // Basic layer properties.
    xmlOut.AddProperty(BasicLayerPersistor.PROPERTY_NEURONS, contextLayer.NeuronCount);
    xmlOut.AddProperty(BasicLayerPersistor.PROPERTY_X, contextLayer.X);
    xmlOut.AddProperty(BasicLayerPersistor.PROPERTY_Y, contextLayer.Y);

    // Bias weights are optional; persist them as a CSV list only when present.
    if (contextLayer.HasBias)
    {
        StringBuilder biasList = new StringBuilder();
        NumberList.ToList(CSVFormat.EG_FORMAT, biasList, contextLayer.BiasWeights);
        xmlOut.AddProperty(BasicLayerPersistor.PROPERTY_THRESHOLD, biasList.ToString());
    }

    // The context values themselves are always written.
    StringBuilder contextList = new StringBuilder();
    NumberList.ToList(CSVFormat.EG_FORMAT, contextList, contextLayer.Context.Data);
    xmlOut.AddProperty(PROPERTY_CONTEXT, contextList.ToString());

    xmlOut.AddProperty(BasicLayerPersistor.PROPERTY_BIAS_ACTIVATION,
        contextLayer.BiasActivation);
    BasicLayerPersistor.SaveActivationFunction(contextLayer.ActivationFunction, xmlOut);
    xmlOut.EndTag();
}
/// <summary>
/// Handle a "new user" registration packet: read the registration fields
/// and, if the e-mail is not already in use, acknowledge account creation.
/// </summary>
/// <param name="_fromClient">Id of the client connection that sent the packet.</param>
/// <param name="_packet">Packet layout: client id (int), username, email, password (strings).</param>
public static void NewUser(int _fromClient, Packet _packet)
{
    int _clientIdCheck = _packet.ReadInt();
    string _username = _packet.ReadString();
    string _email = _packet.ReadString();
    string _password = _packet.ReadString();
    try
    {
        using (var context = ContextLayer.GetContext())
        {
            // BUG FIX: this log line previously contained a scrubbed/invalid
            // literal ("New User: "******" - ") that does not compile.
            Console.WriteLine("New User: " + _username + " - " + _email);
            if (!ContextLayer.DoesUserEmailExist(_email, context))
            {
                //ServerSend.SendNewUserEmail();
                // TODO(review): AddNewUser is commented out, so the success
                // alert below is sent without actually creating the account —
                // confirm this is intentional (e.g. pending email verification).
                // ContextLayer.AddNewUser(_username, _email, _password, context);
                ServerSend.Alert(_clientIdCheck, "User created check your email to verify your account before logging in.");
            }
            else
            {
                ServerSend.Alert(_clientIdCheck, "An Error occured or user already exists.");
            }
        }
    }
    catch (Exception e)
    {
        // BUG FIX: exceptions were swallowed silently; log so failures are visible.
        Console.WriteLine("NewUser failed: " + e);
    }
}
/// <summary>
/// Handle a request to enter the game: look up the user and the selected
/// character, then hand the client off to the in-game state.
/// </summary>
/// <param name="_fromClient">Id of the client connection that sent the packet.</param>
/// <param name="_packet">Packet layout: client id (int), username (string), character id (long).</param>
public static void SendPlayerIntoGame(int _fromClient, Packet _packet)
{
    using (var context = ContextLayer.GetContext())
    {
        int claimedClientId = _packet.ReadInt();
        var username = _packet.ReadString();
        var requestedCharacterId = _packet.ReadLong(); //change to use token / other

        var user = ContextLayer.GetUser(username, context);
        var characters = PlayerCharacter.ToCharacterList(
            ContextLayer.GetUserCharacters(user.Id, context));

        // NOTE(review): if no character matches the id, FirstOrDefault yields
        // null and SendIntoGame receives it — confirm that is handled downstream.
        var selected = characters.FirstOrDefault(c => c.Id == requestedCharacterId);
        Server.clients[_fromClient].SendIntoGame(username, selected);
    }
}
/// <summary>
/// Flatten the weights of a neural network: encode the object network's
/// weight matrixes into the flat network's weight array, push context-layer
/// state into the flat layer output, and propagate any connection limit.
/// </summary>
public void FlattenWeights()
{
    if (this.flat == null)
    {
        return;
    }

    this.flatUpdate = FlatUpdateNeeded.Flatten;

    // Encode the network's weights and copy them into the flat array.
    double[] encoded = NetworkCODEC.NetworkToArray(this.network);
    EngineArray.ArrayCopy(encoded, this.flat.Weights);
    this.flatUpdate = FlatUpdateNeeded.None;

    // Push each mapped context layer's state into the flat output buffer.
    foreach (ILayer layer in this.layers)
    {
        ContextLayer contextLayer = layer as ContextLayer;
        if (contextLayer == null || contextLayer.FlatContextIndex == -1)
        {
            continue;
        }

        EngineArray.ArrayCopy(
            contextLayer.Context.Data,
            0,
            this.flat.LayerOutput,
            contextLayer.FlatContextIndex,
            contextLayer.Context.Count);
    }

    // handle limited connection networks
    if (this.connectionLimited)
    {
        this.flat.ConnectionLimit = this.connectionLimit;
    }
    else
    {
        this.flat.ClearConnectionLimit();
    }
}
/// <summary>
/// Handle the welcome-received packet: for an existing user, verify the
/// supplied credentials and, on success, send the login acknowledgement
/// with the user's character list.
/// </summary>
/// <param name="_fromClient">Id of the client connection that sent the packet.</param>
/// <param name="_packet">Packet layout: client id (int), new-user flag (bool), username, password (strings).</param>
public static void WelcomeReceived(int _fromClient, Packet _packet)
{
    int _clientIdCheck = _packet.ReadInt();
    bool _newUser = _packet.ReadBool();
    string _username = _packet.ReadString();
    string _passwordAttempt = _packet.ReadString();
    try
    {
        // TODO(review): when _newUser is true this handler does nothing at
        // all — confirm registration is routed through the NewUser packet.
        if (!_newUser)
        {
            using (var context = ContextLayer.GetContext())
            {
                if (!ContextLayer.UserCredentialsCorrect(_username, _passwordAttempt, context))
                {
                    Console.WriteLine("Invalid User");
                    ServerSend.Alert(_clientIdCheck, "Username / password are invalid or User does not exist.");
                }
                else
                {
                    var user = ContextLayer.GetUser(_username, context);
                    Console.WriteLine($"Client: {Server.clients[_fromClient].tcp.socket.Client.RemoteEndPoint} connected successfully and {_username} is now player {_fromClient}.");
                    // A mismatched id is only logged, never rejected — kept as-is.
                    if (_fromClient != _clientIdCheck)
                    {
                        Console.WriteLine($"Player \"{_username}\" (ID: {_fromClient}) has assumed the wrong client ID ({_clientIdCheck})!");
                    }
                    // TODO: send player into game
                    var characters = PlayerCharacter.ToCharacterList(ContextLayer.GetUserCharacters(user.Id, context));
                    ServerSend.LoginSuccess(_clientIdCheck, new UserSigninData { UserName = user.Name, Characters = characters });
                    //Server.clients[_fromClient].SendIntoGame(_username);
                }
            }
        }
    }
    catch (Exception e)
    {
        // BUG FIX: exceptions were swallowed silently, hiding login failures.
        Console.WriteLine("WelcomeReceived failed: " + e);
    }
}
/// <summary>
/// Handle a "new character" packet: create the character for the given
/// user and, on success, send the created character back to the client.
/// </summary>
/// <param name="_fromClient">Id of the client connection that sent the packet.</param>
/// <param name="_packet">Packet layout: client id (int), username, new character name (strings).</param>
public static void NewCharacter(int _fromClient, Packet _packet)
{
    int _clientIdCheck = _packet.ReadInt();
    string _userName = _packet.ReadString();
    string _newCharacterName = _packet.ReadString();
    try
    {
        using (var context = ContextLayer.GetContext())
        {
            Console.WriteLine("New Character: " + _newCharacterName);
            var newCharacter = ContextLayer.AddNewCharacter(_userName, _newCharacterName, context);
            // Null means creation failed; the client is simply not notified.
            if (newCharacter != null)
            {
                ServerSend.NewCharacterCreated(_fromClient,
                    new PlayerCharacter(newCharacter.Id, newCharacter.Name));
            }
        }
    }
    catch (Exception e)
    {
        // BUG FIX: exceptions were swallowed silently; log so failures are visible.
        Console.WriteLine("NewCharacter failed: " + e);
    }
}
/// <summary>
/// Generate a Jordan neural network.
/// </summary>
/// <returns>A Jordan neural network.</returns>
public BasicNetwork Generate()
{
    // Three feed-forward layers plus the Jordan context layer, which is fed
    // one-to-one by the output layer and feeds back into the hidden layer.
    ILayer inputLayer = new BasicLayer(this.activation, false, this.inputNeurons);
    ILayer hiddenLayer = new BasicLayer(this.activation, true, this.hiddenNeurons);
    ILayer outputLayer = new BasicLayer(this.activation, true, this.outputNeurons);
    ILayer contextLayer = new ContextLayer(this.outputNeurons);

    BasicNetwork network = new BasicNetwork();
    network.AddLayer(inputLayer);
    network.AddLayer(hiddenLayer);
    network.AddLayer(outputLayer);
    outputLayer.AddNext(contextLayer, SynapseType.OneToOne);
    contextLayer.AddNext(hiddenLayer);

    // Diagram layout: one row per feed-forward layer; the context layer
    // sits indented beside the hidden layer.
    int row = PatternConst.START_Y;
    inputLayer.X = PatternConst.START_X;
    inputLayer.Y = row;

    row += PatternConst.INC_Y;
    hiddenLayer.X = PatternConst.START_X;
    hiddenLayer.Y = row;
    contextLayer.X = PatternConst.INDENT_X;
    contextLayer.Y = row;

    row += PatternConst.INC_Y;
    outputLayer.X = PatternConst.START_X;
    outputLayer.Y = row;

    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}
/// <summary>
/// Create the flat neural network.
/// Validates the object network, then builds a FlatNetwork (or FlatNetworkRBF)
/// mirror of it, wiring context-layer feedback into the flat representation.
/// </summary>
public void Flatten()
{
    // NOTE(review): isRBF is never set to true — the RBF branch below returns
    // early — so the "if (isRBF)" block near the end is dead code; confirm.
    bool isRBF = false;
    // Maps between object-model layers and their flat counterparts.
    IDictionary<ILayer, FlatLayer> regular2flat = new Dictionary<ILayer, FlatLayer>();
    IDictionary<FlatLayer, ILayer> flat2regular = new Dictionary<FlatLayer, ILayer>();
    // Pairs of (layer feeding a context layer, layer fed by that context layer).
    IList<ObjectPair<ILayer, ILayer>> contexts = new List<ObjectPair<ILayer, ILayer>>();
    this.flat = null;

    ValidateForFlat val = new ValidateForFlat();

    // null from IsValid means the network CAN be flattened.
    if (val.IsValid(this.network) == null)
    {
        // Special case: a 3-layer network with an RBF middle layer flattens
        // directly to a FlatNetworkRBF and we are done.
        if (this.layers.Count == 3
            && this.layers[1] is RadialBasisFunctionLayer)
        {
            RadialBasisFunctionLayer rbf = (RadialBasisFunctionLayer)this.layers[1];
            this.flat = new FlatNetworkRBF(this.network.InputCount,
                rbf.NeuronCount, this.network.OutputCount,
                rbf.RadialBasisFunction);
            FlattenWeights();
            this.flatUpdate = FlatUpdateNeeded.None;
            return;
        }

        // Context layers are excluded from the flat layer array; it is filled
        // from the end backwards as this.layers is walked forwards.
        int flatLayerCount = CountNonContext();
        FlatLayer[] flatLayers = new FlatLayer[flatLayerCount];

        int index = flatLayers.Length - 1;
        foreach (ILayer layer in this.layers)
        {
            if (layer is ContextLayer)
            {
                // Record which basic layer feeds this context layer and which
                // basic layer it feeds; the links are wired up after the loop.
                ISynapse inboundSynapse = network.Structure
                    .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                ISynapse outboundSynapse = network
                    .Structure
                    .FindNextSynapseByLayerType(layer, typeof(BasicLayer));

                if (inboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must be connected to by one BasicLayer.");
                }

                if (outboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must connect to by one BasicLayer.");
                }

                ILayer inbound = inboundSynapse.FromLayer;
                ILayer outbound = outboundSynapse.ToLayer;

                contexts
                    .Add(new ObjectPair<ILayer, ILayer>(inbound, outbound));
            }
            else
            {
                // Regular layer: translate to a FlatLayer, defaulting to a
                // linear activation (slope param 1) when none is set.
                double bias = this.FindNextBias(layer);

                IActivationFunction activationType;
                double[] param = new double[1];

                if (layer.ActivationFunction == null)
                {
                    activationType = new ActivationLinear();
                    param = new double[1];
                    param[0] = 1;
                }
                else
                {
                    activationType = layer.ActivationFunction;
                    param = layer.ActivationFunction.Params;
                }

                FlatLayer flatLayer = new FlatLayer(activationType, layer
                    .NeuronCount, bias, param);

                regular2flat[layer] = flatLayer;
                flat2regular[flatLayer] = layer;
                flatLayers[index--] = flatLayer;
            }
        }

        // now link up the context layers
        foreach (ObjectPair<ILayer, ILayer> context in contexts)
        {
            // link the context layer on the FlatLayer
            ILayer layer = context.B;
            ISynapse synapse = this.network
                .Structure
                .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
            FlatLayer from = regular2flat[context.A];
            FlatLayer to = regular2flat[synapse.FromLayer];
            to.ContextFedBy = from;
        }

        this.flat = new FlatNetwork(flatLayers);

        // update the context indexes on the non-flat network
        for (int i = 0; i < flatLayerCount; i++)
        {
            FlatLayer fedBy = flatLayers[i].ContextFedBy;

            if (fedBy != null)
            {
                // NOTE(review): assumes the layer fed by a context source sits
                // at i + 1; i + 1 out of range would throw — confirm invariant.
                ILayer fedBy2 = flat2regular[flatLayers[i + 1]];
                ISynapse synapse = FindPreviousSynapseByLayerType(fedBy2, typeof(ContextLayer));
                if (synapse == null)
                {
                    throw new NeuralNetworkError("Can't find parent synapse to context layer.");
                }
                ContextLayer context = (ContextLayer)synapse.FromLayer;

                // find fedby index
                int fedByIndex = -1;
                for (int j = 0; j < flatLayerCount; j++)
                {
                    if (flatLayers[j] == fedBy)
                    {
                        fedByIndex = j;
                        break;
                    }
                }

                if (fedByIndex == -1)
                {
                    throw new NeuralNetworkError("Can't find layer feeding context.");
                }

                // Point the object-model context layer at its slot in the
                // flat network's output buffer.
                context.FlatContextIndex = this.flat.ContextTargetOffset[fedByIndex];
            }
        }

        // RBF networks will not train every layer
        // NOTE(review): unreachable — isRBF is always false here (see above).
        if (isRBF)
        {
            this.flat.EndTraining = flatLayers.Length - 1;
        }

        FlattenWeights();

        // NOTE(review): empty body — connection-limit handling looks
        // unimplemented here; FlattenWeights propagates the limit itself.
        if (this.IsConnectionLimited)
        {
        }

        this.flatUpdate = FlatUpdateNeeded.None;
    }
    else
    {
        // The network cannot be flattened; never try again.
        this.flatUpdate = FlatUpdateNeeded.Never;
    }
}
/// <summary>
/// Load the specified Encog object from an XML reader.
/// Reads a persisted ContextLayer: neuron count, coordinates, optional bias
/// ("threshold") weights, saved context values, and the activation function.
/// </summary>
/// <param name="xmlIn">The XML reader to use.</param>
/// <returns>The loaded object, or null if no neuron count was found.</returns>
public IEncogPersistedObject Load(ReadXML xmlIn)
{
    int neuronCount = 0;
    int x = 0;
    int y = 0;
    double biasActivation = 1;
    String threshold = null;
    IActivationFunction activation = null;
    String end = xmlIn.LastTag.Name;
    String context = null;

    // Scan child tags until the matching end tag, collecting properties.
    while (xmlIn.ReadToTag())
    {
        if (xmlIn.IsIt(BasicLayerPersistor.TAG_ACTIVATION, true))
        {
            // The activation is nested one level deeper; the inner tag's
            // name identifies the activation function type.
            xmlIn.ReadToTag();
            String type = xmlIn.LastTag.Name;
            activation = BasicLayerPersistor.LoadActivation(type, xmlIn);
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_NEURONS, true))
        {
            neuronCount = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_X, true))
        {
            x = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_Y, true))
        {
            y = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_THRESHOLD, true))
        {
            threshold = xmlIn.ReadTextToTag();
        }
        else if (xmlIn.IsIt(PROPERTY_CONTEXT, true))
        {
            context = xmlIn.ReadTextToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_BIAS_ACTIVATION, true))
        {
            // NOTE(review): double.Parse uses the current culture, while the
            // lists below use CSVFormat.EG_FORMAT — confirm this is intended.
            biasActivation = double.Parse(xmlIn.ReadTextToTag());
        }
        else if (xmlIn.IsIt(end, false))
        {
            break;
        }
    }

    if (neuronCount > 0)
    {
        ContextLayer layer;

        // "threshold" is the legacy name for the bias weight list; when it is
        // absent the layer is created without bias.
        if (threshold == null)
        {
            layer = new ContextLayer(activation, false, neuronCount);
        }
        else
        {
            double[] t = NumberList.FromList(CSVFormat.EG_FORMAT, threshold);
            layer = new ContextLayer(activation, true, neuronCount);
            for (int i = 0; i < t.Length; i++)
            {
                layer.BiasWeights[i] = t[i];
            }
        }

        // Restore the saved context values, if any were persisted.
        if (context != null)
        {
            double[] c = NumberList.FromList(CSVFormat.EG_FORMAT, context);

            for (int i = 0; i < c.Length; i++)
            {
                layer.Context[i] = c[i];
            }
        }

        layer.X = x;
        layer.Y = y;
        layer.BiasActivation = biasActivation;
        return (layer);
    }

    // No neuron count was read: nothing usable was persisted.
    return (null);
}
/// <summary>
/// Initializes the repository with a fresh data context.
/// NOTE(review): ContextLayer here appears to be a data-access context
/// (constructed with new, assigned to a db field) — confirm, since the name
/// is also used elsewhere for an Encog neural-network layer.
/// </summary>
public VehicleRepository() { db = new ContextLayer(); }