/// <summary>
/// Verifies that every requirement of the given training method is met.
/// On the first unmet requirement a popup message is shown and false is returned.
/// </summary>
/// <param name="trainingMethod">The training method whose requirements are checked.</param>
/// <returns>True when all requirements are satisfied; otherwise false.</returns>
public bool CheckRequirement(TrainingMethod trainingMethod)
{
    var reqs = trainingMethod.requirements;

    // Checks run in order; only the first failing one is reported.
    if (!LevelRequirement(reqs.levelRequirements))
    {
        EventManager.Instance.ShowPopUpMsg("Need lvl x");
        return false;
    }

    if (!ItemRequirement(reqs.itemIds))
    {
        EventManager.Instance.ShowPopUpMsg("Need item x");
        return false;
    }

    if (!QuestRequirement(reqs.questIds))
    {
        EventManager.Instance.ShowPopUpMsg("Need quest x");
        return false;
    }

    if (!GeneralItemRequirement(reqs.generalSkillItems))
    {
        EventManager.Instance.ShowPopUpMsg("Need some kind of x item");
        return false;
    }

    return true;
}
/// <summary>
/// Rolls every drop table attached to the training method and accumulates
/// the results into the supplied dictionary (item id -> quantity).
/// </summary>
/// <param name="dropTableDict">Accumulator of rolled item quantities.</param>
/// <param name="trainingMethod">The training method whose drop tables are rolled.</param>
/// <param name="boostedLvl">Boosted skill level passed to level-scaled tables.</param>
public static void RollResources(Dictionary<long, BigInteger> dropTableDict, TrainingMethod trainingMethod, int boostedLvl)
{
    foreach (DropTable table in trainingMethod.dropTables)
    {
        switch (table.tableType)
        {
            case DropTable.DropTableType.General:
                table.RollTable(dropTableDict);
                break;

            // Clue and pet tables both take the boosted level, so they share a handler.
            case DropTable.DropTableType.Clue:
            case DropTable.DropTableType.Pet:
                table.RollTable(dropTableDict, boostedLvl);
                break;

            default:
                break;
        }
    }
}
/// <summary>
/// Accepts an uploaded image, runs the predictor configured for the requested
/// training method, and reports whether anything was recognized.
/// </summary>
/// <param name="image">The uploaded image file.</param>
/// <param name="method">Training method selecting the prediction engine; defaults to the enum default.</param>
/// <returns>200 OK with a human-readable recognition message.</returns>
public async Task<IActionResult> Recognize([FromForm] IFormFile image, [FromQuery] TrainingMethod method = default)
{
    // Minimum score a prediction must reach to count as recognized.
    const float recognitionThreshold = 0.6f;

    var filePath = await _imageUploader.Upload(image);
    var prediction = _imagePredictor.Predict(filePath, method);

    return Ok(prediction.MaxScore >= recognitionThreshold
        ? $"{prediction.PredictedLabel} has been recognized!!!11one. Score {prediction.MaxScore}."
        // Fixed typo in the user-facing message: "bas" -> "has".
        : "Unfortunately nothing has been recognized.");
}
/// <summary>
/// Advances the active training method by <paramref name="deltaTime"/>: converts
/// elapsed time into completed actions, awards xp per action, rolls drop tables,
/// handles level-ups mid-tick, then banks the accumulated loot.
/// </summary>
/// <param name="trainingMethod">The training method currently being performed.</param>
/// <param name="skill">The skill earning xp from the training method.</param>
/// <param name="deltaTime">Elapsed time since the previous tick.</param>
public void SkillGameLoop(TrainingMethod trainingMethod, Skill skill, float deltaTime)
{
    float currentDeltaTime = deltaTime;
    // Fractional actions completed this tick; the remainder carries over in the
    // actionCount field between ticks.
    float actionIncrement = skill.GetResourceRate(trainingMethod.baseResourceRate) * currentDeltaTime * MainController.timeConstant;
    actionCount += actionIncrement;
    int actionDone = 0; // NOTE(review): incremented and reset below but never read - confirm it can be removed
    while (actionCount >= 1.0F)
    {
        skill.xpFloat += trainingMethod.xpPerResource;
        actionCount -= 1.0F;
        actionDone++;
        EventManager.Instance.XpGained(skill.xp);
        //TODO handle multiple skill earning xp
        //foreach (skill in trainingMethod.additionSkills)
        //  skill.xp += xpPerResource
        //  raise xp event? --> eventManager.xpgained
        ////if (getlevel(xp) != skill.lvl) --> eventManager.levelup
        DropTableManager.RollResources(dropTableDict, trainingMethod, skill.boostedLevel);
        //TODO remove consumables
        if (skill.xp >= skill.xpNextLvl)
        {
            // Level up: recompute the level and next threshold, then re-run the
            // remaining fraction of this tick at the new (faster) action rate.
            int newLvl = GetLevel(skill.xp);
            skill.xpNextLvl = Database.experienceTable[newLvl];
            // Time still unprocessed in this tick, derived from the old rate.
            float deltaTimePerAction = currentDeltaTime / actionIncrement;
            currentDeltaTime = actionCount * deltaTimePerAction;
            actionDone = 0;
            skill.currentLevel = newLvl;
            // A level-up never lowers the boosted level, only raises it.
            if (skill.boostedLevel < skill.currentLevel)
            {
                skill.boostedLevel = skill.currentLevel;
            }
            actionIncrement = skill.GetResourceRate(trainingMethod.baseResourceRate) * currentDeltaTime * MainController.timeConstant;
            actionCount = actionIncrement;
            int totalLvl = GetTotalLevel();
            EventManager.Instance.LevelUp(skill.skillName, skill.currentLevel, totalLvl);
        }
    }
    // Deposit everything rolled this tick and reset the scratch drop table.
    bank.AddMultipleItems(dropTableDict);
    EventManager.Instance.UpdateLastLoot(dropTableDict);
    ClearDropTable(dropTableDict);
}
/// <summary>
/// Deserialization constructor. It is assumed that the serialization info
/// provided contains valid data.
/// </summary>
/// <param name="info">
/// The serialization info to deserialize and obtain values from
/// </param>
/// <param name="context">
/// Serialization context
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>info</c> is <c>null</c>
/// </exception>
public Network(SerializationInfo info, StreamingContext context)
{
    // Validate
    Helper.ValidateNotNull(info, "info");

    // NOTE(review): the 'as' casts below yield null on a type mismatch instead
    // of throwing - confirm the serialized payload is always well-formed.
    this.inputLayer = info.GetValue("inputLayer", typeof(ILayer)) as ILayer;
    this.outputLayer = info.GetValue("outputLayer", typeof(ILayer)) as ILayer;
    this.layers = info.GetValue("layers", typeof(IList<ILayer>)) as IList<ILayer>;
    this.connectors = info.GetValue("connectors", typeof(IList<IConnector>)) as IList<IConnector>;
    this.trainingMethod = (TrainingMethod)info.GetValue("trainingMethod", typeof(TrainingMethod));
    this.jitterEpoch = info.GetInt32("jitterEpoch");
    this.jitterNoiseLimit = info.GetDouble("jitterNoiseLimit");
}
/// <summary>
/// Maps a <see cref="TrainingMethod"/> to the training algorithm implementing it.
/// Unrecognized values fall back to plain backpropagation.
/// </summary>
/// <param name="method">The requested training method.</param>
/// <returns>A freshly constructed algorithm instance for the method.</returns>
public static TrainingAlgorithm Use(TrainingMethod method)
{
    if (method == TrainingMethod.GDM)
    {
        return new BackPropagationWithMomentumAlgorithm();
    }

    if (method == TrainingMethod.LM)
    {
        return new LevenbergAlgorithm();
    }

    // TrainingMethod.GD and any unknown value default to plain backpropagation.
    return new BackpropagationAlgorithm();
}
/// <summary>
/// Selects the training method at the given index of the currently selected
/// skill and, when its requirements are met, starts a new skilling session.
/// </summary>
/// <param name="index">Index into the selected skill's training method list.</param>
public void SetTrainingMethod(int index)
{
    selectedTrainingMethod = selectedSkill.trainingMethods[index];

    isTrainingMethodSelected = CheckRequirement(selectedTrainingMethod);
    if (!isTrainingMethodSelected)
    {
        return;
    }

    // Requirements satisfied: reset progress and begin skilling.
    actionCount = 0;
    dropTableDict = DropTableManager.CreateDropTableDictionary(selectedTrainingMethod.dropTables);
    EventManager.Instance.SkillingStarted();
}
/// <summary>
/// Verifies that the TrainingMethod constructor stores its arguments and that
/// the base xp rate is derived from resource rate and xp per resource.
/// </summary>
public void TrainingMethodConstructorTest()
{
    // Arrange
    const string expectedName = "myMethod";
    const int expectedResourceRate = 5;
    const float expectedXpPerResource = 2.5F;
    Requirements requirements = new Requirements();

    // Act
    TrainingMethod method = new TrainingMethod(expectedName, expectedResourceRate, requirements);
    method.xpPerResource = expectedXpPerResource;

    // Assert
    Assert.AreEqual(expectedName, method.name);
    Assert.AreEqual(expectedResourceRate, method.baseResourceRate);
    Assert.AreEqual(expectedResourceRate * expectedXpPerResource, method.baseXpRate, 0.01);
}
/// <summary>
/// Rolls a single-item general drop table and checks that the rolled quantity
/// matches the configured number of rolls.
/// </summary>
public void RollResourcesTest()
{
    const int boostedLevel = 50;
    const long itemId = 500;

    TrainingMethod method = new TrainingMethod();
    method.dropTables.Add(new GeneralDropTable());
    method.dropTables[0].numRolls = 1;
    method.dropTables[0].lootItems[0] = new DropTable.Loot(itemId);

    // One roll should yield exactly one of the item.
    Dictionary<long, BigInteger> loot = DropTableManager.CreateDropTableDictionary(method.dropTables);
    DropTableManager.RollResources(loot, method, boostedLevel);
    Assert.AreEqual((BigInteger)1, loot[itemId]);

    // Two rolls should yield exactly two of the item.
    method.dropTables[0].numRolls = 2;
    loot = DropTableManager.CreateDropTableDictionary(method.dropTables);
    DropTableManager.RollResources(loot, method, boostedLevel);
    Assert.AreEqual((BigInteger)2, loot[itemId]);
}
/// <summary>
/// Creates a new neural network
/// </summary>
/// <param name="inputLayer">
/// The input layer
/// </param>
/// <param name="outputLayer">
/// The output layer
/// </param>
/// <param name="trainingMethod">
/// Training method to use
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>inputLayer</c> or <c>outputLayer</c> is <c>null</c>.
/// </exception>
/// <exception cref="ArgumentException">
/// If <c>trainingMethod</c> is invalid
/// </exception>
protected Network(ILayer inputLayer, ILayer outputLayer, TrainingMethod trainingMethod)
{
    // Validate
    Helper.ValidateNotNull(inputLayer, "inputLayer");
    Helper.ValidateNotNull(outputLayer, "outputLayer");
    Helper.ValidateEnum(typeof(TrainingMethod), trainingMethod, "trainingMethod");

    // Assign arguments to corresponding variables
    this.inputLayer = inputLayer;
    // NOTE(review): outputLayer is seeded with inputLayer here; the topological
    // walk below reassigns this.outputLayer at every pop, and the final check
    // verifies the walk actually ends at the supplied outputLayer - confirm
    // this seeding is intentional and not a copy-paste slip.
    this.outputLayer = inputLayer;
    this.trainingMethod = trainingMethod;

    // Initialize jitter parameters with default values
    this.jitterEpoch = 73;
    this.jitterNoiseLimit = 0.03d;

    // Create the list of layers and connectors
    this.layers = new List<ILayer>();
    this.connectors = new List<IConnector>();

    // Populate the lists by visiting layers topologically starting from input layer
    Stack<ILayer> stack = new Stack<ILayer>();
    stack.Push(inputLayer);

    // Indegree map: effective remaining source-connector count per layer
    IDictionary<ILayer, int> inDegree = new Dictionary<ILayer, int>();
    while (stack.Count > 0)
    {
        // Add 'top of stack' to list of layers
        this.outputLayer = stack.Pop();
        layers.Add(this.outputLayer);

        // Add targetConnectors to connectors list making sure that they do not lead to cycle
        foreach (IConnector connector in this.outputLayer.TargetConnectors)
        {
            connectors.Add(connector);
            ILayer targetLayer = connector.TargetLayer;
            if (layers.Contains(targetLayer))
            {
                throw new InvalidOperationException("Cycle Exists in the network structure");
            }

            // Virtually remove this layer
            inDegree[targetLayer] = inDegree.ContainsKey(targetLayer) ?
                inDegree[targetLayer] - 1 : targetLayer.SourceConnectors.Count - 1;

            // Push unvisited target layer onto the stack, if its effective inDegree is zero
            if (inDegree[targetLayer] == 0)
            {
                stack.Push(targetLayer);
            }
        }
    }

    // The last layer should be same as output layer
    if (outputLayer != this.outputLayer)
    {
        throw new ArgumentException("The outputLayer is invalid", "outputLayer");
    }

    // Initialize the newly created network
    Initialize();
}
/// <summary>
/// Deserialization constructor. It is assumed that the serialization info provided contains
/// valid data.
/// </summary>
/// <param name="info">
/// The serialization info to deserialize and obtain values
/// </param>
/// <param name="context">
/// Serialization context
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>info</c> is <c>null</c>
/// </exception>
public Network(SerializationInfo info, StreamingContext context)
{
    // Validate
    Helper.ValidateNotNull(info, "info");

    // NOTE(review): the 'as' casts below yield null on a type mismatch instead
    // of throwing - confirm the serialized payload is always well-formed.
    this.inputLayer = info.GetValue("inputLayer", typeof(ILayer)) as ILayer;
    this.outputLayer = info.GetValue("outputLayer", typeof(ILayer)) as ILayer;
    this.layers = info.GetValue("layers", typeof(IList<ILayer>)) as IList<ILayer>;
    this.connectors = info.GetValue("connectors", typeof(IList<IConnector>)) as IList<IConnector>;
    this.trainingMethod = (TrainingMethod)info.GetValue("trainingMethod", typeof(TrainingMethod));
    this.jitterEpoch = info.GetInt32("jitterEpoch");
    this.jitterNoiseLimit = info.GetDouble("jitterNoiseLimit");
}
/// <summary>
/// Creates a new neural network
/// </summary>
/// <param name="inputLayer">
/// The input layer
/// </param>
/// <param name="outputLayer">
/// The output layer
/// </param>
/// <param name="trainingMethod">
/// Training method to use
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>inputLayer</c> or <c>outputLayer</c> is <c>null</c>.
/// </exception>
/// <exception cref="ArgumentException">
/// If <c>trainingMethod</c> is invalid
/// </exception>
protected Network(ILayer inputLayer, ILayer outputLayer, TrainingMethod trainingMethod)
{
    // Validate
    Helper.ValidateNotNull(inputLayer, "inputLayer");
    Helper.ValidateNotNull(outputLayer, "outputLayer");
    Helper.ValidateEnum(typeof(TrainingMethod), trainingMethod, "trainingMethod");

    // Assign arguments to corresponding variables
    this.inputLayer = inputLayer;
    // NOTE(review): outputLayer is seeded with inputLayer; the topological walk
    // below reassigns this.outputLayer at every pop, and the final check verifies
    // the walk ends at the supplied outputLayer - confirm this is intentional.
    this.outputLayer = inputLayer;
    this.trainingMethod = trainingMethod;

    // Initialize jitter parameters with default values
    this.jitterEpoch = 73;
    this.jitterNoiseLimit = 0.03d;

    // Create the list of layers and connectors
    this.layers = new List <ILayer>();
    this.connectors = new List <IConnector>();

    // Populate the lists by visiting layers topologically starting from input layer
    Stack <ILayer> stack = new Stack <ILayer>();
    stack.Push(inputLayer);

    // Indegree map: effective remaining source-connector count per layer
    IDictionary <ILayer, int> inDegree = new Dictionary <ILayer, int>();
    while (stack.Count > 0)
    {
        // Add 'top of stack' to list of layers
        this.outputLayer = stack.Pop();
        layers.Add(this.outputLayer);

        // Add target connectors to connectors list making sure that they do not lead to a cycle
        foreach (IConnector connector in this.outputLayer.TargetConnectors)
        {
            connectors.Add(connector);
            ILayer targetLayer = connector.TargetLayer;
            if (layers.Contains(targetLayer))
            {
                throw new InvalidOperationException("Cycle Exists in the network structure");
            }

            // Virtually remove this layer
            inDegree[targetLayer] = inDegree.ContainsKey(targetLayer) ?
                inDegree[targetLayer] - 1 : targetLayer.SourceConnectors.Count - 1;

            // Push unvisited target layer onto the stack, if its effective inDegree is zero
            if (inDegree[targetLayer] == 0)
            {
                stack.Push(targetLayer);
            }
        }
    }

    // The last layer should be the same as the output layer
    if (outputLayer != this.outputLayer)
    {
        throw new ArgumentException("The outputLayer is invalid", "outputLayer");
    }

    // Initialize the newly created network
    Initialize();
}
/// <summary>
/// Creates a new neural network
/// </summary>
/// <param name="inputLayer">
/// The input layer
/// </param>
/// <param name="outputLayer">
/// The output layer
/// </param>
/// <param name="trainingMethod">
/// Training method to use
/// </param>
/// <exception cref="ArgumentNullException">
/// If <c>inputLayer</c> or <c>outputLayer</c> is <c>null</c>.
/// </exception>
/// <exception cref="ArgumentException">
/// If <c>trainingMethod</c> is invalid
/// </exception>
protected Network(ILayer inputLayer, ILayer outputLayer, TrainingMethod trainingMethod)
{
    // Validate
    Helper.ValidateNotNull(inputLayer, "inputLayer");
    Helper.ValidateNotNull(outputLayer, "outputLayer");
    Helper.ValidateEnum(typeof(TrainingMethod), trainingMethod, "trainingMethod");

    // Assign arguments to corresponding variables
    this.inputLayer = inputLayer;
    // NOTE(review): outputLayer is seeded with inputLayer here; the topological
    // walk below reassigns this.outputLayer at every pop, and the final check
    // verifies the walk actually ends at the supplied outputLayer - confirm
    // this seeding is intentional and not a copy-paste slip.
    this.outputLayer = inputLayer;
    this.trainingMethod = trainingMethod;

    // Initialize jitter parameters with default values
    this.jitterEpoch = 73;
    this.jitterNoiseLimit = 0.03d;

    // Create the list of layers and connectors
    this.layers = new List <ILayer>();
    this.connectors = new List <IConnector>();

    // Populate the lists by visiting layers topologically starting from input layer
    Stack <ILayer> stack = new Stack <ILayer>();
    stack.Push(inputLayer);

    // Indegree map: effective remaining source-connector count per layer
    IDictionary <ILayer, int> inDegree = new Dictionary <ILayer, int>();
    while (stack.Count > 0)
    {
        // Add 'top of stack' to list of layers
        this.outputLayer = stack.Pop();
        layers.Add(this.outputLayer);

        // Add targetConnectors to connectors list making sure that they do not lead to cycle
        foreach (IConnector connector in this.outputLayer.TargetConnectors)
        {
            connectors.Add(connector);
            ILayer targetLayer = connector.TargetLayer;
            if (layers.Contains(targetLayer))
            {
                throw new InvalidOperationException("Cycle Exists in the network structure");
            }

            // Virtually remove this layer
            inDegree[targetLayer] = inDegree.ContainsKey(targetLayer) ?
                inDegree[targetLayer] - 1 : targetLayer.SourceConnectors.Count - 1;

            // Push unvisited target layer onto the stack, if its effective inDegree is zero
            if (inDegree[targetLayer] == 0)
            {
                stack.Push(targetLayer);
            }
        }
    }

    // The last layer should be same as output layer
    if (outputLayer != this.outputLayer)
    {
        throw new ArgumentException("The outputLayer is invalid", "outputLayer");
    }

    // Initialize the newly created network
    Initialize();
}
public ImagePrediction Predict(string pathToImage, TrainingMethod method) => GetPredictionEngine(method) .Predict(new Image