// Update is called once per frame.
// Drives per-manager generation timers: when a manager's timer expires, or its
// whole population has finished, the manager is flagged with IsTimeUpTag (and
// its timer is rescheduled); a manager already flagged gets the tag removed so
// the next cycle can start. Tag changes go through the command buffer.
protected override void OnUpdate( )
{
    if (Time.ElapsedTime <= i_startTime)
    {
        return; // Delay startup.
    }

    EntityCommandBuffer ecb = becb.CreateCommandBuffer();

    if (group_MMMamager.CalculateChunkCount() == 0)
    {
        // No active managers yet; nothing to do this frame.
        return;
    }

    ComponentDataFromEntity <NNManagerComponent> a_manager = GetComponentDataFromEntity <NNManagerComponent> (false);
    ComponentDataFromEntity <NNTimerComponent> a_managerTimer = GetComponentDataFromEntity <NNTimerComponent> (false);
    ComponentDataFromEntity <IsTimeUpTag> a_isTimeUpTag = GetComponentDataFromEntity <IsTimeUpTag> (true);
    // Loop-invariant: fetched once here instead of once per manager iteration.
    ComponentDataFromEntity <IsAliveTag> a_isAliveTag = GetComponentDataFromEntity <IsAliveTag> (false);

    l_managerSharedData.Clear();
    EntityManager.GetAllUniqueSharedComponentData(l_managerSharedData);

    // Ignore default manager entity ( index = 0, version = 0 ), taken from prefab entity.
    for (int i = 0; i < l_managerSharedData.Count; i++)
    {
        NNManagerSharedComponent mangerSharedComponent = l_managerSharedData [i];

        // Reconstruct the manager entity from the index/version stored in the shared component.
        Entity managerEntity = new Entity()
        {
            Index = mangerSharedComponent.i_entityIndex,
            Version = mangerSharedComponent.i_entityVersion
        };

        // Entity manager must be valid and active.
        if (ManagerMethods._SkipInvalidManager(managerEntity, ref a_isAliveTag))
        {
            continue;
        }

        if (!a_isTimeUpTag.HasComponent(managerEntity))
        {
            NNTimerComponent managerTimer = a_managerTimer [managerEntity];
            NNManagerComponent manager = a_manager [managerEntity];

            // Restrict the finished-population query to this manager's entities.
            group_finishedPopulation.SetSharedComponentFilter(mangerSharedComponent);

            // Time is up, or the entire population has already finished early.
            if (Time.ElapsedTime >= managerTimer.f || group_finishedPopulation.CalculateEntityCount() >= manager.i_populationSize)
            {
                managerTimer.f = (float)Time.ElapsedTime + manager.i_startLifeTime; // Schedule next expiry.
                a_managerTimer [managerEntity] = managerTimer; // Set back.
                ecb.AddComponent <IsTimeUpTag> (managerEntity);
            }
        }
        else // if ( a_isTimeUpTag.Exists ( managerEntity ) )
        {
            // Manager was already flagged; reset for the next generation cycle.
            ecb.RemoveComponent <IsTimeUpTag> (managerEntity);
        }
    } // for

    becb.AddJobHandleForProducer(Dependency);
}
// Prepares neural-network DNA for the next generation in four Burst-compiled
// parallel passes:
//  1) first-generation brains get fresh random weights,
//  2) later-generation offspring get their crossed-over DNA mutated,
//  3) first-generation offspring are (re)randomized, and
//  4) every new brain is tagged initialized + alive via the command buffer.
protected override void OnUpdate( )
{
    EntityCommandBuffer.ParallelWriter ecbp = eecb.CreateCommandBuffer().AsParallelWriter();

    // Advance the system-level RNG state, then copy it by value for job capture.
    this.random.NextInt2();
    Unity.Mathematics.Random random = this.random;

    ComponentDataFromEntity <NNManagerComponent> a_manager = GetComponentDataFromEntity <NNManagerComponent> (true);

    // Pass 1: brand-new first-generation brains — fill both weight layers with
    // uniform random values in [-f_muatationRange, +f_muatationRange].
    Entities
        .WithName("NNCreateFirstGenerationWeightsJob")
        .WithAll <NNBrainTag, IsSpawningCompleteTag, NNIsFirstGenerationTag> ()
        .WithNone <IsInitializedTag> ()
        .WithReadOnly(a_manager)
        .ForEach((Entity entity, ref DynamicBuffer <NNInput2HiddenLayersWeightsBuffer> a_input2hiddenLayerWeights, ref DynamicBuffer <NNHidden2OutputLayersWeightsBuffer> a_hidden2OutputLayerWeights, in NNAssignedToManagerComponent assignedManager) =>
        {
            // Reseed per entity so parallel chunks (which all capture the same
            // `random` copy) do not produce identical value streams.
            random.InitState((uint)(random.NextInt() + entity.Index));
            random.NextInt2();

            NNManagerComponent managerComponent = a_manager [assignedManager.entity];
            float f_muatationRange = managerComponent.f_muatationRange;

            // Initialize random weights (input -> hidden layer).
            for (int i = 0; i < a_input2hiddenLayerWeights.Length; i++)
            {
                a_input2hiddenLayerWeights [i] = new NNInput2HiddenLayersWeightsBuffer() { f = random.NextFloat(-f_muatationRange, f_muatationRange) };
            }

            // Initialize random weights (hidden -> output layer).
            for (int i = 0; i < a_hidden2OutputLayerWeights.Length; i++)
            {
                a_hidden2OutputLayerWeights [i] = new NNHidden2OutputLayersWeightsBuffer() { f = random.NextFloat(-f_muatationRange, f_muatationRange) };
            }
        }).ScheduleParallel();

    // Advance and re-copy the RNG so the next job captures a different state.
    this.random.NextInt2();
    random = this.random;

    // DNA mutation.
    // Pass 2: non-first-generation offspring. A single roll (f_groupSelection)
    // selects one of three mutation-parameter groups by cumulative percentage
    // thresholds; the final else reuses group 2's parameters as the catch-all.
    Entities
        .WithName("NNDNAMutationJob")
        .WithAll <NNBrainTag, IsSpawningCompleteTag> ()
        .WithNone <IsInitializedTag, NNIsFirstGenerationTag> ()
        .WithReadOnly(a_manager)
        .ForEach((Entity entity, ref DynamicBuffer <NNInput2HiddenLayersWeightsBuffer> a_offspringInput2HiddenLayersWeights, ref DynamicBuffer <NNHidden2OutputLayersWeightsBuffer> a_offspringtHidden2OutputLayersWeights, in NNAssignedToManagerComponent assignedManager) =>
        {
            // Per-entity reseed (see pass 1).
            random.InitState((uint)(random.NextInt() + entity.Index));
            random.NextInt2();

            NNManagerComponent managerComponent = a_manager [assignedManager.entity];

            float f_range = managerComponent.f_muatationRange;
            float f_groupSelection = random.NextFloat();

            if (f_groupSelection <= managerComponent.f_firstGroupSizeInPercentage)
            {
                float f_majorMutationChance = managerComponent.f_majorMutationChance0;
                float f_minorMutationChance = managerComponent.f_minorMutationChance0;
                float f_minorMutationRangeScale = managerComponent.f_minorMutationRangeScale0;
                _MutationChances(ref a_offspringInput2HiddenLayersWeights, ref a_offspringtHidden2OutputLayersWeights, ref random, f_range, f_majorMutationChance, f_minorMutationChance, f_minorMutationRangeScale);
            }
            else if (f_groupSelection <= managerComponent.f_secondGroupSizeInPercentage)
            {
                float f_majorMutationChance = managerComponent.f_majorMutationChance1;
                float f_minorMutationChance = managerComponent.f_minorMutationChance1;
                float f_minorMutationRangeScale = managerComponent.f_minorMutationRangeScale1;
                _MutationChances(ref a_offspringInput2HiddenLayersWeights, ref a_offspringtHidden2OutputLayersWeights, ref random, f_range, f_majorMutationChance, f_minorMutationChance, f_minorMutationRangeScale);
            }
            else if (f_groupSelection <= managerComponent.f_thirdGroupSizeInPercentage)
            {
                float f_majorMutationChance = managerComponent.f_majorMutationChance2;
                float f_minorMutationChance = managerComponent.f_minorMutationChance2;
                float f_minorMutationRangeScale = managerComponent.f_minorMutationRangeScale2;
                _MutationChances(ref a_offspringInput2HiddenLayersWeights, ref a_offspringtHidden2OutputLayersWeights, ref random, f_range, f_majorMutationChance, f_minorMutationChance, f_minorMutationRangeScale);
            }
            else
            {
                // NOTE(review): fallback duplicates the group-2 branch above
                // (same *_2 parameters) — confirm this duplication is intended.
                float f_majorMutationChance = managerComponent.f_majorMutationChance2;
                float f_minorMutationChance = managerComponent.f_minorMutationChance2;
                float f_minorMutationRangeScale = managerComponent.f_minorMutationRangeScale2;
                _MutationChances(ref a_offspringInput2HiddenLayersWeights, ref a_offspringtHidden2OutputLayersWeights, ref random, f_range, f_majorMutationChance, f_minorMutationChance, f_minorMutationRangeScale);
            }
        }).ScheduleParallel();

    // Advance the local RNG copy before the next job captures it.
    // NOTE(review): unlike the passes above this does not re-copy from
    // this.random (`this.random.NextInt2(); random = this.random;`) — confirm
    // the asymmetry is intentional.
    random.NextUInt2();

    // DNA mutation.
    // Pass 3: first-generation offspring — overwrite both weight layers with
    // fresh uniform random values in [-f_range, +f_range].
    // NOTE(review): this query matches the same entities as pass 1 and will
    // overwrite the weights written there once both jobs run — confirm.
    Entities
        .WithName("NNDNAMutationOfFirstGenerationJob")
        .WithAll <NNBrainTag, IsSpawningCompleteTag, NNIsFirstGenerationTag> ()
        .WithNone <IsInitializedTag> ()
        .WithReadOnly(a_manager)
        .ForEach((Entity entity, ref DynamicBuffer <NNInput2HiddenLayersWeightsBuffer> a_offspringInput2HiddenLayersWeights, ref DynamicBuffer <NNHidden2OutputLayersWeightsBuffer> a_offspringtHidden2OutputLayersWeights, in NNAssignedToManagerComponent assignedManager) =>
        {
            // Per-entity reseed (see pass 1).
            random.InitState((uint)(random.NextInt() + entity.Index));
            random.NextInt2();

            NNManagerComponent managerComponent = a_manager [assignedManager.entity];
            float f_range = managerComponent.f_muatationRange;

            for (int i = 0; i < a_offspringInput2HiddenLayersWeights.Length; i++)
            {
                float f = random.NextFloat(-f_range, f_range);
                a_offspringInput2HiddenLayersWeights [i] = new NNInput2HiddenLayersWeightsBuffer() { f = f };
            }

            for (int i = 0; i < a_offspringtHidden2OutputLayersWeights.Length; i++)
            {
                float f = random.NextFloat(-f_range, f_range);
                a_offspringtHidden2OutputLayersWeights [i] = new NNHidden2OutputLayersWeightsBuffer() { f = f };
            }
        }).ScheduleParallel();

    // Pass 4: tag every freshly spawned brain as initialized and alive
    // (applied at ECB playback).
    Entities
        .WithName("NNActivateNewPopulationJob")
        .WithAll <NNBrainTag, IsSpawningCompleteTag> ()
        .WithNone <IsInitializedTag> ()
        .ForEach((Entity entity, int entityInQueryIndex) =>
        {
            ecbp.AddComponent <IsInitializedTag> (entityInQueryIndex, entity);
            ecbp.AddComponent <IsAliveTag> (entityInQueryIndex, entity);
        }).ScheduleParallel();

    // Ensure the ECB system waits on the scheduled jobs before playback.
    eecb.AddJobHandleForProducer(Dependency);
}
// Breeds a new generation for every manager flagged as spawning: either copies
// the previous best generation's DNA verbatim (when the elite score is too low
// to build a probability table) or performs score-weighted elite selection
// followed by DNA crossover. Finally flips IsSpawningTag ->
// IsSpawningCompleteTag on all spawning brains.
protected override void OnUpdate( )
{
    if (group_MMMamager.CalculateChunkCount() == 0)
    {
        Debug.LogWarning("There is no active manager.");
        return;
    }

    EntityCommandBuffer ecb = becb.CreateCommandBuffer();
    EntityCommandBuffer.ParallelWriter ecbp = ecb.AsParallelWriter();

    l_managerSharedData.Clear();
    EntityManager.GetAllUniqueSharedComponentData(l_managerSharedData);

    ComponentDataFromEntity <NNManagerBestFitnessComponent> a_managerBestFitness = GetComponentDataFromEntity <NNManagerBestFitnessComponent> (false);
    ComponentDataFromEntity <NNManagerComponent> a_manager = GetComponentDataFromEntity <NNManagerComponent> (true);
    ComponentDataFromEntity <NNScoreComponent> a_managerScore = GetComponentDataFromEntity <NNScoreComponent> (true);
    ComponentDataFromEntity <NNBrainScoreComponent> a_brainScore = GetComponentDataFromEntity <NNBrainScoreComponent> (true);
    ComponentDataFromEntity <NNMangerIsSpawningNewGenerationTag> a_mangerIsSpawningNewGeneration = GetComponentDataFromEntity <NNMangerIsSpawningNewGenerationTag> (false);

    BufferFromEntity <NNInput2HiddenLayersWeightsBuffer> NNInput2HiddenLayersWeightsBuffer = GetBufferFromEntity <NNInput2HiddenLayersWeightsBuffer> (false);
    BufferFromEntity <NNHidden2OutputLayersWeightsBuffer> NNHidden2OutputLayersWeightsBuffer = GetBufferFromEntity <NNHidden2OutputLayersWeightsBuffer> (false);
    // BufferFromEntity <NNHiddenLayersNeuronsBiasBuffer> NNHiddenLayersNeuronsBiasBuffer = GetBufferFromEntity <NNHiddenLayersNeuronsBiasBuffer> ( false ) ;
    // ComponentDataFromEntity <NNScoreComponent> a_managerScore = GetComponentDataFromEntity <NNScoreComponent> ( true ) ;

    BufferFromEntity <NNINdexProbabilityBuffer> indexProbabilityBuffer = GetBufferFromEntity <NNINdexProbabilityBuffer> (false);

    // int i_validManagersCount = 0 ;
    // bool canCalculateCrossovers = false ;

    for (int i = 0; i < l_managerSharedData.Count; i++)
    {
        NNManagerSharedComponent mangerSharedComponent = l_managerSharedData [i];

        // Reconstruct the manager entity from the index/version stored in the
        // shared component.
        Entity nnManagerEntity = new Entity()
        {
            Index = mangerSharedComponent.i_entityIndex,
            Version = mangerSharedComponent.i_entityVersion
        };

        if (a_mangerIsSpawningNewGeneration.HasComponent(nnManagerEntity))
        {
            // Narrow both population queries to this manager's entities.
            group_parentPopulation.SetSharedComponentFilter(mangerSharedComponent);
            group_offspringPopulation.SetSharedComponentFilter(mangerSharedComponent);

            NativeArray <Entity> na_parentPopulationEntities = group_parentPopulation.ToEntityArray(Allocator.TempJob);
            NativeArray <Entity> na_offspringPopulationEntities = group_offspringPopulation.ToEntityArray(Allocator.TempJob);

            DynamicBuffer <NNINdexProbabilityBuffer> a_indexProbability = indexProbabilityBuffer [nnManagerEntity];

            NNScoreComponent managerScore = a_managerScore [nnManagerEntity];
            // int i_eliteScore = managerScore.i ;
            Debug.Log("Total score: " + managerScore.i + "; elite score: " + managerScore.i_elite);

            if (managerScore.i_elite <= 1)
            {
                // Elite score too low to weight parents — copy the last best
                // generation's DNA straight into the offspring.
                Dependency = new CopyLastBestGenerationDNAJob()
                {
                    na_parentPopulationEntities = na_parentPopulationEntities,
                    na_offspringPopulationEntities = na_offspringPopulationEntities,
                    // na_indexProbability = na_indexProbability,
                    input2HiddenLayersWeightsBuffer = NNInput2HiddenLayersWeightsBuffer,
                    hidden2OutputLayersWeightsBuffer = NNHidden2OutputLayersWeightsBuffer,
                    // hiddenLayersNeuronsBiasBuffer = NNHiddenLayersNeuronsBiasBuffer
                }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);
                Dependency.Complete();
            }
            else
            {
                // New score is fine.
                // Calculate index probability, to get best parents.
                // Each entity's index appears in the array as many times as its
                // score, e.g.
                // 0th entity with 0 points won't be in the array,
                // 1st entity with 2 points will be there 2 times,
                // nth entity with xth score will be there x times.
                NNManagerComponent manager = a_manager [nnManagerEntity];

                NativeMultiHashMap <int, EntityIndex> nmhm_parentEntitiesScore = new NativeMultiHashMap <int, EntityIndex> (na_parentPopulationEntities.Length, Allocator.TempJob);

                // Debug.Log ( "crossover parent score" ) ;
                // Gather per-parent scores into the multi-hash-map (score -> entity).
                Dependency = new CommonJobs.GetPopulationScoreJob( )
                {
                    canGetEachScore = false,
                    na_populationEntities = na_parentPopulationEntities,
                    a_brainScore = a_brainScore,
                    nmhm_populationEntitiesScore = nmhm_parentEntitiesScore.AsParallelWriter()
                }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);
                Dependency.Complete();

                NativeArray <int> na_parentSortedKeysWithDuplicates = nmhm_parentEntitiesScore.GetKeyArray(Allocator.TempJob);
                // This stores keys in order, but keeps the first unique keys at
                // the front of the array. Total array size matches the total
                // element count.
                na_parentSortedKeysWithDuplicates.Sort(); // Sorted.
                int i_uniqueKeyCount = na_parentSortedKeysWithDuplicates.Unique();

                // Elite count from the f_eliteSize percentage; minimum elite size must be met.
                int i_eltieCountTemp = (int)(na_parentSortedKeysWithDuplicates.Length * manager.f_eliteSize);
                int i_eltiesCount = i_eltieCountTemp > 0 ? i_eltieCountTemp : na_parentSortedKeysWithDuplicates.Length;

                if (na_parentSortedKeysWithDuplicates.Length == 0)
                {
                    Debug.LogError("Not enough elites for training. Please increase population, or elites %.");
                    // Release this manager's temp allocations before skipping it.
                    na_offspringPopulationEntities.Dispose();
                    na_parentPopulationEntities.Dispose();
                    nmhm_parentEntitiesScore.Dispose();
                    na_parentSortedKeysWithDuplicates.Dispose();
                    continue;
                }

                NativeArray <EntityIndex> na_elities = new NativeArray <EntityIndex> (i_eltiesCount, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);

                DynamicBuffer <NNINdexProbabilityBuffer> a_eliteIndexProbability = indexProbabilityBuffer [nnManagerEntity];
                int i_totalElitesScore = managerScore.i_elite;
                // One slot per elite score point; filled by CalculateIndexProbabilityOfPopulationJob.
                a_eliteIndexProbability.ResizeUninitialized(i_totalElitesScore);

                // Pick the top-scoring entities as elites.
                Dependency = new CommonJobs.GetElitesEntitiesJob()
                {
                    i_eltiesCount = i_eltiesCount,
                    na_elities = na_elities,
                    nmhm_entitiesScore = nmhm_parentEntitiesScore,
                    na_currentSortedKeysWithDuplicates = na_parentSortedKeysWithDuplicates
                }.Schedule();

                // Build the score-weighted index-probability table from the elites.
                Dependency = new CalculateIndexProbabilityOfPopulationJob()
                {
                    na_populationEntities = na_elities,
                    a_indexProbability = a_eliteIndexProbability,
                    a_brainScore = a_brainScore
                }.Schedule(Dependency);

                // Pre-generate one random int per parent for the crossover job.
                NativeArray <int> na_randomValues = new NativeArray <int> (na_parentPopulationEntities.Length, Allocator.TempJob, NativeArrayOptions.UninitializedMemory);
                random.NextInt2();
                Dependency = new RandomIntsJob()
                {
                    na_randomValues = na_randomValues,
                    random = random
                }.Schedule(Dependency);
                Dependency.Complete();

                // Debug.LogError ( "parent pop: " + na_parentPopulationEntities.Length + "; offspring pop: " + na_offspringPopulationEntities.Length ) ;
                // Cross parents' DNA (weighted by the probability table) into offspring.
                Dependency = new DNACrossOverJob()
                {
                    na_parentPopulationEntities = na_parentPopulationEntities,
                    na_offspringPopulationEntities = na_offspringPopulationEntities,
                    na_indexProbability = a_eliteIndexProbability.Reinterpret <int> ().AsNativeArray(),
                    input2HiddenLayersWeightsBuffer = NNInput2HiddenLayersWeightsBuffer,
                    hidden2OutputLayersWeightsBuffer = NNHidden2OutputLayersWeightsBuffer,
                    na_randomValues = na_randomValues,
                    random = random,
                    // i_eliteScore = i_eliteScore
                }.Schedule(na_parentPopulationEntities.Length, 256, Dependency);
                Dependency.Complete();

                na_randomValues.Dispose();
                na_elities.Dispose();
                nmhm_parentEntitiesScore.Dispose();
                na_parentSortedKeysWithDuplicates.Dispose();
            }

            // Generation handled; clear the spawning flag (applied at ECB playback).
            ecb.RemoveComponent <NNMangerIsSpawningNewGenerationTag> (nnManagerEntity);
            becb.AddJobHandleForProducer(Dependency);

            na_offspringPopulationEntities.Dispose();
            na_parentPopulationEntities.Dispose();
        }
    } // for

    // Flip spawning brains to "spawning complete" in parallel.
    Entities
        .WithName("GenerationSpawningIsCompleteJob")
        .WithAll <NNBrainTag, IsSpawningTag> ()
        .ForEach((Entity entity, int entityInQueryIndex) =>
        {
            ecbp.RemoveComponent <IsSpawningTag> (entityInQueryIndex, entity);
            ecbp.AddComponent <IsSpawningCompleteTag> (entityInQueryIndex, entity);
        }).ScheduleParallel();

    becb.AddJobHandleForProducer(Dependency);
}