// Deserialization constructor: rebuilds the OVA model from the binary model stream.
// Reads the output-combination formula and the per-class predictor count, then loads
// the sub-predictors and selects the matching Impl strategy.
private OneVersusAllModelParameters(IHostEnvironment env, ModelLoadContext ctx)
    : base(env, RegistrationName, ctx)
{
    // *** Binary format ***
    // byte: OutputFormula as byte
    // int: predictor count
    OutputFormula outputFormula = (OutputFormula)ctx.Reader.ReadByte();
    int len = ctx.Reader.ReadInt32();
    Host.CheckDecode(len > 0);

    switch (outputFormula)
    {
        case OutputFormula.Raw:
            {
                var predictors = new TScalarPredictor[len];
                LoadPredictors(Host, predictors, ctx);
                _impl = new ImplRaw(predictors);
                break;
            }
        case OutputFormula.ProbabilityNormalization:
            {
                var predictors = new IValueMapperDist[len];
                LoadPredictors(Host, predictors, ctx);
                _impl = new ImplDist(predictors);
                break;
            }
        case OutputFormula.Softmax:
            {
                var predictors = new TScalarPredictor[len];
                LoadPredictors(Host, predictors, ctx);
                _impl = new ImplSoftmax(predictors);
                break;
            }
        default:
            // BUG FIX: an unrecognized formula byte previously fell through all
            // branches, leaving _impl null and crashing below with a
            // NullReferenceException. Treat it as a corrupt model stream instead.
            Host.CheckDecode(false);
            break;
    }

    DistType = new VectorDataViewType(NumberDataViewType.Single, _impl.Predictors.Length);
}
// Factory for an OVA model over the given per-class predictors.
// Softmax is honored unconditionally; probability normalization is used only when
// the underlying predictors can actually emit Single-typed probabilities —
// otherwise we warn and fall back to raw scores.
internal static OneVersusAllModelParameters Create(IHost host, OutputFormula outputFormula, TScalarPredictor[] predictors)
{
    ImplBase impl;
    using (var ch = host.Start("Creating OVA predictor"))
    {
        if (outputFormula == OutputFormula.Softmax)
        {
            impl = new ImplSoftmax(predictors);
            return new OneVersusAllModelParameters(host, impl);
        }

        bool wantProbabilities = outputFormula == OutputFormula.ProbabilityNormalization;

        // Probe the first predictor: probability output requires an IValueMapperDist
        // whose score and distribution types are both Single.
        var firstDist = predictors[0] as IValueMapperDist;
        bool canProduceProbabilities =
            firstDist != null
            && firstDist.OutputType == NumberDataViewType.Single
            && firstDist.DistType == NumberDataViewType.Single;

        if (wantProbabilities && !canProduceProbabilities)
        {
            ch.Warning($"{nameof(OneVersusAllTrainer.Options.UseProbabilities)} specified with {nameof(OneVersusAllTrainer.Options.PredictorType)} that can't produce probabilities.");
        }

        if (wantProbabilities && canProduceProbabilities)
        {
            var dists = new IValueMapperDist[predictors.Length];
            for (int i = 0; i < predictors.Length; ++i)
                dists[i] = (IValueMapperDist)predictors[i];
            impl = new ImplDist(dists);
        }
        else
        {
            // Either probabilities weren't requested or the predictors can't supply them.
            impl = new ImplRaw(predictors);
        }
    }
    return new OneVersusAllModelParameters(host, impl);
}
// Rebuilds m_energyEffectsPerOutput from scratch: for every active device, one
// EnergyEffect per type-level energy effect, bucketed by (output, formula) key.
// Also records each device's (and its slot's) most recently built effect.
private void CollectEnergyEffects()
{
    // Reset every bucket before repopulating.
    foreach (var bucket in m_energyEffectsPerOutput)
        bucket.Value.Clear();

    foreach (var device in m_devices.items)
    {
        if (!device.active)
            continue;

        // Combined efficiency multiplier contributed by this device's active effects.
        float effectMultiplier = 1;
        foreach (var deviceEffect in device.deviceEffects)
        {
            if (!deviceEffect.source.active)
                continue;
            effectMultiplier *= deviceEffect.efficiencyMultiplier;
        }

        var slotAccumulators = device.slot != null ? device.slot.accumulators : null;
        var targetAccumulators = device.targetForEffects != null ? device.targetForEffects.accumulators : null;
        var typeEffects = device.type.energyEffects;
        var effectiveSize = (device.grid != null ? device.grid.area : 1) * device.type.size;

        for (int e = 0; e < typeEffects.Count; ++e)
        {
            var typeEffect = typeEffects[e];
            var key = new OutputFormula { output = typeEffect.outputTo, formula = typeEffect.energyFormula };

            List<EnergyEffect> bucket;
            if (!m_energyEffectsPerOutput.TryGetValue(key, out bucket))
            {
                bucket = new List<EnergyEffect>();
                m_energyEffectsPerOutput[key] = bucket;
            }

            // Prefer the effect-target's accumulator when allowed; fall back to the slot's.
            EnergyAccumulator accumulator = null;
            if (typeEffect.tryUseTargetAccumulators && targetAccumulators != null)
                accumulator = targetAccumulators.Find((a) => a.output == key.output);
            if (accumulator == null && slotAccumulators != null)
                accumulator = slotAccumulators.Find((a) => a.output == key.output);

            var energyEffect = new EnergyEffect
            {
                size = effectiveSize,
                efficiency = typeEffect.efficiency * typeEffect.magnitude,
                deviceEffectMultiplier = effectMultiplier,
                accumulator = accumulator
            };

            // Last effect built wins; mirrored onto the slot when one exists.
            device.lastEnergyEffect = energyEffect;
            if (device.slot != null)
                device.slot.lastEnergyEffect = energyEffect;

            bucket.Add(energyEffect);
        }
    }
}