/// <summary>
/// One-point crossover: swaps the leading segment [0, cutPoint) of this
/// vector with the corresponding segment of <paramref name="rhs"/>.
/// </summary>
/// <param name="rhs">The other parent; must be a <see cref="ContinuousVector"/>.</param>
public override void OnePointCrossover(MOOSolution rhs)
{
    // A single-element vector has no meaningful cut point.
    if (mData.Count == 1)
    {
        return;
    }

    // For longer vectors pick a random cut point; otherwise always cut at 1.
    int cutPoint = mData.Count > 2 ? DistributionModel.NextInt(mData.Count) : 1;

    ContinuousVector other = (ContinuousVector)rhs;
    for (int d = 0; d < cutPoint; ++d)
    {
        double swapped = this[d];
        this[d] = other[d];
        other[d] = swapped;
    }
}
/// <summary>
/// Builds a suggested even distribution of students across departments,
/// or redirects to the final distribution if one already exists.
/// </summary>
public IActionResult Distribution()
{
    // A final distribution already exists — show it instead of recomputing.
    if (_dbContext.FinalDistribution.Count() != 0)
    {
        return RedirectToAction("FinalDistribution");
    }

    DistributionModel distribution = new DistributionModel();
    distribution.StudentNum = _dbContext.InfoTable.Where(i => i.RoleId == 2).Count();
    distribution.StudentRegistered = _dbContext.StuSelection.Count();

    var departments = _dbContext.Department.Select(s => new { s.Dname }).ToList();
    // Hoisted: the original re-evaluated Count() on every loop iteration.
    int departmentCount = departments.Count;

    // Guard: an empty Department table previously threw DivideByZeroException.
    if (departmentCount == 0)
    {
        return View(distribution);
    }

    // Even split; the last department absorbs the integer-division remainder.
    int perDepartment = distribution.StudentNum / departmentCount;
    for (int i = 0; i < departmentCount; i++)
    {
        int suggested = i != departmentCount - 1
            ? perDepartment
            : distribution.StudentNum - perDepartment * (departmentCount - 1);
        distribution.Department.Add(new DepartmentSuggest { Name = departments[i].Dname, number = suggested });
    }

    return View(distribution);
}
/// <summary>
/// Fetches a single distribution record by its id (获取一条配送数据).
/// </summary>
/// <param name="pid">Primary key of the Distribution row.</param>
/// <returns>The populated model; fields stay at defaults when no row matches.</returns>
public DistributionModel GetOneDistribution(int pid)
{
    // Make sure the shared connection is open before issuing the query.
    if (con.State == System.Data.ConnectionState.Closed)
    {
        con.Open();
    }

    DistributionModel u = new DistributionModel();
    try
    {
        using (SqlCommand cmd = new SqlCommand())
        {
            cmd.Connection = con;
            // Parameterized query instead of string concatenation (the original
            // built SQL by appending pid directly).
            cmd.CommandText = "select * from Distribution where DistributionId=@pid";
            cmd.Parameters.AddWithValue("@pid", pid);
            using (var read = cmd.ExecuteReader())
            {
                // At most one row is expected; a loop keeps the original's
                // behavior of taking the last row if several match.
                while (read.Read())
                {
                    u.ShippingOrder = read["ShippingOrder"].ToString();
                    u.StaffId = Convert.ToInt32(read["StaffId"]);
                    u.WareHouseId = Convert.ToInt32(read["WareHouseId"]);
                    u.PickTime = Convert.ToDateTime(read["PickTime"]);
                    u.SendTime = Convert.ToDateTime(read["SendTime"]);
                    u.SendType = read["SendType"].ToString();
                }
                // Removed a stray extra read.Read() that followed the loop —
                // it always returned false and had no effect.
            }
        }
    }
    finally
    {
        // Close in finally so an ExecuteReader failure no longer leaks
        // an open connection.
        con.Close();
    }
    return u;
}
/// <summary>
/// Roulette-wheel selection of an operator with the requested arity,
/// excluding <paramref name="current_operator"/>. Retries up to 10 spins;
/// falls back to the first eligible operator seen (or null if none exists).
/// </summary>
/// <param name="parameter_count">Required operator arity.</param>
/// <param name="current_operator">Operator to exclude from selection.</param>
public TGPOperator FindRandomOperator(int parameter_count, TGPOperator current_operator = null)
{
    TGPOperator fallback = null;

    for (int attempt = 0; attempt < 10; attempt++)
    {
        double threshold = mWeightSum * DistributionModel.GetUniform();
        double runningSum = 0;

        foreach (KeyValuePair<TGPOperator, double> entry in mOperators)
        {
            // Remember the first eligible operator as a fallback answer.
            if (fallback == null && entry.Key.Arity == parameter_count && entry.Key != current_operator)
            {
                fallback = entry.Key;
            }

            runningSum += entry.Value;
            if (runningSum >= threshold)
            {
                if (entry.Key != current_operator && entry.Key.Arity == parameter_count)
                {
                    return entry.Key;
                }
                // The wheel landed on an ineligible operator; spin again.
                break;
            }
        }
    }

    return fallback;
}
/// <summary>
/// Returns the age distribution per item as rough percentages,
/// ignoring "NULL" buckets.
/// </summary>
public IHttpActionResult AgeDistr()
{
    var fetchDate = this.GetEndDate(DataType.AGE);
    var manager = this.GetCAmanager();
    var data = manager.GetAgeData(fetchDate);
    var model = new DistributionListModel();
    if (data != null)
    {
        foreach (var item in data)
        {
            var media = new DistributionModel { Name = item.Name };
            var sum = item.Details.Where(i => i.Name != "NULL").Sum(i => i.Value);
            // Guard: sum == 0 previously produced Infinity from the division,
            // which the int cast turned into a garbage value.
            if (sum != 0)
            {
                foreach (var detail in item.Details)
                {
                    if (detail.Name != "NULL")
                    {
                        var result = detail.Value * 100 / (double)sum;
                        // +1 before truncation preserved from the original —
                        // NOTE(review): looks like crude round-up; confirm intent.
                        media.Values[detail.Name] = (int)(result + 1);
                    }
                }
            }
            model.Distributions.Add(media);
        }
    }
    return this.Ok(model);
}
/// <summary>
/// Variable-length population initialization: each program's length is drawn
/// uniformly from [m_iInitialMinProgLength, m_iInitialMaxProgLength].
/// The method is described in chapter 7 section 7.6 page 164 of
/// Linear Genetic Programming (2004).
/// </summary>
/// <param name="pop">The population to fill with freshly created programs.</param>
public override void Initialize(LGPPop pop)
{
    int iPopulationSize = pop.PopulationSize;

    for (int i = 0; i < iPopulationSize; i++)
    {
        // NextInt(max - min + 1) yields an offset in [0, max - min],
        // so iProgLength is uniform over [min, max] inclusive.
        int iProgLength = m_iInitialMinProgLength + DistributionModel.NextInt(m_iInitialMaxProgLength - m_iInitialMinProgLength + 1);

        LGPProgram lgp = pop.CreateProgram(iProgLength, pop.Environment);
        pop.AddProgram(lgp);

        // Invariant checks. The original threw ArgumentNullException here,
        // which was the wrong type (nothing is null) and carried no message;
        // a violated length bound is an invalid program state.
        if (lgp.InstructionCount < m_iInitialMinProgLength)
        {
            throw new InvalidOperationException($"Program length {lgp.InstructionCount} fell below the minimum {m_iInitialMinProgLength}.");
        }
        if (lgp.InstructionCount > m_iInitialMaxProgLength)
        {
            throw new InvalidOperationException($"Program length {lgp.InstructionCount} exceeded the maximum {m_iInitialMaxProgLength}.");
        }
    }
}
/// <summary>
/// Returns visit counts per location for each event as a distribution list.
/// </summary>
public IHttpActionResult RegionDistr()
{
    var endDate = this.GetEndDate(DataType.LOCATION);
    var locationData = this.GetCAmanager().GetLocationDistr(endDate);

    var model = new DistributionListModel();
    if (locationData != null)
    {
        foreach (var location in locationData)
        {
            var entry = new DistributionModel { Name = location.Name };
            foreach (var detail in location.Details)
            {
                entry.Values[detail.Name] = detail.VisitCount;
            }
            model.Distributions.Add(entry);
        }
    }

    return this.Ok(model);
}
/// <summary>
/// Returns sentiment values keyed by date for each event as a distribution list.
/// </summary>
public IHttpActionResult SentiDistr()
{
    var endDate = this.GetEndDate(DataType.SENTIMENTS);
    var sentimentData = this.GetCAmanager().GetSentimentsData(endDate);

    var model = new DistributionListModel();
    if (sentimentData != null)
    {
        foreach (var sentiment in sentimentData)
        {
            var entry = new DistributionModel { Name = sentiment.Name };
            foreach (var detail in sentiment.Details)
            {
                entry.Values[detail.Date] = detail.Value;
            }
            model.Distributions.Add(entry);
        }
    }

    return this.Ok(model);
}
/// <summary>
/// Builds a hidden Markov model with the given number of states, storing the
/// start-probability vector and the transition matrix in log space.
/// </summary>
/// <param name="state_count">Number of hidden states.</param>
/// <param name="emissions">Prototype emission distribution; cloned once per state.</param>
public HiddenMarkovModel(int state_count, DistributionModel emissions)
{
    mStateCount = state_count;
    mLogTransitionMatrix = new double[mStateCount, mStateCount];
    mLogProbabilityVector = new double[mStateCount];
    // Start distribution: all mass on state 0, so after the log pass below
    // entry 0 becomes log(1) = 0 and every other entry becomes log(0) = -Infinity.
    mLogProbabilityVector[0] = 1.0;
    for (int i = 0; i < mStateCount; ++i)
    {
        mLogProbabilityVector[i] = System.Math.Log(mLogProbabilityVector[i]);
        for (int j = 0; j < mStateCount; ++j)
        {
            // Uniform transitions, stored as log(1/N).
            mLogTransitionMatrix[i, j] = System.Math.Log(1.0 / mStateCount);
        }
    }
    // Each state gets its own independent clone of the prototype emission model.
    mEmissionModels = new DistributionModel[mStateCount];
    for (int i = 0; i < mStateCount; ++i)
    {
        mEmissionModels[i] = emissions.Clone();
    }
    // Multivariate emissions additionally fix the observation dimension.
    if (emissions is MultivariateDistributionModel)
    {
        mMultivariate = true;
        mDimension = ((MultivariateDistributionModel)mEmissionModels[0]).Dimension;
    }
}
/// <summary>
/// Roulette-wheel selection of any operator other than
/// <paramref name="current_operator"/>. Retries up to 10 spins; if every spin
/// lands on the excluded operator, returns it anyway.
/// </summary>
public TGPOperator FindRandomOperator(TGPOperator current_operator = null)
{
    for (int attempt = 0; attempt < 10; attempt++)
    {
        double threshold = mWeightSum * DistributionModel.GetUniform();
        double runningSum = 0;

        foreach (KeyValuePair<TGPOperator, double> entry in mOperators)
        {
            runningSum += entry.Value;
            if (runningSum < threshold)
            {
                continue;
            }
            if (entry.Key != current_operator)
            {
                return entry.Key;
            }
            // Landed on the excluded operator; spin again.
            break;
        }
    }
    return current_operator;
}
/// <summary>
/// Population initialization following the "PTC1" method described in
/// "Sean Luke. Two fast tree-creation algorithms for genetic programming.
/// IEEE Transactions in Evolutionary Computation, 4(3), 2000b."
/// Recursively generates the children of <paramref name="parent_node"/>.
/// </summary>
/// <param name="parent_node">The node for which child nodes are generated.</param>
/// <param name="p">Expected probability of choosing an operator over a terminal.</param>
/// <param name="allowableDepth">Remaining allowed tree depth; 0 forces terminals.</param>
private void PTC1(TGPNode parent_node, double p, int allowableDepth)
{
    int arity = parent_node.Arity;
    for (int childIndex = 0; childIndex < arity; childIndex++)
    {
        // At depth 0 the RNG is not consulted: the child must be a terminal.
        TGPPrimitive primitive;
        if (allowableDepth != 0 && DistributionModel.GetUniform() <= p)
        {
            primitive = mOperatorSet.FindRandomOperator();
        }
        else
        {
            primitive = FindRandomTerminal();
        }

        TGPNode child = parent_node.CreateChild(primitive);
        if (!primitive.IsTerminal)
        {
            PTC1(child, p, allowableDepth - 1);
        }
    }
}
/// <summary>
/// Method that creates a GP tree with a maximum tree depth.
/// </summary>
/// <param name="allowableDepth">The maximum tree depth (for RandomBranch it is reused as the target tree size).</param>
/// <param name="method">The name of the method used to create the GP tree.</param>
/// <param name="tag">Additional information used to create the GP tree, if any (for PTC1: the expected tree size).</param>
public void CreateWithDepth(int allowableDepth, string method, object tag = null)
{
    // Population Initialization method following the "RandomBranch" method described in
    // "Kumar Chellapilla. Evolving computer programs without subtree crossover.
    // IEEE Transactions on Evolutionary Computation, 1(3):209–216, September 1997."
    if (method == INITIALIZATION_METHOD_RANDOMBRANCH)
    {
        int s = allowableDepth; //tree size
        // Pick a non-terminal whose arity fits within the remaining size budget.
        TGPOperator non_terminal = FindRandomOperatorWithArityLessThan(s);
        if (non_terminal == null)
        {
            // No operator fits — the tree degenerates to a single terminal.
            mRootNode = new TGPNode(FindRandomTerminal());
        }
        else
        {
            mRootNode = new TGPNode(non_terminal);
            int b_n = non_terminal.Arity;
            // Divide the remaining size budget evenly among the children.
            s = (int)System.Math.Floor((double)s / b_n);
            RandomBranch(mRootNode, s);
        }
        CalcLength();
        CalcDepth();
    }
    // Population Initialization method following the "PTC1" method described in
    // "Sean Luke. Two fast tree-creation algorithms for genetic programming.
    // IEEE Transactions in Evolutionary Computation, 4(3), 2000b."
    else if (method == INITIALIZATION_METHOD_PTC1)
    {
        int expectedTreeSize = Convert.ToInt32(tag);
        // Average operator arity over the whole operator set.
        int b_n_sum = 0;
        for (int i = 0; i < mOperatorSet.OperatorCount; ++i)
        {
            b_n_sum += mOperatorSet.FindOperatorByIndex(i).Arity;
        }
        // PTC1's operator-choice probability derived from the expected tree size.
        double p = (1 - 1.0 / expectedTreeSize) / ((double)b_n_sum / mOperatorSet.OperatorCount);

        // Root is chosen with the same operator/terminal coin flip used below it.
        TGPPrimitive data = null;
        if (DistributionModel.GetUniform() <= p)
        {
            data = mOperatorSet.FindRandomOperator();
        }
        else
        {
            data = FindRandomTerminal();
        }
        mRootNode = new TGPNode(data);
        PTC1(mRootNode, p, allowableDepth - 1);
        CalcLength();
        CalcDepth();
    }
    else // handle full and grow method
    {
        mRootNode = new TGPNode(FindRandomPrimitive(allowableDepth, method));
        CreateWithDepth(mRootNode, allowableDepth - 1, method);
        CalcLength();
        CalcDepth();
    }
}
// An invalid distribution payload sent to PUT must be rejected with 400.
public async Task Update_IncorrectDistributionModel_BadRequest(DistributionModel distribution)
{
    var response = await client.PutAsync(DistributionsControllerPath, ConvertToStringContent(distribution));

    Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
// Test fixture setup: two source accounts (debit card and cash), two expense
// flows, and a distribution model wiring them together.
public DistributionModelTests()
{
    // Source accounts with available balances to distribute from.
    DebitCardAccount = new AccountModel { Id = 1, AccountType = AccountType.DebitCard, AvailBalance = 15000 };
    CashAccount = new AccountModel { Id = 2, AccountType = AccountType.Cash, AvailBalance = 1200 };
    // Target expense flows; FoodFlow starts with a small existing balance.
    FoodFlow = new ExpenseFlowModel { Id = 1, Balance = 100, };
    TechFlow = new ExpenseFlowModel { Id = 2, Balance = 0 };
    // Both accounts participate in distribution.
    Debit = new DistributionAccount { Account = DebitCardAccount, UseInDistribution = true };
    Cash = new DistributionAccount { Account = CashAccount, UseInDistribution = true };
    // Food is a regular expense item; Tech accumulates toward a goal.
    Food = new DistributionItem { Flow = FoodFlow, Mode = DistributionMode.RegularExpenses, Amount = 10000 };
    Tech = new DistributionItem { Flow = TechFlow, Mode = DistributionMode.Accumulation, Amount = 1000 };
    Model = new DistributionModel
    {
        Accounts = new List<DistributionAccount> { Debit, Cash },
        Items = new List<DistributionItem> { Food, Tech }
    };
}
/// <summary>
/// Loads the distribution model for the current user: eligible accounts
/// (not deleted, not credit cards, positive balance) and all expense flows,
/// then applies per-account and per-flow settings.
/// </summary>
public async Task <DistributionModel> Load()
{
    var ownerId = _currentSession.UserId;
    var model = new DistributionModel
    {
        // Accounts eligible as distribution sources.
        Accounts = await _repository.GetQuery <Account>()
            .Where(x => !x.IsDeleted && x.OwnerId == ownerId && x.AccountType != AccountType.CreditCard && x.AvailBalance > 0)
            .Select(x => new DistributionAccount
            {
                Account = x.ToModel(),
                UseInDistribution = true,
                StartBalance = x.AvailBalance,
                Distributed = 0,
            })
            .ToListAsync(),
        // Every non-deleted expense flow becomes a distribution target.
        Items = await _repository.GetQuery <ExpenseFlow>()
            .Where(x => !x.IsDeleted && x.OwnerId == ownerId)
            .Select(x => new DistributionItem
            {
                Flow = x.ToModel(),
                Mode = DistributionMode.RegularExpenses,
                StartBalance = x.Balance,
                Amount = 0,
            })
            .ToListAsync(),
        FundDistributed = 0,
    };
    // Fetch the per-account and per-flow settings in bulk, keyed by id.
    var accountIds = model.Accounts.Select(x => x.Account.Id).ToList();
    var flowIds = model.Items.Select(x => x.Flow.Id).ToList();
    var accountSettings = (await _repository.GetQuery <AccountFlowSettings>()
        .Where(x => accountIds.Contains(x.AccountId))
        .ToListAsync()).ToDictionary(x => x.AccountId);
    var flowSettings = (await _repository.GetQuery <ExpenseFlowSettings>()
        .Where(x => flowIds.Contains(x.ExpenseFlowId))
        .ToListAsync()).ToDictionary(x => x.ExpenseFlowId);
    // Missing settings default to participating in distribution.
    foreach (var account in model.Accounts)
    {
        var settings = accountSettings.GetOrDefault(account.Account.Id);
        account.UseInDistribution = settings?.CanFlow ?? true;
    }
    // Missing settings default to regular-expense mode; accumulation items
    // use the flow's current balance as the amount.
    foreach (var item in model.Items)
    {
        var settings = flowSettings.GetOrDefault(item.Flow.Id);
        item.Mode = (settings?.IsRegularExpenses ?? true)
            ? DistributionMode.RegularExpenses
            : DistributionMode.Accumulation;
        item.Amount = item.Mode == DistributionMode.RegularExpenses ? settings?.Amount ?? 0 : item.Flow.Balance;
    }
    return model;
}
/// <summary>
/// Return the confidence interval of the difference between two classes in terms
/// of the proportion of SUCCESS in the population at a given confidence level.
///
/// Note that each class should be a categorical variable with two levels: {SUCCESS, FAILURE}.
/// Note that class 1 and class 2 are not paired or dependent.
/// </summary>
/// <param name="p_hat1">Point estimate of the proportion of SUCCESS in class 1.</param>
/// <param name="p_hat2">Point estimate of the proportion of SUCCESS in class 2.</param>
/// <param name="n1">Sample size in class 1.</param>
/// <param name="n2">Sample size in class 2.</param>
/// <param name="confidence_level">The given confidence level.</param>
/// <param name="useSimulation">Whether simulation should be used instead of the normal approximation.</param>
/// <param name="simulationCount">Number of simulated samples when simulating.</param>
/// <returns>The confidence interval {lower, upper} of the difference of proportions.</returns>
public static double[] GetConfidenceInterval(double p_hat1, double p_hat2, int n1, int n2, double confidence_level, bool useSimulation = false, int simulationCount = 500)
{
    bool shouldUseSimulation = useSimulation;
    double p1 = (1 - confidence_level) / 2; // lower tail probability
    double p2 = 1 - p1;                     // upper tail probability

    // Fall back to simulation when the expected success/failure counts are too
    // small for the normal approximation (np >= 10 rule of thumb).
    if (!shouldUseSimulation && (p_hat1 * n1 < 10 || (1 - p_hat1) * n1 < 10 || p_hat2 * n2 < 10 || (1 - p_hat2) * n2 < 10))
    {
        shouldUseSimulation = true;
    }

    if (shouldUseSimulation)
    {
        double[] sim_sample1 = new double[simulationCount]; // approximately normal by the CLT for proportions
        double[] sim_sample2 = new double[simulationCount]; // approximately normal by the CLT for proportions
        // NOTE(review): degenerate p_hat values (exactly 0 or 1) make 10/p_hat
        // infinite here — callers are assumed to pass 0 < p_hat < 1; confirm.
        int simulationSampleSize = System.Math.Max((int)System.Math.Max(10 / p_hat1, 10 / (1 - p_hat1)) * 2, (int)System.Math.Max(10 / p_hat2, 10 / (1 - p_hat2)) * 2);
        for (int i = 0; i < simulationCount; ++i)
        {
            int successCount1 = 0;
            int successCount2 = 0;
            for (int j = 0; j < simulationSampleSize; ++j)
            {
                if (DistributionModel.GetUniform() <= p_hat1)
                {
                    successCount1++;
                }
                if (DistributionModel.GetUniform() <= p_hat2)
                {
                    successCount2++;
                }
            }
            sim_sample1[i] = (double)(successCount1) / simulationSampleSize;
            sim_sample2[i] = (double)(successCount2) / simulationSampleSize;
        }
        double sim_mu1 = Mean.GetMean(sim_sample1);
        double sim_sigma1 = StdDev.GetStdDev(sim_sample1, sim_mu1);
        double sim_mu2 = Mean.GetMean(sim_sample2);
        double sim_sigma2 = StdDev.GetStdDev(sim_sample2, sim_mu2);
        double sim_mud = sim_mu1 - sim_mu2;
        double sim_SE = System.Math.Sqrt(sim_sigma1 * sim_sigma1 + sim_sigma2 * sim_sigma2);
        // BUG FIX: the lower bound previously called Gaussian.GetPercentile(p1)
        // while the other three bounds in this method use GetQuantile; use
        // GetQuantile consistently for both bounds.
        return new double[] { sim_mud + Gaussian.GetQuantile(p1) * sim_SE, sim_mud + Gaussian.GetQuantile(p2) * sim_SE };
    }
    else
    {
        // Normal approximation: SE of the difference of independent proportions.
        double SE = System.Math.Sqrt((p_hat1 * (1 - p_hat1) / n1 + (p_hat2 * (1 - p_hat2)) / n2));
        double pd_hat = p_hat1 - p_hat2;
        return new double[] { pd_hat + Gaussian.GetQuantile(p1) * SE, pd_hat + Gaussian.GetQuantile(p2) * SE };
    }
}
// Parametrized test: with user lookup forced to fail, the validator must
// report an error on the given property for the given rule set.
public void TestValidate_InvalidCommonData_ShouldHaveError(DistributionModel distributionModel, string propertyName, string ruleSet)
{
    // Force IsValidUsers to fail for any input so user-dependent rules trip.
    lighthouseServiceMock.Setup(x => x.IsValidUsers(It.IsAny <string[]>())).ReturnsAsync(false);

    var result = validator.TestValidate(distributionModel, ruleSet);

    result.ShouldHaveValidationErrorFor(propertyName);
}
// View constructor: runs the distribution step and shows only the
// completed distributions in the list.
public SuccessDis()
{
    InitializeComponent();
    model = new DistributionModel();
    // NOTE(review): distdone() presumably finalizes pending distributions
    // before the list is queried — confirm against DistributionModel.
    model.distdone();
    mydisvm = new DisViewModel(this);
    // Only completed (isDone) distributions are displayed.
    listView.ItemsSource = mydisvm.GetAllDistribution().Where(m => m.isDone == true);
}
/// <summary>
/// Looks up the stored distribution whose name and (sorted) owner sids match
/// the submitted model; returns null when no match exists.
/// </summary>
private DistributionReadModel GetCreatedDistribution(DistributionModel distribution)
{
    // BUG FIX: the original called .Equals on two IOrderedEnumerable instances,
    // which is reference equality and therefore always false — SequenceEqual
    // compares the ordered elements. Lambda parameters are also renamed to
    // remove the nested `x` shadowing.
    return FakeDataBase
        .Distributions
        .FirstOrDefault(d => d.Name == distribution?.Name &&
            d.Owners.Select(o => o.Sid).OrderBy(sid => sid)
                .SequenceEqual(distribution.OwnersSids.OrderBy(sid => sid)));
}
// Renders the index view with an empty distribution model.
public IActionResult Index()
{
    return View(new DistributionModel());
}
/// <summary>
/// Return the distribution of a*x + b*y for correlated random variables x and y.
/// The result reuses x's distribution family (via Clone) with mean and standard
/// deviation of the linear combination.
/// </summary>
/// <param name="x">Random variable x.</param>
/// <param name="y">Random variable y.</param>
/// <param name="x_coefficient">a, the coefficient of x.
/// NOTE(review): declared as int while y_coefficient is double — looks like an
/// oversight; confirm before relying on fractional x coefficients.</param>
/// <param name="y_coefficient">b, the coefficient of y.</param>
/// <param name="correlation">Correlation between x and y.</param>
/// <returns>The distribution of a*x + b*y.</returns>
public static DistributionModel Sum(DistributionModel x, DistributionModel y, int x_coefficient, double y_coefficient, double correlation)
{
    DistributionModel sum = x.Clone();
    // E[aX + bY] = a*E[X] + b*E[Y]
    sum.Mean = x_coefficient * x.Mean + y_coefficient * y.Mean;
    // Var(aX + bY) = a^2 Var(X) + b^2 Var(Y) + 2ab*Cov(X,Y), with
    // Cov(X,Y) = correlation * sd(X) * sd(Y).
    sum.StdDev = System.Math.Sqrt(System.Math.Pow(x_coefficient * x.StdDev, 2) + System.Math.Pow(y_coefficient * y.StdDev, 2) + 2 * correlation * x_coefficient * x.StdDev * y_coefficient * y.StdDev);
    return sum;
}
/// <summary>
/// Updates an existing distribution record (修改).
/// </summary>
/// <param name="model">The distribution to persist.</param>
/// <returns>True when at least one row was affected.</returns>
public async Task <bool> UpdateAsync(DistributionModel model)
{
    using (var conn = MySqlHelper.GetConnection())
    {
        return await conn.UpdateAsync(model) > 0;
    }
}
/// <summary>
/// Returns a uniformly distributed random double, delegating to the shared
/// <c>DistributionModel.GetUniform()</c> source. (Dead commented-out
/// Random-based implementation removed.)
/// </summary>
public static double NextDouble()
{
    return DistributionModel.GetUniform();
}
/// <summary>
/// Method that traverses the subtree and randomly returns a node from one of
/// the leaf or function nodes.
/// </summary>
/// <param name="pRoot">The root node of a subtree.</param>
/// <param name="node_depth">In: depth of pRoot. Out: the depth at which the selected node sits.</param>
/// <returns>The randomly selected node.</returns>
public TGPNode FindRandomNodeByTraversing(TGPNode pRoot, ref int node_depth)
{
    int child_count = pRoot.Arity;
    int current_node_depth = node_depth;
    // A leaf has no alternatives: return it directly.
    if (child_count == 0)
    {
        return(pRoot);
    }
    TGPNode pSelectedGene = null;
    int selected_child_node_depth = node_depth;
    for (int iChild = 0; iChild != child_count; iChild++)
    {
        TGPNode pChild = pRoot.FindChildByIndex(iChild);
        int child_node_depth = node_depth + 1;
        // Recurse: each child proposes its own candidate node and depth.
        TGPNode pChildPickedGene = FindRandomNodeByTraversing(pChild, ref child_node_depth);
        if (pChildPickedGene != null)
        {
            if (pSelectedGene == null)
            {
                // First candidate is accepted unconditionally.
                selected_child_node_depth = child_node_depth;
                pSelectedGene = pChildPickedGene;
            }
            else
            {
                // Later candidates replace the current pick with probability
                // 0.9 for function nodes and 0.1 for terminals, biasing the
                // selection toward internal nodes.
                double selection_prob = pChildPickedGene.IsTerminal ? 0.1 : 0.9;
                if (DistributionModel.GetUniform() < selection_prob)
                {
                    selected_child_node_depth = child_node_depth;
                    pSelectedGene = pChildPickedGene;
                }
            }
        }
    }
    if (pSelectedGene == null)
    {
        // No child produced a candidate: fall back to this node.
        node_depth = current_node_depth;
        pSelectedGene = pRoot;
    }
    else
    {
        node_depth = selected_child_node_depth;
        // With probability 0.5, prefer this node over the child's candidate.
        if (DistributionModel.GetUniform() < 0.5)
        {
            node_depth = current_node_depth;
            pSelectedGene = pRoot;
        }
    }
    return(pSelectedGene);
}
/// <summary>
/// Inserts a new distribution record (添加).
/// </summary>
/// <param name="model">The distribution to insert.</param>
/// <returns>True when the insert produced a non-empty key.</returns>
public async Task <bool> AddAsync(DistributionModel model)
{
    using (var conn = MySqlHelper.GetConnection())
    {
        string insertedKey = await conn.InsertAsync <string, DistributionModel>(model);
        return !string.IsNullOrWhiteSpace(insertedKey);
    }
}
/// <summary>
/// Fills the chromosome with iMaximumDepthForCreation random codons, each
/// drawn uniformly below CodonGeneUpperBound.
/// </summary>
internal void CreateRandomly(int iMaximumDepthForCreation)
{
    int codonUpperBound = CodonGeneUpperBound;
    for (int codonIndex = 0; codonIndex < iMaximumDepthForCreation; ++codonIndex)
    {
        mChromosome.Add(DistributionModel.NextInt(codonUpperBound));
    }
}
/// <summary>
/// Builds and runs a multi-server queue simulation: exponential interarrival
/// times, Erlang-k service times, s servers, first-free-server assignment.
/// Fills m_interarrival_times, m_service_times, m_waiting_times and
/// m_server_free_times.
/// </summary>
public override void Build()
{
    // Arrivals: exponential with the configured mean interarrival time.
    m_interarrival_time_distribution = new Exponential();
    m_interarrival_time_distribution.Mean = mean_interarrival_time;
    //Console.WriteLine("k: {0} lambda: {1}", service_time_erlang_k, service_time_erlang_k / mean_service_time);
    // Service: Erlang(k, k/mean) so the mean service time is mean_service_time.
    m_service_time_distribution = new Erlang(service_time_erlang_k, service_time_erlang_k / mean_service_time);
    // Pre-draw all interarrival and service times; waiting times start at 0.
    for (int service_index = 0; service_index < simulated_service_count; ++service_index)
    {
        m_interarrival_times.Add(m_interarrival_time_distribution.Next());
        m_service_times.Add(m_service_time_distribution.Next());
        //Console.WriteLine("interarrival time: {0} service_time: {1}", m_interarrival_times[service_index], m_service_times[service_index]);
        m_waiting_times.Add(0);
    }
    // All s servers start free at time 0.
    for (int server_index = 0; server_index < s; ++server_index)
    {
        m_server_free_times.Add(0);
    }
    double current_time = 0;
    for (int service_index = 0; service_index < simulated_service_count; ++service_index)
    {
        // Advance the clock to this customer's arrival.
        current_time += m_interarrival_times[service_index];
        bool is_waiting_required = true;
        // Serve immediately on the first server already free at arrival time.
        for (int server_index = 0; server_index < s; ++server_index)
        {
            if (m_server_free_times[server_index] <= current_time)
            {
                m_server_free_times[server_index] = current_time + m_service_times[service_index];
                m_waiting_times[service_index] = 0;
                is_waiting_required = false;
                break;
            }
        }
        if (is_waiting_required)
        {
            // All servers busy: wait for the earliest-free server.
            double min_free_time = double.MaxValue;
            int earliest_free_server = 0;
            for (int server_index = 0; server_index < s; ++server_index)
            {
                if (min_free_time > m_server_free_times[server_index])
                {
                    min_free_time = m_server_free_times[server_index];
                    earliest_free_server = server_index;
                }
            }
            // Waiting time is until that server frees up; the clock jumps there.
            // NOTE(review): earliest_free_server is computed but its free time
            // is never re-extended with this customer's service time — confirm
            // this is the intended simplification.
            m_waiting_times[service_index] = min_free_time - current_time;
            current_time = min_free_time;
        }
    }
}
// Asserts that the distribution persisted in the fake database matches the
// submitted model field-by-field, while receiving a fresh id of its own.
private void ValidateDistributionsEquality(DistributionModel distributionModel)
{
    var createdDistribution = GetCreatedDistribution(distributionModel);
    Assert.NotNull(createdDistribution);
    // NOTE(review): assumes stored owners come back in the same order as
    // OwnersSids — confirm against the repository implementation.
    Assert.Equal(distributionModel.OwnersSids, createdDistribution.Owners.Select(x => x.Sid));
    Assert.Equal(distributionModel.BuildBindings.Length, createdDistribution.BuildBindings.Count);
    Assert.Equal(distributionModel.ProjectBindings.Length, createdDistribution.ProjectBindings.Count);
    // The read model must get its own id, distinct from the submitted one.
    Assert.NotEqual(distributionModel.Id, createdDistribution.Id);
}
/// <summary>
/// Maps the validated model to an update command, dispatches it through the
/// mediator and returns 200 OK.
/// </summary>
public async Task <IActionResult> Update(
    [Required][CustomizeValidator(RuleSet = "Update")] DistributionModel model)
{
    var command = await distributionCommandMapper.MapToUpdateCommand(model);

    await mediator.Send(command);

    return Ok();
}
// An invalid distribution payload sent to POST must be rejected with 400
// and must not be persisted.
public async Task Create_IncorrectDistributionModel_BadRequest(DistributionModel distribution)
{
    var response = await client.PostAsync(DistributionsControllerPath, ConvertToStringContent(distribution));

    Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);

    var createdDistribution = GetCreatedDistribution(distribution);
    Assert.Null(createdDistribution);
}
/// <summary>
/// Creates a default feature per label: key derived via ToKey, sequential
/// index, object data type, and the given distribution model.
/// </summary>
/// <param name="labels">Labels to turn into features; must be non-null and non-empty.</param>
/// <param name="model">Distribution model assigned to every feature.</param>
public static IFeature[] CreateDefaults(IEnumerable<string> labels, DistributionModel model = DistributionModel.Unknown)
{
    // Materialize once: the original enumerated `labels` twice (Any + Select),
    // which breaks for one-shot enumerables.
    var labelList = labels?.ToList();
    Contract.Assert(labelList != null && labelList.Count > 0);
    // Indexed Select overload replaces the fragile `i++` closure mutation.
    return labelList
        .Select((label, index) => new Feature()
        {
            Key = ToKey(label),
            Index = index,
            DataType = TypeCode.Object,
            Label = label,
            Model = model
        })
        .ToArray();
}