public void CanMakeNontrivialCut()
{
    Vertex vert1 = new Vertex();
    Vertex vert2 = new Vertex();
    Vertex vert3 = new Vertex();
    Vertex vert4 = new Vertex();
    Vertex vert5 = new Vertex();
    Vertex vert6 = new Vertex();
    /* o:o-o6
     * |\:
     * o-o-o
     * 1
     * Where : is a weak edge */
    Edge.AddEdge(vert1, vert2, 1d, 1d);
    Edge.AddEdge(vert2, vert3, 1d, 1d);
    Edge.AddEdge(vert2, vert4, 1d, 1d);
    Edge.AddEdge(vert3, vert4, 1d, 1d);
    Edge.AddEdge(vert3, vert5, 0.1d, 0.1d);
    Edge.AddEdge(vert4, vert5, 0.1d, 0.1d);
    Edge.AddEdge(vert5, vert6, 1d, 1d);

    // Push flow from vert1 to vert6 until no augmenting path remains.
    while (vert1.AddFlowTo(vert6)) ;

    // Tag the source side of the minimum cut: everything still reachable from vert1
    // through edges with residual capacity.
    vert1.ResidualCapacityConnectedNodes();

    bool all_on_that_should_be = vert1.tagged_as_one && vert2.tagged_as_one
        && vert3.tagged_as_one && vert4.tagged_as_one;
    bool all_off_that_should_be = !(vert5.tagged_as_one) && !(vert6.tagged_as_one);
    Assert.IsTrue(all_off_that_should_be && all_on_that_should_be);
}
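// For context: the tagging step this test relies on is residual-graph reachability.
// After the flow loop, every vertex still reachable from the source through edges with
// leftover capacity lies on the source ("1") side of the minimum cut, which is what
// ResidualCapacityConnectedNodes() is expected to compute. The sketch below is a minimal,
// self-contained illustration of that idea; ResidualVertex/ResidualEdge and their members
// are illustrative assumptions, not the project's actual Vertex/Edge API. It only relies on
// System.Collections.Generic, which the surrounding code already imports.
class ResidualVertex
{
    public List<ResidualEdge> Edges = new List<ResidualEdge>();
    public bool TaggedAsOne;

    // Breadth-first search over edges that still have residual capacity,
    // tagging every vertex reachable from this one.
    public void TagResidualReachable()
    {
        var queue = new Queue<ResidualVertex>();
        TaggedAsOne = true;
        queue.Enqueue(this);
        while (queue.Count > 0)
        {
            ResidualVertex v = queue.Dequeue();
            foreach (ResidualEdge e in v.Edges)
            {
                if (e.Capacity > 1e-9 && !e.To.TaggedAsOne)
                {
                    e.To.TaggedAsOne = true;
                    queue.Enqueue(e.To);
                }
            }
        }
    }
}

class ResidualEdge
{
    public ResidualVertex To;
    public double Capacity; // remaining (residual) capacity on this directed edge
}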
public Classification MaximumAPosterioriInfer(ImageData test_input)
{
    Vertex[,] site_nodes = new Vertex[test_input.XSites, test_input.YSites];
    for (int i = 0; i < test_input.XSites; i++)
        for (int j = 0; j < test_input.YSites; j++)
        {
            site_nodes[i, j] = new Vertex();
        }
    Vertex source = new Vertex();
    Vertex target = new Vertex();

    for (int j = 0; j < test_input.YSites; j++)
    {
        for (int i = 0; i < test_input.XSites; i++)
        {
            Vertex t = site_nodes[i, j];

            // Add the edge with capacity lambda_t from the source, or the edge with capacity -lambda_t to the target.
            // lambda_t is the log-likelihood ratio: log( p(y | x = 1) / p(y | x = 0) ).
            // By Bayes' rule:
            //   Posterior odds = P(x = 1 | y) / P(x = 0 | y)
            //                  = likelihood ratio * prior odds
            //                  = e^(lambda_t) * 1, assuming a uniform prior.
            // So lambda_t = log(posterior odds) - log(prior odds)
            //             = log P(x = 1 | y) - log P(x = 0 | y), with the prior term vanishing for a uniform prior.
            // P(x = 1 | y) is modeled as sigma(w^T * h(y)), so lambda_t =
            //   log(sigma(w^T * h(y))) - log(1 - sigma(w^T * h(y))).
            // These calculations may contain mistakes. -Jesse Selover
            double modeled_prob_of_one = MathWrapper.Sigma(W.DotProduct(Transformer.Transform(test_input[i, j])));
            /*double prob_one = ((double)Ons_seen) / ((double)Sites_seen);
            double prob_zero = 1d - prob_one;
            double lambda = MathWrapper.Log(modeled_prob_of_one) - MathWrapper.Log(1 - modeled_prob_of_one)
                + MathWrapper.Log(prob_one / prob_zero);*/

            // Source edge capacity is -log P(x = 1 | y); target edge capacity is -log P(x = 0 | y).
            Edge.AddEdge(source, t, -MathWrapper.Log(modeled_prob_of_one), 0);
            Edge.AddEdge(t, target, -MathWrapper.Log(1 - modeled_prob_of_one), 0);
            Console.WriteLine("Edge to target with strength {0}", -MathWrapper.Log(1 - modeled_prob_of_one));

            //Console.WriteLine(ImageData.GetNewConnections(i,j).Count);
            foreach (Tuple<int, int> other in test_input.GetNewConnections(i, j))
            {
                Vertex u = site_nodes[other.Item1, other.Item2];
                // Add the edge with capacity Beta_{t,u} in both directions between t and u.
                // DRFs (2006) use max(0, v^T * mu_{t,u}(y)) as the data-dependent smoothing term.
                DenseVector mu;
                if (ImageData.IsEarlier(i, j, other.Item1, other.Item2))
                    mu = Crosser.Cross(test_input[i, j], test_input[other.Item1, other.Item2]);
                else
                    mu = Crosser.Cross(test_input[other.Item1, other.Item2], test_input[i, j]);
                double capacity = Math.Max(0, V.DotProduct(mu));
                Console.WriteLine("\tInternode edge with strength {0}", capacity);
                Edge.AddEdge(t, u, capacity, capacity);
            }
        }
    }

    // Find the maximum flow: keep augmenting until no meaningful amount of flow can be added.
    double flow_added = 0;
    while (true)
    {
        flow_added = source.AddFlowTo(new List<Vertex>(), target, 400000000d);
        if (flow_added <= 0.0000001d) break;
    }

    // Find the source side of the minimum cut.
    source.ResidualCapacityConnectedNodes();

    // Sites on the source side of the cut are labeled ON; the rest keep the default label.
    Label[,] toReturn = new Label[test_input.XSites, test_input.YSites];
    for (int i = 0; i < test_input.XSites; i++)
        for (int j = 0; j < test_input.YSites; j++)
        {
            if (site_nodes[i, j].tagged_as_one) toReturn[i, j] = Label.ON;
        }
    return new Classification(toReturn);
}
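// A small self-contained sketch of the terminal-edge capacities used above, assuming the same
// association model p(x = 1 | y) = sigma(w . h(y)). "weights" and "features" stand in for W and
// Transformer.Transform(test_input[i,j]); the names, the plain-array types, and this helper class
// are illustrative assumptions, not the project's API.
static class UnaryTermSketch
{
    // Logistic sigmoid, the role MathWrapper.Sigma plays above.
    static double Sigma(double z)
    {
        return 1.0 / (1.0 + Math.Exp(-z));
    }

    static double Dot(double[] a, double[] b)
    {
        double sum = 0;
        for (int k = 0; k < a.Length; k++) sum += a[k] * b[k];
        return sum;
    }

    // Returns the capacities of the two terminal edges for one site:
    // Item1 is the source -> site capacity, -log p(x = 1 | y);
    // Item2 is the site -> target capacity, -log p(x = 0 | y);
    // mirroring the Edge.AddEdge calls in MaximumAPosterioriInfer.
    public static Tuple<double, double> TerminalCapacities(double[] weights, double[] features)
    {
        double pOne = Sigma(Dot(weights, features));
        return Tuple.Create(-Math.Log(pOne), -Math.Log(1 - pOne));
    }
}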