/// <summary>
/// Reduce an instance of Set Cover using safe reduction rules.
/// </summary>
/// <param name="U">The universe</param>
/// <param name="F">The family of sets</param>
/// <returns>
/// (U, F, delta), where U and F are "irreducible" and
/// delta is the list of sets that must be included in any optimal cover
/// </returns>
private static Tuple<HashSet<int>, Dictionary<int, HashSet<int>>, List<int>> Reduce(HashSet<int> U, Dictionary<int, HashSet<int>> F)
{
    // EdgeSets : Edges -> 2^Faces
    var EdgeSets = new Dictionary<int, HashSet<int>>();

    // deep copy
    foreach (var edge in F.Keys)
    {
        foreach (var face in F[edge])
        {
            if (EdgeSets.ContainsKey(edge))
            {
                EdgeSets[edge].Add(face);
            }
            else
            {
                EdgeSets.Add(edge, new HashSet<int> { face });
            }
        }
    }

    // FaceSets : Faces -> 2^Edges (inverse mapping)
    var FaceSets = new Dictionary<int, HashSet<int>>();
    foreach (var edge in EdgeSets.Keys)
    {
        foreach (var face in EdgeSets[edge])
        {
            if (FaceSets.ContainsKey(face))
            {
                FaceSets[face].Add(edge);
            }
            else
            {
                FaceSets.Add(face, new HashSet<int> { edge });
            }
        }
    }

    // main reduction loop
    // whether the instance changed at all
    var reduced = false;
    // whether the instance changed this iteration
    var changed = true;
    var delta = new List<int>();

    do
    {
        changed = false;

        // last face was removed
        if (FaceSets.Count == 0)
        {
            return Tuple.Create(new HashSet<int>(), new Dictionary<int, HashSet<int>>(), delta);
        }

        // rule 1: remove duplicate sets
        var noDups = RemoveDuplicates(EdgeSets);
        if (noDups.Item1)
        {
            reduced = true;
            changed = true;
            EdgeSets = noDups.Item2;

            // rebuild the inverse mapping
            FaceSets = new Dictionary<int, HashSet<int>>();
            foreach (var edge in EdgeSets.Keys)
            {
                foreach (var face in EdgeSets[edge])
                {
                    if (FaceSets.ContainsKey(face))
                    {
                        FaceSets[face].Add(edge);
                    }
                    else
                    {
                        FaceSets.Add(face, new HashSet<int> { edge });
                    }
                }
            }
            continue;
        }

        // rule 2: handle isolated faces (faces covered by exactly one set);
        // that set is forced into the solution
        var isolated = from set in FaceSets
                       where set.Value.Count == 1
                       select set.Key;
        if (isolated.Count() > 0)
        {
            reduced = true;
            changed = true;

            // collect all faces of the sets which include the isolated faces
            var coveredFaces = new HashSet<int>();
            var coveredEdges = new HashSet<int>();
            var selectedEdges = new HashSet<int>();
            foreach (var face in isolated)
            {
                // only one set includes the face
                var edge = FaceSets[face].ElementAt(0);
                selectedEdges.Add(edge);
                foreach (var f in EdgeSets[edge])
                {
                    coveredFaces.Add(f);
                }
                coveredEdges.Add(edge);
            }
            delta.AddRange(selectedEdges);

            // build new FaceSets without the covered faces
            var newFaceSets = new Dictionary<int, HashSet<int>>();
            foreach (var pair in FaceSets)
            {
                if (!coveredFaces.Contains(pair.Key))
                {
                    newFaceSets.Add(pair.Key, pair.Value);
                }
            }
            FaceSets = newFaceSets;

            // build new EdgeSets without the selected sets and covered faces
            var newEdgeSets = new Dictionary<int, HashSet<int>>();
            foreach (var pair in EdgeSets)
            {
                if (!coveredEdges.Contains(pair.Key))
                {
                    var faces = pair.Value;
                    faces.ExceptWith(coveredFaces);
                    newEdgeSets.Add(pair.Key, faces);
                }
            }

            // remove empty sets
            EdgeSets = new Dictionary<int, HashSet<int>>();
            foreach (var pair in newEdgeSets)
            {
                if (pair.Value.Count > 0)
                {
                    EdgeSets.Add(pair.Key, pair.Value);
                }
            }
            continue;
        }

        // rule 3: find the first set that is completely included in another set and remove it
        for (int i = 0; i < EdgeSets.Count && !changed; i++)
        {
            var edge = EdgeSets.ElementAt(i).Key;
            var A = EdgeSets.ElementAt(i).Value;
            for (int j = 0; j < EdgeSets.Count; j++)
            {
                if (i == j)
                {
                    continue;
                }

                // is A a subset of B?
                bool complete = true;
                var B = EdgeSets.ElementAt(j).Value;
                foreach (var x in A)
                {
                    if (!B.Contains(x))
                    {
                        complete = false;
                        break;
                    }
                }

                if (complete)
                {
                    foreach (var face in A)
                    {
                        // face no longer occurs in Vis(edge)
                        FaceSets[face].Remove(edge);
                    }

                    // remove empty sets
                    var newFaceSets = new Dictionary<int, HashSet<int>>();
                    foreach (var pair in FaceSets)
                    {
                        if (pair.Value.Count > 0)
                        {
                            newFaceSets.Add(pair.Key, pair.Value);
                        }
                    }
                    FaceSets = newFaceSets;

                    EdgeSets.Remove(edge);
                    reduced = true;
                    changed = true;
                    break;
                }
            }
        }
        if (changed)
        {
            continue;
        }

        // rule 4: remove the first face which is present in every remaining set;
        // it is covered no matter which set is chosen
        var redundant = false;
        var redundantFace = 0;
        foreach (var face in FaceSets.Keys)
        {
            if (FaceSets[face].Count == EdgeSets.Keys.Count)
            {
                // rule 2 did not fire, so there are at least two sets and this face is not isolated
                redundant = true;
                redundantFace = face;
                break;
            }
        }
        if (redundant)
        {
            reduced = true;
            changed = true;
            FaceSets.Remove(redundantFace);
            foreach (var pair in EdgeSets)
            {
                pair.Value.Remove(redundantFace);
            }
        }
    } while (changed);

    // recover the instance
    if (reduced)
    {
        var new_U = new HashSet<int>();
        foreach (var face in FaceSets.Keys)
        {
            new_U.Add(face);
        }
        // EdgeSets maps each remaining set to the faces it covers, as described by the (U, F, delta) contract
        return Tuple.Create(new_U, EdgeSets, delta);
    }
    else
    {
        return Tuple.Create(U, F, delta);
    }
}
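
// A minimal usage sketch (assumed, not part of the original solver): builds a tiny
// Set Cover instance and runs Reduce on it. The instance and the method name
// ReduceExample are illustrative; it also assumes RemoveDuplicates reports no change
// when the family contains no duplicate sets. Element 1 occurs only in set 0 and
// element 4 only in set 2, so the isolated-face rule (rule 2) forces both sets;
// together they cover everything, leaving an empty instance and delta = {0, 2}.
private static void ReduceExample()
{
    // universe U = {1, 2, 3, 4}
    var U = new HashSet<int> { 1, 2, 3, 4 };

    // family F: set id -> elements it covers
    var F = new Dictionary<int, HashSet<int>>
    {
        { 0, new HashSet<int> { 1, 2 } },    // only set containing element 1
        { 1, new HashSet<int> { 2, 3 } },
        { 2, new HashSet<int> { 2, 3, 4 } }, // only set containing element 4
    };

    var result = Reduce(U, F);
    var newU = result.Item1;   // remaining elements to cover (empty here)
    var newF = result.Item2;   // remaining sets (empty here)
    var delta = result.Item3;  // sets forced into the cover: 0 and 2

    Console.WriteLine("forced sets: " + string.Join(", ", delta));
    Console.WriteLine("remaining elements: " + newU.Count + ", remaining sets: " + newF.Count);
}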
private void MaxCardinalitySearch()
{
    int noOfVer = vertexList.Length;
    int noOfEdg = edgeList.Length;
    ai = new AcyclicityInfo(noOfVer, noOfEdg);

    int i = noOfVer + 1;
    int j = 0;
    // Paper has a 1 based index.
    // Therefore, initial value is set to -1 instead of 0.
    int k = -1;

    // alpha[v]: number assigned to vertex v (-1 while unnumbered)
    int[] alpha = new int[noOfVer];
    int[] betaV = ai.betaV;
    for (int vId = 0; vId < noOfVer; vId++)
    {
        alpha[vId] = -1;
    }

    // bucket b holds the edges that currently have b numbered vertices
    EdgeSets sets = new EdgeSets(noOfEdg, noOfVer);
    int[] R = ai.R;
    // size[e]: number of numbered vertices of edge e (-1 once e is processed or fully numbered)
    int[] size = new int[noOfEdg];
    int[] betaE = new int[noOfEdg];
    int[] gamma = ai.gamma;
    for (int S = 0; S < noOfEdg; S++)
    {
        gamma[S] = -1;
    }

    while (j >= 0)
    {
        // pick an edge with the maximum number of already numbered vertices
        int S = sets.Remove(j);
        k++;
        betaE[S] = k;
        R[k] = S;
        size[S] = -1;

        foreach (int vId in edgeList[S])
        {
            if (alpha[vId] >= 0)
            {
                continue; // vertex already numbered
            }
            i--;
            alpha[vId] = i;
            betaV[vId] = k;

            // every unprocessed edge containing vId moves up one bucket
            foreach (int eId in vertexList[vId])
            {
                if (size[eId] < 0)
                {
                    continue;
                }
                gamma[eId] = k;
                sets.Remove(eId, size[eId]);
                size[eId]++;
                if (size[eId] < edgeList[eId].Length)
                {
                    sets.Add(eId, size[eId]);
                }
                else if (size[eId] == edgeList[eId].Length)
                {
                    size[eId] = -1;
                }
            } // foreach eId
        } // foreach vId

        // In paper: j++
        // After processing S no edge can sit in a bucket above |S|,
        // so restart the downward scan for the largest non-empty bucket there.
        j = edgeList[S].Length;
        while (j >= 0 && sets.Size(j) == 0)
        {
            j--;
        }
    }
}
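
// The EdgeSets helper used by MaxCardinalitySearch is defined elsewhere; the class
// below is only an assumed minimal sketch of the bucket structure the search relies
// on, with the operations used above: bucket b holds the ids of the edges that
// currently have b numbered vertices. The name EdgeSetsSketch, the field names and
// the HashSet-per-bucket representation are illustrative, not the original class.
sealed class EdgeSetsSketch
{
    private readonly HashSet<int>[] buckets;

    public EdgeSetsSketch(int noOfEdg, int noOfVer)
    {
        // an edge can have between 0 and noOfVer numbered vertices
        buckets = new HashSet<int>[noOfVer + 1];
        for (int b = 0; b <= noOfVer; b++)
        {
            buckets[b] = new HashSet<int>();
        }
        // initially no vertex is numbered, so every edge sits in bucket 0;
        // this matches the first call sets.Remove(0) in MaxCardinalitySearch
        for (int eId = 0; eId < noOfEdg; eId++)
        {
            buckets[0].Add(eId);
        }
    }

    // number of edges currently in bucket b
    public int Size(int b)
    {
        return buckets[b].Count;
    }

    // move an edge into bucket b
    public void Add(int eId, int b)
    {
        buckets[b].Add(eId);
    }

    // remove a specific edge from bucket b
    public void Remove(int eId, int b)
    {
        buckets[b].Remove(eId);
    }

    // remove and return an arbitrary edge from bucket b
    public int Remove(int b)
    {
        int eId = buckets[b].First();
        buckets[b].Remove(eId);
        return eId;
    }
}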