/// <summary>
/// Computes a compressed control-word encoding for the given flow matrix by
/// greedily merging per-target encoded flows (highest merge score first) until
/// no beneficial merge remains, then assigns each resulting micro-string its
/// selector offset within the control word.
/// </summary>
/// <param name="flowSpec">Flow matrix describing the data flows per c-step.</param>
/// <param name="maxSelWidth">Upper bound on the selector width (LUT inputs) a merged flow may require; default 6.</param>
/// <returns>A human-readable report of the encoding (sizes, timing, histogram).</returns>
public string ComputeEncoding(FlowMatrix flowSpec, int maxSelWidth = 6)
{
    var sb = new StringBuilder();
    sb.AppendLine("Control word encoding report");
    sb.AppendFormat(" Number of c-steps: {0}", flowSpec.NumCSteps);
    sb.AppendLine();
    sb.AppendFormat(" Maximum LUT inputs: {0}", maxSelWidth);
    sb.AppendLine();

    FlowSpec = flowSpec;

    // Group all flows by target signal, integrating each c-step's partial
    // flow over the neutral (default) flow so every target has a value.
    var flowMap = new Dictionary<SignalRef, List<Flow>>();
    var neutralFlow = flowSpec.NeutralFlow;
    _vcf.AddFlow(neutralFlow);
    for (int i = 0; i < flowSpec.NumCSteps; i++)
    {
        var pflow = flowSpec.GetFlow(i);
        var nflow = new ParFlow(neutralFlow);
        nflow.Integrate(pflow);
        _vcf.AddFlow(nflow);
        foreach (var flow in nflow.Flows)
        {
            List<Flow> flows;
            if (!flowMap.TryGetValue(flow.Target, out flows))
            {
                flows = new List<Flow>();
                flowMap[flow.Target] = flows;
            }
            flows.Add(flow);
        }
    }
    _vcf.Encode();

    // FIX: Stopwatch instead of DateTime.Now — wall-clock time is not
    // monotonic and is the wrong tool for measuring elapsed time.
    var watch = Stopwatch.StartNew();

    var encFlows = flowMap.Values
        .Select((l, i) => new EncodedFlow(l, i))
        .ToArray();
    var uncompressedMuxBits = encFlows.Sum(ef => MathExt.CeilLog2(ef.NumSymbols));
    sb.AppendFormat(" Uncompressed CW: {0} MUX bits + {1} value bits",
        uncompressedMuxBits, _vcf.GetUncompressedValueWordWidth());
    sb.AppendLine();

    // Iterative greedy merging: in each generation, enumerate all mergeable
    // pairs, accept the best-scoring disjoint merges, and repeat until no
    // merge happened.
    int numTargets = encFlows.Length;
    var mergeCandidates = new List<Tuple<int, int, MergedFlow>>();
    var indices = new SortedSet<int>(Enumerable.Range(0, numTargets));
    var curGen = (EncodedFlow[])encFlows.Clone();
    bool mergedAny;
    do
    {
        // Candidate pairs (i < j) among surviving flows; flows with a single
        // symbol need no selector and are never merged.
        foreach (int i in indices)
        {
            if (curGen[i].NumSymbols <= 1)
                continue;
            var upview = indices.GetViewBetween(i + 1, numTargets);
            foreach (int j in upview)
            {
                if (curGen[j].NumSymbols <= 1)
                    continue;
                var mergedFlow = new MergedFlow(curGen[i], curGen[j]);
                mergeCandidates.Add(Tuple.Create(i, j, mergedFlow));
            }
        }

        // Best-scoring merges first; each index may participate in at most
        // one merge per generation.
        var orderedMergeCandidates = mergeCandidates.OrderByDescending(t => t.Item3.Score);
        var nextGen = (EncodedFlow[])curGen.Clone();
        var mergedIndices = new HashSet<int>();
        mergedAny = false;
        foreach (var tup in orderedMergeCandidates)
        {
            Debug.Assert(tup.Item2 > tup.Item1);
            var mergedFlow = tup.Item3;
            if (mergedFlow.Score == 0.0)
                break; // sorted descending: all remaining candidates score 0
            int selWidth = MathExt.CeilLog2(mergedFlow.NumSymbols);
            if (selWidth > maxSelWidth)
                continue; // merged selector would exceed the LUT input limit
            if (mergedIndices.Contains(tup.Item1) || mergedIndices.Contains(tup.Item2))
                continue; // one of the pair was already merged this generation
            mergedIndices.Add(tup.Item1);
            mergedIndices.Add(tup.Item2);
            indices.Remove(tup.Item2); // flow j is absorbed into flow i
            mergedFlow.Realize();
            Debug.Assert(nextGen[tup.Item1].Targets.All(t => mergedFlow.Targets.Contains(t)));
            Debug.Assert(nextGen[tup.Item2].Targets.All(t => mergedFlow.Targets.Contains(t)));
            nextGen[tup.Item1] = mergedFlow;
            mergedAny = true;
        }
        curGen = nextGen;
        // FIX: removed dead code — nextCandidates was only ever cleared and
        // AddRange'd while empty, and mergedLowIndices/mergedHiIndices were
        // populated but never read.
        mergeCandidates.Clear();
    } while (mergedAny);

    _strings = indices.Select(i => new MicroString(curGen[i], _vcf)).ToArray();

    // Verification: the surviving micro-strings must cover exactly the
    // targets of the original encoded flows (no extras, none missing).
    var coveredTargets = _strings.SelectMany(s => s.Targets);
    var allTargets = encFlows.SelectMany(f => f.Targets);
    var isect0 = coveredTargets.Except(allTargets);
    var isect1 = allTargets.Except(coveredTargets);
    Debug.Assert(!isect0.Any());
    Debug.Assert(!isect1.Any());

    // Assign each micro-string its selector offset within the control word;
    // selector bits are laid out after the value word.
    int offset = _vcf.ValueWordWidth;
    int order = 0;
    foreach (var ms in _strings)
    {
        ms.SelOffset = offset;
        ms.Order = order;
        offset += ms.SelWidth;
        order++;
    }
    CWWidth = offset;

    watch.Stop();

    sb.AppendFormat(" Compressed CW: {0} MUX bits + {1} value bits",
        offset - _vcf.ValueWordWidth, _vcf.ValueWordWidth);
    sb.AppendLine();
    // FIX: guard the degenerate zero-target case — Max() on an empty
    // sequence throws InvalidOperationException.
    sb.AppendFormat(" Maximum LUT inputs: {0}",
        _strings.Length > 0 ? _strings.Max(s => s.SelWidth) : 0);
    sb.AppendLine(); // FIX: was missing — " Running time" ran onto the previous report line
    sb.AppendFormat(" Running time: {0} ms", watch.Elapsed.TotalMilliseconds);
    sb.AppendLine();
    sb.AppendLine();
    sb.AppendLine("Number of MUX inputs; Number of occurrences"); // FIX: typo "occurences"
    var histo = _strings.GroupBy(s => s.SelWidth)
        .OrderByDescending(grp => grp.Key);
    foreach (var grp in histo)
    {
        sb.AppendFormat("{0}; {1}", grp.Key, grp.Count());
        sb.AppendLine();
    }
    return sb.ToString();
}
/// <summary>
/// Computes a compressed control-word encoding for <paramref name="flowSpec"/>.
/// Per-target flows are encoded, then repeatedly merged pairwise (best score
/// first, subject to the selector-width limit) until no merge helps; the
/// resulting micro-strings receive their selector offsets in the control word.
/// </summary>
/// <param name="flowSpec">Flow matrix describing the data flows per c-step.</param>
/// <param name="maxSelWidth">Maximum selector width (LUT inputs) allowed for a merged flow; default 6.</param>
/// <returns>Text report with bit counts, timing, and a selector-width histogram.</returns>
public string ComputeEncoding(FlowMatrix flowSpec, int maxSelWidth = 6)
{
    var sb = new StringBuilder();
    sb.AppendLine("Control word encoding report");
    sb.AppendFormat(" Number of c-steps: {0}", flowSpec.NumCSteps);
    sb.AppendLine();
    sb.AppendFormat(" Maximum LUT inputs: {0}", maxSelWidth);
    sb.AppendLine();

    FlowSpec = flowSpec;

    // Bucket every flow by its target signal; each c-step's partial flow is
    // integrated over the neutral flow first so all targets are defined.
    var flowMap = new Dictionary<SignalRef, List<Flow>>();
    var neutralFlow = flowSpec.NeutralFlow;
    _vcf.AddFlow(neutralFlow);
    for (int i = 0; i < flowSpec.NumCSteps; i++)
    {
        var pflow = flowSpec.GetFlow(i);
        var nflow = new ParFlow(neutralFlow);
        nflow.Integrate(pflow);
        _vcf.AddFlow(nflow);
        foreach (var flow in nflow.Flows)
        {
            List<Flow> flows;
            if (!flowMap.TryGetValue(flow.Target, out flows))
            {
                flows = new List<Flow>();
                flowMap[flow.Target] = flows;
            }
            flows.Add(flow);
        }
    }
    _vcf.Encode();

    // FIX: use Stopwatch for elapsed-time measurement; DateTime.Now is
    // wall-clock and not monotonic.
    var watch = Stopwatch.StartNew();

    var encFlows = flowMap.Values
        .Select((l, i) => new EncodedFlow(l, i))
        .ToArray();
    var uncompressedMuxBits = encFlows.Sum(ef => MathExt.CeilLog2(ef.NumSymbols));
    sb.AppendFormat(" Uncompressed CW: {0} MUX bits + {1} value bits",
        uncompressedMuxBits, _vcf.GetUncompressedValueWordWidth());
    sb.AppendLine();

    // Greedy merge loop: per generation, build all candidate pairs, take the
    // highest-scoring disjoint merges, repeat until a generation merges nothing.
    int numTargets = encFlows.Length;
    var mergeCandidates = new List<Tuple<int, int, MergedFlow>>();
    var indices = new SortedSet<int>(Enumerable.Range(0, numTargets));
    var curGen = (EncodedFlow[])encFlows.Clone();
    bool mergedAny;
    do
    {
        // Pairs (i < j) among surviving flows; single-symbol flows carry no
        // selector and are skipped.
        foreach (int i in indices)
        {
            if (curGen[i].NumSymbols <= 1)
                continue;
            var upview = indices.GetViewBetween(i + 1, numTargets);
            foreach (int j in upview)
            {
                if (curGen[j].NumSymbols <= 1)
                    continue;
                var mergedFlow = new MergedFlow(curGen[i], curGen[j]);
                mergeCandidates.Add(Tuple.Create(i, j, mergedFlow));
            }
        }

        // Accept merges best-score-first; an index may be merged only once
        // per generation.
        var orderedMergeCandidates = mergeCandidates.OrderByDescending(t => t.Item3.Score);
        var nextGen = (EncodedFlow[])curGen.Clone();
        var mergedIndices = new HashSet<int>();
        mergedAny = false;
        foreach (var tup in orderedMergeCandidates)
        {
            Debug.Assert(tup.Item2 > tup.Item1);
            var mergedFlow = tup.Item3;
            if (mergedFlow.Score == 0.0)
                break; // descending order: nothing useful remains
            int selWidth = MathExt.CeilLog2(mergedFlow.NumSymbols);
            if (selWidth > maxSelWidth)
                continue; // would exceed the LUT input limit
            if (mergedIndices.Contains(tup.Item1) || mergedIndices.Contains(tup.Item2))
                continue; // partner already consumed this generation
            mergedIndices.Add(tup.Item1);
            mergedIndices.Add(tup.Item2);
            indices.Remove(tup.Item2); // j is absorbed into i
            mergedFlow.Realize();
            Debug.Assert(nextGen[tup.Item1].Targets.All(t => mergedFlow.Targets.Contains(t)));
            Debug.Assert(nextGen[tup.Item2].Targets.All(t => mergedFlow.Targets.Contains(t)));
            nextGen[tup.Item1] = mergedFlow;
            mergedAny = true;
        }
        curGen = nextGen;
        // FIX: dropped dead bookkeeping — nextCandidates was always empty when
        // copied back, and mergedLowIndices/mergedHiIndices were never read.
        mergeCandidates.Clear();
    } while (mergedAny);

    _strings = indices.Select(i => new MicroString(curGen[i], _vcf)).ToArray();

    // Verification: micro-strings must cover exactly the original targets.
    var coveredTargets = _strings.SelectMany(s => s.Targets);
    var allTargets = encFlows.SelectMany(f => f.Targets);
    var isect0 = coveredTargets.Except(allTargets);
    var isect1 = allTargets.Except(coveredTargets);
    Debug.Assert(!isect0.Any());
    Debug.Assert(!isect1.Any());

    // Lay out selector bits after the value word and record the total width.
    int offset = _vcf.ValueWordWidth;
    int order = 0;
    foreach (var ms in _strings)
    {
        ms.SelOffset = offset;
        ms.Order = order;
        offset += ms.SelWidth;
        order++;
    }
    CWWidth = offset;

    watch.Stop();

    sb.AppendFormat(" Compressed CW: {0} MUX bits + {1} value bits",
        offset - _vcf.ValueWordWidth, _vcf.ValueWordWidth);
    sb.AppendLine();
    // FIX: guard empty _strings — Max() on an empty sequence throws.
    sb.AppendFormat(" Maximum LUT inputs: {0}",
        _strings.Length > 0 ? _strings.Max(s => s.SelWidth) : 0);
    sb.AppendLine(); // FIX: missing line break before the running-time entry
    sb.AppendFormat(" Running time: {0} ms", watch.Elapsed.TotalMilliseconds);
    sb.AppendLine();
    sb.AppendLine();
    sb.AppendLine("Number of MUX inputs; Number of occurrences"); // FIX: typo "occurences"
    var histo = _strings.GroupBy(s => s.SelWidth)
        .OrderByDescending(grp => grp.Key);
    foreach (var grp in histo)
    {
        sb.AppendFormat("{0}; {1}", grp.Key, grp.Count());
        sb.AppendLine();
    }
    return sb.ToString();
}