/// <summary>
/// One-time filter setup: resolves which planes/channels to process, records the
/// sample/reference bit depths, switches the output clip to the reference bit depth,
/// and registers cache hints sized for the temporal radius <c>tr</c>.
/// </summary>
/// <param name="args">Raw AviSynth argument pack (parsed elsewhere into properties).</param>
/// <exception cref="AvisynthException">
/// If <c>Intensity &lt; 1</c> while sample and reference bit depths differ.
/// </exception>
protected override void Initialize(AVSValue args)
{
    // Limited-range (TV levels) handling only makes sense for planar formats.
    LimitedRange = LimitedRange && GetVideoInfo().IsPlanar();
    // Interleaved output: one pseudo-plane; planar: map requested channel letters
    // ("yuv" by default) onto YUVPlanes enum members (e.g. 'y' -> PLANAR_Y).
    planes = GetVideoInfo().pixel_type.HasFlag(ColorSpaces.CS_INTERLEAVED)
        ? new[] { default(YUVPlanes) }
        : (Channels ?? "yuv").ToCharArray()
            .Select(p => Enum.Parse(typeof(YUVPlanes), "PLANAR_" + p, true))
            .Cast<YUVPlanes>().ToArray();
    // Planar clips process each plane whole; packed RGB maps channel letters to
    // their BGR byte offsets within a pixel (unknown letters yield -1).
    realChannels = GetVideoInfo().IsPlanar()
        ? new[] { 0 }
        : (Channels ?? "rgb").ToLower().ToCharArray().Select(p => "bgr".IndexOf(p)).ToArray();
    if (!OverlayUtils.IsRealPlanar(Child))
    {
        // FIX: statement terminator was outside the block ("} ;"), which does not compile.
        planes = new[] { default(YUVPlanes) };
    }
    sampleBits = Sample.GetVideoInfo().pixel_type.GetBitDepth();
    referenceBits = Reference.GetVideoInfo().pixel_type.GetBitDepth();
    // The output clip follows the reference clip's bit depth.
    var vi = GetVideoInfo();
    vi.pixel_type = vi.pixel_type.ChangeBitDepth(referenceBits);
    SetVideoInfo(ref vi);
    // Ask the host to cache the full temporal window [n - tr, n + tr] on every clip.
    var cacheSize = tr * 2 + 1;
    var cacheKey = StaticEnv.GetEnv2() == null ? CacheType.CACHE_25_ALL : CacheType.CACHE_GENERIC;
    Child.SetCacheHints(cacheKey, cacheSize);
    Sample.SetCacheHints(cacheKey, cacheSize);
    Reference.SetCacheHints(cacheKey, cacheSize);
    SampleMask?.SetCacheHints(cacheKey, cacheSize);
    ReferenceMask?.SetCacheHints(cacheKey, cacheSize);
    // Partial intensity blends mapped colors with source colors, which is only
    // well-defined when both clips share a bit depth.
    if (Intensity < 1 && sampleBits != referenceBits)
    {
        throw new AvisynthException("Intensity < 1 is not allowed when sample and reference bit depth are not equal");
    }
}
/// <summary>
/// One-time filter setup for the histogram-cache variant: configures parallelism,
/// resolves planes/channels, records bit depths, switches the output clip to the
/// reference bit depth, registers cache hints, and binds (or creates) the
/// <see cref="HistogramCache"/> used by <c>GetFrame</c>.
/// </summary>
/// <param name="args">Raw AviSynth argument pack (parsed elsewhere into properties).</param>
/// <exception cref="AvisynthException">
/// If <c>Intensity &lt; 1</c> while bit depths differ, or the named cache is missing.
/// </exception>
protected override void Initialize(AVSValue args)
{
    // Threads == 0 means "unlimited" (ParallelOptions convention: -1).
    parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = Threads == 0 ? -1 : Threads };
    // Limited-range (TV levels) handling only makes sense for planar formats.
    LimitedRange = LimitedRange && GetVideoInfo().IsPlanar();
    // Interleaved output: one pseudo-plane; planar: map requested channel letters
    // ("yuv" by default) onto YUVPlanes enum members (e.g. 'y' -> PLANAR_Y).
    planes = GetVideoInfo().pixel_type.HasFlag(ColorSpaces.CS_INTERLEAVED)
        ? new[] { default(YUVPlanes) }
        : (Channels ?? "yuv").ToCharArray()
            .Select(p => Enum.Parse(typeof(YUVPlanes), "PLANAR_" + p, true))
            .Cast<YUVPlanes>().ToArray();
    // Planar clips process each plane whole; packed RGB maps channel letters to
    // their BGR byte offsets within a pixel (unknown letters yield -1).
    realChannels = GetVideoInfo().IsPlanar()
        ? new[] { 0 }
        : (Channels ?? "rgb").ToLower().ToCharArray().Select(p => "bgr".IndexOf(p)).ToArray();
    if (!Child.IsRealPlanar())
    {
        // FIX: statement terminator was outside the block ("} ;"), which does not compile.
        planes = new[] { default(YUVPlanes) };
    }
    sampleBits = Sample.GetVideoInfo().pixel_type.GetBitDepth();
    referenceBits = Reference.GetVideoInfo().pixel_type.GetBitDepth();
    // The output clip follows the reference clip's bit depth.
    var vi = GetVideoInfo();
    vi.pixel_type = vi.pixel_type.ChangeBitDepth(referenceBits);
    SetVideoInfo(ref vi);
    // Ask the host to cache the full adjacent-frame window on every clip.
    var cacheSize = AdjacentFramesCount * 2 + 1;
    var cacheKey = StaticEnv.GetEnv2() == null ? CacheType.CACHE_25_ALL : CacheType.CACHE_GENERIC;
    Child.SetCacheHints(cacheKey, cacheSize);
    Sample.SetCacheHints(cacheKey, cacheSize);
    Reference.SetCacheHints(cacheKey, cacheSize);
    SampleMask?.SetCacheHints(cacheKey, cacheSize);
    ReferenceMask?.SetCacheHints(cacheKey, cacheSize);
    // Partial intensity blends mapped colors with source colors, which is only
    // well-defined when both clips share a bit depth.
    if (Intensity < 1 && sampleBits != referenceBits)
    {
        throw new AvisynthException("Intensity < 1 is not allowed when sample and reference bit depth are not equal");
    }
    // A non-empty CacheId binds to a shared, pre-registered cache; otherwise a
    // private cache is created (never null in that branch).
    histogramCache = !string.IsNullOrEmpty(CacheId)
        ? HistogramCache.Get(CacheId)
        : new HistogramCache(planes, realChannels, SIMD, LimitedRange,
            Sample.GetVideoInfo().pixel_type, Reference.GetVideoInfo().pixel_type,
            Child.GetVideoInfo().pixel_type, AdjacentFramesCount, GreyMask, parallelOptions);
    if (histogramCache == null)
    {
        // FIX: stray '$' inside the interpolated string leaked into the error message.
        throw new AvisynthException($"Histogram cache with ID: {CacheId} not found");
    }
}
/// <summary>
/// Builds a per-plane/per-channel color transition map from temporally pooled
/// sample/reference histograms over [n - tr, n + tr] and applies it to the input frame.
/// </summary>
/// <param name="n">Frame number requested by the host.</param>
/// <returns>The color-matched frame (the input frame itself when it could be made writable).</returns>
protected override VideoFrame GetFrame(int n)
{
    var input = Child.GetFrame(n, StaticEnv);
    // Temporal window [n - tr, n + tr]. NOTE(review): no clamping to [0, num_frames)
    // here — presumably the AviSynth core clamps out-of-range frame numbers; confirm.
    var sampleFrames = Enumerable.Range(n - tr, tr * 2 + 1).Select(p => Sample.GetFrame(p, StaticEnv)).ToList();
    var referenceFrames = Enumerable.Range(n - tr, tr * 2 + 1).Select(p => Reference.GetFrame(p, StaticEnv)).ToList();
    // Process in place only when the output format matches the child's and the
    // environment grants writability; otherwise allocate a fresh output frame.
    var writable = GetVideoInfo().pixel_type == Child.GetVideoInfo().pixel_type && StaticEnv.MakeWritable(input);
    var output = writable ? input : NewVideoFrame(StaticEnv);
    using (var sampleMaskFrame = SampleMask?.GetFrame(n, StaticEnv))
    using (var refMaskFrame = ReferenceMask?.GetFrame(n, StaticEnv))
    {
        // Packed RGB advances 3 bytes per pixel; planar formats step 1.
        var pixelSize = Sample.GetVideoInfo().IsRGB() ? 3 : 1;
        Parallel.ForEach(planes, plane =>
        {
            Parallel.ForEach(realChannels, channel =>
            {
                int[] sampleHist = null, referenceHist = null;
                // The two histograms are independent, so compute them concurrently.
                Parallel.Invoke(
                    () => sampleHist = GetHistogram(sampleFrames, sampleMaskFrame, pixelSize, channel, plane,
                        Sample.GetVideoInfo().pixel_type, sampleBits, false),
                    () => referenceHist = GetHistogram(referenceFrames, refMaskFrame, pixelSize, channel, plane,
                        Reference.GetVideoInfo().pixel_type, referenceBits, LimitedRange));
                var map = GetTransitionMap(sampleHist, referenceHist, n, plane);
                var tuple = map.GetColorsAndWeights();
                // NOTE(review): this variant reads map.fixedMap (lower case) while the sibling
                // variants use map.FixedMap — presumably a different ColorMap revision; verify.
                NativeUtils.ApplyColorMap(
                    input.GetReadPtr(plane), input.GetPitch(plane), sampleBits > 8,
                    output.GetWritePtr(plane), output.GetPitch(plane), referenceBits > 8,
                    input.GetRowSize(plane), input.GetHeight(plane),
                    pixelSize, channel, map.fixedMap, tuple.Item1, tuple.Item2);
            });
        });
    }
    // Release every frame handle acquired here that is not handed back to the caller.
    if (!writable)
    {
        input.Dispose();
    }
    sampleFrames.ForEach(p => p.Dispose());
    referenceFrames.ForEach(p => p.Dispose());
    return(output);
}
/// <summary>
/// Histogram-cache variant: pulls per-frame histogram sets from <c>histogramCache</c>,
/// grows the averaging window across adjacent frames while their difference histograms
/// stay under <c>AdjacentFramesDiff</c>, then builds and applies the (optionally
/// extrapolated, interpolated, and intensity-attenuated) color map to the input frame.
/// </summary>
/// <param name="n">Frame number requested by the host.</param>
/// <returns>The color-matched frame, or the input frame untouched when Intensity is ~0.</returns>
protected override VideoFrame GetFrame(int n)
{
    var input = Child.GetFrame(n, StaticEnv);
    // Intensity ~ 0 means "no adjustment": hand the source frame back untouched.
    if (Intensity <= double.Epsilon)
    {
        return(input);
    }
    // Process in place only when the output format matches the child's and the
    // environment grants writability; otherwise allocate a fresh output frame.
    var writable = GetVideoInfo().pixel_type == Child.GetVideoInfo().pixel_type && StaticEnv.MakeWritable(input);
    var output = writable ? input : NewVideoFrame(StaticEnv);
    // Packed RGB advances 3 bytes per pixel; planar formats step 1.
    var pixelSize = Sample.GetVideoInfo().IsRGB() ? 3 : 1;
    // Clamp the adjacent-frame window to the clip bounds.
    var firstFrame = Math.Max(0, n - AdjacentFramesCount);
    var lastFrame = Math.Min(Child.GetVideoInfo().num_frames - 1, n + AdjacentFramesCount);
    // Two passes: forward [n..lastFrame], then the backward window [firstFrame..n-1].
    // NOTE(review): the backward range iterates ascending from firstFrame, so TakeWhile
    // cuts at the first dissimilar frame counting from the window's far edge rather than
    // outward from n — verify this ordering is intended.
    var dimensions = new[] { Enumerable.Range(n, lastFrame - n + 1), Enumerable.Range(firstFrame, n - firstFrame) }
        .SelectMany(range => range.Select(frame =>
        {
            // NOTE(review): VideoFrameCollector presumably tracks and releases frames
            // produced by the lambdas below when disposed — confirm its semantics.
            using (new VideoFrameCollector())
                // With a shared cache (CacheId set), only frame n is computed here;
                // other frames are expected to already be present in the cache.
                return(string.IsNullOrEmpty(CacheId) || frame == n
                    ? histogramCache.GetFrame(frame,
                        // Input histogram is only needed for extrapolation; reuse `input` for frame n.
                        () => Extrapolation ? (frame == n ? input : Child.GetFrame(frame, StaticEnv)) : null,
                        () => Sample.GetFrame(frame, StaticEnv),
                        () => Reference.GetFrame(frame, StaticEnv),
                        () => SampleMask?.GetFrame(frame, StaticEnv),
                        () => ReferenceMask?.GetFrame(frame, StaticEnv))
                    : histogramCache[frame]);
        }).TakeWhile(dims =>
        {
            var current = histogramCache[n];
            // Extend the window only while every plane/channel of the candidate frame is
            // close enough (CompareHist on diff histograms) to the current frame n.
            return(dims != null && current.All(pair => current == dims
                || !dims[pair.Key].Empty && CompareHist(dims[pair.Key].DiffHist, pair.Value.DiffHist) < AdjacentFramesDiff));
        }).SelectMany(p => p)).ToList();
    Parallel.ForEach(planes, parallelOptions, plane =>
    {
        Parallel.ForEach(realChannels, parallelOptions, channel =>
        {
            // Histogram sets of all accepted frames for this plane/channel, then averaged.
            var currentDimensions = dimensions
                .Where(p => p.Key.Equal(plane, channel))
                .Select(p => p.Value).ToArray();
            var sampleHist = AverageHist(sampleBits, currentDimensions.Select(p => p.SampleHist).ToArray());
            var referenceHist = AverageHist(referenceBits, currentDimensions.Select(p => p.ReferenceHist).ToArray());
            if (sampleHist == null || referenceHist == null)
            {
                return; // nothing to match for this plane/channel
            }
            var map = GetTransitionMap(sampleHist, referenceHist, n, plane);
            if (Extrapolation)
            {
                var srcHist = AverageHist(referenceBits, currentDimensions.Select(p => p.InputHist).ToArray());
                Extrapolate(map, srcHist, GetLowColor(referenceBits), GetHighColor(referenceBits, plane));
            }
            Interpolate(map, GetLowColor(referenceBits), GetHighColor(referenceBits, plane), sampleBits, referenceBits);
            // Partial intensity: blend the mapped color with the identity mapping.
            if (Intensity < 1)
            {
                var decreased = new ColorMap(sampleBits, n, Dither);
                for (var color = 0; color < 1 << sampleBits; color++)
                {
                    decreased.AddReal(color, map.Average(color) * Intensity + color * (1 - Intensity));
                }
                map = decreased;
            }
            var tuple = map.GetColorsAndWeights();
            // Seed ^ n varies the dither noise per frame when DynamicNoise is enabled.
            NativeUtils.ApplyColorMap(DynamicNoise ? Seed ^ n : 0,
                input.GetReadPtr(plane), input.GetPitch(plane), sampleBits > 8,
                output.GetWritePtr(plane), output.GetPitch(plane), referenceBits > 8,
                input.GetRowSize(plane), input.GetHeight(plane),
                pixelSize, channel, map.FixedMap, tuple.Item1, tuple.Item2);
        });
    });
    if (!writable)
    {
        input.Dispose();
    }
    return(output);
}
/// <summary>
/// Temporal variant with optional extrapolation: pools sample/reference (and, when
/// extrapolating, input) histograms over [n - tr, n + tr], builds the color transition
/// map, and applies it (interpolated and optionally intensity-attenuated) to the frame.
/// </summary>
/// <param name="n">Frame number requested by the host.</param>
/// <returns>The color-matched frame, or the input frame untouched when Intensity is ~0.</returns>
protected override VideoFrame GetFrame(int n)
{
    var input = Child.GetFrame(n, StaticEnv);
    // Intensity ~ 0 means "no adjustment": hand the source frame back untouched.
    if (Intensity <= double.Epsilon)
    {
        return(input);
    }
    // Temporal window [n - tr, n + tr]. NOTE(review): no clamping to [0, num_frames)
    // here — presumably the AviSynth core clamps out-of-range frame numbers; confirm.
    var sampleFrames = Enumerable.Range(n - tr, tr * 2 + 1).Select(p => Sample.GetFrame(p, StaticEnv)).ToList();
    var referenceFrames = Enumerable.Range(n - tr, tr * 2 + 1).Select(p => Reference.GetFrame(p, StaticEnv)).ToList();
    // With no temporal radius, the input frame itself is the only extrapolation source.
    var inputFrames = tr == 0
        ? new List<VideoFrame> { input }
        : Enumerable.Range(n - tr, tr * 2 + 1).Select(p => Child.GetFrame(p, StaticEnv)).ToList();
    // Process in place only when the output format matches the child's and the
    // environment grants writability; otherwise allocate a fresh output frame.
    var writable = GetVideoInfo().pixel_type == Child.GetVideoInfo().pixel_type && StaticEnv.MakeWritable(input);
    var output = writable ? input : NewVideoFrame(StaticEnv);
    using (var sampleMaskFrame = SampleMask?.GetFrame(n, StaticEnv))
    using (var refMaskFrame = ReferenceMask?.GetFrame(n, StaticEnv))
    {
        // Packed RGB advances 3 bytes per pixel; planar formats step 1.
        var pixelSize = Sample.GetVideoInfo().IsRGB() ? 3 : 1;
        Parallel.ForEach(planes, plane =>
        {
            Parallel.ForEach(realChannels, channel =>
            {
                int[] sampleHist = null, referenceHist = null, srcHist = null;
                // The three histograms are independent, so compute them concurrently.
                Parallel.Invoke(
                    () => sampleHist = GetHistogram(sampleFrames, sampleMaskFrame, pixelSize, channel, plane,
                        Sample.GetVideoInfo().pixel_type, sampleBits, false),
                    () => referenceHist = GetHistogram(referenceFrames, refMaskFrame, pixelSize, channel, plane,
                        Reference.GetVideoInfo().pixel_type, referenceBits, LimitedRange),
                    () => srcHist = Extrapolation
                        ? GetHistogram(inputFrames, null, pixelSize, channel, plane,
                            Child.GetVideoInfo().pixel_type, sampleBits, LimitedRange)
                        : null);
                var map = GetTransitionMap(sampleHist, referenceHist, n, plane);
                if (Extrapolation)
                {
                    Extrapolate(map, srcHist, GetLowColor(referenceBits), GetHighColor(referenceBits, plane));
                }
                Interpolate(map, GetLowColor(referenceBits), GetHighColor(referenceBits, plane), sampleBits, referenceBits);
                // Partial intensity: blend the mapped color with the identity mapping.
                if (Intensity < 1)
                {
                    var decreased = new ColorMap(sampleBits, n, Dither);
                    for (var color = 0; color < 1 << sampleBits; color++)
                    {
                        decreased.Add(color, map.Average(color) * Intensity + color * (1 - Intensity));
                    }
                    map = decreased;
                }
                var tuple = map.GetColorsAndWeights();
                NativeUtils.ApplyColorMap(DynamicNoise ? n : 0,
                    input.GetReadPtr(plane), input.GetPitch(plane), sampleBits > 8,
                    output.GetWritePtr(plane), output.GetPitch(plane), referenceBits > 8,
                    input.GetRowSize(plane), input.GetHeight(plane),
                    pixelSize, channel, map.FixedMap, tuple.Item1, tuple.Item2);
            });
        });
    }
    if (!writable)
    {
        input.Dispose();
    }
    sampleFrames.ForEach(p => p.Dispose());
    referenceFrames.ForEach(p => p.Dispose());
    // FIX: inputFrames was leaked when tr > 0 — the Child frames fetched for extrapolation
    // were never disposed while sample/reference frames were. When tr == 0 the list holds
    // only `input`, whose lifetime is handled above, so it must not be disposed here.
    if (tr != 0)
    {
        inputFrames.ForEach(p => p.Dispose());
    }
    return(output);
}