private void EnsureInputLoaded()
{
	if (input == null)
	{
		input = YuvEncoder.Decode(Size.Width, Size.Height, FileName.Path, LogFileName.Path, MotionVectorFileName.Path);
	}
}
public void TestVideoFunctionality()
{
	int width = 352;
	int height = 240;
	// Adjust these paths to your local checkout before running.
	string videoName = @"..\..\..\..\resources\americanFootball_352x240_125.yuv";
	string logFileName = @"..\..\..\..\resources\ModeGrid_AmFootball_OUR.dat";
	string motionVectorFileName = @"..\..\..\..\resources\motion_info\motion_info_football_qp20.csv";
	// Output directory only; the frame index and ".png" extension are appended below.
	string saveName = @"..\..\..\..\output\";

	YuvEncoder.Video video = YuvEncoder.Decode(width, height, videoName, logFileName, motionVectorFileName);

	// This check only works if the file name ends with the frame count, as in "..._125.yuv".
	Assert.Equal(int.Parse(videoName.Substring(videoName.Length - 7, 3)), video.FrameCount);

	// Test the video and frame indexers: out-of-range coordinates must yield black.
	// The second check only holds if the video is smaller than the queried coordinates
	// (or happens to be black there).
	Assert.Equal(new Rgb(0, 0, 0), video[0].GetPixelOrBlack(-1, -1));
	Assert.Equal(new Rgb(0, 0, 0), video[0].GetPixelOrBlack(9999999, 9999999));
	Assert.Equal(video[0][0, 0], video[0].GetPixelOrBlack(0, 0));

	// Render the first three and the last four frames to PNG files.
	Bitmap bmp;
	for (int i = 0; i < 3; i++)
	{
		bmp = FrameToBitmap(video[i]);
		bmp.Save(saveName + i.ToString() + ".png");
	}
	for (int i = video.FrameCount - 4; i < video.FrameCount; i++)
	{
		bmp = FrameToBitmap(video[i]);
		bmp.Save(saveName + i.ToString() + ".png");
	}
}
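// TestVideoFunctionality above uses a FrameToBitmap helper that is not shown in this
// excerpt. The following is only a minimal sketch of such a helper, assuming Rgb
// exposes R, G and B components; the actual helper in the test class may differ.
private static Bitmap FrameToBitmap(Frame frame)
{
	var bmp = new Bitmap(frame.Size.Width, frame.Size.Height);
	for (int x = 0; x < frame.Size.Width; x++)
	{
		for (int y = 0; y < frame.Size.Height; y++)
		{
			// Copy each pixel of the Frame into the System.Drawing.Bitmap so it can be saved as PNG.
			Rgb pixel = frame[x, y];
			bmp.SetPixel(x, y, System.Drawing.Color.FromArgb(pixel.R, pixel.G, pixel.B));
		}
	}
	return bmp;
}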
public void ArtifactOverlay()
{
	// Uniform gray reference frame.
	Frame testFrame = new Frame(new YuvKA.VideoModel.Size(80, 80));
	for (int x = 0; x < testFrame.Size.Width; x++)
	{
		for (int y = 0; y < testFrame.Size.Height; y++)
		{
			testFrame[x, y] = new Rgb(111, 111, 111);
		}
	}

	// Altered frame with a gradient, so the overlay has differences to highlight.
	Frame alteredTestFrame = new Frame(testFrame.Size);
	for (int x = 0; x < testFrame.Size.Width; x++)
	{
		for (int y = 0; y < testFrame.Size.Height; y++)
		{
			alteredTestFrame[x, y] = new Rgb((byte)(x + y), (byte)(x + y), (byte)(x + y));
		}
	}

	Frame[] input = { alteredTestFrame, testFrame };
	OverlayNode node = new OverlayNode { Type = new ArtifactsOverlay() };
	node.ProcessCore(input, 0);

	List<Frame> output = new List<Frame>();
	output.Add(node.Data);
	YuvEncoder.Encode(@"..\..\..\..\output\ArtifactOverlayTest_80x80.yuv", output);
}
public void TestVectorOverlay()
{
	// Uniform gray 64x48 frame, i.e. 4x3 macroblocks of 16x16 pixels, matching the 12 decisions below.
	Frame testFrame = new Frame(new YuvKA.VideoModel.Size(64, 48));
	for (int x = 0; x < testFrame.Size.Width; x++)
	{
		for (int y = 0; y < testFrame.Size.Height; y++)
		{
			testFrame[x, y] = new Rgb(111, 111, 111);
		}
	}

	// One motion vector per macroblock, covering all directions and a few extreme lengths.
	MacroblockDecision[] decisions = new MacroblockDecision[12];
	decisions[0] = new MacroblockDecision { Movement = new Vector(0.0, 12.0) };
	decisions[1] = new MacroblockDecision { Movement = new Vector(12.0, 12.0) };
	decisions[2] = new MacroblockDecision { Movement = new Vector(12.0, 0.0) };
	decisions[3] = new MacroblockDecision { Movement = new Vector(12.0, -12.0) };
	decisions[4] = new MacroblockDecision { Movement = new Vector(3.0, -12.0) };
	decisions[5] = new MacroblockDecision { Movement = new Vector(-38.0, -15.0) };
	decisions[6] = new MacroblockDecision { Movement = new Vector(-120.0, 0.0) };
	decisions[7] = new MacroblockDecision { Movement = new Vector(-20.0, 20.0) };
	decisions[8] = new MacroblockDecision { Movement = new Vector(4.0, 0.0) };
	decisions[9] = new MacroblockDecision { Movement = new Vector(0.0, 4.0) };
	decisions[10] = new MacroblockDecision { Movement = new Vector(4.0, 4.0) };
	decisions[11] = new MacroblockDecision { Movement = new Vector(-4.0, 0.0) };

	Frame[] input = { new AnnotatedFrame(testFrame, decisions) };
	OverlayNode node = new OverlayNode { Type = new MoveVectorsOverlay() };
	node.ProcessCore(input, 0);

	List<Frame> output = new List<Frame>();
	output.Add(node.Data);
	YuvEncoder.Encode(@"..\..\..\..\output\VectorOverlayTest_64x48.yuv", output);
}
public void TestYuvEncoder()
{
	int width = 352;
	int height = 240;
	// Adjust these paths to your local checkout before running.
	string fileName = @"..\..\..\..\resources\americanFootball_352x240_125.yuv";
	string saveName = @"..\..\..\..\output\yuvencoder-output_352x240_125.yuv";

	// Warning: this decodes and re-encodes every frame, so depending on the input file
	// it can take a while and produce a large output file.
	YuvEncoder.Video video = new YuvEncoder.Video(new YuvKA.VideoModel.Size(width, height), fileName, null, null);
	IEnumerable<Frame> frameList = Enumerable.Range(0, video.FrameCount).Select(i => video[i]);
	YuvEncoder.Encode(saveName, frameList);
}
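// Background sketch, not part of the original tests: assuming the .yuv files are raw
// planar YUV 4:2:0 (which the fixed width/height arguments suggest), each frame occupies
// width * height * 3 / 2 bytes, so the frame count could also be derived from the file
// size instead of being parsed out of the file name. For the 352x240 clip used above
// that is 352 * 240 * 3 / 2 = 126720 bytes per frame.
private static int GuessFrameCount(string fileName, int width, int height)
{
	long bytesPerFrame = width * height * 3L / 2L;
	return (int)(new System.IO.FileInfo(fileName).Length / bytesPerFrame);
}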
public SaveNodeOutputViewModel(Node.Output output, Stream stream, PipelineState model)
{
	this.model = model;
	// Render TickCount ticks of the chosen node, report progress after each tick and
	// project each result dictionary onto the requested output.
	IEnumerable<Frame> frames = model.Driver.RenderTicks(new[] { output.Node }, 0, TickCount, cts.Token)
		.Do(_ => { CurrentTick++; NotifyOfPropertyChange(() => CurrentTick); })
		.Select(dic => dic[output])
		.ToEnumerable();
	// Encode on a background task so the UI stays responsive, then close the view.
	Task.Factory.StartNew(() =>
	{
		using (stream)
			YuvEncoder.Encode(stream, frames);
		TryClose();
	});
}
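// The constructor above relies on members declared elsewhere in the view model. The
// following is only a sketch of what they are assumed to look like, inferred from
// their usage; the real declarations may differ. NotifyOfPropertyChange and TryClose
// presumably come from a Caliburn.Micro Screen base class.
private readonly PipelineState model;
private readonly CancellationTokenSource cts = new CancellationTokenSource();
public int TickCount { get; set; }
public int CurrentTick { get; private set; }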
public void ViewlessPipeline()
{
	var input = new VideoInputNode { FileName = new FilePath(@"..\..\..\..\resources\americanFootball_352x240_125.yuv") };
	Node graph = new BrightnessContrastSaturationNode { Contrast = 10 }; //new BlurNode { Radius = 3 };
	graph.Inputs[0].Source = input.Outputs[0];

	IObservable<Frame> frames = new PipelineDriver().RenderTicks(new[] { graph }, tickCount: input.TickCount)
		.Select(dic => dic[graph.Outputs[0]]);

	YuvEncoder.Encode(
		@"..\..\..\..\output\ViewlessPipeline_sif.yuv",
		frames.ToEnumerable()
	);
}
public void TestMacroBlockOverlay()
{
	// Uniform gray 64x64 frame, i.e. 4x4 macroblocks, one per partitioning decision below.
	Frame testFrame = new Frame(new YuvKA.VideoModel.Size(64, 64));
	for (int x = 0; x < testFrame.Size.Width; x++)
	{
		for (int y = 0; y < testFrame.Size.Height; y++)
		{
			testFrame[x, y] = new Rgb(111, 111, 111);
		}
	}

	// Cover every partitioning decision the overlay distinguishes, plus null and Unknown.
	MacroblockDecision[] decisions = new MacroblockDecision[16];
	decisions[0] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.InterSkip };
	decisions[1] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter16x16 };
	decisions[2] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter16x8 };
	decisions[3] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x16 };
	decisions[4] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x8 };
	decisions[5] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter4x8 };
	decisions[6] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x4 };
	decisions[7] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter4x4 };
	decisions[8] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra16x16 };
	decisions[9] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra8x8 };
	decisions[10] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra4x4 };
	decisions[11] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Unknown };
	decisions[12] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x8OrBelow };
	decisions[13] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.IntraPCM };
	decisions[14] = new MacroblockDecision { PartitioningDecision = null };
	decisions[15] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Unknown };

	Frame[] input = { new AnnotatedFrame(testFrame, decisions) };
	OverlayNode node = new OverlayNode { Type = new BlocksOverlay() };
	node.ProcessCore(input, 0);

	List<Frame> output = new List<Frame>();
	output.Add(node.Data);
	YuvEncoder.Encode(@"..\..\..\..\output\BlockOverlayTest_64x64.yuv", output);
}