Example #1
		public Frame Process(Frame[] input)
		{
			Frame result = new Frame(input[0].Size);
			AnnotatedFrame frameWithLogs = (AnnotatedFrame)input[0];
			/* Create Bitmap to draw the vectors on */
			using (Bitmap drawableFrame = new Bitmap(input[0].Size.Width, input[0].Size.Height)) {
				for (int x = 0; x < input[0].Size.Width; x++) {
					for (int y = 0; y < input[0].Size.Height; y++) {
						Rgb pixel = input[0][x, y];
						drawableFrame.SetPixel(x, y, Color.FromArgb(pixel.R, pixel.G, pixel.B));
					}
				}
				/* Draw the movement vector of each macroblock */
				for (int x = 0; x < (input[0].Size.Width / 16); x++) {
					for (int y = 0; y < (input[0].Size.Height / 16); y++) {
						DrawVector(drawableFrame, x * 16, y * 16, GetScaledVector(14.0, frameWithLogs.Decisions[x, y].Movement));
					}
				}
				/* Copy the frame with vectors into the result frame */
				for (int x = 0; x < input[0].Size.Width; x++) {
					for (int y = 0; y < input[0].Size.Height; y++) {
						Rgb pixel = new Rgb(drawableFrame.GetPixel(x, y).R, drawableFrame.GetPixel(x, y).G, drawableFrame.GetPixel(x, y).B);
						result[x, y] = pixel;
					}
				}
			}
			return result;
		}
Example #2
		public override YuvKA.VideoModel.Frame[] Process(YuvKA.VideoModel.Frame[] inputs, int tick)
		{
			Size maxSize = Frame.MaxBoundaries(inputs);
			Frame outputFrame = new Frame(maxSize);
			byte resultR;
			byte resultG;
			byte resultB;
			for (int x = 0; x < maxSize.Width; x++) {
				for (int y = 0; y < maxSize.Height; y++) {
					resultR = 0;
					resultG = 0;
					resultB = 0;

					resultR += inputs[0].GetPixelOrBlack(x, y).R;
					resultR += inputs[0].GetPixelOrBlack(x, y).G;
					resultR += inputs[0].GetPixelOrBlack(x, y).B;

					resultG += inputs[1].GetPixelOrBlack(x, y).R;
					resultG += inputs[1].GetPixelOrBlack(x, y).G;
					resultG += inputs[1].GetPixelOrBlack(x, y).B;

					resultB += inputs[2].GetPixelOrBlack(x, y).R;
					resultB += inputs[2].GetPixelOrBlack(x, y).G;
					resultB += inputs[2].GetPixelOrBlack(x, y).B;

					outputFrame[x, y] = new Rgb(resultR, resultG, resultB);
				}
			}
			return new[] { outputFrame };
		}
Example #3
		public override YuvKA.VideoModel.Frame[] Process(Frame[] inputs, int tick)
		{
			if (tick == frameNum)
				return inputs;
			else
				return new Frame[] { blackFrame };

		}
Example #4
		/// <summary>
		/// Returns the largest boundaries found in the specified frame array, so that every frame's size fits within the returned one.
		/// </summary>
		/// <param name="frames">The specified frame array.</param>
		/// <returns>The size object containing the largest boundaries.</returns>
		public static Size MaxBoundaries(Frame[] frames)
		{
			int maxX = 0;
			int maxY = 0;
			foreach (Frame frame in frames) {
				maxX = Math.Max(maxX, frame.Size.Width);
				maxY = Math.Max(maxY, frame.Size.Height);
			}
			return new Size(maxX, maxY);
		}
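A minimal usage sketch (hypothetical; it reuses only the Frame and Size constructors shown in the other examples). The result is the per-axis maximum, so every input frame fits within it.
		Frame small = new Frame(new YuvKA.VideoModel.Size(100, 200));
		Frame wide = new Frame(new YuvKA.VideoModel.Size(300, 50));
		Size max = Frame.MaxBoundaries(new[] { small, wide });
		// max.Width == 300, max.Height == 200 -- both frames fit within this size.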
Example #5
		/// <summary>
		/// Splits the first entry of the input into its RGB components
		/// </summary>
		/// <param name="inputs">An array of Frames, with only the first entry regarded.</param>
		/// <param name="tick">The index of the Frame which is processes now.</param>
		/// <returns>An array of Frames with the red components of the input in the first,
		/// the green component in the second and the blue component in the third entry. </returns>
		public override Frame[] Process(Frame[] inputs, int tick)
		{
			Size size = inputs[0].Size;
			Frame[] outputs = { new Frame(size), new Frame(size), new Frame(size) };
			for (int x = 0; x < size.Width; x++) {
				for (int y = 0; y < size.Height; y++) {
					outputs[0][x, y] = new Rgb(inputs[0][x, y].R, 0, 0);
					outputs[1][x, y] = new Rgb(0, inputs[0][x, y].G, 0);
					outputs[2][x, y] = new Rgb(0, 0, inputs[0][x, y].B);
				}
			}
			return outputs;
		}
Example #6
		public Frame Process(Frame[] input)
		{
			Frame result = new Frame(input[0].Size);
			for (int x = 0; x < input[0].Size.Width; x++) {
				for (int y = 0; y < input[0].Size.Height; y++) {
					int difference = Math.Abs(input[0][x, y].R - input[1].GetPixelOrBlack(x, y).R);
					difference += Math.Abs(input[0][x, y].G - input[1].GetPixelOrBlack(x, y).G);
					difference += Math.Abs(input[0][x, y].B - input[1].GetPixelOrBlack(x, y).B);
					result[x, y] = (difference >= 40) ? new Rgb(255, 0, 0) : input[0][x, y];
				}
			}
			return result;
		}
Example #7
		/// <summary>
		/// Calculates the distribution of the red color channel in the given frame
		/// </summary>
		private void CalculateR(Frame input, int tick)
		{
			int value;
			int[] intData = new int[256];
			for (int x = 0; x < input.Size.Width; x++) {
				for (int y = 0; y < input.Size.Height; y++) {
					value = input[x, y].R;
					intData[value]++;
				}
			}
			int numberOfPixels = input.Size.Height * input.Size.Width;
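			// Normalize the counts so Data[i] holds the fraction of pixels with red value i (the entries sum to 1).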
			for (int i = 0; i < 256; i++) {
				Data[i] = (double)intData[i] / numberOfPixels;
			}
		}
Example #8
		/// <summary>
		/// Calculates the data according to the Type of the HistogramNode.
		/// </summary>
		/// <param name="inputs">The list of inputs for the Node. In this case it is an aaray with 
		/// one entry</param>
		/// <param name="tick">The index of the current frame. This parameter is unnused in this method.</param>
		public override void ProcessCore(Frame[] inputs, int tick)
		{
			if (Type == HistogramType.R) {
				CalculateR(inputs[0], tick);
			}
			else if (Type == HistogramType.G) {
				CalculateG(inputs[0], tick);
			}
			else if (Type == HistogramType.B) {
				CalculateB(inputs[0], tick);
			}
			else if (Type == HistogramType.Value) {
				CalculateValue(inputs[0], tick);
			}
		}
Example #9
		/// <summary>
		/// A Process method to be used by AnonymousNodes. 
		/// Generates an array of one Frame and two AnnotatedFrames with randomly filled Data.
		/// </summary>
		/// <param name="inputs">The inputs used for processing. This parameter is not used here.</param>
		/// <param name="tick"> The current index of the frame. This parameter is not used here.</param>
		/// <returns> An array of generated Frames.</returns>
		public static Frame[] SourceNode(Frame[] inputs, int tick)
		{
			var testSize = new YuvKA.VideoModel.Size(8, 8);
			Frame[] outputs = {
				GenerateAnnFrame(new MacroblockDecision[,] { {
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Inter4x4 },
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Inter4x4 },
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Inter8x4 } } }),
				new Frame(testSize),
				GenerateAnnFrame(new MacroblockDecision[,] { {
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Intra4x4 },
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Intra4x4 },
					new MacroblockDecision { Movement = new Vector(0.0, 0.0), PartitioningDecision = MacroblockPartitioning.Inter8x4 } } }),
			};
			for (int x = 0; x < testSize.Width; x++) {
				for (int y = 0; y < testSize.Height; y++) {
					outputs[1][x, y] = new Rgb((byte)(x * y), (byte)(x * y), (byte)(x * y));
				}
			}
			return outputs;
		}
Example #10
		/// <summary>
		/// Adds up the weighted frames and finally averages them.
		/// If there are n frames to be merged and w_1, ..., w_n are their weights, the resulting frame is computed using this formula:
		/// newPixel_xy = (sum(w_i * oldValue_xy_i)) / sum(w_i)
		/// (xy represents the position of the pixel in the frame.)
		/// </summary>
		public override Frame[] Process(Frame[] inputs, int tick)
		{
			Size maxSize = Frame.MaxBoundaries(inputs);
			double sumOfWeights = Weights.Sum();
			Frame[] output = { new Frame(new Size(maxSize.Width, maxSize.Height)) };

			for (int x = 0; x < maxSize.Width; x++) {
				for (int y = 0; y < maxSize.Height; y++) {
					// sums up the weighted values
					double newR = 0, newG = 0, newB = 0;
					for (int i = 0; i < inputs.Length; i++) {
						newR += Weights[i] * inputs[i].GetPixelOrBlack(x, y).R;
						newG += Weights[i] * inputs[i].GetPixelOrBlack(x, y).G;
						newB += Weights[i] * inputs[i].GetPixelOrBlack(x, y).B;
					}
					// averages the values
					newR = newR / sumOfWeights;
					newG = newG / sumOfWeights;
					newB = newB / sumOfWeights;
					output[0][x, y] = new Rgb((byte)newR, (byte)newG, (byte)newB);
				}
			}
			return output;
		}
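A standalone sketch of the per-pixel formula from the summary above; WeightedAverage is a hypothetical helper, not part of the node.
		// Computes sum(w_i * value_i) / sum(w_i) for a single color channel.
		static byte WeightedAverage(byte[] values, double[] weights)
		{
			double weightSum = 0, weightedSum = 0;
			for (int i = 0; i < values.Length; i++) {
				weightSum += weights[i];
				weightedSum += weights[i] * values[i];
			}
			return (byte)(weightedSum / weightSum);
		}
		// Example: values { 100, 200 } with weights { 1.0, 3.0 } give (100 + 600) / 4 = 175.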
Example #11
		/// <summary>
		/// Calculates the distribution of the brightness (= Value in the HSV model) in the given frame
		/// </summary>
		private void CalculateValue(Frame input, int tick)
		{
			Color rgbValue;
			int value;
			int[] intData = new int[256];
			for (int x = 0; x < input.Size.Width; x++) {
				for (int y = 0; y < input.Size.Height; y++) {
					/* Convert Frame Rgb struct to Color struct. */
					rgbValue = Color.FromArgb(input[x, y].R, input[x, y].G, input[x, y].B);
					/* Brightness (=Value) is stored as a float from 0.0 to 1.0, hence we have to convert to an int from 0 to 255. */
					value = (int)(rgbValue.GetBrightness() * 255);
					intData[value]++;
				}
			}
			int numberOfPixels = input.Size.Height * input.Size.Width;
			for (int i = 0; i < 256; i++) {
				Data[i] = (double)intData[i] / numberOfPixels;
			}
		}
Example #12
		public void TestNoOverlay()
		{
			Frame testFrame = new Frame(new YuvKA.VideoModel.Size(80, 80));
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					testFrame[x, y] = new Rgb(111, 111, 111);
				}
			}
			Frame[] input = { testFrame };
			OverlayNode node = new OverlayNode { Type = new NoOverlay() };
			node.ProcessCore(input, 0);
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					Assert.Equal(testFrame[x, y], node.Data[x, y]);
				}
			}
		}
Example #13
			/// <summary>
			/// Indexer for video frames so the video can be accessed like an array.
			/// Throws IndexOutOfRangeException when an invalid index is chosen.
			/// </summary>
			/// <returns>
			/// The Frame at the given index.
			/// </returns>
			public Frame this[int index]
			{
				get
				{
					if (index < 0 || index >= FrameCount) {
						throw new IndexOutOfRangeException();
					}
					if (index != lastTick) {
						frameCache = YuvToRgb(ReadYuvFrames(fileName, index, frameSize, 1), frameSize.Width, frameSize.Height);
						if (logFileName != null || motionVectorFileName != null) {
							frameCache = AddAnnotations(frameCache, logFile, motionVectorFile, index);
						}
						lastTick = index;
					}
					return frameCache;
				}
			}
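A hypothetical usage sketch, assuming video is an instance of the class exposing this indexer:
			Frame frame = video[10];     // decodes frame 10 from the file and stores it in frameCache
			Frame sameFrame = video[10]; // lastTick == 10, so the cached frame is returned without re-reading the file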
Example #14
File: Node.cs Project: Kha/YUV.KA
		public abstract Frame[] Process(Frame[] inputs, int tick);
Example #15
		public abstract void ProcessCore(Frame[] inputs, int tick);
Example #16
		public sealed override Frame[] Process(Frame[] inputs, int tick)
		{
			ProcessCore(inputs, tick);
			return new Frame[] { };
		}
Example #17
		/// <summary>
		/// Creates the frame that belongs to the specified tick, according to the task of the inheriting node.
		/// </summary>
		public sealed override Frame[] Process(Frame[] inputs, int tick)
		{
			return new[] { OutputFrame(tick) };
		}
Example #18
		public override Frame[] Process(Frame[] inputs, int tick)
		{
			return process(inputs, tick);
		}
Example #19
		/// <summary>
		/// Helper method for converting a Frame object into raw YUV data that can be saved to a file.
		/// Operates under the assumption that the frame width and height are divisible by 2.
		/// This can be made a lot more efficient; it currently uses (w*h * 1.5) steps but can be done in (w*h).
		/// </summary>
		private static byte[] RgbToYuv(Frame inputFrame)
		{
			int yuvDataSize = (int)(inputFrame.Size.Height * inputFrame.Size.Width * 1.5);
			byte[] yuvData = new byte[yuvDataSize];

			int y, x;
			// fill Y data frame
			for (y = 0; y < inputFrame.Size.Height; y++) {
				for (x = 0; x < inputFrame.Size.Width; x++) {
					// This formula is taken from the wikipedia article for YCbCr
					// It's the ITU-R 601 version, but hand-tweaked.
					// This is optimized for readability, not speed
					int r = inputFrame[x, y].R;
					int g = inputFrame[x, y].G;
					int b = inputFrame[x, y].B;
					yuvData[y * inputFrame.Size.Width + x] = ClampToByte(16 + (65.738 * r / 256) + (129.657 * g / 256) + (25.064 * b / 256));
				}
			}

			// fill U and V data frames
			int offset = inputFrame.Size.Width * inputFrame.Size.Height;
			int smallOffset = offset / 4;
			for (y = 0; y < inputFrame.Size.Height / 2; y++) {
				for (x = 0; x < inputFrame.Size.Width / 2; x++) {
					// since the U and V data frames are only a quarter the size of the RGB version, we need to average the
					// 4 pixels that will be saved as one in order not to lose too much information
					int r = (inputFrame[2 * x, 2 * y].R + inputFrame[2 * x + 1, 2 * y].R + inputFrame[2 * x, 2 * y + 1].R + inputFrame[2 * x + 1, 2 * y + 1].R) / 4;
					int g = (inputFrame[2 * x, 2 * y].G + inputFrame[2 * x + 1, 2 * y].G + inputFrame[2 * x, 2 * y + 1].G + inputFrame[2 * x + 1, 2 * y + 1].G) / 4;
					int b = (inputFrame[2 * x, 2 * y].B + inputFrame[2 * x + 1, 2 * y].B + inputFrame[2 * x, 2 * y + 1].B + inputFrame[2 * x + 1, 2 * y + 1].B) / 4;
					byte value = ClampToByte(128 + (-37.945 * r / 256) - (74.394 * g / 256) + (112.439 * b / 256));
					yuvData[offset + y * inputFrame.Size.Width / 2 + x] = value;
					value = ClampToByte(128 + (112.439 * r / 256) - (94.074 * g / 256) - (18.285 * b / 256));
					yuvData[offset + smallOffset + y * inputFrame.Size.Width / 2 + x] = value;
				}
			}
			return yuvData;
		}
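ClampToByte is not shown in this example; a minimal sketch of what it presumably does (clamp the result of the conversion formulas into the valid byte range before the cast):
		private static byte ClampToByte(double value)
		{
			// Without clamping, out-of-range results would wrap around when cast to byte.
			if (value < 0)
				return 0;
			if (value > 255)
				return 255;
			return (byte)value;
		}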
Example #20
		public Frame Process(Frame[] input)
		{
			AnnotatedFrame frameWithLogs = (AnnotatedFrame)input[0];
			Frame result = new Frame(input[0].Size);
			for (int x = 0; x < input[0].Size.Width; x++) {
				for (int y = 0; y < input[0].Size.Height; y++) {
					/* Make result black and white first to emphasize color highlighting */
					byte average = (byte)((input[0][x, y].R + input[0][x, y].G + input[0][x, y].B) / 3);
					result[x, y] = new Rgb(average, average, average);
					result[x, y] = GetMaskedPixel(result[x, y], x + 1, y + 1, frameWithLogs.Decisions[x / 16, y / 16].PartitioningDecision);
				}
			}
			return result;
		}
Example #21
		/// <summary>
		/// Adds log information and movement vector metadata to a given frame
		/// </summary>
		/// <param name="frame">
		/// The basic frame to be enhanced with metadata
		/// </param>
		/// <param name="macroblockPartitionData">
		/// A byte array containing the macroblock decision information to be added to the frame.
		/// Invalid values yield undefined behavior.
		/// </param>
		/// <param name="vectorData">
		/// An array of arrays containing the vector data to be added to the frame.
		/// If not enough data is present, the two-dimensional zero vector (0,0) is used for all remaining macroblocks.
		/// </param>
		/// <param name="index">
		/// The index of the given frame in the video stream.
		/// Used for selecting the right metadata for the given frame.
		/// </param>
		/// <returns>
		/// An instance of AnnotatedFrame bearing the given parameters as source of information.
		/// </returns>
		private static AnnotatedFrame AddAnnotations(Frame frame, byte[] macroblockPartitionData, int[][] vectorData, int index)
		{
			int macroBlockNumber = frame.Size.Width / 16 * frame.Size.Height / 16;
			MacroblockDecision[] decisions = new MacroblockDecision[macroBlockNumber];
			for (int i = 0; i < decisions.Length; i++) {
				decisions[i] = new MacroblockDecision();
			}
			if (macroblockPartitionData != null) {
				for (int i = 0; i < decisions.Length && macroBlockNumber * index + i < macroblockPartitionData.Length; i++) {
					decisions[i].PartitioningDecision = (MacroblockPartitioning)macroblockPartitionData[macroBlockNumber * index + i];
				}
			}
			if (vectorData != null) {
				for (int i = 0; i < decisions.Length; i++) {
					// if we run out of vectors, just pretend there are plenty of zero vectors
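					// The indexing below implies that each frame's vector list is laid out as { x0, y0, x1, y1, ... },
					// i.e. macroblock i reads its components from positions 2 * i and 2 * i + 1.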
					if (index < vectorData.Length && vectorData[index].Length > i * 2 + 1) {
						decisions[i].Movement = new Vector(vectorData[index][i * 2], vectorData[index][i * 2 + 1]);
					}
					else {
						decisions[i].Movement = new Vector(0, 0);
					}
				}
			}
			return new AnnotatedFrame(frame, decisions);
		}
Example #22
		public void ArtifactOverlay()
		{
			Frame testFrame = new Frame(new YuvKA.VideoModel.Size(80, 80));
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					testFrame[x, y] = new Rgb(111, 111, 111);
				}
			}
			Frame alteredTestFrame = new Frame(testFrame.Size);
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					alteredTestFrame[x, y] = new Rgb((byte)(x + y), (byte)(x + y), (byte)(x + y));
				}
			}
			Frame[] input = { alteredTestFrame, testFrame };
			OverlayNode node = new OverlayNode { Type = new ArtifactsOverlay() };
			node.ProcessCore(input, 0);
			List<Frame> output = new List<Frame>();
			output.Add(node.Data);
			YuvEncoder.Encode(@"..\..\..\..\output\ArtifactOverlayTest_80x80.yuv", output);
		}
Example #23
		public void TestMacroBlockOverlay()
		{
			Frame testFrame = new Frame(new YuvKA.VideoModel.Size(64, 64));
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					testFrame[x, y] = new Rgb(111, 111, 111);
				}
			}
			MacroblockDecision[] decisions = new MacroblockDecision[16];
			decisions[0] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.InterSkip };
			decisions[1] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter16x16 };
			decisions[2] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter16x8 };
			decisions[3] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x16 };
			decisions[4] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x8 };
			decisions[5] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter4x8 };
			decisions[6] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x4 };
			decisions[7] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter4x4 };
			decisions[8] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra16x16 };
			decisions[9] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra8x8 };
			decisions[10] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Intra4x4 };
			decisions[11] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Unknown };
			decisions[12] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Inter8x8OrBelow };
			decisions[13] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.IntraPCM };
			decisions[14] = new MacroblockDecision { PartitioningDecision = null };
			decisions[15] = new MacroblockDecision { PartitioningDecision = MacroblockPartitioning.Unknown };
			Frame[] input = { new AnnotatedFrame(testFrame, decisions) };
			OverlayNode node = new OverlayNode { Type = new BlocksOverlay() };
			node.ProcessCore(input, 0);
			List<Frame> output = new List<Frame>();
			output.Add(node.Data);
			YuvEncoder.Encode(@"..\..\..\..\output\BlockOverlayTest_64x64.yuv", output);
		}
Example #24
		public Frame Process(Frame[] input)
		{
			return input[0];
		}
Example #25
		public void TestVectorOverlay()
		{
			Frame testFrame = new Frame(new YuvKA.VideoModel.Size(64, 48));
			for (int x = 0; x < testFrame.Size.Width; x++) {
				for (int y = 0; y < testFrame.Size.Height; y++) {
					testFrame[x, y] = new Rgb(111, 111, 111);
				}
			}
			MacroblockDecision[] decisions = new MacroblockDecision[12];
			decisions[0] = new MacroblockDecision { Movement = new Vector(0.0, 12.0) };
			decisions[1] = new MacroblockDecision { Movement = new Vector(12.0, 12.0) };
			decisions[2] = new MacroblockDecision { Movement = new Vector(12.0, 0.0) };
			decisions[3] = new MacroblockDecision { Movement = new Vector(12.0, -12.0) };
			decisions[4] = new MacroblockDecision { Movement = new Vector(3.0, -12.0) };
			decisions[5] = new MacroblockDecision { Movement = new Vector(-38.0, -15.0) };
			decisions[6] = new MacroblockDecision { Movement = new Vector(-120.0, 0.0) };
			decisions[7] = new MacroblockDecision { Movement = new Vector(-20.0, 20.0) };
			decisions[8] = new MacroblockDecision { Movement = new Vector(4.0, 0.0) };
			decisions[9] = new MacroblockDecision { Movement = new Vector(0.0, 4.0) };
			decisions[10] = new MacroblockDecision { Movement = new Vector(4.0, 4.0) };
			decisions[11] = new MacroblockDecision { Movement = new Vector(-4.0, 0.0) };
			Frame[] input = { new AnnotatedFrame(testFrame, decisions) };
			OverlayNode node = new OverlayNode { Type = new MoveVectorsOverlay() };
			node.ProcessCore(input, 0);
			List<Frame> output = new List<Frame>();
			output.Add(node.Data);
			YuvEncoder.Encode(@"..\..\..\..\output\VectorOverlayTest_64x48.yuv", output);
		}
Example #26
		/// <summary>
		/// Copy constructor for Frame object.
		/// Take heed: this is only a shallow copy. If you modify the copy's data, you modify the original.
		/// </summary>
		/// <param name="frame">
		/// The frame reference to be copied
		/// </param>
		public Frame(Frame frame)
		{
			data = frame.data;
			Size = frame.Size;
		}
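A hypothetical illustration of the shallow-copy caveat described above:
		// Both frames share the same pixel buffer, so writing through the copy is visible in the original.
		Frame original = new Frame(new YuvKA.VideoModel.Size(2, 2));
		Frame copy = new Frame(original);
		copy[0, 0] = new Rgb(255, 0, 0);
		// original[0, 0] is now also (255, 0, 0).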