public void write_video_test()
{
    // Writes 255 synthetic grayscale frames (each a constant-intensity image)
    // to an H.264 AVI file and verifies the file was created.
    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath("output.avi");
    int videoBitRate = 1200 * 1000;

    var videoWriter = new VideoFileWriter();
    videoWriter.Open(path, width, height, framerate, VideoCodec.H264, videoBitRate);

    var m2i = new MatrixToImage();
    for (byte i = 0; i < 255; i++)
    {
        // Frame i is a uniform image whose intensity equals i.
        byte[,] matrix = Matrix.Create(height, width, i);
        Bitmap frame;
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
        frame.Dispose(); // Convert allocates a fresh Bitmap per frame; avoid GDI handle buildup
    }
    videoWriter.Close();

    Assert.IsTrue(File.Exists(path));
}
public void MatrixToImageConstructorTest1()
{
    // The (min, max) constructor must store both range bounds.
    double min = -100;
    double max = +100;

    MatrixToImage target = new MatrixToImage(min, max);

    Assert.AreEqual(min, target.Min);
    Assert.AreEqual(max, target.Max); // previously only Min was verified
}
public void ConvertTest2()
{
    // A 4x4 grayscale image with an inner 2x2 square drawn in the middle.
    double[,] pixels =
    {
        { 0, 0, 0, 0 },
        { 0, 1, 1, 0 },
        { 0, 1, 1, 0 },
        { 0, 0, 0, 0 },
    };

    // Convert the matrix once through the WPF converter...
    var toBitmapSource = new MatrixToBitmapSource();
    BitmapSource image;
    toBitmapSource.Convert(pixels, out image);

    // ...and once through the GDI+ converter.
    var toBitmap = new MatrixToImage();
    Bitmap image2;
    toBitmap.Convert(pixels, out image2);

    // Both conversions must round-trip back to the original matrix
    // and produce the expected pixel formats.
    Assert.AreEqual(pixels, image.ToMatrix(0));
    Assert.AreEqual(pixels, image2.ToMatrix(0));
    Assert.AreEqual(PixelFormats.Gray32Float, image.Format);
    Assert.AreEqual(System.Drawing.Imaging.PixelFormat.Format8bppIndexed, image2.PixelFormat);
}
public void MatrixToImageConstructorTest()
{
    // The parameterless constructor must default to the [0, 1] value range.
    var target = new MatrixToImage();

    Assert.AreEqual(0, target.Min);
    Assert.AreEqual(1, target.Max);
}
public void write_video_test()
{
    // Writes 255 synthetic grayscale frames to an H.264 AVI inside the test
    // directory, checks the configured bit rate round-trips, and verifies
    // the output file exists.
    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath(Path.Combine(TestContext.CurrentContext.TestDirectory, "output.avi"));
    int videoBitRate = 1200 * 1000;

    var videoWriter = new VideoFileWriter();
    videoWriter.Open(path, width, height, framerate, VideoCodec.H264, videoBitRate);
    Assert.AreEqual(videoBitRate, videoWriter.BitRate);

    var m2i = new MatrixToImage();
    for (byte i = 0; i < 255; i++)
    {
        // Frame i is a uniform image whose intensity equals i.
        byte[,] matrix = Matrix.Create(height, width, i);
        Bitmap frame;
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
        frame.Dispose(); // Convert allocates a fresh Bitmap per frame; avoid GDI handle buildup
    }
    videoWriter.Close();

    Assert.IsTrue(File.Exists(path));
}
public void ConvertTest2()
{
    // A 4x4 matrix representing an image with an inner 2x2 square.
    double[,] pixels =
    {
        { 0, 0, 0, 0 },
        { 0, 1, 1, 0 },
        { 0, 1, 1, 0 },
        { 0, 0, 0, 0 },
    };

    // Converter mapping matrix values in [0, 1] onto pixel intensities.
    var converter = new MatrixToImage(min: 0, max: 1);

    Bitmap image;
    converter.Convert(pixels, out image);

    // Upscale so the pattern would be visible on screen.
    image = new ResizeNearestNeighbor(320, 320).Apply(image);

    Assert.AreEqual(0, converter.Min);
    Assert.AreEqual(1, converter.Max);
    Assert.AreEqual(320, image.Height);
    Assert.AreEqual(320, image.Width);
}
public static Bitmap ToBitmap(float[,] texture)
{
    // Render the raw float matrix as a bitmap via Accord's converter.
    var converter = new MatrixToImage();
    Bitmap image;
    converter.Convert(texture, out image);
    return image;
}
public static Bitmap ToBitmap(double[,] rawImage)
{
    // Convert the matrix to an unmanaged image, then return a managed copy.
    MatrixToImage mtrxToImage = new MatrixToImage();
    UnmanagedImage outputImage = null;
    mtrxToImage.Convert(rawImage, out outputImage);
    // ToManagedImage() returns an independent managed copy, so the unmanaged
    // buffer can (and should) be released immediately instead of waiting for
    // the finalizer — previously it was leaked.
    using (outputImage)
    {
        return outputImage.ToManagedImage();
    }
}
static void TestFFMPEG()
{
    // Writes 255 synthetic grayscale frames to a VP8 .webm file.
    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath("output.webm");
    int videoBitRate = 1200 * 1000;

    var videoWriter = new VideoFileWriter();
    videoWriter.Width = width;
    videoWriter.Height = height;
    videoWriter.FrameRate = framerate;
    videoWriter.VideoCodec = VideoCodec.Vp8;
    videoWriter.BitRate = videoBitRate;
    videoWriter.PixelFormat = AVPixelFormat.FormatYuv420P;

    // Audio is currently disabled. To enable it, open the writer with the
    // audio overload instead, e.g.:
    //   videoWriter.Open(path, 44100, AudioLayout.Mono, 44100, AudioCodec.Vorbis, 128000);
    // (The previous version also enumerated capture devices and generated
    // 255 seconds of sine audio that was never written — dead work, removed.)
    videoWriter.Open(path);

    var m2i = new MatrixToImage();
    for (byte i = 0; i < 255; i++)
    {
        // Frame i is a uniform image whose intensity equals i; each frame
        // is held for one second.
        byte[,] matrix = Matrix.Create(height, width, i);
        Bitmap frame;
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(1));
        frame.Dispose(); // avoid accumulating 255 undisposed GDI bitmaps
    }
    videoWriter.Close();
}
/// <summary>
/// Reads <paramref name="length"/> bytes from a .dat file and returns them
/// as a transposed W x H byte matrix. The <paramref name="scale"/> parameter
/// is currently unused: the downscaling path was disabled, but the parameter
/// is kept for interface compatibility with existing callers.
/// </summary>
public byte[,] Dat2DownMat(string path, int length, double scale)
{
    try
    {
        Matrix2Bitmap = new MatrixToImage();
        Bitmap2matrix = new ImageToMatrix();

        byte[] oneshotVector = new byte[length];
        // using guarantees the stream is released even if a read throws
        // (previously Dispose was skipped on exception).
        using (Stream loadstream = new FileStream(path, FileMode.Open))
        {
            // Stream.Read may return fewer bytes than requested; loop until
            // the buffer is full or the file ends (previously the return
            // value was ignored, risking a partially-filled buffer).
            int total = 0;
            while (total < oneshotVector.Length)
            {
                int read = loadstream.Read(oneshotVector, total, oneshotVector.Length - total);
                if (read == 0)
                    break; // end of file
                total += read;
            }
        }

        byte[,] temp = Accord.Math.Matrix.Transpose(Vec2Mat(oneshotVector, ImgInfo.W, ImgInfo.H));
        return temp;
    }
    catch (Exception ex)
    {
        // Best-effort: surface the error to the user and return an empty matrix.
        MessageBox.Show(ex.ToString());
        return new byte[0, 0];
    }
}
static void TestFFMPEG2()
{
    // Demonstrates the property-based VideoFileWriter API and writes
    // 255 synthetic grayscale frames to an MPEG4 AVI file.
    string outputPath = Path.GetFullPath("output.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
    };

    // We can open it for writing:
    videoWriter.Open(outputPath);

    // Properties we set beforehand can be read back, and FFMPEG fills in
    // the audio-related ones we did not set:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioChannels = videoWriter.AudioLayout;
    int numberOfChannels = videoWriter.NumberOfChannels;

    // Now, save dummy images of changing intensity:
    var m2i = new MatrixToImage();
    for (byte i = 0; i < 255; i++)
    {
        // Create a bitmap from a matrix of uniform values:
        byte[,] matrix = Matrix.Create(height, width, i);
        Bitmap frame;
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream, specifying the moment
        // it should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
        frame.Dispose(); // each Convert allocates a new Bitmap
    }

    // Close the writer — previously this was missing, so the container
    // trailer was never written and the output file was left invalid.
    videoWriter.Close();
    videoWriter.Dispose();
}
public void ConvertTest1()
{
    // Round-trip check: a hand-written 16x16 binary matrix converted to a
    // Bitmap must match the reference resource image1 after it has been
    // thresholded and inverted. Comparison is done pixel-by-pixel after
    // converting both images back to matrices with the same converter.
    MatrixToImage target = new MatrixToImage();

    // Four active pixels at (2,2), (2,13), (13,2), (13,13).
    double[,] pixels =
    {
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 0
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 1
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 2
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 3
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 4
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 5
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 6
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 7
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 8
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 9
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 10
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 11
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 12
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 13
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 14
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 15
    };

    // Matrix -> Bitmap.
    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    // Bitmap -> matrix (the "actual" side of the comparison).
    double[,] actual;
    ImageToMatrix c = new ImageToMatrix();
    c.Convert(imageActual, out actual);

    // Build the "expected" side from the reference resource: clone so the
    // shared resource is not mutated, then binarize and invert in place.
    double[,] expected;
    Bitmap imageExpected = Accord.Imaging.Image.Clone(Resources.image1);
    new Threshold().ApplyInPlace(imageExpected);
    new Invert().ApplyInPlace(imageExpected);
    c.Convert(imageExpected, out expected);

    for (int i = 0; i < pixels.GetLength(0); i++)
    {
        for (int j = 0; j < pixels.GetLength(1); j++)
        {
            Assert.AreEqual(actual[i, j], expected[i, j]);
        }
    }
}
/// <summary>
/// Converts a byte matrix into a WPF BitmapSource, or null on failure.
/// </summary>
public static BitmapSource CreateBitmapSource(byte[,] input)
{
    try
    {
        MatrixToImage Matrix2Bitmap = new MatrixToImage();
        // Convert allocates the output bitmap itself; the previous version
        // pre-allocated a Bitmap that was immediately replaced and leaked.
        Bitmap bitout;
        Matrix2Bitmap.Convert(input, out bitout);
        BitmapSource bitsource = ToWpfBitmap(bitout);
        return bitsource;
    }
    catch (Exception)
    {
        // Best-effort conversion: callers treat null as "no image".
        return null;
    }
}
private static void write_and_open(Rational framerate, int num, int den)
{
    // Writes a test video with the given (possibly non-integer) frame rate,
    // then reads it back and checks that the frame rate round-trips as the
    // expected numerator/denominator pair.
    int width = 800;
    int height = 600;
    string path = Path.GetFullPath(Path.Combine(TestContext.CurrentContext.TestDirectory, "output2.avi"));
    int videoBitRate = 1200 * 1000;

    {
        var videoWriter = new VideoFileWriter();
        videoWriter.Open(path, width, height, framerate, VideoCodec.FfvHuff, videoBitRate);
        Assert.AreEqual(width, videoWriter.Width);
        Assert.AreEqual(height, videoWriter.Height);
        Assert.AreEqual(videoBitRate, videoWriter.BitRate);
        Assert.AreEqual(num, videoWriter.FrameRate.Numerator);
        Assert.AreEqual(den, videoWriter.FrameRate.Denominator);

        var m2i = new MatrixToImage();
        for (byte i = 0; i < 255; i++)
        {
            // Frame i is a uniform image whose intensity equals i.
            byte[,] matrix = Matrix.Create(height, width, i);
            Bitmap frame;
            m2i.Convert(matrix, out frame);
            videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
            frame.Dispose(); // avoid GDI handle buildup over 255 frames
        }
        videoWriter.Close();
    }

    Assert.IsTrue(File.Exists(path));

    {
        VideoFileReader reader = new VideoFileReader();
        reader.Open(path);
        Assert.AreEqual(width, reader.Width);
        Assert.AreEqual(height, reader.Height);
        //Assert.AreEqual(videoBitRate, reader.BitRate);
        Assert.AreEqual(num, reader.FrameRate.Numerator);
        Assert.AreEqual(den, reader.FrameRate.Denominator);
        reader.Close(); // previously left open, keeping the file locked
    }
}
private static long[,] create(byte[,] img, PixelFormat format)
{
    // Render the byte matrix as a bitmap in the requested pixel format,
    // then build its integral image and return the accumulated table.
    var converter = new MatrixToImage();
    converter.Format = format;

    Bitmap bitmap;
    converter.Convert(img, out bitmap);
    Assert.AreEqual(format, bitmap.PixelFormat);

    IntegralImage2 integral = IntegralImage2.FromBitmap(bitmap, 0);
    return integral.Image;
}
public void ConvertTest2()
{
    // Round-trip: image -> matrix -> image must preserve the set of
    // active (non-black) pixels.
    Bitmap sourceImage = Accord.Imaging.Image.Clone(Properties.Resources.image1);
    new Threshold().ApplyInPlace(sourceImage); // binarize the source

    // Converters with matching value ranges.
    var imageToMatrix = new ImageToMatrix() { Min = 0, Max = 255 };
    var matrixToImage = new MatrixToImage() { Min = 0, Max = 255 };

    // Forward conversion to a matrix.
    double[,] matrix;
    imageToMatrix.Convert(sourceImage, out matrix);

    // Back to an image.
    Bitmap resultImage;
    matrixToImage.Convert(matrix, out resultImage);

    // Compare the active-pixel sets of the two images.
    UnmanagedImage img1 = UnmanagedImage.FromManagedImage(sourceImage);
    UnmanagedImage img2 = UnmanagedImage.FromManagedImage(resultImage);
    List<IntPoint> p1 = img1.CollectActivePixels();
    List<IntPoint> p2 = img2.CollectActivePixels();
    Assert.IsTrue(new HashSet<IntPoint>(p1).SetEquals(p2));
}
public void ConvertTest()
{
    // With range [0, 128], only entries equal to 128 map to active pixels.
    var converter = new MatrixToImage(min: 0, max: 128);
    byte[,] input =
    {
        { 0,   0,   0 },
        { 0, 128,   0 },
        { 0,   0, 128 },
    };

    UnmanagedImage bitmap;
    converter.Convert(input, out bitmap);

    var pixels = bitmap.CollectActivePixels();
    Assert.AreEqual(2, pixels.Count);
    foreach (var expected in new[] { new IntPoint(1, 1), new IntPoint(2, 2) })
    {
        Assert.IsTrue(pixels.Contains(expected));
    }
}
/// <summary>
/// Converts an image matrix into a BitmapSource.
/// </summary>
/// <param name="rawImage">The image matrix, with values in the [0, 1] range.</param>
/// <returns>A BitmapSource holding the rendered image.</returns>
public BitmapSource ArrayToBitmapImage(double[,] rawImage)
{
    Bitmap imageBitmap;
    MatrixToImage conventer = new MatrixToImage(min: 0, max: 1);
    conventer.Convert(rawImage, out imageBitmap);

    var imageBitmapImage = BitmapToBitmapImage(imageBitmap);
    // NOTE(review): the previous version also built a PngBitmapEncoder and
    // added a frame to it, but the encoder's output was never used (the
    // file save was commented out) — removed as dead work. Restore a
    // PngBitmapEncoder + encoder.Save(stream) if PNG export is needed.
    return imageBitmapImage;
}
/// <summary>
/// Reads ImgInfo.WH bytes from a .dat file and returns them reshaped
/// into a W x H byte matrix, or an empty matrix on failure.
/// </summary>
public byte[,] Dat2Mat(string path)
{
    try
    {
        Matrix2Bitmap = new MatrixToImage();
        Bitmap2matrix = new ImageToMatrix();

        byte[] oneshotVector = new byte[ImgInfo.WH];
        // using guarantees the stream is released even if a read throws
        // (previously Dispose was skipped when Read raised an exception).
        using (Stream loadstream = new FileStream(path, FileMode.Open))
        {
            // Stream.Read may return fewer bytes than requested; loop until
            // the buffer is full or the file ends (the return value used to
            // be ignored, risking a partially-filled buffer).
            int total = 0;
            while (total < oneshotVector.Length)
            {
                int read = loadstream.Read(oneshotVector, total, oneshotVector.Length - total);
                if (read == 0)
                    break; // end of file
                total += read;
            }
        }

        return Vec2Mat(oneshotVector, ImgInfo.W, ImgInfo.H);
    }
    catch (Exception ex)
    {
        // Best-effort: surface the error to the user and return an empty matrix.
        MessageBox.Show(ex.ToString());
        return new byte[0, 0];
    }
}
public void ConvertTest2()
{
    // Round-trip: image -> matrix -> image must preserve the set of
    // active (non-black) pixels.
    //
    // Clone the shared resource before mutating it: ApplyInPlace on
    // Properties.Resources.image1 directly would corrupt the resource for
    // every other test (the sibling version of this test already clones).
    Bitmap sourceImage = Accord.Imaging.Image.Clone(Properties.Resources.image1);

    // Make sure values are binary
    new Threshold().ApplyInPlace(sourceImage);

    // Create the converters
    ImageToMatrix imageToMatrix = new ImageToMatrix() { Min = 0, Max = 255 };
    MatrixToImage matrixToImage = new MatrixToImage() { Min = 0, Max = 255 };

    // Convert to matrix
    double[,] matrix; // initialization is not needed
    imageToMatrix.Convert(sourceImage, out matrix);

    // Revert to image
    Bitmap resultImage; // initialization is not needed
    matrixToImage.Convert(matrix, out resultImage);

    // Compare the active-pixel sets of both images.
    UnmanagedImage img1 = UnmanagedImage.FromManagedImage(sourceImage);
    UnmanagedImage img2 = UnmanagedImage.FromManagedImage(resultImage);

    List<IntPoint> p1 = img1.CollectActivePixels();
    List<IntPoint> p2 = img2.CollectActivePixels();

    bool equals = new HashSet<IntPoint>(p1).SetEquals(p2);
    Assert.IsTrue(equals);
}
public void ConvertTest1()
{
    // Round-trip check: a hand-written 16x16 binary matrix converted to a
    // Bitmap must match the reference resource image1 after thresholding
    // and inversion.
    MatrixToImage target = new MatrixToImage();

    // Four active pixels at (2,2), (2,13), (13,2), (13,13).
    double[,] pixels =
    {
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 0
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 1
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 2
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 3
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 4
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 5
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 6
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 7
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 8
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 9
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 10
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 11
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 12
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 13
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 14
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 15
    };

    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    double[,] actual;
    ImageToMatrix c = new ImageToMatrix();
    c.Convert(imageActual, out actual);

    // Clone the shared resource before mutating it: ApplyInPlace on
    // Properties.Resources.image1 directly would corrupt the resource for
    // every other test (the sibling version of this test already clones).
    double[,] expected;
    Bitmap imageExpected = Accord.Imaging.Image.Clone(Properties.Resources.image1);
    new Threshold().ApplyInPlace(imageExpected);
    new Invert().ApplyInPlace(imageExpected);
    c.Convert(imageExpected, out expected);

    for (int i = 0; i < pixels.GetLength(0); i++)
        for (int j = 0; j < pixels.GetLength(1); j++)
            Assert.AreEqual(actual[i, j], expected[i, j]);
}
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save file using a .avi media
    // container and a MPEG4 (DivX/XVid) codec, saving it into:
    string outputPath = Path.Combine(basePath, "output_video.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
    };

    // We can open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, specially for properties
    // that we didn't set beforehand but that have been filled by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;

    // We haven't set those properties, but FFMPEG has filled them for us:
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // Now, let's say we would like to save dummy images of changing color
    var m2i = new MatrixToImage();
    Bitmap frame;
    for (byte i = 0; i < 255; i++)
    {
        // Create bitmap matrix from a matrix of RGB values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify
        // the moment that this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
        frame.Dispose(); // each Convert allocates a new Bitmap
    }

    // We can get how long our written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    videoWriter.Dispose();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.Mpeg4, videoCodec);
    Assert.IsTrue(AudioCodec.Default == audioCodec || AudioCodec.Mp3 == audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    // Fixed: this assertion previously compared audioLayout with itself and
    // could never fail. Two channels are asserted below, so the layout is
    // expected to be stereo.
    Assert.AreEqual(AudioLayout.Stereo, audioLayout); // NOTE(review): confirm against the FFMPEG default layout
    Assert.AreEqual(2, audioChannels);
}
private static long[,] create(byte[,] img, PixelFormat format)
{
    // Build a bitmap from the byte matrix using the requested pixel format
    // and return the integral-image table computed from it.
    var toImage = new MatrixToImage { Format = format };

    Bitmap rendered;
    toImage.Convert(img, out rendered);
    Assert.AreEqual(format, rendered.PixelFormat);

    return IntegralImage2.FromBitmap(rendered, 0).Image;
}
/// <summary>
/// Collects the down-scaled sub-matrices of a zoomed region that spans one or
/// more tiled DAT files. The region [Sx..Ex] x [Sy..Ey] may start and end in
/// different file tiles (indexed startNumX..endNumX, startNumY..endNumY); for
/// each tile touched, the in-tile pixel window is computed and handed to
/// PutinListBox, and the results are grouped per tile column.
/// </summary>
/// <param name="input">Grid of DAT file paths, indexed [tileX][tileY].</param>
/// <param name="data">Zoom region description (coordinates, tile indices, tile size Wo x Ho).</param>
/// <returns>Per-column lists of extracted byte matrices.</returns>
public List<List<byte[,]>> ListDownScaledZoomedMat(List<List<string>> input, ZoomData data)
{
    /* The real position coordinates held in data are not yet scaled. */
    /* 1. For each source DAT file, compute which coordinate range must be fetched. */
    /* 2. After fetching the data, resize it. */

    // Check start/end file positions and scale ===
    data.Scale = CalcScale(input, data);
    double scale = data.Scale; // the scale is decided here; applied after the actual data is fetched (image -> scale conversion)
    Console.WriteLine("Scale Value is " + $" {scale} ");

    /* Class Instance*/
    FormatConvert fcv = new FormatConvert();
    Matrix2Bitmap = new MatrixToImage();
    Bitmap2matrix = new ImageToMatrix();

    int width = data.Ex - data.Sx;
    int height = data.Ey - data.Sy;
    int WCount = data.endNumX - data.startNumX;
    int HCount = data.endNumY - data.startNumY;

    // Pixel window within the current tile, recomputed per tile below.
    int strPosX = 0;
    int strPosY = 0;
    int endPosX = 0;
    int endPosY = 0;

    List<List<byte[,]>> box = new List<List<byte[,]>>();

    if ((data.endNumX - data.startNumX) == 0 && (data.endNumY - data.startNumY) == 0)
    {
        /*OK*/
        // The whole region lies inside a single tile.
        #region
        List<byte[,]> tempbox = new List<byte[,]>();
        string path = input[data.startNumX][data.startNumY];
        strPosX = data.Sx - data.startNumX * data.Wo;
        strPosY = data.Sy - data.startNumY * data.Ho;
        endPosX = data.Ex - data.endNumX * data.Wo;
        endPosY = data.Ey - data.endNumY * data.Ho;
        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
        box.Add(tempbox);
        #endregion
    }
    else if ((data.endNumY - data.startNumY) == 0)
    {
        /*OK*/
        // Single tile row, multiple tile columns: first/last columns are
        // clipped to the region, middle columns span the full tile width.
        #region
        for (int i = data.startNumX; i <= data.endNumX; i++)
        {
            string path = input[i][data.startNumY];
            List<byte[,]> tempbox = new List<byte[,]>();
            if (i == data.startNumX)
            {
                strPosX = data.Sx - data.startNumX * data.Wo;
                strPosY = data.Sy - data.startNumY * data.Ho;
                endPosX = data.Wo;
                endPosY = data.Ey - data.endNumY * data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
            else if (i == data.endNumX)
            {
                strPosX = 0;
                strPosY = data.Sy - data.startNumY * data.Ho;
                endPosX = data.Ex - data.endNumX * data.Wo;
                endPosY = data.Ey - data.endNumY * data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
            else
            {
                strPosX = 0;
                strPosY = data.Sy - data.startNumY * data.Ho;
                endPosX = data.Wo;
                endPosY = data.Ey - data.endNumY * data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
            box.Add(tempbox);
        }
        #endregion
    }
    else if ((data.endNumX - data.startNumX) == 0)
    {
        /*OK*/
        // Single tile column, multiple tile rows: first/last rows are
        // clipped to the region, middle rows span the full tile height.
        #region
        List<byte[,]> tempbox = new List<byte[,]>();
        for (int j = data.startNumY; j <= data.endNumY; j++)
        {
            string path = input[data.startNumX][j];
            if (j == data.startNumY)
            {
                strPosX = data.Sx - data.startNumX * data.Wo;
                strPosY = data.Sy - data.startNumY * data.Ho;
                endPosX = data.Ex - data.startNumX * data.Wo;
                endPosY = data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
            else if (j == data.endNumY)
            {
                strPosX = data.Sx - data.startNumX * data.Wo;
                strPosY = 0;
                endPosX = data.Ex - data.startNumX * data.Wo;
                endPosY = data.Ey - data.endNumY * data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
            else
            {
                strPosX = data.Sx - data.startNumX * data.Wo;
                strPosY = 0;
                endPosX = data.Ex - data.startNumX * data.Wo;
                endPosY = data.Ho;
                tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
            }
        }
        box.Add(tempbox);
        #endregion
    }
    else
    {
        // General case: multiple tile rows AND columns. The first/last tile
        // column and row are clipped to the region; interior tiles are taken
        // in full.
        for (int i = data.startNumX; i <= data.endNumX; i++)
        {
            List<byte[,]> tempbox = new List<byte[,]>();
            if (i == data.startNumX)
            {
                #region Start X Pos
                for (int j = data.startNumY; j <= data.endNumY; j++)
                {
                    string path = input[i][j];
                    if (j == data.startNumY)
                    {
                        /* Pixel Position in One File Matrix*/
                        strPosX = data.Sx - i * data.Wo;
                        strPosY = data.Sy - j * data.Ho;
                        endPosX = data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else if (j == data.endNumY)
                    {
                        strPosX = data.Sx - i * data.Wo;
                        strPosY = 0;
                        endPosX = data.Wo;
                        endPosY = data.Ey - j * data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else
                    {
                        strPosX = data.Sx - i * data.Wo;
                        strPosY = 0;
                        endPosX = data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                }
                #endregion
            }
            else if (i == data.endNumX)
            {
                #region End X Pos
                for (int j = data.startNumY; j <= data.endNumY; j++)
                {
                    string path = input[i][j];
                    if (j == data.startNumY)
                    {
                        strPosX = 0;
                        strPosY = data.Sy - j * data.Ho;
                        endPosX = data.Ex - i * data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else if (j == data.endNumY)
                    {
                        strPosX = 0;
                        strPosY = 0;
                        endPosX = data.Ex - i * data.Wo;
                        endPosY = data.Ey - j * data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else
                    {
                        strPosX = 0;
                        strPosY = 0;
                        endPosX = data.Ex - i * data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                }
                #endregion
            }
            else
            {
                #region Middle
                for (int j = data.startNumY; j <= data.endNumY; j++)
                {
                    string path = input[i][j];
                    if (j == data.startNumY)
                    {
                        strPosX = 0;
                        strPosY = data.Sy - j * data.Ho;
                        endPosX = data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else if (j == data.endNumY)
                    {
                        strPosX = 0;
                        strPosY = 0;
                        endPosX = data.Wo;
                        endPosY = data.Ey - j * data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                    else
                    {
                        strPosX = 0;
                        strPosY = 0;
                        endPosX = data.Wo;
                        endPosY = data.Ho;
                        tempbox.Add(PutinListBox(path, fcv, strPosX, strPosY, endPosX, endPosY, scale));
                    }
                }
                #endregion
            }
            box.Add(tempbox);
        }
    }
    return (box);
}
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save file using a .mp4 media
    // container, a H.265 video codec for the video stream, and
    // AAC for the audio stream, into the file:
    string outputPath = Path.Combine(basePath, "output_audio.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.H265,
        AudioCodec = AudioCodec.Aac,
        AudioBitRate = 44100, // NOTE(review): 44100 equals the sample rate; an audio *bit* rate of ~44 kbit/s looks suspicious — confirm intended value
        AudioLayout = AudioLayout.Stereo,
        FrameSize = 44100,
        PixelFormat = AVPixelFormat.FormatYuv420P
    };

    // We can open for it writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, specially for properties
    // that we didn't set beforehand but that have been filled by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;
    AudioCodec audioCodec = videoWriter.AudioCodec;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // We haven't set those properties, but FFMPEG has filled them for us:
    int audioSampleRate = videoWriter.SampleRate;
    int audioSampleSize = videoWriter.FrameSize;

    // Now, let's say we would like to save dummy images of
    // changing color, with a sine wave as the audio stream:
    var g = new SineGenerator()
    {
        Channels = 1, // we will generate only one channel, and the file writer will convert on-the-fly
        Format = SampleFormat.Format32BitIeeeFloat,
        Frequency = 10f,
        Amplitude = 0.9f,
        SamplingRate = 44100
    };

    var m2i = new MatrixToImage();
    Bitmap frame;
    for (byte i = 0; i < 255; i++)
    {
        // Create bitmap matrix from a matrix of RGB values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify
        // the moment when this frame should remain in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));

        // We can also write the audio samples if we need to:
        Signal signal = g.Generate(TimeSpan.FromSeconds(1)); // generate 1 second of audio
        videoWriter.WriteAudioFrame(signal); // save it to the stream
    }

    // We can get how long our written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.H265, videoCodec);
    Assert.AreEqual(AudioCodec.Aac, audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    Assert.AreEqual(AudioLayout.Stereo, audioLayout);
    Assert.AreEqual(2, audioChannels);
}
static void Main(string[] args)
{
    // Builds JPEG-style 8x8 quantization tables for several quality levels
    // from the base Q50 table, then quantizes/dequantizes a DCT-transformed
    // sample image at each level and saves the reconstructed bitmaps.

    // Start from the reference quality-50 table.
    var qualityLevels = new Dictionary<int, int[,]>
    {
        { 50, Q50 }
    };

    // Levels below 50: scale Q50 up by 50/level (coarser quantization),
    // clamped at 256.
    foreach (int level in new[] { 10, 20, 30, 40 })
    {
        int[,] newQ = new int[8, 8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                newQ[i, j] = (int)Math.Min(Q50[i, j] * (50d / level), 256d);
            }
        }
        qualityLevels.Add(level, newQ);
    }

    // Levels above 50: scale Q50 down by (100 - level)/50 (finer
    // quantization), clamped at 256.
    foreach (int level in new[] { 60, 70, 80, 90, 95 })
    {
        int[,] newQ = new int[8, 8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                newQ[i, j] = (int)Math.Min(Q50[i, j] * ((100d - level) / 50d), 256d);
            }
        }
        qualityLevels.Add(level, newQ);
    }

    var imageToMatrix = new ImageToMatrix();
    // NOTE(review): the stream from File.OpenRead is never disposed; GDI+
    // bitmaps keep their source stream alive for their whole lifetime, so
    // wrapping it in `using` here would not be safe without restructuring —
    // the handle is held until process exit.
    var bitmap = new Bitmap(File.OpenRead("sample_blackwhite.bmp"));
    double[,] output;
    imageToMatrix.Convert(bitmap, out output);

    // Transform the image into DCT space (assumes an 8x8 input — TODO confirm).
    var dct = GetDctMatrix();
    var matrixMultiplied = Matrix.Dot(output, dct);

    foreach (var q in qualityLevels)
    {
        // Quantize (divide, round to integer) then dequantize (multiply back):
        // the information lost in the rounding is the compression artifact.
        double[,] rounded = new double[8, 8];
        for (int row = 0; row < 8; row++)
        {
            for (int column = 0; column < 8; column++)
            {
                int divided = (int)Math.Round(matrixMultiplied[row, column] * 256d / q.Value[row, column], 0);
                int multipliedAgain = divided * q.Value[row, column];
                rounded[row, column] = multipliedAgain / 256d;
            }
        }

        // Invert the DCT step and save the reconstructed image for this level.
        var newMatrix = Matrix.Divide(rounded, dct);
        var mtoi = new MatrixToImage();
        Bitmap bitmap2;
        mtoi.Convert(newMatrix, out bitmap2);
        bitmap2.Save($"sample_blackwhite_q{q.Key}.bmp");
    }
}
/// <summary>
/// Computes a census transform of the bitmap's red channel: each pixel is
/// replaced by an integer built from comparing it against its (up to) eight
/// neighbours, and the mean of the resulting image's gray histogram is
/// returned.
/// </summary>
/// <param name="myBitmap">Input image; only the R channel is read.</param>
/// <returns>Mean of the gray histogram of the census-transformed image.</returns>
private double census(Bitmap myBitmap)
{
    int w = myBitmap.Width;
    int h = myBitmap.Height;
    int[,] bi = new int[w, h];

    // Neighbour offsets in the exact order of the original comparisons:
    // (-1,-1), (-1,0), (-1,+1), (0,-1), (0,+1), (+1,+1), (+1,0), (+1,-1).
    // (The original skipped one diagonal's label and visited the bottom row
    // right-to-left; that order is preserved here.)
    int[] dx = { -1, -1, -1, 0, 0, 1, 1, 1 };
    int[] dy = { -1, 0, 1, -1, 1, 1, 0, -1 };

    for (int x = 0; x < w; x++)
    {
        for (int y = 0; y < h; y++)
        {
            int mainvalue = myBitmap.GetPixel(x, y).R;

            // Build the comparison bit string; out-of-bounds neighbours are
            // simply skipped. (The original achieved this by catching the
            // ArgumentOutOfRangeException thrown by GetPixel at the image
            // border — exception-driven control flow, replaced by explicit
            // bounds checks which behave identically and avoid the cost.)
            string s = "";
            for (int k = 0; k < dx.Length; k++)
            {
                int nx = x + dx[k];
                int ny = y + dy[k];
                if (nx < 0 || nx >= w || ny < 0 || ny >= h)
                    continue; // border pixel: neighbour does not exist
                s = s + (myBitmap.GetPixel(nx, ny).R >= mainvalue ? 1 : 0);
            }

            bi[x, y] = BitStringToInt(s);
        }
    }

    // Render the census values as a grayscale image and return the mean of
    // its gray histogram.
    MatrixToImage conv = new MatrixToImage(min: 0, max: 255);
    Bitmap image;
    conv.Convert(bi, out image);
    Accord.Imaging.ImageStatistics statistics = new Accord.Imaging.ImageStatistics(image);
    var histogram = statistics.Gray;
    return (histogram.Mean);
}