public void write_video_test()
{
    var videoWriter = new VideoFileWriter();

    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath("output.avi");
    int videoBitRate = 1200 * 1000;
    //int audioBitRate = 320 * 1000;

    videoWriter.Open(path, width, height, framerate, VideoCodec.H264, videoBitRate);

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    videoWriter.Close();

    Assert.IsTrue(File.Exists(path));
}
public void ConvertTest2()
{
    // Create a matrix representation
    // of a 4x4 image with an inner 2x2
    // square drawn in the middle
    double[,] pixels =
    {
        { 0, 0, 0, 0 },
        { 0, 1, 1, 0 },
        { 0, 1, 1, 0 },
        { 0, 0, 0, 0 },
    };

    // Create the converter to convert the matrix to an image
    MatrixToImage conv = new MatrixToImage(min: 0, max: 1);

    // Declare an image and store the pixels on it
    Bitmap image;
    conv.Convert(pixels, out image);

    // Show the image on screen
    image = new ResizeNearestNeighbor(320, 320).Apply(image);
    // ImageBox.Show(image, PictureBoxSizeMode.Zoom);

    Assert.AreEqual(0, conv.Min);
    Assert.AreEqual(1, conv.Max);
    Assert.AreEqual(320, image.Height);
    Assert.AreEqual(320, image.Width);
}
public void ConvertTest2()
{
    // Create a matrix representation
    // of a 4x4 image with an inner 2x2
    // square drawn in the middle
    double[,] pixels =
    {
        { 0, 0, 0, 0 },
        { 0, 1, 1, 0 },
        { 0, 1, 1, 0 },
        { 0, 0, 0, 0 },
    };

    // Create the converter to convert the matrix to an image
    var conv = new MatrixToBitmapSource();

    // Declare a bitmap source and store the pixels on it
    BitmapSource image;
    conv.Convert(pixels, out image);

    var conv2 = new MatrixToImage();
    Bitmap image2;
    conv2.Convert(pixels, out image2);

    Assert.AreEqual(pixels, image.ToMatrix(0));
    Assert.AreEqual(pixels, image2.ToMatrix(0));
    Assert.AreEqual(PixelFormats.Gray32Float, image.Format);
    Assert.AreEqual(System.Drawing.Imaging.PixelFormat.Format8bppIndexed, image2.PixelFormat);
}
public void write_video_test()
{
    var videoWriter = new VideoFileWriter();

    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath(Path.Combine(TestContext.CurrentContext.TestDirectory, "output.avi"));
    int videoBitRate = 1200 * 1000;

    videoWriter.Open(path, width, height, framerate, VideoCodec.H264, videoBitRate);

    Assert.AreEqual(videoBitRate, videoWriter.BitRate);

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    videoWriter.Close();

    Assert.IsTrue(File.Exists(path));
}
public static Bitmap ToBitmap(double[,] rawImage)
{
    MatrixToImage mtrxToImage = new MatrixToImage();

    UnmanagedImage outputImage = null;
    mtrxToImage.Convert(rawImage, out outputImage);

    return outputImage.ToManagedImage();
}
public static Bitmap ToBitmap(float[,] texture)
{
    MatrixToImage i2m = new MatrixToImage();

    Bitmap image;
    i2m.Convert(texture, out image);

    return image;
}
static void TestFFMPEG()
{
    var videoWriter = new VideoFileWriter();

    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath("output.webm");
    int videoBitRate = 1200 * 1000;
    int audioFrameSize = 44100;
    int audioBitRate = 128000;
    int audioSampleRate = 44100;
    AudioLayout audioChannels = AudioLayout.Mono;

    videoWriter.Width = width;
    videoWriter.Height = height;
    videoWriter.FrameRate = framerate;
    videoWriter.VideoCodec = VideoCodec.Vp8;
    videoWriter.BitRate = videoBitRate;
    videoWriter.PixelFormat = AVPixelFormat.FormatYuv420P;
    videoWriter.Open(path); //, audioFrameSize, audioChannels, audioSampleRate, AudioCodec.Vorbis, audioBitRate);

    var a = new Accord.DirectSound.AudioDeviceCollection(DirectSound.AudioDeviceCategory.Capture);

    // Sine wave generator for the audio stream
    SineGenerator gen = new SineGenerator()
    {
        SamplingRate = audioSampleRate,
        Channels = 1,
        Format = SampleFormat.Format16Bit,
        Frequency = 10,
        Amplitude = 1000.9f,
    };

    // Generate 255 seconds of audio
    Signal s = gen.Generate(TimeSpan.FromSeconds(255));
    //s.Save("test.wav");

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(1));

        //// Generate 1 second of audio
        //s = gen.Generate(TimeSpan.FromSeconds(1));
        //videoWriter.WriteAudioFrame(s);
    }

    videoWriter.Close();
}
static void TestFFMPEG2()
{
    string outputPath = Path.GetFullPath("output.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
        //PixelFormat = Accord.Video.FFMPEG.PixelFormat.FormatYUV420P
    };

    // We can open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding our media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;

    // We haven't set those properties, but FFMPEG has filled them for us:
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioChannels = videoWriter.AudioLayout;
    int numberOfChannels = videoWriter.NumberOfChannels;

    // Now, let's say we would like to save dummy images of changing color
    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create a bitmap from a matrix of pixel values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify
        // the duration that this frame should remain in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    // Close the stream so the output file is properly finalized
    videoWriter.Close();
}
public void ConvertTest1()
{
    MatrixToImage target = new MatrixToImage();

    double[,] pixels =
    {
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 0
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 1
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 2
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 3
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 4
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 5
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 6
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 7
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 8
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 9
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 10
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 11
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 12
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 13
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 14
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 15
    };

    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    double[,] actual;
    ImageToMatrix c = new ImageToMatrix();
    c.Convert(imageActual, out actual);

    double[,] expected;
    Bitmap imageExpected = Accord.Imaging.Image.Clone(Resources.image1);
    new Threshold().ApplyInPlace(imageExpected);
    new Invert().ApplyInPlace(imageExpected);
    c.Convert(imageExpected, out expected);

    for (int i = 0; i < pixels.GetLength(0); i++)
    {
        for (int j = 0; j < pixels.GetLength(1); j++)
        {
            Assert.AreEqual(actual[i, j], expected[i, j]);
        }
    }
}
public static BitmapSource CreateBitmapSource(byte[,] input)
{
    try
    {
        MatrixToImage Matrix2Bitmap = new MatrixToImage();

        // The converter allocates the output bitmap through the out parameter
        Bitmap bitout;
        Matrix2Bitmap.Convert(input, out bitout);

        BitmapSource bitsource = ToWpfBitmap(bitout);
        return bitsource;
    }
    catch (Exception)
    {
        return null;
    }
}
private static void write_and_open(Rational framerate, int num, int den)
{
    int width = 800;
    int height = 600;
    string path = Path.GetFullPath(Path.Combine(TestContext.CurrentContext.TestDirectory, "output2.avi"));
    int videoBitRate = 1200 * 1000;

    {
        var videoWriter = new VideoFileWriter();
        videoWriter.Open(path, width, height, framerate, VideoCodec.FfvHuff, videoBitRate);

        Assert.AreEqual(width, videoWriter.Width);
        Assert.AreEqual(height, videoWriter.Height);
        Assert.AreEqual(videoBitRate, videoWriter.BitRate);
        Assert.AreEqual(num, videoWriter.FrameRate.Numerator);
        Assert.AreEqual(den, videoWriter.FrameRate.Denominator);

        var m2i = new MatrixToImage();
        Bitmap frame;

        for (byte i = 0; i < 255; i++)
        {
            byte[,] matrix = Matrix.Create(height, width, i);
            m2i.Convert(matrix, out frame);
            videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
        }

        videoWriter.Close();
    }

    Assert.IsTrue(File.Exists(path));

    {
        VideoFileReader reader = new VideoFileReader();
        reader.Open(path);

        Assert.AreEqual(width, reader.Width);
        Assert.AreEqual(height, reader.Height);
        //Assert.AreEqual(videoBitRate, reader.BitRate);
        Assert.AreEqual(num, reader.FrameRate.Numerator);
        Assert.AreEqual(den, reader.FrameRate.Denominator);
    }
}
byte[,] SizeDown(byte[,] pickedMat, double scale)
{
    // Convert the matrix to a bitmap
    Bitmap tempbit;
    Matrix2Bitmap.Convert(pickedMat, out tempbit);
    pickedMat = null;

    // Resize the bitmap down by the given scale factor
    Bitmap tempDownbut = new Bitmap(tempbit,
        new System.Drawing.Size((int)(tempbit.Width / scale), (int)(tempbit.Height / scale)));
    tempbit = null;

    // Convert back to a matrix
    byte[,] downMat;
    Bitmap2matrix.Convert(tempDownbut, out downMat);

    return Matrix.Transpose(downMat); // downscaled byte[,]
}
private static long[,] create(byte[,] img, PixelFormat format)
{
    long[,] actual8bpp;

    Bitmap image;
    MatrixToImage converter = new MatrixToImage();
    converter.Format = format;
    converter.Convert(img, out image);
    Assert.AreEqual(format, image.PixelFormat);

    IntegralImage2 ii8bpp = IntegralImage2.FromBitmap(image, 0);
    actual8bpp = ii8bpp.Image;

    return actual8bpp;
}
public void ConvertTest2()
{
    // Load a test image
    Bitmap sourceImage = Accord.Imaging.Image.Clone(Properties.Resources.image1);

    // Make sure values are binary
    new Threshold().ApplyInPlace(sourceImage);

    // Create the converters
    ImageToMatrix imageToMatrix = new ImageToMatrix() { Min = 0, Max = 255 };
    MatrixToImage matrixToImage = new MatrixToImage() { Min = 0, Max = 255 };

    // Convert to matrix
    double[,] matrix; // initialization is not needed
    imageToMatrix.Convert(sourceImage, out matrix);

    // Revert to image
    Bitmap resultImage; // initialization is not needed
    matrixToImage.Convert(matrix, out resultImage);

    // Show both images, which should be equal
    // ImageBox.Show(sourceImage, PictureBoxSizeMode.Zoom);
    // ImageBox.Show(resultImage, PictureBoxSizeMode.Zoom);

    UnmanagedImage img1 = UnmanagedImage.FromManagedImage(sourceImage);
    UnmanagedImage img2 = UnmanagedImage.FromManagedImage(resultImage);

    List<IntPoint> p1 = img1.CollectActivePixels();
    List<IntPoint> p2 = img2.CollectActivePixels();

    bool equals = new HashSet<IntPoint>(p1).SetEquals(p2);
    Assert.IsTrue(equals);
}
public void ConvertTest()
{
    MatrixToImage target = new MatrixToImage(min: 0, max: 128);

    byte[,] input =
    {
        { 0,   0,   0 },
        { 0, 128,   0 },
        { 0,   0, 128 },
    };

    UnmanagedImage bitmap;
    target.Convert(input, out bitmap);

    var pixels = bitmap.CollectActivePixels();

    Assert.AreEqual(2, pixels.Count);
    Assert.IsTrue(pixels.Contains(new IntPoint(1, 1)));
    Assert.IsTrue(pixels.Contains(new IntPoint(2, 2)));
}
/// <summary>
/// Converts a matrix to a Bitmap
/// </summary>
/// <param name="rawImage">Image matrix</param>
/// <returns>Bitmap containing the image</returns>
public BitmapSource ArrayToBitmapImage(double[,] rawImage)
{
    Bitmap imageBitmap;
    MatrixToImage converter = new MatrixToImage(min: 0, max: 1);
    converter.Convert(rawImage, out imageBitmap);

    var imageBitmapImage = BitmapToBitmapImage(imageBitmap);

    BitmapEncoder encoder = new PngBitmapEncoder();
    encoder.Frames.Add(BitmapFrame.Create(imageBitmapImage));

    // Test save of the output file
    //using (var fileStream = new FileStream(@"C:\Users\wilu\Desktop\Filtr medianowy\file.png", FileMode.Create))
    //{
    //    encoder.Save(fileStream);
    //}

    return imageBitmapImage;
}
public void ConvertTest2()
{
    // Load a test image
    Bitmap sourceImage = Properties.Resources.image1;

    // Make sure values are binary
    new Threshold().ApplyInPlace(sourceImage);

    // Create the converters
    ImageToMatrix imageToMatrix = new ImageToMatrix() { Min = 0, Max = 255 };
    MatrixToImage matrixToImage = new MatrixToImage() { Min = 0, Max = 255 };

    // Convert to matrix
    double[,] matrix; // initialization is not needed
    imageToMatrix.Convert(sourceImage, out matrix);

    // Revert to image
    Bitmap resultImage; // initialization is not needed
    matrixToImage.Convert(matrix, out resultImage);

    // Show both images, which should be equal
    // ImageBox.Show(sourceImage, PictureBoxSizeMode.Zoom);
    // ImageBox.Show(resultImage, PictureBoxSizeMode.Zoom);

    UnmanagedImage img1 = UnmanagedImage.FromManagedImage(sourceImage);
    UnmanagedImage img2 = UnmanagedImage.FromManagedImage(resultImage);

    List<IntPoint> p1 = img1.CollectActivePixels();
    List<IntPoint> p2 = img2.CollectActivePixels();

    bool equals = new HashSet<IntPoint>(p1).SetEquals(p2);
    Assert.IsTrue(equals);
}
public void ConvertTest1()
{
    MatrixToImage target = new MatrixToImage();

    double[,] pixels =
    {
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 0
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 1
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 2
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 3
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 4
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 5
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 6
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 7
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 8
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 9
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 10
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 11
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 12
        { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 }, // 13
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 14
        { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 15
    };

    Bitmap imageActual;
    target.Convert(pixels, out imageActual);

    double[,] actual;
    ImageToMatrix c = new ImageToMatrix();
    c.Convert(imageActual, out actual);

    double[,] expected;
    Bitmap imageExpected = Properties.Resources.image1;
    new Threshold().ApplyInPlace(imageExpected);
    new Invert().ApplyInPlace(imageExpected);
    c.Convert(imageExpected, out expected);

    for (int i = 0; i < pixels.GetLength(0); i++)
        for (int j = 0; j < pixels.GetLength(1); j++)
            Assert.AreEqual(actual[i, j], expected[i, j]);
}
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save a file using a .mp4 media
    // container, an H.265 video codec for the video stream, and
    // AAC for the audio stream, into the file:
    string outputPath = Path.Combine(basePath, "output_audio.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.H265,
        AudioCodec = AudioCodec.Aac,
        AudioBitRate = 44100,
        AudioLayout = AudioLayout.Stereo,
        FrameSize = 44100,
        PixelFormat = AVPixelFormat.FormatYuv420P
    };

    // We can open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;
    AudioCodec audioCodec = videoWriter.AudioCodec;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // We haven't set those properties, but FFMPEG has filled them in for us:
    int audioSampleRate = videoWriter.SampleRate;
    int audioSampleSize = videoWriter.FrameSize;

    // Now, let's say we would like to save dummy images of
    // changing color, with a sine wave as the audio stream:
    var g = new SineGenerator()
    {
        Channels = 1, // we will generate only one channel, and the file writer will convert on-the-fly
        Format = SampleFormat.Format32BitIeeeFloat,
        Frequency = 10f,
        Amplitude = 0.9f,
        SamplingRate = 44100
    };

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create bitmap matrix from a matrix of pixel values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify
        // the moment when this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));

        // We can also write the audio samples if we need to:
        Signal signal = g.Generate(TimeSpan.FromSeconds(1)); // generate 1 second of audio
        videoWriter.WriteAudioFrame(signal); // save it to the stream
    }

    // We can get how long our written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.H265, videoCodec);
    Assert.AreEqual(AudioCodec.Aac, audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    Assert.AreEqual(AudioLayout.Stereo, audioLayout);
    Assert.AreEqual(2, audioChannels);
}
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save a file using a .avi media
    // container and an MPEG4 (DivX/XVid) codec, saving it into:
    string outputPath = Path.Combine(basePath, "output_video.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
    };

    // We can open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;

    // We haven't set those properties, but FFMPEG has filled them in for us:
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // Now, let's say we would like to save dummy images of changing color
    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create bitmap matrix from a matrix of pixel values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify
        // the moment when this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    // We can get how long our written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    videoWriter.Dispose();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.Mpeg4, videoCodec);
    Assert.IsTrue(AudioCodec.Default == audioCodec || AudioCodec.Mp3 == audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    Assert.AreEqual(AudioLayout.Stereo, audioLayout); // the writer reports two channels, i.e. a stereo layout
    Assert.AreEqual(2, audioChannels);
}
static void Main(string[] args)
{
    // Build quantization tables for several quality levels from the base Q50 table
    var qualityLevels = new Dictionary<int, int[,]> { { 50, Q50 } };

    foreach (int level in new[] { 10, 20, 30, 40 })
    {
        int[,] newQ = new int[8, 8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                newQ[i, j] = (int)Math.Min(Q50[i, j] * (50d / level), 256d);
            }
        }
        qualityLevels.Add(level, newQ);
    }

    foreach (int level in new[] { 60, 70, 80, 90, 95 })
    {
        int[,] newQ = new int[8, 8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                newQ[i, j] = (int)Math.Min(Q50[i, j] * ((100d - level) / 50d), 256d);
            }
        }
        qualityLevels.Add(level, newQ);
    }

    // Load the sample image and convert it to a matrix
    var imageToMatrix = new ImageToMatrix();
    var bitmap = new Bitmap(File.OpenRead("sample_blackwhite.bmp"));
    double[,] output;
    imageToMatrix.Convert(bitmap, out output);

    // Apply the DCT basis to the image matrix
    var dct = GetDctMatrix();
    var matrixMultiplied = Matrix.Dot(output, dct);

    foreach (var q in qualityLevels)
    {
        // Quantize and dequantize the DCT coefficients with this quality table
        double[,] rounded = new double[8, 8];
        for (int row = 0; row < 8; row++)
        {
            for (int column = 0; column < 8; column++)
            {
                int divided = (int)Math.Round(matrixMultiplied[row, column] * 256d / q.Value[row, column], 0);
                int multipliedAgain = divided * q.Value[row, column];
                rounded[row, column] = multipliedAgain / 256d;
            }
        }

        // Invert the transform and save the reconstructed image
        var newMatrix = Matrix.Divide(rounded, dct);
        var mtoi = new MatrixToImage();
        Bitmap bitmap2;
        mtoi.Convert(newMatrix, out bitmap2);
        bitmap2.Save($"sample_blackwhite_q{q.Key}.bmp");
    }
}
private double census(Bitmap myBitmap)
{
    int[,] bi = new int[myBitmap.Width, myBitmap.Height];

    // Offsets of the 8 neighbors, visited in the same order as the
    // original unrolled code: upper-left, left, lower-left, upper-middle,
    // lower-middle, lower-right, right, upper-right
    int[] dx = { -1, -1, -1,  0,  0, +1, +1, +1 };
    int[] dy = { -1,  0, +1, -1, +1, +1,  0, -1 };

    for (int x = 0; x < myBitmap.Width; x++)
    {
        for (int y = 0; y < myBitmap.Height; y++)
        {
            Color pixelColor = myBitmap.GetPixel(x, y);
            int mainvalue = pixelColor.R;

            // Build the census bit string: '1' for every neighbor whose red
            // value is greater than or equal to the center pixel, '0' otherwise;
            // neighbors outside the image borders are simply skipped
            string s = "";
            for (int k = 0; k < dx.Length; k++)
            {
                int nx = x + dx[k];
                int ny = y + dy[k];

                if (nx < 0 || ny < 0 || nx >= myBitmap.Width || ny >= myBitmap.Height)
                    continue;

                s = s + (myBitmap.GetPixel(nx, ny).R >= mainvalue ? 1 : 0);
            }

            bi[x, y] = BitStringToInt(s);
        }
    }

    MatrixToImage conv = new MatrixToImage(min: 0, max: 255);

    // Declare an image and store the census values on it
    Bitmap image;
    conv.Convert(bi, out image);

    // Return the mean of the gray histogram of the census image
    Accord.Imaging.ImageStatistics statistics = new Accord.Imaging.ImageStatistics(image);
    var histogram = statistics.Gray;
    return histogram.Mean;
}