/// <summary>
/// Compresses an opaque surface (no alpha channel) into a single AV1 color image
/// using the bundled native AVIF encoder (32-bit or 64-bit DLL, chosen at runtime).
/// </summary>
/// <param name="surface">Source pixel data; its Scan0/Width/Height/Stride are passed to native code without copying.</param>
/// <param name="options">Encoder options forwarded to the native encoder.</param>
/// <param name="avifProgress">Progress callback invoked from native code via the progress context.</param>
/// <param name="progressDone">Running count of completed progress steps; updated from the context on return.</param>
/// <param name="progressTotal">Total number of progress steps.</param>
/// <param name="colorInfo">CICP color information forwarded to the encoder.</param>
/// <param name="color">Receives the compressed color image.</param>
public static void CompressWithoutTransparency(Surface surface,
                                               EncoderOptions options,
                                               AvifProgressCallback avifProgress,
                                               ref uint progressDone,
                                               uint progressTotal,
                                               CICPColorData colorInfo,
                                               out CompressedAV1Image color)
{
    // Describe the managed surface to the native encoder; no pixel copy is made here.
    BitmapData bitmapData = new BitmapData
    {
        scan0 = surface.Scan0.Pointer,
        width = (uint)surface.Width,
        height = (uint)surface.Height,
        stride = (uint)surface.Stride
    };
    ProgressContext progressContext = new ProgressContext(avifProgress, progressDone, progressTotal);

    using (CompressedAV1DataAllocator allocator = new CompressedAV1DataAllocator(1))
    {
        IntPtr colorImage;

        CompressedAV1OutputAlloc outputAllocDelegate = new CompressedAV1OutputAlloc(allocator.Allocate);
        EncoderStatus status = EncoderStatus.Ok;

        // Pick the native DLL matching the current process bitness.
        if (IntPtr.Size == 8)
        {
            status = AvifNative_64.CompressImage(ref bitmapData,
                                                 options,
                                                 progressContext,
                                                 ref colorInfo,
                                                 outputAllocDelegate,
                                                 out colorImage,
                                                 IntPtr.Zero);
        }
        else
        {
            status = AvifNative_86.CompressImage(ref bitmapData,
                                                 options,
                                                 progressContext,
                                                 ref colorInfo,
                                                 outputAllocDelegate,
                                                 out colorImage,
                                                 IntPtr.Zero);
        }

        // Keep the delegate alive until the native call has returned; otherwise the GC
        // could collect it while native code still holds the function pointer.
        GC.KeepAlive(outputAllocDelegate);

        if (status != EncoderStatus.Ok)
        {
            HandleError(status, allocator.ExceptionInfo);
        }

        color = new CompressedAV1Image(allocator.GetCompressedAV1Data(colorImage),
                                       surface.Width,
                                       surface.Height,
                                       options.yuvFormat);
    }

    // Propagate the native encoder's progress count back to the caller.
    progressDone = progressContext.progressDone;

    GC.KeepAlive(avifProgress);
}
/// <summary>
/// Opens output stream for writing and returns both the input and output streams.
/// Make sure to use a streaming format (like flv).
/// </summary>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static (Stream Input, Stream Output) StreamToStream(EncoderOptions options, out Process process, string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Read from stdin ("-i -") and write the encoded stream to stdout ("-").
    string arguments = $"{inputArguments} -i - " +
        $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} -";

    return FFmpegWrapper.Open(ffmpegExecutable, arguments, out process, showOutput);
}
/// <summary>
/// Computes a size whose width is derived from the configured width value
/// (converted from millimetres to pixels via the DPI setting) and whose
/// height is two QR module sizes.
/// </summary>
/// <param name="options">Provides the width factor, DPI and module size.</param>
/// <returns>The computed size in pixels.</returns>
public Size GetRandomSize(EncoderOptions options)
{
    // 1 / 25.4 converts millimetres to inches; multiplying by DPI yields pixels.
    double widthInPixels = Math.Round(1 / 25.4 * options.widthdoubleFromat * options.DpiInch, 1);

    int width = (int)Math.Round(widthInPixels, 0);
    int height = options.QrRect.ModuleSize * 2;

    return new Size(width, height);
}
/// <summary>
/// Converts given input file to output file.
/// </summary>
/// <param name="inputFilename">Input video file name/path</param>
/// <param name="outputFilename">Output video file name/path</param>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static void FileToFile(string inputFilename, string outputFilename, EncoderOptions options, out Process process, string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Filenames are quoted so paths containing spaces survive argument parsing.
    process = FFmpegWrapper.ExecuteCommand(ffmpegExecutable,
        $"{inputArguments} -i \"{inputFilename}\" " +
        $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} \"{outputFilename}\"",
        showOutput);
}
/// <summary>
/// Uses input file and returns the output stream. Make sure to use a streaming format (like flv).
/// </summary>
/// <param name="inputFilename">Input video file name/path</param>
/// <param name="options">Output options</param>
/// <param name="process">FFmpeg process</param>
/// <param name="inputArguments">Input arguments (such as -f, -v:c, -video_size, -ac, -ar...)</param>
/// <param name="showOutput">Show output to terminal. Error stream will not be redirected if this is set to true.</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public static Stream FileToStream(string inputFilename, EncoderOptions options, out Process process, string inputArguments = "", bool showOutput = false, string ffmpegExecutable = "ffmpeg")
{
    // Encoded output goes to stdout ("-"), exposed as the returned stream.
    string arguments = $"{inputArguments} -i \"{inputFilename}\" " +
        $"-c:v {options.EncoderName} {options.EncoderArguments} -f {options.Format} -";

    return FFmpegWrapper.OpenOutput(ffmpegExecutable, arguments, out process, showOutput);
}
/// <summary>
/// Creates the encoder service, capturing the host lifetime, configured
/// encoder options and logger, and starting with an empty process list.
/// </summary>
/// <param name="applicationLifetime">Host application lifetime.</param>
/// <param name="options">Configured encoder options.</param>
/// <param name="logger">Logger for this service.</param>
public EncoderService(IHostApplicationLifetime applicationLifetime, IOptions<EncoderOptions> options, ILogger<EncoderService> logger)
{
    _applicationLifetime = applicationLifetime;
    _logger = logger;
    _options = options.Value;
    _processes = new List<Process>();
}
/// <summary>
/// Serializes the encoder options as "key=value" pairs joined by semicolons.
/// </summary>
/// <returns>The joined pairs, or an empty string when no options are set.</returns>
internal string GetOptionsString()
{
    if (EncoderOptions == null)
    {
        return string.Empty;
    }

    var pairs = EncoderOptions.Select(option => $"{option.Key}={option.Value}");
    return string.Join(";", pairs);
}
/// <summary>
/// Builds the scanning scope for the bitmap: rows span from eight modules left
/// of the matrix's right edge to the bitmap width, columns cover the first
/// eight modules.
/// </summary>
/// <param name="bitmap">The rendered QR bitmap.</param>
/// <param name="options">Provides the matrix width and module size.</param>
/// <returns>The computed scope.</returns>
public Scope GetScope(Bitmap bitmap, EncoderOptions options)
{
    int matrixWidth = options.Matrix.Width;
    int moduleSize = options.QrRect.ModuleSize;

    return new Scope
    {
        Row_Min = (moduleSize * matrixWidth) - (moduleSize * 8),
        Row_Max = bitmap.Width,
        Column_Min = 0,
        Column_Max = moduleSize * 8
    };
}
/// <summary>
/// Creates a square 24bpp bitmap, clears it to white and draws the QR code's
/// dark modules onto it.
/// </summary>
/// <param name="options">Encoder options describing the bitmap size and matrix.</param>
/// <returns>The rendered QR code bitmap.</returns>
public Bitmap DrawQrCode(EncoderOptions options)
{
    Bitmap bitmap = new Bitmap(options.BitmapWidth, options.BitmapWidth, PixelFormat.Format24bppRgb);

    // using guarantees the Graphics is released even if Clear throws
    // (the original disposed it manually and would leak on exception).
    using (Graphics graphics = Graphics.FromImage(bitmap))
    {
        graphics.Clear(Color.White);
    }

    return DrawDarkModuleSquare(bitmap, options);
}
/// <summary>
/// Encodes a PNG test file to WebP by handing the encoder pixel data locked
/// directly from the managed bitmap (no unmanaged intermediate buffer).
/// </summary>
public void Test_Encode_Buffer()
{
    if (!this.factory.CanEncode)
    {
        throw new BadImageFormatException("Dll is not libwebp or it doesn't contain encode functions.");
    }

    string[] testFiles = { "Test1.png" };
    foreach (var filename in testFiles)
    {
        using (var fs = File.OpenRead(filename))
        {
            // Test using the shared buffer from .NET instead of copying into
            // an unmanaged buffer of WebP's encoder.
            using (var bitmap = new Bitmap(fs, false))
            {
                var wholeImg = new Rectangle(0, 0, bitmap.Width, bitmap.Height);

                // Reuse the decoded bitmap when it is already 32bppArgb,
                // otherwise convert via Clone.
                Bitmap bm = bitmap.PixelFormat == PixelFormat.Format32bppArgb
                    ? bitmap
                    : bitmap.Clone(wholeImg, PixelFormat.Format32bppArgb);
                try
                {
                    var opts = new EncoderOptions(CompressionType.Lossy, CompressionLevel.Highest, WebPPreset.Default, 90f);

                    var lockedData = bm.LockBits(wholeImg, ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
                    try
                    {
                        using (var outputFileStream = new WrapperStream(Path.ChangeExtension(filename, ".webp")))
                        {
                            this.factory.EncodeRGB(lockedData.Scan0, lockedData.Width, lockedData.Height, lockedData.Stride, true, outputFileStream, opts);
                        }
                    }
                    finally
                    {
                        bm.UnlockBits(lockedData);
                    }
                }
                finally
                {
                    bm.Dispose();
                }
            }
        }
    }
}
/// <summary>
/// Round-trips a lossless WebP test file: decodes it via a stream and via a
/// pooled in-memory buffer, re-encoding each decoded bitmap back to a lossy
/// WebP file on disk.
/// </summary>
public void Test_WinForms()
{
    using (var webp = new Webp(this.factory, false))
    {
        string[] test_files = { "Test_lossless.webp" };
        foreach (var filename in test_files)
        {
            using (var fs = File.OpenRead(filename))
            {
                var decoderOpts = new WindowsDecoderOptions() { PixelFormat = OutputPixelFormat.PreferSmallSize };
                var encoderOpts = new EncoderOptions(CompressionType.Lossy, CompressionLevel.Highest, WebPPreset.Default, 90f);

                // Path 1: decode straight from the file stream, then re-encode.
                using (var bitmap = webp.Decode(fs, decoderOpts))
                {
                    using (var output = File.Create(Path.GetFileNameWithoutExtension(filename) + "_re-encode-stream.webp"))
                    {
                        webp.Encode(bitmap, output, encoderOpts);
                        output.Flush();
                    }
                }

                // Path 2: read the whole file into a pooled buffer and decode from memory.
                fs.Position = 0;
                var length = (int)fs.Length;
                var buffer = ArrayPool<byte>.Shared.Rent(length);
                try
                {
                    // NOTE(review): Stream.Read may return fewer bytes than requested;
                    // a partial read silently skips this buffer path — confirm intended.
                    if (fs.Read(buffer, 0, buffer.Length) == length)
                    {
                        using (var bitmap = webp.Decode(new ReadOnlyMemory<byte>(buffer, 0, length), decoderOpts))
                        {
                            using (var output = File.Create(Path.GetFileNameWithoutExtension(filename) + "_re-encode-buffer.webp"))
                            {
                                webp.Encode(bitmap, output, encoderOpts);
                                output.Flush();
                            }
                        }
                    }
                }
                finally
                {
                    // Rented buffers must always be returned to the pool.
                    ArrayPool<byte>.Shared.Return(buffer);
                }
            }
        }
    }
}
/// <summary>
/// Fills the dark modules of the QR matrix onto the bitmap, merging horizontal
/// runs of adjacent dark modules into single filled rectangles.
/// </summary>
/// <param name="bitmap">The target bitmap (already cleared to white).</param>
/// <param name="options">Provides the bit matrix, module size and quiet-zone size;
/// its DFT counter is incremented once per flushed interior run.</param>
/// <returns>The same bitmap with the dark modules drawn.</returns>
private Bitmap DrawDarkModuleSquare(Bitmap bitmap, EncoderOptions options)
{
    BitMatrix bitMatrix = options.Matrix;
    int ModuleSize = options.QrRect.ModuleSize;
    int QuietZoneModuleSize = (int)options.QrRect.ZoneModule * ModuleSize;

    // using guarantees the Graphics is released even if drawing throws
    // (the original disposed it manually and would leak on exception).
    using (Graphics graphics = Graphics.FromImage(bitmap))
    {
        int preX = -1; // start column of the current run of dark modules; -1 = no open run

        for (int y = 0; y < bitMatrix.Height; y++)
        {
            for (int x = 0; x < bitMatrix.Width; x++)
            {
                if (bitMatrix[x, y])
                {
                    if (preX == -1)
                    {
                        preX = x;
                    }

                    // The run reaches the right edge of the matrix: flush it.
                    if (x == bitMatrix.Width - 1)
                    {
                        Rectangle moduleArea = new Rectangle(preX * ModuleSize + QuietZoneModuleSize,
                                                             y * ModuleSize + QuietZoneModuleSize,
                                                             (x - preX + 1) * ModuleSize,
                                                             ModuleSize);
                        graphics.FillRectangle(Brushes.Black, moduleArea);
                        preX = -1;
                    }
                }
                else if (preX != -1)
                {
                    // A light module ends the current run; flush it and count it in DFT.
                    options.DFT++;
                    Rectangle moduleArea = new Rectangle(preX * ModuleSize + QuietZoneModuleSize,
                                                         y * ModuleSize + QuietZoneModuleSize,
                                                         (x - preX) * ModuleSize,
                                                         ModuleSize);
                    graphics.FillRectangle(Brushes.Black, moduleArea);
                    preX = -1;
                }
            }
        }

        graphics.Flush();
    }

    return bitmap;
}
/// <summary>
/// Draws the content modules of the QR code as ellipses: the custom color for
/// dark modules and white for light modules. Offsets depend on the requested
/// square size (Max/Min).
/// </summary>
/// <param name="custom">Customization (tag, color, size) for the content modules.</param>
/// <param name="options">Provides the per-module bitmap matrices.</param>
private void DrawingContent(QrCodeCustom custom, EncoderOptions options)
{
    List<QrBitmapMatrix> matrixs = options.BitMats.FindAll(f => f.Tag == custom.QrCodeTag);

    int offset = 1;
    int offsetmudule = 1;
    int ModuleSize = matrixs[0].Rectangle.Width;
    if (custom.QrCodeSize == QrCodeSquareSize.Max)
    {
        offset = ModuleSize / 2;
        offsetmudule = offset / 2;
    }
    if (custom.QrCodeSize == QrCodeSquareSize.Min)
    {
        offset = ModuleSize / 3;
        offsetmudule = offset;
    }

    // Create the color brush once and dispose it (the original allocated a new
    // SolidBrush per dark module and never disposed any of them — a GDI leak).
    using (SolidBrush darkBrush = new SolidBrush(custom.ColorBrush))
    {
        for (int i = 0; i < matrixs.Count; i++)
        {
            int rowX = matrixs[i].Rectangle.X + offsetmudule;
            int columnY = matrixs[i].Rectangle.Y + offsetmudule;
            if (matrixs[i].BoolVar)
            {
                GraphicsCustom.FillEllipse(darkBrush, new Rectangle(rowX, columnY, offset, offset));
                GraphicsCustom.Flush();
            }
            else
            {
                GraphicsCustom.FillEllipse(Brushes.White, new Rectangle(rowX, columnY, offset, offset));
                GraphicsCustom.Flush();
            }
        }
    }

    GraphicsCustom.Flush();
    // NOTE(review): this disposes the shared GraphicsCustom field, so no further
    // drawing may happen after the Content element — confirm call ordering.
    GraphicsCustom.Dispose();
}
/// <summary>
/// Draws a thresholded tag image at the position of the tag's first matrix cell.
/// </summary>
/// <param name="qrCodeCustom">Customization identifying which tag to draw.</param>
/// <param name="options">Provides the matrices and the logo image path.</param>
private void DrawTag(QrCodeCustom qrCodeCustom, EncoderOptions options)
{
    List<QrBitmapMatrix> matrixs = options.BitMats.FindAll(f => f.Tag == qrCodeCustom.QrCodeTag);

    int moduleSize = MudelSize / 7;
    int rowX = matrixs[0].Point.X * moduleSize;
    int columnY = matrixs[0].Point.Y * moduleSize;

    // NOTE(review): the bitmap returned by GetTagImg is replaced (and never disposed)
    // after Otsu thresholding — confirm GetTagImg does not return a cached instance
    // before adding a Dispose here.
    Bitmap bitmap = GetTagImg(options.LogoImgPath, qrCodeCustom.QrCodeTag);
    bitmap = OtsuThreshold(bitmap).ToBitmap();

    GraphicsCustom.DrawImage(bitmap, rowX, columnY, Rectangles[0].Width, Rectangles[0].Height);
}
/// <summary>
/// Used for encoding video and audio frames into a single stream
/// </summary>
/// <param name="outputStream">Output stream</param>
/// <param name="video_width">Input video width in pixels</param>
/// <param name="video_height">Input video height in pixels</param>
/// <param name="video_framerate">Input video framerate in fps</param>
/// <param name="audio_channels">Input audio channel count</param>
/// <param name="audio_sampleRate">Input audio sample rate</param>
/// <param name="audio_bitDepth">Input audio bits per sample</param>
/// <param name="videoEncoderOptions">Video encoding options that will be passed to FFmpeg</param>
/// <param name="audioEncoderOptions">Audio encoding options that will be passed to FFmpeg</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public AudioVideoWriter(Stream outputStream, int video_width, int video_height, double video_framerate,
    int audio_channels, int audio_sampleRate, int audio_bitDepth,
    EncoderOptions videoEncoderOptions, EncoderOptions audioEncoderOptions,
    string ffmpegExecutable = "ffmpeg")
{
    // Validation order preserved: video checks, then stream, then audio.
    if (video_width <= 0 || video_height <= 0)
    {
        throw new InvalidDataException("Video frame dimensions have to be bigger than 0 pixels!");
    }

    if (video_framerate <= 0)
    {
        throw new InvalidDataException("Video framerate has to be bigger than 0!");
    }

    if (outputStream == null)
    {
        throw new NullReferenceException("Stream can't be null!");
    }

    if (audio_channels <= 0 || audio_sampleRate <= 0)
    {
        throw new InvalidDataException("Channels/Sample rate have to be bigger than 0!");
    }

    if (audio_bitDepth != 16 && audio_bitDepth != 24 && audio_bitDepth != 32)
    {
        throw new InvalidOperationException("Acceptable bit depths are 16, 24 and 32");
    }

    DestinationStream = outputStream;
    UseFilename = false;
    ffmpeg = ffmpegExecutable;

    VideoWidth = video_width;
    VideoHeight = video_height;
    VideoFramerate = video_framerate;
    VideoEncoderOptions = videoEncoderOptions;

    AudioChannels = audio_channels;
    AudioSampleRate = audio_sampleRate;
    AudioBitDepth = audio_bitDepth;
    AudioEncoderOptions = audioEncoderOptions;
}
/// <summary>
/// Renders the customized QR code: creates a white canvas, draws the optional
/// logo (top-left or centered), then draws each configured tag/content element.
/// </summary>
/// <param name="options">Encoder options with canvas size, logo and customizations.</param>
/// <returns>The rendered bitmap (also stored in the Bitmap field).</returns>
public Bitmap DrawQrCode(EncoderOptions options)
{
    Bitmap logoimg = GetLogoBitmap(options);

    Bitmap = new Bitmap(options.QrRect.QrCodeMinWidth, options.QrRect.QrCodeMinWidth, PixelFormat.Format24bppRgb);
    GraphicsCustom = Graphics.FromImage(Bitmap);
    GraphicsCustom.Clear(Color.White);

    if (logoimg != null)
    {
        if (options.IsTopLogoImg)
        {
            // Logo anchored at the top-left corner.
            GraphicsCustom.DrawImage(logoimg, 0, 0, logoimg.Width, logoimg.Height);
        }
        else
        {
            // Logo centered on the canvas.
            int pointx = Bitmap.Width / 2 - logoimg.Width / 2;
            int pointy = Bitmap.Height / 2 - logoimg.Height / 2;
            GraphicsCustom.DrawImage(logoimg, pointx, pointy, logoimg.Width, logoimg.Height);
        }
    }

    GraphicsCustom.Flush();

    for (int i = 0; i < options.QrCodeCustoms.Count; i++)
    {
        QrCodeCustom qrCodeCustom = options.QrCodeCustoms[i];
        if (qrCodeCustom.QrCodeTag == Tag.Content)
        {
            DrawingContent(qrCodeCustom, options);
        }
        else
        {
            DrawTag(qrCodeCustom, options);
        }
    }

    return Bitmap;
}
/// <summary>
/// Dispatches drawing of a tag pattern to the renderer that matches its
/// configured style; unknown styles are ignored.
/// </summary>
/// <param name="custom">Customization whose style selects the renderer.</param>
/// <param name="options">Provides the per-module bitmap matrices for the tag.</param>
private void DrawTag(QrCodeCustom custom, EncoderOptions options)
{
    List<QrBitmapMatrix> matrices = options.BitMats.FindAll(f => f.Tag == custom.QrCodeTag);

    switch (custom.QrCodeStyle)
    {
        case QrCodeStyleEnum.Square:
            DrawTagSquare(custom, matrices);
            break;

        case QrCodeStyleEnum.Circle:
            DrawTagCircle(custom, matrices);
            break;

        case QrCodeStyleEnum.Excircle:
            DrawTagExcircle(custom, matrices);
            break;

        case QrCodeStyleEnum.InnerCircle:
            DrawTagInnerCircle(custom, matrices);
            break;

        case QrCodeStyleEnum.ExInnerCircle:
            DrawTagExInnercircle(custom, matrices);
            break;

        case QrCodeStyleEnum.ExSquareCircle:
            DrawTagExSquareCircle(custom, matrices);
            break;

        default:
            // No renderer for this style; draw nothing.
            break;
    }
}
/// <summary>
/// Used for encoding audio samples into a stream
/// </summary>
/// <param name="destinationStream">Output stream</param>
/// <param name="channels">Input number of channels</param>
/// <param name="sampleRate">Input sample rate</param>
/// <param name="bitDepth">Input bits per sample</param>
/// <param name="encoderOptions">Extra FFmpeg encoding options that will be passed to FFmpeg</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public AudioWriter(Stream destinationStream, int channels, int sampleRate, int bitDepth = 16, EncoderOptions encoderOptions = null, string ffmpegExecutable = "ffmpeg")
{
    if (channels <= 0 || sampleRate <= 0)
    {
        throw new InvalidDataException("Channels/Sample rate have to be bigger than 0!");
    }

    if (bitDepth != 16 && bitDepth != 24 && bitDepth != 32)
    {
        throw new InvalidOperationException("Acceptable bit depths are 16, 24 and 32");
    }

    DestinationStream = destinationStream ?? throw new NullReferenceException("Stream can't be null!");
    UseFilename = false;
    ffmpeg = ffmpegExecutable;

    Channels = channels;
    SampleRate = sampleRate;
    BitDepth = bitDepth;

    // Default to the MP3 encoder when no options are supplied.
    EncoderOptions = encoderOptions ?? new MP3Encoder().Create();
}
/// <summary>
/// Used for encoding frames into a stream (Requires using a supported format like 'flv' for streaming)
/// </summary>
/// <param name="destinationStream">Output stream</param>
/// <param name="width">Input width of the video in pixels</param>
/// <param name="height">Input height of the video in pixels </param>
/// <param name="framerate">Input framerate of the video in fps</param>
/// <param name="encoderOptions">Extra FFmpeg encoding options that will be passed to FFmpeg</param>
/// <param name="ffmpegExecutable">Name or path to the ffmpeg executable</param>
public VideoWriter(Stream destinationStream, int width, int height, double framerate, EncoderOptions encoderOptions = null, string ffmpegExecutable = "ffmpeg")
{
    if (width <= 0 || height <= 0)
    {
        throw new InvalidDataException("Video frame dimensions have to be bigger than 0 pixels!");
    }

    if (framerate <= 0)
    {
        throw new InvalidDataException("Video framerate has to be bigger than 0!");
    }

    DestinationStream = destinationStream ?? throw new NullReferenceException("Stream can't be null!");
    UseFilename = false;
    ffmpeg = ffmpegExecutable;

    Width = width;
    Height = height;
    Framerate = framerate;

    // Default to the H.264 encoder when no options are supplied.
    EncoderOptions = encoderOptions ?? new H264Encoder().Create();
}
/// <summary>
/// Verifies that saving a JPEG with IgnoreMetadata = false preserves the EXIF
/// profile through a save/load round trip.
/// </summary>
public void Encode_IgnoreMetadataIsFalse_ExifProfileIsWritten()
{
    var options = new EncoderOptions { IgnoreMetadata = false };

    TestFile testFile = TestFile.Create(TestImages.Jpeg.Baseline.Floorplan);

    using (Image input = testFile.CreateImage())
    using (MemoryStream memStream = new MemoryStream())
    {
        input.Save(memStream, new JpegFormat(), options);

        memStream.Position = 0;
        using (Image output = Image.Load(memStream))
        {
            Assert.NotNull(output.MetaData.ExifProfile);
        }
    }
}
/// <summary>
/// Punches random white polygons into the QR bitmap (a decorative density
/// effect scaled by the dark-run count DFT) and returns a 32bpp copy of the
/// 7x7-module region near the top-right corner of the matrix.
/// </summary>
/// <param name="bitmap">The rendered QR code bitmap.</param>
/// <param name="options">Provides DPI, module size, matrix width and density settings.</param>
/// <returns>A 32bpp clone of the 7x7-module top-right region.</returns>
public Bitmap DrawDarkModulePolygon(Bitmap bitmap, EncoderOptions options)
{
    bitmap.SetResolution(options.DpiInch, options.DpiInch);

    Size size = GetRandomSize(options);
    Scope scope = GetScope(bitmap, options);
    // Polygon count scales with the dark-module run count (DFT) and the density factor.
    List<PointF[]> pointFs = new QrRect().GetRandomPointF(scope, size, (int)(options.DFT * options.densitydoubleFromat));

    // using guarantees the Graphics is released even if drawing throws
    // (the original disposed it manually and would leak on exception).
    using (Graphics graphics = Graphics.FromImage(bitmap))
    {
        for (int i = 0; i < pointFs.Count; i++)
        {
            graphics.FillPolygon(Brushes.White, pointFs[i]);
        }
        graphics.Flush();
    }

    // Crop the 7x7-module region located one module in from the matrix's right edge.
    int row = options.Matrix.Width - 7;
    int mudleSize = options.QrRect.ModuleSize;
    int x = row * mudleSize + mudleSize;
    int y = mudleSize;

    return bitmap.Clone(new Rectangle(x, y, mudleSize * 7, mudleSize * 7), System.Drawing.Imaging.PixelFormat.Format32bppRgb);
}
/// <summary>Encodes <seealso cref="Image"/> to Webp image and write the result into the given stream.</summary>
/// <param name="image">The image which will be used to encode to WebP image.</param>
/// <param name="outputStream">The output stream to write the encoded webp data to.</param>
/// <param name="options">The encoder options for webp encoder.</param>
/// <exception cref="ObjectDisposedException">Thrown when this instance has been disposed.</exception>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="image"/> or <paramref name="outputStream"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="outputStream"/> is not writable.</exception>
/// <exception cref="WebpEncodeException">Thrown when the encoder has wrong options.</exception>
public void Encode(Image image, Stream outputStream, EncoderOptions options)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(Webp));
    }
    if (image == null)
    {
        throw new ArgumentNullException(nameof(image));
    }
    if (outputStream == null)
    {
        throw new ArgumentNullException(nameof(outputStream));
    }
    if (!outputStream.CanWrite)
    {
        throw new ArgumentException("The output stream must be writable.", nameof(outputStream));
    }

    // The native encoder reads 32bpp ARGB pixel data, so the image may need to be
    // copied/converted into a temporary Bitmap. shouldDispose tracks whether this
    // method created the bitmap (and must dispose it) or borrowed the caller's.
    bool shouldDispose = false;
    Bitmap bm = image as Bitmap;
    if (bm == null)
    {
        shouldDispose = true;
        if (image.PixelFormat == PixelFormat.Format32bppArgb)
        {
            bm = new Bitmap(image);
        }
        else
        {
            // Redraw into a fresh 32bppArgb bitmap instead of Bitmap.Clone.
            // NOTE(review): a commented-out Clone-based conversion was here before;
            // presumably DrawImageUnscaled avoids a conversion issue — confirm.
            bm = new Bitmap(image.Width, image.Height, PixelFormat.Format32bppArgb);
            using (var graphic = Graphics.FromImage(bm))
            {
                graphic.DrawImageUnscaled(image, 0, 0);
                graphic.Flush();
            }
        }
    }
    else
    {
        if (bm.PixelFormat != PixelFormat.Format32bppArgb)
        {
            // A Bitmap in another pixel format is converted via Clone; the clone is ours to dispose.
            var oldBm = bm;
            shouldDispose = true;
            bm = oldBm.Clone(new Rectangle(0, 0, bm.Width, bm.Height), PixelFormat.Format32bppArgb);
        }
    }

    try
    {
        // Lock the pixel buffer and hand the raw scan lines to the native encoder.
        var lockedData = bm.LockBits(new Rectangle(0, 0, bm.Width, bm.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb);
        try
        {
            var wrappedStream = new OutputStream(outputStream);
            this.webp.EncodeRGB(lockedData.Scan0, lockedData.Width, lockedData.Height, lockedData.Stride, true, wrappedStream, options);
        }
        finally
        {
            bm.UnlockBits(lockedData);
        }
    }
    finally
    {
        // Only dispose bitmaps this method created, never the caller's image.
        if (shouldDispose)
        {
            bm.Dispose();
        }
    }
}
/// <summary>
/// Encodes (trains) a CRF model from the training corpus described by <paramref name="args"/>.
/// </summary>
/// <param name="args">Training parameters: threads, cost factor, file names, regularization type, etc.</param>
/// <returns>true when training completed and the model was saved; false on invalid arguments,
/// corpus open failure, metadata/index save failure or an empty corpus.</returns>
public bool Learn(EncoderOptions args)
{
    // Reject obviously invalid hyper-parameters up front.
    if (args.MinDifference <= 0.0)
    {
        return false;
    }
    if (args.CostFactor < 0.0)
    {
        return false;
    }
    if (args.ThreadsNum <= 0)
    {
        return false;
    }

    var modelWriter = new ModelWriter(args.ThreadsNum, args.CostFactor, args.HugeLexMemLoad, args.RetrainModelFileName);
    if (modelWriter.Open(args.TemplateFileName, args.TrainingCorpusFileName) == false)
    {
        return false;
    }

    var xList = modelWriter.ReadAllRecords();
    // Drop features below the minimum frequency before building the index.
    modelWriter.Shrink(xList, args.MinFeatureFreq);

    if (!modelWriter.SaveModelMetaData(args.ModelFileName))
    {
        return false;
    }
    if (!modelWriter.BuildFeatureSetIntoIndex(args.ModelFileName, args.SlotUsageRateThreshold, args.DebugLevel))
    {
        return false;
    }
    if (xList.Length == 0)
    {
        return false;
    }

    // L1 regularization uses orthant-wise L-BFGS.
    var orthant = args.RegType == REG_TYPE.L1;

    // NOTE(review): a false return from runCRF is deliberately not propagated
    // (matches the original behavior); the feature weights are saved regardless.
    runCRF(xList, modelWriter, orthant, args);

    modelWriter.SaveFeatureWeight(args.ModelFileName, args.BVQ);
    return true;
}
// Prints per-tag error statistics for the current training iteration to the console.
// Detailed output is only produced when args.DebugLevel > 1; the summary Logger
// lines at the bottom are currently disabled.
private static void ShowEvaluation(int recordNum, ModelWriter feature_index, LBFGS lbfgs, int termNum, int itr,
    int[,] merr, int[] yfreq, double diff, DateTime startDT, long nonzero_feature_num, EncoderOptions args)
{
    var ts = DateTime.Now - startDT;
    if (args.DebugLevel > 1)
    {
        for (var i = 0; i < feature_index.y_.Count; i++)
        {
            var total_merr = 0;
            // error-rate -> tags that tag i was confused with, kept sorted by rate.
            var sdict = new SortedDictionary<double, List<string>>();
            for (var j = 0; j < feature_index.y_.Count; j++)
            {
                total_merr += merr[i, j];
                var v = (double)merr[i, j] / (double)yfreq[i];
                // Ignore negligible confusion rates.
                if (v > 0.0001)
                {
                    if (sdict.ContainsKey(v) == false)
                    {
                        sdict.Add(v, new List<string>());
                    }
                    sdict[v].Add(feature_index.y_[j]);
                }
            }
            // Total error rate for tag i, as a percentage.
            var vet = (double)total_merr / (double)yfreq[i];
            vet = vet * 100.0F;

            Console.ForegroundColor = ConsoleColor.Green;
            Console.Write("{0} ", feature_index.y_[i]);
            Console.ResetColor();
            Console.Write("[FR={0}, TE=", yfreq[i]);
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.Write("{0:0.00}%", vet);
            Console.ResetColor();
            Console.WriteLine("]");

            // Worst confusions first (Reverse iterates the sorted rates descending).
            var n = 0;
            foreach (var pair in sdict.Reverse())
            {
                for (int index = 0; index < pair.Value.Count; index++)
                {
                    var item = pair.Value[index];
                    n += item.Length + 1 + 7;
                    if (n > 80)
                    {
                        //only show data in one line, more data in tail will not be show.
                        break;
                    }
                    Console.Write("{0}:", item);
                    Console.ForegroundColor = ConsoleColor.Red;
                    Console.Write("{0:0.00}% ", pair.Key * 100);
                    Console.ResetColor();
                }
                if (n > 80)
                {
                    break;
                }
            }
            Console.WriteLine();
        }
    }

    // Fraction of features with non-zero weight; only consumed by the disabled logging below.
    var act_feature_rate = (double)(nonzero_feature_num) / (double)(feature_index.feature_size()) * 100.0;
    //Logger.WriteLine("iter={0} terr={1:0.00000} serr={2:0.00000} diff={3:0.000000} fsize={4}({5:0.00}% act)", itr, 1.0 * lbfgs.err / termNum, 1.0 * lbfgs.zeroone / recordNum, diff, feature_index.feature_size(), act_feature_rate);
    //Logger.WriteLine("Time span: {0}, Aver. time span per iter: {1}", ts, new TimeSpan(0, 0, (int)(ts.TotalSeconds / (itr + 1))));
}
// Runs the iterative L-BFGS training loop over all encoder taggers.
// Each iteration computes the objective/gradient in parallel worker threads,
// adds the L1 or L2 regularization term, then takes one optimizer step.
// Returns false when the optimizer reports failure; true when training
// converged or the iteration limit was reached.
bool runCRF(EncoderTagger[] x, ModelWriter modelWriter, bool orthant, EncoderOptions args)
{
    var old_obj = double.MaxValue;
    var converge = 0;
    var lbfgs = new LBFGS(args.ThreadsNum);
    // expected is 1-based: index 0 is unused.
    lbfgs.expected = new double[modelWriter.feature_size() + 1];

    var processList = new List<CRFEncoderThread>();
    var parallelOption = new ParallelOptions();
    parallelOption.MaxDegreeOfParallelism = args.ThreadsNum;

    //Initialize encoding threads
    for (var i = 0; i < args.ThreadsNum; i++)
    {
        var thread = new CRFEncoderThread();
        thread.start_i = i;
        thread.thread_num = args.ThreadsNum;
        thread.x = x;
        thread.lbfgs = lbfgs;
        thread.Init();
        processList.Add(thread);
    }

    //Statistic term and result tags frequency
    var termNum = 0;
    int[] yfreq;
    yfreq = new int[modelWriter.y_.Count];
    for (int index = 0; index < x.Length; index++)
    {
        var tagger = x[index];
        termNum += tagger.word_num;
        for (var j = 0; j < tagger.word_num; j++)
        {
            yfreq[tagger.answer_[j]]++;
        }
    }

    //Iterative training
    var startDT = DateTime.Now;
    var dMinErrRecord = 1.0;
    for (var itr = 0; itr < args.MaxIteration; ++itr)
    {
        //Clear result container
        lbfgs.obj = 0.0f;
        lbfgs.err = 0;
        lbfgs.zeroone = 0;
        Array.Clear(lbfgs.expected, 0, lbfgs.expected.Length);

        // Start one worker per configured thread; each computes partial
        // objective/gradient/error counts over its slice of the corpus.
        var threadList = new List<Thread>();
        for (var i = 0; i < args.ThreadsNum; i++)
        {
            var thread = new Thread(processList[i].Run);
            thread.Start();
            threadList.Add(thread);
        }

        int[,] merr;
        merr = new int[modelWriter.y_.Count, modelWriter.y_.Count];
        // Join the workers and accumulate their partial results.
        for (var i = 0; i < args.ThreadsNum; ++i)
        {
            threadList[i].Join();
            lbfgs.obj += processList[i].obj;
            lbfgs.err += processList[i].err;
            lbfgs.zeroone += processList[i].zeroone;
            //Calculate error
            for (var j = 0; j < modelWriter.y_.Count; j++)
            {
                for (var k = 0; k < modelWriter.y_.Count; k++)
                {
                    merr[j, k] += processList[i].merr[j, k];
                }
            }
        }

        long num_nonzero = 0;
        var fsize = modelWriter.feature_size();
        var alpha = modelWriter.alpha_;
        if (orthant == true)
        {
            //L1 regularization: add |w|/C to the objective and count non-zero weights.
            Parallel.For<double>(1, fsize + 1, parallelOption, () => 0, (k, loop, subtotal) =>
            {
                subtotal += Math.Abs(alpha[k] / modelWriter.cost_factor_);
                if (alpha[k] != 0.0)
                {
                    Interlocked.Increment(ref num_nonzero);
                }
                return (subtotal);
            },
            (subtotal) => // lock free accumulator
            {
                double initialValue;
                double newValue;
                do
                {
                    initialValue = lbfgs.obj; // read current value
                    newValue = initialValue + subtotal; //calculate new value
                }
                while (initialValue != Interlocked.CompareExchange(ref lbfgs.obj, newValue, initialValue));
            });
        }
        else
        {
            //L2 regularization: add w^2/(2C) to the objective and w/C to the gradient.
            num_nonzero = fsize;
            Parallel.For<double>(1, fsize + 1, parallelOption, () => 0, (k, loop, subtotal) =>
            {
                subtotal += (alpha[k] * alpha[k] / (2.0 * modelWriter.cost_factor_));
                lbfgs.expected[k] += (alpha[k] / modelWriter.cost_factor_);
                return (subtotal);
            },
            (subtotal) => // lock free accumulator
            {
                double initialValue;
                double newValue;
                do
                {
                    initialValue = lbfgs.obj; // read current value
                    newValue = initialValue + subtotal; //calculate new value
                }
                while (initialValue != Interlocked.CompareExchange(ref lbfgs.obj, newValue, initialValue));
            });
        }

        //Show each iteration result
        var diff = (itr == 0 ? 1.0f : Math.Abs(old_obj - lbfgs.obj) / old_obj);
        old_obj = lbfgs.obj;
        ShowEvaluation(x.Length, modelWriter, lbfgs, termNum, itr, merr, yfreq, diff, startDT, num_nonzero, args);

        // Converged after three consecutive iterations below the threshold.
        if (diff < args.MinDifference)
        {
            converge++;
        }
        else
        {
            converge = 0;
        }
        if (itr > args.MaxIteration || converge == 3)
        {
            break; // 3 is ad-hoc
        }

        // In debug mode, checkpoint the weights whenever the sentence error improves.
        if (args.DebugLevel > 0 && (double)lbfgs.zeroone / (double)x.Length < dMinErrRecord)
        {
            var cc = Console.ForegroundColor;
            Console.ForegroundColor = ConsoleColor.Red;
            Console.Write("[Debug Mode] ");
            Console.ForegroundColor = cc;

            //Save current best feature weight into file
            dMinErrRecord = (double)lbfgs.zeroone / (double)x.Length;
            modelWriter.SaveFeatureWeight("feature_weight_tmp", false);
        }

        // One optimizer step; a non-positive return code means failure.
        int iret;
        iret = lbfgs.optimize(alpha, modelWriter.cost_factor_, orthant);
        if (iret <= 0)
        {
            return (false);
        }
    }
    return (true);
}
/// <summary>
/// Renders the document, compresses it as one or more AV1 tiles (color plus an
/// optional alpha channel) and writes the resulting AVIF container to <paramref name="output"/>.
/// </summary>
/// <param name="document">The document to save.</param>
/// <param name="output">The stream that receives the AVIF file.</param>
/// <param name="quality">Encoder quality; 100 enables fully lossless encoding for color images.</param>
/// <param name="compressionSpeed">The encoder speed/size trade-off.</param>
/// <param name="chromaSubsampling">Requested chroma subsampling (overridden for gray-scale and lossless).</param>
/// <param name="preserveExistingTileSize">Whether to keep the document's existing tile layout.</param>
/// <param name="maxEncoderThreadsOverride">Optional thread-count override; defaults to the processor count.</param>
/// <param name="scratchSurface">Surface the document is rendered into before compression.</param>
/// <param name="progressCallback">Optional progress handler; throwing OperationCanceledException cancels encoding.</param>
/// <param name="arrayPool">Byte-array pool passed through to the writer.</param>
public static void Save(Document document, Stream output, int quality, CompressionSpeed compressionSpeed,
    YUVChromaSubsampling chromaSubsampling, bool preserveExistingTileSize, int? maxEncoderThreadsOverride,
    Surface scratchSurface, ProgressEventHandler progressCallback, IByteArrayPool arrayPool)
{
    // Flatten the document into the scratch surface.
    using (RenderArgs args = new RenderArgs(scratchSurface))
    {
        document.Render(args, true);
    }

    bool grayscale = IsGrayscaleImage(scratchSurface);

    AvifMetadata metadata = CreateAvifMetadata(document);
    EncoderOptions options = new EncoderOptions
    {
        quality = quality,
        compressionSpeed = compressionSpeed,
        // YUV 4:0:0 is always used for gray-scale images because it
        // produces the smallest file size with no quality loss.
        yuvFormat = grayscale ? YUVChromaSubsampling.Subsampling400 : chromaSubsampling,
        maxThreads = maxEncoderThreadsOverride ?? Environment.ProcessorCount
    };

    // Use BT.709 with sRGB transfer characteristics as the default.
    CICPColorData colorConversionInfo = new CICPColorData
    {
        colorPrimaries = CICPColorPrimaries.BT709,
        transferCharacteristics = CICPTransferCharacteristics.Srgb,
        matrixCoefficients = CICPMatrixCoefficients.BT709,
        fullRange = true
    };
    if (quality == 100 && !grayscale)
    {
        // The Identity matrix coefficient places the RGB values into the YUV planes without any conversion.
        // This reduces the compression efficiency, but allows for fully lossless encoding.
        options.yuvFormat = YUVChromaSubsampling.IdentityMatrix;

        // These CICP color values are from the AV1 Bitstream & Decoding Process Specification.
        colorConversionInfo = new CICPColorData
        {
            colorPrimaries = CICPColorPrimaries.BT709,
            transferCharacteristics = CICPTransferCharacteristics.Srgb,
            matrixCoefficients = CICPMatrixCoefficients.Identity,
            fullRange = true
        };
    }
    else
    {
        Metadata docMetadata = document.Metadata;

        // Look for NCLX meta-data if the CICP meta-data was not found.
        // This preserves backwards compatibility with PDN files created by
        // previous versions of this plugin.
        string serializedData = docMetadata.GetUserValue(CICPMetadataName) ?? docMetadata.GetUserValue(NclxMetadataName);
        if (serializedData != null)
        {
            CICPColorData? colorData = CICPSerializer.TryDeserialize(serializedData);
            if (colorData.HasValue)
            {
                colorConversionInfo = colorData.Value;
            }
        }
    }

    ImageGridMetadata imageGridMetadata = TryGetImageGridMetadata(document, options.compressionSpeed, options.yuvFormat, preserveExistingTileSize);

    bool hasTransparency = HasTransparency(scratchSurface);

    CompressedAV1ImageCollection colorImages = new CompressedAV1ImageCollection(imageGridMetadata?.TileCount ?? 1);
    CompressedAV1ImageCollection alphaImages = hasTransparency ? new CompressedAV1ImageCollection(colorImages.Capacity) : null;

    // Progress is reported at the following stages:
    // 1. Before converting the image to the YUV color space
    // 2. Before compressing the color image
    // 3. After compressing the color image
    // 4. After compressing the alpha image (if present)
    // 5. After writing the color image to the file
    // 6. After writing the alpha image to the file (if present)
    uint progressDone = 0;
    uint progressTotal = hasTransparency ? 6U : 4U;
    if (colorImages.Capacity > 1)
    {
        progressTotal *= (uint)colorImages.Capacity;
    }

    try
    {
        Rectangle[] windowRectangles = GetTileWindowRectangles(imageGridMetadata, document);

        // Compress each tile; a single-image document is one tile.
        for (int i = 0; i < colorImages.Capacity; i++)
        {
            CompressedAV1Image color = null;
            CompressedAV1Image alpha = null;
            try
            {
                Rectangle windowRect = windowRectangles[i];
                using (Surface window = scratchSurface.CreateWindow(windowRect))
                {
                    if (hasTransparency)
                    {
                        AvifNative.CompressWithTransparency(window, options, ReportCompressionProgress, ref progressDone, progressTotal, colorConversionInfo, out color, out alpha);
                    }
                    else
                    {
                        AvifNative.CompressWithoutTransparency(window, options, ReportCompressionProgress, ref progressDone, progressTotal, colorConversionInfo, out color);
                    }
                }

                // Ownership transfers to the collections; null the locals so the
                // finally block does not dispose images the collections now own.
                colorImages.Add(color);
                color = null;
                if (hasTransparency)
                {
                    alphaImages.Add(alpha);
                    alpha = null;
                }
            }
            finally
            {
                color?.Dispose();
                alpha?.Dispose();
            }
        }

        ColorInformationBox colorInformationBox;

        // An embedded ICC profile takes precedence over NCLX color information.
        byte[] iccProfileBytes = metadata.GetICCProfileBytesReadOnly();
        if (iccProfileBytes != null && iccProfileBytes.Length > 0)
        {
            colorInformationBox = new IccProfileColorInformation(iccProfileBytes);
        }
        else
        {
            colorInformationBox = new NclxColorInformation(colorConversionInfo.colorPrimaries, colorConversionInfo.transferCharacteristics, colorConversionInfo.matrixCoefficients, colorConversionInfo.fullRange);
        }

        AvifWriter writer = new AvifWriter(colorImages, alphaImages, metadata, imageGridMetadata, options.yuvFormat, colorInformationBox, progressCallback, progressDone, progressTotal, arrayPool);
        writer.WriteTo(output);
    }
    finally
    {
        colorImages?.Dispose();
        alphaImages?.Dispose();
    }

    // Forwards native encoder progress to the caller's handler; returning false
    // tells the native encoder to cancel.
    bool ReportCompressionProgress(uint done, uint total)
    {
        try
        {
            progressCallback?.Invoke(null, new ProgressEventArgs(((double)done / total) * 100.0, true));
            return (true);
        }
        catch (OperationCanceledException)
        {
            return (false);
        }
    }
}
/// <summary>Encodes a <see cref="BitmapSource"/> to a WebP image and writes the result into the given stream.</summary>
/// <param name="image">The image which will be used to encode to WebP image.</param>
/// <param name="outputStream">The output stream to write the encoded webp data to.</param>
/// <param name="options">The encoder options for webp encoder.</param>
/// <exception cref="ObjectDisposedException">Thrown when this instance has already been disposed.</exception>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="image"/> or <paramref name="outputStream"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the image is still downloading or the stream is not writable.</exception>
/// <exception cref="WebpEncodeException">Thrown when the encoder has wrong options.</exception>
public void Encode(BitmapSource image, Stream outputStream, EncoderOptions options)
{
    if (this.disposed)
    {
        throw new ObjectDisposedException(nameof(Webp));
    }
    if (image == null)
    {
        throw new ArgumentNullException(nameof(image));
    }
    if (image.IsDownloading)
    {
        throw new ArgumentException("The image is still being downloaded.", nameof(image));
    }
    if (outputStream == null)
    {
        throw new ArgumentNullException(nameof(outputStream));
    }
    if (!outputStream.CanWrite)
    {
        throw new ArgumentException("The output stream must be writable.", nameof(outputStream));
    }

    BitmapSource src;
    if (image.Format == PixelFormats.Bgra32)
    {
        src = image;

        // Fast path: a WriteableBitmap exposes its pixel buffer directly, so we can
        // encode straight from the (locked) back buffer without an extra copy.
        if (image is WriteableBitmap wbm)
        {
            wbm.Lock();
            try
            {
                var wrappedStream = new OutputStream(outputStream);
                this.webp.EncodeRGB(wbm.BackBuffer, wbm.PixelWidth, wbm.PixelHeight, wbm.BackBufferStride, true, wrappedStream, options);
            }
            finally
            {
                wbm.Unlock();
            }
            return;
        }
    }
    else
    {
        src = new FormatConvertedBitmap(image, PixelFormats.Bgra32, image.Palette, 0d);

        // Freeze only the bitmap this method created. The previous code froze `src`
        // unconditionally, which — on the Bgra32 path where src == image — permanently
        // made the CALLER's bitmap immutable: a surprising side effect on an input.
        if (src.CanFreeze)
        {
            src.Freeze();
        }
    }

    // BGRA32 is 32 bits-per-pixel => 4 bytes-per-pixel (1 byte = 8 bits) => Stride = Width (in pixels) * 4
    var stride = src.PixelWidth * 4;
    var bufferSize = stride * src.PixelHeight;
    var buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
    try
    {
        src.CopyPixels(buffer, stride, 0);

        // Pin the managed buffer so the native encoder can read it by address.
        // GCHandle.Alloc either succeeds or throws — the previous `if (handle.IsAllocated)`
        // guard was dead code that, had it ever been false, would have silently skipped
        // encoding. Alloc is now followed immediately by try/finally so the pin is
        // always released.
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            var wrappedStream = new OutputStream(outputStream);
            this.webp.EncodeRGB(handle.AddrOfPinnedObject(), src.PixelWidth, src.PixelHeight, stride, true, wrappedStream, options);
        }
        finally
        {
            handle.Free();
        }
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}
// Wires up the JPEG XL encoder/decoder, the file-system watcher, the example
// input image, and the default encode parameters for the UI.
private void Init()
{
    // Create Options for the encoder and activate validation
    // NOTE(review): the comment above says validation is activated, but Validate
    // is set to false here — confirm which is intended.
    EncoderOptions encOptions = new EncoderOptions
    {
        EncoderPath = Path.Combine(BaseDir, "cjxl.exe"),
        Validate = false
    };

    // Create new encoder with previously prepared Options
    Enc = new Encoder(encOptions);

    // Create Options for the decoder.
    // NOTE(review): decOptions is never used after construction — presumably it
    // should be passed to a Decoder instance; verify against the Encoder/Decoder API.
    DecoderOptions decOptions = new DecoderOptions
    {
        DecoderPath = Path.Combine(BaseDir, "djxl.exe"),
    };

    // Try to get versions (queries cjxl.exe / djxl.exe for their version info).
    // NOTE(review): this method mixes `Enc` and `enc` — can't tell from here
    // whether these are the same member (property vs. backing field) or two
    // different ones; confirm.
    encOptions.TryGetEncoderVersionInfo();
    enc.Decoder.Options.TryGetDecoderVersionInfo();

    // listen to propertychange events of the encoder to get notified
    // if the images changes to update the FileSystem watcher
    // and it updates the image in the viewer component
    enc.PropertyChanged += enc_PropertyChanged;
    initWatcher(Image2);

    // Set example image as input file (only if the sample file exists on disk)
    if (File.Exists(example))
    {
        Enc.InFile = new FileInfo(example);
    }

    // Configure Quality
    jxlNET.Encoder.Parameters.Quality q = new jxlNET.Encoder.Parameters.Quality();
    //q.Value = 101; //Throws ArgumentOutOfRangeException limit is 100
    q.Value = 50; // valid value — the range limit is 100 (the stale comment copied from the line above was wrong)
    Console.WriteLine("Encode with Quality: " + q.Value);
    Enc.AddOrReplaceParam(q);

    // Configure Speed (effort level 3)
    jxlNET.Encoder.Parameters.Speed s = new jxlNET.Encoder.Parameters.Speed(3);
    Enc.AddOrReplaceParam(s);

    // print out all current set parameter
    foreach (var p in enc.Params)
    {
        Console.WriteLine("param: " + p.ToString());
    }

    //Load saved Presets from files
    LoadPresets();

    // Dynamically build controls to add parameter
    ListBoxParameter.FillListBox(LbParam, enc);
}