/// <summary>
/// Wires the native texture-graph interop to the DrawingSurface once it is loaded:
/// sizes the interop from the canvas, hooks the content/manipulation providers, and
/// connects the camera frame pipeline (camera -> image processing -> texture).
/// </summary>
/// <param name="sender">The DrawingSurface raising the event.</param>
/// <param name="e">Routed event data (unused).</param>
private void canvas_Loaded(object sender, RoutedEventArgs e)
{
    texInterop = new TextureGraphInterop();

    // Set window bounds in dips
    texInterop.WindowBounds = new Windows.Foundation.Size(
        (float)canvas.ActualWidth,
        (float)canvas.ActualHeight
        );

    // Set native resolution in pixels
    // ScaleFactor is a percentage (e.g. 150), so divide by 100; "+ 0.5f" rounds to nearest pixel.
    texInterop.NativeResolution = new Windows.Foundation.Size(
        (float)Math.Floor(canvas.ActualWidth * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f),
        (float)Math.Floor(canvas.ActualHeight * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f)
        );

    // Set render resolution to the full native resolution
    texInterop.RenderResolution = texInterop.NativeResolution;

    // Hook-up native component to DrawingSurface
    canvas.SetContentProvider(texInterop.CreateContentProvider());
    canvas.SetManipulationHandler(texInterop);

    // 800x448 preview size; presumably chosen to match the native render target — TODO confirm.
    var previewSize = new Windows.Foundation.Size(800, 448);
    cam = new Camera(previewSize, CameraSensorLocation.Back);
    im = new ImageProcessing(detector);

    // When we have an input frame, call ImageProcessing::processFrame
    cam.OnFrameReady += im.processFrame;

    // When we have processed a frame, output it to the textureInterop
    im.frameProcessed += texInterop.setTexturePtr;
}
/// <summary>
/// Handles the "二值化" (binarize) menu item: thresholds the current image with the
/// automatically determined best threshold, computed on a worker thread.
/// </summary>
private async void 二值化ToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (pictureBox1.Image == null)
    {
        return;
    }

    二值化ToolStripMenuItem.Enabled = false;
    try
    {
        imgpr = new ImageProcessing(pictureBox1.Image as Bitmap);
        // FIX: the original assigned pictureBox1.Image inside Task.Run, i.e. on a worker
        // thread. WinForms controls may only be touched from the thread that created them,
        // so compute off-thread and assign after the await (which resumes on the UI thread).
        var result = await Task.Run(() => imgpr.TwoValue(imgpr.GetBestTresh()));
        pictureBox1.Image = result;
    }
    finally
    {
        // FIX: re-enable in finally so an exception cannot leave the menu item disabled.
        二值化ToolStripMenuItem.Enabled = true;
    }
}
/// <summary>
/// Handles the "中值滤波" (median filter) menu item: applies the median filter to the
/// current image on a worker thread.
/// </summary>
private async void 中值滤波ToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (pictureBox1.Image == null)
    {
        return;
    }

    中值滤波ToolStripMenuItem.Enabled = false;
    try
    {
        imgpr = new ImageProcessing(pictureBox1.Image as Bitmap);
        // FIX: the original set pictureBox1.Image inside Task.Run (cross-thread control
        // access). Compute off-thread, assign on the UI thread after the await.
        var result = await Task.Run(() => imgpr.MidFilter());
        pictureBox1.Image = result;
    }
    finally
    {
        // FIX: re-enable in finally so an exception cannot leave the menu item disabled.
        中值滤波ToolStripMenuItem.Enabled = true;
    }
}
public void Rotate_Image_Test()
{
    // Rotate the test image 90 degrees, write it out, and verify the file exists.
    const string outputPath = "c:\\Temp\\test_rotated.jpg";
    using (var processor = new ImageProcessing())
    {
        var rotatedStream = processor.Process(
            imagePath: _testFileName,
            rotate: Rotation.Rotate90
            );
        ImageHelper.SaveStream(rotatedStream, outputPath);
        Assert.IsTrue(File.Exists(outputPath));
    }
}
public void GrayScale_Image_Test()
{
    // Convert the test image to grayscale, write it out, and verify the file exists.
    // Other supported color formats: Gray, BlackAndWhite, RGB, BGR, CMYK.
    const string outputPath = "c:\\Temp\\test_gray.jpg";
    using (var processor = new ImageProcessing())
    {
        var grayStream = processor.Process(
            imagePath: _testFileName,
            colorFormat: ColorFormat.Gray
            );
        ImageHelper.SaveStream(grayStream, outputPath);
        Assert.IsTrue(File.Exists(outputPath));
    }
}
public void Flip_Image_Test()
{
    // Flip the test image horizontally (not vertically), write it out, and verify the file exists.
    const string outputPath = "c:\\Temp\\test_flip.jpg";
    using (var processor = new ImageProcessing())
    {
        var flippedStream = processor.Process(
            imagePath: _testFileName,
            flipHorizontal: true,
            flipVertical: false
            );
        ImageHelper.SaveStream(flippedStream, outputPath);
        Assert.IsTrue(File.Exists(outputPath));
    }
}
/// <summary>
/// Paints the flat-themed combo box: background gradient, border, selected item's
/// image/text, and the drop-down arrow on the right.
/// </summary>
/// <param name="g">Target graphics surface.</param>
protected virtual void PaintFlatControlBorder(Graphics g)
{
    int pw = 1;
    Color c1 = Color.Empty, c2 = Color.Empty, cb = Color.Empty, cText = Color.Empty;
    c1 = Enabled ? ThemeManager.GradientNormalColor1 : ThemeManager.BackColor;
    c2 = Enabled ? ThemeManager.GradientNormalColor2 : ThemeManager.BackColor;
    cb = Enabled ? ThemeManager.BorderColor : ThemeManager.GradientNormalColor2;
    cText = Enabled ? ThemeManager.ForeColor : Color.FromKnownColor(KnownColor.ControlDark);

    // Hover/focus states override the normal gradient and border colors.
    if (Enabled && (_isHovered || Focused))
    {
        if (_isHovered && Focused)
        {
            c1 = ThemeManager.GradientFocusHoverColor1;
            c2 = ThemeManager.GradientFocusHoverColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else if (Focused)
        {
            c1 = ThemeManager.GradientFocusColor1;
            c2 = ThemeManager.GradientFocusColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else
        {
            c1 = ThemeManager.GradientHoverColor1;
            c2 = ThemeManager.GradientHoverColor2;
            cText = ThemeManager.SelectedTextColor;
        }
    }

    if (_overrideForeColor != Color.Empty)
    {
        cText = _overrideForeColor;
    }

    // Clear slightly beyond the client area so no stale edge pixels remain.
    Rectangle rc = ClientRectangle;
    rc.Inflate(2, 2);
    using (Brush b = new SolidBrush(ThemeManager.BackColor))
    {
        g.FillRectangle(b, rc);
    }

    // Gradient fill and border over the client area.
    rc = ClientRectangle;
    rc.Width -= 1;
    rc.Height -= 1;
    using (Pen p = new Pen(cb, pw))
    using (Brush b = new LinearGradientBrush(rc, c1, c2, 90))
    {
        g.FillRectangle(b, rc);
        g.DrawRectangle(p, rc);
    }

    // Content area: text on the left, 12px arrow strip on the right.
    rc = new Rectangle(ClientRectangle.Left + 2, ClientRectangle.Top + 2, ClientRectangle.Width - 6, ClientRectangle.Height - 6);
    Rectangle rcText = new Rectangle(rc.Left, rc.Top, rc.Width - 12, rc.Height);
    Rectangle rcArrow = new Rectangle(rcText.Right, rc.Top, 12, rc.Height);

    Image img = null;
    ComboBoxItem item = this.SelectedItem as ComboBoxItem;
    if (item != null)
    {
        img = item.Image;
    }

    // FIX: StringFormat is IDisposable (wraps a GDI+ handle); the original leaked one per paint.
    using (Brush b = new SolidBrush(cText))
    using (StringFormat sf = new StringFormat())
    {
        sf.Alignment = StringAlignment.Near;
        sf.LineAlignment = StringAlignment.Center;
        sf.Trimming = StringTrimming.EllipsisCharacter;
        //sf.FormatFlags = StringFormatFlags.NoWrap;

        // FIX: the original dereferenced SelectedItem unconditionally and threw a
        // NullReferenceException when nothing was selected; fall back to empty text.
        string text = SelectedItem != null ? SelectedItem.GetFieldValueAsText(DisplayMember) : string.Empty;
        if (img != null)
        {
            // 16x16 item image, then shift the text right of it.
            g.DrawImage(img, rcText.X + 2, rcText.Top, 16, 16);
            rcText.X += 20;
            rcText.Width -= 20;
        }
        g.DrawString(text, this.Font, b, rcText, sf);
    }

    // Drop-down arrow.
    using (GraphicsPath gp = ImageProcessing.GenerateCenteredArrow(rcArrow))
    using (Brush b = new SolidBrush(cText))
    using (Pen p = new Pen(b, 1))
    {
        g.FillPath(b, gp);
        g.DrawPath(p, gp);
    }
}
/// <summary>
/// Creates a JSON output sink that forwards results to the supplied callback.
/// </summary>
/// <param name="handler">Callback invoked with produced JSON.</param>
public JsonOutput(ImageProcessing.JsonCallback handler)
{
    _handler = handler;
}
/// <summary>
/// Assigns the image-processing implementation this instance will use.
/// </summary>
/// <param name="imageProcessing">The processor to delegate image work to.</param>
public void SetProcessor(ImageProcessing imageProcessing)
{
    this.imageProcessing = imageProcessing;
}
/// <summary>
/// Handles the Marr (edge-detection) menu item: applies the Marr operator to the
/// current image on a worker thread.
/// </summary>
private async void marrToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (pictureBox1.Image == null)
    {
        return;
    }

    marrToolStripMenuItem.Enabled = false;
    try
    {
        imgpr = new ImageProcessing(pictureBox1.Image as Bitmap);
        // FIX: the original set pictureBox1.Image inside Task.Run (cross-thread control
        // access). Compute off-thread, assign on the UI thread after the await.
        var result = await Task.Run(() => imgpr.Marr());
        pictureBox1.Image = result;
    }
    finally
    {
        // FIX: re-enable in finally so an exception cannot leave the menu item disabled.
        marrToolStripMenuItem.Enabled = true;
    }
#if nDEBUG
    // NOTE(review): "nDEBUG" appears never defined, so this is compiled out — confirm intentional.
    MessageBox.Show("success!");
#endif
}
/// <summary>
/// Get animations for current spellcast.
/// This happens the first time a spell is cast and stored for re-casting.
/// It's likely player will use a wide variety of spell types in normal play.
/// </summary>
/// <param name="spellType">Spell type to load cast animations for.</param>
/// <param name="border">Border pixels added around each frame when reading colors.</param>
/// <param name="dilate">Dilate edge colors into the border (only when border > 0).</param>
void SetCurrentAnims(SpellTypes spellType, int border = 0, bool dilate = false)
{
    // Attempt to get current anims
    // FIX: single dictionary lookup via TryGetValue instead of ContainsKey + indexer.
    Texture2D[] cachedAnims;
    if (castAnims.TryGetValue(spellType, out cachedAnims))
    {
        currentAnims = cachedAnims;
        return;
    }

    // Load spellcast file
    string filename = WeaponBasics.GetSpellAnimFilename(spellType);
    string path = Path.Combine(DaggerfallUnity.Instance.Arena2Path, filename);
    CifRciFile cifFile = new CifRciFile();
    if (!cifFile.Load(path, FileUsage.UseMemory, true))
    {
        throw new Exception(string.Format("Could not load spell anims file {0}", path));
    }

    // Load textures - spells have a single frame per record unlike weapons
    Texture2D[] frames = new Texture2D[cifFile.RecordCount];
    for (int record = 0; record < cifFile.RecordCount; record++)
    {
        Texture2D texture = null;

        // Import custom texture or load classic texture
        if (TextureReplacement.CustomCifExist(filename, record, 0, MetalTypes.None))
        {
            texture = TextureReplacement.LoadCustomCif(filename, record, 0, MetalTypes.None);
        }
        else
        {
            // Get Color32 array
            DFSize sz;
            Color32[] colors = cifFile.GetColor32(record, 0, 0, border, out sz);

            // Dilate edges
            if (border > 0 && dilate)
            {
                ImageProcessing.DilateColors(ref colors, sz);
            }

            // Create Texture2D
            texture = new Texture2D(sz.Width, sz.Height, TextureFormat.RGBA32, false);
            texture.SetPixels32(colors);
            texture.Apply(true);
        }

        // Set filter mode and store in frames array (Unity's bool operator: false if destroyed/null)
        if (texture)
        {
            texture.filterMode = (FilterMode)DaggerfallUnity.Settings.MainFilterMode;
            frames[record] = texture;
        }
    }

    // Add frames array to dictionary
    castAnims.Add(spellType, frames);

    // Use as current anims
    currentAnims = frames;
}
/// <summary>
/// Pushes this processor's current properties (as JSON) to the registered pipeline,
/// then asynchronously re-reads them back via <c>GetPropertyCallback</c>.
/// NOTE(review): presumably the read-back confirms what the pipeline actually accepted — confirm.
/// </summary>
public void UpdateProperties()
{
    ImageProcessing.SetProcessorProperties(_registeredPipelineId, _id, GetJsonProperties());
    ImageProcessing.GetProcessorProperties(_registeredPipelineId, _id, GetPropertyCallback);
}
/// <summary>
/// Trains a small feed-forward network to distinguish hand images (label 1.0) from a
/// fixed set of negative samples (label 0.0), then prints the network's output for a
/// handful of test files and exports the trained network.
/// </summary>
static void Main(string[] args)
{
    // FIX: the dataset root was repeated verbatim in every path literal; build all
    // paths from one base directory instead.
    const string baseDir = @"C:\Users\Jonathan\Documents\visual studio 2017\Projects\HandRecognition\Core\Datasets";

    var files = Directory.GetFiles(Path.Combine(baseDir, "MOHI-S1-P1-50"));
    List<DataSet> datasets = new List<DataSet>();
    var hiddenSizes = new int[] { 100, 100 };
    // 67 * 50 inputs: presumably the flattened image size — TODO confirm against ImageProcessing.
    var network = new Network(67 * 50, hiddenSizes, 1);
    Console.WriteLine("datasets");

    // Negative samples (label 0.0).
    string[] noAcceptable =
    {
        Path.Combine(baseDir, "no.jpg"),
        Path.Combine(baseDir, "no2.fw.png"),
        Path.Combine(baseDir, "no3.fw.png"),
        Path.Combine(baseDir, "no4.fw.png"),
        Path.Combine(baseDir, "no5.fw.png"),
        Path.Combine(baseDir, "no6.fw.png"),
        Path.Combine(baseDir, "no7.fw.png"),
    };
    foreach (var file in noAcceptable)
    {
        var image = ImageProcessing.ReadImage(file);
        var vts = ImageProcessing.GetGrayscaleBlue(image);
        datasets.Add(new DataSet(vts, new double[] { 0.0 }));
    }

    // Positive samples (label 1.0).
    foreach (var file in files)
    {
        var image = ImageProcessing.ReadImage(file);
        var vts = ImageProcessing.GetGrayscaleBlue(image);
        datasets.Add(new DataSet(vts, new double[] { 1.0 }));
    }

    Console.WriteLine("Training.........");
    network.Train(datasets, 0.015);
    datasets = new List<DataSet>();
    Console.WriteLine("Done, testing...");

    string[] testfiles =
    {
        Path.Combine(baseDir, "no.jpg"),
        Path.Combine(baseDir, "no2.fw.png"),
        Path.Combine(baseDir, "no3.fw.png"),
        Path.Combine(baseDir, @"MOHI-S1-P1-50\S1-P3-F-42-1.jpg"),
        Path.Combine(baseDir, "other.jpg"),
        Path.Combine(baseDir, "S1-P52-M-14-1.jpg")
    };
    foreach (var test in testfiles)
    {
        var imageTest = ImageProcessing.ReadImage(test);
        var vtsTest = ImageProcessing.GetGrayscaleBlue(imageTest);
        var results = network.Compute(vtsTest);
        foreach (var result in results)
        {
            Console.WriteLine(result);
        }
        Console.WriteLine("--");
    }

    Console.WriteLine("End.");
    Console.ReadLine();
    HandRecognition.Core.Helpers.ExportHelper.ExportNetwork(network, "ho.txt");
}
/// <summary>
/// Initializes the view model with a fresh image model and processor.
/// </summary>
public ImageViewModelBase()
{
    _imageModel = new ImageModel();
    _imageProcessing = new ImageProcessing();
}
/// <summary>
/// Handles the Kirsch (edge-detection) menu item: applies the async Kirsch operator
/// to the current image. The await resumes on the UI thread, so the assignment to
/// pictureBox1.Image is thread-safe.
/// </summary>
private async void kirschToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (pictureBox1.Image == null)
    {
        return;
    }

    kirschToolStripMenuItem.Enabled = false;
    try
    {
        imgpr = new ImageProcessing(pictureBox1.Image as Bitmap);
        pictureBox1.Image = await imgpr.Kirsch();
    }
    finally
    {
        // FIX: re-enable in finally so an exception cannot leave the menu item disabled.
        kirschToolStripMenuItem.Enabled = true;
    }
#if nDEBUG
    // NOTE(review): "nDEBUG" appears never defined, so this is compiled out — confirm intentional.
    MessageBox.Show("success!");
#endif
}
/// <summary>
/// Initializes the camera for this implementation by delegating to the
/// OpenCV-backed camera setup in <see cref="ImageProcessing"/>.
/// </summary>
public override void InitCamera()
{
    ImageProcessing.SetOpenCVCamera();
}
/// <summary>
/// Subscribes the given handler to the LaTeX result event.
/// NOTE(review): there is no matching removal method visible here; unremoved handlers
/// can keep subscribers alive — confirm a corresponding unsubscribe path exists.
/// </summary>
/// <param name="latexResult">Handler delegate to add to <c>LatexResultEvent</c>.</param>
public void AddOnLatexResultListener(ImageProcessing latexResult)
{
    LatexResultEvent += latexResult;
}
/// <summary>
/// Classifies the sun-disk condition of the current image with a pre-trained neural
/// network: reads (or computes) the image's GrIxYRGB statistics, finds the concurrent
/// sensor data nearest in time to the image timestamp, normalizes, and predicts.
/// </summary>
/// <param name="SDC_NNconfigFile">CSV with the NN layer configuration.</param>
/// <param name="SDC_NNtrainedParametersFile">CSV with the trained theta values.</param>
/// <param name="NormMeansFile">CSV with per-feature normalization means.</param>
/// <param name="NormRangeFile">CSV with per-feature normalization ranges.</param>
/// <returns>The predicted <see cref="SunDiskCondition"/>.</returns>
/// <exception cref="FileNotFoundException">Any required input file is missing.</exception>
/// <exception cref="Exception">Stats calculation or concurrent-data search failed.</exception>
public async Task<SunDiskCondition> CalcSDC_NN(string SDC_NNconfigFile, string SDC_NNtrainedParametersFile, string NormMeansFile, string NormRangeFile)
{
    // Validate all required input files up front.
    if (!File.Exists(SDC_NNconfigFile))
    {
        throw new FileNotFoundException("couldn`t find the file specified: " + SDC_NNconfigFile);
    }
    if (!File.Exists(SDC_NNtrainedParametersFile))
    {
        throw new FileNotFoundException("couldn`t find the file specified: " + SDC_NNtrainedParametersFile);
    }
    if (!File.Exists(NormMeansFile))
    {
        throw new FileNotFoundException("couldn`t find the file specified: " + NormMeansFile);
    }
    if (!File.Exists(NormRangeFile))
    {
        throw new FileNotFoundException("couldn`t find the file specified: " + NormRangeFile);
    }

    // Default the mapping list location if not configured.
    if (string.IsNullOrEmpty(ImagesRoundMasksXMLfilesMappingList))
    {
        ImagesRoundMasksXMLfilesMappingList = Directory.GetCurrentDirectory() + Path.DirectorySeparatorChar + "settings" +
                                              Path.DirectorySeparatorChar + "ImagesRoundMasksXMLfilesMappingList.csv";
    }
    if (!File.Exists(ImagesRoundMasksXMLfilesMappingList))
    {
        throw new FileNotFoundException("couldn`t find the file specified: " + ImagesRoundMasksXMLfilesMappingList);
    }

    #region read or calculate GrIxYRGB stats

    // Prefer previously computed stats persisted as XML.
    if (Directory.Exists(imageYRGBstatsXMLdataFilesDirectory))
    {
        List<string> foundXMLfiles = Directory.EnumerateFiles(imageYRGBstatsXMLdataFilesDirectory,
            ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ImageFilename, "", false),
            SearchOption.AllDirectories).ToList();
        if (foundXMLfiles.Any())
        {
            currImageStatsData = (SkyImageIndexesStatsData)
                ServiceTools.ReadObjectFromXML(foundXMLfiles[0], typeof(SkyImageIndexesStatsData));
        }
    }

    if (currImageStatsData == null)
    {
        // FIX: the original created a cold Task with "new Task<T>(...)" and awaited it
        // without ever calling Start(), so the await never completed. Task.Run both
        // schedules and returns the hot task.
        currImageStatsData = await Task.Run(() =>
        {
            Dictionary<string, object> optionalParameters = new Dictionary<string, object>();
            optionalParameters.Add("ImagesRoundMasksXMLfilesMappingList", ImagesRoundMasksXMLfilesMappingList);
            Stopwatch sw = new Stopwatch();
            sw.Start();
            optionalParameters.Add("Stopwatch", sw);

            ImageStatsDataCalculationResult currImageProcessingResult = null;
            try
            {
                currImageProcessingResult = ImageProcessing.CalculateImageStatsData(ImageFilename, optionalParameters);
            }
            catch (Exception)
            {
                // Best-effort: a failed calculation is reported as null and rejected below.
                return null;
            }

            if (currImageProcessingResult == null)
            {
                return null;
            }

            // Persist the computed stats so future calls hit the XML cache above.
            string strImageGrIxYRGBDataFileName =
                ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(ImageFilename, imageYRGBstatsXMLdataFilesDirectory);
            ServiceTools.WriteObjectToXML(currImageProcessingResult.grixyrgbStatsData, strImageGrIxYRGBDataFileName);
            return currImageProcessingResult.grixyrgbStatsData;
        });
    }

    if (currImageStatsData == null)
    {
        throw new Exception("ERROR searching concurrent data for image. Will not proceed.");
    }

    #endregion read or calculate GrIxYRGB stats

    ConcurrentData nearestConcurrentData = null;

    #region search for concurrent data

    // FIX: same cold-Task bug as above — the search task was never started; use Task.Run.
    nearestConcurrentData = await Task.Run(() =>
    {
        string currImgFilename = Path.GetFileNameWithoutExtension(ImageFilename);
        // Strip the "devID<n>" marker, then restore ":" separators in the time part
        // of names like "2015-12-16T06-01-38".
        string ptrn = @"(devID\d)";
        Regex rgxp = new Regex(ptrn, RegexOptions.IgnoreCase);
        string strCurrImgDT = rgxp.Replace(currImgFilename.Substring(4), "");
        strCurrImgDT = strCurrImgDT.Substring(0, 11) + strCurrImgDT.Substring(11).Replace("-", ":");
        DateTime currImgDT = DateTime.Parse(strCurrImgDT, null, System.Globalization.DateTimeStyles.AdjustToUniversal);

        List<string> filesListConcurrentData = Directory.EnumerateFiles(ConcurrentDataXMLfilesBasePath,
            ConventionalTransitions.ImageConcurrentDataFilesNamesPattern(),
            SearchOption.AllDirectories).ToList();

        // Parse each concurrent-data file's timestamp out of its name.
        List<Tuple<string, DateTime>> XMLfilesListConcurrentData = filesListConcurrentData.ConvertAll(
            strXMLfilename =>
            {
                string xmlFile1DateTimeString =
                    Path.GetFileNameWithoutExtension(strXMLfilename).Replace("data-", "").Substring(0, 19);
                xmlFile1DateTimeString = xmlFile1DateTimeString.Substring(0, 11) +
                                         xmlFile1DateTimeString.Substring(11).Replace("-", ":");
                DateTime dt1 = DateTime.Parse(xmlFile1DateTimeString);
                return new Tuple<string, DateTime>(strXMLfilename, dt1);
            });

        // Pick the file whose timestamp is closest to the image timestamp.
        string nearestConcurrentDataFileName = XMLfilesListConcurrentData.Aggregate((cDt1, cDt2) =>
        {
            TimeSpan tspan1 = new TimeSpan(Math.Abs((cDt1.Item2 - currImgDT).Ticks));
            TimeSpan tspan2 = new TimeSpan(Math.Abs((cDt2.Item2 - currImgDT).Ticks));
            return (tspan1 <= tspan2) ? cDt1 : cDt2;
        }).Item1;

        Dictionary<string, object> currDict = ServiceTools.ReadDictionaryFromXML(nearestConcurrentDataFileName);
        currDict.Add("XMLfileName", Path.GetFileName(nearestConcurrentDataFileName));
        ConcurrentData nearestConcurrentDataObtained = new ConcurrentData(currDict);

        // Reject concurrent data more than two minutes away from the image timestamp.
        if (new TimeSpan(Math.Abs((nearestConcurrentDataObtained.datetimeUTC - currImgDT).Ticks)) >= new TimeSpan(0, 2, 0))
        {
            nearestConcurrentDataObtained = null;
        }
        return nearestConcurrentDataObtained;
    });

    if (nearestConcurrentData == null)
    {
        throw new Exception("ERROR searching concurrent data for image. Will not proceed.");
    }

    #endregion search for concurrent data

    // Load normalization vectors, trained parameters and layer configuration, then predict.
    DenseVector dvMeans = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormMeansFile, 0, ",")).Row(0);
    DenseVector dvRanges = (DenseVector)((DenseMatrix)ServiceTools.ReadDataFromCSV(NormRangeFile, 0, ",")).Row(0);
    DenseVector dvThetaValues = (DenseVector)ServiceTools.ReadDataFromCSV(SDC_NNtrainedParametersFile, 0, ",");
    List<int> NNlayersConfig =
        new List<double>(((DenseMatrix)ServiceTools.ReadDataFromCSV(SDC_NNconfigFile, 0, ",")).Row(0))
            .ConvertAll(dVal => Convert.ToInt32(dVal));

    List<double> decisionProbabilities = new List<double>();
    return PredictSDC_NN(currImageStatsData, nearestConcurrentData, NNlayersConfig, dvThetaValues, dvMeans,
        dvRanges, out decisionProbabilities);
}
/// <summary>
/// Gets a rectangular piece of the current Screen color array.
/// </summary>
/// <param name="left">left bound (presumably inclusive, matching FractionalMatchPiece — confirm)</param>
/// <param name="right">right bound</param>
/// <param name="top">top bound</param>
/// <param name="bottom">bottom bound</param>
/// <param name="trimOffset">offset of the returned piece within the full screen array</param>
/// <returns>the selected sub-array of colors</returns>
internal Color[,] ScreenPiece(int left, int right, int top, int bottom, out Point trimOffset)
{
    return(ImageProcessing.ScreenPiece(Screen, left, right, top, bottom, out trimOffset));
}
/// <summary>
/// Determines what fraction of a sub-region of the RGB image matches a color filter.
/// </summary>
/// <param name="filter">filter to use for matching</param>
/// <param name="left">left bound (inclusive)</param>
/// <param name="right">right bound (inclusive)</param>
/// <param name="top">top bound (inclusive)</param>
/// <param name="bottom">bottom bound (inclusive)</param>
/// <returns>The fraction (0-1) of the image that matches the filter</returns>
internal double FractionalMatchPiece(ColorFilter filter, int left, int right, int top, int bottom)
{
    return ImageProcessing.FractionalMatch(ColorFilterPiece(filter, left, right, top, bottom));
}
/// <summary>
/// Handles the "直方图规定化" (histogram specification) menu item: normalizes the
/// current image's histogram on a worker thread.
/// </summary>
private async void 直方图规定化ToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (pictureBox1.Image == null)
    {
        return;
    }

    直方图规定化ToolStripMenuItem.Enabled = false;
    try
    {
        imgpr = new ImageProcessing(pictureBox1.Image as Bitmap);
        // FIX: the original set pictureBox1.Image inside Task.Run (cross-thread control
        // access). Compute off-thread, assign on the UI thread after the await.
        // NOTE(review): 210/600 are the original normalization arguments — their meaning
        // is defined by ImageProcessing.Normalize; confirm before changing.
        var result = await Task.Run(() => imgpr.Normalize(210, 600));
        pictureBox1.Image = result;
    }
    finally
    {
        // FIX: re-enable in finally so an exception cannot leave the menu item disabled.
        直方图规定化ToolStripMenuItem.Enabled = true;
    }
}
/// <summary>
/// Wires up the waifu module's image-processing, Shinden-client and configuration dependencies.
/// </summary>
public Waifu(ImageProcessing img, ShindenClient client, IConfig config)
{
    _config = config;
    _shClient = client;
    _img = img;
}
/// <summary>
/// Paints the flat-themed font combo box: background gradient, border, the selected
/// font family's name rendered in that font, and the drop-down arrow.
/// </summary>
/// <param name="g">Target graphics surface.</param>
protected override void PaintFlatControlBorder(Graphics g)
{
    int pw = 1;
    Color c1 = Color.Empty, c2 = Color.Empty, cb = Color.Empty, cText = Color.Empty;
    c1 = Enabled ? ThemeManager.GradientNormalColor1 : ThemeManager.BackColor;
    c2 = Enabled ? ThemeManager.GradientNormalColor2 : ThemeManager.BackColor;
    cb = Enabled ? ThemeManager.BorderColor : ThemeManager.GradientNormalColor2;
    cText = Enabled ? ThemeManager.ForeColor : Color.FromKnownColor(KnownColor.ControlDark);

    // Hover/focus states override the normal gradient and border colors.
    if (Enabled && (_isHovered || Focused))
    {
        if (_isHovered && Focused)
        {
            c1 = ThemeManager.GradientFocusHoverColor1;
            c2 = ThemeManager.GradientFocusHoverColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else if (Focused)
        {
            c1 = ThemeManager.GradientFocusColor1;
            c2 = ThemeManager.GradientFocusColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else
        {
            c1 = ThemeManager.GradientHoverColor1;
            c2 = ThemeManager.GradientHoverColor2;
            cText = ThemeManager.SelectedTextColor;
        }
    }

    if (_overrideForeColor != Color.Empty)
    {
        cText = _overrideForeColor;
    }

    // Clear slightly beyond the client area so no stale edge pixels remain.
    Rectangle rc = ClientRectangle;
    rc.Inflate(2, 2);
    using (Brush b = new SolidBrush(ThemeManager.BackColor))
    {
        g.FillRectangle(b, rc);
    }

    // Gradient fill and border over the client area.
    rc = ClientRectangle;
    rc.Width -= 1;
    rc.Height -= 1;
    using (Pen p = new Pen(cb, pw))
    using (Brush b = new LinearGradientBrush(rc, c1, c2, 90))
    {
        g.FillRectangle(b, rc);
        g.DrawRectangle(p, rc);
    }

    // Content area: text on the left, 12px arrow strip on the right.
    rc = new Rectangle(ClientRectangle.Left + 2, ClientRectangle.Top + 2, ClientRectangle.Width - 6, ClientRectangle.Height - 6);
    Rectangle rcText = new Rectangle(rc.Left, rc.Top, rc.Width - 12, rc.Height);
    Rectangle rcArrow = new Rectangle(rcText.Right, rc.Top, 12, rc.Height);

    // FIX: StringFormat is IDisposable (wraps a GDI+ handle); the original leaked one per paint.
    using (Brush b = new SolidBrush(cText))
    using (StringFormat sf = new StringFormat())
    {
        sf.Alignment = StringAlignment.Near;
        sf.LineAlignment = StringAlignment.Center;
        sf.Trimming = StringTrimming.EllipsisCharacter;
        //sf.FormatFlags = StringFormatFlags.NoWrap;

        FontFamily ff = SelectedItem as FontFamily;
        if (ff != null)
        {
            string text = ff.Name;
            // FIX: the original created a Font on every paint and never disposed it,
            // leaking a GDI handle each time — wrap it in a using.
            using (Font fDraw = new System.Drawing.Font(ff, this.Font.SizeInPoints, this.Font.Style, GraphicsUnit.Point))
            {
                g.DrawString(text, fDraw, b, rcText, sf);
            }
        }
    }

    // Drop-down arrow.
    using (GraphicsPath gp = ImageProcessing.GenerateCenteredArrow(rcArrow))
    using (Brush b = new SolidBrush(cText))
    using (Pen p = new Pen(b, 1))
    {
        g.FillPath(b, gp);
        g.DrawPath(p, gp);
    }
}
/// <summary>
/// Creates the video pipeline, sharing the event aggregator with a newly constructed
/// image-processing stage.
/// </summary>
/// <param name="eventAggregator">Shared event aggregator for pipeline messages.</param>
public VideoProcessing(EventAggregator eventAggregator)
{
    this.eventAggregator = eventAggregator;
    imageProcessing = new ImageProcessing(eventAggregator);
}
/// <summary>
/// Paints the flat-themed color combo box: background gradient, border, a swatch of
/// the selected color with its name in a contrasting color, and the drop-down arrow.
/// </summary>
/// <param name="g">Target graphics surface.</param>
protected override void PaintFlatControlBorder(Graphics g)
{
    int pw = 1;
    Color c1 = Color.Empty, c2 = Color.Empty, cb = Color.Empty, cText = Color.Empty;
    c1 = Enabled ? ThemeManager.GradientNormalColor1 : ThemeManager.BackColor;
    c2 = Enabled ? ThemeManager.GradientNormalColor2 : ThemeManager.BackColor;
    cb = Enabled ? ThemeManager.BorderColor : ThemeManager.GradientNormalColor2;
    cText = Enabled ? ThemeManager.ForeColor : Color.FromKnownColor(KnownColor.ControlDark);

    // Hover/focus states override the normal gradient and border colors.
    if (Enabled && (_isHovered || Focused))
    {
        if (_isHovered && Focused)
        {
            c1 = ThemeManager.GradientFocusHoverColor1;
            c2 = ThemeManager.GradientFocusHoverColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else if (Focused)
        {
            c1 = ThemeManager.GradientFocusColor1;
            c2 = ThemeManager.GradientFocusColor2;
            cb = ThemeManager.FocusBorderColor;
            //pw = 2;
        }
        else
        {
            c1 = ThemeManager.GradientHoverColor1;
            c2 = ThemeManager.GradientHoverColor2;
            cText = ThemeManager.SelectedTextColor;
        }
    }

    if (_overrideForeColor != Color.Empty)
    {
        cText = _overrideForeColor;
    }

    // Selected color swatch.
    // NOTE(review): "> 0" skips the item at index 0 — confirm index 0 is a deliberate
    // "no color" placeholder rather than an off-by-one (">= 0").
    Color c = Color.Empty;
    if (this.SelectedIndex > 0)
    {
        c = (Color)this.Items[this.SelectedIndex];
    }
    Color cString = ColorHelper.GetContrastingColor(c);

    // Clear slightly beyond the client area so no stale edge pixels remain.
    Rectangle rc = ClientRectangle;
    rc.Inflate(2, 2);
    using (Brush b = new SolidBrush(ThemeManager.BackColor))
    {
        g.FillRectangle(b, rc);
    }

    // Gradient fill and border over the client area.
    rc = ClientRectangle;
    rc.Width -= 1;
    rc.Height -= 1;
    using (Pen p = new Pen(cb, pw))
    using (Brush b = new LinearGradientBrush(rc, c1, c2, 90))
    {
        g.FillRectangle(b, rc);
        g.DrawRectangle(p, rc);
    }

    // Content area: swatch + name on the left, 12px arrow strip on the right.
    rc = new Rectangle(ClientRectangle.Left + 2, ClientRectangle.Top + 2, ClientRectangle.Width - 6, ClientRectangle.Height - 6);
    Rectangle rcText = new Rectangle(rc.Left, rc.Top, rc.Width - 12, rc.Height);
    Rectangle rcArrow = new Rectangle(rcText.Right, rc.Top, 12, rc.Height);

    // FIX: StringFormat is IDisposable (wraps a GDI+ handle); the original leaked one per paint.
    using (Brush b = new SolidBrush(cString))
    using (Brush b2 = new SolidBrush(c))
    using (StringFormat sf = new StringFormat())
    {
        sf.Alignment = StringAlignment.Near;
        sf.LineAlignment = StringAlignment.Center;
        sf.Trimming = StringTrimming.EllipsisCharacter;
        g.FillRectangle(b2, rcText);
        g.DrawString((c == Color.Empty) ? "" : c.Name, this.Font, b, rcText, sf);
    }

    // Drop-down arrow.
    using (GraphicsPath gp = ImageProcessing.GenerateCenteredArrow(rcArrow))
    using (Brush b = new SolidBrush(cText))
    using (Pen p = new Pen(b, 1))
    {
        g.FillPath(b, gp);
        g.DrawPath(p, gp);
    }
}
/// <summary>
/// Gets inventory/equip image for specified item.
/// Image will be cached based on material and hand for faster subsequent fetches.
/// Animated item images do not support dyes.
/// </summary>
/// <param name="item">Item to fetch image for.</param>
/// <param name="removeMask">Removes mask index (e.g. around helmets) from final image.</param>
/// <param name="forPaperDoll">Image is for paper doll.</param>
/// <param name="allowAnimation">Read animated textures.</param>
/// <returns>ImageData.</returns>
public ImageData GetItemImage(DaggerfallUnityItem item, bool removeMask = false, bool forPaperDoll = false, bool allowAnimation = false)
{
    // Get colour
    int color = (int)item.dyeColor;

    // Get archive and record indices
    int archive = item.InventoryTextureArchive;
    int record = item.InventoryTextureRecord;

    // Paper doll handling
    if (forPaperDoll)
    {
        // 1H Weapons in right hand need record + 1
        if (item.ItemGroup == ItemGroups.Weapons && item.EquipSlot == EquipSlots.RightHand)
        {
            if (ItemEquipTable.GetItemHands(item) == ItemHands.Either)
            {
                record += 1;
            }
        }
    }
    else
    {
        // Katanas need +1 for inventory image as they use right-hand image instead of left
        if (item.IsOfTemplate(ItemGroups.Weapons, (int)Weapons.Katana))
        {
            record += 1;
        }
    }

    // Use world texture archive if inventory texture not set
    // Examples are gold pieces and wayrest painting
    if (archive == 0 && record == 0)
    {
        archive = item.ItemTemplate.worldTextureArchive;
        record = item.ItemTemplate.worldTextureRecord;
    }

    // Get unique key
    int key = MakeImageKey(color, archive, record, removeMask);

    // Get existing icon if in cache
    // FIX: single dictionary lookup via TryGetValue instead of ContainsKey + indexer.
    ImageData cachedImage;
    if (itemImages.TryGetValue(key, out cachedImage))
    {
        return cachedImage;
    }

    // Load image data
    string filename = TextureFile.IndexToFileName(archive);
    ImageData data = ImageReader.GetImageData(filename, record, 0, true, false, allowAnimation);
    if (data.type == ImageTypes.None)
    {
        throw new Exception("GetItemImage() could not load image data.");
    }

    // Fix items with known incorrect paper doll offsets
    if (archive == 237 && (record == 52 || record == 54))
    {
        // "Short shirt" template index 202 variants 2 and 5 for human female
        data.offset = new DaggerfallConnect.Utility.DFPosition(237, 43);
    }

    // Remove mask if requested
    if (removeMask)
    {
        data.dfBitmap = ImageProcessing.ChangeMask(data.dfBitmap);
    }

    // Change dye or just update texture
    ItemGroups group = item.ItemGroup;
    DyeColors dye = (DyeColors)color;
    if (group == ItemGroups.Weapons || group == ItemGroups.Armor)
    {
        data = ChangeDye(data, dye, DyeTargets.WeaponsAndArmor);
    }
    else if (item.ItemGroup == ItemGroups.MensClothing || item.ItemGroup == ItemGroups.WomensClothing)
    {
        data = ChangeDye(data, dye, DyeTargets.Clothing);
    }
    else
    {
        ImageReader.UpdateTexture(ref data);
    }

    // Add to cache
    itemImages.Add(key, data);

    return data;
}
/// <summary>
/// Edge-detection pass: Gaussian blur, Sobel gradients, gradient magnitude/orientation,
/// then non-maximum suppression — the first stages of a Canny-style detector.
/// NOTE(review): every intermediate stage is written to hard-coded C:\imageprocessing\*.jpg
/// files as a side effect — presumably debug output; confirm before running in production.
/// </summary>
/// <param name="input">Input image as a single-channel matrix.</param>
/// <returns>Matrix holding gradient magnitudes only at local maxima along the gradient direction.</returns>
public Matrix Apply(Matrix input)
{
    // 5x5 Gaussian kernel, sigma 0.6. ("guassian" spelling kept from the original.)
    var guassian = ImageFilters.Gaussian(0.6f, 5);
    var r = new ImageData(input.Width, input.Height, PixelFormats.Gray8);
    var blurred = ImageProcessing.Convolve(input, guassian);
    r[0] = ImageProcessing.Scale(blurred, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\blurred.jpg");

    // Sobel horizontal-gradient kernel.
    var gx = ImageProcessing.Convolve(input, new float[,] { { -1, 0, 1 }, { -2, 0, 2 }, { -1, 0, 1 } });
    r[0] = ImageProcessing.Scale(gx, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gx.jpg");

    // Sobel vertical-gradient kernel.
    var gy = ImageProcessing.Convolve(input, new float[,] { { 1, 2, 1 }, { 0, 0, 0 }, { -1, -2, -1 } });
    r[0] = ImageProcessing.Scale(gy, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gy.jpg");

    // Gradient magnitude = sqrt(gx^2 + gy^2).
    var gradient = Matrix.Sqrt(Matrix.Pow(gx, 2) + Matrix.Pow(gy, 2));
    r[0] = ImageProcessing.Scale(gradient, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\gradient.jpg");

    // Gradient orientation in degrees, folded into [0, 180).
    var angle = new Matrix(input.Width, input.Height);
    for (var x = 1; x < gradient.Width - 1; x++)
    {
        for (var y = 1; y < gradient.Height - 1; y++)
        {
            var orientation = (float)(Math.Atan2(gy[x, y], gx[x, y]) * 180f / Math.PI);
            if (orientation < 0)
            {
                orientation += 180f;
            }
            angle[x, y] = orientation;
        }
    }
    r[0] = ImageProcessing.Scale(angle, 0, 255);
    r.Save<JpegBitmapEncoder>(@"C:\imageprocessing\angle.jpg");

    // Non-maximum suppression: keep a pixel only if it is a strict local maximum along
    // its gradient direction (quantized to one of four directions by orientation band).
    // NOTE(review): the N-S band compares vertical neighbours and E-W compares horizontal
    // ones; verify the axis convention matches Matrix's [x, y] indexing.
    var result = new Matrix(input.Width, input.Height);
    for (var x = 1; x < gradient.Width - 1; x++)
    {
        for (var y = 1; y < gradient.Height - 1; y++)
        {
            var orientation = angle[x, y];
            var val = gradient[x, y];
            // N-S
            if (orientation <= 22.5 || orientation >= 157.5)
            {
                if (gradient[x, y] > gradient[x, y - 1] && gradient[x, y] > gradient[x, y + 1])
                {
                    result[x, y] = val;
                }
            }
            // E-W
            if (orientation >= 67.5 && orientation <= 112.5)
            {
                if (gradient[x, y] > gradient[x - 1, y] && gradient[x, y] > gradient[x + 1, y])
                {
                    result[x, y] = val;
                }
            }
            // NE-SW
            if (orientation >= 22.5 && orientation <= 67.5)
            {
                if (gradient[x, y] > gradient[x + 1, y - 1] && gradient[x, y] > gradient[x - 1, y + 1])
                {
                    result[x, y] = val;
                }
            }
            // SE-NW
            if (orientation <= 157.5 && orientation >= 112.5)
            {
                if (gradient[x, y] > gradient[x - 1, y - 1] && gradient[x, y] > gradient[x + 1, y + 1])
                {
                    result[x, y] = val;
                }
            }
        }
    }
    return(result);
}
/// <summary>
/// Processes a single image from the batch: calculates its stats via
/// ImageProcessing.CalculateImageStatsData, persists the GrIxYRGB results to XML,
/// records timing to the performance-counters file, and updates the matching entry
/// in lStatsCalculation with either the computed values or an Error state.
/// NOTE(review): totalFilesProcessed is incremented with Interlocked, so this is
/// presumably called concurrently; lStatsCalculation.Find/updates are not visibly
/// synchronized — confirm the list is safe to mutate from multiple workers.
/// </summary>
/// <param name="srcData">Work item describing the image file and its collected stats.</param>
private void ProcessImage(ImageStatsCollectingData srcData)
{
    // Progress bookkeeping (thread-safe increment).
    Interlocked.Increment(ref totalFilesProcessed);
    int perc = Convert.ToInt32(100.0d * (double)totalFilesProcessed / (double)totalFilesCountToProcess);
    Console.WriteLine(DateTime.Now.ToString("s") + " : " + perc + "% : started processing file " + Environment.NewLine + srcData.filename);

    // Optional parameters handed to the stats calculation, including a running stopwatch
    // so the callee can attribute elapsed time.
    Dictionary<string, object> optionalParameters = new Dictionary<string, object>();
    optionalParameters.Add("ImagesRoundMasksXMLfilesMappingList", ImagesRoundMasksXMLfilesMappingList);
    Stopwatch sw = new Stopwatch();
    sw.Start();
    optionalParameters.Add("Stopwatch", sw);
    optionalParameters.Add("logFileName", errorLogFilename);

    ImageStatsDataCalculationResult currImageProcessingResult = ImageProcessing.CalculateImageStatsData(srcData.filename, optionalParameters);
    currImageProcessingResult.stopwatch.Stop();

    if (currImageProcessingResult.calcResult)
    {
        // Success: log wall-clock and CPU time, persist stats, mark the work item finished.
        string currentFullFileName = currImageProcessingResult.imgFilename;
        string strPerfCountersData = currentFullFileName + ";" +
            currImageProcessingResult.stopwatch.ElapsedMilliseconds + ";" +
            (currImageProcessingResult.procTotalProcessorTimeEnd -
             currImageProcessingResult.procTotalProcessorTimeStart).TotalMilliseconds +
            Environment.NewLine;
        ServiceTools.logToTextFile(strPerformanceCountersStatsFile, strPerfCountersData, true);

        //string strImageGrIxMedianP5DataFileName =
        //    ConventionalTransitions.ImageGrIxMedianP5DataFileName(currentFullFileName, imageMP5statsXMLdataFilesDirectory);
        //ServiceTools.WriteObjectToXML(currImageProcessingResult.mp5Result, strImageGrIxMedianP5DataFileName);

        string strImageGrIxYRGBDataFileName =
            ConventionalTransitions.ImageGrIxYRGBstatsDataFileName(currentFullFileName, imageYRGBstatsXMLdataFilesDirectory, true, currPath2Process);
        ServiceTools.WriteObjectToXML(currImageProcessingResult.grixyrgbStatsData, strImageGrIxYRGBDataFileName);

        ImageStatsCollectingData foundDataObj = lStatsCalculation.Find(obj => obj.filename == currentFullFileName);
        foundDataObj.State = ImageStatsCollectingState.Finished;
        foundDataObj.GrIxMedianValue = currImageProcessingResult.mp5Result.GrIxStatsMedian;
        foundDataObj.GrIxPerc5Value = currImageProcessingResult.mp5Result.GrIxStatsPerc5;
        Console.WriteLine(DateTime.Now.ToString("s") + " : finished processing file " + Environment.NewLine + currentFullFileName);
    }
    else
    {
        // Failure: mark the work item errored and try to append details to the error log.
        string currentFullFileName = currImageProcessingResult.imgFilename;
        ImageStatsCollectingData foundDataObj = lStatsCalculation.Find(obj => obj.filename == currentFullFileName);
        foundDataObj.State = ImageStatsCollectingState.Error;
        Console.WriteLine("ERROR processing file " + Path.GetFileName(currentFullFileName));
        try
        {
            //report full error to error log file
            #region report error
#if (DEBUG && MONO)
            ServiceTools.logToTextFile(errorLogFilename,
                "Error processing file: " + Environment.NewLine + currentFullFileName + Environment.NewLine +
                "messages: " + ServiceTools.GetExceptionMessages(currImageProcessingResult.exception) +
                Environment.NewLine + "Stack trace: " + Environment.NewLine + Environment.StackTrace +
                Environment.NewLine + Environment.NewLine, true, true);
#else
#if MONO
            ServiceTools.logToTextFile(errorLogFilename,
                "Error processing file: " + Environment.NewLine + currentFullFileName + Environment.NewLine +
                "messages: " + ServiceTools.GetExceptionMessages(currImageProcessingResult.exception) +
                Environment.NewLine + "Stack trace: " + Environment.NewLine + Environment.StackTrace +
                Environment.NewLine + Environment.NewLine, true, true);
#else
            ServiceTools.logToTextFile(errorLogFilename,
                "Error processing file: " + Environment.NewLine + currentFullFileName + Environment.NewLine +
                "message: " + ServiceTools.GetExceptionMessages(currImageProcessingResult.exception) +
                Environment.NewLine + ServiceTools.CurrentCodeLineDescription() + Environment.NewLine +
                "Stack trace: " + Environment.NewLine + Environment.StackTrace + Environment.NewLine +
                Environment.NewLine, true, true);
#endif
#endif
            #endregion report error
        }
        catch (Exception ex)
        {
            // Deliberate best-effort: a failure while writing the error log is swallowed
            // so one bad file cannot abort the batch. NOTE(review): 'ex' is unused.
            return;
        }
    }
}
/// <summary>
/// Custom-paints the themed button: gradient face + border chosen from the current
/// Enabled/hover/focus state, then either the assigned Image (centered, clamped to
/// the client area) or the caption Text (per TextAlign), plus an optional drop-down
/// arrow region when ShowDropDown is set.
/// </summary>
protected override void OnPaint(PaintEventArgs e) {
    using (SmoothGraphics sg = SmoothGraphics.New(e.Graphics, this.ClientRectangle)) {
        int pw = 1; // border pen width
        Color c1 = Color.Empty, c2 = Color.Empty, cb = Color.Empty, cText = Color.Empty;
        // Base colors for the normal (or disabled) state.
        c1 = Enabled ? ThemeManager.GradientNormalColor1 : ThemeManager.BackColor;
        c2 = Enabled ? ThemeManager.GradientNormalColor2 : ThemeManager.BackColor;
        cb = Enabled ? ThemeManager.BorderColor : ThemeManager.GradientNormalColor2;
        cText = Enabled ? ThemeManager.ForeColor : Color.FromKnownColor(KnownColor.ControlDark);
        // Hover/focus states override the base colors.
        if (Enabled && (_isHovered || Focused)) {
            if (_isHovered && Focused) {
                c1 = ThemeManager.GradientFocusHoverColor1;
                c2 = ThemeManager.GradientFocusHoverColor2;
                cb = ThemeManager.FocusBorderColor;
                //pw = 2;
            } else if (Focused) {
                c1 = ThemeManager.GradientFocusColor1;
                c2 = ThemeManager.GradientFocusColor2;
                cb = ThemeManager.FocusBorderColor;
                //pw = 2;
            } else {
                c1 = ThemeManager.GradientHoverColor1;
                c2 = ThemeManager.GradientHoverColor2;
                cText = ThemeManager.SelectedTextColor;
            }
        }
        // Explicit per-control overrides win over all theme-derived colors.
        if (_overrideBackColor != Color.Empty) {
            c1 = c2 = _overrideBackColor;
        }
        if (_overrideForeColor != Color.Empty) {
            cText = _overrideForeColor;
        }
        // Clear one pixel past the client area with the theme background.
        Rectangle rcb = ClientRectangle;
        rcb.Inflate(1, 1);
        using (Brush b = new SolidBrush(ThemeManager.BackColor))
        using (Pen p = new Pen(b, 4)) { // NOTE(review): this pen is created but never used
            sg.Graphics.FillRectangle(b, rcb);
        }
        // Shrink the face rectangle an extra pixel while pressed for a "pushed" look.
        Rectangle rc = ClientRectangle;
        if (_isMouseDown || _isKeyDown) {
            rc = new Rectangle(2, 2, Width - 4, Height - 4);
        } else {
            rc = new Rectangle(1, 1, Width - 2, Height - 2);
        }
        // Gradient face + themed border.
        using (Brush b = new LinearGradientBrush(rc, c1, c2, 90f))
        using (Pen p = new Pen(cb, pw)) {
            sg.Graphics.FillRectangle(b, rc);
            sg.Graphics.DrawRectangle(p, rc);
        }
        if (this.Image != null) {
            // Image mode: center the image, clamping its drawn size to the client area.
            rc = ClientRectangle;
            Rectangle rcImage = new Rectangle(
                (rc.Size.Width - Image.Size.Width) / 2,
                (rc.Size.Height - Image.Size.Height) / 2,
                rc.Size.Width, rc.Size.Height);
            int l = rcImage.Left;
            int t = rcImage.Top;
            rcImage.Location = new Point(l, t);
            int w = Math.Min(Image.Width, rcImage.Size.Width);
            int h = Math.Min(Image.Height, rcImage.Size.Height);
            rcImage.Size = new System.Drawing.Size(w, h);
            sg.Graphics.DrawImage(Image, rcImage);
        } else {
            // Text mode: draw the caption honoring TextAlign, word-ellipsis trimming,
            // and mnemonic (&) underlines.
            using (Brush b = new SolidBrush(cText)) {
                StringFormat sf = new StringFormat();
                sf.Alignment = StringAlignments.FromContentAlignment(TextAlign).Alignment;
                sf.LineAlignment = StringAlignments.FromContentAlignment(TextAlign).LineAlignment;
                sf.Trimming = StringTrimming.EllipsisWord;
                //sf.FormatFlags = StringFormatFlags.NoWrap;
                sf.HotkeyPrefix = System.Drawing.Text.HotkeyPrefix.Show;
                Rectangle rcText = rc;
                if (ShowDropDown) {
                    // Reserve room on the right edge for the drop-down arrow.
                    rcText = new Rectangle(0, 0, this.Width - ArrowSize, this.Height);
                }
                sg.Graphics.DrawString(this.Text, this.Font, b, rcText, sf);
            }
            if (ShowDropDown) {
                // Drop-down arrow plus a vertical separator line on its left.
                Rectangle rcArrow = new Rectangle(this.Width - ArrowSize, 0, ArrowSize, this.Height);
                using (GraphicsPath gp = ImageProcessing.GenerateCenteredArrow(rcArrow))
                using (Brush b = new SolidBrush(cText))
                using (Pen p = new Pen(b, 1)) {
                    sg.Graphics.FillPath(b, gp);
                    sg.Graphics.DrawPath(p, gp);
                    Point p1 = new Point(this.Width - ArrowSize + 2, 2);
                    Point p2 = new Point(this.Width - ArrowSize + 2, this.Height - 4);
                    sg.Graphics.DrawLine(p, p1, p2);
                }
            }
        }
    }
}
public void Deregister(int pipelineId) { ImageProcessing.RemoveProcessor(pipelineId, _id); _id = -1; _registeredPipelineId = -1; }
/// <summary>
/// Updates an existing photographer from the posted form, optionally replacing the
/// profile picture (JPEG/JPG/PNG only). Returns JSON describing success/failure.
/// </summary>
public ActionResult EditPhotographer()
{
    // Require an authenticated session before allowing edits.
    if (Session["UserID"] == null && Session["UserName"] == null)
    {
        return(RedirectToAction("Login", "Login"));
    }
    try
    {
        if (ModelState.IsValid)
        {
            int PhotographerID = Convert.ToInt32(Request.Form["PhotographerID"]);
            tblPhotographer PGH = db.tblPhotographers.SingleOrDefault(c => c.PhotographerID == PhotographerID);
            // BUG FIX: SingleOrDefault can return null; previously this caused a
            // NullReferenceException (reported to the client as a generic "Error!").
            if (PGH == null)
            {
                return(Json(new { success = false, message = "Photographer not found." }, JsonRequestBehavior.AllowGet));
            }
            PGH.PhotographerName = Request.Form["PhotographerName"];
            PGH.Email = Request.Form["Email"];
            PGH.PhoneNumber = Request.Form["PhoneNumber"];
            PGH.Address = Request.Form["Address"];
            PGH.Salary = Convert.ToDecimal(Request.Form["Salary"]);
            PGH.IsActive = Request.Form["IsActive"] == "true";
            PGH.IsFreeLancer = Request.Form["IsFreeLancer"] == "true";
            if (Request.Files.Count > 0)
            {
                HttpPostedFileBase file = Request.Files[0];
                string fileName = file.FileName;
                string mimeType = file.ContentType;
                if (mimeType.ToLower() != "image/jpeg" && mimeType.ToLower() != "image/jpg" && mimeType.ToLower() != "image/png")
                {
                    return(Json(new { Formatwarning = true, message = "Profile pic format must be JPEG or JPG or PNG." }, JsonRequestBehavior.AllowGet));
                }
                #region Save And compress file
                //To save file, use SaveAs method
                file.SaveAs(Server.MapPath("~/PhotographerProfilePic/") + fileName);
                if (!ImageProcessing.InsertImages(Server.MapPath("~/PhotographerProfilePic/") + fileName))
                {
                    return(Json(new { success = false, message = "Error occur while uploading image." }, JsonRequestBehavior.AllowGet));
                }
                // BUG FIX: remove the previous profile picture only if it exists on disk
                // and is not the file we just saved (same name would delete the new upload).
                if (!string.IsNullOrEmpty(PGH.ProfilePic) && PGH.ProfilePic != fileName)
                {
                    string path = Server.MapPath("~/PhotographerProfilePic/" + PGH.ProfilePic);
                    if (System.IO.File.Exists(path))
                    {
                        System.IO.File.Delete(path);
                    }
                }
                #endregion
                PGH.ProfilePic = fileName;
            }
            PGH.UpdatedDate = DateTime.Now;
            db.Entry(PGH).State = EntityState.Modified;
            db.SaveChanges();
        }
        return(Json(new { success = true, message = "Record updated successfully" }, JsonRequestBehavior.AllowGet));
    }
    catch (Exception ex)
    {
        return(Json(new { success = false, message = "Error!" + ex.Message }, JsonRequestBehavior.AllowGet));
    }
}
/// <summary>
/// Renames a finished temporary output file back to its final name (with retries
/// while the file is still locked), then converts it to the format selected in the
/// outputFormat control.
/// </summary>
/// <param name="path">Path of the temporary output file.</param>
/// <param name="dontResize">Skip the resize step during post-processing.</param>
/// <param name="retryCount">Remaining rename attempts (200ms apart).</param>
public static async Task PostprocessingSingle(string path, bool dontResize = false, int retryCount = 10)
{
    // Strip the temporary suffix: plain ".tmp" drops 4 chars; other paths drop 8.
    // NOTE(review): the 8-char case presumably covers a double extension like ".xxx.tmp" — confirm.
    string newPath = "";
    if (Path.GetExtension(path) != ".tmp")
    {
        newPath = path.Substring(0, path.Length - 8);
    }
    else
    {
        newPath = path.Substring(0, path.Length - 4);
    }
    try
    {
        File.Move(path, newPath);
    }
    catch (Exception e) // An I/O error can appear if the file is still locked by python (?)
    {
        Logger.Log("Failed to move/rename! " + e.Message + "\n" + e.StackTrace);
        if (retryCount > 0)
        {
            await Task.Delay(200); // Wait 200ms and retry up to 10 times
            int newRetryCount = retryCount - 1;
            Logger.Log("Retrying - " + newRetryCount + " attempts left.");
            // BUG FIX: the retry was fire-and-forget (un-awaited), so this method's Task
            // completed before the retry chain did and retry exceptions were unobservable.
            await PostprocessingSingle(path, dontResize, newRetryCount);
        }
        else
        {
            Logger.ErrorMessage($"Failed to rename {Path.GetFileName(path)} and ran out of retries!", e);
        }
        return;
    }
    path = newPath;
    // Convert to the user-selected export format; at most one branch matches.
    if (outputFormat.Text == ImgExportMode.PNG.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.Png50, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.SameAsSource.ToStringTitleCase())
    {
        await ImageProcessing.ConvertImageToOriginalFormat(path, true, false, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.JPEG.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.Jpeg, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.WEBP.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.Weppy, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.BMP.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.BMP, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.TGA.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.TGA, dontResize);
    }
    if (outputFormat.Text == ImgExportMode.DDS.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessDDS(path);
    }
    if (outputFormat.Text == ImgExportMode.GIF.ToStringTitleCase())
    {
        await ImageProcessing.PostProcessImage(path, ImageProcessing.Format.GIF, dontResize);
    }
}
public ActionResult InsertPhotographer() { if (Session["UserID"] == null && Session["UserName"] == null) { return(RedirectToAction("Login", "Login")); } try { if (ModelState.IsValid) { tblPhotographer PHG = new tblPhotographer(); PHG.PhotographerName = Request.Form["PhotographerName"]; PHG.Email = Request.Form["Email"]; PHG.PhoneNumber = Request.Form["PhoneNumber"]; PHG.Address = Request.Form["Address"];; PHG.Salary = Convert.ToDecimal(Request.Form["Salary"]); PHG.IsActive = Request.Form["IsActive"] == "true" ? true : false; PHG.IsFreeLancer = Request.Form["IsFreelancer"] == "true" ? true : false; PHG.CreatedDate = DateTime.Now; if (ModelState.IsValid) { int fileSize = 0; string fileName = string.Empty; string mimeType = string.Empty; System.IO.Stream fileContent; if (Request.Files.Count > 0) { HttpPostedFileBase file = Request.Files[0]; fileSize = file.ContentLength; fileName = file.FileName; mimeType = file.ContentType; fileContent = file.InputStream; if (mimeType.ToLower() != "image/jpeg" && mimeType.ToLower() != "image/jpg" && mimeType.ToLower() != "image/png") { return(Json(new { Formatwarning = true, message = "Profile pic format must be JPEG or JPG or PNG." }, JsonRequestBehavior.AllowGet)); } #region Save And compress file //To save file, use SaveAs method file.SaveAs(Server.MapPath("~/PhotographerProfilePic/") + fileName); if (!ImageProcessing.InsertImages(Server.MapPath("~/PhotographerProfilePic/") + fileName)) { return(Json(new { success = false, message = "Error occur while uploading image." }, JsonRequestBehavior.AllowGet)); } #endregion } PHG.ProfilePic = fileName; } db.tblPhotographers.Add(PHG); db.SaveChanges(); } return(Json(new { success = true, message = "Record inserted" }, JsonRequestBehavior.AllowGet)); } catch (Exception ex) { return(Json(new { success = false, message = "Error!" + ex.Message }, JsonRequestBehavior.AllowGet)); } }
/// <summary>
/// Hierarchically clusters the density mesh: slices it at 30 descending thresholding
/// levels, tracks how connected components appear, grow and merge from slice to slice,
/// and accumulates the resulting class contours in foundClassesContours. When one
/// current-slice contour covers several tracked contours, the contested points are
/// attributed via the gradient field. Finally shows a colored preview of the classes.
/// </summary>
public void Clusterize()
{
    // Compute the gradient field of the density mesh ("grad5p(Y)" = 5-point gradient)
    // via the expression evaluator; used later to attach contested points to contours.
    ArithmeticsOnImages aoi = new ArithmeticsOnImages();
    aoi.dmY = dmDensityMesh;
    aoi.ExprString = "grad5p(Y)";
    aoi.RPNeval(true);
    List<DenseMatrix> lDMGradField = aoi.lDMRes;

    // Binary validity mask: 1.0 where conditionOnPoints holds, 0.0 elsewhere.
    DenseMatrix dmMask = dmDensityMesh.Copy();
    dmMask.MapIndexedInplace((r, c, dVal) =>
    {
        // r = y - perc5
        // c = x - median
        Point currPt = new Point(c, r);
        return((conditionOnPoints(currPt)) ? (1.0d) : (0.0d));
        //if (r > c) return 0.0d;
        //else return 1.0d;
    });
    Image<Gray, Byte> imgMask = ImageProcessing.grayscaleImageFromDenseMatrixWithFixedValuesBounds(dmMask, 0.0d, 1.0d);
    // imgMask = imgMask.Flip(FlipType.Vertical);
    imgMask = imgMask.Flip(FLIP.VERTICAL);

    // filter out small values - ?
    // extract the classes
    List<ConnectedObjectsAtASlice> lSlicesData = new List<ConnectedObjectsAtASlice>();
    double dthresholdingMaxValue = dmDensityMesh.Values.Max();
    //double dthresholdingMinValue = dmSmoothed.Values.Min();
    double dthresholdingMinValue = 0.0d;
    // 30 slicing levels between the max and min thresholding values.
    double dthresholdingDiscrete = (dthresholdingMaxValue - dthresholdingMinValue) / 30.0d;
    for (double dThresholding = dthresholdingMaxValue;
         dThresholding > dthresholdingMinValue - dthresholdingDiscrete;
         dThresholding -= dthresholdingDiscrete)
    {
        ConnectedObjectsAtASlice corrSliceObj = new ConnectedObjectsAtASlice(dmDensityMesh, dmDensityMeshXcoord, dmDensityMeshYcoord, dThresholding);
        corrSliceObj.DetectConnectedObjects();
        //ServiceTools.ShowPicture(corrSliceObj.previewImage, "thresholding value = " + dThresholding.ToString("e"));
        lSlicesData.Add(corrSliceObj);
    }

    // Seed the tracked contours with the components of the topmost (highest-threshold) slice.
    ConnectedObjectsAtASlice prevSlice = lSlicesData[0];
    foundClassesContours.AddRange(prevSlice.edgeContoursList);
    foreach (ConnectedObjectsAtASlice currSlice in lSlicesData)
    {
        if (lSlicesData.IndexOf(currSlice) == 0)
        {
            continue; // skip the topmost slice - it seeded the list above
        }
        //List<Tuple<Contour<Point>, Contour<Point>>> currSliceCoveringContours =
        //    new List<Tuple<Contour<Point>, Contour<Point>>>();
        List<Tuple<Contour<Point>, Contour<Point>>> currSliceCoveringContours =
            new List<Tuple<Contour<Point>, Contour<Point>>>();
        // Item1 - the inner contour, from the previous slice
        // Item2 - the outer contour, from the current slice
        foreach (Contour<Point> caughtCont in foundClassesContours)
        {
            Contour<Point> coveringCaughtCont = currSlice.FindContourContainingSample(caughtCont);
            currSliceCoveringContours.Add(new Tuple<Contour<Point>, Contour<Point>>(caughtCont, coveringCaughtCont));
        }
        // Add contours that have just appeared and were not visible on earlier slices -
        // but only while the allowed cluster count still permits it.
        // Otherwise wait until they merge into one of the newly expanded contours.
        foreach (Contour<Point> newContour in currSlice.edgeContoursList)
        {
            if ((currSliceCoveringContours.Find(tpl => (tpl.Item2 == newContour)) == null) &&
                (currSliceCoveringContours.Count() < maxClustersCount))
            {
                currSliceCoveringContours.Add(new Tuple<Contour<Point>, Contour<Point>>(newContour, newContour));
            }
        }
        // Handle the case where one new contour covers more than one previous contour.
        List<IGrouping<Contour<Point>, Tuple<Contour<Point>, Contour<Point>>>> groups =
            new List<IGrouping<Contour<Point>, Tuple<Contour<Point>, Contour<Point>>>>(currSliceCoveringContours.GroupBy(tpl => tpl.Item2));
        if (groups.Count(grp => (grp.Count() > 1)) > 0)
        {
            // There are current-slice contours containing more than one previous-slice contour.
            foreach (IGrouping<Contour<Point>, Tuple<Contour<Point>, Contour<Point>>> currGroup in groups)
            {
                if (currGroup.Count() == 1)
                {
                    // Simple growth: replace the tracked contour with its covering contour.
                    Tuple<Contour<Point>, Contour<Point>> contourTuple = currGroup.First();
                    foundClassesContours.Remove(contourTuple.Item1);
                    foundClassesContours.Add(contourTuple.Item2);
                }
                else
                {
                    // currGroup - a group of contour tuples where
                    //   Item1 - inner, from the previous slice
                    //   Item2 - outer, from the current slice.
                    // Points lying outside the previous slice's contours must be assigned
                    // to "their" contour. Try the gradient direction: assign each point to
                    // the contour its gradient vector points at.
                    Contour<Point> currCoveringContour = currGroup.Key;
                    // Item2 - outer, from the current slice - see the GroupBy key above
                    Rectangle currCoveringContourBoundingRectangle = currCoveringContour.BoundingRectangle;
                    Image<Gray, byte> tmpImg1 = new Image<Gray, byte>(new Size(currCoveringContourBoundingRectangle.Right, currCoveringContourBoundingRectangle.Bottom));
                    tmpImg1.Draw(currCoveringContour, white, -1);
                    // Subtract every previous-slice contour from the covering contour's fill.
                    foreach (Tuple<Contour<Point>, Contour<Point>> tpl in currGroup)
                    {
                        Contour<Point> excludingCntr = tpl.Item1;
                        Image<Gray, byte> tmpExcl = tmpImg1.CopyBlank();
                        tmpExcl.Draw(excludingCntr, white, -1);
                        tmpImg1 = tmpImg1 - tmpExcl;
                    }
                    // In tmpImg1 only the points that still need classification remain filled.
                    List<Point> lPointsToClassify = new List<Point>();
                    for (int x = 0; x < tmpImg1.Width; x++)
                    {
                        for (int y = 0; y < tmpImg1.Height; y++)
                        {
                            Point currPt = new Point(x, y);
                            if (tmpImg1[currPt].Equals(white))
                            {
                                lPointsToClassify.Add(currPt);
                            }
                        }
                    }
                    // One bucket of assigned points per competing contour.
                    List<List<Point>> llArraysOfPointsAdding = new List<List<Point>>();
                    foreach (Tuple<Contour<Point>, Contour<Point>> tpl in currGroup)
                    {
                        llArraysOfPointsAdding.Add(new List<Point>());
                    }
                    List<Contour<Point>> lContoursOfTheCurrGroup =
                        (new List<Tuple<Contour<Point>, Contour<Point>>>(currGroup.ToArray())).ConvertAll(tpl => tpl.Item1);
                    List<PointD> lPtdMassCenters = lContoursOfTheCurrGroup.ConvertAll(cntr => cntr.MassCenter());
                    // Polygon through the competing contours' mass centers.
                    Contour<Point> themassCentersPolygon = new Contour<Point>(new MemStorage());
                    themassCentersPolygon.PushMulti(lPtdMassCenters.ConvertAll<Point>(ptd => ptd.Point()).ToArray(), BACK_OR_FRONT.BACK);
                    //themassCentersPolygon.Push(lPtdMassCenters.ConvertAll<Point>(ptd => ptd.Point()).ToArray());
                    Image<Gray, byte> tmpImg = imgMask.CopyBlank();
                    tmpImg.Draw(themassCentersPolygon, white, -1);
                    themassCentersPolygon = tmpImg.DetectContours()[0];
                    // Attribute each contested point to one of the competing contours
                    // (gradient-directed); -1 means "could not be attributed".
                    foreach (Point currPtToClassify in lPointsToClassify)
                    {
                        int cntrToAddPointTo = AttachPointToOneOfConcurrentContours(
                            lContoursOfTheCurrGroup, lPtdMassCenters, themassCentersPolygon, currPtToClassify, lDMGradField);
                        if (cntrToAddPointTo == -1)
                        {
                            continue;
                        }
                        else
                        {
                            llArraysOfPointsAdding[cntrToAddPointTo].Add(currPtToClassify);
                        }
                    }
                    // Points distributed; now rebuild the contours taking the added points into account.
                    List<Image<Gray, byte>> lImagesToDetectNewContours = new List<Image<Gray, byte>>();
                    foreach (Tuple<Contour<Point>, Contour<Point>> tpl in currGroup)
                    {
                        Image<Gray, byte> tmpImgCurrCont = tmpImg1.CopyBlank();
                        tmpImgCurrCont.Draw(tpl.Item1, white, -1);
                        lImagesToDetectNewContours.Add(tmpImgCurrCont);
                    }
                    for (int cntIdx = 0; cntIdx < currGroup.Count(); cntIdx++)
                    {
                        foreach (Point pt in llArraysOfPointsAdding[cntIdx])
                        {
                            lImagesToDetectNewContours[cntIdx][pt.Y, pt.X] = white;
                        }
                        #region // obsolete
                        //Contour<Point> cnt1 =
                        //    lImagesToDetectNewContours[cntIdx].FindContours(
                        //    Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                        //    Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST);
                        //List<Contour<Point>> lTmpCtrs = new List<Contour<Point>>();
                        //while (true)
                        //{
                        //    lTmpCtrs.Add(cnt1);
                        //    cnt1 = cnt1.HNext;
                        //    if (cnt1 == null)
                        //        break;
                        //}
                        #endregion // obsolete
                        // Keep only the largest of the resulting contours.
                        List<Contour<Point>> lTmpCtrs = lImagesToDetectNewContours[cntIdx].DetectContours();
                        foundClassesContours.Remove(currGroup.ElementAt(cntIdx).Item1);
                        double maxArea = lTmpCtrs.Max(cntr => cntr.Area);
                        int idxOfMaxAreaContour = lTmpCtrs.FindIndex(cntr => cntr.Area >= maxArea);
                        foundClassesContours.Add(lTmpCtrs[idxOfMaxAreaContour]);
                    }
                }
            }
        }
        else
        {
            // No merges on this slice: each tracked contour simply grew;
            // replace it with its covering contour from the current slice.
            foreach (Tuple<Contour<Point>, Contour<Point>> contourTuple in currSliceCoveringContours)
            {
                foundClassesContours.Remove(contourTuple.Item1);
                foundClassesContours.Add(contourTuple.Item2);
            }
        }
        //theLogWindow = ServiceTools.LogAText(theLogWindow,
        //    "processing thresholding value = " + currSlice.slicingThresholdingValue, true);
    }
    //theLogWindow = ServiceTools.LogAText(theLogWindow,
    //    Environment.NewLine +
    //    "========" + Environment.NewLine +
    //    "FINISHED" + Environment.NewLine +
    //    "========" + Environment.NewLine, true);

    // Preview: each found class contour filled with a random pastel color,
    // intersected with the validity mask computed above.
    Image<Gray, Byte> imgDataBinary = ImageProcessing.grayscaleImageFromDenseMatrixWithFixedValuesBounds(dmDensityMesh, 0.0d, 1.0d);
    Image<Bgr, byte> previewImage = imgDataBinary.CopyBlank().Convert<Bgr, Byte>();
    var colorGen = new RandomPastelColorGenerator();
    foreach (Contour<Point> currCntr in foundClassesContours)
    {
        Color currentColor = colorGen.GetNext();
        var currentColorBgr = new Bgr(currentColor);
        previewImage.Draw(currCntr, currentColorBgr, -1);
    }
    previewImage = previewImage.And(imgMask.Convert<Bgr, byte>());
    ServiceTools.ShowPicture(previewImage, "");
}
/// <summary>
/// Unity coroutine that initializes the terrain: loads the tile PAK (regular or
/// "G" variant), decodes every referenced tile bitmap, packs the regular and
/// overlay tiles into two texture atlases, caches the atlases + per-tile rects,
/// and finally invokes the supplied continuation.
/// </summary>
/// <param name="OnFinish">Continuation coroutine to run once initialization is done.</param>
public IEnumerator Init(Func<IEnumerator> OnFinish)
{
    if (this.IsCached())
    {
        // Everything already cached from a previous run - go straight to the continuation.
        yield return(OnFinish());
    }
    else
    {
        // Pick the PAK archive + palette matching the terrain tile set name.
        PakFile pak;
        if (TerrainTileFile.ToUpper() == "Tiles".ToUpper())
        {
            pak = new PakFile(Path.Combine(MechCommanderUnity.Instance.MCGPath, "TILES/TILES90.PAK"), MechCommanderUnity.Instance.FileManager.File(palettePath));
        }
        else if (TerrainTileFile.ToUpper() == "GTiles".ToUpper())
        {
            pak = new PakFile(Path.Combine(MechCommanderUnity.Instance.MCGPath, "TILES/GTILES90.PAK"), MechCommanderUnity.Instance.FileManager.File(GpalettePath));
        }
        else
        {
            throw (new Exception());
        }
        this.LstTiles = new Dictionary<int, MCBitmap>();
        this.LstTilesOV = new Dictionary<int, MCBitmap>();
        // Decode every referenced tile; regular tiles and overlay tiles are kept apart.
        for (int i = 0; i < LstTileIndexes.Count; i++)
        {
            var tiledata = pak.GetFileInner(LstTileIndexes[i]);
            if (tiledata == null)
            {
                continue;
            }
            MCBitmap bitmap;
            // NOTE(review): looks like a magic-byte check distinguishing overlay tiles
            // (68/78 = "DN"); the && (rather than ||) means BOTH bytes must differ to be
            // treated as a regular tile - confirm this is intended.
            if (tiledata[0] != 68 && tiledata[1] != 78)
            {
                var tile = new MCTileFile(tiledata);
                bitmap = tile.GetBitMap();
                if (bitmap == null)
                {
                    continue;
                }
                bitmap.Name = LstTileIndexes[i].ToString(); //this.TerrainTileFile.ToUpper() + "-" +
                LstTiles.Add(LstTileIndexes[i], bitmap);
            }
            else
            {
                var tile = new MCTileFileOverlay(tiledata);
                bitmap = tile.GetBitMap();
                if (bitmap == null)
                {
                    continue;
                }
                bitmap.Name = LstTileIndexes[i].ToString(); //this.TerrainTileFile.ToUpper() + "-" +
                LstTilesOV.Add(LstTileIndexes[i], bitmap);
            }
            // lstText.Add(ImageProcessing.MakeTexture2D(ref bitmap, pak.Palette));
        }
        yield return(null); // yield a frame after the decode loop
        MCBitmap atlas;
        MCBitmap atlasOV;
        Dictionary<string, Rect> DictRects;
        Dictionary<string, Rect> DictRectsOV;
        MechCommanderUnity.LogMessage("Starting Terrain Atlas Generation after Tiles");
        // try
        // {
        // Pack the regular tiles into one atlas; rect keys are tile indexes as strings.
        var result = ImageProcessing.CreateAtlas(LstTiles.Values.ToArray(), out atlas, out DictRects);
        yield return(null);
        this.CacheMap(atlas);
        DictTileInfo = new Dictionary<string, TileInfo>();
        foreach (var rect in DictRects)
        {
            var bitmap = LstTiles[int.Parse(rect.Key)];
            DictTileInfo.Add(rect.Key, new TileInfo()
            {
                rect = rect.Value,
                pivot = bitmap.Pivot
                //new Vector2((float)((double)bitmap.PivotX / bitmap.Width), 1f - (float)((double)bitmap.PivotY / bitmap.Height))
            });
        }
        this.CacheTileInfo(DictTileInfo);
        if (result)
        {
            MainText = ImageProcessing.MakeIndexedTexture2D(atlas);
            PalText = pak.Palette.ExportPaletteTexture();
            atlas.Dispose();
        }
        else
        {
            MechCommanderUnity.LogMessage("Error Packing file " + result);
        }
        // Same packing pass for the overlay tiles ("OV" cache suffix).
        var resultOv = ImageProcessing.CreateAtlas(LstTilesOV.Values.ToArray(), out atlasOV, out DictRectsOV);
        yield return(null);
        this.CacheMap(atlasOV, "OV");
        DictTileOVInfo = new Dictionary<string, TileInfo>();
        foreach (var rect in DictRectsOV)
        {
            var bitmap = LstTilesOV[int.Parse(rect.Key)];
            DictTileOVInfo.Add(rect.Key, new TileInfo()
            {
                rect = rect.Value,
                pivot = bitmap.Pivot
            });
        }
        this.CacheTileInfo(DictTileOVInfo, "OV");
        if (resultOv)
        {
            MainOVText = ImageProcessing.MakeIndexedTexture2D(atlasOV);
            atlasOV.Dispose();
        }
        else
        {
            MechCommanderUnity.LogMessage("Error Packing file " + resultOv);
        }
        // } catch (Exception e)
        // {
        //     MechCommanderUnity.LogMessage("Error Packing file " + e.Message);
        // // throw;
        // }
        // The individual tile bitmaps are no longer needed once atlased.
        foreach (var tile in LstTiles)
        {
            tile.Value.Dispose();
        }
        foreach (var tile in LstTilesOV)
        {
            tile.Value.Dispose();
        }
        MechCommanderUnity.LogMessage("Finish Init Terrain");
        yield return(OnFinish());
    }
}
void ExportPalette(bool Image = true, int i = 1) { var pal = MechCommanderUnity.Instance.ContentReader.ShapesPakFile.Palette; if (!Image) { string ExportPal = ""; for (int z = 0; z < 256; z++) { ExportPal += z.ToString() + " : " + pal.Get(z, true).ToString() + "\r\n"; } System.IO.File.WriteAllText("Palette" + ".txt", ExportPal); } { var bmpT = new MCBitmap(256, 1); for (int y = 0; y < 256; y++) { MCBitmap.SetPixel(bmpT, y, 0, (byte)y); } var PalText = ImageProcessing.MakeTexture2D(bmpT, MechCommanderUnity.Instance.ContentReader.ShapesPakFile.Palette); ImageProcessing.SaveTextureAsPng(PalText, "PAL"); } return; // var mccolors=pal.ExportPalette(); // int i = 4; var bmp = new MCBitmap(16 * i, 16 * i); for (int y = 0; y < 16 * i; y++) { for (int x = 0; x < 16 * i; x++) { int pos = (((y * bmp.Stride) / i) + (x / i)) * bmp.FormatWidth; MCBitmap.SetPixel(bmp, x, y, (byte)pos); } } string Export = ""; for (int y = 0; y < 16 * i; y++) { for (int x = 0; x < 16 * i; x++) { int pos = (((y * bmp.Stride) / i) + (x / i)) * bmp.FormatWidth; Export += bmp.Data[pos].ToString("D3") + ","; } Export += "\r\n"; } System.IO.File.WriteAllText("PAL" + i + ".txt", Export); var MainText = ImageProcessing.MakeTexture2D(bmp, MechCommanderUnity.Instance.ContentReader.ShapesPakFile.Palette); ImageProcessing.SaveTextureAsPng(MainText, "PAL" + i);//Assets/ //var MainText = ImageProcessing.MakeTexture2D(ref bmp); //ImageProcessing.SaveTextureAsPng(MainText, "PAL");//Assets/ }
/// <summary>
/// Updates an existing customer from the posted form, optionally replacing the
/// profile picture (JPEG/JPG/PNG only). Returns JSON describing success/failure;
/// rethrows wrapped exceptions on unexpected errors (original behavior).
/// </summary>
public ActionResult EditCustomer()
{
    // Require an authenticated studio session before allowing edits.
    if (Session["StudioID"] == null && Session["StudioName"] == null && Session["StudioPhoneNo"] == null)
    {
        return(RedirectToAction("Login", "Login"));
    }
    try
    {
        if (ModelState.IsValid)
        {
            int CustomerID = Convert.ToInt32(Request.Form["CustomerID"]);
            tblCustomer newCust = db.tblCustomers.SingleOrDefault(c => c.CustomerID == CustomerID);
            // BUG FIX: SingleOrDefault can return null; previously this caused a
            // NullReferenceException that surfaced as a wrapped generic exception.
            if (newCust == null)
            {
                return(Json(new { success = false, message = "Customer not found." }, JsonRequestBehavior.AllowGet));
            }
            newCust.CustomerName = Request.Form["CustomerName"];
            newCust.CustomerEmail = Request.Form["CustomerEmail"];
            newCust.PhoneNumber = Request.Form["PhoneNo"];
            newCust.IsActive = Request.Form["IsActive"] == "true";
            if (Request.Files.Count > 0)
            {
                HttpPostedFileBase file = Request.Files[0];
                string fileName = file.FileName;
                string mimeType = file.ContentType;
                if (mimeType.ToLower() != "image/jpeg" && mimeType.ToLower() != "image/jpg" && mimeType.ToLower() != "image/png")
                {
                    return(Json(new { Formatwarning = true, message = "Profile pic format must be JPEG or JPG or PNG." }, JsonRequestBehavior.AllowGet));
                }
                //WebImage img = new WebImage(file.InputStream);
                #region Save And compress file
                //To save file, use SaveAs method
                file.SaveAs(Server.MapPath("~/CustomerProfile/") + fileName);
                if (!ImageProcessing.InsertImages(Server.MapPath("~/CustomerProfile/") + fileName))
                {
                    return(Json(new { success = false, message = "Error occur while uploading image." }, JsonRequestBehavior.AllowGet));
                }
                // BUG FIX: remove the previous profile picture only if it exists on disk
                // and is not the file we just saved (same name would delete the new upload).
                if (!string.IsNullOrEmpty(newCust.ProfilePic) && newCust.ProfilePic != fileName)
                {
                    string path = Server.MapPath("~/CustomerProfile/" + newCust.ProfilePic);
                    if (System.IO.File.Exists(path))
                    {
                        System.IO.File.Delete(path);
                    }
                }
                #endregion
                newCust.ProfilePic = fileName;
            }
            newCust.UpdatedDate = DateTime.Now;
            db.Entry(newCust).State = EntityState.Modified;
            db.SaveChanges();
            return(Json(new { success = true, message = "Record updated successfully" }, JsonRequestBehavior.AllowGet));
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error occur while updating customer info." + ex.Message);
    }
    // Reached when ModelState is invalid.
    return(Json(new { success = false, message = "Error occur while updating record." }, JsonRequestBehavior.AllowGet));
}
public void WaterMark_Text_Test() { using (var _imgProcessing = new ImageProcessing()) { var _waterMarkedImage = _imgProcessing.Process( imagePath: _testFileName, waterMarkText: "karthik20522", waterMarkOpacity: 0.5 ); ImageHelper.SaveStream(_waterMarkedImage, "c:\\Temp\\test_watermarked.jpg"); Assert.IsTrue(File.Exists("c:\\Temp\\test_watermarked.jpg")); } }
public ActionResult InsertCustomer() { if (Session["StudioID"] == null && Session["StudioName"] == null && Session["StudioPhoneNo"] == null) { return(RedirectToAction("Login", "Login")); } try { if (ModelState.IsValid) { tblCustomer newCust = new tblCustomer(); newCust.CustomerName = Request.Form["CustomerName"]; newCust.CustomerEmail = Request.Form["CustomerEmail"]; newCust.PhoneNumber = Request.Form["PhoneNo"]; newCust.Password = Request.Form["Password"]; newCust.IsActive = Request.Form["IsActive"] == "true" ? true : false; newCust.CreatedDate = DateTime.Now; if (ModelState.IsValid) { int fileSize = 0; string fileName = string.Empty; string mimeType = string.Empty; System.IO.Stream fileContent; if (Request.Files.Count > 0) { HttpPostedFileBase file = Request.Files[0]; fileSize = file.ContentLength; fileName = file.FileName; mimeType = file.ContentType; fileContent = file.InputStream; if (mimeType.ToLower() != "image/jpeg" && mimeType.ToLower() != "image/jpg" && mimeType.ToLower() != "image/png") { return(Json(new { Formatwarning = true, message = "Profile pic format must be JPEG or JPG or PNG." }, JsonRequestBehavior.AllowGet)); } #region Save And compress file //To save file, use SaveAs method file.SaveAs(Server.MapPath("~/CustomerProfile/") + fileName); if (!ImageProcessing.InsertImages(Server.MapPath("~/CustomerProfile/") + fileName)) { return(Json(new { success = false, message = "Error occur while uploading image." }, JsonRequestBehavior.AllowGet)); } #endregion } newCust.ProfilePic = fileName; } db.tblCustomers.Add(newCust); db.SaveChanges(); } } catch (Exception ex) { return(Json(new { success = false, message = "Record not Inserted" }, JsonRequestBehavior.AllowGet)); } return(Json(new { success = true, message = "Record inserted" }, JsonRequestBehavior.AllowGet)); }
public SaturationControl(ImageProcessing ip) : base(ip) { _repo = ip; }
/// <summary> /// Get animations for current spellcast. /// This happens the first time a spell is cast and stored for re-casting. /// It's likely player will use a wide variety of spell types in normal play. /// </summary> void SetCurrentAnims(ElementTypes elementType, int border = 0, bool dilate = false) { // Attempt to get current anims if (castAnims.ContainsKey(elementType)) { currentAnimType = elementType; currentAnims = castAnims[elementType]; return; } // Load spellcast file string filename = WeaponBasics.GetMagicAnimFilename(elementType); string path = Path.Combine(DaggerfallUnity.Instance.Arena2Path, filename); CifRciFile cifFile = new CifRciFile(); if (!cifFile.Load(path, FileUsage.UseMemory, true)) { throw new Exception(string.Format("Could not load spell anims file {0}", path)); } // Load CIF palette cifFile.Palette.Load(Path.Combine(DaggerfallUnity.Instance.Arena2Path, cifFile.PaletteName)); // Load textures - spells have a single frame per record unlike weapons AnimationRecord[] animationRecords = new AnimationRecord[cifFile.RecordCount]; for (int record = 0; record < cifFile.RecordCount; record++) { Texture2D texture; if (!TextureReplacement.TryImportCifRci(filename, record, 0, false, out texture)) { // Get Color32 array DFSize sz; Color32[] colors = cifFile.GetColor32(record, 0, 0, border, out sz); // Dilate edges if (border > 0 && dilate) { ImageProcessing.DilateColors(ref colors, sz); } // Create Texture2D texture = new Texture2D(sz.Width, sz.Height, TextureFormat.ARGB32, false); texture.SetPixels32(colors); texture.Apply(true); } // Set filter mode and store in frames array if (texture) { texture.filterMode = (FilterMode)DaggerfallUnity.Settings.MainFilterMode; animationRecords[record].Texture = texture; animationRecords[record].Size = cifFile.GetSize(record); } } // Add frames array to dictionary castAnims.Add(elementType, animationRecords); // Use as current anims currentAnimType = elementType; currentAnims = animationRecords; }
public void WaterMark_Image_Test() { using (var _imgProcessing = new ImageProcessing()) { var _waterMarkedImage = _imgProcessing.Process( imagePath: _testFileName, resize:1024, waterMarkPath: _waterMarkFileName, waterMarkOpacity: 1, waterMarkPosition: WaterMarkPosition.BottomRight ); ImageHelper.SaveStream(_waterMarkedImage, "c:\\Temp\\test_watermarked.jpg"); Assert.IsTrue(File.Exists("c:\\Temp\\test_watermarked.jpg")); } }
private void 灰度图像膨胀ToolStripMenuItem_Click(object sender, EventArgs e) { if (pictureBox1.Image != null) { 灰度图像膨胀ToolStripMenuItem.Enabled = false; imgpr = new ImageProcessing(pictureBox1.Image as Bitmap); Task.Run(() => pictureBox1.Image = imgpr.GrayDilate()); 灰度图像膨胀ToolStripMenuItem.Enabled = true; #if nDEBUG MessageBox.Show("success!"); #endif } }