/// <summary>
/// Applies the filter chosen in the combo box to the loaded image and shows the result.
/// Shows an error dialog when no image has been loaded yet.
/// </summary>
private void ComboBox1_SelectedIndexChanged(object sender, EventArgs e)
{
    if (_img == null)
    {
        MessageBox.Show("No image has been selected, please select an image for continuing",
            "No Image", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    // Guard: the event can fire while nothing is selected, in which case
    // SelectedItem is null and the original code would throw.
    var filter = comboBox1.SelectedItem?.ToString();
    if (filter == null)
    {
        return;
    }

    switch (filter)
    {
        case "Negative":
            _filter = filter.ToLower();
            _newImg = new NegativeImage(_img).AddNegativeFilter();
            ShowImage(_newImg);
            break;
        case "Grayscale":
            _filter = filter.ToLower();
            _newImg = new GrayscaleImage(_img).AddGrayscaleFilter();
            ShowImage(_newImg);
            break;
        case "Blurred":
            _filter = filter.ToLower();
            _newImg = new BlurredImage(_img).AddBlurFilter();
            ShowImage(_newImg);
            break;
    }
}
/// <summary>
/// Detects a document in the photo, removes projective distortion, rotates the
/// result upright and returns the cropped document as a Bitmap.
/// </summary>
/// <param name="originalBmp">Source photograph containing the document.</param>
public static Bitmap RecognizeDocumentInImage(Bitmap originalBmp)
{
    var grayscale = new GrayscaleImage(originalBmp);
    // Edge detection runs on a compressed copy of the photo.
    var compressionBmp = BitmapProcessing.ImageCompression(originalBmp);
    var processedImage = ImageFilters.ImageFiltering(compressionBmp);
    // NOTE(review): judging by usage, Item1 holds the document's corner points
    // and Item2 the equations of its border lines — confirm against SearchSingularPoints.
    var resultSearch = SearchSingularPoints.SerchSPForImage(originalBmp, compressionBmp, processedImage);
    var spPoints = resultSearch.Item1;
    var equations = resultSearch.Item2;
    // Paint everything outside the border lines white (modifies grayscale in place).
    BitmapProcessing.SelectBackground(grayscale, equations);
    var widthAndHeight = MakeWidthAndHeightDocument(spPoints);
    var documentWidth = widthAndHeight.Item1;
    var documentHeight = widthAndHeight.Item2;
    var anglePoints = MakeAnglePoints(documentWidth, documentHeight, spPoints);
    // Homography maps the detected quadrilateral onto an axis-aligned rectangle.
    var H = GetMatrixHomography(spPoints, anglePoints);
    var inverseH = H.Inverse();
    var correctImage = ImageCorrection(inverseH, grayscale);
    var correctSpPoints = TransformPoints(inverseH, spPoints);
    var correctEquations = MakeEquationsLines(correctSpPoints);
    // Residual tilt of one border line after correction drives the final rotation.
    var angle = correctEquations[2].AngleDeviationOX();
    correctImage = BitmapProcessing.RotateGrayscaleImage(correctImage, angle);
    var document = ImageCutter.CutDocument(correctImage);
    return(BitmapProcessing.MakeBitmap(document.Width, document.Height, document.Colors));
    // Debug alternative: return the corrected image without cropping.
    //return BitmapProcessing.MakeBitmap(correctImage.Width, correctImage.Height, correctImage.Colors);
}
/// <summary>
/// Removes projective distortion by applying the inverse homography to every
/// pixel (forward mapping: each source pixel is written to its corrected position).
/// </summary>
/// <param name="inverseH">Inverse 3x3 homography matrix.</param>
/// <param name="grayscale">Grayscale image with highlighted document borders.</param>
private static GrayscaleImage ImageCorrection(Matrix <double> inverseH, GrayscaleImage grayscale)
{
    var width = grayscale.Width;
    var height = grayscale.Height;
    var result = new byte[width * height];
    // Hoist the loop-invariant matrix elements: Matrix<double> indexing is far
    // more expensive than reading a local, and the original read 9 elements per pixel.
    double h00 = inverseH[0, 0], h01 = inverseH[0, 1], h02 = inverseH[0, 2];
    double h10 = inverseH[1, 0], h11 = inverseH[1, 1], h12 = inverseH[1, 2];
    double h20 = inverseH[2, 0], h21 = inverseH[2, 1], h22 = inverseH[2, 2];
    var pRes = Parallel.For(0, height, y =>
    {
        for (int x = 0; x < width; x++)
        {
            // Shared projective denominator for both coordinates.
            var w = h20 * x + h21 * y + h22;
            var newX = (h00 * x + h01 * y + h02) / w;
            var newY = (h10 * x + h11 * y + h12) / w;
            // Forward mapping can leave holes (black pixels); they are cleaned
            // up later by MedianFilterMod2.
            if (newX >= 0 && newX < width - 1 && newY >= 0 && newY < height - 1)
            {
                result[(int)newY * width + (int)newX] = grayscale.Colors[y * width + x];
            }
        }
    });
    if (!pRes.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    return(new GrayscaleImage(width, height, result));
}
/// <summary>
/// Sobel filter used to highlight the document edges.
/// </summary>
/// <param name="grayscale">Grayscale source image (provides the dimensions).</param>
/// <param name="bytes">Pre-processed pixel buffer to filter.</param>
/// <param name="sx">Sobel kernel for the X direction; must be square with odd size.</param>
public static byte[] SobelFilter2(GrayscaleImage grayscale, byte[] bytes, double[,] sx)
{
    var dim = sx.GetLength(0);
    if (dim % 2 != 1 || dim != sx.GetLength(1))
    {
        throw new Exception("The matrix is not set correctly");
    }
    var width = grayscale.Width;
    var height = grayscale.Height;
    // The Y kernel is the transpose of the X kernel.
    var sy = TransposeMatrix(sx);
    var radius = dim / 2;
    var filtered = new byte[bytes.Length];
    // Skip a 'radius'-wide border so the kernel always fits inside the image.
    var status = Parallel.For(radius, height - radius, row =>
    {
        for (var col = radius; col < width - radius; col++)
        {
            var gx = ApplyMatrix(bytes, sx, col, row, width, height);
            var gy = ApplyMatrix(bytes, sy, col, row, width, height);
            // Gradient magnitude, truncated into a byte.
            filtered[row * width + col] = (byte)Math.Sqrt(gx * gx + gy * gy);
        }
    });
    if (!status.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    return filtered;
}
/// <summary>
/// Feeds one image through the network, takes the arg-max of the output layer
/// as the classification, and reports correctness plus the network cost.
/// </summary>
static TestResult TestImage(NeuralNetwork network, GrayscaleImage image)
{
    network.FeedImage(image);
    network.FeedForward();
    // Arg-max over the output layer.
    var outputLayer = network.Neurons[network.LayerCount - 1];
    var best = outputLayer[0].Output;
    var bestIndex = 0;
    for (int i = 1; i < outputLayer.Length; i++)
    {
        if (outputLayer[i].Output > best)
        {
            best = outputLayer[i].Output;
            bestIndex = i;
        }
    }
    var isCorrect = bestIndex == image.ImageLabel;
    var cost = network.CalculateCost(bestIndex);
    return new TestResult()
    {
        Cost = cost,
        IsCorrect = isCorrect
    };
}
/// <summary>
/// Runs face registration (user_id=0, data_id=0) on the device, logs the
/// outcome, and on success saves the normalized face image to disk.
/// </summary>
public void _regist_exec(HVCP2Api hvc_p2_api)
{
    var faceImage = new GrayscaleImage();
    var code = hvc_p2_api.register_data(0, 0, faceImage);
    if (code == p2def.RESPONSE_CODE_NORMAL)
    {
        this.SetText(" Success to register. user_id=0 data_id=0");
        this.SetText(Environment.NewLine);
        // Keep the normalized face image returned by the sensor.
        faceImage.save("registerd_img.jpg");
        return;
    }
    // Failure: pick the message for the specific response code.
    string message;
    if (code == p2def.RESPONSE_CODE_NO_FACE)
    {
        message = " Number of faces that can be registered is 0.";
    }
    else if (code == p2def.RESPONSE_CODE_PLURAL_FACE)
    {
        message = " Number of detected faces is 2 or more.";
    }
    else
    {
        // error
        message = " Error: Invalid register album. ";
    }
    this.SetText(message);
    this.SetText(Environment.NewLine);
}
/// <summary>
/// Rotates the image by the given angle around the image centre.
/// </summary>
/// <param name="grayscaleImage">Grayscale source image.</param>
/// <param name="rotationAngle">Rotation angle in radians (fed to Math.Sin/Cos).</param>
public static GrayscaleImage RotateGrayscaleImage(GrayscaleImage grayscaleImage, double rotationAngle)
{
    var width = grayscaleImage.Width;
    var height = grayscaleImage.Height;
    double sinA = Math.Sin(rotationAngle);
    double cosA = Math.Cos(rotationAngle);
    var newColors = new byte[width * height];
    double x0 = width / 2;
    double y0 = height / 2;
    // Inverse mapping: for each destination pixel find the source pixel,
    // so the rotated image has no holes.
    var pRes = Parallel.For(0, height, newY =>
    {
        for (int newX = 0; newX < width; ++newX)
        {
            // Rotation about the image centre.
            int x = (int)(x0 + (newX - x0) * cosA - (newY - y0) * sinA);
            int y = (int)(y0 + (newX - x0) * sinA + (newY - y0) * cosA);
            if (x >= width || y >= height || x < 0 || y < 0)
            {
                continue; // source falls outside the image; leave the pixel at 0
            }
            newColors[newY * width + newX] = grayscaleImage.Colors[y * width + x];
        }
    });
    // Every other parallel filter in this project checks the loop status;
    // the original omitted the check here — add it for consistency.
    if (!pRes.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    return(new GrayscaleImage(width, height, newColors));
}
/// <summary>
/// Median filter that removes the black (0) pixels introduced by the projective
/// correction step; all other pixels are copied through unchanged.
/// </summary>
/// <param name="grayscale">Grayscale image with the extracted, aligned document.</param>
public static GrayscaleImage MedianFilterMod2(GrayscaleImage grayscale)
{
    var width = grayscale.Width;
    var height = grayscale.Height;
    var output = new byte[width * height];
    var status = Parallel.For(0, height, row =>
    {
        for (var col = 0; col < width; col++)
        {
            var index = row * width + col;
            var pixel = grayscale.Colors[index];
            if (pixel != 0)
            {
                output[index] = pixel;
                continue;
            }
            // Black pixel: replace it with the median of its neighbours.
            var neighbors = new List <double>();
            MakeListNeighbors(grayscale.Colors, neighbors, width, height, col, row);
            output[index] = (byte)MakeMedian(neighbors.OrderBy(v => v).ToList());
        }
    });
    if (!status.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    return new GrayscaleImage(width, height, output);
}
/// <summary>
/// Computes the distribution of the image by normalizing.
/// </summary>
/// <returns>The image distribution</returns>
public unsafe GrayscaleImage ComputeDistribution()
{
    int rows = Rows;
    int columns = Columns;
    float[, ,] dist = new float[rows, columns, 1];
    // Multiply each pixel by the reciprocal instead of dividing per pixel.
    // NOTE(review): if _sum is 0 this yields Infinity/NaN — confirm callers
    // guarantee a non-zero sum before normalizing.
    float norm = 1 / _sum;
    // Pin both arrays and copy with raw pointers over rows*columns elements.
    // Assumes RawArray is laid out in the same (row-major) order as dist — TODO confirm.
    fixed(float *dataSrc = RawArray, distSrc = dist)
    {
        float *dataPtr = dataSrc;
        float *distPtr = distSrc;
        int total = rows * columns;
        while (total-- > 0)
        {
            *distPtr = *dataPtr * norm;
            distPtr++;
            dataPtr++;
        }
    }
    GrayscaleImage gray = new GrayscaleImage();
    gray.SetData(dist);
    return(gray);
}
/// <summary>
/// Sends the EXECUTE command to the sensor: runs the detection functions in
/// exec_func and, when requested, reads back the captured image.
/// </summary>
/// <param name="exec_func">Bit flags of the functions to execute (p2def.EX_*).</param>
/// <param name="out_img_type">0 = no image; otherwise an image follows the results.</param>
/// <param name="frame_result">Receives the parsed detection results.</param>
/// <param name="img">Receives the captured image when out_img_type != 0.</param>
/// <returns>Device response code (0 on success).</returns>
public int execute(int exec_func, int out_img_type, HVCResult frame_result, GrayscaleImage img)
{
    var cmd = new List <byte>(HVC_CMD_HDR_EXECUTE);
    // Executes specified functions. e.g. Face detection, Age estimation, etc
    // Facial estimation functions require face detection, so force those flags on.
    // NOTE(review): '+' is used where '|' is meant; with distinct single-bit flags
    // and |= the result is still correct, but EX_FACE | EX_DIRECTION would be clearer.
    if ((exec_func & (p2def.EX_DIRECTION | p2def.EX_AGE | p2def.EX_GENDER | p2def.EX_GAZE | p2def.EX_BLINK | p2def.EX_EXPRESSION)) != 0x00)
    {
        exec_func |= p2def.EX_FACE + p2def.EX_DIRECTION;
    }
    cmd.AddRange(BitConverter.GetBytes((UInt16)(exec_func)));
    cmd.Add((byte)out_img_type);
    var sendResult = this._send_command(cmd.ToArray(), cmd.ToArray().Length);
    if (sendResult.response_code == 0x00)
    {
        // Success
        // rc = number of result bytes consumed; any image payload follows it.
        int rc = frame_result.read_from_buffer(exec_func, sendResult.data_len, sendResult.data);
        if (out_img_type != 0x00)
        {
            // Image block layout: int16 width, int16 height, then width*height gray bytes.
            img.width = BitConverter.ToInt16(sendResult.data, rc);
            img.height = BitConverter.ToInt16(sendResult.data, (rc + 2));
            img.data = sendResult.data.Skip(rc + 4).Take(img.width * img.height).ToArray();
        }
    }
    return(sendResult.response_code);
}
/// <summary>
/// Loads the image at the given path, validates it, and writes negative,
/// grayscale and blurred variants next to it. Exits the process on success;
/// otherwise returns an error message for the user.
/// </summary>
/// <param name="str">Path to the image file to process.</param>
private static string Process(string str)
{
    const string inputError = "Something was wrong with the input, Please try again";
    Bitmap img;
    try
    {
        img = new Bitmap(str);
    }
    // The original had three identical catch blocks; one filtered catch suffices.
    catch (Exception ex) when (ex is FileNotFoundException || ex is IndexOutOfRangeException || ex is ArgumentException)
    {
        return inputError;
    }
    if (!FileHandler.CheckPath(str))
    {
        return inputError;
    }
    // Check if size is over 400x400 = 160 000
    if (!FileHandler.CheckImageSize(img))
    {
        return "The image you have entered is too big, Please enter a smaller one";
    }
    // Create and save a negative image
    var negativeImage = new NegativeImage(img).AddNegativeFilter();
    negativeImage.Save(FileHandler.GetNewPath(str, "_negative"));
    // Create and save a grayscale image
    var grayscaleImage = new GrayscaleImage(img).AddGrayscaleFilter();
    grayscaleImage.Save(FileHandler.GetNewPath(str, "_grayscale"));
    // Create and save a blurred image
    var blurredImage = new BlurredImage(img).AddBlurFilter();
    blurredImage.Save(FileHandler.GetNewPath(str, "_blurred"));
    // If everything works, exit the application
    Environment.Exit(0);
    // Unreachable in practice; matches the original's empty-string return.
    return "";
}
/// <summary>
/// AddGrayscaleFilter turns every pixel into gray 100 while preserving alpha:
/// rows alternate between (0,100,200) and (200,100,0), both of which average to 100.
/// </summary>
public void AddGrayscaleFilter_FilterAdded_ColorsGrayscale()
{
    // Arrange (the original's unused Random local has been removed, and the
    // nine copy-pasted SetPixel calls replaced with loops)
    const int width = 3;
    const int height = 3;
    const int alpha = 255;
    var img = new Bitmap(width, height);
    var expectedImg = new Bitmap(width, height);
    for (var x = 0; x < width; x++)
    {
        for (var y = 0; y < height; y++)
        {
            var color = y % 2 == 0
                ? Color.FromArgb(alpha, 0, 100, 200)
                : Color.FromArgb(alpha, 200, 100, 0);
            img.SetPixel(x, y, color);
            // Every channel is expected to average out to 100.
            expectedImg.SetPixel(x, y, Color.FromArgb(255, 100, 100, 100));
        }
    }

    // Act
    var gray = new GrayscaleImage(img);
    var grayImage = gray.AddGrayscaleFilter();

    // Assert: alpha is preserved from the input; RGB matches the expected gray.
    for (var x = 0; x < grayImage.Width; x++)
    {
        for (var y = 0; y < grayImage.Height; y++)
        {
            var expected = expectedImg.GetPixel(x, y);
            var actual = grayImage.GetPixel(x, y);
            Assert.AreEqual((int)img.GetPixel(x, y).A, (int)actual.A);
            Assert.AreEqual(expected.R, actual.R);
            Assert.AreEqual(expected.G, actual.G);
            Assert.AreEqual(expected.B, actual.B);
        }
    }
}
/// <summary>
/// Converts the current image to 8-bit grayscale unless it already is one,
/// and stores the raw pixels in DipLibImage.
/// </summary>
private void GetGrayscaleImage()
{
    // Already grayscale: nothing to do.
    if (DipLibImage is GrayscaleImage)
    {
        return;
    }
    Image = new FormatConvertedBitmap(OriginImage, PixelFormats.Gray8, BitmapPalettes.Gray256, 0);
    var pixelCount = Image.PixelWidth * Image.PixelHeight;
    var pixels = new byte[pixelCount];
    // Stride equals PixelWidth because Gray8 uses one byte per pixel.
    Image.CopyPixels(pixels, Image.PixelWidth, 0);
    DipLibImage = new GrayscaleImage(pixels, Image.PixelWidth, Image.PixelHeight);
}
/// <summary>
/// Loads a grayscale image into the network's input layer, scaling each pixel
/// from [0, 255] to [0.0, 1.0]. Input neurons are laid out row by row.
/// </summary>
public static void FeedImage(this NeuralNetwork network, GrayscaleImage image)
{
    var inputLayer = network.Neurons[0];
    for (var y = 0; y < image.Height; y++)
    {
        for (var x = 0; x < image.Width; x++)
        {
            inputLayer[y * image.Width + x].Input = (double)image.ImageData[x, y] / 255.0;
        }
    }
}
/// <summary>
/// Computes the distances from each image edge to the document border by
/// scanning along the centre row/column and recording the first pixel that is
/// neither pure black (0) nor pure white (255).
/// Result order: top, right, bottom, left.
/// </summary>
/// <param name="grayscale">Grayscale image with the extracted, aligned document.</param>
private static List <int> GetDistanceToDocument(GrayscaleImage grayscale)
{
    var width = grayscale.Width;
    var height = grayscale.Height;
    var result = new List <int>();
    // After background removal and alignment the surroundings contain only
    // 0/255 pixels, so the first other value marks the document border.
    Func<int, int, bool> isBorder = (x, y) =>
    {
        var pixel = grayscale.Colors[y * width + x];
        return pixel != 0 && pixel != 255;
    };
    // Top edge: walk down the centre column.
    for (int y = 0; y < height; y++)
    {
        if (isBorder(width / 2, y)) { result.Add(y); break; }
    }
    // Right edge: walk left along the centre row.
    for (int x = width - 1; x > -1; x--)
    {
        if (isBorder(x, height / 2)) { result.Add(x); break; }
    }
    // Bottom edge: walk up the centre column.
    for (int y = height - 1; y > -1; y--)
    {
        if (isBorder(width / 2, y)) { result.Add(y); break; }
    }
    // Left edge: walk right along the centre row.
    for (int x = 0; x < width; x++)
    {
        if (isBorder(x, height / 2)) { result.Add(x); break; }
    }
    return result;
}
/// <summary>
/// Returns a grayscale representation of the filter.
/// </summary>
/// <returns>A grayscale image whose pixels are the filter's values.</returns>
public GrayscaleImage GetRepresentation()
{
    var representation = new GrayscaleImage(_rows, _columns);
    for (int row = 0; row < _rows; row++)
    {
        for (int col = 0; col < _columns; col++)
        {
            representation[row, col] = _filterValues[row, col];
        }
    }
    return representation;
}
/// <summary>
/// Edge-detection pipeline: grayscale -> 3x median filter -> Sobel -> Otsu binarization.
/// </summary>
/// <param name="bmp">Source bitmap (typically the compressed photo).</param>
public static byte[] ImageFiltering(Bitmap bmp)
{
    // Horizontal Sobel kernel; the vertical one is derived by transposition
    // inside SobelFilter2.
    var sobelKernel = new double[, ]
    {
        { -1, -2, -1 },
        {  0,  0,  0 },
        {  1,  2,  1 }
    };
    var grayscale = new GrayscaleImage(bmp);
    // Three median passes to suppress noise before edge detection.
    var denoised = grayscale.Colors;
    for (int pass = 0; pass < 3; pass++)
    {
        denoised = MedianFilter(grayscale, denoised);
    }
    var edges = SobelFilter2(grayscale, denoised, sobelKernel);
    return BinarizeImage(grayscale, edges);
}
/// <summary>
/// Sends the REGISTER_DATA command: registers data for face recognition and,
/// on success, fills img with the normalized face image returned by the device.
/// </summary>
public int register_data(int user_id, int data_id, GrayscaleImage img)
{
    // Registers data for Recognition and gets a normalized image.
    var cmd = new List <byte>(HVC_CMD_HDR_REGISTER_DATA);
    cmd.AddRange(BitConverter.GetBytes((UInt16)(user_id)));
    cmd.Add((byte)data_id);
    var payload = cmd.ToArray();
    var sendResult = this._send_command(payload, payload.Length);
    if (sendResult.response_code == 0x00)
    {
        // Success: response layout is int16 width, int16 height, then the pixels.
        img.width = BitConverter.ToInt16(sendResult.data, 0);
        img.height = BitConverter.ToInt16(sendResult.data, 2);
        img.data = sendResult.data.Skip(4).Take(img.width * img.height).ToArray();
    }
    return sendResult.response_code;
}
/// <summary>
/// Binarizes the image using the Otsu threshold: pixels below the threshold
/// become 0, all others 255. Modifies and returns the given buffer.
/// </summary>
/// <param name="grayscale">Grayscale image (provides threshold and dimensions).</param>
/// <param name="bytes">Pixel buffer, binarized in place.</param>
public static byte[] BinarizeImage(GrayscaleImage grayscale, byte[] bytes)
{
    var threshold = OtsuThreshold(grayscale);
    var width = grayscale.Width;
    var status = Parallel.For(0, grayscale.Height, row =>
    {
        var offset = row * width;
        for (int col = 0; col < width; col++)
        {
            bytes[offset + col] = bytes[offset + col] < threshold ? (byte)0 : (byte)255;
        }
    });
    if (!status.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    return bytes;
}
/// <summary>
/// Computes the Otsu binarization threshold for the image.
/// </summary>
/// <param name="grayscale">Grayscale image.</param>
public static int OtsuThreshold(GrayscaleImage grayscale)
{
    const int histSize = 256;
    var hist = MakeHistogram(grayscale.Colors, grayscale.Width, grayscale.Height);
    // m = intensity-weighted sum over the histogram, n = total pixel count.
    var m = 0;
    var n = 0;
    for (int level = 0; level < histSize; level++)
    {
        m += level * hist[level];
        n += hist[level];
    }
    return MakeThreshold(histSize, hist, m, n);
}
/// <summary>
/// Separates the document from the background: measures the distance from each
/// image edge to the document and copies only that region into a new image.
/// </summary>
/// <param name="grayscale">Grayscale image with the extracted, aligned document.</param>
public static GrayscaleImage CutDocument(GrayscaleImage grayscale)
{
    var borders = GetDistanceToDocument(grayscale);
    var top = borders[0];
    var right = borders[1];
    var bottom = borders[2];
    var left = borders[3];
    var width = grayscale.Width;
    var newWidth = right - left;
    var newHeight = bottom - top;
    // Keep the new width a multiple of 4 — presumably for bitmap row (stride)
    // alignment; confirm against BitmapProcessing.MakeBitmap.
    if (newWidth % 4 != 0)
    {
        right -= newWidth % 4;
        newWidth = right - left;
    }
    var newColors = new byte[newWidth * newHeight];
    // Copy only the document region row by row.
    var status = Parallel.For(top, bottom, y =>
    {
        for (int x = left; x < right; x++)
        {
            newColors[(y - top) * newWidth + (x - left)] = grayscale.Colors[y * width + x];
        }
    });
    if (!status.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
    // The correction step can leave black artifacts inside the crop; clean them up.
    return ImageFilters.MedianFilterMod2(new GrayscaleImage(newWidth, newHeight, newColors));
}
/// <summary>
/// Paints the background around the document white (255), in place.
/// A pixel is background when it lies on the outer side of any of the four
/// bounding lines; note the sign convention alternates per line.
/// </summary>
/// <param name="grayscale">Grayscale image, modified in place.</param>
/// <param name="equationsLines">Equations of the four lines bounding the document.</param>
public static void SelectBackground(GrayscaleImage grayscale, List <EquationLine> equationsLines)
{
    var width = grayscale.Width;
    var status = Parallel.For(0, grayscale.Height, y =>
    {
        for (int x = 0; x < width; x++)
        {
            // Evaluate all four line equations for this pixel.
            var side1 = equationsLines[0].DeterminePosition(x, y);
            var side2 = equationsLines[1].DeterminePosition(x, y);
            var side3 = equationsLines[2].DeterminePosition(x, y);
            var side4 = equationsLines[3].DeterminePosition(x, y);
            var isBackground = side1 <= 0 || side2 >= 0 || side3 <= 0 || side4 >= 0;
            if (isBackground)
            {
                grayscale.Colors[y * width + x] = 255;
            }
        }
    });
    if (!status.IsCompleted)
    {
        throw new Exception("Parallel error");
    }
}
// ==========================================================================
// APIs for Album operation of Face recognition
// ==========================================================================
/// <summary>
/// Registers data for Recognition and gets a normalized image.
/// Thin passthrough to the wrapper; see HVCP2Wrapper.register_data for the
/// wire format of the command and response.
/// </summary>
/// <param name="user_id">User ID [0-9]</param>
/// <param name="data_id">Data ID [0-99]</param>
/// <param name="out_register_image">normalized face image</param>
/// <returns>response_code from B5T-007001.</returns>
public int register_data(int user_id, int data_id, GrayscaleImage out_register_image)
{
    return(this._hvc_p2_wrapper.register_data(user_id, data_id, out_register_image));
}
/// <summary>
/// Binary-thresholds the grayscale image: pixels above the threshold become 255,
/// the rest 0, which separates dark (near-black) regions from everything else.
/// </summary>
/// <param name="threshold">Gray level used as the cut-off (default 80).</param>
public Image <Gray, byte> IsolateColorBlack(double threshold = 80)
{
    var cutoff = new Gray(threshold);
    var maxValue = new Gray(255);
    return GrayscaleImage.ThresholdBinary(cutoff, maxValue);
}
/// <summary>
/// Background worker loop: connects to the HVC-P2 sensor when requested,
/// repeatedly runs detection, handles one-shot registration requests, and
/// disconnects on error or when the end-request flag is set.
/// </summary>
public void mainloop()
{
    var hvc_tracking_result = new HVCTrackingResult();
    var img = new GrayscaleImage();
    HVCP2Api.EXECUTE_RET exec_ret;
    var sw = new Stopwatch();
    while (true)
    {
        try
        {
            if (this.isConnectRequest)
            {
                var connector = new SerialConnector();
                // Build the execute-function bit mask from the UI check boxes.
                var exec_func = 0x00;
                if (BodyDetectionCheck.Checked == true) { exec_func += p2def.EX_BODY; }
                if (HandDetectionCheck.Checked == true) { exec_func += p2def.EX_HAND; }
                if (FaceDetectionCheck.Checked == true) { exec_func += p2def.EX_FACE; }
                if (FaceDirectionCheck.Checked == true) { exec_func += p2def.EX_DIRECTION; }
                if (AgeDetectionCheck.Checked == true) { exec_func += p2def.EX_AGE; }
                if (GenderDetectionCheck.Checked == true) { exec_func += p2def.EX_GENDER; }
                if (GazeDetectionCheck.Checked == true) { exec_func += p2def.EX_GAZE; }
                if (BlinkDetectionCheck.Checked == true) { exec_func += p2def.EX_BLINK; }
                if (ExpressionDetectionCheck.Checked == true) { exec_func += p2def.EX_EXPRESSION; }
                if (RecognitionDetectionCheck.Checked == true) { exec_func += p2def.EX_RECOGNITION; }
                this.hvc_p2_api = new HVCP2Api(connector, exec_func, StablirizationCheck.Checked);
                // Port name looks like "COM3"; take the numeric part after "COM".
                var comnum = int.Parse(this.connect_comport.Substring(3));
                var ret = this.hvc_p2_api.connect(comnum, 9600, timeout * 1000);
                if (ret == true)
                {
                    HVCP2Wrapper.GET_VERSION_RET result;
                    this.isConnectRequest = false;
                    var retcode = _check_connection(this.hvc_p2_api, out result);
                    if (retcode == true)
                    {
                        _get_hvc_version(result);
                        _set_hvc_p2_parameters(this.hvc_p2_api);
                        // Sets STB library parameters
                        _set_stb_parameters(hvc_p2_api);
                        this.isConnected = true;
                    }
                    else
                    {
                        // Connection check failed: reset the UI state and drop the port.
                        this.SetToDefault();
                        try { this.hvc_p2_api.disconnect(); } catch { }
                    }
                }
            }
            if (this.isConnected)
            {
                if (this.isRegistExecute)
                {
                    // One-shot face registration, then disconnect and reset.
                    _regist_exec(this.hvc_p2_api);
                    this.SetToDefault();
                    try { this.hvc_p2_api.disconnect(); } catch { }
                }
                else
                {
                    // Timed detection pass on the sensor.
                    this.isExecuting = true;
                    sw.Reset();
                    sw.Start();
                    exec_ret = hvc_p2_api.execute(output_img_type, hvc_tracking_result, img);
                    sw.Stop();
                    this.isExecuting = false;
                    if (output_img_type != p2def.OUT_IMG_TYPE_NONE)
                    {
                        img.save(img_fname);
                    }
                    this.SetText(string.Format(" ==== Elapsed time:{0}[msec] ====", sw.ElapsedMilliseconds));
                    this.SetText(Environment.NewLine);
                    this.SetText(hvc_tracking_result.ToString());
                    this.SetText(Environment.NewLine);
                    // NOTE(review): this format string has no {0}, so the second
                    // argument is ignored.
                    this.SetText(string.Format(" Press Stop Button to end:", sw.ElapsedMilliseconds));
                    this.SetText(Environment.NewLine);
                    this.SetText(Environment.NewLine);
                    // Keep the UI responsive while looping on this thread.
                    System.Windows.Forms.Application.DoEvents();
                }
            }
            if (this.isEndRequest)
            {
                try { this.hvc_p2_api.disconnect(); } catch { }
                break;
            }
        }
        catch (Exception ex)
        {
            this.SetText(string.Format("Unexpected exception : {0}", ex.Message));
            this.SetText(Environment.NewLine);
            this.SetToDefault();
            try { this.hvc_p2_api.disconnect(); } catch { }
        }
        System.Threading.Thread.Sleep(10);
    }
}
/// <summary>
/// <para>Executes functions specified in the constructor.</para>
/// <para>e.g. Face detection, Age estimation etc.</para>
/// </summary>
/// <param name="out_img_type">output image type
/// <para>  OUT_IMG_TYPE_NONE (00h): no image output</para>
/// <para>  OUT_IMG_TYPE_QVGA (01h): 320x240 pixel resolution(QVGA)</para>
/// <para>  OUT_IMG_TYPE_QQVGA (02h): 160x120 pixel resolution(QQVGA)</para></param>
/// <param name="tracking_result">the tracking result is stored</param>
/// <param name="out_img">output image</param>
/// <returns>struct of (response_code, stb_return)
/// <para>  response_code (int): response code from B5T-007001</para>
/// <para>  stb_return (bool): return status of STB library</para>
/// </returns>
public EXECUTE_RET execute(int out_img_type, HVCTrackingResult tracking_result, GrayscaleImage out_img)
{
    EXECUTE_RET retvalue;
    retvalue.response_code = 0;
    retvalue.stb_return = 0;
    // Raw single-frame result from the device.
    var frame_result = new HVCResult();
    retvalue.response_code = this._hvc_p2_wrapper.execute(this._exec_func, out_img_type, frame_result, out_img);
    tracking_result.clear();
    if ((this.use_stb == true) && (this._exec_func != p2def.EX_NONE))
    {
        // Run the frame result through the STB stabilization library.
        var stb_in = new STBLibWrapper.STB_FRAME_RESULT();
        stb_in.bodys.body = new STBLibWrapper.STB_FRAME_RESULT_DETECTION[STB.STB_FRAME_NUM];
        stb_in.faces.face = new STBLibWrapper.STB_FRAME_RESULT_FACE[STB.STB_FRAME_NUM];
        frame_result.export_to_C_FRAME_RESULT(ref stb_in);
        var stb_out_f = new STBLibWrapper.STB_FACE[STB.STB_FRAME_NUM];
        var stb_out_b = new STBLibWrapper.STB_BODY[STB.STB_FRAME_NUM];
        STB.STB_EXECUTE_RET stb_ret = this._stb.execute(stb_in, stb_out_f, stb_out_b);
        if (stb_ret.retcode < 0)
        {
            // STB error
            retvalue.stb_return = stb_ret.retcode;
            return(retvalue);
        }
        // Merge stabilized faces, then the per-frame-only estimations the STB
        // library does not track.
        tracking_result.faces.append_C_FACE_RES35(this._exec_func, (int)stb_ret.face_count, stb_out_f);
        if ((this._exec_func & p2def.EX_DIRECTION) != 0)
        {
            tracking_result.faces.append_direction_list(frame_result.faces);
        }
        if ((this._exec_func & p2def.EX_GAZE) != 0)
        {
            tracking_result.faces.append_gaze_list(frame_result.faces);
        }
        if ((this._exec_func & p2def.EX_BLINK) != 0)
        {
            tracking_result.faces.append_blink_list(frame_result.faces);
        }
        if ((this._exec_func & p2def.EX_EXPRESSION) != 0)
        {
            tracking_result.faces.append_expression_list(frame_result.faces);
        }
        tracking_result.bodies.append_BODY_RES35(this._exec_func, (int)stb_ret.body_count, stb_out_b);
        tracking_result.hands.append_hand_list(frame_result.hands);
    }
    else
    {
        // STB disabled: copy the raw frame result directly.
        // (sic: "appned" is the helper's actual name declared elsewhere)
        tracking_result.appned_FRAME_RESULT(frame_result);
    }
    return(retvalue);
}