static void Main(string[] args)
    {
        // Initialize GStreamer and register the managed TransformSample element
        Gst.Application.Init();
        TransformSample.Register();

        Pipeline pipeline         = new Pipeline();
        Element  videotestsrc     = ElementFactory.Make("videotestsrc");
        Element  transform        = new TransformSample();
        Element  ffmpegcolorspace = ElementFactory.Make("ffmpegcolorspace");
        Element  videosink        = ElementFactory.Make("autovideosink");

        // Assemble and link: videotestsrc ! transform ! ffmpegcolorspace ! autovideosink
        pipeline.Add(videotestsrc, transform, ffmpegcolorspace, videosink);
        Element.Link(videotestsrc, transform, ffmpegcolorspace, videosink);

        GLib.MainLoop loop = new GLib.MainLoop();

        // Watch the bus so errors and end-of-stream stop the main loop
        pipeline.Bus.AddSignalWatch();
        pipeline.Bus.Message += delegate(object sender, MessageArgs margs) {
            Message message = margs.Message;

            switch (message.Type)
            {
            case MessageType.Error:
                Enum   err;
                string msg;

                message.ParseError(out err, out msg);
                System.Console.WriteLine(String.Format("Error message: {0}", msg));
                loop.Quit();
                break;

            case MessageType.Eos:
                loop.Quit();
                break;
            }
        };

        pipeline.SetState(State.Playing);
        loop.Run();
        pipeline.SetState(State.Null);
    }
        /// <summary>
        /// Extract a normalized face from an input image. Normalization is done
        /// by ensuring that the eye positions in the original image map to the
        /// specified positions in the face image. The input image is assumed to
        /// be 1 byte per channel. Steps involved:
        /// 1. Construct an affine mapping from the destination (face) image back
        ///    to the original image.
        /// 2. Fill in the destination image using that mapping.
        /// </summary>
        /// <param name="origImage">Input image as a byte array</param>
        /// <param name="origRect">Size of the original image</param>
        /// <param name="bytePerPix">Bytes per pixel in the original image. Since we assume
        /// 1 byte per channel this equals the number of channels; the face is constructed
        /// with the same layout.</param>
        /// <param name="origStride">Row stride, in bytes, of the original image</param>
        /// <param name="mapFrom">Region of the original image to map the face from</param>
        /// <param name="faceRect">Desired face size</param>
        /// <param name="faceStride">Row stride, in bytes, of the face image</param>
        /// <param name="txfm">Rotation and translation to apply, derived from the eye positions</param>
        /// <returns>Byte array of the extracted face</returns>
        static public byte[] ExtractNormalizeFace(byte[] origImage, Rect origRect, int bytePerPix, int origStride,
                                                  Rect mapFrom, Rect faceRect, int faceStride, TransformSample txfm)
        {
            // Build the 2x3 affine matrix that maps face (destination) pixel
            // coordinates back into the original image: rotation and scale in
            // the 2x2 block, translation in the last column.
            double[,] affineMat = new double[2, 3];
            // Cast guards against integer truncation if Rect dimensions are integral.
            double scaleX = (double)mapFrom.Width / faceRect.Width;
            double scaleY = (double)mapFrom.Height / faceRect.Height;
            double theta  = txfm.ThetaRad;

            affineMat[0, 0] = scaleX * Math.Cos(theta);
            affineMat[0, 1] = -scaleY * Math.Sin(theta);
            affineMat[0, 2] = txfm.Xpix;
            affineMat[1, 0] = scaleX * Math.Sin(theta);
            affineMat[1, 1] = scaleY * Math.Cos(theta);
            affineMat[1, 2] = txfm.Ypix;

            return(DoAffine(origImage, origRect, bytePerPix, origStride, mapFrom, faceRect, faceStride, affineMat));
        }
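
        // DoAffine itself is not shown here. The sketch below is one way the
        // fill-in step (step 2 of the summary) could work, not necessarily the
        // actual implementation: it assumes Rect exposes X/Y/Width/Height, that
        // pixels are laid out row-major with the given strides, that mapFrom's
        // top-left corner is added on top of the matrix translation, and that
        // nearest-neighbour sampling is acceptable. Samples that fall outside
        // the source image are left as zero (black).
        static byte[] DoAffineSketch(byte[] origImage, Rect origRect, int bytePerPix, int origStride,
                                     Rect mapFrom, Rect faceRect, int faceStride, double[,] affineMat)
        {
            int faceW = (int)faceRect.Width;
            int faceH = (int)faceRect.Height;
            int origW = (int)origRect.Width;
            int origH = (int)origRect.Height;

            byte[] face = new byte[faceStride * faceH];

            for (int yd = 0; yd < faceH; yd++)
            {
                for (int xd = 0; xd < faceW; xd++)
                {
                    // Map the destination (face) pixel back into the original image.
                    double xs = affineMat[0, 0] * xd + affineMat[0, 1] * yd + affineMat[0, 2] + mapFrom.X;
                    double ys = affineMat[1, 0] * xd + affineMat[1, 1] * yd + affineMat[1, 2] + mapFrom.Y;

                    int xi = (int)Math.Round(xs);
                    int yi = (int)Math.Round(ys);
                    if (xi < 0 || xi >= origW || yi < 0 || yi >= origH)
                        continue;   // sample falls outside the source image

                    int src = yi * origStride + xi * bytePerPix;
                    int dst = yd * faceStride + xd * bytePerPix;
                    for (int c = 0; c < bytePerPix; c++)
                        face[dst + c] = origImage[src + c];
                }
            }

            return face;
        }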