Example #1
    void Start()
    {
        _processor            = new OffscreenProcessor();
        _processor.ShaderName = "Leon/EyeBlender";

        _output = new RenderTexture(1920, 1080, 24, RenderTextureFormat.Default);
    }
    // Use this for initialization
    void Start()
    {
        _processor            = new OffscreenProcessor();
        _processor.ShaderName = "Image/ColorCorrection";

        _Load();
    }
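The two fragments above show only the setup half of the pattern that repeats throughout this page: construct an OffscreenProcessor, assign its ShaderName, and optionally pick a TargetFormat or output RenderTexture. The processing half is only hinted at later (ProcessTexture in Example #29's commented-out line, ProcessTextureSized in Example #19), so the following is a minimal end-to-end sketch under that assumption; the class, the field names, and the per-frame Update call are illustrative, not taken from the original sources.

    using UnityEngine;

    // Minimal usage sketch: the OffscreenProcessor API surface is inferred from the
    // examples on this page; SourceTexture and _result are illustrative names.
    public class OffscreenProcessorSketch : MonoBehaviour
    {
        public Texture SourceTexture;                    // stand-in for whatever feeds the processor

        private OffscreenProcessor _processor;
        private Texture            _result;

        void Start()
        {
            _processor              = new OffscreenProcessor();
            _processor.ShaderName   = "Image/Blitter";               // any of the image shaders used on this page
            _processor.TargetFormat = RenderTextureFormat.ARGB32;
        }

        void Update()
        {
            if (SourceTexture == null)
            {
                return;
            }
            // Assumption: ProcessTexture runs the shader offscreen and returns the processed texture.
            _result = _processor.ProcessTexture(SourceTexture);
        }
    }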
Example #3
    // Use this for initialization
    void Start()
    {
        _Correction              = new OffscreenProcessor();
        _Correction.ShaderName   = "Image/DistortionCorrection";
        _Correction.TargetFormat = RenderTextureFormat.ARGB32;

        _SrcBlitter              = new OffscreenProcessor();
        _SrcBlitter.ShaderName   = "Image/Blitter";
        _SrcBlitter.TargetFormat = RenderTextureFormat.ARGB32;

        _GazeBlitterCircle = Shader.Find("Image/GazeBlit_Circle");

        _GazeBlitterRect = Shader.Find("Image/GazeBlit_Rect");

        _GazeBlitter              = new OffscreenProcessor();
        _GazeBlitter.ShaderName   = "Image/Blitter";
        _GazeBlitter.TargetFormat = RenderTextureFormat.ARGB32;

        _SceneBlitter              = new OffscreenProcessor();
        _SceneBlitter.ShaderName   = "Image/Blitter";
        _SceneBlitter.TargetFormat = RenderTextureFormat.ARGB32;

        _CombinedTexture = new RenderTexture(1, 1, 16, RenderTextureFormat.ARGB32);

        _blurGenerator            = new BlurImageGenerator();
        _blurGenerator.DownScaler = 0;
        _blurGenerator.Iterations = 1;

        CamSource.GetBaseTexture().OnFrameBlitted += OnFrameGrabbed;

        _GazeTexture = new Texture[0];
    }
    public TxFoveatedRenderingImageProcessor(TxKitEyes e)
    {
        _eyes = e;

        /*
         * _SrcBlitter=new OffscreenProcessor();
         * _SrcBlitter.ShaderName = "Image/Blitter";
         * _SrcBlitter.TargetFormat = RenderTextureFormat.Default;
         * _SrcBlitter.TargetFormat = RenderTextureFormat.ARGB32;
         */
        _GazeBlitterCircle              = new OffscreenProcessor();
        _GazeBlitterCircle.ShaderName   = "Image/GazeBlit_Circle";
        _GazeBlitterCircle.TargetFormat = RenderTextureFormat.ARGB32;

        _GazeBlitterRect              = new OffscreenProcessor();
        _GazeBlitterRect.ShaderName   = "Image/GazeBlit_Rect";
        _GazeBlitterRect.TargetFormat = RenderTextureFormat.ARGB32;

        _GazeBlitter              = new OffscreenProcessor();
        _GazeBlitter.ShaderName   = "Image/Blitter";
        _GazeBlitter.TargetFormat = RenderTextureFormat.ARGB32;


        //		CamSource.GetBaseTexture ().OnFrameBlitted+= OnFrameGrabbed;

        _blitMtrl = new Material(Shader.Find("Image/Blitter"));
        _blitMtrl.SetPass(1);
    }
    // Use this for initialization
    void Start()
    {
        if (TargetRobot == null)
        {
            TargetRobot = GetComponent <TxKitEyes> ();
        }
        if (SrcLayer == null)
        {
            SrcLayer = GetComponent <PresenceLayerComponent> ();
        }

        SrcLayer.AddDependencyNode(this);

        _blurProcessorH            = new OffscreenProcessor();
        _blurProcessorH.ShaderName = "Image/SimpleGrabPassBlur";
        _blurProcessorV            = new OffscreenProcessor();
        _blurProcessorV.ShaderName = "Image/SimpleGrabPassBlur";

        _renderProcessor = new OffscreenProcessor();
        _renderProcessor.TargetFormat = RenderTextureFormat.ARGB32;
        _renderProcessor.ShaderName   = "GazeBased/Blend_Stream";

        _correction            = new OffscreenProcessor();
        _correction.ShaderName = "Image/ColorCorrection";

        TargetRobot.OnCameraRendererCreated += OnCameraRendererCreated;
    }
Example #6
    // Use this for initialization
    protected override void Start()
    {
        _Processor = new OffscreenProcessor();
        m_Texture  = gameObject.GetComponent <GstCustomTexture>();

        material = gameObject.GetComponent <MeshRenderer> ().material;
        // Check to make sure we have an instance.
        if (m_Texture == null)
        {
            // Without an early return the code below would dereference a null m_Texture.
            DestroyImmediate(this);
            return;
        }

        m_Texture.Initialize();
        //		pipeline = "filesrc location=\""+VideoPath+"\" ! decodebin ! videoconvert ! video/x-raw,format=I420 ! appsink name=videoSink sync=true";
        //		pipeline = "filesrc location=~/Documents/Projects/BeyondAR/Equirectangular_projection_SW.jpg ! jpegdec ! videoconvert ! imagefreeze ! videoconvert ! imagefreeze ! videoconvert ! video/x-raw,format=I420 ! appsink name=videoSink sync=true";
        //		pipeline = "videotestsrc ! videoconvert ! video/x-raw,width=3280,height=2048,format=I420 ! appsink name=videoSink sync=true";
        m_Texture.SetPipeline(_GetPipeline());
        m_Texture.Play();


        if (Debugger != null)
        {
            Debugger.AddDebugElement(new DebugCameraCaptureElement(m_Texture));
        }

        m_Texture.OnFrameGrabbed += OnFrameGrabbed;

        _Processor.ShaderName = "Image/I420ToRGB";

        Debug.Log("Starting Base");
        base.Start();
    }
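This Start method, like Examples #7 and #21, pairs an Image/I420ToRGB processor with an OnFrameGrabbed subscription and (in those examples) a _needProcessing flag, but the consumption side of the flag is not shown anywhere on this page. Below is a self-contained sketch of how such a flag is commonly drained on the main thread; the handler signature, the GrabbedFrame stand-in, and the Update body are assumptions, not the plugin's real API.

    using UnityEngine;

    // Sketch of the grab-then-convert pattern; everything marked as a stand-in is assumed.
    public class I420ConversionSketch : MonoBehaviour
    {
        public Texture GrabbedFrame;                     // stand-in for the frame exposed by the GStreamer texture

        private OffscreenProcessor _Processor;
        private bool               _needProcessing;
        private Texture            _rgbFrame;

        void Start()
        {
            _Processor            = new OffscreenProcessor();
            _Processor.ShaderName = "Image/I420ToRGB";
            _needProcessing       = false;
        }

        // In the real code this would be subscribed to the texture's OnFrameGrabbed event.
        void OnFrameGrabbed()
        {
            _needProcessing = true;                      // defer the shader pass to the main thread
        }

        void Update()
        {
            if (!_needProcessing || GrabbedFrame == null)
            {
                return;
            }
            _needProcessing = false;
            _rgbFrame       = _Processor.ProcessTexture(GrabbedFrame);   // assumed to return the converted texture
        }
    }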
Example #7
    public void Init(RobotInfo ifo)
    {
        GStreamerCore.Ref();
        m_Texture = TargetNode.AddComponent <GstCustomTexture> ();
        m_Texture.Initialize();

        FileName = ifo.URL;

        _Processor            = new OffscreenProcessor();
        _Processor.ShaderName = "Image/I420ToRGB";
        _needProcessing       = false;

        string path = Application.dataPath + "/" + FileName;

        m_Texture.SetPipeline("filesrc location=\"" + path + "\" ! qtdemux name=demux " +
                              "demux.video_0 ! queue ! avdec_h264 ! videoconvert ! video/x-raw,format=I420 ! appsink name=videoSink " +
                              "demux.audio_0 ! queue ! decodebin ! audioconvert ! volume volume=5 ! appsink name=audioSink");
        m_Texture.OnFrameGrabbed += OnFrameGrabbed;
        m_Texture.Play();


        GameObject audioObj = new GameObject("AudioObject_" + TargetNode.name);

        audioObj.transform.parent   = TargetNode.transform;
        audioObj.transform.position = Vector3.zero;
        AudioObject       = audioObj.AddComponent <AudioSource> ();
        AudioObject.loop  = true;
        _player           = audioObj.AddComponent <GstAudioPlayer> ();
        _player.Player    = m_Texture.Player.AudioWrapper;
        _player.TargetSrc = AudioObject;
    }
Example #8
 public ThetaEquirectangulerConverter()
 {
     _RicohProcessor              = new OffscreenProcessor();
     _RicohProcessor.ShaderName   = "Theta/RealtimeEquirectangular";
     _BlitterProcessor            = new OffscreenProcessor();
     _BlitterProcessor.ShaderName = "Image/Blitter";
 }
Example #9
 // Use this for initialization
 void Start()
 {
     _Blitter              = new OffscreenProcessor();
     _Blitter.ShaderName   = "Image/Blitter";
     _Blitter.TargetFormat = RenderTextureFormat.ARGB32;
     _Blitter.ProcessingMaterial.SetInt("flipImage", 1);
 }
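Per-shader parameters are set directly on ProcessingMaterial, as in the flipImage call above and the TextureRect / _TargetMask calls in Examples #19 and #29. A short sketch of the same idea, with the parameter names taken from those examples and the ProcessTexture call assumed rather than shown on this page:

     // Sketch: configure the blit parameters, then run the processor.
     // sourceTexture is illustrative; ProcessTexture is assumed to return the result.
     Texture _FlipBlit(Texture sourceTexture)
     {
         _Blitter.ProcessingMaterial.SetInt("flipImage", 1);
         _Blitter.ProcessingMaterial.SetVector("TextureRect", new Vector4(0, 0, 1, 1));
         return _Blitter.ProcessTexture(sourceTexture);
     }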
Example #10
 // Use this for initialization
 protected virtual void Start()
 {
     _Correction              = new OffscreenProcessor();
     _Correction.ShaderName   = "Image/DistortionCorrection";
     _Correction.TargetFormat = RenderTextureFormat.ARGB32;
     _Blitter              = new OffscreenProcessor();
     _Blitter.ShaderName   = "Image/Blitter";
     _Blitter.TargetFormat = RenderTextureFormat.ARGB32;
 }
Example #11
    // Use this for initialization
    void Start()
    {
        _srcTexture            = new WebCamTexture();
        _srcTexture.deviceName = WebCamTexture.devices [0].name;
        _srcTexture.Play();

        _processor            = new OffscreenProcessor();
        _processor.ShaderName = "Theta/RealtimeEquirectangular";
    }
Example #12
    void Start()
    {
        _layerProcessors = new OffscreenProcessor[2];
        for (int i = 0; i < _layerProcessors.Length; ++i)
        {
            _layerProcessors[i]            = new OffscreenProcessor();
            _layerProcessors[i].ShaderName = "Telexistence/BlendShader";
        }

        _output.SetTexture(Utilities.BlackTexture, 0);
        _output.SetTexture(Utilities.WhiteTexture, 1);
    }
Example #13
    void Start()
    {
        _flashTween.target   = 0;
        _sizeTween.target    = 1;
        _renderer            = new OffscreenProcessor();
        _renderer.ShaderName = "Hidden/CalibrationScreenShader";

        _hearBeatCor = StartCoroutine(HeartBeat(_sizeTween));

        //if(_hearBeatCor!=null)
        //	StopCoroutine (_hearBeatCor);
    }
Example #14
    void Start()
    {
        //Init UnityWebCamera plugin
        _instance = CreateTextureWrapper();

        _BlitterProcessor = new OffscreenProcessor("UnityCam/Image/Blitter");


        _wrapper = new TextureWrapper();

        //Add Post Renderer invoker, it will handle the rest
        gameObject.AddComponent <UnityCamPostRenderer> ();
    }
Example #15
 void _Recreate()
 {
     if (_Blur != null && _Blur.Length == Iterations)
     {
         return;
     }
     _Blur = new OffscreenProcessor[Iterations];
     for (int i = 0; i < _Blur.Length; ++i)
     {
         _Blur [i]            = new OffscreenProcessor();
         _Blur [i].ShaderName = "Image/ImageBlur";
     }
 }
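_Recreate only sizes the processor array to Iterations; applying the blur is not shown on this page. A sketch of how such a chain might be driven, feeding each pass's output into the next, assuming ProcessTexture(Texture) returns the blurred texture:

 // Sketch: run the blur chain (the ProcessTexture return type is assumed).
 Texture _Apply(Texture input)
 {
     _Recreate();
     Texture current = input;
     for (int i = 0; i < _Blur.Length; ++i)
     {
         current = _Blur [i].ProcessTexture(current);
     }
     return current;
 }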
Example #16
    public TxOVRVisionImageProcessor(TxKitEyes e)
    {
        _eyes = e;
        _ovr  = new COvrvisionUnity();
        _ovr.useProcessingQuality = COvrvisionUnity.OV_CAMQT_DMS;
        _processorThread          = new ProcessorThread(this);
        _processorThread.Start();

        for (int i = 0; i < 2; ++i)
        {
            _flipProcessor[i]              = new OffscreenProcessor();
            _flipProcessor[i].ShaderName   = "Image/FlipCoord";
            _flipProcessor[i].TargetFormat = RenderTextureFormat.ARGB32;
        }
    }
    public TxOVRFoveatedRenderingImageProcessor(TxKitEyes e) : base(e)
    {
        _ovrProcessor = new CustomOvrvisionProcessor(e);
        for (int i = 0; i < 4; ++i)
        {
            _SrcBlitter[i]              = new OffscreenProcessor();
            _SrcBlitter[i].ShaderName   = "Image/Blitter";
            _SrcBlitter[i].TargetFormat = RenderTextureFormat.ARGB32;
        }

        _blitMtrl = new Material(Shader.Find("Image/Blitter"));
        _blitMtrl.SetPass(1);

        CombinedTexture = null;
    }
    public override void ProcessMainThread(ref TxVisionOutput result)
    {
        if (_texture.StreamsCount == 0)
        {
            return;
        }
        Parameters.frameSize = result.Configuration.FrameSize;
        InitResults(_texture.CameraStreams);

        if (_SrcBlitter == null || _SrcBlitter.Length != result.Configuration.CameraStreams)
        {
            _SrcBlitter = new OffscreenProcessor[result.Configuration.CameraStreams];
            for (int i = 0; i < result.Configuration.CameraStreams; ++i)
            {
                _SrcBlitter[i]              = new OffscreenProcessor();
                _SrcBlitter[i].ShaderName   = "Image/Blitter";
                _SrcBlitter[i].TargetFormat = RenderTextureFormat.ARGB32;
            }
        }

        Rect texRect = new Rect(0, 0, 1, 1.0f / _texture.CameraStreams);
        var  srcTex  = _texture.GetEyeTexture(0);

        if (srcTex == null)
        {
            return;
        }
        Texture tex = srcTex;

        result.SetSourceTexture(srcTex, 0);
        for (int i = 0; i < _texture.CameraStreams; ++i)
        {
            if (_texture.CameraStreams > 1)
            {
                _SrcBlitter [i].ProcessingMaterial.SetVector("TextureRect", new Vector4(texRect.x, texRect.y, texRect.width, texRect.height));
                tex        = _SrcBlitter[i].ProcessTextureSized(srcTex, (int)(texRect.width * srcTex.width), (int)(texRect.height * srcTex.height));
                texRect.y += texRect.height;
            }
            var t = BlitImage(tex, i, new Vector2(1, 1));
            if (t != null)
            {
                result.SetTexture(t, i);
            }
        }
    }
Example #19
    public override void ProcessMainThread(ref TxVisionOutput result)
    {
        if (result.Configuration.SeparateStreams)
        {
            for (int i = 0; i < _texture.GetTexturesCount(); ++i)
            {
                result.SetTexture(_texture.GetEyeTexture(i), i);
            }
        }
        else
        {
            if (_texture.GetTexturesCount() == 0)
            {
                return;
            }

            if (_SrcBlitter == null || _SrcBlitter.Length != result.Configuration.CameraStreams)
            {
                _SrcBlitter = new OffscreenProcessor[result.Configuration.CameraStreams];
                for (int i = 0; i < result.Configuration.CameraStreams; ++i)
                {
                    _SrcBlitter[i]              = new OffscreenProcessor();
                    _SrcBlitter[i].ShaderName   = "Image/Blitter";
                    _SrcBlitter[i].TargetFormat = RenderTextureFormat.ARGB32;
                    _SrcBlitter[i].ProcessingMaterial.SetInt("Flip", 1);
                }
            }
            Rect texRect = new Rect(0, 0, 1, 1.0f / (float)result.Configuration.CameraStreams);
            var  tex     = _texture.GetEyeTexture(0);
            if (tex == null)
            {
                return;
            }
            result.SetSourceTexture(tex, 0);
            for (int i = 0; i < result.Configuration.CameraStreams; ++i)
            {
                _SrcBlitter[i].ProcessingMaterial.SetVector("TextureRect", new Vector4(texRect.x, texRect.y, texRect.width, texRect.height));

                var t = _SrcBlitter[i].ProcessTextureSized(tex, (int)(texRect.width * tex.width), (int)(texRect.height * tex.height));          //CamTexture;//
                result.SetTexture(t, i);

                texRect.y += texRect.height;
            }
        }
    }
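Both ProcessMainThread variants above split one vertically stacked source into per-stream textures: each stream occupies a band of height 1/CameraStreams, selected through the TextureRect vector and cropped with ProcessTextureSized. A small worked illustration of how the rect advances (the stream count is illustrative; the Debug output exists only to show the progression):

        // With 3 stacked streams the bands are y = 0.000, 0.333, 0.667, each 0.333 high.
        int  streams = 3;                                // illustrative value
        Rect texRect = new Rect(0, 0, 1, 1.0f / streams);
        for (int i = 0; i < streams; ++i)
        {
            Debug.Log("stream " + i + ": y=" + texRect.y + " height=" + texRect.height);
            texRect.y += texRect.height;
        }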
Example #20
    public void Init(RobotInfo ifo, string profileType, GstImageInfo.EPixelFormat fmt = GstImageInfo.EPixelFormat.EPixel_I420)
    {
        m_Texture = TargetNode.AddComponent <GstNetworkMultipleTexture> ();
        m_Texture.StreamsCount = StreamsCount;
        m_Texture.profileType  = profileType;
        m_Texture.Initialize();

        int texCount = m_Texture.GetTextureCount();

        _Processor      = new OffscreenProcessor[texCount];
        _needProcessing = new bool[texCount];
        for (int i = 0; i < texCount; ++i)
        {
            _Processor[i]            = new OffscreenProcessor();
            _Processor[i].ShaderName = "Image/I420ToRGB";
            _needProcessing [i]      = false;
        }
        string ip = Settings.Instance.GetValue("Ports", "ReceiveHost", ifo.IP);

        m_Texture.ConnectToHost(ip, port, StreamsCount, fmt);
        m_Texture.Play();


        if (_config != null)
        {
            m_Texture.SetConfiguration(_config);
        }

        m_Texture.OnFrameGrabbed += OnFrameGrabbed;


        _videoPorts = new uint[StreamsCount];        //{0,0};
        string streamsVals = "";

        for (int i = 0; i < StreamsCount; ++i)
        {
            _videoPorts [i] = Texture.Player.GetVideoPort(i);
            streamsVals    += _videoPorts [i].ToString();
            if (i != StreamsCount - 1)
            {
                streamsVals += ",";
            }
        }
        RobotConnector.Connector.SendData(TxKitEyes.ServiceName, "VideoPorts", streamsVals, true);
    }
Example #21
    // Use this for initialization
    void Start()
    {
        GStreamerCore.Ref();

        _Processor            = new OffscreenProcessor();
        _Processor.ShaderName = "Image/I420ToRGB";
        _needProcessing       = false;

        _texture = gameObject.AddComponent <GstCustomTexture> ();
        _texture.Initialize();

        string path = Application.dataPath + "/" + FileName;

        _texture.SetPipeline("filesrc location=\"" + path + "\" ! qtdemux name=demux demux.video_0 ! avdec_h264 ! videorate ! videoconvert ! video/x-raw,format=I420,framerate=30/1 ");

        _texture.OnFrameGrabbed += _OnFrameGrabbed;
        _texture.Play();
    }
Example #22
    // Use this for initialization
    protected virtual void Start()
    {
        if (!Provider)
        {
            if (TargetEyes == null)
            {
                TargetEyes = gameObject.GetComponent <TxKitEyes> ();
            }
            if (TargetEyes)
            {
                TelubeeCameraAVProvider p = gameObject.AddComponent <TelubeeCameraAVProvider> ();
                TargetEyes.OnCameraSourceCreated += OnCameraSourceCreated;
                p.TargetEars = TargetEars;
                Provider     = p;
            }
            if (TargetEars)
            {
                TargetEars.OnAudioSourceCreated += OnAudioSourceCreated;
            }
        }
        _blurProcessorH            = new OffscreenProcessor();
        _blurProcessorH.ShaderName = "Image/SimpleGrabPassBlur";
        _blurProcessorV            = new OffscreenProcessor();
        _blurProcessorV.ShaderName = "Image/SimpleGrabPassBlur";

        Provider.OnCameraRendererCreated += OnCameraRendererCreated;

        if (RenderMaterial == null)
        {
            RenderMaterial = new Material(Shader.Find("GazeBased/Blend_Stream"));
        }

        _renderProcessor              = new OffscreenProcessor();
        _renderProcessor.ShaderName   = RenderMaterial.shader.name;
        _renderProcessor.TargetFormat = RenderTextureFormat.ARGB32;

        base._OnStarted();
    }
Example #23
    // Use this for initialization
    void Start()
    {
        _mr = GetComponent <MeshRenderer> ();
        _mr.material.mainTexture = Movie;
        _mr.material.renderQueue = RenderQueueEnum.MediaLayer;
        Movie.Play();
        Movie.loop = true;

        _blurProcessorH            = new OffscreenProcessor();
        _blurProcessorH.ShaderName = "Image/SimpleGrabPassBlur";
        _blurProcessorV            = new OffscreenProcessor();
        _blurProcessorV.ShaderName = "Image/SimpleGrabPassBlur";

        if (RenderMaterial == null)
        {
            RenderMaterial = new Material(Shader.Find("GazeBased/Blend_Stream"));
        }

        _renderProcessor              = new OffscreenProcessor();
        _renderProcessor.ShaderName   = RenderMaterial.shader.name;
        _renderProcessor.TargetFormat = RenderTextureFormat.ARGB32;
        _tmpTexture = new Texture2D(1, 1);
    }
Example #24
    void Start()
    {
        //Init UnityWebCamera plugin
        _instance = CreateTextureWrapper();

        _BlitterProcessor = new OffscreenProcessor("UnityCam/Image/Blitter");

        _wrapper = new TextureWrapper();

        //If Camera already has a RenderTexture use this
        Texture tex = GetComponent <Camera>().targetTexture;

        if (tex is RenderTexture)
        {
            source           = (RenderTexture)tex;
            _deleteRenderTex = false;
        }
        else
        {
            source = new RenderTexture(WIDTH, HEIGHT, 24);
            GetComponent <Camera>().targetTexture = source;
        }
    }
Example #25
    // Use this for initialization
    protected override void Start()
    {
        _Processor = new OffscreenProcessor();
        m_Texture  = gameObject.GetComponent <GstCustomTexture>();

        //material=gameObject.GetComponent<MeshRenderer> ().material;
        // Check to make sure we have an instance.
        if (m_Texture == null)
        {
            // Without an early return the code below would dereference a null m_Texture.
            DestroyImmediate(this);
            return;
        }


        m_Texture.Initialize();
        if (Debugger != null)
        {
            Debugger.AddDebugElement(new DebugCameraCaptureElement(m_Texture));
        }
        m_Texture.OnFrameGrabbed += OnFrameGrabbed;

        _Processor.ShaderName = "Image/I420ToRGB";
        Debug.Log("Starting Base");
        base.Start();
    }
Example #26
 // Use this for initialization
 void Start()
 {
     _processor            = new OffscreenProcessor();
     _processor.ShaderName = Shader.name;
 }
Example #27
 // Use this for initialization
 void Start()
 {
     _processor = new OffscreenProcessor();
 }
 // Use this for initialization
 void Start()
 {
     _Processor            = new OffscreenProcessor();
     _Processor.ShaderName = "Image/I420ToRGB";
 }
Example #29
    void Start()
    {
        Gaze                  = GameObject.FindObjectOfType <GazeFollowComponent> ();
        _processor            = new OffscreenProcessor();
        _processor.ShaderName = "GazeBased/Blend";


        Image <Gray, byte> cache = null;

        EmguImageUtil.UnityTextureToOpenCVImageGray(TargetTexture, ref cache);        //new Mat(pictureStr, LoadImageType.Color); //Read the files as an 8-bit Bgr image
        long             detectionTime;
        List <Rectangle> faces = new List <Rectangle>();
        List <Rectangle> eyes  = new List <Rectangle>();

        //The cuda cascade classifier doesn't seem to be able to load "haarcascade_frontalface_default.xml" file in this release
        //disabling CUDA module for now
        bool tryUseCuda   = true;
        bool tryUseOpenCL = false;

        DetectObjectCL.DetectFace(
            cache.Mat, false,
            faces, eyes,
            tryUseCuda,
            tryUseOpenCL,
            out detectionTime);

        foreach (Rectangle face in faces)
        {
            CvInvoke.Rectangle(cache.Mat, face, new Bgr(0, 0, 1).MCvScalar, 2);
        }
        foreach (Rectangle eye in eyes)
        {
            CvInvoke.Rectangle(cache.Mat, eye, new Bgr(1, 0, 0).MCvScalar, 2);
        }

        Debug.Log("detected faces:" + faces.Count);
        Debug.Log("face detection time:" + detectionTime.ToString() + "ms");

        //display the image

        /*	ImageViewer.Show(image, String.Format(
         *      "Completed face and eye detection using {0} in {1} milliseconds",
         *      (tryUseCuda && CudaInvoke.HasCuda) ? "GPU"
         *      : (tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice) ? "OpenCL"
         *      : "CPU",
         *      detectionTime));
         */

        _map = new ImageFeatureMap(128, 128);
        foreach (Rectangle face in faces)
        {
            Rectangle r = new Rectangle();
            r.X      = face.X - 50;
            r.Y      = face.Y - 5;
            r.Width  = face.Width + 50;
            r.Height = face.Height + 520;
            _map.FillRectangle((float)r.X / (float)cache.Mat.Width, (float)r.Y / (float)cache.Mat.Height,
                               (float)r.Width / (float)cache.Mat.Width, (float)r.Height / (float)cache.Mat.Height, 1);
        }

        _map.Blur();
        _map.Blur();
        _map.Blur();
        _map.Blur();
        Texture2D tex = new Texture2D(1, 1);

        tex.filterMode = FilterMode.Point;

        _map.ConvertToTexture(tex, true);

        _processor.ProcessingMaterial.SetTexture("_TargetMask", tex);
        _processor.ProcessingMaterial.SetTexture("_MainTex", TargetTexture);

        GetComponent <UITexture> ().mainTexture = tex;       //_processor.ProcessTexture (TargetTexture);
    }
Example #30
 // Use this for initialization
 protected override void Start()
 {
     base.Start();
     _correction            = new OffscreenProcessor();
     _correction.ShaderName = "Image/ColorCorrection";
 }