private Texture textureForInputKey(string key, bool segmentationMap)
    {
        Texture2D texture = null;

        if (inputData[key] is Texture2D)
        {
            texture = inputData[key] as Texture2D;
        }
        else if (inputData[key] is GameObject)
        {
            GameObject go         = (GameObject)inputData[key];
            Camera     mainCamera = go.GetComponent <Camera>();
            if (segmentationMap)
            {
                // Capture the segmentation pass rendered by ImageSynthesis, adding the component if it is missing.
                ImageSynthesis synthesis = go.GetComponent <ImageSynthesis>();
                if (synthesis == null)
                {
                    synthesis = go.AddComponent <ImageSynthesis>();
                }
                Camera cam = synthesis.capturePasses[2].camera;
                texture = RunwayUtils.CameraToTexture(cam, mainCamera.pixelWidth, mainCamera.pixelHeight);
            }
            else
            {
                texture = RunwayUtils.CameraToTexture(mainCamera, mainCamera.pixelWidth, mainCamera.pixelHeight);
            }
        }
        return(texture);
    }
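A note on the helper used above: RunwayUtils.CameraToTexture is referenced but not shown in this example. Below is a minimal sketch of such a camera-to-texture conversion using only standard Unity APIs; the signature is taken from the call above, while the body is an illustrative assumption, not the plugin's actual implementation.

    // Sketch: render a camera into a temporary RenderTexture and copy the result into a Texture2D.
    public static Texture2D CameraToTexture(Camera camera, int width, int height)
    {
        RenderTexture previousActive = RenderTexture.active;
        RenderTexture previousTarget = camera.targetTexture;

        RenderTexture rt = RenderTexture.GetTemporary(width, height, 24);
        camera.targetTexture = rt;
        camera.Render();

        RenderTexture.active = rt;
        Texture2D result = new Texture2D(width, height, TextureFormat.RGB24, false);
        result.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        result.Apply();

        // restore the previous render state and release the temporary render target
        camera.targetTexture = previousTarget;
        RenderTexture.active = previousActive;
        RenderTexture.ReleaseTemporary(rt);
        return result;
    }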
    void Start()
    {
        var camera = GameObject.Find("Main Camera");

        _imageSynthesis = camera.GetComponent <ImageSynthesis>();
        created         = new GameObject[maxObjects];
    }
Example #3
 private void addImageSynthesisImageForm(WWWForm form, ImageSynthesis synth, bool flag, string captureName, string fieldName)
 {
     if (flag)
     {
         if (!synth.hasCapturePass(captureName))
         {
             Debug.LogError(captureName + " not available - sending empty image");
         }
         // Encode is still called when the pass is missing, so the form field is always present (an empty image).
         byte[] bytes = synth.Encode(captureName);
         form.AddBinaryData(fieldName, bytes);
     }
 }
    public override void OnInspectorGUI()
    {
        DrawDefaultInspector();

        ImageSynthesis imageSynthesis = (ImageSynthesis)target;

        // Only display the "Save" button if playing
        if (EditorApplication.isPlaying && GUILayout.Button("Save Captures"))
        {
            Vector2 gameViewSize = Handles.GetMainGameViewSize();
            imageSynthesis.Save(imageSynthesis.filename, width: (int)gameViewSize.x, height: (int)gameViewSize.y, imageSynthesis.filepath);
        }
    }
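Depending on the Unity version, Handles.GetMainGameViewSize may not be accessible. If it is unavailable, a simple fallback is to read Screen.width and Screen.height, which reflect the Game view resolution while the editor is in play mode. A hedged alternative sketch for the button handler above:

    // Alternative sketch, assuming Handles.GetMainGameViewSize is unavailable in your Unity version.
    if (EditorApplication.isPlaying && GUILayout.Button("Save Captures"))
    {
        imageSynthesis.Save(imageSynthesis.filename, Screen.width, Screen.height, imageSynthesis.filepath);
    }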
Example #5
    /// <summary>
    /// Start the simulator and build the environment.
    /// </summary>
    private void Start()
    {
        // fixed framerate to equalize differences in performance
        UnityEngine.QualitySettings.vSyncCount  = 0;
        UnityEngine.Application.targetFrameRate = 30;

        // init variables
        grid               = FindObjectOfType <Grid>();
        flowCam            = cam.GetComponent <ImageSynthesis>();
        flowCamSensitivity = flowCam.opticalFlowSensitivity;
        if (!fixedObstacle)
        {
            currentObstacleID = 0;
        }

        lastData = new CollectedData(-1, "init", 0, 0, 0, Vector3.zero, Vector3.zero, 0, currentObstacleID, currentEpoch);

        // load obstacles
        loadedObstacles = Resources.LoadAll <GameObject>("Ready");

        // IMPORTANT: run these lines only once, when importing new models;
        // otherwise additional MeshColliders get added to the prefabs every run
        //foreach (GameObject obj in loadedObstacles)
        //{
        //    MeshCollider mc = obj.AddComponent<MeshCollider>();
        //    mc.sharedMesh = obj.GetComponentInChildren<MeshFilter>().sharedMesh;
        //}

        // generate environment
        GenerateEnvironment();

        // create output path
        if (Directory.Exists(outputFolder))
        {
            Directory.Delete(outputFolder, true);
        }
        Directory.CreateDirectory(outputFolder);

        // request new path & start moving
        PathRequestManager.RequestPath(new PathRequest(transform.position, target.position, OnPathFound));
    }
Example #6
 //public override void InitializeAgent()
 void Start()
 {
     academy = GameObject.FindObjectOfType <UAVAcademy> ();
     Application.runInBackground = true;
     //mysteps = 0;
     vel_limit    = 10;
     rb           = GetComponent <Rigidbody> ();
     TargetRb     = Target.GetComponent <Rigidbody> ();
     old_distance = Vector3.Distance(StartPos.position, TargetRb.position);
     distance     = Vector3.Distance(StartPos.position, TargetRb.position);
     imgSyn = GetComponentInChildren <ImageSynthesis> ();
     if (imgSyn.capturePasses [2].camera == null)
     {
         print("ImageSynthesis capture pass camera is null!");
     }
     //observations [0] = GetComponentInChildren<ImageSynthesis>().capturePasses[2].camera;
     agentParameters.agentCameras [0] = imgSyn.capturePasses[2].camera;
     agentParameters.agentCameras [1] = imgSyn.capturePasses[3].camera;
     //imgSyn.OnSceneChange ();
     request = false;
     density = 0;
 }
Example #7
 private void enableImageSynthesis()
 {
     imageSynthesis         = this.gameObject.GetComponentInChildren <ImageSynthesis> ();
     imageSynthesis.enabled = true;
 }
Example #8
    public IEnumerator EmitFrame()
    {
        frameCounter += 1;

        bool shouldRender = this.renderImage && serverSideScreenshot;

        if (shouldRender)
        {
            // we should only read the screen buffer after rendering is complete
            yield return(new WaitForEndOfFrame());
        }

        WWWForm form = new WWWForm();

        MultiAgentMetadata multiMeta = new MultiAgentMetadata();

        multiMeta.agents        = new MetadataWrapper[this.agents.Count];
        multiMeta.activeAgentId = this.activeAgentId;
        multiMeta.sequenceId    = this.currentSequenceId;


        ThirdPartyCameraMetadata[] cameraMetadata = new ThirdPartyCameraMetadata[this.thirdPartyCameras.Count];
        RenderTexture       currentTexture        = null;
        JavaScriptInterface jsInterface           = null;

        if (shouldRender)
        {
            currentTexture = RenderTexture.active;
            for (int i = 0; i < this.thirdPartyCameras.Count; i++)
            {
                ThirdPartyCameraMetadata cMetadata = new ThirdPartyCameraMetadata();
                Camera camera = thirdPartyCameras.ToArray()[i];
                cMetadata.thirdPartyCameraId = i;
                cMetadata.position           = camera.gameObject.transform.position;
                cMetadata.rotation           = camera.gameObject.transform.eulerAngles;
                cameraMetadata[i]            = cMetadata;
                ImageSynthesis imageSynthesis = camera.gameObject.GetComponentInChildren <ImageSynthesis> () as ImageSynthesis;
                addThirdPartyCameraImageForm(form, camera);
                addImageSynthesisImageForm(form, imageSynthesis, this.renderDepthImage, "_depth", "image_thirdParty_depth");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderNormalsImage, "_normals", "image_thirdParty_normals");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderObjectImage, "_id", "image_thirdParty_image_ids");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderClassImage, "_class", "image_thirdParty_classes");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderClassImage, "_flow", "image_thirdParty_flow");//XXX fix this in a bit
            }
        }

        for (int i = 0; i < this.agents.Count; i++)
        {
            BaseFPSAgentController agent = this.agents.ToArray() [i];
            jsInterface = agent.GetComponent <JavaScriptInterface>();
            MetadataWrapper metadata = agent.generateMetadataWrapper();
            metadata.agentId = i;
            // we don't need to render the agent's camera for the first agent
            if (shouldRender)
            {
                addImageForm(form, agent);
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderDepthImage, "_depth", "image_depth");
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderNormalsImage, "_normals", "image_normals");
                addObjectImageForm(form, agent, ref metadata);
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderClassImage, "_class", "image_classes");
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderFlowImage, "_flow", "image_flow");

                metadata.thirdPartyCameras = cameraMetadata;
            }
            multiMeta.agents [i] = metadata;
        }

        if (shouldRender)
        {
            RenderTexture.active = currentTexture;
        }

        var serializedMetadata = Newtonsoft.Json.JsonConvert.SerializeObject(multiMeta);

        #if UNITY_WEBGL
        // JavaScriptInterface jsI =  FindObjectOfType<JavaScriptInterface>();
        // jsInterface.SendAction(new ServerAction(){action = "Test"});
        if (jsInterface != null)
        {
            jsInterface.SendActionMetadata(serializedMetadata);
        }
        #endif

        //form.AddField("metadata", JsonUtility.ToJson(multiMeta));
        form.AddField("metadata", serializedMetadata);
        form.AddField("token", robosimsClientToken);

        #if !UNITY_WEBGL
        if (synchronousHttp)
        {
            if (this.sock == null)
            {
                // Debug.Log("connecting to host: " + robosimsHost);
                IPAddress  host   = IPAddress.Parse(robosimsHost);
                IPEndPoint hostep = new IPEndPoint(host, robosimsPort);
                this.sock = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                try {
                    this.sock.Connect(hostep);
                }
                catch (SocketException e) {
                    Debug.Log("Socket exception: " + e.ToString());
                }
            }


            if (this.sock != null && this.sock.Connected)
            {
                byte[] rawData = form.data;

                string request = "POST /train HTTP/1.1\r\n" +
                                 "Content-Length: " + rawData.Length.ToString() + "\r\n";

                foreach (KeyValuePair <string, string> entry in form.headers)
                {
                    request += entry.Key + ": " + entry.Value + "\r\n";
                }
                request += "\r\n";

                int sent = this.sock.Send(Encoding.ASCII.GetBytes(request));
                sent = this.sock.Send(rawData);

                // waiting for a frame here keeps the Unity window in sync visually
                // it's not strictly necessary, but allows the interact() command to work properly
                // and does not reduce the overall FPS
                yield return(new WaitForEndOfFrame());

                byte[] headerBuffer      = new byte[1024];
                int    bytesReceived     = 0;
                byte[] bodyBuffer        = null;
                int    bodyBytesReceived = 0;
                int    contentLength     = 0;

                // read header
                while (true)
                {
                    int received = this.sock.Receive(headerBuffer, bytesReceived, headerBuffer.Length - bytesReceived, SocketFlags.None);
                    if (received == 0)
                    {
                        Debug.LogError("0 bytes received attempting to read header - connection closed");
                        break;
                    }

                    bytesReceived += received;
                    string headerMsg = Encoding.ASCII.GetString(headerBuffer, 0, bytesReceived);
                    int    offset    = headerMsg.IndexOf("\r\n\r\n");
                    if (offset > 0)
                    {
                        contentLength     = parseContentLength(headerMsg.Substring(0, offset));
                        bodyBuffer        = new byte[contentLength];
                        bodyBytesReceived = bytesReceived - (offset + 4);
                        Array.Copy(headerBuffer, offset + 4, bodyBuffer, 0, bodyBytesReceived);
                        break;
                    }
                }

                // read body
                while (bodyBytesReceived < contentLength)
                {
                    // check for 0 bytes received
                    int received = this.sock.Receive(bodyBuffer, bodyBytesReceived, bodyBuffer.Length - bodyBytesReceived, SocketFlags.None);
                    if (received == 0)
                    {
                        Debug.LogError("0 bytes received attempting to read body - connection closed");
                        break;
                    }

                    bodyBytesReceived += received;
                    //Debug.Log("total bytes received: " + bodyBytesReceived);
                }

                string msg = Encoding.ASCII.GetString(bodyBuffer, 0, bodyBytesReceived);
                ProcessControlCommand(msg);
            }
        }
        else
        {
            using (var www = UnityWebRequest.Post("http://" + robosimsHost + ":" + robosimsPort + "/train", form))
            {
                yield return(www.SendWebRequest());

                if (www.isNetworkError || www.isHttpError)
                {
                    Debug.Log("Error: " + www.error);
                    yield break;
                }
                ProcessControlCommand(www.downloadHandler.text);
            }
        }
        #endif
    }
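The header-reading loop above depends on parseContentLength, which is not shown in the example. A minimal sketch of such a helper, assuming it only needs to pull the Content-Length value out of the raw HTTP response header:

    // Sketch of a Content-Length parser for the raw header text read above (assumed helper body).
    private int parseContentLength(string header)
    {
        foreach (string line in header.Split(new[] { "\r\n" }, StringSplitOptions.None))
        {
            // header field names are case-insensitive per the HTTP specification
            if (line.StartsWith("Content-Length:", StringComparison.OrdinalIgnoreCase))
            {
                return int.Parse(line.Substring("Content-Length:".Length).Trim());
            }
        }
        return 0;
    }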
Example #9
    private IEnumerator EmitFrame()
    {
        frameCounter += 1;


        // we should only read the screen buffer after rendering is complete
        yield return(new WaitForEndOfFrame());

        WWWForm form = new WWWForm();

        MultiAgentMetadata multiMeta = new MultiAgentMetadata();

        multiMeta.agents = new MetadataWrapper[this.agents.Count];
        ThirdPartyCameraMetadata[] cameraMetadata = new ThirdPartyCameraMetadata[this.thirdPartyCameras.Count];
        multiMeta.activeAgentId = this.activeAgentId;
        multiMeta.sequenceId    = this.currentSequenceId;
        RenderTexture currentTexture = RenderTexture.active;

        for (int i = 0; i < this.thirdPartyCameras.Count; i++)
        {
            ThirdPartyCameraMetadata cMetadata = new ThirdPartyCameraMetadata();
            Camera camera = thirdPartyCameras.ToArray()[i];
            cMetadata.thirdPartyCameraId = i;
            cMetadata.position           = camera.gameObject.transform.position;
            cMetadata.rotation           = camera.gameObject.transform.eulerAngles;
            cameraMetadata[i]            = cMetadata;
            ImageSynthesis imageSynthesis = camera.gameObject.GetComponentInChildren <ImageSynthesis> () as ImageSynthesis;
            addThirdPartyCameraImageForm(form, camera);
            addImageSynthesisImageForm(form, imageSynthesis, this.renderDepthImage, "_depth", "image_thirdParty_depth");
            addImageSynthesisImageForm(form, imageSynthesis, this.renderNormalsImage, "_normals", "image_thirdParty_normals");
            addImageSynthesisImageForm(form, imageSynthesis, this.renderObjectImage, "_id", "image_thirdParty_image_ids");
            addImageSynthesisImageForm(form, imageSynthesis, this.renderClassImage, "_class", "image_thirdParty_classes");
        }

        for (int i = 0; i < this.agents.Count; i++)
        {
            BaseFPSAgentController agent = this.agents.ToArray() [i];
            if (i > 0)
            {
                this.agents.ToArray() [i - 1].m_Camera.enabled = false;
            }
            agent.m_Camera.enabled = true;
            MetadataWrapper metadata = agent.generateMetadataWrapper();
            metadata.agentId = i;
            // we don't need to render the agent's camera for the first agent
            addImageForm(form, agent);
            addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderDepthImage, "_depth", "image_depth");
            addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderNormalsImage, "_normals", "image_normals");
            addObjectImageForm(form, agent, ref metadata);
            addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderClassImage, "_class", "image_classes");
            metadata.thirdPartyCameras = cameraMetadata;
            multiMeta.agents [i]       = metadata;
        }
        if (this.agents.Count != 1)
        {
            this.agents.ToArray()[this.agents.Count - 1].m_Camera.enabled = false;
        }
        this.agents.ToArray()[0].m_Camera.enabled = true;

        RenderTexture.active = currentTexture;

        //form.AddField("metadata", JsonUtility.ToJson(multiMeta));
        form.AddField("metadata", Newtonsoft.Json.JsonConvert.SerializeObject(multiMeta));
        form.AddField("token", robosimsClientToken);

        #if !UNITY_WEBGL
        using (var www = UnityWebRequest.Post("http://" + robosimsHost + ":" + robosimsPort + "/train", form))
        {
            yield return(www.SendWebRequest());

            if (www.isNetworkError || www.isHttpError)
            {
                Debug.Log("Error: " + www.error);
                yield break;
            }
            ProcessControlCommand(www.downloadHandler.text);
        }
        #endif
    }
 void Start()
 {
     frame      = 0;
     sintetizer = GetComponent <ImageSynthesis>();
     Screen.SetResolution(640, 480, true);
 }
    void RenderSegmentationInput(Field input, int index)
    {
        GUILayout.BeginHorizontal(horizontalStyle);
        GUILayout.FlexibleSpace();

        Texture tex = textureForInputKey(input.name, true);

        if (inputData[input.name] != null)
        {
            RenderTextureInfo(tex);
        }
        else
        {
            RenderNotAvailable();
        }
        GUILayout.FlexibleSpace();
        GUILayout.EndHorizontal();

        GUILayout.Space(5);

        GUILayout.BeginHorizontal();
        GUILayout.FlexibleSpace();

        if (userPickedObjectForIndex == index)
        {
            GameObject go = EditorGUIUtility.GetObjectPickerObject() as GameObject;
            inputData[input.name] = go;
            if (go != null)
            {
                ImageSynthesis synthesis = go.GetComponent <ImageSynthesis>();
                if (synthesis == null)
                {
                    // segmentation inputs need an ImageSynthesis component; add one if the picked camera lacks it
                    synthesis = go.AddComponent <ImageSynthesis>();
                }
                synthesis.labels       = input.labels;
                synthesis.colors       = input.colors;
                synthesis.defaultColor = input.defaultColor;
                inputWidths[index]     = maxWidths[index] = go.GetComponent <Camera>().pixelWidth;
                inputHeights[index]    = maxHeights[index] = go.GetComponent <Camera>().pixelHeight;
            }
            userPickedObjectForIndex = -1;
        }

        if (Event.current.commandName == "ObjectSelectorUpdated" && EditorGUIUtility.GetObjectPickerControlID() == index)
        {
            userPickedObjectForIndex = index;
        }


        GUILayout.FlexibleSpace();
        GUILayout.EndHorizontal();

        GUILayout.BeginHorizontal();
        GUILayout.FlexibleSpace();

        if (GUILayout.Button("Select"))
        {
            EditorGUIUtility.ShowObjectPicker <UnityEngine.Object>(inputData[input.name] as UnityEngine.Object, true, "t:Camera", index);
        }

        GUILayout.Space(5);

        if (GUILayout.Button("Preview"))
        {
            if (index == 0)
            {
                inputWindows[index] = GetWindow <RunwayInput1Window>(false, "Runway - Model Input 1", true);
            }
            else
            {
                inputWindows[index] = GetWindow <RunwayInput2Window>(false, "Runway - Model Input 2", true);
            }
        }

        GUILayout.Space(5);

        if (GUILayout.Button("Save"))
        {
            string path = EditorUtility.SaveFilePanel("Save as PNG", "", "ModelInput.png", "png");
            byte[] data = RunwayUtils.TextureToPNG(tex as Texture2D);
            File.WriteAllBytes(path, data);
        }

        GUILayout.FlexibleSpace();
        GUILayout.EndHorizontal();

        RenderObjectTagger(input, index);

        if (inputData[input.name] != null)
        {
            if (inputWindows.ContainsKey(index))
            {
                inputWindows[index].texture = tex;
                inputWindows[index].Repaint();
            }
        }
    }
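The Save button above relies on RunwayUtils.TextureToPNG, which is not included in this snippet. A minimal sketch under the assumption that the helper only needs to PNG-encode a texture, copying it through a temporary RenderTexture so that textures that are not CPU-readable can still be encoded:

    // Sketch of a texture-to-PNG helper matching the call above (the body is an assumption).
    public static byte[] TextureToPNG(Texture2D texture)
    {
        if (texture == null)
        {
            return null;
        }

        // Blit through a temporary RenderTexture so the pixels can be read back even
        // if the source texture was not imported as readable.
        RenderTexture rt = RenderTexture.GetTemporary(texture.width, texture.height);
        Graphics.Blit(texture, rt);

        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = rt;
        Texture2D readable = new Texture2D(texture.width, texture.height, TextureFormat.RGBA32, false);
        readable.ReadPixels(new Rect(0, 0, texture.width, texture.height), 0, 0);
        readable.Apply();

        RenderTexture.active = previous;
        RenderTexture.ReleaseTemporary(rt);

        byte[] png = readable.EncodeToPNG();
        UnityEngine.Object.DestroyImmediate(readable); // editor context, so DestroyImmediate rather than Destroy
        return png;
    }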
Example #12
    private IEnumerator EmitFrame()
    {
        frameCounter += 1;
        bool shouldRender = this.renderImage && serverSideScreenshot;

        if (shouldRender)
        {
            // we should only read the screen buffer after rendering is complete
            yield return(new WaitForEndOfFrame());

            if (synchronousHttp)
            {
                // must wait an additional frame when in synchronous mode otherwise the frame lags
                yield return(new WaitForEndOfFrame());
            }
        }

        WWWForm form = new WWWForm();

        MultiAgentMetadata multiMeta = new MultiAgentMetadata();

        multiMeta.agents        = new MetadataWrapper[this.agents.Count];
        multiMeta.activeAgentId = this.activeAgentId;
        multiMeta.sequenceId    = this.currentSequenceId;


        ThirdPartyCameraMetadata[] cameraMetadata = new ThirdPartyCameraMetadata[this.thirdPartyCameras.Count];
        RenderTexture currentTexture = null;

        if (shouldRender)
        {
            currentTexture = RenderTexture.active;
            for (int i = 0; i < this.thirdPartyCameras.Count; i++)
            {
                ThirdPartyCameraMetadata cMetadata = new ThirdPartyCameraMetadata();
                Camera camera = thirdPartyCameras.ToArray()[i];
                cMetadata.thirdPartyCameraId = i;
                cMetadata.position           = camera.gameObject.transform.position;
                cMetadata.rotation           = camera.gameObject.transform.eulerAngles;
                cameraMetadata[i]            = cMetadata;
                ImageSynthesis imageSynthesis = camera.gameObject.GetComponentInChildren <ImageSynthesis> () as ImageSynthesis;
                addThirdPartyCameraImageForm(form, camera);
                addImageSynthesisImageForm(form, imageSynthesis, this.renderDepthImage, "_depth", "image_thirdParty_depth");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderNormalsImage, "_normals", "image_thirdParty_normals");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderObjectImage, "_id", "image_thirdParty_image_ids");
                addImageSynthesisImageForm(form, imageSynthesis, this.renderClassImage, "_class", "image_thirdParty_classes");
            }
        }

        for (int i = 0; i < this.agents.Count; i++)
        {
            BaseFPSAgentController agent    = this.agents.ToArray() [i];
            MetadataWrapper        metadata = agent.generateMetadataWrapper();
            metadata.agentId = i;
            // we don't need to render the agent's camera for the first agent
            if (shouldRender)
            {
                addImageForm(form, agent);
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderDepthImage, "_depth", "image_depth");
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderNormalsImage, "_normals", "image_normals");
                addObjectImageForm(form, agent, ref metadata);
                addImageSynthesisImageForm(form, agent.imageSynthesis, this.renderClassImage, "_class", "image_classes");
                metadata.thirdPartyCameras = cameraMetadata;
            }
            multiMeta.agents [i] = metadata;
        }

        if (shouldRender)
        {
            RenderTexture.active = currentTexture;
        }

        //form.AddField("metadata", JsonUtility.ToJson(multiMeta));
        form.AddField("metadata", Newtonsoft.Json.JsonConvert.SerializeObject(multiMeta));
        form.AddField("token", robosimsClientToken);

        #if !UNITY_WEBGL
        if (synchronousHttp)
        {
            if (this.sock == null)
            {
                // Debug.Log("connecting to host: " + robosimsHost);
                IPAddress  host   = IPAddress.Parse(robosimsHost);
                IPEndPoint hostep = new IPEndPoint(host, robosimsPort);
                try
                {
                    Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                    s.Connect(hostep);
                    this.sock = s;
                }
                catch (SocketException ex)
                {
                    #if UNITY_EDITOR
                    // swallow the error since it's fine to run without the Python side in the editor
                    Debug.Log("Socket exception: " + ex.ToString());
                    yield break;
                    #else
                    Debug.LogError("Socket exception: " + ex.ToString());
                    throw;
                    #endif
                }
            }
    public void CreateImage()
    {
        Camera camera = view.GetGameObject().GetComponent <Camera>();
        int    width  = ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.ResolutionX;
        int    height = ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.ResolutionY;

        RenderTexture previousActive = RenderTexture.active;
        RenderTexture previousCamera = camera.targetTexture;

        RenderTextureDescriptor descriptor = new RenderTextureDescriptor(width, height);

        camera.targetTexture = new RenderTexture(descriptor);
        string targetPath = ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.TargetFolder;

        RenderTexture.active = camera.targetTexture;

        Texture2D imageTexture = new Texture2D(width, height);

        //Create color image
        if (ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.ColorPictures)
        {
            Directory.CreateDirectory(targetPath + "\\Color");

            camera.Render();
            imageTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
            imageTexture.Apply();

            byte[] bytes = imageTexture.EncodeToPNG();
            File.WriteAllBytes(targetPath + "\\Color\\" + ApplicationManager.instance.generatedImages + ".png", bytes);
        }

        // Create depth image (greyscale)
        if (ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.DepthPictures)
        {
            Directory.CreateDirectory(targetPath + "\\Depth");

            camera.GetComponent <RenderDepth>().enabled = true;
            camera.Render();

            imageTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
            imageTexture.Apply();

            byte[] depthBytes = imageTexture.EncodeToPNG();

            File.WriteAllBytes(targetPath + "\\Depth\\" + ApplicationManager.instance.generatedImages + ".png", depthBytes);
            camera.GetComponent <RenderDepth>().enabled = false;
        }

        if (ApplicationManager.instance.GlobalSettingsModel.GeneralParameterModel.SegmentationPicture)
        {
            Directory.CreateDirectory(targetPath + "\\Segmentation");
            ImageSynthesis imageSynthesis = camera.GetComponent <ImageSynthesis>();
            imageSynthesis.enabled               = false;
            imageSynthesis.enabled               = true;
            imageSynthesis.saveDepth             = false;
            imageSynthesis.saveImage             = false;
            imageSynthesis.saveOpticalFlow       = false;
            imageSynthesis.saveNormals           = false;
            imageSynthesis.saveLayerSegmentation = false;
            imageSynthesis.saveIdSegmentation    = true;

            imageSynthesis.Save(ApplicationManager.instance.generatedImages + ".png", width, height, targetPath + "\\Segmentation");
        }
        // restore the previous render state and release the temporary render target
        RenderTexture createdTexture = camera.targetTexture;
        camera.targetTexture = previousCamera;
        RenderTexture.active = previousActive;
        createdTexture.Release();

        UnityEngine.Object.Destroy(imageTexture);
    }
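CreateImage captures a single frame per call. A hypothetical usage sketch (not part of the original code), assuming the surrounding class is a MonoBehaviour and that ApplicationManager.instance.generatedImages is the integer counter used in the file names above:

    // Hypothetical driver: capture a fixed number of frames, one per rendered frame.
    private IEnumerator CaptureFrames(int frameCount)
    {
        for (int i = 0; i < frameCount; i++)
        {
            // wait until the current frame has finished rendering before reading pixels
            yield return new WaitForEndOfFrame();
            CreateImage();
            ApplicationManager.instance.generatedImages += 1; // assumed counter, see the file names above
        }
    }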