// Action that animates a RawImage's alpha toward a target value over a fixed duration.
// _rawImage: the UI RawImage whose alpha will be animated.
// _desiredAlpha: target alpha value to reach at the end of the action.
// _actionDuration: duration of the interpolation (presumably seconds — confirm against SetActionDuration).
// NOTE(review): the interpolation curve defaults to Graph.Linear; the setters are
// invoked before SetupAction(), which presumably snapshots them — keep this order.
public RawImageAlphaToAction(RawImage _rawImage, float _desiredAlpha, float _actionDuration)
            {
                mRawImage = _rawImage;
                SetGraph(Graph.Linear);
                SetDesiredAlpha(_desiredAlpha);
                SetActionDuration(_actionDuration);

                SetupAction();
            }
Example #2
0
        /// <summary>
        /// Background processing loop: consumes (colour, gray) frame pairs from
        /// <c>frameQueue</c>, runs CLM landmark detection and face analysis on each,
        /// then marshals the results onto the UI thread to update the video overlay,
        /// aligned-face/HOG previews, arousal/valence plots, AU graph and emotion
        /// label. Exits when the Dispatcher is shut down (TaskCanceledException).
        /// </summary>
        private void ProcessLoop()
        {
            // Background thread so it cannot keep the process alive after the UI closes.
            Thread.CurrentThread.IsBackground = true;

            CLMParameters clmParams = new CLMParameters();
            CLM clmModel = new CLM();
            // Camera intrinsics: fixed default focal lengths; the principal point
            // (cx, cy) is derived from the first frame's dimensions below.
            float fx = 500, fy = 500, cx = 0, cy = 0;

            FaceAnalyser analyser = new FaceAnalyser();

            DateTime? startTime = CurrentTime;

            // Fixed series colours for the two single-series plots.
            arousalPlot.AssocColor(0, Colors.Red);
            valencePlot.AssocColor(0, Colors.Blue);

            while (true)
            {
                // Blocks until the producer enqueues the next frame pair.
                var newFrames = frameQueue.Take();

                var frame = new RawImage(newFrames.Item1);
                var grayFrame = newFrames.Item2;

                if (!startTime.HasValue)
                    startTime = CurrentTime;

                // Initialise the principal point to the image centre on the first frame.
                if (cx == 0 && cy == 0)
                {
                    cx = grayFrame.Width / 2f;
                    cy = grayFrame.Height / 2f;
                }

                // Full tracker reset requested (flag set elsewhere, e.g. from the UI).
                if (reset)
                {
                    clmModel.Reset();
                    analyser.Reset();
                    reset = false;
                }

                // Reset seeded at a specific point (presumably a user click — confirm).
                if (resetPoint.HasValue)
                {
                    clmModel.Reset(resetPoint.Value.X, resetPoint.Value.Y);
                    analyser.Reset();
                    resetPoint = null;
                }

                detectionSucceeding = clmModel.DetectLandmarksInVideo(grayFrame, clmParams);

                List<Tuple<Point, Point>> lines = null;
                List<Point> landmarks = null;
                if (detectionSucceeding)
                {
                    landmarks = clmModel.CalculateLandmarks();
                    lines = clmModel.CalculateBox(fx, fy, cx, cy);
                }
                else
                {
                    // Tracking lost: clear the analyser's temporal state.
                    analyser.Reset();
                }

                //////////////////////////////////////////////
                // Analyse frame and detect AUs
                //////////////////////////////////////////////

                analyser.AddNextFrame(grayFrame, clmModel, (CurrentTime - startTime.Value).TotalSeconds);

                var alignedFace = analyser.GetLatestAlignedFace();
                var hogDescriptor = analyser.GetLatestHOGDescriptorVisualisation();

                trackingFps.AddFrame();

                // Snapshot the analysis results before hopping to the UI thread.
                Dictionary<String, double> aus = analyser.GetCurrentAUs();
                string emotion = analyser.GetCurrentCategoricalEmotion();
                double arousal = analyser.GetCurrentArousal();
                double valence = analyser.GetCurrentValence();
                double confidence = analyser.GetConfidence();
                try
                {
                    // Synchronous Invoke keeps the worker in lock-step with UI updates.
                    Dispatcher.Invoke(() =>
                    {

                        // Lazily create the writeable bitmaps on first use.
                        if (latestAlignedFace == null)
                            latestAlignedFace = alignedFace.CreateWriteableBitmap();

                        if (latestHOGDescriptor == null)
                            latestHOGDescriptor = hogDescriptor.CreateWriteableBitmap();

                        confidenceBar.Value = confidence;

                        if (detectionSucceeding)
                        {

                            frame.UpdateWriteableBitmap(latestImg);
                            alignedFace.UpdateWriteableBitmap(latestAlignedFace);
                            hogDescriptor.UpdateWriteableBitmap(latestHOGDescriptor);

                            imgAlignedFace.Source = latestAlignedFace;
                            imgHOGDescriptor.Source = latestHOGDescriptor;

                            video.OverlayLines = lines;
                            video.OverlayPoints = landmarks;
                            video.Confidence = confidence;

                            video.Source = latestImg;

                            // Remap x*0.5+0.5 suggests arousal/valence are in [-1, 1]
                            // and the single-series plots expect [0, 1] — confirm.
                            Dictionary<int, double> arousalDict = new Dictionary<int, double>();
                            arousalDict[0] = arousal * 0.5 + 0.5;
                            arousalPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = arousalDict, Confidence = confidence });

                            Dictionary<int, double> valenceDict = new Dictionary<int, double>();
                            valenceDict[0] = valence * 0.5 + 0.5;
                            valencePlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = valenceDict, Confidence = confidence });

                            // Combined plot keeps the raw (unremapped) values.
                            Dictionary<int, double> avDict = new Dictionary<int, double>();
                            avDict[0] = arousal;
                            avDict[1] = valence;
                            avPlot.AddDataPoint(new DataPoint() { Time = CurrentTime, values = avDict, Confidence = confidence });

                            auGraph.Update(aus, confidence);

                            emotionLabelHistory.Enqueue(new Tuple<DateTime, string>(CurrentTime, emotion));

                            UpdateEmotionLabel();
                        }
                        else
                        {
                            // No face: zero all AU values and show zero confidence.
                            foreach (var k in aus.Keys.ToArray())
                                aus[k] = 0;

                            auGraph.Update(aus, 0);
                        }
                    });
                }
                catch (TaskCanceledException)
                {
                    // Quitting
                    break;
                }
            }
        }
Example #3
0
 // Cache the RawImage on this GameObject, then load its texture right away.
 void Awake()
 {
     image = GetComponent<RawImage>();
     ReloadTexture();
 }
Example #4
0
        /// <summary>
        /// Sanity-checks directory #2 of a raw (TIFF-style) file — expected to be an
        /// uncompressed RGB image with 16 bits per sample — by asserting the values
        /// of its IFD entries, then writes the strip data out as a .bmp next to the
        /// input file.
        /// </summary>
        /// <param name="fileName">Path of the raw file to inspect.</param>
        private static void DumpImage2(string fileName)
        {
            using (var fileStream = File.Open(fileName, FileMode.Open, FileAccess.Read))
                using (var binaryReader = new BinaryReader(fileStream))
                {
                    var rawImage = new RawImage(binaryReader);

                    // Image #2 is RGB, 16 bits per color, little endian.

                    var image = rawImage.Directories.Skip(2).First();
                    Assert.AreEqual(13, image.Entries.Length);

                    // Tag 0x0100 / type 3 (short): image width in pixels.
                    var imageWidth = image.Entries.Single(e => e.TagId == 0x0100 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(592u, imageWidth);

                    // Tag 0x0101: image height (rows).
                    var imageHeight = image.Entries.Single(e => e.TagId == 0x0101 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(395u, imageHeight);

                    // Tag 0x0102: bits per sample, one value per colour channel.
                    var imageFileEntry0102 = image.Entries.Single(e => e.TagId == 0x0102 && e.TagType == 3);
                    // Assert.AreEqual(72014u, imageFileEntry0102.ValuePointer);
                    // Assert.AreEqual(3u, imageFileEntry0102.NumberOfValue);
                    var bitsPerSample = RawImage.ReadUInts16(binaryReader, imageFileEntry0102);
                    CollectionAssert.AreEqual(new[] { (ushort)16, (ushort)16, (ushort)16 }, bitsPerSample);

                    var compression = image.Entries.Single(e => e.TagId == 0x0103 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(1u, compression); // 1 == uncompressed

                    var photometricInterpretation =
                        image.Entries.Single(e => e.TagId == 0x0106 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(2u, photometricInterpretation); // 2 == RGB

                    // Tag 0x0111 / type 4 (long): file offset of the image strip;
                    // the exact offset varies per file, so it is not asserted.
                    var stripOffset = image.Entries.Single(e => e.TagId == 0x0111 && e.TagType == 4).ValuePointer;
                    // Assert.AreEqual(1229532u, stripOffset);

                    var samplesPerPixel = image.Entries.Single(e => e.TagId == 0x0115 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(3u, samplesPerPixel);

                    // One strip for the whole image: rows-per-strip equals the height.
                    var rowsPerStrip = image.Entries.Single(e => e.TagId == 0x0116 && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(395u, rowsPerStrip);

                    // Strip byte count must equal width * height * samples * 2 (16-bit samples).
                    var stripByteCounts = image.Entries.Single(e => e.TagId == 0x0117 && e.TagType == 4).ValuePointer;
                    Assert.AreEqual(1403040u, stripByteCounts);
                    Assert.AreEqual(stripByteCounts, imageWidth * imageHeight * samplesPerPixel * 2);

                    var planarConfiguration = image.Entries.Single(e => e.TagId == 0x011C && e.TagType == 3).ValuePointer;
                    Assert.AreEqual(1u, planarConfiguration); // 1 == chunky

                    // unknown (vendor-specific tags; meaning not documented here)
                    var table1 = image.Entries.Single(e => e.TagId == 0xC5D9 && e.TagType == 4).ValuePointer;
                    Assert.AreEqual(2u, table1);

                    var table2 = image.Entries.Single(e => e.TagId == 0xC6C5 && e.TagType == 4).ValuePointer;
                    Assert.AreEqual(3u, table2);

                    // Tag 0xC6DC: four values whose sums relate to the image dimensions
                    // (see the two assertions below); exact semantics unknown.
                    var imageFileEntryC6DC = image.Entries.Single(e => e.TagId == 0xC6DC && e.TagType == 4);
                    // Assert.AreEqual(72020u, imageFileEntry011C.ValuePointer);
                    // Assert.AreEqual(4u, imageFileEntryC6DC.NumberOfValue);
                    var stuff = RawImage.ReadUInts(binaryReader, imageFileEntryC6DC);
                    CollectionAssert.AreEqual(new[] { 577u, 386u, 14u, 9u }, stuff);
                    Assert.AreEqual(imageWidth, stuff[0] + stuff[2] + 1);
                    Assert.AreEqual(imageHeight, stuff[1] + stuff[3]);

                    var outFile = Path.ChangeExtension(fileName, ".bmp");
                    CreateBitmap(binaryReader, outFile, stripOffset, imageWidth, imageHeight);
                }
        }
 // Cache the tutorial pointer's RawImage and start with it hidden.
 private void Awake()
 {
     pointerImage = tutorialPointer.GetComponent<RawImage>();
     pointerImage.enabled = false;
 }
Example #6
0
 // Use this for initialization
 void Start()
 {
     // The door starts stationary; cache its RawImage for later updates.
     Moving = false;
     doorImage = GetComponent<RawImage>();
 }
Example #7
0
        /// <summary>
        /// Processes a set of independent still images: detects all faces in each
        /// image, runs landmark detection, static AU prediction and gaze analysis per
        /// face, visualizes the result, and records one output per image. Tracker and
        /// analyser state is reset between images so nothing carries across.
        /// </summary>
        /// <param name="reader">Source of the images to process.</param>
        private void ProcessIndividualImages(ImageReader reader)
        {
            // Make sure the GUI is setup appropriately
            SetupFeatureExtractionMode();

            // Indicate we will start running the thread
            thread_running = true;

            // Setup the parameters optimized for working on individual images rather than sequences
            face_model_params.optimiseForImages();

            // Setup the visualization
            Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance);

            // Initialize the face detector if it has not been initialized yet
            if (face_detector == null)
            {
                face_detector = new FaceDetector();
            }

            // Initialize the face analyser (static AU models: dynamic flag is false)
            face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, false, image_output_size, MaskAligned);

            // Loading an image file
            var frame      = new RawImage(reader.GetNextImage());
            var gray_frame = new RawImage(reader.GetCurrentFrameGray());

            // For FPS tracking
            // NOTE(review): startTime and lastFrameTime are assigned but never read
            // in this method — candidates for removal.
            DateTime?startTime     = CurrentTime;
            var      lastFrameTime = CurrentTime;

            // This will be false when the image is not available
            while (reader.isOpened())
            {
                if (!thread_running)
                {
                    break;
                }

                // Setup recording (one recorder per image; closed at the end of the loop)
                RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(false, false,
                                                                                       Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose, RecordAUs,
                                                                                       RecordGaze, RecordHOG, RecordTracked, RecordAligned,
                                                                                       reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), 0);

                RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

                // Detect faces here and return bounding boxes
                List <Rect>   face_detections = new List <Rect>();
                List <double> confidences     = new List <double>();
                face_detector.DetectFacesHOG(face_detections, gray_frame, confidences);

                // For visualization
                double progress = reader.GetProgress();

                // Process every detected face in the image independently
                for (int i = 0; i < face_detections.Count; ++i)
                {
                    bool detection_succeeding = landmark_detector.DetectFaceLandmarksInImage(gray_frame, face_detections[i], face_model_params);

                    var landmarks = landmark_detector.CalculateAllLandmarks();

                    // Predict action units
                    var au_preds = face_analyser.PredictStaticAUsAndComputeFeatures(frame, landmarks);

                    // Predict eye gaze
                    gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                    // Only the final face will contain the details
                    VisualizeFeatures(frame, visualizer_of, landmarks, landmark_detector.GetVisibilities(), detection_succeeding, i == 0, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

                    // Record an observation
                    RecordObservation(recorder, visualizer_of.GetVisImage(), detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), 0);
                }

                frame      = new RawImage(reader.GetNextImage());
                gray_frame = new RawImage(reader.GetCurrentFrameGray());

                // Do not carry state across images
                landmark_detector.Reset();
                face_analyser.Reset();
                recorder.Close();

                lastFrameTime = CurrentTime;
                processing_fps.AddFrame();

                // TODO how to report errors from the reader here? exceptions? logging? Problem for future versions?
            }

            EndMode();
        }
Example #8
0
 // Use this for initialization
 void Start()
 {
     // Cache the RawImage component on this GameObject for later use.
     m_Image = GetComponent <RawImage>();
 }
    /// <summary>
    /// Recursively validates and cleans a UI GameObject hierarchy:
    /// removes CanvasRenderers that have no renderable component (Image/Text/RawImage),
    /// logs GameObjects with missing (null) MonoBehaviour scripts, and verifies that
    /// every persistent Button onClick listener targets an existing method on a
    /// MonoBehaviour. Recurses into all children.
    /// </summary>
    /// <param name="_go">Root of the hierarchy to fix.</param>
    private static void Fix(GameObject _go)
    {
        CanvasRenderer r = _go.GetComponent <CanvasRenderer>();

        if (r != null)
        {
            Image img = _go.GetComponent <Image>();

            Text text = _go.GetComponent <Text>();

            RawImage rawImg = _go.GetComponent <RawImage>();

            // A CanvasRenderer with nothing to render is dead weight — remove it.
            if (img == null && text == null && rawImg == null)
            {
                GameObject.DestroyImmediate(r, true);
            }
        }

        MonoBehaviour[] b = _go.GetComponents <MonoBehaviour>();

        // A null entry means the script asset backing the component is missing.
        foreach (MonoBehaviour m in b)
        {
            if (m == null)
            {
                // Log message (Chinese): "missing script detected".
                SuperDebug.LogErrorFormat("发现脚本丢失  root:{0}--->{1}", _go.transform.root.name, _go.name);

                break;
            }
        }

        Button bt = _go.GetComponent <Button>();

        if (bt != null)
        {
            int num = bt.onClick.GetPersistentEventCount();

            // Check every serialized (persistent) onClick listener.
            for (int i = 0; i < num; i++)
            {
                UnityEngine.Object t = bt.onClick.GetPersistentTarget(i);

                string methodName = bt.onClick.GetPersistentMethodName(i);

                if (!(t is MonoBehaviour))
                {
                    Debug.LogError("Button target gameObject is not a MonoBehaviour!  GameObject.name:" + _go.name + "   root.name:" + _go.transform.root.gameObject.name);
                }
                else
                {
                    MonoBehaviour script = t as MonoBehaviour;

                    // Reflection lookup: the listener's method must exist (public) on the target.
                    MethodInfo mi = script.GetType().GetMethod(methodName);

                    if (mi == null)
                    {
                        Debug.LogError("Button target method is not found in target!  GameObject.name:" + _go.name + "   root.name:" + _go.transform.root.gameObject.name);
                    }
                }
            }
        }

// Disabled legacy pass: replaced SuperList masks with RectMask2D. Kept for reference.
//		SuperList superList = _go.GetComponent<SuperList>();
//
//		if(superList != null){
//
//			Mask mask = _go.GetComponent<Mask>();
//
//			if(mask != null){
//
//				GameObject.DestroyImmediate(mask);
//
//				Image img = _go.GetComponent<Image>();
//
//				GameObject.DestroyImmediate(img);
//
//				_go.AddComponent<RectMask2D>();
//
//				_hasChange = true;
//			}
//		}

        // Depth-first recursion over all children.
        for (int i = 0; i < _go.transform.childCount; i++)
        {
            Fix(_go.transform.GetChild(i).gameObject);
        }
    }
Example #10
0
 // Resolve the child image under "MaskImage/Image" and wire up the click handler.
 private void Awake()
 {
     Transform imageNode = transform.Find("MaskImage/Image");
     _image = imageNode.GetComponent<RawImage>();

     Button button = GetComponent<Button>();
     button.onClick.AddListener(OnClick);
 }
Example #11
0
        /// <summary>
        /// Builds the webcam calibration overlay: a screen-space canvas rendered by
        /// <paramref name="camera"/> containing the webcam feed (mirrored), a cursor
        /// crosshair drawn with chroma-key, and an instruction text, then starts the
        /// webcam stream.
        /// </summary>
        /// <param name="camplus">Camera behaviour whose config selects the webcam device.</param>
        /// <param name="camera">Camera used to render the calibration canvas.</param>
        internal void AddCalibrationScreen(CameraPlusBehaviour camplus, Camera camera)
        {
            _targetBehaviour = camplus;

            // Canvas rendered in screen space by the supplied camera.
            _webCamCanvas = new GameObject("WebCamCanvas").AddComponent <Canvas>();
            _webCamCanvas.gameObject.transform.SetParent(this.transform);
            _webCamCanvas.renderMode    = RenderMode.ScreenSpaceCamera;
            _webCamCanvas.worldCamera   = camera;
            _webCamCanvas.planeDistance = 1;

            // Scale the UI with the current screen resolution.
            CanvasScaler canvasScaler = _webCamCanvas.gameObject.AddComponent <CanvasScaler>();

            canvasScaler.uiScaleMode         = CanvasScaler.ScaleMode.ScaleWithScreenSize;
            canvasScaler.referenceResolution = new Vector2(Screen.width, Screen.height);
            canvasScaler.matchWidthOrHeight  = 1;

            // RawImage that displays the live webcam texture.
            RawImage raw = new GameObject("RawImage").AddComponent <RawImage>();

            raw.transform.SetParent(_webCamCanvas.transform);
            raw.transform.localPosition    = Vector3.zero;
            raw.transform.localEulerAngles = Vector3.zero;

            var rect = raw.gameObject.GetComponent <RectTransform>();

            rect.anchorMin        = new Vector2(0.5f, 0.5f);
            rect.anchorMax        = new Vector2(0.5f, 0.5f);
            rect.pivot            = new Vector2(0.5f, 0.5f);
            // Negative x scale mirrors the feed horizontally (selfie view).
            rect.localScale       = new Vector3(-1f, 1f, 1);
            rect.anchoredPosition = new Vector2(0, 0);
            rect.sizeDelta        = new Vector2(Screen.width / 4, Screen.height / 4);
            rect.localPosition    = new Vector3(0, 0, 0);

            _webCamTexture = new WebCamTexture(camplus.Config.webCamera.name);
            raw.texture    = _webCamTexture;
            Material rawMaterial = new Material(Plugin.cameraController.Shaders["BeatSaber/BlitCopyWithDepth"]);

            rawMaterial.SetColor("_Color", new Color(1.0f, 1.0f, 1.0f, 0));
            rawMaterial.SetFloat("_CullMode", 0);
            raw.material = rawMaterial;
            _webCamTexture.Play();

            // Crosshair cursor: chroma-keyed so its white background becomes transparent.
            _cursorImage = new GameObject("CursorImage").AddComponent <RawImage>();
            _cursorImage.transform.SetParent(_webCamCanvas.transform);
            _cursorImage.transform.localPosition    = Vector3.zero;
            _cursorImage.transform.localEulerAngles = Vector3.zero;
            _cursorImage.texture = CustomUtils.LoadTextureFromResources("CameraPlus.Resources.Xross.png");
            Material cursorMat = new Material(Plugin.cameraController.Shaders["ChromaKey/Unlit/Cutout"]);

            cursorMat.SetColor("_Color", new Color(1.0f, 1.0f, 1.0f, 0));
            cursorMat.SetColor("_ChromaKeyColor", Color.white);
            cursorMat.SetFloat("_ChromaKeyHueRange", 0.5f);
            _cursorImage.material        = cursorMat;
            _rectCursor                  = _cursorImage.GetComponent <RectTransform>();
            _rectCursor.anchorMin        = new Vector2(0.5f, 0.5f);
            _rectCursor.anchorMax        = new Vector2(0.5f, 0.5f);
            _rectCursor.pivot            = new Vector2(0.5f, 0.5f);
            _rectCursor.localScale       = new Vector3(1f, 1f, 1);
            _rectCursor.anchoredPosition = new Vector2(0, 0);
            _rectCursor.sizeDelta        = new Vector2(Screen.width / 8, Screen.height / 4.5f);
            // Slightly negative z keeps the cursor in front of the webcam image.
            _rectCursor.localPosition    = new Vector3(0, 0, -0.1f);

            // Instruction text shown at the bottom of the overlay.
            _calText = new GameObject("CalibrationText").AddComponent <HMUI.CurvedTextMeshPro>();
            _calText.transform.SetParent(_webCamCanvas.transform);
            _calText.transform.localPosition    = Vector3.zero;
            _calText.transform.localEulerAngles = Vector3.zero;
            _calText.alignment = TMPro.TextAlignmentOptions.Bottom;
            _calText.fontSize  = 24;
            _calText.text      = "Pull the trigger in front of the webcam.";
            var cakRect = _calText.gameObject.GetComponent <RectTransform>();

            cakRect.anchorMin        = new Vector2(0.5f, 0.5f);
            cakRect.anchorMax        = new Vector2(0.5f, 0.5f);
            cakRect.pivot            = new Vector2(0.5f, 0.5f);
            cakRect.localScale       = new Vector3(1f, 1f, 1);
            cakRect.anchoredPosition = new Vector2(0, 0);
            cakRect.sizeDelta        = new Vector2(Screen.width / 4, Screen.height / 3);
            cakRect.localPosition    = new Vector3(0, 0, -0.1f);
        }
Example #12
0
    /// <summary>
    /// Populates a reward button with the first not-yet-loaded reward the user owns:
    /// loads its picture (stock Resources asset or custom file), labels the custom
    /// panel, and encodes either the reward URL or name into the objects' names.
    /// Returns after handling at most one reward.
    /// </summary>
    /// <param name="rb">Reward button; activated, given the picture, and renamed.</param>
    /// <param name="rp">Custom reward panel; renamed to the reward name.</param>
    /// <param name="ri">RawImage on the custom panel that shows the picture.</param>
    /// <param name="rt">Text on the custom panel.</param>
    /// <param name="label">Label that displays the reward name.</param>
    private void SetRewardButtonImage(GameObject rb, GameObject rp, RawImage ri, Text rt, Text label)
    {
        rb.SetActive(true);

        var rewards = ds.GetRewardsTable();

        Debug.Log("SetRewardButtonImage method starting, with rb = " + rb.name + ", rp = " + rp.name + ", ri = " + ri.name + ", rt = " + rt.name);

        // NOTE(review): ri/rt are already a RawImage/Text; GetComponent re-resolves
        // the component on the same GameObject. Kept as-is in case a sibling
        // component is intended — confirm.
        RawImage   img             = rb.GetComponent <RawImage>();
        RawImage   customPanelImg  = ri.GetComponent <RawImage>();
        Text       customPanelText = rt.GetComponent <Text>();
        GameObject customPanel     = rp;

        foreach (var row in rewards)
        {
            // Only handle rewards the user owns that have not been loaded yet.
            if (chooseRewardObject.rewardIdsList.Contains(row.reward_id) && !alreadyLoadedImageList.Contains(row.reward_name))
            {
                Debug.Log("Reward Loading. name = " + row.reward_name + ", type = " + row.reward_type + ", url = " + row.reward_url);

                customPanel.name = row.reward_name;
                label.text       = row.reward_name;

                Texture2D tx        = new Texture2D(75, 75);
                byte[]    rewardPic = FileAccessUtil.LoadRewardPic(row.reward_name);

                // If it is stock reward pic:
                if (rewardPic == null)
                {
                    Debug.Log("It's a stock pic");
                    img.texture = Resources.Load <Texture2D>("RewardPictures/" + row.reward_name);
                    alreadyLoadedImageList.Add(row.reward_name);

                    // BUG FIX: was `row.reward_url != "" || row.reward_url != null`,
                    // which is a tautology (every string differs from at least one of
                    // the two), so the button was ALWAYS renamed — even to null/"".
                    // The intent is "a URL is present".
                    if (!string.IsNullOrEmpty(row.reward_url))
                    {
                        Debug.Log("Setting button name to reward url");
                        rb.name = row.reward_url;
                    }

                    return;
                }
                // Else it is a custom reward pic:
                else
                {
                    tx.LoadImage(rewardPic);
                    img.texture            = tx;
                    customPanelImg.texture = tx;
                    customPanelText.text   = rp.name;

                    if (row.reward_type == "website")
                    {
                        rb.name = row.reward_url;
                    }
                    else
                    {
                        rb.name = row.reward_name;
                        rp.name = row.reward_name;
                        rt.text = row.reward_name;
                        Debug.Log("itsa custom");
                    }

                    alreadyLoadedImageList.Add(row.reward_name);

                    return;
                }
            }
        }
    }
 // Cache the sibling components used for video playback each time we're enabled.
 void OnEnable()
 {
     rawImage = GetComponent<RawImage>();
     videoPlayer = GetComponent<VideoPlayer>();
 }
Example #14
0
 // Use this for initialization
 void Start()
 {
     // Begin scrolling at the normal rate and cache the background RawImage.
     scrollRate = normScrollRate;
     bkgSprite = gameObject.GetComponent<RawImage>();
 }
Example #15
0
 // After the base reset, re-resolve the RawImage from the elements container
 // (null-safe: rawImage becomes null when elements is not set).
 protected override void Reset()
 {
     base.Reset();
     rawImage = elements?.GetComponentInChildren<RawImage>();
 }
    /// <summary>
    /// Per-frame UI maintenance: lazily resolves the canvas/RectTransform, sizes
    /// this element (from the background image when one is enabled), positions it
    /// at a screen-percentage location, clamps it back inside the screen when it
    /// has slots, and lazily creates/caches the background image and slots
    /// child objects.
    /// </summary>
    protected virtual void Update()
    {
        // Lazy canvas lookup: prefer a parent canvas, fall back to a child one.
        if (canvas == null)
        {
            canvas = transform.GetComponentInParent <Canvas>();
        }
        if (canvas == null)
        {
            canvas = transform.GetComponentInChildren <Canvas>();
        }
        if (rectTransform == null)
        {
            rectTransform = GetComponent <RectTransform>();
        }

        sizeX = Size().x;
        sizeY = Size().y;

        screenWidth  = Screen.width;
        screenHeight = Screen.height;

        // Size from the background image when it is visible; otherwise from Size().
        if (backgroundRawImage != null)
        {
            if (backgroundRawImage.enabled)
            {
                rectTransform.sizeDelta = backgroundRawImage.rectTransform.sizeDelta;
            }
            else
            {
                rectTransform.sizeDelta = Size();
            }
        }
        else
        {
            rectTransform.sizeDelta = Size();
        }

        // Screen-percentage placement (y measured from the top of the screen).
        rectTransform.position = new Vector3(percentageOfScreenX * screenWidth, screenHeight - percentageOfScreenY * screenHeight);

        //Manage Slot Position and Size
        float positionX = percentageOfScreenX * screenWidth;
        float positionY = percentageOfScreenY * screenHeight;

        // Clamp the panel back on-screen, but only when it has slots.
        if (Slots.Count != 0)
        {
            //Over the bottom of the screen
            if (positionY + rectTransform.sizeDelta.y / 2 > screenHeight)
            {
                float diff = positionY + rectTransform.sizeDelta.y / 2 - screenHeight + .5f;

                rectTransform.position = new Vector2(rectTransform.position.x, rectTransform.position.y + diff);
                positionY -= diff;
            }
            //Above the screen
            if (positionY - rectTransform.sizeDelta.y / 2 < 0)
            {
                float diff = positionY - rectTransform.sizeDelta.y / 2 - .5f;

                rectTransform.position = new Vector2(rectTransform.position.x, rectTransform.position.y + diff);
                positionY -= diff;
            }
            //Right of the screen
            if (positionX + rectTransform.sizeDelta.x / 2 > screenWidth)
            {
                float diff = positionX + rectTransform.sizeDelta.x / 2 - screenWidth - .5f;

                rectTransform.position = new Vector2(rectTransform.position.x - diff, rectTransform.position.y);
                positionX -= diff;
            }
            //Left of the screen
            if (positionX - rectTransform.sizeDelta.x / 2 < 0)
            {
                float diff = positionX - rectTransform.sizeDelta.x / 2 + .5f;

                rectTransform.position = new Vector2(rectTransform.position.x - diff, rectTransform.position.y);
                positionX -= diff;
            }
        }

        //Cache Slots, Background, and Text
        // NOTE(review): Transform.FindChild is obsolete in newer Unity versions;
        // Transform.Find is the direct replacement — confirm project Unity version.
        if (backgroundImageTransform == null)
        {
            backgroundImageTransform = transform.FindChild("Background Image Main");

            // Create the background object on first use if it does not exist yet.
            if (backgroundImageTransform == null)
            {
                GameObject newGO = new GameObject("Main Background");
                newGO.AddComponent <CanvasGroup>().blocksRaycasts = false;
                newGO.AddComponent <RectTransform>().position     = rectTransform.position;
                newGO.transform.parent = transform;
                newGO.transform.SetSiblingIndex(0);
                backgroundImageTransform = newGO.transform;
                backgroundRawImage       = newGO.AddComponent <RawImage>();
                backgroundRawImage.rectTransform.position  = rectTransform.position + bgOffset;
                backgroundRawImage.rectTransform.sizeDelta = rectTransform.sizeDelta;
            }
        }

        if (slotsTransform == null)
        {
            slotsTransform = transform.FindChild("Slots");

            // Create the slots container on first use; rendered last (on top).
            if (slotsTransform == null)
            {
                GameObject newGO = new GameObject("Slots");
                slotsRectTransform     = newGO.AddComponent <RectTransform>();
                newGO.transform.parent = transform;
                newGO.transform.SetAsLastSibling();
                slotsTransform = newGO.transform;
            }
        }

        // Keep the slots container aligned with this panel.
        if (slotsRectTransform != null)
        {
            slotsRectTransform.transform.position = rectTransform.position;
            slotsRectTransform.sizeDelta          = rectTransform.sizeDelta;
        }
    }
Example #17
0
 // Initialize playback state: stopped, no frames queued, RawImage cached.
 private void Awake()
 {
     _play = false;
     _framesToPlay = new List<Texture2D>();
     _rawImage = GetComponent<RawImage>();
 }
Example #18
0
 // Use this for initialization
 void Start()
 {
     // Cache the energy bar's RawImage and begin at full energy.
     energyUI = energyBar.GetComponent<RawImage>();
     currentEnergy = maxEnergy;
 }
Example #19
0
        // The main function call for processing sequences
        /// <summary>
        /// Processes a full video/webcam sequence: tracks landmarks on every frame,
        /// runs AU and gaze analysis, visualizes the results, and records one
        /// observation per frame. Ends when the stream yields an empty frame or
        /// thread_running is cleared; honors pause (thread_paused) and frame-skip
        /// (skip_frames) requests while running.
        /// </summary>
        /// <param name="reader">Source of the image sequence (video file or webcam).</param>
        private void ProcessSequence(SequenceReader reader)
        {
            Thread.CurrentThread.Priority = ThreadPriority.Highest;

            // Make sure the GUI is setup appropriately
            SetupFeatureExtractionMode();

            thread_running = true;

            face_model_params.optimiseForVideo();

            // Setup the visualization
            Visualizer visualizer_of = new Visualizer(ShowTrackedVideo || RecordTracked, ShowAppearance, ShowAppearance);

            // Initialize the face analyser
            face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, DynamicAUModels, image_output_size, MaskAligned);

            // Reset the tracker
            landmark_detector.Reset();

            // Loading an image file
            var frame      = new RawImage(reader.GetNextImage());
            var gray_frame = new RawImage(reader.GetCurrentFrameGray());

            // Setup recording
            RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(true, reader.IsWebcam(),
                                                                                   Record2DLandmarks, Record3DLandmarks, RecordModelParameters, RecordPose, RecordAUs,
                                                                                   RecordGaze, RecordHOG, RecordTracked, RecordAligned,
                                                                                   reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetFPS());

            RecorderOpenFace recorder = new RecorderOpenFace(reader.GetName(), rec_params, record_root);

            // NOTE: removed the unused startTime/lastFrameTime locals the original
            // declared "for FPS tracking" — they were assigned but never read.

            // Empty image would indicate that the stream is over
            while (gray_frame.Width != 0)
            {
                if (!thread_running)
                {
                    break;
                }

                double progress             = reader.GetProgress();
                bool   detection_succeeding = landmark_detector.DetectLandmarksInVideo(gray_frame, face_model_params);

                // The face analysis step (for AUs and eye gaze)
                face_analyser.AddNextFrame(frame, landmark_detector.CalculateAllLandmarks(), detection_succeeding, false);
                gaze_analyser.AddNextFrame(landmark_detector, detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());

                // Only the final face will contain the details
                VisualizeFeatures(frame, visualizer_of, landmark_detector.CalculateAllLandmarks(), landmark_detector.GetVisibilities(), detection_succeeding, true, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), progress);

                // Record an observation
                RecordObservation(recorder, visualizer_of.GetVisImage(), detection_succeeding, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy(), reader.GetTimestamp());

                // FIX: the original used non-short-circuiting `&` between the two
                // boolean flags; `&&` is the idiomatic conditional form and skips
                // the second read when the thread is no longer running.
                while (thread_running && thread_paused && skip_frames == 0)
                {
                    Thread.Sleep(10);
                }

                if (skip_frames > 0)
                {
                    skip_frames--;
                }

                frame      = new RawImage(reader.GetNextImage());
                gray_frame = new RawImage(reader.GetCurrentFrameGray());

                processing_fps.AddFrame();
            }

            // Finalize the recording and flush to disk
            recorder.Close();

            // Post-process the AU recordings
            if (RecordAUs)
            {
                face_analyser.PostProcessOutputFile(recorder.GetCSVFile());
            }

            // Close the open video/webcam
            reader.Close();

            EndMode();
        }
Example #20
0
 /// <summary>
 /// Convenience overload: wraps the raw byte buffer in an <see cref="ImageBinaryReader"/>
 /// and delegates to the primary constructor.
 /// </summary>
 /// <param name="data">Byte buffer containing the lossless-JPEG stream to decode.</param>
 /// <param name="img">Target raw image the decoder operates on.</param>
 /// <param name="UseBigTable">Forwarded to the primary constructor; presumably selects a larger Huffman lookup table — TODO confirm.</param>
 /// <param name="DNGCompatible">Forwarded to the primary constructor; presumably toggles DNG-compatible decoding — TODO confirm.</param>
 public LJPEGPlain(byte[] data, RawImage img, bool UseBigTable, bool DNGCompatible) : this(new ImageBinaryReader(data), img, UseBigTable, DNGCompatible)
 {
 }
Example #21
0
        /// <summary>
        /// Pushes the latest tracking results for one face into the visualizer and the
        /// WPF UI: pose, landmarks, gaze, AU graphs, the tracked-video overlay and the
        /// aligned-face/HOG appearance views.
        /// </summary>
        /// <param name="frame">Current color frame being visualized.</param>
        /// <param name="visualizer">Visualizer that composes the annotated output image.</param>
        /// <param name="landmarks">Detected 2D landmarks as (x, y) tuples.</param>
        /// <param name="visibilities">Per-landmark visibility flags, parallel to <paramref name="landmarks"/>.</param>
        /// <param name="detection_succeeding">True when landmark detection succeeded for this face.</param>
        /// <param name="new_image">True for the first (or only) face of a frame; false for additional faces, whose overlays are appended to the existing lists.</param>
        /// <param name="fx">Camera focal length (x).</param>
        /// <param name="fy">Camera focal length (y).</param>
        /// <param name="cx">Camera principal point (x).</param>
        /// <param name="cy">Camera principal point (y).</param>
        /// <param name="progress">Playback progress shown in the overlay.</param>
        private void VisualizeFeatures(RawImage frame, Visualizer visualizer, List <Tuple <double, double> > landmarks, List <bool> visibilities, bool detection_succeeding,
                                       bool new_image, float fx, float fy, float cx, float cy, double progress)
        {
            List <Tuple <Point, Point> >   lines         = null;
            List <Tuple <double, double> > eye_landmarks = null;
            List <Tuple <Point, Point> >   gaze_lines    = null;
            Tuple <double, double>         gaze_angle    = new Tuple <double, double>(0, 0);

            List <double> pose = new List <double>();

            landmark_detector.GetPose(pose, fx, fy, cx, cy);
            List <double> non_rigid_params = landmark_detector.GetNonRigidParams();

            double confidence = landmark_detector.GetConfidence();

            // Clamp the detector's confidence into [0, 1] before handing it to the UI.
            if (confidence < 0)
            {
                confidence = 0;
            }
            else if (confidence > 1)
            {
                confidence = 1;
            }

            // First rigid parameter is used as the face scale shown in the overlay.
            double scale = landmark_detector.GetRigidParams()[0];

            // Helps with recording and showing the visualizations
            if (new_image)
            {
                visualizer.SetImage(frame, fx, fy, cx, cy);
            }
            visualizer.SetObservationHOG(face_analyser.GetLatestHOGFeature(), face_analyser.GetHOGRows(), face_analyser.GetHOGCols());
            visualizer.SetObservationLandmarks(landmarks, confidence, visibilities);
            visualizer.SetObservationPose(pose, confidence);
            visualizer.SetObservationGaze(gaze_analyser.GetGazeCamera().Item1, gaze_analyser.GetGazeCamera().Item2, landmark_detector.CalculateAllEyeLandmarks(), landmark_detector.CalculateAllEyeLandmarks3D(fx, fy, cx, cy), confidence);

            // Geometry that only exists for a successful detection; otherwise the
            // locals above stay null / zero.
            if (detection_succeeding)
            {
                eye_landmarks = landmark_detector.CalculateVisibleEyeLandmarks();
                lines         = landmark_detector.CalculateBox(fx, fy, cx, cy);

                gaze_lines = gaze_analyser.CalculateGazeLines(fx, fy, cx, cy);
                gaze_angle = gaze_analyser.GetGazeAngle();
            }

            // Visualisation (as a separate function)
            // All UI updates are marshalled to the dispatcher thread; the 200 ms timeout
            // keeps the processing thread from stalling if the UI is busy.
            Dispatcher.Invoke(DispatcherPriority.Render, new TimeSpan(0, 0, 0, 0, 200), (Action)(() =>
            {
                if (ShowAUs)
                {
                    var au_classes = face_analyser.GetCurrentAUsClass();
                    var au_regs = face_analyser.GetCurrentAUsReg();

                    auClassGraph.Update(au_classes);

                    // Rescale AU regression values by 1/5 and clamp to [0, 1] for the graph.
                    var au_regs_scaled = new Dictionary <String, double>();
                    foreach (var au_reg in au_regs)
                    {
                        au_regs_scaled[au_reg.Key] = au_reg.Value / 5.0;
                        if (au_regs_scaled[au_reg.Key] < 0)
                        {
                            au_regs_scaled[au_reg.Key] = 0;
                        }

                        if (au_regs_scaled[au_reg.Key] > 1)
                        {
                            au_regs_scaled[au_reg.Key] = 1;
                        }
                    }
                    auRegGraph.Update(au_regs_scaled);
                }

                if (ShowGeometry)
                {
                    // pose[3..5] are rotations in radians (pitch, yaw, roll given the
                    // label assignments below), converted to whole degrees.
                    // NOTE(review): (int)(x + 0.5) truncates toward zero, so negative
                    // angles round incorrectly (e.g. -10.6 -> -10); consider Math.Round.
                    int yaw = (int)(pose[4] * 180 / Math.PI + 0.5);
                    int roll = (int)(pose[5] * 180 / Math.PI + 0.5);
                    int pitch = (int)(pose[3] * 180 / Math.PI + 0.5);

                    YawLabel.Content = yaw + "°";
                    RollLabel.Content = roll + "°";
                    PitchLabel.Content = pitch + "°";

                    // pose[0..2] are translations, displayed in millimetres.
                    XPoseLabel.Content = (int)pose[0] + " mm";
                    YPoseLabel.Content = (int)pose[1] + " mm";
                    ZPoseLabel.Content = (int)pose[2] + " mm";

                    nonRigidGraph.Update(non_rigid_params);

                    // Update eye gaze
                    String x_angle = String.Format("{0:F0}°", gaze_angle.Item1 * (180.0 / Math.PI));
                    String y_angle = String.Format("{0:F0}°", gaze_angle.Item2 * (180.0 / Math.PI));
                    GazeXLabel.Content = x_angle;
                    GazeYLabel.Content = y_angle;
                }

                if (ShowTrackedVideo)
                {
                    // The writeable bitmap is (re)created only for a new frame, then
                    // updated in place for subsequent faces of the same frame.
                    if (new_image)
                    {
                        latest_img = frame.CreateWriteableBitmap();
                    }

                    frame.UpdateWriteableBitmap(latest_img);

                    overlay_image.Source = latest_img;
                    overlay_image.Confidence = confidence;
                    overlay_image.FPS = processing_fps.GetFPS();
                    overlay_image.Progress = progress;
                    overlay_image.FaceScale = scale;

                    if (!detection_succeeding)
                    {
                        // Nothing tracked this frame: drop any stale overlays.
                        overlay_image.OverlayLines.Clear();
                        overlay_image.OverlayPoints.Clear();
                        overlay_image.OverlayPointsVisibility.Clear();
                        overlay_image.OverlayEyePoints.Clear();
                        overlay_image.GazeLines.Clear();
                    }
                    else
                    {
                        // Convert landmark tuples into WPF points for drawing.
                        List <Point> landmark_points = new List <Point>();
                        foreach (var p in landmarks)
                        {
                            landmark_points.Add(new Point(p.Item1, p.Item2));
                        }

                        List <Point> eye_landmark_points = new List <Point>();
                        foreach (var p in eye_landmarks)
                        {
                            eye_landmark_points.Add(new Point(p.Item1, p.Item2));
                        }


                        if (new_image)
                        {
                            overlay_image.OverlayLines = lines;
                            overlay_image.OverlayPoints = landmark_points;
                            overlay_image.OverlayPointsVisibility = visibilities;
                            overlay_image.OverlayEyePoints = eye_landmark_points;
                            overlay_image.GazeLines = gaze_lines;
                        }
                        else
                        {
                            // In case of multiple faces just add them to the existing drawing list
                            overlay_image.OverlayLines.AddRange(lines.GetRange(0, lines.Count));
                            overlay_image.OverlayPoints.AddRange(landmark_points.GetRange(0, landmark_points.Count));
                            overlay_image.OverlayPointsVisibility.AddRange(visibilities.GetRange(0, visibilities.Count));
                            overlay_image.OverlayEyePoints.AddRange(eye_landmark_points.GetRange(0, eye_landmark_points.Count));
                            overlay_image.GazeLines.AddRange(gaze_lines.GetRange(0, gaze_lines.Count));
                        }
                    }
                }

                if (ShowAppearance)
                {
                    RawImage aligned_face = face_analyser.GetLatestAlignedFace();
                    RawImage hog_face = visualizer.GetHOGVis();

                    // Appearance bitmaps are created lazily on the first frame shown.
                    if (latest_aligned_face == null)
                    {
                        latest_aligned_face = aligned_face.CreateWriteableBitmap();
                        latest_HOG_descriptor = hog_face.CreateWriteableBitmap();
                    }

                    aligned_face.UpdateWriteableBitmap(latest_aligned_face);
                    hog_face.UpdateWriteableBitmap(latest_HOG_descriptor);

                    AlignedFace.Source = latest_aligned_face;
                    AlignedHOG.Source = latest_HOG_descriptor;
                }
            }));
        }
 // Cache the child RawImage once at startup; presumably the surface the book is drawn on.
 void Start()
 {
     var display = GetComponentInChildren <RawImage>();
     bookDisplay = display;
 }
Example #23
0
 // One-time setup: locate the spinner UI, lower the background loading
 // priority, and arm the level-change flag.
 void Start()
 {
     // Find and cache the spinner graphic once rather than per use.
     var spinner = FindSpinnerImage();
     mUISpinner = spinner;

     // Keep async background loading from competing with the main thread.
     Application.backgroundLoadingPriority = ThreadPriority.Low;

     mChangeLevel = true;
 }
        /// <summary>
        /// Wires up the terrain paint texture picker UI: keeps the picker's selection in
        /// sync with the level editor, handles picker clicks (including the special
        /// "add texture" icon, which opens a texture-selection popup), and subscribes to
        /// the editor delegate that (re)populates the picker's texture list.
        /// </summary>
        private void SetupTerrainPaintTexturePickerEvents()
        {
            if (TERRAIN_PAINT_TEXTURES_PICKER != null)
            {
                // update selection if it is changed (e.g. by clicking on a texture)
                LE_GUIInterface.Instance.events.OnTerrainPaintTextureChanged += (object p_obj, LE_GUIInterface.EventHandlers.IntEventArgs p_args) =>
                {
                    // Re-checked inside the lambda: presumably the picker can be
                    // destroyed before the event fires — TODO confirm.
                    if (TERRAIN_PAINT_TEXTURES_PICKER != null)
                    {
                        TERRAIN_PAINT_TEXTURES_PICKER.SetSelection(p_args.Value);
                    }
                };

                // handle the clicked event coming back from UI
                TERRAIN_PAINT_TEXTURES_PICKER.ButtonCallback = (int p_selectedIndex) =>
                {
                    if (TERRAIN_PAINT_TEXTURES_PICKER != null)
                    {
                        if (TERRAIN_PAINT_TEXTURES_PICKER.Textures[p_selectedIndex] == TERRAIN_ADD_PAINT_TEXTURE_ICON)
                        {
                            // user wants to add one more texture to the paint selection
                            uMyGUI_Popup popup = ((uMyGUI_PopupTexturePicker)uMyGUI_PopupManager.Instance.ShowPopup(POPUP_TEXTURE_PICKER))
                                                 .SetPicker(m_unusedPaintTextures, -1, (int p_clickedIndex) =>
                            {
                                LE_GUIInterface.Instance.OnTerrainPaintTextureAdded(m_unusedPaintTextures[p_clickedIndex]);
                            })
                                                 .SetText("Select Texture", "Click on the texture which you want to add to the terrain.")
                                                 .ShowButton("back");
                            AddButtonClickSoundsToGeneratedUI(popup.transform as RectTransform);
                        }
                        else
                        {
                            // user has selected a paint texture
                            LE_GUIInterface.Instance.OnTerrainPaintTextureChanged(p_selectedIndex);
                        }
                    }
                    else
                    {
                        Debug.LogError("LE_GUIInterface_uGUIimpl: TERRAIN_PAINT_TEXTURES_PICKER is not set in inspector!");
                    }
                };
            }
            else
            {
                Debug.LogError("LE_GUIInterface_uGUIimpl: TERRAIN_PAINT_TEXTURES_PICKER is not set in inspector!");
            }

            // handle the initialization/update event coming from the level editor
            LE_GUIInterface.Instance.delegates.SetTerrainPaintTextures += (Texture2D[] p_textures, Texture2D[] p_unusedTextures, int p_selectedIndex, bool p_isAddTextureBtn) =>
            {
                if (TERRAIN_PAINT_TEXTURES_PICKER != null)
                {
                    m_unusedPaintTextures = p_unusedTextures;
                    if (p_isAddTextureBtn && TERRAIN_ADD_PAINT_TEXTURE_ICON != null)
                    {
                        if (p_textures.Length == 0)
                        {
                            p_selectedIndex = -1;                             // make sure that the add new icon is never selected
                        }
                        // add additional texture to use it as add new texture button
                        Texture2D[] texturesWithAddIcon = new Texture2D[p_textures.Length + 1];
                        System.Array.Copy(p_textures, texturesWithAddIcon, p_textures.Length);
                        texturesWithAddIcon[texturesWithAddIcon.Length - 1] = TERRAIN_ADD_PAINT_TEXTURE_ICON;
                        TERRAIN_PAINT_TEXTURES_PICKER.SetTextures(texturesWithAddIcon, p_selectedIndex);
                        // make the add new texture button be rendered with alpha
                        RawImage addBtnImg = TERRAIN_PAINT_TEXTURES_PICKER.Instances[texturesWithAddIcon.Length - 1].GetComponent <RawImage>();
                        if (addBtnImg != null)
                        {
                            addBtnImg.material = null;
                        }
                    }
                    else
                    {
                        if (p_isAddTextureBtn)
                        {
                            Debug.LogError("LE_GUIInterface_uGUIimpl: TERRAIN_ADD_PAINT_TEXTURE_ICON is not set in inspector!");
                        }
                        // textures cannot be modified any more -> simply pass through the array
                        TERRAIN_PAINT_TEXTURES_PICKER.SetTextures(p_textures, p_selectedIndex);
                    }
                    AddButtonClickSoundsToGeneratedUI(TERRAIN_PAINT_TEXTURES_PICKER.transform as RectTransform);
                }
                else
                {
                    Debug.LogError("LE_GUIInterface_uGUIimpl: TERRAIN_PAINT_TEXTURES_PICKER is not set in inspector!");
                }
            };
        }
Example #25
0
        public bool heartBeat;                          //< To make GUI elements blink

        /// <summary>
        /// Stores the oscilloscope settings and prepares the screen: grabs the
        /// <see cref="RawImage"/> on this GameObject and resets its tint to white.
        /// (Previous doc comment described parameters this method does not have.)
        /// </summary>
        /// <param name="oscSettings">Oscilloscope display settings kept for later use.</param>
        public void Initialize(OscSettings oscSettings)
        {
            this.oscSettings  = oscSettings;
            screenImage       = GetComponent <RawImage>();
            screenImage.color = Color.white;
        }
Example #26
0
        /// <summary>
        /// Seeks to the GPS IFD at <paramref name="offset"/>, asserts the expected tag
        /// layout, and prints the decoded GPS data (signal status, timestamp, latitude,
        /// longitude, altitude, magnetic direction, dilution of position, fix quality
        /// and satellite count) to the console.
        /// </summary>
        /// <param name="binaryReader">Reader over the raw image file.</param>
        /// <param name="offset">Absolute byte offset of the GPS IFD in the stream.</param>
        private static void DumpGpsInfo(BinaryReader binaryReader, uint offset)
        {
            binaryReader.BaseStream.Seek(offset, SeekOrigin.Begin);

            var tags = new ImageFileDirectory(binaryReader);

            Assert.AreEqual(0x00000302u, tags.Entries.Single(e => e.TagId == 0x0000 && e.TagType == 1).ValuePointer);    // version number
            // tags.DumpDirectory(binaryReader);

            // A directory holding only the version tag means no GPS data was recorded.
            if (tags.Entries.Length == 1)
            {
                Console.WriteLine("GPS info not found....");
                return;
            }

            Assert.AreEqual(16, tags.Entries.Length);
            var expected = new[]
            {
                (ushort)0x00, (ushort)0x01, (ushort)0x02, (ushort)0x03,
                (ushort)0x04, (ushort)0x05, (ushort)0x06, (ushort)0x07,
                (ushort)0x08, (ushort)0x09, (ushort)0x0A, (ushort)0x0B,
                (ushort)0x10, (ushort)0x11, (ushort)0x12, (ushort)0x1D
            };

            CollectionAssert.AreEqual(expected.ToArray(), tags.Entries.Select(e => e.TagId).ToArray());

            // "A" active, "V" void
            Console.WriteLine("Satellite signal status {0}", RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0009 && e.TagType == 2)));

            // Tag 0x0007 holds three rational values (presumably hours/minutes/seconds)
            // that ConvertDateTime combines with the date string from tag 0x001D.
            var date     = RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x001D && e.TagType == 2));
            var timeData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x0007 && e.TagType == 5));

            Assert.AreEqual(6, timeData.Length);
            var time1    = (double)timeData[0] / timeData[1];
            var time2    = (double)timeData[2] / timeData[3];
            var time3    = (double)timeData[4] / timeData[5];
            var dateTime = ConvertDateTime(date, time1, time2, time3);

            Console.WriteLine("Timestamp {0:M\'/\'d\'/\'yyyy\' \'h\':\'mm\':\'ss\' \'tt}", dateTime.ToLocalTime());

            // Latitude: three rationals (degrees, minutes, seconds) plus N/S reference.
            var latitudeData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x0002 && e.TagType == 5));

            Assert.AreEqual(6, latitudeData.Length);
            var latitude1         = (double)latitudeData[0] / latitudeData[1];
            var latitude2         = (double)latitudeData[2] / latitudeData[3];
            var latitude3         = (double)latitudeData[4] / latitudeData[5];
            var latitudeDirection = RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0001 && e.TagType == 2));

            Console.WriteLine("Latitude {0}° {1}\' {2}\" {3}", latitude1, latitude2, latitude3, latitudeDirection);

            // Longitude: same layout as latitude, with E/W reference.
            var longitudeData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x0004 && e.TagType == 5));

            Assert.AreEqual(6, longitudeData.Length);
            var longitude1         = (double)longitudeData[0] / longitudeData[1];
            var longitude2         = (double)longitudeData[2] / longitudeData[3];
            var longitude3         = (double)longitudeData[4] / longitudeData[5];
            var longitudeDirection = RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0003 && e.TagType == 2));

            Console.WriteLine("Longitude {0}° {1}\' {2}\" {3}", longitude1, longitude2, longitude3, longitudeDirection);

            // Altitude: a single rational, printed in metres.
            var altitudeData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x0006 && e.TagType == 5));

            Assert.AreEqual(2, altitudeData.Length);
            var altitude = (double)altitudeData[0] / altitudeData[1];

            Console.WriteLine("Altitude {0:0.00} m", altitude);
            Assert.AreEqual(0x00000000u, tags.Entries.Single(e => e.TagId == 0x0005 && e.TagType == 1).ValuePointer);

            Console.WriteLine("Geographic coordinate system {0}", RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0012 && e.TagType == 2)));

            Assert.AreEqual("M", RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0010 && e.TagType == 2)));     // Magnetic Direction
            var directionData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x0011 && e.TagType == 5));

            Assert.AreEqual(2, directionData.Length);
            var direction = (double)directionData[0] / directionData[1];

            Console.WriteLine("Direction {0}°", direction);

            var dopData = RawImage.ReadRational(binaryReader, tags.Entries.Single(e => e.TagId == 0x000B && e.TagType == 5));

            Assert.AreEqual(2, dopData.Length);
            var dop = (double)dopData[0] / dopData[1];

            Console.WriteLine("Dilution of Position {0}", dop);

            var quality = RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x000A && e.TagType == 2));

            Console.WriteLine("Fix quality = {0}", DumpFixQuality(quality));
            Console.WriteLine("Number of satellites = {0}", RawImage.ReadChars(binaryReader, tags.Entries.Single(e => e.TagId == 0x0008 && e.TagType == 2)));
        }
Example #27
0
 // Cache the RawImage attached to this GameObject so later updates avoid
 // repeated component lookups.
 void Start()
 {
     var img = GetComponent <RawImage>();
     charImage = img;
 }
 private void Awake()
 {
     // NOTE(review): this assigns balancePixelsUI.GetComponent<RawImage>() back to
     // the same field. If the field already references a RawImage this is a no-op,
     // and if it is unassigned it throws — was GetComponent<RawImage>() on this
     // object (or on a different GameObject) intended? TODO confirm.
     balancePixelsUI = balancePixelsUI.GetComponent <RawImage>();
     // Locate the scene's OpenCV manager; assumes exactly one exists — verify.
     openCvManager   = FindObjectOfType <OpenCVManager>();
 }
Example #29
0
 // Wire up the enemy health bar at startup.
 void Start()
 {
     // The owning enemy sits on a parent object (unlike the player's health
     // bar, which locates the player a different way).
     var owner = GetComponentInParent <EnemyAI>();
     enemy = owner;

     // The bar graphic lives on this same GameObject.
     healthBarRawImage = GetComponent <RawImage>();
 }
Example #30
0
        /// <summary>
        /// Decodes raw image #3 (lossless ITU-T81 JPEG) from the given raw file,
        /// asserts the expected frame/scan structure, and writes the decoded bitmap
        /// next to the input with a .png extension.
        /// </summary>
        /// <param name="fileName">Path of the raw file to decode.</param>
        private static void DumpImage3Raw(string fileName)
        {
            using (var fileStream = File.Open(fileName, FileMode.Open, FileAccess.Read))
                using (var binaryReader = new BinaryReader(fileStream))
                {
                    var rawImage = new RawImage(binaryReader);

                    // Image #3 is a raw image compressed in ITU-T81 lossless JPEG

                    var image = rawImage.Directories.Skip(3).First();

                    // Tag 0x0111/0x0117: strip offset and byte count of the compressed data.
                    var offset = image.Entries.Single(e => e.TagId == 0x0111 && e.TagType == 4).ValuePointer;
                    // Assert.AreEqual(0x2D42DCu, offset);

                    var count = image.Entries.Single(e => e.TagId == 0x0117 && e.TagType == 4).ValuePointer;
                    // Assert.AreEqual(0x1501476u, count);

                    // Tag 0xC640: Canon slice layout (count, width1, width2).
                    var imageFileEntry = image.Entries.Single(e => e.TagId == 0xC640 && e.TagType == 3);
                    var slices         = RawImage.ReadUInts16(binaryReader, imageFileEntry);
                    CollectionAssert.AreEqual(new[] { (ushort)1, (ushort)3144, (ushort)3144 }, slices);

                    binaryReader.BaseStream.Seek(offset, SeekOrigin.Begin);
                    var startOfImage = new StartOfImage(binaryReader, offset, count);

                    var startOfFrame = startOfImage.StartOfFrame;
                    Assert.AreEqual(4056u, startOfFrame.ScanLines);
                    Assert.AreEqual(3144u, startOfFrame.SamplesPerLine);
                    Assert.AreEqual(6288, startOfFrame.Width);

                    Assert.AreEqual(14, startOfFrame.Precision); // RGGB

                    // chrominance subsampling factors
                    Assert.AreEqual(2, startOfFrame.Components.Length); // RGGB

                    Assert.AreEqual(1, startOfFrame.Components[0].ComponentId);
                    Assert.AreEqual(1, startOfFrame.Components[0].HFactor);
                    Assert.AreEqual(1, startOfFrame.Components[0].VFactor);
                    Assert.AreEqual(0, startOfFrame.Components[0].TableId);

                    Assert.AreEqual(2, startOfFrame.Components[1].ComponentId);
                    Assert.AreEqual(1, startOfFrame.Components[1].HFactor);
                    Assert.AreEqual(1, startOfFrame.Components[1].VFactor);
                    Assert.AreEqual(0, startOfFrame.Components[1].TableId);

                    Assert.AreEqual(2, startOfFrame.Components.Sum(component => component.HFactor * component.VFactor));

                    var startOfScan = startOfImage.StartOfScan;
                    // DumpStartOfScan(startOfScan);

                    Assert.AreEqual(1, startOfScan.Bb1);               // Start of spectral or predictor selection
                    Assert.AreEqual(0, startOfScan.Bb2);               // end of spectral selection
                    Assert.AreEqual(0, startOfScan.Bb3);               // successive approximation bit positions
                    Assert.AreEqual(2, startOfScan.Components.Length); // RGGB

                    Assert.AreEqual(1, startOfScan.Components[0].Id);
                    Assert.AreEqual(0, startOfScan.Components[0].Dc);
                    Assert.AreEqual(0, startOfScan.Components[0].Ac); // in lossless, this value is always zero

                    Assert.AreEqual(2, startOfScan.Components[1].Id);
                    Assert.AreEqual(1, startOfScan.Components[1].Dc);
                    Assert.AreEqual(0, startOfScan.Components[1].Ac);

                    startOfImage.ImageData.Reset();

                    var outFile = Path.ChangeExtension(fileName, ".png");
                    CreateBitmap(binaryReader, startOfImage, outFile, offset, slices);

                    // NOTE(review): cc is presumably a sample counter updated by
                    // CreateBitmap; the expected value is specific to this test file.
                    Assert.AreEqual(25504128, cc);
                    Assert.AreEqual(3, startOfImage.ImageData.DistFromEnd);
                }
        }
Example #31
0
    // One-time car setup: caches component references, classifies child transforms
    // (thruster/exhaust particle systems and wheels), records the spawn pose,
    // configures the camera rig and help UI, and initializes the driving state.
    void Start()
    {
        _stasisScript = GetComponent <Stasis>();
        _fuelScript   = GetComponent <Fuel>();
        _rigidbody    = GetComponent <Rigidbody>();

        _wheels = new List <Transform>();

        // Classify direct children by name: particle effects and the wheel set.
        foreach (Transform t in transform)
        {
            if (t.name == "Thruster")
            {
                _thruster = t.GetComponent <ParticleSystem>();
            }
            else if (t.name == "Exhaust")
            {
                _exhaust = t.GetComponent <ParticleSystem>();
            }
            if (t.name.Contains("Left"))
            {
                _wheels.Add(t);
            }
            if (t.name.Contains("Right"))
            {
                _wheels.Add(t);
            }
        }
        // Wheel scale for normal driving vs. the squashed scale used while climbing.
        // Assumes at least one child name contains "Left"/"Right" — _wheels[0] would
        // throw otherwise; TODO confirm.
        Vector3 wheelSize = _wheels[0].localScale;

        _wheelDefaultSize = wheelSize;
        _wheelClimbSize   = new Vector3(wheelSize.x, 0.7f, wheelSize.z);

        TurnOffFireEffects();

        _carState   = CarState.Driving;
        _driveState = DriveState.Park;

        // START_POSITIONS is shared across instances (presumably static) and seeded
        // once with this car's spawn point — verify against the declaration.
        if (START_POSITIONS == null)
        {
            START_POSITIONS = new List <Vector3>();
            START_POSITIONS.Add(transform.position);
        }
        _startPosition = transform.position;
        _startRotation = transform.rotation.eulerAngles;

        _currentStartPosition = -1;

        _teleport = false;

        _oldWallNormal = Vector3.zero;

        // Snap the camera rig to the car and cache its scripts/settings.
        cameraRig.position    = transform.position;
        _freeLookCameraScript = cameraRig.GetComponent <FreeLookCam>();
        _mouseSpeed           = _freeLookCameraScript.m_MoveSpeed;
        _cameraTurnSpeed      = _freeLookCameraScript.m_TurnSpeed;

        // NOTE(review): GameObject.Find returns null when the object is missing,
        // which would make these lookups throw — confirm the scene always contains
        // "Flash", "HelpControls", "HelpKeyboard" and "HelpRunes".
        _flash = GameObject.Find("Flash").GetComponent <RawImage>();

        _otherFreeLookCamerScript = cameraRig.GetComponent <ProtectCameraFromWallClip>();
        _camDistance = _otherFreeLookCamerScript.closestDistance;

        _canClimb = true;
        _drifting = false;
        _jetting  = false;
        _turbo    = false;

        _oldForward = Vector3.zero;

        _helpPad      = GameObject.Find("HelpControls");
        _helpKeyboard = GameObject.Find("HelpKeyboard");
        _helpRunes    = GameObject.Find("HelpRunes");

        // Show at most one control-help panel, matching the chosen input device.
        if (!SHOW_CONTROLS)
        {
            _helpPad.SetActive(false);
            _helpKeyboard.SetActive(false);
        }
        else
        {
            if (StartMenu.USE_KEYBOARD)
            {
                _helpPad.SetActive(false);
            }
            else
            {
                _helpKeyboard.SetActive(false);
            }
        }
        _helpRunes.SetActive(false);

        // Timers (ticks count up toward the matching *Time thresholds, in seconds).
        _quitTick      = 0;
        _quitTime      = 2;
        _climbIdleTick = 0;
        _climbIdleTime = 3;
    }
Example #32
0
 // Initialization: cache the render target and player, then begin playback.
 void Start()
 {
     // Both components live on this GameObject.
     videoPlayer = GetComponent <VideoPlayer>();
     image       = GetComponent <RawImage>();

     // playVideo presumably feeds decoded frames into 'image'; run it as a coroutine.
     StartCoroutine(playVideo());
 }