Example #1
        public bool Add(CameraWindow camwin, DeviceInfo deviceInfo, CameraInfo camera)
        {
            // create video source

            IVideoSource videoSource = DeviceBusiness.Instance.CreateVideoSource(deviceInfo.ProviderName);
            if (videoSource == null)
            {
                return false;
            }
            DeviceDriver device = new DeviceDriver(videoSource);

            if (!RunningDeviceList.ContainsKey(deviceInfo.DeviceId))
            {
                // Device not started yet: initialize it and register both the device and the driver
                deviceInfo.ServiceID = -1;
                device.Init(ref deviceInfo);
                RunningDeviceList.Add(deviceInfo.DeviceId, deviceInfo);
                RunningDriverList.Add(camera.CameraId, device);
            }
            else
            {
                // Device already running: reuse its registered info and add a driver for this camera
                deviceInfo = RunningDeviceList[deviceInfo.DeviceId];
                device.Init(ref deviceInfo);
                RunningDriverList.Add(camera.CameraId, device);
            }

            device.Start(ref camera);
            // camwin.Camera = device;
            return true;
        }
        public Dictionary<int, ChangeSynGroup> GetAllChangeSynGroup(ref string errMessage)
        {
            Database db = DatabaseFactory.CreateDatabase();
            errMessage = "";
            var list = new Dictionary<int, ChangeSynGroup>();
            try
            {
                ChangeSynGroup synGroup;
                DeviceInfo oDevice;
                DataSet ds = ChangeSynGroupDataAccess.GetAllChangeSynGroupInfo(db);
                DataSet dsCamera;
                CameraInfo oCamera;
                for (int i = 0; i < ds.Tables[0].Rows.Count; i++)
                {
                    synGroup = new ChangeSynGroup(ds.Tables[0].Rows[i]);
                    dsCamera = CameraDataAccess.GetCamInfoByChangeSynGroupId(db, synGroup.ChangeSynGroupId);
                    synGroup.ListCamera = new Dictionary<int, CameraInfo>();
                    foreach (DataRow drCam in dsCamera.Tables[0].Rows)
                    {
                        oCamera = new CameraInfo(drCam);
                        synGroup.ListCamera.Add(oCamera.CameraId, oCamera);
                    }

                    list.Add(synGroup.ChangeSynGroupId, synGroup);
                }
                return list;

            }
            catch (Exception ex)
            {
                errMessage = ex.Message + ex.StackTrace;
                Logger.Error("Error Message:" + ex.Message + " Trace:" + ex.StackTrace);
                return null;
            }
        }
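A minimal usage sketch for GetAllChangeSynGroup above, assuming some business-layer object (here called groupBusiness, an illustrative placeholder) exposes the method; the caller checks the null return and surfaces the ref error message.

        // Hedged sketch: consuming GetAllChangeSynGroup; "groupBusiness" is illustrative.
        string errMessage = "";
        Dictionary<int, ChangeSynGroup> groups = groupBusiness.GetAllChangeSynGroup(ref errMessage);
        if (groups == null)
        {
            // The method logs and returns null on failure; errMessage carries the details
            Console.WriteLine("Failed to load sync groups: " + errMessage);
        }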
Example #3
        public static bool MoveMouseToWoWCoords(float x, float y, float z)
        {
            var pseudoVec = new Vector3D(x, y, z); // not really a direction vector; it's the world location we want to click
            IntPtr hwnd = ProcessManager.WowProcess.WindowHandle; // window handle, used to get the client size
            var camera = new CameraInfo();
            // Read the camera block from the game's memory
            uint pAddr2 =
                ProcessManager.WowProcess.ReadUInt((ProcessManager.WowProcess.ReadUInt(
                    ProcessManager.GlobalOffsets.CameraPointer)) +
                        ProcessManager.GlobalOffsets.CameraOffset);
            byte[] bCamera = ProcessManager.WowProcess.ReadBytes(pAddr2, 68);

            //Convert bytes to usable data
            camera.Pos = new Vector3D(BitConverter.ToSingle(bCamera, 8),
                                    BitConverter.ToSingle(bCamera, 12),
                                        BitConverter.ToSingle(bCamera, 16));
            camera.ViewMat = new Matrix(BitConverter.ToSingle(bCamera, 20),
                BitConverter.ToSingle(bCamera, 24), BitConverter.ToSingle(bCamera, 28),
                BitConverter.ToSingle(bCamera, 32), BitConverter.ToSingle(bCamera, 36),
                BitConverter.ToSingle(bCamera, 40), BitConverter.ToSingle(bCamera, 44),
                BitConverter.ToSingle(bCamera, 48), BitConverter.ToSingle(bCamera, 52));
            camera.Foc = BitConverter.ToSingle(bCamera, 64);
            // Get the client window size
            var rc = new Rect();
            GetClientRect(hwnd, ref rc);

            //Vector camera -> object
            Vector3D Diff = pseudoVec - camera.Pos;

            // Bail out if the target is behind the camera plane
            if ((Diff*camera.ViewMat.getFirstColumn) < 0)
            {
                return false;
            }

            Vector3D View = Diff * camera.ViewMat.inverse();
            var Cam = new Vector3D(-View.Y, -View.Z, View.X);

            float fScreenX = (rc.right - rc.left)/2.0f;
            float fScreenY = (rc.bottom - rc.top)/2.0f;
            // Per-axis focal scale derived from the FOV (44°/35° factors below)
            float fTmpX = fScreenX/(float) Math.Tan(((camera.Foc*44.0f)/2.0f)*Deg2Rad);
            float fTmpY = fScreenY/(float) Math.Tan(((camera.Foc*35.0f)/2.0f)*Deg2Rad);

            var pctMouse = new Point();
            pctMouse.X = (int) (fScreenX + Cam.X*fTmpX/Cam.Z);
            pctMouse.Y = (int) (fScreenY + Cam.Y*fTmpY/Cam.Z);

            if (pctMouse.X < 0 || pctMouse.Y < 0 || pctMouse.X > rc.right || pctMouse.Y > rc.bottom)
            {
                return false;
            }

            ProcessManager.CommandManager.MoveMouse(pctMouse.X, pctMouse.Y);
            return true;
        }
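The projection at the end of the method above can be read in isolation: once the target is in camera space, the screen position comes from a per-axis focal scale derived from the FOV. Below is a hedged, self-contained sketch of just that step; the 44/35 degree factors and the axis swizzle follow the example, while the method and parameter names are illustrative.

        // Hedged sketch of the camera-space -> screen-space step only.
        // camX/camY/camZ correspond to the example's (-View.Y, -View.Z, View.X) swizzle.
        static bool CameraSpaceToScreen(float camX, float camY, float camZ, float foc,
                                        float screenW, float screenH, out int px, out int py)
        {
            const float Deg2Rad = (float)(Math.PI / 180.0);
            float halfW = screenW / 2.0f;
            float halfH = screenH / 2.0f;
            // Per-axis focal scale from the FOV, using the example's 44°/35° factors
            float fx = halfW / (float)Math.Tan(((foc * 44.0f) / 2.0f) * Deg2Rad);
            float fy = halfH / (float)Math.Tan(((foc * 35.0f) / 2.0f) * Deg2Rad);
            px = (int)(halfW + camX * fx / camZ);
            py = (int)(halfH + camY * fy / camZ);
            return px >= 0 && py >= 0 && px <= screenW && py <= screenH;
        }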
Example #4
        static void PrintCameraInfo(CameraInfo camInfo)
        {
            StringBuilder newStr = new StringBuilder();
            newStr.Append("\n*** CAMERA INFORMATION ***\n");
            newStr.AppendFormat("Serial number - {0}\n", camInfo.serialNumber);
            newStr.AppendFormat("Camera model - {0}\n", camInfo.modelName);
            newStr.AppendFormat("Camera vendor - {0}\n", camInfo.vendorName);
            newStr.AppendFormat("Sensor - {0}\n", camInfo.sensorInfo);
            newStr.AppendFormat("Resolution - {0}\n", camInfo.sensorResolution);

            Console.WriteLine(newStr);
        }
 public int loadbody(System.IO.BinaryReader br)
 {
     long pos = br.BaseStream.Position;
     generator = br.ReadUInt32();
     int elementcount = br.ReadInt32();
     //objectInformation = new ObjectElement[br.ReadInt32()];
     for (int i = 0; i < elementcount; i++)
     {
         objectInformation.Add(new ObjectElement(br));
     }
     camera = new CameraInfo();
     camera.fromStream(br);
     return Convert.ToInt32(br.BaseStream.Position - pos);
 }
Example #6
        public Camera(float Width, float Height, float zNear, float zFar, CameraInfo CameraInformation)
        {
            info = CameraInformation;

            // Set the perspective and view matrices using the dimensions and camera info passed to the constructor
            cameraMatricies.PerspectiveMatrix = Matrix4.CreatePerspectiveFieldOfView(MathHelper.PiOver4, (Width / Height), zNear, zFar);
            cameraMatricies.ViewMatrix = Matrix4.LookAt(CameraInformation.Pos, CameraInformation.Target, CameraInformation.Up);

            GL.GenBuffers(1, out globalMatrixUBO); //Generate a new Buffer Object
            GL.BindBuffer(BufferTarget.UniformBuffer, globalMatrixUBO);
            GL.BufferData(BufferTarget.UniformBuffer, cameraMatricies.Size, ref cameraMatricies, BufferUsageHint.DynamicDraw);
            GL.BindBuffer(BufferTarget.UniformBuffer, 0);

            GL.BindBufferRange(BufferRangeTarget.UniformBuffer, globalBindingIndex, globalMatrixUBO, IntPtr.Zero, cameraMatricies.Size);
        }
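If the camera moves, the uniform buffer created in the constructor has to be refreshed. A hedged sketch of such an update follows, reusing the cameraMatricies struct and globalMatrixUBO field from above; the method name and parameters are illustrative, not part of the original class.

        // Hedged sketch: re-upload the view matrix whenever the camera moves.
        public void UpdateView(Vector3 eye, Vector3 target, Vector3 up)
        {
            cameraMatricies.ViewMatrix = Matrix4.LookAt(eye, target, up);
            GL.BindBuffer(BufferTarget.UniformBuffer, globalMatrixUBO);
            // Overwrite the existing storage in place instead of reallocating it
            GL.BufferSubData(BufferTarget.UniformBuffer, IntPtr.Zero, cameraMatricies.Size, ref cameraMatricies);
            GL.BindBuffer(BufferTarget.UniformBuffer, 0);
        }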
Example #7
        public void Load()
        {
            var cam = new CameraInfo();
            cam.Load();
            CurrentCameraResolution = SizeToString(cam.CurrentCameraResolution);
            IsFocusAtPointSupported = cam.HasFocusAtPoint;
            IsFocusSupported = cam.HasFocus;
            PhotoPixelLayout = GetPixelLayoutStrings(cam.PhotoPixelLayout);
            SupportedResolutions = GetSupportedResolutions(cam.SupportedResolutions);

            RaisePropertyChanged("CurrentCameraResolution");
            RaisePropertyChanged("IsFocusAtPointSupported");
            RaisePropertyChanged("IsFocusSupported");
            RaisePropertyChanged("PhotoPixelLayout");
            RaisePropertyChanged("SupportedResolutions");
        }
Example #8
		abstract protected void SetOutput(CameraInfo info, int i);
Example #9
    List <LinkedListNode <IShareThingHandle> > temRemoves = new List <LinkedListNode <IShareThingHandle> >(); // temporary list used when collecting handles to remove
    void Sample()
    {
        CameraHandle handle = m_share.Get <CameraHandle>();

        if (handle == null || m_ca == null || !m_cache || Time.unscaledDeltaTime == 0)
        {
            return;
        }

        // if the current camera needs a follow target to do its computation and there is none, bail out
        if (m_follow == null && handle.NeedFollow)
        {
            return;
        }

        CameraInfo info = handle.m_info;

        // cameras not affected by the battle camera need their parameters reset
        if (!info.isBattleDisturb)
        {
            m_lookPosOffset     = Vector3.zero;
            m_lastLookPosOffset = Vector3.zero;
            m_disRate           = 1;
        }

        // compute camera position, orientation and fov
        // hard-cut case (no blending)
        if (handle.m_isDuation && handle.IsDurationInvalid)
        {
            m_isSampleDurationInvalid = true;
            ResetVelocity();
            m_smoothLookPos = m_newLookPos = m_lastLookPos = (handle.LookPos + m_lookPosOffset);
            m_lastDistance  = handle.Distance * m_disRate;
            //m_lastLookPosOffset = m_lookPosOffset;
            m_tran.eulerAngles = handle.Euler;
            m_tran.position    = m_lastLookPos - m_tran.forward * m_lastDistance;
//             m_tran.position = handle.Pos;
//             m_tran.forward = m_newLookPos - m_tran.position;
            m_ca.fieldOfView = handle.Fov;
        }
        // blended (smoothed) case
        else
        {
            m_isSampleDurationInvalid = false;
            float delta  = Time.unscaledDeltaTime;
            float factor = handle.m_isDuation? handle.Factor:1;

            // compute the look-at point
            m_newLookPos = handle.LookPos;

            // compute the smoothing speed
            float v1 = Mathf.Lerp(handle.m_duration, 0, info.animationCurve.Evaluate(factor));
            float v2 = 0;

            if (info.useDisSmooth) // both the timed transition and distance-based smoothing are in play
            {
                Vector3 link = m_newLookPos + m_lookPosOffset - m_lastLookPos;
                float   dis  = link.magnitude;
                v2 = MathUtil.easeInExpo(0, info.disSmooth, Mathf.Lerp(1, 0, dis / info.disSmoothLimit)); // an exponential ease makes the transition smoother
            }
            m_smoothVelocity = Mathf.Max(v1, v2);


            if (Mathf.Abs(m_smoothVelocity) > Distance_Smooth_Min) // very small values cause jitter, so fall back to setting values directly
            {
                m_smoothLookPos     = Vector3.SmoothDamp(m_lastLookPos, m_newLookPos + m_lookPosOffset, ref m_curLookPosVelocity, m_smoothVelocity, float.MaxValue, delta);
                m_lastLookPosOffset = m_lookPosOffset;
                // smooth the distance to the followed character
                m_lastDistance = Mathf.SmoothDamp(m_lastDistance, handle.Distance * m_disRate, ref m_curDistanceVelocity, v1, float.MaxValue, delta);
                // smooth the rotation
                m_tran.rotation = Quaternion.Slerp(m_tran.rotation, handle.Rotate, (delta * factor) / v1);

                //                 // smooth the look-at point
                //                 m_smoothLookPos = Vector3.SmoothDamp(m_lastLookPos, m_newLookPos, ref m_curLookPosVelocity, m_smoothVelocity, float.MaxValue, delta);
                //
                //                 // smooth the camera position
                //                 m_tran.position = Vector3.SmoothDamp(m_tran.position, handle.Pos, ref m_curPosVelocity, m_smoothVelocity, float.MaxValue, delta);
                //
                //                 // since both points above are smoothed, the direction needs no smoothing
                //                 m_tran.forward = m_smoothLookPos - m_tran.position;
            }
            else    // set directly
            {
                m_lastLookPosOffset = Vector3.Slerp(m_lastLookPosOffset, m_lookPosOffset, delta);
                m_smoothLookPos     = m_newLookPos + m_lastLookPosOffset;
                m_lastDistance      = handle.Distance * m_disRate;
                m_tran.rotation     = handle.Rotate;
                //                 m_smoothLookPos = m_newLookPos;
                //                 m_tran.position  = handle.Pos;
                //                 m_tran.forward = m_smoothLookPos - m_tran.position;
            }

            // Position smoothing: the current position is not smoothed directly; the look-at point, distance and direction are smoothed and the position is derived from them, so the view itself appears to blend smoothly.
            m_lastLookPos = m_smoothLookPos;

            m_tran.position = m_smoothLookPos - m_tran.forward * m_lastDistance;


            // smooth the field of view
            float newFov = handle.Fov;
            if (Mathf.Abs(newFov - m_ca.fieldOfView) > Fov_Smooth_Min)
            {
                m_ca.fieldOfView = Mathf.SmoothDampAngle(m_ca.fieldOfView, newFov, ref m_curFovVelocity, m_smoothVelocity, float.MaxValue, delta);
            }

            ++m_samlpeCounter;
        }


        // 3: detect finished transitions (their priority must be updated) and collect handles that need to be removed
        LinkedListNode <IShareThingHandle> node = m_share.m_handles.First;
        CameraHandle tempHandle;
        bool         needSort = false;

        do
        {
            tempHandle = node.Value as CameraHandle;
            // lower a pinned (topmost) priority back to the normal, non-pinned priority
            if (tempHandle.m_isDuation)
            {
                if (tempHandle.m_info.isDurationInvalid || tempHandle.m_firstDurationInvalid ||                    // hard cut
                    (Time.unscaledTime - tempHandle.m_beginTime >= tempHandle.m_duration || handle != tempHandle)) // time is up, or this is not the topmost handle
                {
                    needSort = true;
                    tempHandle.m_isDuation = false;
                    tempHandle.m_priority  = tempHandle.m_info.priority;

                    if (tempHandle == handle)
                    {
                        CameraMgr.instance.m_isDuration = false;
                    }
                }
            }

            // destroy finished segments
            if ((tempHandle == handle && !handle.m_isDuation && info.isOverAfterDuration) ||                             // non-permanent handle whose transition has finished
                (tempHandle != handle && tempHandle.m_info.durationType == CameraInfo.enDurationType.overWhenOverlay) || // removed as soon as it is no longer topmost
                (tempHandle.m_info.duration != -1 && tempHandle.CurTime >= tempHandle.m_info.duration)                   // duration has elapsed
                )
            {
                temRemoves.Add(node);
            }
            node = node.Next;
        } while (node != null);
        if (needSort)
        {
            m_share.Sort();
        }
        if (temRemoves.Count != 0)
        {
            m_share.Remove(temRemoves);
            temRemoves.Clear();
        }
    }
 public VideoPlay_ZLVideo(VideoInfo vInfo, CameraInfo cInfo)
 {
     CurrentVideoInfo  = vInfo;
     CurrentCameraInfo = cInfo;
 }
Example #11
 private void subscribeCamera(CameraInfo argCi)
 {
     // Subscribe to the simulator camera, using a handler
     // that will only update the GUI if this camera is
     // selected (because there is currently no way to 
     // unsubscribe from a camera).
     //if (ci.Camera.IsRealTimeCamera)
     //{
     CameraInfo ci = argCi;
     ci.Port = new Port<System.Drawing.Bitmap>();
     ci.Camera.Subscribe(ci.Port);
     Interleave interleave = new Interleave(new ExclusiveReceiverGroup(), new ConcurrentReceiverGroup());
     interleave.CombineWith(
         new Interleave(new ExclusiveReceiverGroup(
             Arbiter.Receive(true, ci.Port,
             delegate(System.Drawing.Bitmap inbmp)
             {
                 if (interleave.PendingExclusiveCount <= 1 && curCamera == ci)
                     if (DateTime.Now.Subtract(lastFrameTime).Milliseconds >= frameInterval)
                     {
                         lastFrameTime = DateTime.Now;
                         updateImageDisplay(inbmp);
                     }
             })),
         new ConcurrentReceiverGroup()));
     Arbiter.Activate(throttledQueue, interleave);
     //}
     //else
     //{
     //    // If it's not a real time camera, we have to start a loop
     //    // to query it
     //    new Thread(new ThreadStart(delegate()
     //    {
     //        var resultPort = new PortSet<System.Drawing.Bitmap, Exception>();
     //        while (shouldStay)
     //        {
     //            if (curCamera == ci)
     //            {
     //                // The throttledQueue has only 1 thread, so concurrent execution
     //                // will not happen here if the updateImageDisplay handler cannot
     //                // keep up with the rate at which we're querying frames.
     //                ci.Camera.CaptureScene(System.Drawing.Imaging.ImageFormat.Bmp, resultPort);
     //                Arbiter.Activate(throttledQueue, Arbiter.Choice(resultPort,
     //                    updateImageDisplay,
     //                    delegate(Exception e)
     //                    {
     //                        Console.WriteLine(e);
     //                    }));
     //            }
     //            Thread.Sleep(frameInterval);
     //        }
     //    })).Start();
     //}
 }
Example #12
 internal static extern IntPtr rs2_get_device_info(DeviceHandle device, CameraInfo info, out RsError error);
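The extern above returns a pointer to a native string. A hedged wrapper sketch follows, assuming librealsense keeps ownership of the returned NUL-terminated ANSI string (so it is only read, never freed, on the managed side); the wrapper name is illustrative.

 // Hedged sketch: marshal the native const char* into a managed string.
 public static string GetDeviceInfo(DeviceHandle device, CameraInfo info)
 {
     RsError error;
     IntPtr p = rs2_get_device_info(device, info, out error);
     return p == IntPtr.Zero ? null : System.Runtime.InteropServices.Marshal.PtrToStringAnsi(p);
 }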
 public void SetPlayInfo(CameraInfo cInfo, VideoPlaySetting videoPlaySetting)
 {
     SetPlayInfo(CurrentV, cInfo.VideoInfo, cInfo, videoPlaySetting);
 }
Example #14
 private void multiplexer1_DoubleCamera(bool isFullScreen, CameraInfo camera)
 {
     this.FullScreen(isFullScreen);
 }
Example #15
 public VideoPlay_BuleSky(IntPtr intPtr, VideoInfo vInfo, CameraInfo cInfo)
 {
     VideoPlayHandle   = intPtr;
     CurrentVideoInfo  = vInfo;
     CurrentCameraInfo = cInfo;
 }
 protected override bool IsValid(CameraInfo camera) => camera?.Canon != null;
Example #17
        static void Main(string[] args)
        {
            PrintBuildInfo();

            const int NumImages          = 10;
            bool      useSoftwareTrigger = true;

            ManagedBusManager busMgr = new ManagedBusManager();
            uint numCameras          = busMgr.GetNumOfCameras();

            Console.WriteLine("Number of cameras detected: {0}", numCameras);

            // Finish if there are no cameras
            if (numCameras == 0)
            {
                Console.WriteLine("Not enough cameras!");
                Console.WriteLine("Press Enter to exit...");
                Console.ReadLine();
                return;
            }

            ManagedPGRGuid guid = busMgr.GetCameraFromIndex(0);

            ManagedCamera cam = new ManagedCamera();

            cam.Connect(guid);

            // Power on the camera
            const uint CameraPower      = 0x610;
            const uint CameraPowerValue = 0x80000000;

            cam.WriteRegister(CameraPower, CameraPowerValue);

            const Int32 MillisecondsToSleep  = 100;
            uint        cameraPowerValueRead = 0;

            // Wait for camera to complete power-up
            do
            {
                System.Threading.Thread.Sleep(MillisecondsToSleep);

                cameraPowerValueRead = cam.ReadRegister(CameraPower);
            } while ((cameraPowerValueRead & CameraPowerValue) == 0);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            if (!useSoftwareTrigger)
            {
                // Check for external trigger support
                TriggerModeInfo triggerModeInfo = cam.GetTriggerModeInfo();
                if (triggerModeInfo.present != true)
                {
                    Console.WriteLine("Camera does not support external trigger!");
                    Console.WriteLine("Press enter to exit...");
                    Console.ReadLine();
                    return;
                }
            }

            // Get current trigger settings
            TriggerMode triggerMode = cam.GetTriggerMode();

            // Set camera to trigger mode 0
            triggerMode.onOff     = true;
            triggerMode.mode      = 0;
            triggerMode.parameter = 0;

            if (useSoftwareTrigger)
            {
                // A source of 7 means software trigger
                triggerMode.source = 7;
            }
            else
            {
                // Triggering the camera externally using source 0.
                triggerMode.source = 0;
            }

            // Set the trigger mode
            cam.SetTriggerMode(triggerMode);

            // Poll to ensure camera is ready
            bool retVal = PollForTriggerReady(cam);

            if (retVal != true)
            {
                Console.WriteLine("Poll for trigger read failed!");
                Console.WriteLine("Press enter to exit...");
                Console.ReadLine();
                return;
            }

            // Get the camera configuration
            FC2Config config = cam.GetConfiguration();

            // Set the grab timeout to 5 seconds
            config.grabTimeout = 5000;

            // Set the camera configuration
            cam.SetConfiguration(config);

            // Camera is ready, start capturing images
            cam.StartCapture();

            if (useSoftwareTrigger)
            {
                if (CheckSoftwareTriggerPresence(cam) == false)
                {
                    Console.WriteLine("SOFT_ASYNC_TRIGGER not implemented on this camera!  Stopping application\n");
                    Console.WriteLine("Press enter to exit...");
                    Console.ReadLine();
                    return;
                }
            }
            else
            {
                Console.WriteLine("Trigger the camera by sending a trigger pulse to GPIO%d.\n",
                                  triggerMode.source);
            }

            ManagedImage rawImage = new ManagedImage();

            for (int iImageCount = 0; iImageCount < NumImages; iImageCount++)
            {
                if (useSoftwareTrigger)
                {
                    // Check that the trigger is ready
                    retVal = PollForTriggerReady(cam);

                    Console.WriteLine("Press the Enter key to initiate a software trigger.\n");
                    Console.ReadLine();

                    // Fire software trigger
                    retVal = FireSoftwareTrigger(cam);
                    if (retVal != true)
                    {
                        Console.WriteLine("Error firing software trigger!");
                        Console.WriteLine("Press enter to exit...");
                        Console.ReadLine();
                        return;
                    }
                }

                try
                {
                    // Retrieve an image
                    cam.RetrieveBuffer(rawImage);
                }
                catch (FC2Exception ex)
                {
                    Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                    continue;
                }

                Console.WriteLine(".\n");
            }

            Console.WriteLine("Finished grabbing images");

            // Stop capturing images
            cam.StopCapture();

            // Turn off trigger mode
            triggerMode.onOff = false;
            cam.SetTriggerMode(triggerMode);

            // Disconnect the camera
            cam.Disconnect();

            Console.WriteLine("Done! Press enter to exit...");
            Console.ReadLine();
        }
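Main above calls PollForTriggerReady and FireSoftwareTrigger without showing them. A hedged sketch of both follows, modeled on the register-based convention used by FlyCapture2 trigger samples; the 0x62C software trigger register and the 0x80000000 fire bit are assumptions carried over from that convention rather than values defined anywhere in the code above.

        // Hedged sketch: helpers assumed by Main, using the same ReadRegister/WriteRegister
        // calls as the power-up code above. Register 0x62C and the 0x80000000 fire bit
        // follow the usual FlyCapture2 trigger-sample convention (assumption).
        static bool PollForTriggerReady(ManagedCamera cam)
        {
            const uint SoftwareTrigger = 0x62C;
            uint regVal = 0;
            do
            {
                regVal = cam.ReadRegister(SoftwareTrigger);
            } while ((regVal >> 31) != 0); // busy while the top bit is set
            return true;
        }

        static bool FireSoftwareTrigger(ManagedCamera cam)
        {
            const uint SoftwareTrigger = 0x62C;
            const uint FireValue       = 0x80000000;
            cam.WriteRegister(SoftwareTrigger, FireValue);
            return true;
        }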
Example #18
        private void UpdateNodeCurrentInformation(ManagedCameraBase cam, CameraInfo camInfo)
        {
            if (camInfo.iidcVersion < iidcVersion)
            {
                m_nodeCurrentsValue.Text = "N/A";
                return;
            }

            const uint CurrentReg    = 0x1A58;
            uint       currentRegVal = 0;

            try
            {
                currentRegVal = cam.ReadRegister(CurrentReg);
            }
            catch (FC2Exception ex)
            {
                m_nodeCurrentsValue.Text = "N/A";
                ex.Dispose();
                return;
            }

            if (currentRegVal >> 31 == 0)
            {
                m_nodeCurrentsValue.Text = "N/A";
                return;
            }

            int numAvailableCurrents = (int)((currentRegVal & 0x00FFF000) >> 12);

            if (numAvailableCurrents == 0)
            {
                m_nodeCurrentsValue.Text = "N/A";
                return;
            }

            const uint CurrentOffsetReg    = 0x1A5C;
            uint       currentOffsetRegVal = 0;

            try
            {
                currentOffsetRegVal = cam.ReadRegister(CurrentOffsetReg);
            }
            catch (FC2Exception ex)
            {
                m_nodeCurrentsValue.Text = "N/A";
                ex.Dispose();
                return;
            }

            List <double> currentsList        = new List <double>();
            uint          properCurrentOffset = (currentOffsetRegVal * 4) & 0xFFFF;

            for (uint i = 0; i < numAvailableCurrents; i++)
            {
                uint currCurrentOffset = properCurrentOffset + (i * 4);
                uint currCurrentRegVal = 0;
                try
                {
                    currCurrentRegVal = cam.ReadRegister(currCurrentOffset);
                }
                catch (FC2Exception ex)
                {
                    m_nodeCurrentsValue.Text = "N/A";
                    ex.Dispose();
                    return;
                }

                double current = MathUtilities.Convert32bitIEEEToFloat(currCurrentRegVal);
                currentsList.Add(current);
            }

            string currentStr = string.Empty;

            foreach (double current in currentsList)
            {
                currentStr += string.Format(" | {0:0.000}A", current);
            }

            m_nodeCurrentsValue.Text = currentStr.Substring(3);
        }
Example #19
 internal static extern int rs2_supports_device_info(IntPtr device, CameraInfo info, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error);
Example #20
 internal static extern IntPtr rs2_get_sensor_info(IntPtr sensor, CameraInfo info, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error);
Example #21
		protected override void SetOutput(CameraInfo info, int i)
		{
			FVendor[i] = info.vendorName;
			FModel[i] = info.modelName;
			FSerial[i] = (int)info.serialNumber;
			FSensor[i] = info.sensorInfo;
			FResolution[i] = info.sensorResolution;
			FColor[i] = info.isColorCamera;
		}
Example #22
 public abstract void Execute(HexInfo hexInfo, PositionInfo positionInfo, CameraInfo camInfo, SelectionController selectionController);
Example #23
 public void UpdateDeviceInfo(CameraInfo cameraStatus)
 {
     CameraInfo = cameraStatus;
 }
Example #24
 private int CheckCamera(CameraInfo newinfo, CameraInfo oldinfo)
 {
     return(0);
 }
Example #25
 ///<exclude/>
 public bool Equals(CameraInfo other)
 {
     if (ReferenceEquals(null, other)) return false;
     if (ReferenceEquals(this, other)) return true;
     return other._FocalLength.Equals(_FocalLength) && other._PrincipalPoint.Equals(_PrincipalPoint) && other._K1.Equals(_K1) && other._K2.Equals(_K2) && other._P1.Equals(_P1) && other._P2.Equals(_P2);
 }
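Overriding Equals as above usually goes hand in hand with a matching GetHashCode. Below is a hedged sketch combining the same six fields; it assumes each field type provides a sensible GetHashCode of its own.

 // Hedged sketch: hash code consistent with the Equals above (same six fields).
 public override int GetHashCode()
 {
     unchecked
     {
         int hash = _FocalLength.GetHashCode();
         hash = (hash * 397) ^ _PrincipalPoint.GetHashCode();
         hash = (hash * 397) ^ _K1.GetHashCode();
         hash = (hash * 397) ^ _K2.GetHashCode();
         hash = (hash * 397) ^ _P1.GetHashCode();
         hash = (hash * 397) ^ _P2.GetHashCode();
         return hash;
     }
 }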
Example #26
        private static CameraInfo GetOrCreateCameraInfo(RadCartesianChart chart)
        {
            CameraInfo cameraInfo = (CameraInfo)chart.GetValue(CameraInfoProperty);
            if (cameraInfo == null)
            {
                cameraInfo = new CameraInfo();
                chart.SetValue(CameraInfoProperty, cameraInfo);
            }

            return cameraInfo;
        }
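GetOrCreateCameraInfo relies on an attached dependency property named CameraInfoProperty that the snippet does not show. Below is a hedged sketch of how such a property is typically registered in WPF; the owner type name is purely illustrative and may not match the real library.

        // Hedged sketch: the attached property assumed by GetOrCreateCameraInfo.
        // "ChartCameraHelper" is an illustrative owner type, not the library's actual class.
        private static readonly DependencyProperty CameraInfoProperty =
            DependencyProperty.RegisterAttached(
                "CameraInfo",                // property name
                typeof(CameraInfo),          // property type
                typeof(ChartCameraHelper),   // owner type (assumed)
                new PropertyMetadata(null)); // no camera info until first access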
Example #27
        static void Main(string[] args)
        {
            PrintBuildInfo();

            const int NumImages = 50;

            Program program = new Program();

            //
            // Initialize BusManager and retrieve number of cameras detected
            //
            ManagedBusManager busMgr = new ManagedBusManager();
            uint numCameras          = busMgr.GetNumOfCameras();

            Console.WriteLine("Number of cameras detected: {0}", numCameras);

            //
            // Check to make sure at least two cameras are connected before
            // running example
            //
            if (numCameras < 2)
            {
                Console.WriteLine("Insufficient number of cameras.");
                Console.WriteLine("Make sure at least two cameras are connected for example to run.");
                Console.WriteLine("Press Enter to exit.");
                Console.ReadLine();
                return;
            }

            //
            // Initialize an array of cameras
            //
            // *** NOTES ***
            // The size of the array is equal to the number of cameras detected.
            // The array of cameras will be used for connecting, configuring,
            // and capturing images.
            //
            ManagedCamera[] cameras = new ManagedCamera[numCameras];

            //
            // Prepare each camera to acquire images
            //
            // *** NOTES ***
            // For pseudo-simultaneous streaming, each camera is prepared as if it
            // were just one, but in a loop. Notice that cameras are selected with
            // an index. We demonstrate pseudo-simultaneous streaming because true
            // simultaneous streaming would require multiple processes or threads,
            // which is too complex for an example.
            //
            for (uint i = 0; i < numCameras; i++)
            {
                cameras[i] = new ManagedCamera();

                ManagedPGRGuid guid = busMgr.GetCameraFromIndex(i);

                // Connect to a camera
                cameras[i].Connect(guid);

                // Get the camera information
                CameraInfo camInfo = cameras[i].GetCameraInfo();
                PrintCameraInfo(camInfo);

                try
                {
                    // Turn trigger mode off
                    TriggerMode trigMode = new TriggerMode();
                    trigMode.onOff = false;
                    cameras[i].SetTriggerMode(trigMode);

                    // Turn Timestamp on
                    EmbeddedImageInfo imageInfo = new EmbeddedImageInfo();
                    imageInfo.timestamp.onOff = true;
                    cameras[i].SetEmbeddedImageInfo(imageInfo);
                }
                catch (System.Exception ex)
                {
                    Console.WriteLine("Error configuring camera : {0}", ex.Message);
                    Console.WriteLine("Press any key to exit...");
                    Console.ReadLine();
                    return;
                }

                try
                {
                    // Start streaming on camera
                    cameras[i].StartCapture();
                }
                catch (System.Exception ex)
                {
                    Console.WriteLine("Error starting camera : {0}", ex.Message);
                    Console.WriteLine("Press any key to exit...");
                    Console.ReadLine();
                    return;
                }
            }

            //
            // Retrieve images from all cameras
            //
            // *** NOTES ***
            // In order to work with simultaneous camera streams, nested loops are
            // needed. It is important that the inner loop be the one iterating
            // through the cameras; otherwise, all images will be grabbed from a
            // single camera before grabbing any images from another.
            //
            ManagedImage tempImage = new ManagedImage();

            for (int imageCnt = 0; imageCnt < NumImages; imageCnt++)
            {
                for (int camCount = 0; camCount < numCameras; camCount++)
                {
                    try
                    {
                        // Retrieve an image
                        cameras[camCount].RetrieveBuffer(tempImage);
                    }
                    catch (System.Exception ex)
                    {
                        Console.WriteLine("Error retrieving buffer : {0}", ex.Message);
                        Console.WriteLine("Press any key to exit...");
                        Console.ReadLine();
                        return;
                    }

                    // Display the timestamps of the images grabbed for each camera
                    TimeStamp timeStamp = tempImage.timeStamp;
                    Console.Out.WriteLine("Camera {0} - Frame {1} - TimeStamp {2} {3}", camCount, imageCnt, timeStamp.cycleSeconds, timeStamp.cycleCount);
                }
            }

            //
            // Stop streaming for each camera
            //
            for (uint i = 0; i < numCameras; i++)
            {
                try
                {
                    cameras[i].StopCapture();
                    cameras[i].Disconnect();
                }
                catch (System.Exception ex)
                {
                    Console.WriteLine("Error cleaning up camera : {0}", ex.Message);
                    Console.WriteLine("Press any key to exit...");
                    Console.ReadLine();
                    return;
                }
            }

            Console.WriteLine("Press enter to exit...");
            Console.ReadLine();
        }
Example #28
        private void AddCamera(CameraEntity camera, bool select)
        {
            if (camera != null)
            {
                // Store camera entity and subscribe to it to receive images
                CameraInfo ci = new CameraInfo() { Camera = camera };
                subscribeCamera(ci);

                // Add it to the camera list
                Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal,
                    new ThreadStart(delegate()
                    {
                        SimCamBox.Items.Add(new ComboBoxItem()
                        {
                            Content = (camera.EntityState != null ?
                                        camera.EntityState.Name :
                                        "Unnamed camera"),
                            Tag = ci
                        });
                        if (select)
                            SimCamBox.SelectedIndex = SimCamBox.Items.Count - 1;

                        // Trigger the SimulatorFound event if this is the first camera found
                        if (SimCamBox.Items.Count == 1)
                            SimulatorFound.Invoke(this, new EventArgs());
                    }));
            }
        }
Example #29
 abstract protected void SetOutput(CameraInfo info, int i);
Example #30
        private void OnSimCamChange(object sender, SelectionChangedEventArgs e)
        {
            try
            {
                if (SimCamBox.SelectedItem != null)
                {
                    ComboBoxItem item = SimCamBox.SelectedItem as ComboBoxItem;
                    if (item != null && item.Tag != null && item.Tag is CameraInfo)
                    {
                        // Set the current camera, subscribe it if it has not yet
                        // been subscribed, and set its frame update rate.
                        curCamera = (CameraInfo)item.Tag;

                        // Clear the display in case the new images are smaller
                        initBitmap();
                    }
                }
            }
            catch (Exception err)
            {
                GUIUtilities.ReportUnexpectedException(err);
            }
        }
    public static void DrawCameraInfo(CameraInfo info, string name = "", bool isDefault = false, bool sync = false)
    {
        EditorGUILayoutEx.FadeArea area;
        Color tmp1 = GUI.color;
        Color tmp2;

        if (isDefault)
        {
            area      = EditorGUILayoutEx.instance.BeginFadeArea(info.isExpand, "", name + "_camera", EditorStyleEx.BoxStyle); //
            tmp2      = GUI.color;                                                                                             // required by BeginFadeArea
            GUI.color = tmp1;                                                                                                  // required by BeginFadeArea
            using (new AutoBeginHorizontal())
            {
                if (GUILayout.Button(name, EditorGUILayoutEx.defaultLabelStyle))
                {
                    info.isExpand = !info.isExpand;
                }

                if (isDefault && GUILayout.Button(EditorGUIUtility.IconContent(CameraMgr.instance.m_curCameraInfo == info ? "preAudioLoopOn" : "preAudioLoopOff"), EditorStyles.toolbarButton, GUILayout.Width(25)))
                {
                    CameraMgr.instance.Add(info, false, CameraInfo.Camera_Editor_Priority); // use a slightly higher priority
                    return;
                }
            }
        }
        else
        {
            area          = EditorGUILayoutEx.instance.BeginFadeArea(info.isExpand, "镜头参数", name + "_camera", EditorStyleEx.BoxStyle); //
            info.isExpand = area.open;
            tmp2          = GUI.color;                                                                                                 // required by BeginFadeArea
            GUI.color     = tmp1;                                                                                                      // required by BeginFadeArea
        }


        if (area.Show())      //using (AutoEditorToggleGroup tg = new AutoEditorToggleGroup(info.isExpand, "默认镜头"))
        {
            GUI.color = tmp2; // required by BeginFadeArea

            bool needSample = false;
            // 1: camera transform parameters

            EditorGUI.BeginChangeCheck();
            info.lookType = (CameraInfo.enLookType)EditorGUILayout.Popup("类型", (int)info.lookType, CameraInfo.LookTypeName);
            if (info.NeedShowRefPos)
            {
                using (new AutoBeginHorizontal())
                {
                    EditorGUILayout.LabelField("看的点(白)", info.refPos.ToString());

                    // in some states the reference point cannot be computed automatically, so provide a sync button
                    if (GUILayout.Button("同步到跟随者", GUILayout.Width(100)))
                    {
                        info.refPos = CameraMgr.instance.GetFollowPos();
                    }
                }
            }

            using (new AutoBeginHorizontal())
            {
                EditorGUILayout.PrefixLabel(info.NeedShowRefPos ? "偏移(黄)" : "偏移");
                info.offset = EditorGUILayout.Vector3Field(GUIContent.none, info.offset, GUILayout.Height(18));
            }
            info.verticalAngle   = EditorGUILayout.Slider("高度角", info.verticalAngle, -90f, 90f);
            info.horizontalAngle = EditorGUILayout.Slider("水平角", info.horizontalAngle, 0f, 360f);
            info.fov             = EditorGUILayout.Slider("视野", info.fov, 30f, 90f);
            info.distance        = EditorGUILayout.Slider("距离", info.distance, 3f, 50f);

            info.blur            = EditorGUILayout.Slider("模糊程度", info.blur, 0f, 5f);
            info.blurDuration    = EditorGUILayout.Slider("模糊总时间", info.blurDuration, 0f, 50f);
            info.blurBeginSmooth = EditorGUILayout.Slider("开始模糊时间", info.blurBeginSmooth, 0f, 1f);
            info.blurEndSmooth   = EditorGUILayout.Slider("结束模糊时间", info.blurEndSmooth, 0f, 1f);
            using (new AutoBeginHorizontal())
            {
                EditorGUILayout.PrefixLabel("模糊偏移");
                info.blurOffset = EditorGUILayout.Vector3Field(GUIContent.none, info.blurOffset, GUILayout.Height(18));
            }


            // parameters specific to framing both the followed object and the enemy
            if (info.lookType == CameraInfo.enLookType.betweenTwo)
            {
                using (new AutoBeginHorizontal())
                {
                    info.useBetweenTwoLimit = EditorGUILayout.Toggle("两者距离限制", info.useBetweenTwoLimit, GUILayout.ExpandWidth(false));
                    if (info.useBetweenTwoLimit)
                    {
                        info.betweenTwoLimit = EditorGUILayout.Slider(GUIContent.none, info.betweenTwoLimit, 0f, 30f);
                    }
                }
            }
            // parameters specific to the fixed-stare mode
            if (info.lookType == CameraInfo.enLookType.stillLook)
            {
                using (new AutoBeginHorizontal())
                {
                    info.useStilllookLimit = EditorGUILayout.Toggle("盯着最远距离", info.useStilllookLimit, GUILayout.ExpandWidth(false));
                    if (info.useStilllookLimit)
                    {
                        info.stillLookLimit = EditorGUILayout.Slider(GUIContent.none, info.stillLookLimit, 0f, 30f);
                    }
                }
            }
            // camera rail (path)
            if (info.lookType == CameraInfo.enLookType.path)
            {
                info.cameraPath = (CameraPath)EditorGUILayout.ObjectField("轨道", info.cameraPath, typeof(CameraPath), true);
                info.pathLag    = EditorGUILayout.Slider("轨道镜头偏移", info.pathLag, -10, 10);
            }

            // follow the object while staring at a target
            if (info.lookType == CameraInfo.enLookType.followBehind)
            {
                info.targetId = EditorGUILayout.TextField("目标的角色id", info.targetId);
                info.bone     = EditorGUILayout.TextField("骨骼路径", info.bone);

                using (new AutoBeginHorizontal())
                {
                    EditorGUILayout.PrefixLabel("偏移");
                    info.bornOffset = EditorGUILayout.Vector3Field(GUIContent.none, info.bornOffset, GUILayout.Height(18));
                }
            }

            needSample = EditorGUI.EndChangeCheck(); // if anything changed, re-sample

            // 2: transition and follow-control parameters
            if (!isDefault)
            {
                info.durationType = (CameraInfo.enDurationType)EditorGUILayout.Popup("置顶时渐变策略", (int)info.durationType, CameraInfo.DuratioTypeName);
            }
            info.isDurationInvalid = EditorGUILayout.Toggle("不渐变", info.isDurationInvalid);
            if (!info.isDurationInvalid)
            {
                info.durationSmooth = EditorGUILayout.Slider("渐变时间", info.durationSmooth, 0f, 10f);
                info.animationCurve = EditorGUILayout.CurveField("渐变曲线", info.animationCurve, GUILayout.Width(300f), GUILayout.Height(30f));
            }

            if (!isDefault)
            {
                info.isOverAfterDuration = EditorGUILayout.Toggle("渐变完结束", info.isOverAfterDuration);
                info.duration            = EditorGUILayout.FloatField("结束时间", info.duration);
                using (new AutoBeginHorizontal())
                {
                    info.useOverDuration = EditorGUILayout.Toggle("结束渐变时间", info.useOverDuration, GUILayout.ExpandWidth(false));
                    if (info.useOverDuration)
                    {
                        info.overDuationSmooth = EditorGUILayout.Slider(GUIContent.none, info.overDuationSmooth, 0f, 10f);
                    }
                }
            }

            EditorGUI.BeginChangeCheck();
            using (new AutoBeginHorizontal())
            {
                info.uselock = EditorGUILayout.Toggle(info.uselock ? "锁定方向(黄线)" : "锁定方向", info.uselock, GUILayout.ExpandWidth(false));
                if (info.uselock)
                {
                    //info.m_lockEuler = EditorGUILayout.Vector3Field(GUIContent.none, info.m_lockEuler, GUILayout.Height(18),GUILayout.ExpandWidth(true));
                    float yEuler = EditorGUILayout.Slider(GUIContent.none, info.lockEuler.y, 0f, 360f);
                    if (yEuler != info.lockEuler.y)
                    {
                        info.lockEuler = new Vector3(info.lockEuler.x, yEuler, info.lockEuler.z);
                    }
                }
            }
            using (new AutoEditorTipButton("看着的点距离衰减计算出的渐变速度(目的是为了让跟随对象离相机越远相机跟随越快,而离相机越进则跟随越慢),注意和渐变过程的计算出的渐变速度是取两者的最大值(也就是取慢的那个)"))
                info.useDisSmooth = EditorGUILayout.Toggle("距离渐变", info.useDisSmooth);
            if (info.useDisSmooth)
            {
                info.disSmooth      = EditorGUILayout.FloatField("距离渐变值", info.disSmooth);
                info.disSmoothLimit = EditorGUILayout.FloatField("距离渐变距离", info.disSmoothLimit);
            }
            needSample = EditorGUI.EndChangeCheck() || needSample; // lock-related parameter changes also need a re-sample


            // 3: re-sample if needed
            if (needSample && sync)
            {
                CameraHandle handle = CameraMgr.instance.Set(info, CameraInfo.Camera_Editor_Priority); // use a slightly higher priority
            }
        }
        EditorGUILayoutEx.instance.EndFadeArea();
    }
Example #32
 internal static extern int rs2_supports_device_info(DeviceHandle device, CameraInfo info, out RsError error);
Example #33
        private void Form1_Load(object sender, EventArgs e)
        {
            Hide();

            CameraSelectionDialog camSlnDlg = new CameraSelectionDialog();
            bool retVal = camSlnDlg.ShowModal();

            if (retVal)
            {
                try
                {
                    ManagedPGRGuid[] selectedGuids = camSlnDlg.GetSelectedCameraGuids();
                    if (selectedGuids.Length == 0)
                    {
                        Debug.WriteLine("No cameras selected!");
                        Close();
                        return;
                    }

                    ManagedPGRGuid guidToUse = selectedGuids[0];

                    ManagedBusManager busMgr = new ManagedBusManager();
                    InterfaceType     ifType = busMgr.GetInterfaceTypeFromGuid(guidToUse);

                    if (ifType == InterfaceType.GigE)
                    {
                        m_camera = new ManagedGigECamera();
                    }
                    else
                    {
                        m_camera = new ManagedCamera();
                    }

                    // Connect to the first selected GUID
                    m_camera.Connect(guidToUse);

                    m_camCtlDlg.Connect(m_camera);

                    CameraInfo camInfo = m_camera.GetCameraInfo();
                    UpdateFormCaption(camInfo);

                    // Set embedded timestamp to on
                    EmbeddedImageInfo embeddedInfo = m_camera.GetEmbeddedImageInfo();
                    embeddedInfo.timestamp.onOff = true;
                    m_camera.SetEmbeddedImageInfo(embeddedInfo);

                    m_camera.StartCapture();

                    m_grabImages = true;

                    StartGrabLoop();
                }
                catch (FC2Exception ex)
                {
                    Debug.WriteLine("Failed to load form successfully: " + ex.Message);
                    Close();
                }

                toolStripButtonStart.Enabled = false;
                toolStripButtonStop.Enabled  = true;
            }
            else
            {
                Close();
            }

            Show();
        }
Example #34
        static void PrintCameraInfo(CameraInfo camInfo)
        {
            StringBuilder newStr = new StringBuilder();
            newStr.Append("\n*** CAMERA INFORMATION ***\n");
            newStr.AppendFormat("Serial number - {0}\n", camInfo.serialNumber);
            newStr.AppendFormat("Camera model - {0}\n", camInfo.modelName);
            newStr.AppendFormat("Camera vendor - {0}\n", camInfo.vendorName);
            newStr.AppendFormat("Sensor - {0}\n", camInfo.sensorInfo);
            newStr.AppendFormat("Resolution - {0}\n", camInfo.sensorResolution);
            newStr.AppendFormat("Firmware version - {0}\n", camInfo.firmwareVersion);
            newStr.AppendFormat("Firmware build time - {0}\n", camInfo.firmwareBuildTime);
            newStr.AppendFormat("GigE version - {0}.{1}\n", camInfo.gigEMajorVersion, camInfo.gigEMinorVersion);
            newStr.AppendFormat("User defined name - {0}\n", camInfo.userDefinedName);
            newStr.AppendFormat("XML URL 1 - {0}\n", camInfo.xmlURL1);
            newStr.AppendFormat("XML URL 2 - {0}\n", camInfo.xmlURL2);
            newStr.AppendFormat("MAC address - {0}\n", camInfo.macAddress.ToString());
            newStr.AppendFormat("IP address - {0}\n", camInfo.ipAddress.ToString());
            newStr.AppendFormat("Subnet mask - {0}\n", camInfo.subnetMask.ToString());
            newStr.AppendFormat("Default gateway - {0}\n", camInfo.defaultGateway.ToString());

            Console.WriteLine(newStr);
        }
Example #35
 internal static extern int rs2_supports_sensor_info(SensorHandle sensor, CameraInfo info, out RsError error);
Example #36
        private void PopulateCameraList()
        {
            uint numCameras = 0;

            CameraInfo[] discoveredCameras = new CameraInfo[0];

            try
            {
                numCameras        = m_busMgr.GetNumOfCameras();
                discoveredCameras = ManagedBusManager.DiscoverGigECameras();
            }
            catch (FC2Exception ex)
            {
                BasePage.ShowErrorMessageDialog("Error getting number of cameras.", ex);
            }

            if (numCameras == 0 && discoveredCameras.Length == 0)
            {
                m_cameraListLabel.Text = string.Format("Camera List (No cameras detected)");
                m_cameraDataGridView.Rows.Clear();
                m_cameraInfoPanel.ClearInformation();
                HideGigEInformation();
                AdjustWindowMinimumSize();
                this.Height = this.MinimumSize.Height;
                m_needShrinkWindowHeight = false;
                return;
            }

            SortedDictionary <uint, CameraInfo> discoveredCameraInfo = new SortedDictionary <uint, CameraInfo>();

            m_badCameraInfo  = new Dictionary <string, CameraInfo>();
            m_goodCameraInfo = new Dictionary <ManagedPGRGuid, CameraInfo>();

            for (uint currCamIdx = 0; currCamIdx < discoveredCameras.Length; currCamIdx++)
            {
                try
                {
                    Debug.WriteLine(
                        String.Format(
                            "Discovered camera: {0} ({1})",
                            discoveredCameras[currCamIdx].modelName,
                            discoveredCameras[currCamIdx].serialNumber));

                    // Check if the camera already exists - we sometimes get duplicate cameras
                    // returned from the discover call
                    if (!discoveredCameraInfo.ContainsKey(discoveredCameras[currCamIdx].serialNumber))
                    {
                        discoveredCameraInfo.Add(
                            discoveredCameras[currCamIdx].serialNumber,
                            discoveredCameras[currCamIdx]);
                    }
                }
                catch (ArgumentNullException ex)
                {
                    Debug.WriteLine("A null key was specified for discovered camera lookup.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                    continue;
                }
                catch (ArgumentException ex)
                {
                    Debug.WriteLine("An element with the same key already exists in the discovered camera dictionary.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                    continue;
                }
                catch (System.Exception ex)
                {
                    Debug.WriteLine("An error occurred while updating the discovered GigE camera list.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                    continue;
                }
            }

            List <DataGridViewRow> goodCameraList = new List <DataGridViewRow>();
            List <DataGridViewRow> badCameraList  = new List <DataGridViewRow>();

            for (uint i = 0; i < numCameras; i++)
            {
                try
                {
                    ManagedPGRGuid guid;
                    guid = m_busMgr.GetCameraFromIndex(i);

                    InterfaceType currInterface;
                    currInterface = m_busMgr.GetInterfaceTypeFromGuid(guid);

                    using (ManagedCamera camera = new ManagedCamera())
                    {
                        camera.Connect(guid);
                        CameraInfo camInfo;
                        camInfo = camera.GetCameraInfo();

                        if (discoveredCameraInfo.ContainsKey(camInfo.serialNumber) == true)
                        {
                            // Remove good camera from dictionary
                            discoveredCameraInfo.Remove(camInfo.serialNumber);
                            m_goodCameraInfo.Add(guid, camInfo);
                        }

                        // Append the camera to the list
                        try
                        {
                            DataGridViewRow           newCamera = new DataGridViewRow();
                            DataGridViewTextBoxCell[] cells     = new DataGridViewTextBoxCell[4];
                            for (int ci = 0; ci < cells.Length; ci++)
                            {
                                cells[ci] = new DataGridViewTextBoxCell();
                            }

                            cells[0].Value = camInfo.serialNumber.ToString();
                            cells[1].Value = camInfo.modelName;
                            cells[2].Value = InterfaceTranslator.GetInterfaceString(currInterface);
                            cells[3].Value = camInfo.ipAddress.Equals(new IPAddress(0)) ? "N/A" : camInfo.ipAddress.ToString();

                            newCamera.Cells.AddRange(cells);
                            goodCameraList.Add(newCamera);
                        }
                        catch (InvalidOperationException ex)
                        {
                            Debug.WriteLine("Error appending new row to camera list.");
                            Debug.WriteLine(ex.Message);
                            Debug.WriteLine(ex.StackTrace);
                            continue;
                        }
                        catch (ArgumentNullException ex)
                        {
                            Debug.WriteLine("The cell in camera list contains null value.");
                            Debug.WriteLine(ex.Message);
                            Debug.WriteLine(ex.StackTrace);
                            continue;
                        }
                    }
                }
                catch (FC2Exception ex)
                {
                    BasePage.ShowErrorMessageDialog("Error populating camera list.", ex);
                    continue;
                }
            }


            foreach (KeyValuePair <uint, CameraInfo> pair in discoveredCameraInfo)
            {
                try
                {
                    CameraInfo info = pair.Value;

                    m_badCameraInfo.Add(info.serialNumber.ToString(), info);

                    DataGridViewRow newCamera = new DataGridViewRow();

                    newCamera.DefaultCellStyle.BackColor = IP_PROBLEM;
                    DataGridViewTextBoxCell[] cells = new DataGridViewTextBoxCell[4];
                    for (int ci = 0; ci < cells.Length; ci++)
                    {
                        cells[ci] = new DataGridViewTextBoxCell();
                    }

                    cells[0].Value = info.serialNumber.ToString();
                    cells[1].Value = info.modelName;
                    cells[2].Value = "GigE";
                    cells[3].Value = info.ipAddress.Equals(new IPAddress(0)) ? "N/A" : info.ipAddress.ToString();

                    cells[0].ToolTipText = "This camera is discoverable but cannot be controlled";

                    foreach (DataGridViewTextBoxCell cell in cells)
                    {
                        if (m_GigEEnumerationIsDisabled)
                        {
                            cell.ToolTipText = "This camera cannot be enumerated by FlyCapture2 because GigE camera enumeration \n" +
                                               "has been disabled)";
                        }
                        else
                        {
                            cell.ToolTipText = "Camera IP settings or local interface is mis-configured. Use \"Force IP\" to \n" +
                                               "correct it";
                        }
                    }

                    newCamera.Cells.AddRange(cells);
                    badCameraList.Add(newCamera);
                }
                catch (InvalidOperationException ex)
                {
                    Debug.WriteLine("Error appending new row to camera list.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                    continue;
                }
                catch (ArgumentNullException ex)
                {
                    Debug.WriteLine("The cell in camera list contains null value.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                    continue;
                }
            }

            m_cameraDataGridView.Rows.Clear();
            m_cameraListLabel.Text = string.Format("Camera List ({0} cameras detected)", (goodCameraList.Count + badCameraList.Count));
            for (int i = 0; i < goodCameraList.Count; i++)
            {
                try
                {
                    m_cameraDataGridView.Rows.Add(goodCameraList[i]);
                }
                catch (InvalidOperationException ex)
                {
                    Debug.WriteLine("Error adding camera list to the view.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
                catch (ArgumentNullException ex)
                {
                    Debug.WriteLine("The camera list contains null value.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
                catch (ArgumentException ex)
                {
                    Debug.WriteLine("The camera list contains invalid value.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
            }

            for (int i = 0; i < badCameraList.Count; i++)
            {
                try
                {
                    m_cameraDataGridView.Rows.Add(badCameraList[i]);
                }
                catch (InvalidOperationException ex)
                {
                    Debug.WriteLine("Error adding camera list to the view.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
                catch (ArgumentNullException ex)
                {
                    Debug.WriteLine("The camera list contains null value.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
                catch (ArgumentException ex)
                {
                    Debug.WriteLine("The camera list contains invalid value.");
                    Debug.WriteLine(ex.Message);
                    Debug.WriteLine(ex.StackTrace);
                }
            }

            if (m_cameraDataGridView.Rows.Count > 0)
            {
                // display first camera information
                DisplayCameraInformationFromRowIndex(0);
            }
            else
            {
                // Nothing to display
                m_cameraInfoPanel.ClearInformation();
            }
        }
Example #37
0
		protected override void SetOutput(CameraInfo info, int i)
		{
			FDriver[i] = info.driverName;
			FType[i] = info.driverType.ToString();
			FBus[i] = (int)info.busNumber;
			FBusSpeed[i] = info.maximumBusSpeed.ToString();
			FPCIeSpeed[i] = info.pcieBusSpeed.ToString();
			FFirmware[i] = info.firmwareVersion;
			FUserName[i] = info.userDefinedName;
			FNodeNumber[i] = info.nodeNumber;
			/*
			info.firmwareBuildTime;
			info.iidcVersion;
			*/
		}
Example #38
0
 public VIdeoPlay_KD(VideoInfo vInfo, CameraInfo cInfo)
 {
     CurrentVideoInfo  = vInfo;
     CurrentCameraInfo = cInfo;
 }
Example #39
0
        private static void RenderLinesExtracted(ref NetSegment instance, CameraInfo cameraInfo, ushort segmentID, int layerMask, NetInfo info, ref Instance data, NetManager netManager, int propIndex2, uint num2, NetNode.Flags flags3, NetNode.Flags flags4, Color color3, Color color4, float startAngle2, float endAngle2, bool invert2, Vector4 objectIndex, Vector4 objectIndex2)
        {
            IEnumerable <CodeInstruction> Transpiler(IEnumerable <CodeInstruction> instructions)
            {
                var newInstructions = new List <CodeInstruction>();

                var argIndex = 5;

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, NetManagerVarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, PropIndex2VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Num2VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Flags3VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Flags4VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Color3VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Color4VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, StartAngle2VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, EndAngle2VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, Invert2VarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, ObjectIndexVarIndex));

                newInstructions.Add(new CodeInstruction(OpCodes.Ldarg_S, argIndex += 1));
                newInstructions.Add(new CodeInstruction(OpCodes.Stloc_S, ObjectIndex2VarIndex));

                newInstructions.AddRange(CollapsedIfBlockFind);

                var enumerator     = instructions.GetEnumerator();
                var findEnumerator = CollapsedIfBlockFind.GetEnumerator();

                Patcher.FindIfBegin(enumerator, findEnumerator, out _);

                newInstructions.Add(enumerator.Current);

                for (var prev = (CodeInstruction)null; enumerator.MoveNext(); prev = enumerator.Current)
                {
                    if (prev != null)
                    {
                        newInstructions.Add(prev);
                    }
                }
#if Debug
                //newInstructions.Clear();
                //Patcher.AddStopWatch(newInstructions);
#endif
                newInstructions.Add(enumerator.Current);

#if Debug && Trace
                Logger.AddDebugInstructions(newInstructions, nameof(NetSegmentPatch), nameof(RenderLinesExtracted));
#endif
#if Debug && IL
                Logger.Debug(nameof(NetSegmentPatch), nameof(RenderLinesExtracted), newInstructions);
#endif
                return(newInstructions);
            }

            _ = Transpiler(null);
        }
Example #40
0
 internal static extern IntPtr rs2_get_sensor_info(SensorHandle sensor, CameraInfo info, out RsError error);
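This P/Invoke returns a pointer to a native string. A typical way to consume it is sketched below; this is illustrative only and assumes the SensorHandle and RsError wrapper types used in the declaration above.

 // Sketch: wrap the native call and marshal the returned pointer into a managed string.
 public static string GetSensorInfo(SensorHandle sensor, CameraInfo info)
 {
     RsError error;
     IntPtr p = rs2_get_sensor_info(sensor, info, out error);
     return System.Runtime.InteropServices.Marshal.PtrToStringAnsi(p);
 }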
Example #41
0
        private void InitializeBusSpeedControlPanel(CameraInfo cameraInfo)
        {
            m_AsyncSpeedCombo.Items.Clear();
            m_IsochSpeedCombo.Items.Clear();
            m_IsochHelperLabel.Visible = false; // Hide the "Camera is streaming" helper label by default

            if (cameraInfo.interfaceType == InterfaceType.Ieee1394)
            {
                if (cameraInfo.maximumBusSpeed == BusSpeed.S1600)
                {
                    m_AsyncSpeedCombo.Items.Add("S1600");
                    m_IsochSpeedCombo.Items.Add("S1600");

                    m_AsyncSpeedCombo.Items.Add("S800");
                    m_IsochSpeedCombo.Items.Add("S800");

                    m_AsyncSpeedCombo.Items.Add("S400");
                    m_IsochSpeedCombo.Items.Add("S400");

                    m_AsyncSpeedCombo.Items.Add("S200");
                    m_IsochSpeedCombo.Items.Add("S200");

                    m_AsyncSpeedCombo.Items.Add("S100");
                    m_IsochSpeedCombo.Items.Add("S100");
                }

                if (cameraInfo.maximumBusSpeed == BusSpeed.S800)
                {
                    m_AsyncSpeedCombo.Items.Add("S800");
                    m_IsochSpeedCombo.Items.Add("S800");

                    m_AsyncSpeedCombo.Items.Add("S400");
                    m_IsochSpeedCombo.Items.Add("S400");

                    m_AsyncSpeedCombo.Items.Add("S200");
                    m_IsochSpeedCombo.Items.Add("S200");

                    m_AsyncSpeedCombo.Items.Add("S100");
                    m_IsochSpeedCombo.Items.Add("S100");
                }

                if (cameraInfo.maximumBusSpeed == BusSpeed.S400)
                {
                    m_AsyncSpeedCombo.Items.Add("S400");
                    m_IsochSpeedCombo.Items.Add("S400");

                    m_AsyncSpeedCombo.Items.Add("S200");
                    m_IsochSpeedCombo.Items.Add("S200");

                    m_AsyncSpeedCombo.Items.Add("S100");
                    m_IsochSpeedCombo.Items.Add("S100");
                }

                if (cameraInfo.maximumBusSpeed == BusSpeed.S200)
                {
                    m_AsyncSpeedCombo.Items.Add("S200");
                    m_IsochSpeedCombo.Items.Add("S200");

                    m_AsyncSpeedCombo.Items.Add("S100");
                    m_IsochSpeedCombo.Items.Add("S100");
                }

                if (cameraInfo.maximumBusSpeed == BusSpeed.S100)
                {
                    m_AsyncSpeedCombo.Items.Add("S100");
                    m_IsochSpeedCombo.Items.Add("S100");
                }

                if (m_camera != null)
                {
                    if (m_camera.ReadRegister(0x614) != 0) //Camera is streaming
                    {
                        m_IsochSpeedCombo.Enabled  = false;
                        m_IsochHelperLabel.Visible = true;
                    }
                }
            }
            if (cameraInfo.interfaceType == InterfaceType.Usb2)
            {
                m_AsyncSpeedCombo.Items.Add("S480");
                m_IsochSpeedCombo.Items.Add("S480");
            }
            if (cameraInfo.interfaceType == InterfaceType.Usb3)
            {
                m_AsyncSpeedCombo.Items.Add("S3200");
                m_IsochSpeedCombo.Items.Add("S3200");
            }
            if (cameraInfo.interfaceType == InterfaceType.GigE)
            {
                m_AsyncSpeedCombo.Items.Add("GigE_1000Base_T");
                m_IsochSpeedCombo.Items.Add("GigE_1000Base_T");
                m_speedControlPanel.Hide(); // Hide all speed controls for GigE
            }

            // Set default selection to Max
            if (m_AsyncSpeedCombo.Items.Count > 0 && m_IsochSpeedCombo.Items.Count > 0)
            {
                m_AsyncSpeedCombo.SelectedIndex = 0;
                m_IsochSpeedCombo.SelectedIndex = 0;
            }
            else
            {
                // Hide panel in case any of the comboboxes were empty
                m_speedControlPanel.Hide();
            }

            // Hide all speed controls for ZEBRA1 camera
            if (cameraInfo.interfaceType == InterfaceType.Ieee1394 && cameraInfo.modelName.ToLower().Contains("zebra"))
            {
                m_speedControlPanel.Hide();
            }
        }
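The cascading if-blocks above add every IEEE 1394 speed at or below the camera's maximum to both combo boxes. A more compact equivalent is sketched below; it is illustrative only, assumes the same BusSpeed members and WinForms combo boxes used in the example, and omits the unchanged USB/GigE branches and the hide/selection logic.

        // Sketch: populate both speed combos for an IEEE 1394 camera, fastest speed first.
        private static void PopulateIeee1394Speeds(CameraInfo cameraInfo, System.Windows.Forms.ComboBox asyncCombo, System.Windows.Forms.ComboBox isochCombo)
        {
            // Ordered fastest to slowest, matching the cascade in the example above.
            BusSpeed[] speeds = { BusSpeed.S1600, BusSpeed.S800, BusSpeed.S400, BusSpeed.S200, BusSpeed.S100 };

            bool include = false;
            foreach (BusSpeed speed in speeds)
            {
                // Start adding once the camera's maximum speed is reached, then add every slower speed.
                include |= (cameraInfo.maximumBusSpeed == speed);
                if (include)
                {
                    asyncCombo.Items.Add(speed.ToString());
                    isochCombo.Items.Add(speed.ToString());
                }
            }
        }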
Example #42
0
        private async Task <PhotographerInfo> GetPhotographerAsync(int guideId)
        {
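            // Refresh the access token, then query the user-management service for the photographer record.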
            var tokenResponse = await tokenClient.RequestRefreshTokenAsync(ConfigurationManager.AppSettings["refreshToken"]);

            var httpClient = new HttpClient
            {
                BaseAddress = new Uri(ConfigurationManager.AppSettings["userManagementBaseUri"])
            };

            httpClient.SetBearerToken(tokenResponse.AccessToken);

            var httpResponse = await httpClient.GetAsync($"api/Photographer/{guideId}");

            var content = httpResponse.Content;

            var photographerJson = await content.ReadAsStringAsync();

            var photographer = JsonConvert.DeserializeObject <Photographer>(photographerJson);

            var camera = new CameraInfo
            {
                Id             = photographer.Camera.Id,
                IsProfessional = photographer.Camera.IsProfessional,
                Model          = photographer.Camera.Model
            };

            var photographerInfo = new PhotographerInfo
            {
                Id                    = photographer.Id,
                FirstName             = photographer.FirstName,
                LastName              = photographer.LastName,
                DateOfBirth           = photographer.DateOfBirth,
                KnowledgeOfLanguages  = photographer.KnowledgeOfLanguages,
                Email                 = photographer.Email,
                Gender                = photographer.Gender,
                NumberOfAppraisers    = photographer.NumberOfAppraisers,
                PhoneNumber           = photographer.PhoneNumber,
                Rating                = photographer.Rating,
                WorkExperience        = photographer.WorkExperience,
                Profession            = photographer.Profession,
                HasCameraStabilizator = photographer.HasCameraStabilizator,
                HasDron               = photographer.HasDron,
                HasGopro              = photographer.HasGopro,
                Camera                = camera
            };

            if (photographer.Image != null)
            {
                photographerInfo.Image = ImageConverter.ConvertImageToImageSource(photographer.Image);
            }
            else
            {
                BitmapImage img = new BitmapImage();
                img.BeginInit();
                if (photographerInfo.Gender == "Female")
                {
                    img.UriSource = new Uri(@"pack://application:,,,/Kanch;component/Images/female.jpg");
                }
                else
                {
                    img.UriSource = new Uri(@"pack://application:,,,/Kanch;component/Images/male.jpg");
                }
                img.EndInit();
                photographerInfo.Image = img;
            }

            return(photographerInfo);
        }
        public void RunSingleCamera(ManagedPGRGuid guid)
        {
            const int k_numImages = 10;

            ManagedCamera cam = new ManagedCamera();

            // Connect to a camera
            cam.Connect(guid);

            // Get the camera information
            CameraInfo camInfo = cam.GetCameraInfo();

            PrintCameraInfo(camInfo);

            // Get embedded image info from camera
            EmbeddedImageInfo embeddedInfo = cam.GetEmbeddedImageInfo();

            // Enable timestamp collection
            if (embeddedInfo.timestamp.available == true)
            {
                embeddedInfo.timestamp.onOff = true;
            }

            // Set embedded image info to camera
            cam.SetEmbeddedImageInfo(embeddedInfo);

            // Start capturing images
            cam.StartCapture();

            // Create a raw image
            ManagedImage rawImage = new ManagedImage();

            // Create a converted image
            ManagedImage convertedImage = new ManagedImage();

            for (int imageCnt = 0; imageCnt < k_numImages; imageCnt++)
            {
                // Retrieve an image
                cam.RetrieveBuffer(rawImage);

                // Get the timestamp
                TimeStamp timeStamp = rawImage.timeStamp;

                Console.WriteLine(
                    "Grabbed image {0} - {1} {2} {3}",
                    imageCnt,
                    timeStamp.cycleSeconds,
                    timeStamp.cycleCount,
                    timeStamp.cycleOffset);

                // Convert the raw image
                rawImage.Convert(PixelFormat.PixelFormatBgr, convertedImage);

                // Create a unique filename
                string filename = String.Format(
                    "FlyCapture2Test_CSharp-{0}-{1}.bmp",
                    camInfo.serialNumber,
                    imageCnt);

                // Get the Bitmap object. Bitmaps are only valid if the
                // pixel format of the ManagedImage is RGB or RGBU.
                System.Drawing.Bitmap bitmap = convertedImage.bitmap;

                // Save the image
                bitmap.Save(filename);
            }

            // Stop capturing images
            cam.StopCapture();

            // Disconnect the camera
            cam.Disconnect();
        }
Example #44
0
 /// <summary>
 /// Get the device recording file name (the file name as stored on the device)
 /// </summary>
 /// <param name="cInfo"></param>
 /// <returns></returns>
 public static string GetFileMapName(CameraInfo cInfo)
 {
     return("FILE_MAP_" + Convert.ToString(cInfo.Channel).PadLeft(2, '0'));
 }
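Given the two-digit zero padding above, channel 3 maps to "FILE_MAP_03". A minimal usage sketch follows; the object-initializer construction of CameraInfo is an assumption for illustration.

 // Assumes CameraInfo exposes a settable Channel property, as read by GetFileMapName above.
 var cInfo = new CameraInfo { Channel = 3 };
 string mapName = GetFileMapName(cInfo);   // "FILE_MAP_03"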
Example #45
0
    protected override void _Load()
    {
        for (int i = 0; i < m_csv.GetRows(); i++)
        {
            MapData ani = new MapData();
            ani.id   = m_csv.GetIntData(i, (int)eMapCsv_Enum.eID);
            ani.name = m_csv.GetData(i, (int)eMapCsv_Enum.eName);
            ani.size = m_csv.GetData(i, (int)eMapCsv_Enum.eSize);
            ani.MakeSize();
            ani.type          = m_csv.GetIntData(i, (int)eMapCsv_Enum.eType);
            ani.sceneCfgResID = m_csv.GetIntData(i, (int)eMapCsv_Enum.eSceneCfgResID);

            ani.terrainResID = m_csv.GetIntData(i, (int)eMapCsv_Enum.eTerrainResID);
            ani.maxClimb     = m_csv.GetFloatData(i, (int)eMapCsv_Enum.eMaxClimb);
            ani.maxSlope     = m_csv.GetFloatData(i, (int)eMapCsv_Enum.eMaxSlope);

            ani.skyBoxID        = m_csv.GetIntData(i, (int)eMapCsv_Enum.eSkyBoxID);
            ani.bgMusic         = m_csv.GetIntData(i, (int)eMapCsv_Enum.eBgMusic);
            ani.bgFightMusic    = m_csv.GetIntData(i, (int)eMapCsv_Enum.eBgFightMusic);
            ani.necessaryResIDs = m_csv.GetData(i, (int)eMapCsv_Enum.eNecessaryResIDs);
            ani.iconName        = m_csv.GetData(i, (int)eMapCsv_Enum.eIconName);
            string birthPos    = m_csv.GetData(i, (int)eMapCsv_Enum.eBirthPos);
            string birthDir    = m_csv.GetData(i, (int)eMapCsv_Enum.eBirthDir);
            string fixbirthDir = m_csv.GetData(i, (int)eMapCsv_Enum.eFixedBirthDir);

            if (!string.IsNullOrEmpty(birthPos))
            {
                ani.vBirthPos = GetVector3(birthPos);
            }
            if (!string.IsNullOrEmpty(birthDir))
            {
                ani.vBirthDir = GetVector3(birthDir);
            }
            if (!string.IsNullOrEmpty(fixbirthDir))
            {
                ani.vFixBirthDir = GetVector3(fixbirthDir);
            }


            ani.cameraInfo = m_csv.GetData(i, (int)eMapCsv_Enum.eCamInfo);
            if (!string.IsNullOrEmpty(ani.cameraInfo))
            {
                string[] camInfoList = ani.cameraInfo.Split('#');       // camera configuration entries from the map table
                for (int cam = 0; cam < camInfoList.Length; cam++)
                {
                    string[]   camItem = camInfoList[cam].Split('|');
                    Vector3    pos     = GetVector3(camItem[0]);
                    Vector3    rota    = GetVector3(camItem[1]);
                    float      fov     = float.Parse(camItem[2]);
                    CameraInfo cInfo   = new CameraInfo();
                    cInfo.pos  = pos;
                    cInfo.rota = rota;
                    cInfo.fov  = fov;
                    ani.cameraInfoList.Add(cInfo);
                }
            }
            // Main light
            string mainLight = m_csv.GetData(i, (int)eMapCsv_Enum.eMainLight);
            if (!string.IsNullOrEmpty(mainLight))
            {
                string[] mainLightList = mainLight.Split('|');
                ani.mainLightDir       = GetVector3(mainLightList[0]);
                ani.mainLightColor     = GetColor(mainLightList[1]);
                ani.mainLightIntensity = float.Parse(mainLightList[2]);
            }
            // Ambient light
            string envLight = m_csv.GetData(i, (int)eMapCsv_Enum.eEnvLight);
            if (!string.IsNullOrEmpty(envLight))
            {
                string[] envLightList = envLight.Split('|');
                ani.envLightColor     = GetColor(envLightList[0]);
                ani.envLightIntensity = float.Parse(envLightList[1]);
            }
            // Fog
            string fog = m_csv.GetData(i, (int)eMapCsv_Enum.eFog);
            if (!string.IsNullOrEmpty(fog))
            {
                string[] fogList = fog.Split('|');
                ani.fogColor = GetColor(fogList[0]);
                ani.fogType  = int.Parse(fogList[1]);
                ani.fogVal1  = float.Parse(fogList[2]);
                ani.fogVal2  = float.Parse(fogList[3]);
            }
            // bloom
            string bloom = m_csv.GetData(i, (int)eMapCsv_Enum.eBloom);
            if (!string.IsNullOrEmpty(bloom))
            {
                string[] bloomList = bloom.Split('|');
                ani.m_bloomIntensity     = float.Parse(bloomList[0]);
                ani.m_bloomColorMix      = GetColor(bloomList[1]) / 255.0f;
                ani.m_bloomColorMixBlend = float.Parse(bloomList[2]);
            }

            // cc_vintage
            string ccVin = m_csv.GetData(i, (int)eMapCsv_Enum.eCCVintage);
            if (!string.IsNullOrEmpty(ccVin))
            {
                string[] ccVinList = ccVin.Split('|');
                ani.CCVintageFilter = int.Parse(ccVinList[0]);
                ani.CCVintageAmount = float.Parse(ccVinList[1]);
            }

            ani.customParam = m_csv.GetData(i, (int)eMapCsv_Enum.eCustomParam);
            if (!string.IsNullOrEmpty(ani.customParam))
            {
                ani.m_listCustomParam = ani.customParam.Split('|');
            }

            ani.fightParam = m_csv.GetData(i, (int)eMapCsv_Enum.eFightParam);
            if (!string.IsNullOrEmpty(ani.fightParam))
            {
                string[] fParma = ani.fightParam.Split('|');
                ani.fightSceneId   = int.Parse(fParma[0]);
                ani.fightPosOffset = GetVector3(fParma[1]);
            }
            ani.sceneAnimaId = m_csv.GetIntData(i, (int)eMapCsv_Enum.eSceneAnimaId);
            ani.isPK         = m_csv.GetIntData(i, (int)eMapCsv_Enum.eIsPK) == 1;
            m_mapDataDic.Add(ani.id, ani);
        }
    }
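For reference, the camera column parsed above packs one or more '#'-separated entries, each of the form position|rotation|fov. The standalone sketch below mirrors that parsing; the "x,y,z" component format is an assumption, since GetVector3 is not shown here.

    // Illustrative parser for a camera-config string shaped like the eMapCsv_Enum.eCamInfo column.
    // Assumed entry layout: "posX,posY,posZ|rotX,rotY,rotZ|fov", with entries separated by '#'.
    private static List<CameraInfo> ParseCameraInfoColumn(string column)
    {
        var result = new List<CameraInfo>();
        foreach (string entry in column.Split('#'))
        {
            string[] parts = entry.Split('|');
            CameraInfo cInfo = new CameraInfo();
            cInfo.pos  = ParseVector3(parts[0]);   // stand-in for the example's GetVector3 helper
            cInfo.rota = ParseVector3(parts[1]);
            cInfo.fov  = float.Parse(parts[2]);
            result.Add(cInfo);
        }
        return result;
    }

    private static Vector3 ParseVector3(string s)
    {
        string[] v = s.Split(',');
        return new Vector3(float.Parse(v[0]), float.Parse(v[1]), float.Parse(v[2]));
    }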
 public void SetPlayInfo(CameraInfo cInfo)
 {
     SetPlayInfo(CurrentV, cInfo.VideoInfo, cInfo, CurrentVideoPlaySet);
 }
Example #47
0
        protected void CleanupCameraObjects(CameraInfo camInfo)
        {
            //Debug.Log("Deleting objects for camera " + camInfo.srcCamera.GetInstanceID());

            if (camInfo.portalCamera && camInfo.portalCamera.gameObject) DestroyImmediate(camInfo.portalCamera.gameObject);
        }
Example #48
0
        // Remove camera from the collection and signal to stop it
        public void Remove(CameraInfo camera)
        {
            DeviceDriver device;
            if (RunningDriverList.ContainsKey(camera.CameraId))
            {
                device = RunningDriverList[camera.CameraId];
                device.SignalToStop();
                RunningDriverList.Remove(camera.CameraId);

            }
        }
Example #49
0
 public void SetCamera(GameObject cam)
 {
     Cam = new CameraInfo (cam);
 }
Example #50
0
        /**
         * This will render the slave camera to a texture as it would be seen by the given camera.
         * Render results are placed in {result}. Pass this to AppearAsPreviouslyRendered to apply the rendered
         * texture to our current model/appearance.
         */
        internal void RenderSlaveCamera(Camera cam, RenderedFrame result)
        {
            //skip if not on or ready
            if (!enabled || !GetComponent<Renderer>() || !GetComponent<Renderer>().sharedMaterial || !GetComponent<Renderer>().enabled || !destination || !cam)
            return;

            CameraInfo trackingInfo = null;
            if (!cameraInfo.TryGetValue(cam, out trackingInfo)) {
            //Not actually being tracked? Just use a temp. Generally shouldn't happen.
            trackingInfo = new CameraInfo();
            }

            Vector3 entryFaceDirection = transform.rotation * entryFace;

            var isBehind = Vector3.Dot(transform.position - cam.transform.position, entryFaceDirection) >= 0;
            var isReallyNear = PortalMath.DistanceFromPointToPlane(transform.forward, transform.position, cam.transform.position) <= cam.nearClipPlane;

            //Depending on where we've been and are, we might or might not want to render. Keep reading.
            if (isReallyNear) {
            if (isBehind) {
                //we are just behind the portal
                if (trackingInfo.nearFront) {
                    //We were in front of it earlier and we can still see the portal (the portal mesh has more geometry behind the front plane),
                    //Render.
                    //(If we're going to teleport something, we'll usually do it just after this frame.)
                } else {
                    //We weren't just in front of the portal. Perhaps we teleported in from somewhere or walked up to a portal that's
                    //invisible from behind while looking backwards.
                    //Don't render.
                    return;
                }
            } else {
                //We are in front of the portal (and rather close to boot).
                //Render.

                //Also set the "I was close to the front of the portal" flag so we know to keep rendering if we move behind it.
                trackingInfo.nearFront = true;
            }
            } else {
            //We are not close.

            //Reset this flag.
            trackingInfo.nearFront = false;

            if (isBehind) {
                //Don't render
                return;
            } else {
                //Render.
            }
            }

            //Note that, if we don't (try to) render the portal for a frame or so the tracking information will be deleted.
            //This covers the corner case where you are behind the portal with it rendering, then teleport away and back.
            //Without clearing the nearFront flag while we're away, we would incorrectly show the inside of the portal if we jumped back.

            //If we have a two-sided portal on the other end as our destination, the opposite face on the destination portal can sometimes
            //block the view as we look through.
            //Therefore, if we are the destination of the currently rendering portal, don't render at all.
            if (lastRecursivePortal && lastRecursivePortal.destination == this.transform) {
            return;
            }

            //Stop rendering if we are too recursively deep.
            if (currentPortalDepth + 1 > renderOptions.maximumPortalDepth) {
            result.renderOpaque = true;
            return;
            }

            #if UNITY_EDITOR
            if (!Application.isPlaying && currentPortalDepth + 1 > 1) {
            //don't render more than one deep in the editor (todo: make this configurable)
            result.renderOpaque = true;
            return;
            }
            #endif

            currentPortalDepth++;

            var lastLastRecursiveCamera = lastRecursiveCamera;
            var lastLastRecursivePortal = lastRecursivePortal;
            lastRecursiveCamera = cam;
            lastRecursivePortal = this;

            try {

            var camInfo = CreateCameraObjects(cam);

            var portalCamera = camInfo.portalCamera;

            UpdateCameraModes(cam, portalCamera);

            //Move the render target camera to where we'll be rendering from.
            TeleportRelativeToDestination(cam.transform, portalCamera.transform);

            //get the portal's plane
            var pos = destination.transform.position;
            var normal = destination.transform.rotation * exitFace;

            if (renderOptions.useOblique) {
                /*
                Normally, when you do a projection, the near and far (clipping) planes are perpendicular to the camera.

                They don't have to be, however, and here we take advantage of this fact to cull unwanted geometry.

                Here we set up an oblique projection matrix so that near clipping plane coincides with our portal plane.
                (Then shim it a bit, to avoid z-fighting.)
                This way the z-buffer will automatically clip out everything between the camera and portal.
                You'll only see things beyond the destination portal.
                 */
                Vector4 clipPlane = PortalMath.CameraSpacePlane(portalCamera, pos, normal, 1.0f, renderOptions.clipPlaneOffset);

                Matrix4x4 projection;
                if (currentPortalDepth > 1) {
                    //If we have a regular projection matrix we can just go ahead and turn it into an oblique matrix.
                    //But if we started with an oblique matrix (as happens when a portal renders a portal), re-obliquifying it
                    //messes up the far clipping plane.
                    //Instead, start with a fresh matrix for the camera and tweak that.

                    //Note that we don't want to modify the src camera's matrix, just get a copy of what its normal matrix would be.
                    //(Too bad Unity doesn't have an API to just fetch it.)
                    //Also note: If we do this to a scene camera inside the Unity Editor (even though we put it back) the scene cameras might FREAK OUT.
                    //(That's not a concern, however, because we only do this to slave cameras we generated.)
                    var origMatrix = cam.projectionMatrix;//backup
                    cam.ResetProjectionMatrix();
                    projection = cam.projectionMatrix;//get what we need
                    cam.projectionMatrix = origMatrix;//leave the original camera unmodified
                } else {
                    projection = cam.projectionMatrix;
                }

                //how far is the camera on this side from the portal entrance?
                var cameraDistanceFromPortal = PortalMath.DistanceFromPointToPlane(transform.forward, transform.position, cam.transform.position);
                if (cameraDistanceFromPortal < cam.nearClipPlane * 3) {
                    //When the camera's this close, the math we're using to construct the oblique matrix tends to break down and construct a matrix
                    //with a far plane that intersects the original frustum.

                    //If we're this close, we'll rely on the empty space that should be behind the portal actually being empty and just use a
                    //regular near plane on our frustum.
                } else {
                    if (portalCamera.orthographic)
                        PortalMath.CalculateOrthographicObliqueMatrix(ref projection, clipPlane);
                    else
                        PortalMath.CalculatePerspectiveObliqueMatrix(ref projection, clipPlane);
                }

                //we don't use the normal near clip plane, but still need to tell culling algorithms about where we're looking
                //Never mind, occlusion culling is broken in Unity. //portalCamera.nearClipPlane = PortalMath.DistanceFromPointToPlane(destination.forward, destination.position, portalCamera.transform.position);//Vector3.Distance(portalCamera.transform.position, destination.transform.position);

                portalCamera.projectionMatrix = projection;
            }

            var renderTexture = result.CreateTexture(renderOptions, cam);

            portalCamera.cullingMask = renderOptions.renderLayers.value;
            portalCamera.targetTexture = renderTexture;
            portalCamera.Render();
            // Debug.Log("portal texture (after render) is " + this+ "-"+camInfo.portalTexture.GetInstanceID());

            camInfo.renderedLastFrame = true;
            } finally {
            currentPortalDepth--;
            lastRecursiveCamera = lastLastRecursiveCamera;
            lastRecursivePortal = lastLastRecursivePortal;
            }
        }
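The oblique-projection notes above depend on expressing the portal plane in camera space before building the oblique matrix. A minimal sketch of that step follows the standard Unity recipe; it only approximates what PortalMath.CameraSpacePlane presumably does, and Camera.CalculateObliqueMatrix is Unity's built-in way to derive the oblique projection from such a plane.

        // Sketch: the portal plane in camera space as (normal, -dot(normal, point)), nudged by an offset to avoid z-fighting.
        private static Vector4 CameraSpacePlaneSketch(Camera cam, Vector3 pos, Vector3 normal, float sideSign, float clipPlaneOffset)
        {
            Vector3 offsetPos = pos + normal * clipPlaneOffset;
            Matrix4x4 worldToCamera = cam.worldToCameraMatrix;
            Vector3 camPos = worldToCamera.MultiplyPoint(offsetPos);
            Vector3 camNormal = worldToCamera.MultiplyVector(normal).normalized * sideSign;
            return new Vector4(camNormal.x, camNormal.y, camNormal.z, -Vector3.Dot(camPos, camNormal));
        }

        // Unity can then derive the oblique matrix directly from such a plane:
        //     portalCamera.projectionMatrix = cam.CalculateObliqueMatrix(clipPlane);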
 public VideoPlay_TDWY(CameraInfo cInfo)
 {
     CurrentCameraInfo = cInfo;
 }
Example #52
0
        /** Creates the objects we need, as needed. */
        protected CameraInfo CreateCameraObjects(Camera currentCamera)
        {
            //get the state for this particular camera
            CameraInfo camInfo;
            if (!cameraInfo.TryGetValue(currentCamera, out camInfo)) {
            //Debug.Log("Creating objects for camera " + currentCamera.GetInstanceID());
            camInfo = new CameraInfo();
            camInfo.srcCamera = currentCamera;
            cameraInfo[currentCamera] = camInfo;
            }

            //Camera for seeing through the portal
            if (!camInfo.portalCamera) {
            GameObject cameraObject = new GameObject("Portal Camera id" + GetInstanceID() + " for " + currentCamera.GetInstanceID(), typeof(Camera), typeof(Skybox));
            cameraObject.hideFlags = Portal.hideObjects ? HideFlags.HideAndDontSave : HideFlags.DontSave;
            camInfo.portalCamera = cameraObject.GetComponent<Camera>();
            camInfo.portalCamera.enabled = false;
            camInfo.portalCamera.transform.position = transform.position;
            camInfo.portalCamera.transform.rotation = transform.rotation;
            camInfo.portalCamera.useOcclusionCulling = false;//occlusion culling in Unity is broken, much more visibly so with a nonstandard near plane
            #if UNITY_5
            camInfo.portalCamera.gameObject.AddComponent<FlareLayer>();
            #endif
            }

            if (!portalMaterial) {
            SetupMaterial();
            }

            return camInfo;
        }
        public ChangeSynGroup GetChangeSynGroupById(ref string errMessage, int synGroupId)
        {
            Database db = DatabaseFactory.CreateDatabase();
            errMessage = "";
            try
            {
                DataSet ds = ChangeSynGroupDataAccess.GetChangeSynGroupById(db, synGroupId);
                if (ds.Tables[0].Rows.Count == 0)
                {
                    return null;
                }
                CameraInfo oCamera;

                var synGroup = new ChangeSynGroup(ds.Tables[0].Rows[0]) {ListCamera = new Dictionary<int, CameraInfo>()};

                DataSet dsCamera = CameraDataAccess.GetCamInfoByDeviceId(db, synGroup.ChangeSynGroupId);
                foreach (DataRow drCam in dsCamera.Tables[0].Rows)
                {
                    oCamera = new CameraInfo(drCam);
                    synGroup.ListCamera.Add(oCamera.CameraId, oCamera);
                }
                return synGroup;

            }
            catch (Exception ex)
            {
                errMessage = ex.Message + ex.StackTrace;
                Logger.Error("Error Message:" + ex.Message + " Trace:" + ex.StackTrace);
                return null;
            }
        }