/// <summary>
/// Opens a recorded RealSense clip with a temporary SenseManager and returns
/// the device information stored in the recording.
/// </summary>
/// <param name="fileName">Path of the recorded clip to inspect.</param>
/// <returns>The recording's <c>DeviceInfo</c>, or null on any failure.</returns>
public DeviceInfo GetDeviceFromFileMenu(string fileName)
{
    // Descriptor restricting the implementation query to video-capture sensors.
    ImplDesc desc = new ImplDesc() { group = ImplGroup.IMPL_GROUP_SENSOR, subgroup = ImplSubgroup.IMPL_SUBGROUP_VIDEO_CAPTURE };
    DeviceInfo dinfo;
    SenseManager pp = SenseManager.CreateInstance();
    if (pp == null)
    {
        return null;
    }

    try
    {
        // Fix: the original used `throw null`, which raises a bare
        // NullReferenceException at the throw site. Descriptive exceptions are
        // thrown instead; the catch-all below still maps every failure to a
        // null return, so callers see the same outcome.
        if (session.QueryImpl(desc, 0) == null)
        {
            throw new InvalidOperationException("No video capture implementation available.");
        }

        if (pp.CaptureManager == null)
        {
            throw new InvalidOperationException("CaptureManager is unavailable.");
        }

        if (pp.CaptureManager.SetFileName(fileName, false) < Status.STATUS_NO_ERROR)
        {
            throw new InvalidOperationException("Failed to open the recorded file.");
        }

        if (pp.CaptureManager.LocateStreams() < Status.STATUS_NO_ERROR)
        {
            throw new InvalidOperationException("Failed to locate streams in the recording.");
        }

        if (pp.CaptureManager.Device == null)
        {
            throw new InvalidOperationException("No device found in the recording.");
        }

        pp.CaptureManager.Device.QueryDeviceInfo(out dinfo);
    }
    catch
    {
        // Best-effort: any failure above yields a null result for the caller.
        pp.Dispose();
        return null;
    }

    pp.Close();
    pp.Dispose();
    StatusLabel.Text = "Ok";
    return dinfo;
}
/// <summary>
/// Creates the SDK session and SenseManager, then enables every stream listed
/// in the <c>StreamType</c> collection at the requested resolution and rate.
/// </summary>
/// <param name="resolutionWidth">Requested stream width in pixels.</param>
/// <param name="resolutionHeight">Requested stream height in pixels.</param>
/// <param name="framesPerSecond">Requested frame rate.</param>
/// <exception cref="InvalidRealSenseStatusException">Thrown when a stream cannot be enabled.</exception>
public void InitializeStream(int resolutionWidth, int resolutionHeight, float framesPerSecond)
{
    // Creating a SDK session
    Session = PXCMSession.CreateInstance();

    // Creating the SenseManager
    SenseManager = Session.CreateSenseManager();
    if (SenseManager == null)
    {
        Status_pipeline = "Failed to create an SDK pipeline object";
        return;
    }

    Status_pipeline = "Pipeline created";

    foreach (var stream in StreamType)
    {
        // Enabling the stream
        pxcmStatus enableStreamStatus = SenseManager.EnableStream(stream, resolutionWidth, resolutionHeight, framesPerSecond);
        if (enableStreamStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Fix: report the individual stream that failed (`stream`), not the
            // whole StreamType collection, in the error message.
            throw new InvalidRealSenseStatusException(enableStreamStatus, string.Format("Failed to enable the {0} stream. Return code: {1}", stream, enableStreamStatus));
        }
    }
}
/// <summary>
/// Configures the RealSense pipeline: synchronized color and depth streams,
/// the face module (detection + landmarks in color-plus-depth mode), and a
/// projection object for coordinate conversion between streams.
/// </summary>
private void InitCamera()
{
    session = Session.CreateInstance();
    System.Diagnostics.Debug.WriteLine("Version: " + session.Version.major);

    // Instantiate and initialize the SenseManager
    senseManager = session.CreateSenseManager();
    reader = SampleReader.Activate(senseManager);
    // STRONG_STREAM_SYNC requests color and depth samples aligned per frame.
    reader.EnableStream(StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);
    reader.EnableStream(StreamType.STREAM_TYPE_DEPTH, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);

    // Configure the Face Module; detection/landmark toggles and face counts
    // come from the enclosing class's fields.
    faceModule = FaceModule.Activate(senseManager);
    FaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
    faceConfig.Detection.isEnabled = detectionEnabled;
    faceConfig.Detection.maxTrackedFaces = maxTrackedFaces;
    faceConfig.Landmarks.isEnabled = landmarksEnabled;
    faceConfig.Landmarks.maxTrackedFaces = maxTrackedFaces;
    faceConfig.TrackingMode = Intel.RealSense.Face.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH;
    faceConfig.EnableAllAlerts();
    faceConfig.ApplyChanges();

    // Init the SenseManager after all streams/modules are configured.
    senseManager.Init();
    projection = senseManager.CaptureManager.Device.CreateProjection();
    System.Diagnostics.Debug.WriteLine("IsConnected: " + senseManager.IsConnected());
}
/// <summary>
/// Signals the capture thread to stop, waits up to one second for it to
/// finish, and then releases the SenseManager and the Session.
/// Does nothing when no capture thread was ever started.
/// </summary>
public void StopStream()
{
    if (CaptureThread == null)
    {
        return;
    }

    const int joinTimeoutMillis = 1000;
    try
    {
        IsRunning = false;
        CaptureThread.Join(joinTimeoutMillis);
        if (CaptureThread.IsAlive)
        {
            Console.WriteLine("Tempo excedido; Thread ainda não terminou seu processamento");
        }
        else
        {
            Console.WriteLine("thread terminou de uma forma não bloqueante!");
        }
    }
    catch (ThreadInterruptedException e)
    {
        Console.WriteLine(e + "Thread foi interrompida por alguma excessão lançada");
    }

    SenseManager.Dispose();
    Session.Dispose();
}
/// <summary>
/// Waits for audio input device selection, then initializes the smoother
/// used for mouth-motion values and starts monitoring the microphone.
/// </summary>
protected void Init()
{
    // Command-line argument: preselected audio input device, if any.
    DeviceName = CommandLineArgs.AudioInputDevice;
    if (DeviceName != null)
    {
        // An empty name means "default device"; otherwise it must exist.
        if (DeviceName == "" || Microphone.devices.Contains(DeviceName))
        {
            StartRecording();
        }
        else
        {
            // Fix: user-facing grammar ("do not exists" -> "does not exist").
            Canvas.DisplayMessage($"Device named “{DeviceName}” does not exist.");
            DeviceName = null;
        }
    }
    else if (CommandLineArgs.HideTextDefault)
    {
        // No device given and UI text is hidden: nothing to select interactively.
        return;
    }

    if (DeviceName == null)
    {
        StartCoroutine("SelectMicrophone");
    }

    // Smoothing initialization
    // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
    SenseManager = SenseManager.CreateInstance();
    Smoother = Smoother.CreateInstance(SenseManager.Session);
    SmoothMouth = Smoother.Create1DWeighted(5);
    MonitorMicrophone();
}
/// <summary>
/// Initializes the RealSense pipeline: enables the color stream, initializes
/// the SenseManager, and prepares the device (mirrored) and a projection
/// object. Errors are shown in a message box and the window is closed.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager
        senseManager = SenseManager.CreateInstance();

        SampleReader reader = SampleReader.Activate(senseManager);

        // Enable the color stream
        reader.EnableStream(StreamType.STREAM_TYPE_COLOR, COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);

        // Initialize the pipeline.
        // (Instances are created after Init() succeeds, so per-feature
        // configuration must happen after the Init() call.)
        var sts = senseManager.Init();
        if (sts < Status.STATUS_NO_ERROR)
        {
            throw new Exception("パイプラインの初期化に失敗しました");
        }

        // Acquire the device
        device = senseManager.CaptureManager.Device;

        // Mirror the displayed image horizontally
        device.MirrorMode = MirrorMode.MIRROR_MODE_HORIZONTAL;

        // Create the coordinate-conversion (projection) object
        projection = device.CreateProjection();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Initializes the RealSense pipeline: face tracking with expressions, a
/// 640x480@30 color stream rendered through the native texture plugin, and
/// the weighted smoothers that stabilize tracked body/face values.
/// Failures are appended to the on-screen error log instead of being thrown.
/// </summary>
protected void Init()
{
    try
    {
        // RealSense initialization
        // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_face_general_procedure.html
        // Reference: .\Intel\RSSDK\sample\core\RawStreams.unity
        SenseManager = SenseManager.CreateInstance();

        FaceModule = FaceModule.Activate(SenseManager);
        FaceModule.FrameProcessed += FaceModule_FrameProcessed;
        FaceData = FaceModule.CreateOutput();

        // Color-only tracking with expression data enabled.
        FaceConfig = FaceModule.CreateActiveConfiguration();
        FaceConfig.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
        FaceConfig.Expressions.Properties.Enabled = true;
        FaceConfig.ApplyChanges();

        SampleReader = SampleReader.Activate(SenseManager);
        SampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
        SampleReader.SampleArrived += SampleReader_SampleArrived;

        SenseManager.Init();
        SenseManager.StreamFrames(false);

        // RawStreams: native texture plumbing for rendering the color stream.
        Texture = NativeTexturePlugin.Activate();
        Material.mainTexture = new Texture2D(640, 480, TextureFormat.BGRA32, false);
        Material.mainTextureScale = new Vector2(-1, -1); // negative scale flips the rendered image
        TexPtr = Material.mainTexture.GetNativeTexturePtr();

        // Query the resolution the device actually selected.
        StreamProfileSet profile;
        SenseManager.CaptureManager.Device.QueryStreamProfileSet(out profile);
        Resolution = profile.color.imageInfo;

        // Smoothing initialization
        // Reference: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
        Smoother = Smoother.CreateInstance(SenseManager.Session);
        SmoothBody = Smoother.Create3DWeighted(BodyPosSmoothWeight);
        SmoothHead = Smoother.Create3DWeighted(HeadAngSmoothWeight);
        SmoothEyes = Smoother.Create2DWeighted(EyesPosSmoothWeight);
        SmoothEyesClose = Smoother.Create1DWeighted(EyesCloseSmoothWeight);
        SmoothBrowRai = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothBrowLow = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothSmile = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothKiss = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothMouth = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothTongue = Smoother.Create1DWeighted(FaceSmoothWeight);
    }
    catch (Exception e)
    {
        ErrorLog.text = "RealSense Error\n";
        ErrorLog.text += e.Message;
    }
}
//public void ShowPerformanceTick()
//{
//    /* Optional: Show performance tick */
//    if (image != null)
//    {
//        timer.Tick(RS.ImageExtension.PixelFormatToString(image.Info.format) + " " + image.Info.width + "x" + image.Info.height);
//    }
//}

/// <summary>
/// Initializes the RealSense pipeline and reports the outcome via SetStatus.
/// Sets <c>Stop</c> on failure so the streaming loop will not run.
/// </summary>
public void InitSenseManager()
{
    if (SenseManager.Init() == RS.Status.STATUS_NO_ERROR)
    {
        // Fix: typo in the status message ("Successfull" -> "Successful").
        SetStatus("SenseManager Init Successful");
    }
    else
    {
        SetStatus("SenseManager Init Failed");
        Stop = true;
    }
}
/// <summary>Checks against the guards for the command.</summary>
/// <param name="actionInput">The full input specified for executing the command.</param>
/// <returns>A string with the error message for the user upon guard failure, else null.</returns>
public override string Guards(ActionInput actionInput)
{
    IController sender = actionInput.Controller;
    string commonFailure = this.VerifyCommonGuards(actionInput, ActionGuards);
    if (commonFailure != null)
    {
        return commonFailure;
    }

    if (actionInput.Params.Length == 0)
    {
        return null;
    }

    // TODO: CommonGuards.RequiresAtLeastOneArgument makes the length check redundant?
    string targetName = (actionInput.Params.Length > 0) ? actionInput.Params[0] : string.Empty;
    string targetFullName = actionInput.Tail.Trim().ToLower();

    // Try to find the target either by all the parameter text or by just the first parameter.
    this.target = GetPlayerOrMobile(targetFullName) ?? GetPlayerOrMobile(targetName);

    // Rule: Is the target an entity?
    if (this.target == null)
    {
        return "You cannot see " + targetName + ".";
    }

    // Rule: Is the target the initiator?
    // Fix: case-insensitive comparison via string.Equals instead of
    // allocating two lowercased copies with ToLower(); also consistent with
    // the modernized variant of this guard elsewhere in the codebase.
    if (string.Equals(sender.Thing.Name, this.target.Name, StringComparison.CurrentCultureIgnoreCase))
    {
        return "You can't follow yourself.";
    }

    // Rule: Is the target in the same room?
    if (sender.Thing.Parent.Id != this.target.Parent.Id)
    {
        return targetName + " does not appear to be in the vicinity.";
    }

    // Rule: Is the target detectable by sight?
    SenseManager senses = new SenseManager();
    senses.AddSense(new Sense { SensoryType = SensoryType.Sight, Enabled = true });
    if (!this.target.IsDetectableBySense(senses))
    {
        return targetName + " does not appear to be in the vicinity.";
    }

    return null;
}
/// <summary>
/// Unity initialization: configures the RealSense pipeline for 1280x720@30
/// color and 640x480@30 depth, activates 3D segmentation and a depth sample
/// reader, prepares a native texture for RGB rendering, and starts streaming.
/// </summary>
void Start()
{
    /* Create SenseManager Instance */
    sm = SenseManager.CreateInstance();

    // Non-realtime mode; frames are not dropped to keep up with wall-clock time.
    sm.CaptureManager.Realtime = false;

    // Selecting a higher resolution profile
    StreamProfileSet profiles = new StreamProfileSet();
    profiles.color.imageInfo.width = 1280;
    profiles.color.imageInfo.height = 720;
    RangeF32 f_rate = new RangeF32(30, 30);
    profiles.color.frameRate = f_rate;
    profiles.depth.imageInfo.width = 640;
    profiles.depth.imageInfo.height = 480;
    RangeF32 f_drate = new RangeF32(30, 30);
    profiles.depth.frameRate = f_drate;

    // Setting the resolution profile
    sm.CaptureManager.FilterByStreamProfiles(profiles);

    // Two readers: one unconfigured, one explicitly streaming depth samples.
    sampleReader = SampleReader.Activate(sm);
    sampleReader2 = SampleReader.Activate(sm);
    sampleReader2.EnableStream(StreamType.STREAM_TYPE_DEPTH, depthWidth, depthHeight, colorFPS);

    // Enable and Get a segmentation instance here for configuration
    seg = Seg3D.Activate(sm);
    // Subscribe to segmentation and sample events before Init().
    seg.FrameProcessed += OnFrameProcessed;
    seg.OnAlert += Seg_OnAlert;
    sampleReader2.SampleArrived += SampleArrived;

    /* Initialize pipeline */
    sm.Init();

    // Device tuning: finest IVCAM accuracy, mirrored image.
    sm.CaptureManager.Device.IVCAMAccuracy = IVCAMAccuracy.IVCAM_ACCURACY_FINEST;
    sm.CaptureManager.Device.MirrorMode = MirrorMode.MIRROR_MODE_HORIZONTAL;

    /* Create NativeTexturePlugin to render Texture2D natively */
    texPlugin = NativeTexturePlugin.Activate();

    // Configuring the material and its texture
    RGBMaterial.mainTexture = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    RGBMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    colorTex2DPtr = RGBMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    /* Start Streaming */
    sm.StreamFrames(false);
}
/// <summary>
/// Creates the SenseManager from a fresh session; on success, continues by
/// initializing the capture manager. Logs the outcome either way.
/// </summary>
void InititalizeSenseManager()
{
    senseManager = Session.CreateInstance().CreateSenseManager();

    if (senseManager != null)
    {
        Debug.Log(TAG + "Sense Manager Initialize Successful");
        InitializeCaptureManager();
    }
    else
    {
        Debug.Log(TAG + "Sense Manager Initialize Failure!");
    }
}
/// <summary>
/// Cleanup: releases the SenseManager and projection objects and clears the
/// fields so a repeated call is a no-op.
/// </summary>
private void Uninitialize()
{
    senseManager?.Dispose();
    senseManager = null;

    projection?.Dispose();
    projection = null;
}
/// <summary>Checks against the guards for the command.</summary>
/// <param name="actionInput">The full input specified for executing the command.</param>
/// <returns>A string with the error message for the user upon guard failure, else null.</returns>
public override string Guards(ActionInput actionInput)
{
    var guardFailure = VerifyCommonGuards(actionInput, ActionGuards);
    if (guardFailure != null)
    {
        return guardFailure;
    }

    if (actionInput.Params.Length == 0)
    {
        return null;
    }

    // TODO: CommonGuards.RequiresAtLeastOneArgument makes the length check redundant?
    var firstParam = (actionInput.Params.Length > 0) ? actionInput.Params[0] : string.Empty;
    var tailName = actionInput.Tail.Trim().ToLower();

    // Prefer a match on the full tail text; fall back to the first parameter alone.
    target = GetPlayerOrMobile(tailName) ?? GetPlayerOrMobile(firstParam);

    if (target == null)
    {
        // Rule: Is the target an entity?
        return "You cannot see " + firstParam + ".";
    }

    if (string.Equals(actionInput.Actor.Name, target.Name, StringComparison.CurrentCultureIgnoreCase))
    {
        // Rule: Is the target the initiator?
        return "You can't follow yourself.";
    }

    if (actionInput.Actor.Parent.Id != target.Parent.Id)
    {
        // Rule: Is the target in the same room?
        return $"{firstParam} does not appear to be in the vicinity.";
    }

    // Rule: Is the target detectable by sight?
    var sightOnly = new SenseManager();
    sightOnly.AddSense(new Sense { SensoryType = SensoryType.Sight, Enabled = true });
    if (!target.IsDetectableBySense(sightOnly))
    {
        return $"{firstParam} does not appear to be in the vicinity.";
    }

    return null;
}
/// <summary>
/// Disposes the SenseManager when one exists; any failure is surfaced to the
/// user in an error dialog rather than propagated.
/// </summary>
public void DisposeSenseManager()
{
    try
    {
        SenseManager?.Dispose();
    }
    catch (Exception e)
    {
        MessageBox.Show(null, e.ToString(), "Can not dispose SenseManager", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Shuts down and releases the static RealSense pipeline and session.
/// Safe to call repeatedly: already-released fields are simply skipped.
/// </summary>
public static void Dispose()
{
    // Close streaming before disposing the manager, then drop the reference
    // so a second call is a no-op.
    SenseManager?.Close();
    SenseManager?.Dispose();
    SenseManager = null;

    Session?.Dispose();
    Session = null;
}
/// <summary>
/// Acquires the next frame without waiting for all modules and returns its
/// sample, or null when no frame is ready.
/// </summary>
/// <returns>The current <c>RS.Sample</c>, or null on acquire failure.</returns>
public RS.Sample GetSample()
{
    /* Wait until a frame is ready: Synchronized or Asynchronous */
    if (SenseManager.AcquireFrame(false) != RS.Status.STATUS_NO_ERROR)
    {
        return null;
    }

    /* Acquire Frame from Camera */
    return SenseManager.Sample;
}
/// <summary>
/// Enables the face module and configures it for single-face tracking:
/// color-plus-depth mode, landmarks and pose without smoothing, the four
/// expressions this app consumes, and all alerts routed to OnAlert.
/// </summary>
/// <exception cref="Exception">Thrown when ApplyChanges() reports an error.</exception>
private void SetFaceConfiguration()
{
    SenseManager.EnableFace();
    FaceModule = SenseManager.QueryFace();
    FaceConfiguration = FaceModule.CreateActiveConfiguration();
    FaceConfiguration.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH);
    FaceConfiguration.strategy = PXCMFaceConfiguration.TrackingStrategyType.STRATEGY_CLOSEST_TO_FARTHEST;

    //Detection
    //FaceConfiguration.detection.isEnabled = false;
    //FaceConfiguration.detection.maxTrackedFaces = 0;

    // Landmarks: single face, raw (unsmoothed) values.
    FaceConfiguration.landmarks.isEnabled = true;
    FaceConfiguration.landmarks.maxTrackedFaces = 1;
    FaceConfiguration.landmarks.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

    // Configuration of Pose: single face, raw values.
    FaceConfiguration.pose.isEnabled = true;
    FaceConfiguration.pose.maxTrackedFaces = 1;
    FaceConfiguration.pose.smoothingLevel = PXCMFaceConfiguration.SmoothingLevelType.SMOOTHING_DISABLED;

    //Configuration of Gaze
    //FaceConfiguration.

    // Configuration of Expressions: only the four expressions used downstream.
    ExpressionsConfiguration = FaceConfiguration.QueryExpressions();
    ExpressionsConfiguration.properties.isEnabled = true;
    ExpressionsConfiguration.properties.maxTrackedFaces = 1;
    ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_KISS);
    ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_TONGUE_OUT);
    ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_SMILE);
    ExpressionsConfiguration.EnableExpression(PXCMFaceData.ExpressionsData.FaceExpression.EXPRESSION_MOUTH_OPEN);

    FaceConfiguration.EnableAllAlerts();
    FaceConfiguration.SubscribeAlert(OnAlert);

    // Commit everything configured above; the module ignores it otherwise.
    pxcmStatus applyChangesStatus = FaceConfiguration.ApplyChanges();
    if (applyChangesStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("FaceConfiguration.ApplyChanges() error: " + applyChangesStatus.ToString());
    }
}
/// <summary>
/// Selects the hardware/stream profile, applies the face configuration,
/// initializes the pipeline, and launches the background tracking loop.
/// </summary>
/// <exception cref="Exception">Thrown when SenseManager.Init() reports an error.</exception>
public void StartTracking()
{
    SelectHardwareAndStreamProfile();
    SetFaceConfiguration();

    pxcmStatus senseManagerInitStatus = SenseManager.Init();
    if (senseManagerInitStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("SenseManager.Init() error: " + senseManagerInitStatus.ToString());
    }

    // Clear the stop flag before the loop thread starts polling it.
    _shouldStop = false;
    var trackingThread = new Thread(StartTrackingLoop);
    trackingThread.Start();
}
/// <summary>
/// Unity initialization: configures the RealSense pipeline for 1280x720@30
/// color and 640x480@30 depth, activates 3D segmentation, prepares a native
/// texture for rendering the segmented image, and starts streaming.
/// </summary>
void Start()
{
    /* Create SenseManager Instance */
    sm = SenseManager.CreateInstance();

    /* Selecting a higher resolution profile*/
    StreamProfileSet profiles = new StreamProfileSet();
    profiles.color.imageInfo.width = 1280;
    profiles.color.imageInfo.height = 720;
    RangeF32 f_rate = new RangeF32(30, 30);
    profiles.color.frameRate = f_rate;
    profiles.depth.imageInfo.width = 640;
    profiles.depth.imageInfo.height = 480;
    RangeF32 f_drate = new RangeF32(30, 30);
    profiles.depth.frameRate = f_drate;

    /* Setting the resolution profile */
    sm.CaptureManager.FilterByStreamProfiles(profiles);

    /* Enable and Get a segmentation instance here for configuration */
    seg = Seg3D.Activate(sm);

    /* Subscribe to seg arrived event (must happen before Init) */
    seg.FrameProcessed += OnFrameProcessed;

    /* Initialize pipeline */
    sm.Init();

    /* Create NativeTexturePlugin to render Texture2D natively */
    texPlugin = NativeTexturePlugin.Activate();

    SegMaterial.mainTexture = new Texture2D(segWidth, segHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    SegMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    segTex2DPtr = SegMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    /* Start Streaming */
    sm.StreamFrames(false);
}
/// <summary>
/// Binds the selected device, initializes the SenseManager, and starts the
/// background capture loop that acquires and releases aligned frames.
/// </summary>
/// <exception cref="NullReferenceException">Thrown when the SenseManager was never created.</exception>
/// <exception cref="InvalidRealSenseStatusException">Thrown when Init() fails.</exception>
public void StartStream()
{
    // Fix: this null check originally ran AFTER SenseManager had already been
    // dereferenced by FilterByDeviceInfo/Init, making it dead code. Check first.
    if (SenseManager == null)
    {
        throw new NullReferenceException("The SenseManager isn't initialized. Please check if you already called the InitializeStream method.");
    }

    SenseManager.captureManager.FilterByDeviceInfo(DeviceInfo);

    // Initializing the SenseManager
    pxcmStatus initSenseManagerStatus = SenseManager.Init();
    if (initSenseManagerStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new InvalidRealSenseStatusException(initSenseManagerStatus, string.Format("Failed to initialize the SenseManager. Return code: {0}", initSenseManagerStatus));
    }

    IsRunning = true;
    CaptureThread = new Thread(() =>
    {
        while (IsRunning)
        {
            if (SenseManager == null || !SenseManager.IsConnected())
            {
                throw new Exception("The SenseManager is not ready to stream.");
            }

            // Acquiring a frame with ifall=true to wait for both samples to be ready (aligned samples)
            pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
            if (acquireFrameStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new InvalidRealSenseStatusException(acquireFrameStatus, string.Format("Failed to acquire a frame. Return code: {0}", acquireFrameStatus));
            }

            SenseManager.ReleaseFrame();
        }
    });
    CaptureThread.Start();
}
/// <summary>
/// Unity initialization: enables color and depth streams, configures the face
/// module for single-face color-mode tracking, prepares native textures for
/// both streams, and starts the RealSense pipeline streaming.
/// </summary>
void Start()
{
    /* Create SenseManager Instance */
    sm = SenseManager.CreateInstance();

    /* Create a SampleReader Instance */
    sampleReader = SampleReader.Activate(sm);

    /* Enable Color & Depth Stream */
    sampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, colorWidth, colorHeight, colorFPS);
    sampleReader.EnableStream(StreamType.STREAM_TYPE_DEPTH, depthWidth, depthHeight, depthFPS);

    /* Subscribe to sample arrived event */
    sampleReader.SampleArrived += SampleArrived;

    /////////////////////////////////////////////////////// terry add start
    // Face module: single tracked face, detection + landmarks only (no pose).
    faceModule = FaceModule.Activate(sm);
    if (faceModule == null)
    {
        Debug.LogError("FaceModule Initialization Failed");
    }
    //faceModule.FrameProcessed += FaceModule_FrameProcessed;

    FaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null)
    {
        Debug.LogError("FaceConfiguration Initialization Failed");
    }
    moduleConfiguration.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
    moduleConfiguration.Strategy = TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.Detection.maxTrackedFaces = 1;
    moduleConfiguration.Landmarks.maxTrackedFaces = 1;
    moduleConfiguration.Detection.isEnabled = true;
    moduleConfiguration.Landmarks.isEnabled = true;
    moduleConfiguration.Pose.isEnabled = false;
    moduleConfiguration.EnableAllAlerts();
    //moduleConfiguration.AlertFired += OnFiredAlert;

    // Commit the face configuration and log the resulting status.
    Status applyChangesStatus = moduleConfiguration.ApplyChanges();
    Debug.Log(applyChangesStatus.ToString());
    ////////////////////////////////////////////////////// terry add end

    /* Initialize pipeline */
    sm.Init();

    /* Create NativeTexturePlugin to render Texture2D natively */
    texPlugin = NativeTexturePlugin.Activate();

    RGBMaterial.mainTexture = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    RGBMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    colorTex2DPtr = RGBMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    DepthMaterial.mainTexture = new Texture2D(depthWidth, depthHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    DepthMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    depthTex2DPtr = DepthMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    /* Start Streaming */
    sm.StreamFrames(false);
}
/* Using SenseManager to handle data */
/// <summary>
/// Runs the complete hand-tracking pipeline on the UI's behalf: selects a
/// live, recorded, or recording device; activates the hand module; then loops
/// acquiring frames and rendering depth, gestures, joints, and alerts until
/// the form requests a stop. All status is reported through _form.
/// </summary>
public void SimplePipeline()
{
    _form.UpdateInfo(String.Empty, System.Drawing.Color.Black);
    bool liveCamera = false;
    // flag stays true unless Init fails; controls the final "Stopped" status.
    bool flag = true;
    SenseManager instance = null;
    _disconnected = false;
    instance = _form.session.CreateSenseManager();
    if (instance == null)
    {
        _form.UpdateStatus("Failed creating SenseManager");
        _form.EnableTrackingMode(true);
        return;
    }

    // Source selection: playback file, recording to file, or live camera.
    CaptureManager captureManager = instance.CaptureManager;
    DeviceInfo info = null;
    if (captureManager != null)
    {
        if (_form.GetPlaybackState())
        {
            captureManager.SetFileName(_form.GetFileName(), false);
            info = _form.GetDeviceFromFileMenu(_form.GetFileName());
        }
        else
        {
            if (_form.Devices.Count == 0)
            {
                _form.UpdateStatus("No device were found");
                return;
            }

            _form.Devices.TryGetValue(_form.GetCheckedDevice(), out info);
            if (_form.GetRecordState())
            {
                // Record mode: stream from the checked device into a file.
                captureManager.SetFileName(_form.GetFileName(), true);
                if (_form.Devices.TryGetValue(_form.GetCheckedDevice(), out info))
                {
                    captureManager.FilterByDeviceInfo(_form.GetCheckedDeviceInfo());
                }
            }
            else
            {
                captureManager.FilterByDeviceInfo(_form.GetCheckedDeviceInfo());
                liveCamera = true;
            }

            if (info == null)
            {
                _form.UpdateStatus("Device Failure");
                return;
            }
        }
    }

    /* Set Module */
    HandModule handAnalysis;

    // SenseManager.Handler handler = new SenseManager Handler();
    // handler.onModuleProcessedFrame = new SenseManager.Handler.OnModuleProcessedFrameDelegate(OnNewFrame);

    HandConfiguration handConfiguration = null;
    HandData handData = null;

    handAnalysis = HandModule.Activate(instance);
    if (handAnalysis == null)
    {
        _form.UpdateStatus("Failed Loading Module");
        _form.EnableTrackingMode(true);
        return;
    }

    handConfiguration = handAnalysis.CreateActiveConfiguration();
    if (handConfiguration == null)
    {
        _form.UpdateStatus("Failed Create Configuration");
        _form.EnableTrackingMode(true);
        instance.Close();
        instance.Dispose();
        return;
    }

    handData = handAnalysis.CreateOutput();
    if (handData == null)
    {
        _form.UpdateStatus("Failed Create Output");
        _form.EnableTrackingMode(true);
        handConfiguration.Dispose();
        instance.Close();
        instance.Dispose();
        return;
    }

    FPSTimer timer = new FPSTimer(_form);
    _form.UpdateStatus("Init Started");

    if (instance.Init() == Status.STATUS_NO_ERROR)
    {
        // Read back the actual device model and depth range after Init.
        DeviceInfo dinfo;
        DeviceModel dModel = DeviceModel.DEVICE_MODEL_F200;
        Device device = instance.CaptureManager.Device;
        if (device != null)
        {
            device.QueryDeviceInfo(out dinfo);
            dModel = dinfo.model;
            _maxRange = device.DepthSensorRange.max;
        }

        if (handConfiguration != null)
        {
            TrackingModeType trackingMode = TrackingModeType.TRACKING_MODE_FULL_HAND;
            if (_form.GetFullHandModeState())
            {
                trackingMode = TrackingModeType.TRACKING_MODE_FULL_HAND;
            }

            handConfiguration.TrackingMode = trackingMode;
            handConfiguration.EnableAllAlerts();
            handConfiguration.SegmentationImageEnabled = true;
            bool isEnabled = handConfiguration.SegmentationImageEnabled;
            handConfiguration.ApplyChanges();

            // Populate the UI gesture list from the module's known gestures.
            _form.resetGesturesList();
            int totalNumOfGestures = handConfiguration.NumberOfGestures;
            if (totalNumOfGestures > 0)
            {
                this._form.UpdateGesturesToList("", 0);
                for (int i = 0; i < totalNumOfGestures; i++)
                {
                    string gestureName = string.Empty;
                    if (handConfiguration.QueryGestureNameByIndex(i, out gestureName) == Status.STATUS_NO_ERROR)
                    {
                        this._form.UpdateGesturesToList(gestureName, i + 1);
                    }
                }

                _form.UpdateGesturesListSize();
            }
        }

        _form.UpdateStatus("Streaming");
        int frameCounter = 0;
        int frameNumber = 0;

        // Main streaming loop; exits when the form sets stop or acquire fails.
        while (!_form.stop)
        {
            // Keep the enabled gesture in sync with the UI selection each frame.
            string gestureName = _form.GetGestureName();
            if (handConfiguration != null)
            {
                if (string.IsNullOrEmpty(gestureName) == false)
                {
                    if (handConfiguration.IsGestureEnabled(gestureName) == false)
                    {
                        handConfiguration.DisableAllGestures();
                        handConfiguration.EnableGesture(gestureName, true);
                        handConfiguration.ApplyChanges();
                    }
                }
                else
                {
                    handConfiguration.DisableAllGestures();
                    handConfiguration.ApplyChanges();
                }
            }

            if (instance.AcquireFrame(true) < Status.STATUS_NO_ERROR)
            {
                break;
            }

            frameCounter++;

            if (!DisplayDeviceConnection(!instance.IsConnected()))
            {
                Sample sample = instance.Sample;
                if (sample != null && sample.Depth != null)
                {
                    // Playback reports its own frame index; live counts locally.
                    frameNumber = liveCamera ? frameCounter : instance.CaptureManager.FrameIndex;

                    if (handData != null)
                    {
                        handData.Update();
                        DisplayPicture(sample.Depth, handData);
                        DisplayGesture(handData, frameNumber);
                        DisplayJoints(handData);
                        DisplayAlerts(handData, frameNumber);
                    }

                    _form.UpdatePanel();
                }

                timer.Tick();
            }

            instance.ReleaseFrame();
        }
    }
    else
    {
        _form.UpdateStatus("Init Failed");
        flag = false;
    }

    foreach (Image Image in _mImages)
    {
        Image.Dispose();
    }

    // Clean Up
    if (handData != null)
    {
        handData.Dispose();
    }

    if (handConfiguration != null)
    {
        handConfiguration.Dispose();
    }

    instance.Close();
    instance.Dispose();

    if (flag)
    {
        _form.UpdateStatus("Stopped");
    }
}
/// <summary>
/// Releases the RealSense pipeline and its owning session.
/// </summary>
public void Dispose()
{
    // Fix: dispose the SenseManager before the Session that created it,
    // matching the teardown order used elsewhere in this codebase.
    SenseManager.Dispose();
    Session.Dispose();
}
/// <summary>
/// Unity initialization: resolves the scene's GameManager by tag and caches
/// its GameManager and SenseManager components.
/// </summary>
void Start()
{
    var managerObject = GameObject.FindGameObjectWithTag("GameManager");
    gameManager = managerObject.GetComponent<GameManager>();
    senseManager = gameManager.GetComponent<SenseManager>();
}
/// <summary>
/// Background tracking loop: acquires frames (~4/s), updates face data, and
/// feeds pose/expression tracking for exactly one detected face; any other
/// face count resets the track data. Runs until <c>_shouldStop</c> is set,
/// then disposes the face data, configuration, and SenseManager.
/// </summary>
private void StartTrackingLoop()
{
    PXCMFaceData FaceData = FaceModule.CreateOutput();

    while (!_shouldStop)
    {
        pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
        if (acquireFrameStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Acquire failed: clear stale track data and try again.
            ResetTrackData();
            Console.WriteLine("SenseManager.AcquireFrame(true) error: " + acquireFrameStatus.ToString());
            continue;
        }

        PXCMCapture.Sample captureSample = SenseManager.QueryFaceSample();
        if (captureSample == null)
        {
            ResetTrackData();
            SenseManager.ReleaseFrame();
            continue;
        }

        //TODO: fetch image data
        TrackImageData(captureSample);

        FaceData.Update();
        int numberOfDetectedFaces = FaceData.QueryNumberOfDetectedFaces();
        if (numberOfDetectedFaces != 1)
        {
            // Only single-face tracking is supported; anything else resets.
            ResetTrackData();
            SenseManager.ReleaseFrame();
            continue;
        }

        PXCMFaceData.Face faceDataFace = FaceData.QueryFaceByIndex(0);
        TrackPose(faceDataFace);
        TrackExpressions(faceDataFace);
        //TrackLandmarks(faceDataFace);
        //TrackGaze();

        //FaceData.QueryRecognitionModule();
        // The next object contains the bounding rectangle and average depth:
        //PXCMFaceData.DetectionData faceDataDetectionData = faceDataFace.QueryDetection();
        //faceDataDetectionData.QueryFaceAverageDepth();
        //faceDataDetectionData.QueryBoundingRect();

        SenseManager.ReleaseFrame();
        // Throttle the loop to roughly four updates per second.
        Thread.Sleep(250);
    }

    if (FaceData != null)
    {
        FaceData.Dispose();
    }

    FaceConfiguration.Dispose();
    SenseManager.Close();
    SenseManager.Dispose();
}