/// <summary>
/// Initializes the RealSense pipeline: enables the color stream, mirrors the
/// image, and creates the coordinate-mapping (projection) object.
/// On any failure a message box is shown and the window is closed.
/// </summary>
private void Initialize()
{
    try
    {
        // Create the SenseManager instance.
        senseManager = SenseManager.CreateInstance();

        // Enable the color stream through a sample reader.
        SampleReader reader = SampleReader.Activate(senseManager);
        reader.EnableStream(StreamType.STREAM_TYPE_COLOR, COLOR_WIDTH, COLOR_HEIGHT, COLOR_FPS);

        // Initialize the pipeline. Device-level settings can only be applied
        // after Init() succeeds, because the device instance is created there.
        var status = senseManager.Init();
        if (status < Status.STATUS_NO_ERROR)
        {
            throw new Exception("パイプラインの初期化に失敗しました");
        }

        // Acquire the device and mirror the image horizontally.
        device = senseManager.CaptureManager.Device;
        device.MirrorMode = MirrorMode.MIRROR_MODE_HORIZONTAL;

        // Create the coordinate-conversion (projection) object.
        projection = device.CreateProjection();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Close();
    }
}
/// <summary>
/// Creates the RealSense session and SenseManager, enables synchronized
/// color and depth streams, configures the face module (detection and
/// landmarks in color-plus-depth mode), and initializes the pipeline.
/// </summary>
/// <exception cref="InvalidOperationException">The pipeline failed to initialize.</exception>
private void InitCamera()
{
    session = Session.CreateInstance();
    System.Diagnostics.Debug.WriteLine("Version: " + session.Version.major);

    // Instantiate and initialize the SenseManager
    senseManager = session.CreateSenseManager();
    reader = SampleReader.Activate(senseManager);
    // STRONG_STREAM_SYNC keeps the color and depth samples time-aligned.
    reader.EnableStream(StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);
    reader.EnableStream(StreamType.STREAM_TYPE_DEPTH, WIDTH, HEIGHT, FRAME_RATE, StreamOption.STREAM_OPTION_STRONG_STREAM_SYNC);

    // Configure the Face Module
    faceModule = FaceModule.Activate(senseManager);
    FaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
    faceConfig.Detection.isEnabled = detectionEnabled;
    faceConfig.Detection.maxTrackedFaces = maxTrackedFaces;
    faceConfig.Landmarks.isEnabled = landmarksEnabled;
    faceConfig.Landmarks.maxTrackedFaces = maxTrackedFaces;
    faceConfig.TrackingMode = Intel.RealSense.Face.TrackingModeType.FACE_MODE_COLOR_PLUS_DEPTH;
    faceConfig.EnableAllAlerts();
    faceConfig.ApplyChanges();

    // Init the SenseManager. BUG FIX: the status was previously discarded,
    // so a failed init only surfaced later as a NullReferenceException when
    // CaptureManager.Device was accessed below.
    Status initStatus = senseManager.Init();
    if (initStatus < Status.STATUS_NO_ERROR)
    {
        throw new InvalidOperationException("SenseManager.Init() failed: " + initStatus);
    }

    projection = senseManager.CaptureManager.Device.CreateProjection();
    System.Diagnostics.Debug.WriteLine("IsConnected: " + senseManager.IsConnected());
}
/// <summary>
/// Initializes RealSense for face tracking: face module with expression
/// output, a 640x480@30 color stream, Unity native-texture rendering, and
/// the weighted smoothers used to stabilize tracked values.
/// Errors are reported through <c>ErrorLog</c> rather than rethrown.
/// </summary>
protected void Init()
{
    try
    {
        // RealSense setup.
        // Ref: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_face_general_procedure.html
        // Ref: .\Intel\RSSDK\sample\core\RawStreams.unity
        SenseManager = SenseManager.CreateInstance();

        FaceModule = FaceModule.Activate(SenseManager);
        FaceModule.FrameProcessed += FaceModule_FrameProcessed;
        FaceData = FaceModule.CreateOutput();

        FaceConfig = FaceModule.CreateActiveConfiguration();
        FaceConfig.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
        FaceConfig.Expressions.Properties.Enabled = true;
        FaceConfig.ApplyChanges();

        SampleReader = SampleReader.Activate(SenseManager);
        SampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
        SampleReader.SampleArrived += SampleReader_SampleArrived;

        // BUG FIX: Init() status was previously ignored; a failed init now
        // throws so the catch block below reports it via ErrorLog.
        var status = SenseManager.Init();
        if (status < Status.STATUS_NO_ERROR)
        {
            throw new Exception("SenseManager.Init() failed: " + status);
        }
        SenseManager.StreamFrames(false);

        // RawStreams: native texture rendering into a Unity material.
        Texture = NativeTexturePlugin.Activate();
        Material.mainTexture = new Texture2D(640, 480, TextureFormat.BGRA32, false);
        Material.mainTextureScale = new Vector2(-1, -1);
        TexPtr = Material.mainTexture.GetNativeTexturePtr();

        // Query the negotiated color resolution.
        StreamProfileSet profile;
        SenseManager.CaptureManager.Device.QueryStreamProfileSet(out profile);
        Resolution = profile.color.imageInfo;

        // Smoother setup.
        // Ref: https://software.intel.com/sites/landingpage/realsense/camera-sdk/v2016r3/documentation/html/index.html?doc_utils_the_smoother_utility.html
        Smoother = Smoother.CreateInstance(SenseManager.Session);
        SmoothBody = Smoother.Create3DWeighted(BodyPosSmoothWeight);
        SmoothHead = Smoother.Create3DWeighted(HeadAngSmoothWeight);
        SmoothEyes = Smoother.Create2DWeighted(EyesPosSmoothWeight);
        SmoothEyesClose = Smoother.Create1DWeighted(EyesCloseSmoothWeight);
        SmoothBrowRai = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothBrowLow = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothSmile = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothKiss = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothMouth = Smoother.Create1DWeighted(FaceSmoothWeight);
        SmoothTongue = Smoother.Create1DWeighted(FaceSmoothWeight);
    }
    catch (Exception e)
    {
        ErrorLog.text = "RealSense Error\n";
        ErrorLog.text += e.Message;
    }
}
/// <summary>
/// Initializes the RealSense pipeline and flags the device as started.
/// </summary>
public void StartDevice()
{
    // BUG FIX: was `== Status.STATUS_NO_ERROR`, which treated positive
    // warning statuses as failure. SDK convention (used elsewhere in this
    // file) is that only negative statuses are errors.
    if (senseManager.Init() >= Status.STATUS_NO_ERROR)
    {
        Debug.Log(TAG + "Initialization Successful!");
        isStart = true;
    }
    else
    {
        Debug.Log(TAG + "Initialization Failed!");
    }
}
//public void ShowPerformanceTick()
//{
//    /* Optional: Show performance tick */
//    if (image != null)
//    {
//        timer.Tick(RS.ImageExtension.PixelFormatToString(image.Info.format) + " " + image.Info.width + "x" + image.Info.height);
//    }
//}

/// <summary>
/// Initializes the SenseManager pipeline; on failure reports the status
/// and requests the processing loop to stop.
/// </summary>
public void InitSenseManager()
{
    // BUG FIX: was `== RS.Status.STATUS_NO_ERROR`, which treated positive
    // warning statuses as failure; only negative statuses are errors.
    if (SenseManager.Init() >= RS.Status.STATUS_NO_ERROR)
    {
        SetStatus("SenseManager Init Successfull");
    }
    else
    {
        SetStatus("SenseManager Init Failed");
        Stop = true;
    }
}
// Unity entry point: configure the RealSense pipeline (high-resolution
// profiles, depth reader, 3D segmentation) and start streaming.
void Start()
{
    // Create the SenseManager; Realtime=false lets processing run unpaced.
    sm = SenseManager.CreateInstance();
    sm.CaptureManager.Realtime = false;

    // Request 1280x720@30 color and 640x480@30 depth profiles.
    StreamProfileSet requested = new StreamProfileSet();
    requested.color.imageInfo.width = 1280;
    requested.color.imageInfo.height = 720;
    requested.color.frameRate = new RangeF32(30, 30);
    requested.depth.imageInfo.width = 640;
    requested.depth.imageInfo.height = 480;
    requested.depth.frameRate = new RangeF32(30, 30);
    sm.CaptureManager.FilterByStreamProfiles(requested);

    sampleReader = SampleReader.Activate(sm);
    sampleReader2 = SampleReader.Activate(sm);
    sampleReader2.EnableStream(StreamType.STREAM_TYPE_DEPTH, depthWidth, depthHeight, colorFPS);

    // Enable 3D segmentation and hook all events before Init().
    seg = Seg3D.Activate(sm);
    seg.FrameProcessed += OnFrameProcessed;
    seg.OnAlert += Seg_OnAlert;
    sampleReader2.SampleArrived += SampleArrived;

    // Initialize the pipeline.
    sm.Init();

    // Device tweaks: finest IVCAM accuracy, horizontally mirrored image.
    sm.CaptureManager.Device.IVCAMAccuracy = IVCAMAccuracy.IVCAM_ACCURACY_FINEST;
    sm.CaptureManager.Device.MirrorMode = MirrorMode.MIRROR_MODE_HORIZONTAL;

    // Native texture rendering: bind a Texture2D sized to the color stream.
    texPlugin = NativeTexturePlugin.Activate();
    RGBMaterial.mainTexture = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false);
    RGBMaterial.mainTextureScale = new Vector2(-1, -1); // flip the image
    colorTex2DPtr = RGBMaterial.mainTexture.GetNativeTexturePtr();

    // Begin non-blocking streaming.
    sm.StreamFrames(false);
}
/// <summary>
/// Selects the hardware/stream profile, applies the face configuration,
/// initializes the pipeline, and launches the tracking loop on a
/// background thread.
/// </summary>
/// <exception cref="Exception">The SenseManager failed to initialize.</exception>
public void StartTracking()
{
    SelectHardwareAndStreamProfile();
    SetFaceConfiguration();

    pxcmStatus initStatus = SenseManager.Init();
    if (initStatus < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new Exception("SenseManager.Init() error: " + initStatus.ToString());
    }

    // Reset the stop flag, then run the tracking loop on its own thread.
    _shouldStop = false;
    new Thread(this.StartTrackingLoop).Start();
}
// Unity entry point: configure 3D segmentation streaming and start the pipeline.
void Start()
{
    // Create the SenseManager.
    sm = SenseManager.CreateInstance();

    // Request 1280x720@30 color and 640x480@30 depth profiles.
    StreamProfileSet requested = new StreamProfileSet();
    requested.color.imageInfo.width = 1280;
    requested.color.imageInfo.height = 720;
    requested.color.frameRate = new RangeF32(30, 30);
    requested.depth.imageInfo.width = 640;
    requested.depth.imageInfo.height = 480;
    requested.depth.frameRate = new RangeF32(30, 30);
    sm.CaptureManager.FilterByStreamProfiles(requested);

    // Enable 3D segmentation and subscribe to its frame event before Init().
    seg = Seg3D.Activate(sm);
    seg.FrameProcessed += OnFrameProcessed;

    // Initialize the pipeline.
    sm.Init();

    // Native texture rendering: bind a Texture2D sized to the segmented image.
    texPlugin = NativeTexturePlugin.Activate();
    SegMaterial.mainTexture = new Texture2D(segWidth, segHeight, TextureFormat.BGRA32, false);
    SegMaterial.mainTextureScale = new Vector2(-1, -1); // flip the image
    segTex2DPtr = SegMaterial.mainTexture.GetNativeTexturePtr();

    // Begin non-blocking streaming.
    sm.StreamFrames(false);
}
/// <summary>
/// Initializes the SenseManager for the selected device and starts a
/// background thread that continuously acquires aligned frames.
/// </summary>
/// <exception cref="NullReferenceException">The SenseManager was never created.</exception>
/// <exception cref="InvalidRealSenseStatusException">Initialization or frame acquisition failed.</exception>
public void StartStream()
{
    // BUG FIX: guard against a missing SenseManager BEFORE dereferencing it.
    // The original performed this null check only AFTER calling Init() on it.
    if (SenseManager == null)
    {
        throw new NullReferenceException("The SenseManager isn't initialized. Please check if you already called the InitializeStream method.");
    }

    SenseManager.captureManager.FilterByDeviceInfo(DeviceInfo);

    // Initializing the SenseManager
    pxcmStatus initSenseManagerStatus = SenseManager.Init();
    if (initSenseManagerStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        throw new InvalidRealSenseStatusException(initSenseManagerStatus, string.Format("Failed to initialize the SenseManager. Return code: {0}", initSenseManagerStatus));
    }

    IsRunning = true;
    CaptureThread = new Thread(() =>
    {
        while (IsRunning)
        {
            if (SenseManager == null || !SenseManager.IsConnected())
            {
                throw new Exception("The SenseManager is not ready to stream.");
            }

            // Acquiring a frame with ifall=true to wait for both samples to be ready (aligned samples)
            pxcmStatus acquireFrameStatus = SenseManager.AcquireFrame(true);
            if (acquireFrameStatus != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new InvalidRealSenseStatusException(acquireFrameStatus, string.Format("Failed to acquire a frame. Return code: {0}", acquireFrameStatus));
            }

            // NOTE(review): exceptions thrown on this background thread are
            // unobserved by the caller — consider surfacing them via an event.
            SenseManager.ReleaseFrame();
        }
    });
    CaptureThread.Start();
}
/* Using SenseManager to handle data */
// Runs the full hand-tracking pipeline synchronously on the calling thread:
// device selection (live / playback / record), hand-module configuration,
// the acquire/process/release frame loop, and final resource cleanup.
// All status reporting goes through _form.
public void SimplePipeline()
{
    _form.UpdateInfo(String.Empty, System.Drawing.Color.Black);
    bool liveCamera = false;
    // flag stays true unless Init fails; it gates the final "Stopped" status.
    bool flag = true;
    SenseManager instance = null;
    _disconnected = false;
    instance = _form.session.CreateSenseManager();
    if (instance == null)
    {
        _form.UpdateStatus("Failed creating SenseManager");
        _form.EnableTrackingMode(true);
        return;
    }
    CaptureManager captureManager = instance.CaptureManager;
    DeviceInfo info = null;
    if (captureManager != null)
    {
        if (_form.GetPlaybackState())
        {
            // Playback mode: stream from a previously recorded file.
            captureManager.SetFileName(_form.GetFileName(), false);
            info = _form.GetDeviceFromFileMenu(_form.GetFileName());
        }
        else
        {
            if (_form.Devices.Count == 0)
            {
                _form.UpdateStatus("No device were found");
                return;
            }
            _form.Devices.TryGetValue(_form.GetCheckedDevice(), out info);
            if (_form.GetRecordState())
            {
                // Record mode: stream live and also write to file.
                captureManager.SetFileName(_form.GetFileName(), true);
                if (_form.Devices.TryGetValue(_form.GetCheckedDevice(), out info))
                {
                    captureManager.FilterByDeviceInfo(_form.GetCheckedDeviceInfo());
                }
            }
            else
            {
                // Plain live streaming.
                captureManager.FilterByDeviceInfo(_form.GetCheckedDeviceInfo());
                liveCamera = true;
            }
            if (info == null)
            {
                _form.UpdateStatus("Device Failure");
                return;
            }
        }
    }
    /* Set Module */
    HandModule handAnalysis;
    // SenseManager.Handler handler = new SenseManager Handler();
    // handler.onModuleProcessedFrame = new SenseManager.Handler.OnModuleProcessedFrameDelegate(OnNewFrame);
    HandConfiguration handConfiguration = null;
    HandData handData = null;
    handAnalysis = HandModule.Activate(instance);
    if (handAnalysis == null)
    {
        _form.UpdateStatus("Failed Loading Module");
        _form.EnableTrackingMode(true);
        return;
    }
    handConfiguration = handAnalysis.CreateActiveConfiguration();
    if (handConfiguration == null)
    {
        _form.UpdateStatus("Failed Create Configuration");
        _form.EnableTrackingMode(true);
        instance.Close();
        instance.Dispose();
        return;
    }
    handData = handAnalysis.CreateOutput();
    if (handData == null)
    {
        _form.UpdateStatus("Failed Create Output");
        _form.EnableTrackingMode(true);
        handConfiguration.Dispose();
        instance.Close();
        instance.Dispose();
        return;
    }
    FPSTimer timer = new FPSTimer(_form);
    _form.UpdateStatus("Init Started");
    if (instance.Init() == Status.STATUS_NO_ERROR)
    {
        DeviceInfo dinfo;
        DeviceModel dModel = DeviceModel.DEVICE_MODEL_F200;
        Device device = instance.CaptureManager.Device;
        if (device != null)
        {
            device.QueryDeviceInfo(out dinfo);
            dModel = dinfo.model;
            _maxRange = device.DepthSensorRange.max;
        }
        if (handConfiguration != null)
        {
            TrackingModeType trackingMode = TrackingModeType.TRACKING_MODE_FULL_HAND;
            if (_form.GetFullHandModeState())
            {
                trackingMode = TrackingModeType.TRACKING_MODE_FULL_HAND;
            }
            handConfiguration.TrackingMode = trackingMode;
            handConfiguration.EnableAllAlerts();
            handConfiguration.SegmentationImageEnabled = true;
            bool isEnabled = handConfiguration.SegmentationImageEnabled;
            handConfiguration.ApplyChanges();
            // Populate the gesture list shown in the UI (index 0 = no gesture).
            _form.resetGesturesList();
            int totalNumOfGestures = handConfiguration.NumberOfGestures;
            if (totalNumOfGestures > 0)
            {
                this._form.UpdateGesturesToList("", 0);
                for (int i = 0; i < totalNumOfGestures; i++)
                {
                    string gestureName = string.Empty;
                    if (handConfiguration.QueryGestureNameByIndex(i, out gestureName) == Status.STATUS_NO_ERROR)
                    {
                        this._form.UpdateGesturesToList(gestureName, i + 1);
                    }
                }
                _form.UpdateGesturesListSize();
            }
        }
        _form.UpdateStatus("Streaming");
        int frameCounter = 0;
        int frameNumber = 0;
        // Main acquire/process/release loop; runs until the UI requests stop.
        while (!_form.stop)
        {
            // Keep the enabled gesture in sync with the current UI selection.
            string gestureName = _form.GetGestureName();
            if (handConfiguration != null)
            {
                if (string.IsNullOrEmpty(gestureName) == false)
                {
                    if (handConfiguration.IsGestureEnabled(gestureName) == false)
                    {
                        handConfiguration.DisableAllGestures();
                        handConfiguration.EnableGesture(gestureName, true);
                        handConfiguration.ApplyChanges();
                    }
                }
                else
                {
                    handConfiguration.DisableAllGestures();
                    handConfiguration.ApplyChanges();
                }
            }
            // Blocking acquire; a negative status means the stream ended or the device was lost.
            if (instance.AcquireFrame(true) < Status.STATUS_NO_ERROR)
            {
                break;
            }
            frameCounter++;
            if (!DisplayDeviceConnection(!instance.IsConnected()))
            {
                Sample sample = instance.Sample;
                if (sample != null
                    && sample.Depth != null)
                {
                    // Playback files carry their own frame index; live capture counts locally.
                    frameNumber = liveCamera ? frameCounter : instance.CaptureManager.FrameIndex;
                    if (handData != null)
                    {
                        handData.Update();
                        DisplayPicture(sample.Depth, handData);
                        DisplayGesture(handData, frameNumber);
                        DisplayJoints(handData);
                        DisplayAlerts(handData, frameNumber);
                    }
                    _form.UpdatePanel();
                }
                timer.Tick();
            }
            // Release even when the sample was skipped, or the pipeline stalls.
            instance.ReleaseFrame();
        }
    }
    else
    {
        _form.UpdateStatus("Init Failed");
        flag = false;
    }
    foreach (Image Image in _mImages)
    {
        Image.Dispose();
    }
    // Clean Up
    if (handData != null)
    {
        handData.Dispose();
    }
    if (handConfiguration != null)
    {
        handConfiguration.Dispose();
    }
    instance.Close();
    instance.Dispose();
    if (flag)
    {
        _form.UpdateStatus("Stopped");
    }
}
// Unity entry point: set up color/depth streaming, configure the face
// module (detection + landmarks on one face), and start the pipeline.
void Start()
{
    /* Create SenseManager Instance */
    sm = SenseManager.CreateInstance();

    /* Create a SampleReader Instance */
    sampleReader = SampleReader.Activate(sm);

    /* Enable Color & Depth Stream */
    sampleReader.EnableStream(StreamType.STREAM_TYPE_COLOR, colorWidth, colorHeight, colorFPS);
    sampleReader.EnableStream(StreamType.STREAM_TYPE_DEPTH, depthWidth, depthHeight, depthFPS);

    /* Subscribe to sample arrived event */
    sampleReader.SampleArrived += SampleArrived;

    /* Configure the face module (terry add) */
    faceModule = FaceModule.Activate(sm);
    if (faceModule == null)
    {
        // BUG FIX: previously only logged and then fell through to a
        // NullReferenceException on CreateActiveConfiguration().
        Debug.LogError("FaceModule Initialization Failed");
        return;
    }
    //faceModule.FrameProcessed += FaceModule_FrameProcessed;

    FaceConfiguration moduleConfiguration = faceModule.CreateActiveConfiguration();
    if (moduleConfiguration == null)
    {
        // BUG FIX: bail out instead of dereferencing the null configuration.
        Debug.LogError("FaceConfiguration Initialization Failed");
        return;
    }
    moduleConfiguration.TrackingMode = TrackingModeType.FACE_MODE_COLOR;
    moduleConfiguration.Strategy = TrackingStrategyType.STRATEGY_RIGHT_TO_LEFT;
    moduleConfiguration.Detection.maxTrackedFaces = 1;
    moduleConfiguration.Landmarks.maxTrackedFaces = 1;
    moduleConfiguration.Detection.isEnabled = true;
    moduleConfiguration.Landmarks.isEnabled = true;
    moduleConfiguration.Pose.isEnabled = false;
    moduleConfiguration.EnableAllAlerts();
    //moduleConfiguration.AlertFired += OnFiredAlert;
    Status applyChangesStatus = moduleConfiguration.ApplyChanges();
    Debug.Log(applyChangesStatus.ToString());

    /* Initialize pipeline */
    sm.Init();

    /* Create NativeTexturePlugin to render Texture2D natively */
    texPlugin = NativeTexturePlugin.Activate();

    RGBMaterial.mainTexture = new Texture2D(colorWidth, colorHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    RGBMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    colorTex2DPtr = RGBMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    DepthMaterial.mainTexture = new Texture2D(depthWidth, depthHeight, TextureFormat.BGRA32, false); // Update material's Texture2D with enabled image size.
    DepthMaterial.mainTextureScale = new Vector2(-1, -1); // Flip the image
    depthTex2DPtr = DepthMaterial.mainTexture.GetNativeTexturePtr(); // Retrieve native Texture2D Pointer

    /* Start Streaming */
    sm.StreamFrames(false);
}