public async Task<IActionResult> PostAsync([FromBody] DataApiModel data)
{
    if (data == null)
    {
        return new BadRequestObjectResult("Data is empty");
    }

    var response = await _faceRepository.Post(data.Data);
    var visioResponse = await _visioRepository.Post(data.Data);

    var visioModel = new VisioModel();
    if (visioResponse.IsSuccessStatusCode)
    {
        visioModel = JsonConvert.DeserializeObject<VisioModel>(visioResponse.ResponseBody);
    }

    var faceModel = new FaceModel();
    if (response.IsSuccessStatusCode)
    {
        faceModel = JsonConvert.DeserializeObject<IList<FaceModel>>(response.ResponseBody).FirstOrDefault();
    }

    var o = new
    {
        Id = faceModel?.FaceId,
        // Null-conditional access guards against a failed Vision call leaving Description unset.
        Description = visioModel.Description?.Captions?.FirstOrDefault()?.Text,
        FaceAttributes = faceModel?.FaceAttributes,
        Tags = visioModel.Tags
    };

    return new OkObjectResult(o);
}
private void FaceCompreaCoreOnShowFaceDeteiveImageEventHandler(byte[] image, FaceModel faceModel, string port)
{
    if (this.ShowFaceDeteiveImageEventHandler != null)
    {
        this.ShowFaceDeteiveImageEventHandler.BeginInvoke(image, faceModel, base.Name, null, null);
    }
}
private void InitializeKinect()
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth); // | FrameSourceTypes.LongExposureInfrared);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        FrameDescription colorFrameDescription = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
        ColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceSource.TrackingQuality = FaceAlignmentQuality.Low;
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _sensor.Open();
    }
}
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _sensor.Open();

        // Added by Aditya
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
public FaceModel AddFaceItems(FaceModel items)
{
    _inventroyItems.Add(items.name, items);
    return items;
}
/// <summary>
/// Selects the largest detected face and appends its approximate age and
/// gender (as Korean text) to the activity's output string.
/// </summary>
/// <param name="faces"></param>
/// <param name="cognitiveActivity"></param>
public static void setImageOutput(List<FaceModel> faces, CognitiveActivity cognitiveActivity)
{
    List<FaceModel> FM = new List<FaceModel>(faces);

    if (FM.Count > 0)
    {
        FaceModel faceMax = FM[0];
        foreach (var face in FM)
        {
            if (faceMax.faceRectangle.height * faceMax.faceRectangle.width <= face.faceRectangle.height * face.faceRectangle.width)
            {
                faceMax = face; // keep the largest face
            }
        }

        string gender;
        if (string.Equals(faceMax.faceAttributes.Gender, "female", StringComparison.OrdinalIgnoreCase))
        {
            gender = "여성"; // "female"
        }
        else
        {
            gender = "남성"; // "male"
        }

        int age = (int)faceMax.faceAttributes.Age;
        // Appends: " about {age} years old, {gender} ..."
        cognitiveActivity.textValue += " 약 " + age.ToString() + "세인 " + gender + "이 ";
    }
}
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Listen for body data.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
        // _faceSource.TrackingIdLost += OnTrackingIdLost;
        _faceReader = _faceSource.OpenReader();
        _faceReaderSub = _faceSourceSub.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
        _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
        _faceAlignmentSub = new FaceAlignment();

        // Start tracking!
        _sensor.Open();
    }
}
// Feature extraction
private FaceModel ExtractFeature(Bitmap bitmap)
{
    var detectResult = Detection(bitmap);

    var bmpData = bitmap.LockBits(
        new Rectangle(0, 0, bitmap.Width, bitmap.Height),
        ImageLockMode.ReadOnly,
        PixelFormat.Format24bppRgb);

    var imageData = new ImageData
    {
        PixelArrayFormat = 513, // 0x201: RGB24 (B8G8R8)
        Width = bitmap.Width,
        Height = bitmap.Height,
        Pitch = new int[4] { bmpData.Stride, 0, 0, 0 },
        ppu8Plane = new IntPtr[4] { bmpData.Scan0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero }
    };

    var FFI = new FaceFeatureInput();
    FFI.FaceRect = Marshal.PtrToStructure<FaceRect>(detectResult.PFaceRect);
    FFI.Orient = 1;

    // Pre-allocate the feature buffer; Size stays 0 until extraction succeeds.
    FaceModel faceModel = new FaceModel
    {
        Size = 0,
        PFeature = Marshal.AllocCoTaskMem(22020)
    };

    if (ArcWrapper.ExtractFeature(_REnginer, ref imageData, ref FFI, out var fm) == (int)ErrorCode.Ok)
    {
        faceModel.Size = fm.Size;
        ArcWrapper.CopyMemory(faceModel.PFeature, fm.PFeature, fm.Size);
    }

    bitmap.UnlockBits(bmpData); // the original never unlocked the bitmap

    return faceModel;
}
public static TileData[][,] CreateEmptyFaceArray(IntVector3 sizes, TileData[][,] oldFaces = null)
{
    TileData[][,] result = new TileData[FaceModel.NUMBER_FACES][,];

    for (int i = 0; i < FaceModel.NUMBER_FACES; i++)
    {
        FaceModel face = FaceModel.ModelsDictionary[(CubeFace)i];
        int XSize = sizes[face.axes[0]];
        int YSize = sizes[face.axes[1]];
        result[i] = new TileData[XSize, YSize];

        for (int x = 0; x < XSize; x++)
        {
            for (int y = 0; y < YSize; y++)
            {
                if (oldFaces != null && oldFaces[i].GetLength(0) > x && oldFaces[i].GetLength(1) > y)
                {
                    result[i][x, y] = oldFaces[i][x, y];
                }
                else
                {
                    result[i][x, y] = GetNormalTile();
                }
            }
        }
    }

    return result;
}
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (faceModelBuilder != null)
    {
        faceModelBuilder.Dispose();
        faceModelBuilder = null;
    }

    if (hdFaceFrameReader != null)
    {
        hdFaceFrameReader.Dispose();
        hdFaceFrameReader = null;
    }

    if (bodyFrameReader != null)
    {
        bodyFrameReader.Dispose();
        bodyFrameReader = null;
    }

    if (faceModel != null)
    {
        faceModel.Dispose();
        faceModel = null;
    }

    if (kinect != null)
    {
        kinect.Close();
        kinect = null;
    }
}
public static async Task<IEnumerable<FaceModel>> GetFacesAsync(byte[] byteData)
{
    try
    {
        HttpClient client = new HttpClient();
        client.DefaultRequestHeaders.Add(AzureApiConstants.SubscriptionKeyHeader, AzureApiConstants.SubscriptionKey);

        string uri = AzureApiConstants.UriBase + "?" + AzureApiConstants.DefaultRequestParameters;

        HttpResponseMessage response;
        using (ByteArrayContent content = new ByteArrayContent(byteData))
        {
            content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            response = await client.PostAsync(uri, content);
        }

        return FaceModel.FromJson(await response.Content.ReadAsStringAsync());
    }
    catch (Exception e)
    {
        // Errors are logged and swallowed; callers must handle a null result.
        Console.WriteLine("\n" + e.Message);
    }

    return null;
}
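A minimal call-site sketch for GetFacesAsync above, assuming System.IO is imported and that FaceModel exposes a FaceId property as in the first snippet; the "photo.jpg" path is a placeholder, not from the original:

public static async Task PrintFaceIdsAsync()
{
    // Placeholder path; replace with a real image file.
    byte[] imageBytes = File.ReadAllBytes("photo.jpg");

    IEnumerable<FaceModel> faces = await GetFacesAsync(imageBytes);
    if (faces == null)
    {
        return; // GetFacesAsync swallows errors and returns null
    }

    foreach (var face in faces)
    {
        Console.WriteLine(face.FaceId);
    }
}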
public void ChangeFaceSize(CubeFace face, int XSize, int YSize)
{
    FaceModel faceModel = FaceModel.ModelsDictionary[face];
    IntVector3 sizes = faceModel.GetRealSizes(XSize, YSize, balls.GetLength((int)faceModel.axes[2]));
    CreateBoard(sizes);
}
public async Task<IActionResult> Edit(int id, [Bind("Id,PictureId,Top,Left,Width,Height")] FaceModel faceModel)
{
    if (id != faceModel.Id)
    {
        return NotFound();
    }

    if (ModelState.IsValid)
    {
        try
        {
            _context.Update(faceModel);
            await _context.SaveChangesAsync();
        }
        catch (DbUpdateConcurrencyException)
        {
            if (!FaceModelExists(faceModel.Id))
            {
                return NotFound();
            }
            else
            {
                throw;
            }
        }

        return RedirectToAction(nameof(Index));
    }

    ViewData["PictureId"] = new SelectList(_context.Pictures, "Id", "Id", faceModel.PictureId);
    return View(faceModel);
}
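The Edit action above calls a FaceModelExists helper that is not shown. A minimal sketch of what the standard EF Core scaffolding generates, assuming the context exposes a FaceModels DbSet (the set name is an assumption):

private bool FaceModelExists(int id)
{
    // Existence check against the EF Core context; FaceModels is assumed.
    return _context.FaceModels.Any(e => e.Id == id);
}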
//------------
public MainWindow()
{
    InitializeComponent();

    // Configure the end point
    ep = new IPEndPoint(IP, 9999);

    // Initialize the KinectSensor
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        // Listen for body data.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Listen for multi-source data.
        _multiReader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color);
        _multiReader.MultiSourceFrameArrived += MultiReader_MultiSourceFrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        // Start tracking!
        _sensor.Open();
    }
}
public void Initialization(FaceModel face)
{
    Name.Value = face.Name;
    Level.Value = face.Level;
    Face.Value = face.Face;
    Badge.Value = face.Badge;
}
private void Page_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    _coordinateMapper = _sensor.CoordinateMapper;
    _collectedMeasurements = new List<double>();

    if (_sensor != null)
    {
        _infraredFrameDescription = _sensor.InfraredFrameSource.FrameDescription;
        _infraredBitmap = new WriteableBitmap(_infraredFrameDescription.Width, _infraredFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
        camera.Source = _infraredBitmap;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyCount = _sensor.BodyFrameSource.BodyCount;
        _bodies = new Body[_bodyCount];
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
        _faceFrameSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceFrameReader = _faceFrameSource.OpenReader();
        _faceFrameReader.FrameArrived += FaceReader_FrameArrived;

        _irReader = _sensor.InfraredFrameSource.OpenReader();
        _irReader.FrameArrived += InfraredReader_FrameArrived;

        _sensor.Open();
    }

    _settingsVM = DevPortalVM.LoadContext(SETTINGS_FILENAME);
    DevPortalGrid.DataContext = _settingsVM;
    _devicePortalClient = new DevPortalHelper(_settingsVM);
}
void OnApplicationQuit()
{
    if (CurrentFaceModel != null)
    {
        CurrentFaceModel.Dispose();
        CurrentFaceModel = null;
    }

    highDefinitionFaceFrameSource = null;

    if (highDefinitionFaceFrameReader != null)
    {
        highDefinitionFaceFrameReader.Dispose();
        highDefinitionFaceFrameReader = null;
    }

    if (_Reader != null)
    {
        _Reader.Dispose();
        _Reader = null;
    }

    if (_Sensor != null)
    {
        if (_Sensor.IsOpen)
        {
            _Sensor.Close();
        }

        _Sensor = null;
    }
}
internal static void CopyToFrameToDrawingContext(this HighDefinitionFaceFrame highDefinitionFaceFrame, DrawingContext context, bool useDepthSpace = true, byte bodyIndex = 1, double pointRadius = 2F)
{
    var faceAlignment = new FaceAlignment();
    var coordinateMapper = highDefinitionFaceFrame.HighDefinitionFaceFrameSource.KinectSensor.CoordinateMapper;
    var brush = BodyIndexColor.GetBrushFromBodyIndex(bodyIndex);

    highDefinitionFaceFrame.GetAndRefreshFaceAlignmentResult(faceAlignment);

    var faceModel = new FaceModel();
    var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);

    if (vertices.Count > 0)
    {
        for (int index = 0; index < vertices.Count; index++)
        {
            CameraSpacePoint vertice = vertices[index];
            DepthSpacePoint point = coordinateMapper.MapCameraPointToDepthSpace(vertice);

            // Abort drawing as soon as a vertex cannot be mapped into depth space.
            if (float.IsInfinity(point.X) || float.IsInfinity(point.Y))
            {
                return;
            }

            context.DrawEllipse(brush, null, point.GetPoint(), pointRadius, pointRadius);
        }
    }
}
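A hedged sketch of how the extension above might be driven from a HighDefinitionFaceFrameReader event; the _drawingVisual field, the handler name, and the surrounding setup are assumptions, not part of the original snippet:

// Sketch only: renders the HD face vertices into a WPF DrawingVisual each frame.
// _drawingVisual is a hypothetical field hosted in the window's visual tree.
private void FaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
{
    using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null || !frame.IsFaceTracked)
        {
            return;
        }

        using (DrawingContext context = _drawingVisual.RenderOpen())
        {
            frame.CopyToFrameToDrawingContext(context);
        }
    }
}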
private List<FaceItem> LoadFaces(DataTable dt)
{
    List<FaceItem> items = new List<FaceItem>();

    foreach (DataRow dr in dt.Rows)
    {
        if (dr["faceResult"] != null)
        {
            byte[] faceresult_value = dr["faceResult"] as byte[];
            if (faceresult_value != null)
            {
                FaceItem item = new FaceItem();
                item.OrderId = DateTime.Now.Ticks;
                item.FaceID = dr[0].ToString();

                FaceModel faceModel = new FaceModel();
                faceModel.lFeatureSize = faceresult_value.Length;
                faceModel.pbFeature = Marshal.AllocHGlobal(faceresult_value.Length);
                Marshal.Copy(faceresult_value, 0, faceModel.pbFeature, faceresult_value.Length);

                item.FaceModel = faceModel;
                items.Add(item);
            }
        }
    }

    return items;
}
/// <summary>
/// Initialize Kinect object
/// </summary>
private void InitializeHDFace()
{
    this.CurrentBuilderStatus = "Ready To Start Capture";

    this.sensor = KinectSensor.GetDefault();
    this.bodySource = this.sensor.BodyFrameSource;
    this.bodyReader = this.bodySource.OpenReader();
    this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
    this.bodyCount = this.sensor.BodyFrameSource.BodyCount;

    this.highDefinitionFaceFrameSource = new HighDefinitionFaceFrameSource(this.sensor);
    this.highDefinitionFaceFrameSource.TrackingIdLost += this.HdFaceSource_TrackingIdLost;

    this.highDefinitionFaceFrameReader = this.highDefinitionFaceFrameSource.OpenReader();
    this.highDefinitionFaceFrameReader.FrameArrived += this.HdFaceReader_FrameArrived;

    this.currentFaceModel = new FaceModel();
    this.currentFaceAlignment = new FaceAlignment();
    this.coordinateMapper = this.sensor.CoordinateMapper;

    FaceFrameFeatures faceFrameFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace
        | FaceFrameFeatures.PointsInColorSpace
        | FaceFrameFeatures.RotationOrientation
        | FaceFrameFeatures.FaceEngagement
        | FaceFrameFeatures.Glasses
        | FaceFrameFeatures.Happy
        | FaceFrameFeatures.LeftEyeClosed
        | FaceFrameFeatures.RightEyeClosed
        | FaceFrameFeatures.LookingAway
        | FaceFrameFeatures.MouthMoved
        | FaceFrameFeatures.MouthOpen;

    // create the face frame source with the required face frame features and an initial tracking Id of 0
    this.faceFrameSource = new FaceFrameSource(this.sensor, 0, faceFrameFeatures);

    // open the corresponding reader
    this.faceFrameReader = this.faceFrameSource.OpenReader();
    this.faceFrameResult = null;

    if (this.faceFrameReader != null)
    {
        // wire handler for face frame arrival
        this.faceFrameReader.FrameArrived += this.Reader_FaceFrameArrived;
    }

    this.InitializeMesh();
    this.UpdateMesh();

    this.sensor.Open();
}
static void Main(string[] args)
{
    _sensor = KinectSensor.GetDefault();
    _worker.getSubjectID();

    if (_sensor != null)
    {
        _sensor.Open();
        Console.WriteLine("sensorOpened");

        if (_sensor.IsOpen)
        {
            _coordinateMapper = _sensor.CoordinateMapper;

            _bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
            _bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;

            _faceSource = new HighDefinitionFaceFrameSource(_sensor);
            _faceReader = _faceSource.OpenReader();
            _faceReader.FrameArrived += FaceReader_FrameArrived;

            _faceModel = new FaceModel();
            _faceAlignment = new FaceAlignment();
        }
    }

    // Block until Enter is pressed so the frame handlers keep running.
    string input = Console.ReadLine();
    _sensor.Close();
}
void Start()
{
    // This mirrors InitializeHDFace().
    theGeometry = new Mesh();
    //SetViewCollectionStatus();

    sensor = KinectSensor.GetDefault();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();
    bodyReader.FrameArrived += BodyReader_FrameArrived;

    highDefinitionFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
    highDefinitionFaceFrameSource.TrackingIdLost += HdFaceSource_TrackingIdLost;

    highDefinitionFaceFrameReader = highDefinitionFaceFrameSource.OpenReader();
    highDefinitionFaceFrameReader.FrameArrived += HdFaceReader_FrameArrived;

    CurrentFaceModel = FaceModel.Create();
    currentFaceAlignment = FaceAlignment.Create();

    sensor.Open();

    tempAus = new Dictionary<string, float>();
    actorBlendshapeNames = getBlendShapeNames(actorMesh);
}
/// <summary>
/// Initializes the Kinect sensor and the variables used for data acquisition.
/// </summary>
private void Initialize()
{
    // Get the Kinect sensor
    this.kinect = KinectSensor.GetDefault();
    if (kinect == null)
    {
        return;
    }

    // Create a FrameReader that pulls Body (skeleton) data from the sensor
    reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
    reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

    // Declare the source and FrameReader for high-definition face tracking
    this.hdFaceFrameSource = new HighDefinitionFaceFrameSource(this.kinect);
    this.hdFaceFrameSource.TrackingIdLost += this.OnTrackingIdLost;
    this.hdFaceFrameReader = this.hdFaceFrameSource.OpenReader();
    this.hdFaceFrameReader.FrameArrived += this.OnFaceFrameArrived;

    this.faceModel = new FaceModel();
    this.faceAlignment = new FaceAlignment();

    // Update the views
    InitializeMesh();
    UpdateMesh();

    // Start the sensor
    kinect.Open();
}
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    /*ColorFrameReader cfr = _sensor.ColorFrameSource.OpenReader();
    fd = _sensor.ColorFrameSource.FrameDescription;
    colordata = new byte[fd.LengthInPixels * 4];
    bitmap = new WriteableBitmap(fd.Width, fd.Height, 96, 96, PixelFormats.Bgr32, null);

    this.image.Source = bitmap;*/

    if (_sensor != null)
    {
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        //_bodyReader.FrameArrived += BodyReader_FrameArrived;

        _faceSource = new HighDefinitionFaceFrameSource(_sensor);
        _faceReader = _faceSource.OpenReader();
        //_faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        //cfr.FrameArrived += cfr_FrameArrived;
        //_sensor.Open();
    }
}
void initialize()
{
    IsFaceModelCollectCompleted = false;
    FaceCaptureStatus = "";
    FaceVertices = new List<CameraSpacePoint>();

    sensor = KinectSensor.GetDefault();
    if (sensor == null)
    {
        return;
    }

    sensor.Open();

    bodySource = sensor.BodyFrameSource;
    bodyReader = bodySource.OpenReader();

    hdFaceFrameSource = HighDefinitionFaceFrameSource.Create(sensor);
    hdFaceFrameReader = hdFaceFrameSource.OpenReader();

    faceModel = FaceModel.Create();
    faceAlignment = FaceAlignment.Create();

    FaceModelBuilderAttributes attributes = FaceModelBuilderAttributes.None;
    faceModelBuilder = hdFaceFrameSource.OpenModelBuilder(attributes);
    faceModelBuilder.CollectFaceDataAsync(collectFaceModelCompleted, collectFaceModelFailed);
}
private void GetPic(object _bitmap)
{
    //var image = Convert.ToBase64String(Common.GetFilebyte(_bitmap as Image));
    lock (obj)
    {
        var faceDetectResponse = client.FaceDetect(_bitmap as Image);
        if (faceDetectResponse.facemodels.Count == 0)
        {
            return;
        }

        Image bitmap = null;
        float avalue = 0;
        FaceModel facemodel = null;

        for (int i = 0; i < faceDetectResponse.facemodels.Count; i++)
        {
            var model = faceDetectResponse.facemodels[i];
            float o = client.FaceContrast(facetrue1, model.base64feature).similar;
            if (o > avalue)
            {
                avalue = o;
                facemodel = model;
            }
            //bitmap = Common.DrawImage(_bitmap as Image, model.facerectanglex, model.facerectangley, model.facerectanglewidth, model.facerectangleheight);
        }

        // Guard: if no comparison scored above zero, facemodel is still null.
        if (facemodel == null)
        {
            return;
        }

        bitmap = Common.DrawImage(_bitmap as Image, facemodel.facerectanglex, facemodel.facerectangley, facemodel.facerectanglewidth, facemodel.facerectangleheight);
        Send(bitmap, avalue);
    }
}
void collectFaceModelCompleted(FaceModelData faceModelData)
{
    print("Model created!");
    faceModel = faceModelData.ProduceFaceModel();
    faceModelBuilder.Dispose();
    faceModelBuilder = null;
    IsFaceModelCollectCompleted = true;
}
public void Awake()
{
    this.Body = null;
    this.Face = null;
    this.FaceAlignment = null;
    this.FaceModel = null;
    this.id = 0;
}
public void Dispose()
{
    if (_faceModel != null)
    {
        _faceModel.Dispose();
        _faceModel = null;
    }
}
private void HdFaceBuilder_CollectionCompleted(object sender, FaceModelBuilderCollectionCompletedEventArgs e)
{
    var modelData = e.ModelData;

    this.faceModel = modelData.ProduceFaceModel();

    this.faceModelBuilder.Dispose();
    this.faceModelBuilder = null;
}