/// <summary>
/// Constructor.
/// </summary>
/// <param name="detectionResults">The detection results.</param>
/// <param name="width">The width of the bitmap buffer that was used for detection in screen coordinates.</param>
/// <param name="height">The height of the bitmap buffer that was used for detection in screen coordinates.</param>
/// <param name="frameNumber">The frame number where the results were detected.</param>
public MarkerDetectionEventArgs(DetectionResults detectionResults, int width, int height, long frameNumber)
{
    DetectionResults = detectionResults;
    BufferWidth = width;
    BufferHeight = height;
    FrameNumber = frameNumber;
}
/// <summary>
/// Routes the supplied detection results to the board and selector components.
/// Any state from the previous frame is cleared first.
/// </summary>
/// <param name="detectionResults">The detection results to process.</param>
public void Detect(DetectionResults detectionResults)
{
    // Clear results left over from the previous frame.
    mBoard.setDetectionResult(null);
    mSelector.setDetectionResult(null);
    mSelector.setBoardMarker(null);

    // Hand each result to the component that owns its marker.
    foreach (DetectionResult result in detectionResults)
    {
        string markerName = result.Marker.Name;
        if (markerName == "selection_marker")
        {
            mSelector.setDetectionResult(result);
        }
        else if (markerName == "board_marker")
        {
            mBoard.setDetectionResult(result);
            mSelector.setBoardMarker(result);
        }
    }
}
/// <summary>
/// Grabs a luminance snapshot from the camera preview and runs marker detection
/// on it. Re-entrant calls are ignored while a pass is already in progress.
/// </summary>
private void Detect()
{
    // Skip when a pass is running or the camera is not ready yet.
    if (isDetecting || !isInitialized)
    {
        return;
    }

    isDetecting = true;
    try
    {
        var pixelWidth = photoCamera.PreviewResolution.Width;
        var pixelHeight = photoCamera.PreviewResolution.Height;
        int pixelCount = System.Convert.ToInt32(pixelWidth * pixelHeight);

        // (Re)allocate the grayscale buffer when the preview size changed.
        if (buffer == null || buffer.Length != pixelWidth * pixelHeight)
        {
            buffer = new byte[pixelCount];
        }

        // Grab snapshot for the marker detection.
        photoCamera.GetPreviewBufferY(buffer);

        // Detect the markers and publish them to the game state.
        arDetector.Threshold = 100;
        DetectionResults dr = arDetector.DetectAllMarkers(
            buffer,
            System.Convert.ToInt32(pixelWidth),
            System.Convert.ToInt32(pixelHeight));
        GameState.getInstance().Detect(dr);
    }
    finally
    {
        isDetecting = false;
    }
}
/// <summary>
/// Runs the requested anomaly filters over the document's sentence clusters
/// and reconstructs the filtered text.
/// </summary>
/// <param name="types">The filters to apply, in order. Must not be empty.</param>
/// <returns>The reconstructed text together with all collected anomalies.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="types"/> is empty.</exception>
public DetectionResult Detect(params FilterTypes[] types)
{
    if (types.Length == 0)
    {
        throw new ArgumentException("Value cannot be an empty collection.", nameof(types));
    }

    log.Debug("Detect");
    anomaly.Clear();

    // Very short texts are returned unfiltered — not enough data to cluster.
    if (document.Sentences.Length <= 3)
    {
        log.Debug("Detect - text too short");
        return new DetectionResult(reconstructor.Reconstruct(document.Sentences), anomaly.ToArray());
    }

    log.Info("Using sentence clustering");
    var sentenceClusters = GetSentencesBlock().ToArray();

    // Each filter consumes the clusters produced by the previous one and
    // contributes its anomalies to the shared collection.
    foreach (FilterTypes filterType in types)
    {
        DetectionResults filtered = factory.Create(filterType).Filter(new DocumentClusters(sentenceClusters));
        anomaly.AddRange(filtered.Anomaly);
        sentenceClusters = filtered.Result;
    }

    var sentences = sentenceClusters.SelectMany(cluster => cluster.Sentences).Distinct().ToArray();
    return new DetectionResult(reconstructor.Reconstruct(sentences), anomaly.ToArray());
}
/// <summary>
/// Detects faces in the posted image, identifies known persons, uploads the
/// image, and notifies subscribers via email and an IoT device message.
/// </summary>
/// <param name="image">The posted image (base64 data plus device name).</param>
/// <returns>The detection results, or a message when no face was found.</returns>
public async Task<IActionResult> Detect([FromBody] ImagePost image)
{
    byte[] bytes = Convert.FromBase64String(image.ImageData);

    // First pass: quick detection with the alternate client just to see
    // whether any face is present at all.
    IList<DetectedFace> detectedFaces;
    using (MemoryStream stream = new MemoryStream(bytes))
    {
        HttpOperationResponse<IList<DetectedFace>> response =
            await _altClient.Face.DetectWithStreamWithHttpMessagesAsync(stream);
        detectedFaces = response.Body;
    }

    if (detectedFaces == null || detectedFaces.Count == 0)
    {
        return Ok("No face detected!");
    }

    // Second pass: detect with the primary client to obtain face IDs that can
    // be fed into identification.
    using (MemoryStream stream = new MemoryStream(bytes))
    {
        HttpOperationResponse<IList<DetectedFace>> response =
            await _client.Face.DetectWithStreamWithHttpMessagesAsync(stream);
        detectedFaces = response.Body;
    }

    List<Guid> guids = detectedFaces
        .Where(d => d.FaceId.HasValue)
        .Select(d => d.FaceId.Value)
        .ToList();

    HttpOperationResponse<IList<IdentifyResult>> identifyResult =
        await _client.Face.IdentifyWithHttpMessagesAsync("1", guids);
    List<DetectedPerson> foundPersons = identifyResult.Body
        .Select(r => GetPersonFromResult(r, GetPersons().ToList()))
        .Where(dp => dp != null)
        .ToList();

    DateTime dateTime = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(
        DateTime.UtcNow, "Mountain Standard Time");

    string url;
    using (MemoryStream stream = new MemoryStream(bytes))
    {
        url = await _imageManager.UploadImageAsync(
            stream, image.DeviceName + dateTime.ToString("yyMMddHHmmss") + ".png");
    }

    DetectionResults results = new DetectionResults
    {
        Persons = foundPersons,
        Url = url,
        DeviceName = image.DeviceName,
        DateTime = dateTime.ToString("G")
    };

    string serializeObject = JsonConvert.SerializeObject(results);

    string email = HttpContext.Session.GetString("Email");
    if (email != null)
    {
        await EmailSender.SendEmailsAsync(email, "Mobot Alert", serializeObject);
    }

    // BUG FIX: this call was previously fire-and-forget, so send failures were
    // unobserved and the HTTP response could race the device message.
    await IotMessageSender.SendDeviceToCloudMessagesAsync(image.DeviceName, serializeObject);

    // NOTE(review): unless DetectionResults overrides ToString this returns the
    // type name rather than the data — confirm whether serializeObject was intended.
    return Ok(results.ToString());
}
/// <summary>
/// Looks up the direction text for the detected marker. When several known
/// markers were detected, the one latest in markerList wins.
/// </summary>
/// <param name="detectedResults">The detection results for the current frame.</param>
/// <returns>The matching direction, or an empty string when no known marker was found.</returns>
private string displayAR(DetectionResults detectedResults)
{
    string direction = "";
    for (int index = 0; index < markerList.Count; index++)
    {
        if (detectedResults.Any(r => r.Marker == markerList[index]))
        {
            direction = dbmList[index].Direction;
        }
    }
    return direction;
}
/// <summary>
/// Applies the detected marker transformations to the AR content overlays.
/// </summary>
/// <param name="detectedResults">The detection results for the current frame.</param>
private void ApplyTransformations(DetectionResults detectedResults)
{
    // The L marker drives the first overlay; fall back to the first result
    // when it was not detected in this frame.
    var resultL = detectedResults.FirstOrDefault(r => r.Marker == markerL) ?? detectedResults[0];
    ApplyTransformation(GrdARContent1, resultL.Transformation);

    // The SLAR marker drives the second overlay, but only when at least two
    // results exist; fall back to the second result when it was not found.
    if (detectedResults.Count > 1)
    {
        var resultSlar = detectedResults.FirstOrDefault(r => r.Marker == markerSlar) ?? detectedResults[1];
        ApplyTransformation(GrdARContent2, resultSlar.Transformation);
        this.GrdARContent2.Visibility = System.Windows.Visibility.Visible;
    }
}
/// <summary>
/// Runs marker detection on the current camera frame. No-op while a previous
/// pass is still running, before initialization, or without a video capture.
/// </summary>
private void Detect()
{
    if (isDetecting || !isInitialized || videoCapture == null)
    {
        return;
    }

    isDetecting = true;
    try
    {
        // The gray buffer is mirrored by default; flip it unless the
        // configuration asks for a mirrored camera image.
        byte[] grayBuffer = config.GetBool("CameraSetup", "Mirror")
            ? videoCapture.BufferGray
            : Utils.FlipHorizontalGrayscale(videoCapture.BufferGray, videoCapture.GrayWidth, videoCapture.GrayHeight);

        // Detect the markers and publish the results.
        DetectionResults = arDetector.DetectAllMarkers(grayBuffer, videoCapture.GrayWidth, videoCapture.GrayHeight);
    }
    finally
    {
        isDetecting = false;
    }
}
/// <summary>
/// Detects all markers in the buffer.
/// </summary>
/// <param name="buffer">The buffer which should be searched for markers.</param>
/// <returns>The results of the detection.</returns>
protected DetectionResults DetectAllMarkers(INyARRgbRaster buffer)
{
    // Binarize the buffer; the fixed threshold is only applied when the
    // detector is not running in adaptive mode.
    if (!isAdaptive)
    {
        ((NyARRasterFilter_ARToolkitThreshold)this.bufferFilter).setThreshold(this.Threshold);
    }
    this.bufferFilter.doFilter(buffer, this.filteredBuffer);

    // Detect squares in the filtered buffer and collect the results.
    this.squareDetectionListener.Reset();
    this.squareDetectionListener.Buffer = buffer;
    this.squareDetector.detectMarkerCB(this.filteredBuffer, squareDetectionListener);
    var results = this.squareDetectionListener.Results;

    // Jitter suppression: keep the previous result for a marker unless its
    // center moved farther than JitteringThreshold (distances compared squared
    // to avoid a square root).
    if (previousResults != null && JitteringThreshold != 0)
    {
        var jitThresholdSq = JitteringThreshold * JitteringThreshold;
        var newResults = new DetectionResults();
        foreach (var result in results)
        {
            var previousResult = previousResults.FirstOrDefault(r => r.Marker == result.Marker);
            if (previousResult == null)
            {
                newResults.Add(result);
            }
            else
            {
                var pcenter = previousResult.Square.Center;
                var center = result.Square.Center;
                var dx = pcenter.X - center.X;
                // BUG FIX: the Y delta previously reused the X coordinates
                // (pcenter.X - center.X), so purely vertical marker movement
                // never exceeded the jitter threshold.
                var dy = pcenter.Y - center.Y;
                var lenSq = dx * dx + dy * dy;
                newResults.Add(lenSq > jitThresholdSq ? result : previousResult);
            }
        }
        previousResults = newResults;
        return newResults;
    }

    previousResults = results;
    return results;
}
/// <summary>
/// Resets the Results to a fresh, empty collection.
/// </summary>
public void Reset() => this.Results = new DetectionResults();
/// <summary>
/// Runs one marker-detection pass over the current camera frame.
/// Skipped while a pass is active, before initialization, or when no
/// video capture is available.
/// </summary>
private void Detect()
{
    bool notReady = isDetecting || !isInitialized || videoCapture == null;
    if (notReady)
    {
        return;
    }

    isDetecting = true;
    try
    {
        byte[] grayBuffer;
        if (config.GetBool("CameraSetup", "Mirror"))
        {
            // The gray buffer is mirrored by default.
            grayBuffer = videoCapture.BufferGray;
        }
        else
        {
            // Un-mirror the frame before detection.
            grayBuffer = Utils.FlipHorizontalGrayscale(
                videoCapture.BufferGray, videoCapture.GrayWidth, videoCapture.GrayHeight);
        }

        // Detect the markers and store the results.
        var detected = arDetector.DetectAllMarkers(grayBuffer, videoCapture.GrayWidth, videoCapture.GrayHeight);
        DetectionResults = detected;
    }
    finally
    {
        isDetecting = false;
    }
}
/// <summary>
/// Handles the upload form: runs image AI detection on either an uploaded
/// file or an image URL across the Baidu, QCloud and Aliyun platforms.
/// </summary>
/// <param name="files">Uploaded image files (only the first one is used).</param>
/// <param name="imageUrl">Alternatively, a URL of the image to analyze.</param>
public void OnPost(List<IFormFile> files, string imageUrl)
{
    this.ViewData["error"] = -1;

    if (files != null && files.Count > 0)
    {
        // File-upload mode.
        var file = files[0];
        var bytes = ReadAllBytes(file);
        if (bytes.Length > 0)
        {
            bytes = ReSizeImageFile(bytes);
            RunByteDetections(bytes);

            // Aliyun needs the image in its OSS first: upload, detect by URL,
            // then clean up the temporary object.
            var aliyunApi = new AliyunAI.ImageAI();
            var url = aliyunApi.UploadFile(file.FileName.Replace(" ", ""), bytes);
            DetectionResults.Add(aliyunApi.Detection(url));
            aliyunApi.DeleteFile(file.FileName);

            ImageBase64 = Convert.ToBase64String(bytes);
        }
    }
    else if (!string.IsNullOrEmpty(imageUrl))
    {
        // URL mode: download the image, then detect.
        byte[] bytes;
        using (var webclient = new WebClient())
        {
            bytes = webclient.DownloadData(imageUrl);
        }
        if (bytes.Length > 0)
        {
            bytes = ReSizeImageFile(bytes);
            RunByteDetections(bytes);

            // Aliyun can detect directly from the public URL.
            DetectionResults.Add(new AliyunAI.ImageAI().Detection(imageUrl));

            ImageBase64 = Convert.ToBase64String(bytes);
        }
    }
    else
    {
        ErrorMessage = "上传图片或者url,你总得设置一个吧";
    }
}

/// <summary>Reads the complete content of an uploaded file into a byte array.</summary>
private static byte[] ReadAllBytes(IFormFile file)
{
    // BUG FIX: the previous code issued a single Stream.Read call, which is
    // not guaranteed to fill the buffer; loop until the stream is exhausted.
    // The stream is also now disposed.
    using (var stream = file.OpenReadStream())
    {
        var size = (int)stream.Length;
        var bytes = new byte[size];
        int offset = 0;
        while (offset < size)
        {
            int read = stream.Read(bytes, offset, size - offset);
            if (read == 0)
            {
                break;
            }
            offset += read;
        }
        return bytes;
    }
}

/// <summary>Runs the byte-based detections (Baidu + QCloud) and records their results.</summary>
private void RunByteDetections(byte[] bytes)
{
    DetectionResults.Add(new BaiduAI.ImageAI().Detection(bytes));
    DetectionResults.Add(QcloudAI.ImageAI.Detection(bytes));
}