private async void TestButton_Click(object sender, EventArgs e)
{
  // Get the embedded test image from the project resources.
  object o = Resources.ResourceManager.GetObject("OnGuard");
  using (Bitmap bm = (Bitmap)o)
  {
    using (MemoryStream mem = new MemoryStream())
    {
      bm.Save(mem, ImageFormat.Jpeg);
      mem.Position = 0;

      try
      {
        AILocation location = new AILocation(Guid.NewGuid(), ipAddressText.Text, (int)portNumeric.Value);
        AIAnalyzer ai = new AIAnalyzer();

        // Stay on the UI thread after the await so the MessageBox owner ("this") is valid.
        List<ImageObject> imageObjects = await ai.ProcessImageSync(location, mem, "Test Image").ConfigureAwait(true);

        if (imageObjects != null && imageObjects.Count > 0)
        {
          MessageBox.Show(this, "Successfully processed a picture via DeepStack", "Success!");
        }
        else
        {
          MessageBox.Show(this, "The AI Test FAILED! DeepStack was found, but the image was not processed successfully. Check your DeepStack startup to make sure --VISION-DETECTION True is set!", "Test Failure!");
        }
      }
      catch (AiNotFoundException)
      {
        MessageBox.Show(this, "The AI Test FAILED! Check the IP Address and port. Make sure DeepStack is running.", "Test Failed!");
      }
    }
  }
}
private void OKButton_Click(object sender, EventArgs e)
{
  if (!string.IsNullOrEmpty(ipAddressText.Text))
  {
    if (ipAddressText.Text.Contains("http") || ipAddressText.Text.Contains("//"))
    {
      MessageBox.Show("The IP Address or machine name must not include \"http\" or \"//\"");
    }
    else
    {
      if (Location == null)
      {
        Location = new AILocation(Guid.NewGuid(), ipAddressText.Text, (int)portNumeric.Value);
      }
      else
      {
        Location.IPAddress = ipAddressText.Text;
        Location.Port = (int)portNumeric.Value;
      }

      Storage.SetAILocation(Location);
      AILocation.Refresh();
      DialogResult = DialogResult.OK;
    }
  }
}
private void RemoveButton_Click(object sender, EventArgs e)
{
  if (aiLocationListView.SelectedItems.Count > 0)
  {
    int index = aiLocationListView.SelectedIndices[0];
    AILocation location = (AILocation)aiLocationListView.Items[index].Tag;
    Storage.RemoveAILocation(location.ID.ToString());
    AILocation.Refresh();
    aiLocationListView.Items.RemoveAt(index);
  }
}
// Called for each image in the working set so it can be run through the AI.
public async Task<List<ImageObject>> ProcessVideoImageViaAI(Stream stream, string imageName)
{
  List<ImageObject> result = null;
  AILocation ai = AILocation.GetAvailableAI();
  if (ai != null)
  {
    // ProcessImageSync is awaitable despite its name; this caller treats it as async.
    result = await ProcessImageSync(ai, stream, imageName).ConfigureAwait(false);
  }

  return result;
}
public static void SetAILocation(AILocation location)
{
  if (location != null)
  {
    using (RegistryKey key = s_base.OpenSubKey("AILocations", true))
    {
      using (RegistryKey aiKey = key.CreateSubKey(location.ID.ToString(), true))
      {
        aiKey.SetValue("IPAddress", location.IPAddress, RegistryValueKind.String);
        aiKey.SetValue("Port", location.Port, RegistryValueKind.DWord);
      }
    }
  }
}
public AILocationDialog(AILocation location)
{
  if (location == null)
  {
    throw new ArgumentNullException(nameof(location));
  }

  Location = location;
  InitializeComponent();
  ipAddressText.Text = Location.IPAddress;
  portNumeric.Value = Location.Port;
}
// This is the one called by the UI indirectly, and by the connection test function directly.
public async Task<List<ImageObject>> ProcessImageSync(AILocation ai, Stream stream, string imageName)
{
  List<ImageObject> objectList;
  try
  {
    // ConfigureAwait(true) keeps the continuation on the calling (UI) thread.
    objectList = await DetectObjects(ai, stream, imageName).ConfigureAwait(true);
  }
  catch (AiNotFoundException)
  {
    throw;  // rethrow without resetting the stack trace
  }

  return objectList;
}
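AiNotFoundException shows up in nearly every listing here but its definition is not part of this section. The sketch below is only an assumption of the minimal shape the calling code relies on (a message-carrying exception type); the real class in the project may add members.

// Hypothetical minimal definition, inferred from how the exception is thrown and caught above.
public class AiNotFoundException : Exception
{
  public AiNotFoundException(string message) : base(message) { }
}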
private void OnActivate(object sender, EventArgs e)
{
  if (aiLocationListView.SelectedItems.Count > 0)
  {
    int index = aiLocationListView.SelectedIndices[0];
    ListViewItem item = aiLocationListView.Items[index];
    AILocation location = (AILocation)item.Tag;

    using (AILocationDialog dlg = new AILocationDialog(location))
    {
      DialogResult result = dlg.ShowDialog();
      if (result == DialogResult.OK)
      {
        aiLocationListView.Items[index].SubItems[0].Text = dlg.Location.IPAddress;
        aiLocationListView.Items[index].SubItems[1].Text = dlg.Location.Port.ToString();
      }
    }
  }
}
public static List<AILocation> GetAILocations()
{
  List<AILocation> result = new List<AILocation>();

  using (RegistryKey key = s_base.OpenSubKey("AILocations"))
  {
    if (key != null)  // the key may not exist yet on a fresh install
    {
      foreach (string locationID in key.GetSubKeyNames())
      {
        using (RegistryKey locationKey = key.OpenSubKey(locationID))
        {
          AILocation location = new AILocation(Guid.Parse(locationID),
                                               (string)locationKey.GetValue("IPAddress"),
                                               (int)locationKey.GetValue("Port"));
          result.Add(location);
        }
      }
    }
  }

  return result;
}
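RemoveButton_Click earlier calls Storage.RemoveAILocation, which is not shown in this section. A minimal sketch of what that counterpart might look like, assuming the same s_base/AILocations key layout used by SetAILocation and GetAILocations (the real implementation may differ):

// Hypothetical counterpart to SetAILocation; illustrative only.
public static void RemoveAILocation(string locationID)
{
  using (RegistryKey key = s_base.OpenSubKey("AILocations", true))
  {
    if (key != null)
    {
      key.DeleteSubKeyTree(locationID, false);  // don't throw if the subkey is already gone
    }
  }
}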
static public AILocation GetAvailableAI()
{
  AILocation result = null;

  lock (s_lock)
  {
    // The global list may have changed since the last call!
    int useItem;
    if (AILocation.Next < AILocation.AILocations.Count)
    {
      useItem = AILocation.Next;
    }
    else
    {
      AILocation.Next = 0;  // start at position zero, which still may not exist
      useItem = 0;
    }

    if (useItem < AILocation.AILocations.Count)  // which also accounts for the empty-list possibility
    {
      result = AILocation.AILocations[useItem];
    }

    // Advance the round-robin index for the next caller.
    ++Next;
    if (Next >= AILocation.AILocations.Count)
    {
      Next = 0;  // start the list over (it often has only one entry)
    }
  }

  if (null == result)
  {
    Dbg.Trace("No AI Locations were found!");
    throw new AiNotFoundException("No AI Locations were found! Please define at least one!");
  }

  return result;
}
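GetAvailableAI is a simple round-robin dispatcher over the shared AILocations list. A minimal sketch of the behavior a caller would see, using two hypothetical DeepStack instances (the addresses and ports here are illustrative, not part of the project):

// Illustrative only: register two hypothetical locations, then watch successive calls rotate.
AILocation.AILocations.Add(new AILocation(Guid.NewGuid(), "192.168.1.10", 81));
AILocation.AILocations.Add(new AILocation(Guid.NewGuid(), "192.168.1.11", 81));

for (int i = 0; i < 4; i++)
{
  AILocation ai = AILocation.GetAvailableAI();
  Console.WriteLine("{0}: {1}:{2}", i, ai.IPAddress, ai.Port);  // alternates .10, .11, .10, .11
}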
public SettingsDialog()
{
  InitializeComponent();
  aiLocationListView.Sorting = SortOrder.Ascending;

  // Older (pre 1.6.1) versions may have used the old registry format.
  // If so, migrate the value and then delete it.
  string oldIPAddress = Storage.GetGlobalString("DeepStackIPAddress");
  if (!string.IsNullOrEmpty(oldIPAddress))
  {
    int aiPort = Storage.GetGlobalInt("DeepStackPort");
    AILocation location = new AILocation(Guid.NewGuid(), oldIPAddress, aiPort);
    AILocation.AILocations.Add(location);             // put it in the new format
    Storage.RemoveGlobalValue("DeepStackIPAddress");  // get rid of the old format
    Storage.RemoveGlobalValue("DeepStackPort");
  }

  foreach (var location in AILocation.AILocations)
  {
    ListViewItem item = new ListViewItem(new string[] { location.IPAddress, location.Port.ToString() });
    aiLocationListView.Items.Add(item);
    item.Tag = location;
  }

  double snapshot = Storage.GetGlobalDouble("FrameInterval");
  if (snapshot == 0.0)
  {
    snapshot = (double)Settings.Default.TimePerFrame;
  }
  snapshotNumeric.Value = (decimal)snapshot;

  int maxEvent = Storage.GetGlobalInt("MaxEventTime");
  if (maxEvent == 0)
  {
    maxEvent = Settings.Default.MaxEventTime;
  }
  maxEventNumeric.Value = maxEvent;

  int eventInterval = Storage.GetGlobalInt("EventInterval");
  if (eventInterval == 0)
  {
    eventInterval = Settings.Default.EventInterval;
  }
  eventIntervalNumeric.Value = eventInterval;

  // Database settings
  string customConnectionString = Storage.GetGlobalString("CustomDatabaseConnectionString");
  if (string.IsNullOrEmpty(customConnectionString))
  {
    ConnectionStringText.Text = Storage.GetGlobalString("DBConnectionString");  // the fully formatted one that is in use
  }
  else
  {
    ConnectionStringText.Text = customConnectionString;
  }
}
// This function is used by the (semi) live data path, not by the UI.
public static async Task<AIResult> DetectObjectsAsync(Stream stream, PendingItem pending)
{
  List<ImageObject> objects = null;
  AIResult aiResult = new AIResult
  {
    ObjectsFound = objects,
    Item = pending
  };

  using (HttpClient client = new HttpClient())
  {
    using (StreamContent content = new StreamContent(stream))
    {
      using (var request = new MultipartFormDataContent { { content, "image", pending.PendingFile } })
      {
        AILocation ai = AILocation.GetAvailableAI();
        if (ai == null)
        {
          throw new AiNotFoundException("No AI Location is Currently Defined");
        }

        string url = string.Format("http://{0}:{1}/v1/vision/detection", ai.IPAddress, ai.Port);
        HttpResponseMessage output = null;

        try
        {
          DateTime startPost = DateTime.Now;
          pending.TimeDispatched = startPost;
          output = await client.PostAsync(new Uri(url), request).ConfigureAwait(false);
          pending.TimeProcessingByAI();
          TimeSpan postTime = DateTime.Now - startPost;
        }
        catch (AggregateException)
        {
          throw new AiNotFoundException(url);
        }
        catch (Exception)
        {
          throw new AiNotFoundException(url);
        }

        if (!output.IsSuccessStatusCode)
        {
          throw new AiNotFoundException(url);
        }

        var jsonString = await output.Content.ReadAsStringAsync().ConfigureAwait(false);
        output.Dispose();

        TimeSpan processTime = pending.TimeProcessingByAI();
        // Console.WriteLine("Process Time: " + processTime.TotalMilliseconds.ToString());

        Response response = JsonConvert.DeserializeObject<Response>(jsonString);
        if (response.Predictions != null && response.Predictions.Length > 0)
        {
          foreach (var result in response.Predictions)
          {
            if (objects == null)
            {
              objects = new List<ImageObject>();
            }

            result.Success = true;
            // Windows likes Rectangles, so it is easier to create one now.
            result.ObjectRectangle = Rectangle.FromLTRB(result.X_min, result.Y_min, result.X_max, result.Y_max);
            result.ID = Guid.NewGuid();
            objects.Add(result);

            string o = string.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}",
                                     result.Label, result.Confidence,
                                     result.X_min, result.Y_min, result.X_max, result.Y_max);
            Dbg.Trace(o);
          }
        }
        // DebugWriter.Write(jsonString);
      }
    }
  }

  aiResult.ObjectsFound = objects;
  return aiResult;
}
// This function is used by the UI to detect objects. It is declared async, but at the
// moment it posts synchronously via .Result; it may become truly asynchronous later.
public async static Task<List<ImageObject>> DetectObjects(AILocation aiLocation, Stream stream, string imageName)
{
  List<ImageObject> objects = null;

  using (HttpClient client = new HttpClient())
  {
    using (StreamContent content = new StreamContent(stream))
    {
      using (var request = new MultipartFormDataContent { { content, "image", imageName } })
      {
        string url = string.Format("http://{0}:{1}/v1/vision/detection", aiLocation.IPAddress, aiLocation.Port);
        HttpResponseMessage output = null;

        try
        {
          output = /*await*/ client.PostAsync(new Uri(url), request).Result;
        }
        catch (AggregateException)
        {
          throw new AiNotFoundException(url);
        }
        catch (Exception)
        {
          throw new AiNotFoundException(url);
        }

        if (!output.IsSuccessStatusCode)
        {
          throw new AiNotFoundException(url);
        }

        var jsonString = /*await*/ output.Content.ReadAsStringAsync().Result;
        output.Dispose();

        Response response = JsonConvert.DeserializeObject<Response>(jsonString);
        if (response.Predictions != null && response.Predictions.Length > 0)
        {
          foreach (var result in response.Predictions)
          {
            if (objects == null)
            {
              objects = new List<ImageObject>();
            }

            result.Success = true;
            // Windows likes Rectangles, so it is easier to create one now.
            result.ObjectRectangle = Rectangle.FromLTRB(result.X_min, result.Y_min, result.X_max, result.Y_max);
            result.ID = Guid.NewGuid();  // Keep an ID around for the life of the object
            objects.Add(result);
          }
        }
      }
    }
  }

  return objects;
}
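Both detection paths deserialize the DeepStack JSON into a Response whose Predictions array holds ImageObject items. Those classes are defined elsewhere in the project; the sketch below is only an assumed minimal shape, inferred from how the properties are used above and from DeepStack's documented detection response fields (label, confidence, x_min, y_min, x_max, y_max).

// Hypothetical minimal DTOs; the real project classes may carry additional members.
public class Response
{
  [JsonProperty("success")]
  public bool Success { get; set; }

  [JsonProperty("predictions")]
  public ImageObject[] Predictions { get; set; }
}

public class ImageObject
{
  [JsonProperty("label")]      public string Label { get; set; }
  [JsonProperty("confidence")] public double Confidence { get; set; }
  [JsonProperty("x_min")]      public int X_min { get; set; }
  [JsonProperty("y_min")]      public int Y_min { get; set; }
  [JsonProperty("x_max")]      public int X_max { get; set; }
  [JsonProperty("y_max")]      public int Y_max { get; set; }

  // Set locally after deserialization; not part of the DeepStack response.
  public bool Success { get; set; }
  public Rectangle ObjectRectangle { get; set; }
  public Guid ID { get; set; }
}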