public static Dictionary<long, Dictionary<string, List<double>>> GetSliceLocationThreats(Prediction prediction) { Dictionary<long, Dictionary<string, List<double>>> sliceLocationThreats = new Dictionary<long, Dictionary<string, List<double>>>(); DiscreteChoiceModel model = prediction.Model; long sliceTicks = -1; if (model is TimeSliceDCM) sliceTicks = (model as TimeSliceDCM).TimeSliceTicks; Dictionary<int, Point> idPoint = new Dictionary<int, Point>(); foreach (Point point in prediction.Points) idPoint.Add(point.Id, point); foreach (PointPrediction pointPrediction in prediction.PointPredictions) { long slice = 1; if (sliceTicks > 0) slice = pointPrediction.Time.Ticks / sliceTicks; PostGIS.Point point = idPoint[pointPrediction.PointId].Location; int row = (int)((point.Y - prediction.PredictionArea.BoundingBox.MinY) / prediction.PredictionPointSpacing); int col = (int)((point.X - prediction.PredictionArea.BoundingBox.MinX) / prediction.PredictionPointSpacing); string location = row + "-" + col; sliceLocationThreats.EnsureContainsKey(slice, typeof(Dictionary<string, List<double>>)); sliceLocationThreats[slice].EnsureContainsKey(location, typeof(List<double>)); sliceLocationThreats[slice][location].Add(pointPrediction.TotalThreat); } return sliceLocationThreats; }
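EnsureContainsKey is used throughout these snippets but its implementation is not shown here. A minimal sketch of what such a helper presumably does — an assumption about the extension method, not the project's actual code — is to add a default-constructed value of the given runtime type when the key is missing:

using System;
using System.Collections.Generic;

public static class DictionaryExtensions
{
    // Hypothetical sketch (assumption): if the key is absent, add it with a freshly constructed
    // value of the given type. The extra arguments seen in some calls (e.g., typeof(Set<string>), false)
    // are presumably forwarded to that type's constructor. valueType must be assignable to TValue.
    public static void EnsureContainsKey<TKey, TValue>(this Dictionary<TKey, TValue> dictionary, TKey key, Type valueType, params object[] constructorArgs)
    {
        if (dictionary.ContainsKey(key))
            return;

        object value = constructorArgs.Length == 0
            ? Activator.CreateInstance(valueType)               // parameterless path (also covers value types such as int)
            : Activator.CreateInstance(valueType, constructorArgs);

        dictionary.Add(key, (TValue)value);
    }
}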
public static Dictionary<long, Dictionary<string, int>> GetSliceLocationTrueCount(IEnumerable<Incident> incidents, Prediction prediction) { Dictionary<long, Dictionary<string, int>> sliceLocationTrueCount = new Dictionary<long, Dictionary<string, int>>(); DiscreteChoiceModel model = prediction.Model; long sliceTicks = -1; if (model is TimeSliceDCM) sliceTicks = (model as TimeSliceDCM).TimeSliceTicks; foreach (Incident incident in incidents) { long slice = 1; if (sliceTicks > 0) slice = incident.Time.Ticks / sliceTicks; int row = (int)((incident.Location.Y - prediction.PredictionArea.BoundingBox.MinY) / prediction.PredictionPointSpacing); int col = (int)((incident.Location.X - prediction.PredictionArea.BoundingBox.MinX) / prediction.PredictionPointSpacing); string location = row + "-" + col; sliceLocationTrueCount.EnsureContainsKey(slice, typeof(Dictionary<string, int>)); sliceLocationTrueCount[slice].EnsureContainsKey(location, typeof(int)); sliceLocationTrueCount[slice][location]++; } return sliceLocationTrueCount; }
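Both methods above key their results by the same slice index (prediction time in ticks divided by the model's slice length, or 1 for non-time-sliced models) and the same "row-col" cell string, so a caller can join the two dictionaries directly. A hedged sketch of pairing mean predicted threat with observed incident counts per cell (variable names and the reporting step are illustrative; Average() requires System.Linq):

// Pair mean predicted threat with the true incident count for each slice/cell
// (prediction and incidents are assumed to already be in scope).
Dictionary<long, Dictionary<string, List<double>>> sliceLocationThreats = GetSliceLocationThreats(prediction);
Dictionary<long, Dictionary<string, int>> sliceLocationTrueCount = GetSliceLocationTrueCount(incidents, prediction);
foreach (long slice in sliceLocationThreats.Keys)
    foreach (string location in sliceLocationThreats[slice].Keys)
    {
        double meanThreat = sliceLocationThreats[slice][location].Average();
        int trueCount = 0;
        if (sliceLocationTrueCount.ContainsKey(slice) && sliceLocationTrueCount[slice].ContainsKey(location))
            trueCount = sliceLocationTrueCount[slice][location];

        Console.Out.WriteLine("slice " + slice + ", cell " + location + ": mean threat = " + meanThreat + ", true incidents = " + trueCount);
    }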
/// <summary> /// Gets lexically related words for the current synset. Many of the relations in WordNet are lexical instead of semantic. Whereas /// the latter indicate relations between entire synsets (e.g., hypernym), the former indicate relations between specific /// words in synsets. This method retrieves all lexical relations and the words related thereby. /// </summary> /// <returns>Mapping from relations to mappings from words in the current synset to related words in the related synsets</returns> public Dictionary<WordNetEngine.SynSetRelation, Dictionary<string, Set<string>>> GetLexicallyRelatedWords() { Dictionary<WordNetEngine.SynSetRelation, Dictionary<string, Set<string>>> relatedWords = new Dictionary<WordNetEngine.SynSetRelation, Dictionary<string, Set<string>>>(); foreach (WordNetEngine.SynSetRelation relation in _lexicalRelations.Keys) { relatedWords.EnsureContainsKey(relation, typeof(Dictionary<string, Set<string>>)); foreach (SynSet relatedSynSet in _lexicalRelations[relation].Keys) { // make sure related synset is initialized if (!relatedSynSet.Instantiated) relatedSynSet.Instantiate(); foreach (int sourceWordIndex in _lexicalRelations[relation][relatedSynSet].Keys) { string sourceWord = _words[sourceWordIndex - 1]; relatedWords[relation].EnsureContainsKey(sourceWord, typeof(Set<string>), false); foreach (int targetWordIndex in _lexicalRelations[relation][relatedSynSet][sourceWordIndex]) { string targetWord = relatedSynSet.Words[targetWordIndex - 1]; relatedWords[relation][sourceWord].Add(targetWord); } } } } return relatedWords; }
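A short usage sketch for the method above, enumerating each lexical relation and the word pairs it connects (synset is a hypothetical, already-retrieved SynSet instance):

// Print every lexically related word pair for a synset.
Dictionary<WordNetEngine.SynSetRelation, Dictionary<string, Set<string>>> lexicallyRelated = synset.GetLexicallyRelatedWords();
foreach (WordNetEngine.SynSetRelation relation in lexicallyRelated.Keys)
    foreach (string sourceWord in lexicallyRelated[relation].Keys)
        foreach (string targetWord in lexicallyRelated[relation][sourceWord])
            Console.Out.WriteLine(sourceWord + " --[" + relation + "]--> " + targetWord);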
/// <summary> /// Constructor /// </summary> /// <param name="wordNetDirectory">Path to WordNet directory (the one with the data and index files in it)</param> /// <param name="inMemory">Whether or not to store all data in memory. In-memory storage requires quite a bit of space /// but it is also very quick. The alternative (false) will cause the data to be searched on-disk with an efficient /// binary search algorithm.</param> public WordNetEngine(string wordNetDirectory, bool inMemory) { _wordNetDirectory = wordNetDirectory; _inMemory = inMemory; _posIndexWordSearchStream = null; _posSynSetDataFile = null; if (!System.IO.Directory.Exists(_wordNetDirectory)) throw new DirectoryNotFoundException("Non-existent WordNet directory: " + _wordNetDirectory); // get data and index paths string[] dataPaths = new string[] { Path.Combine(_wordNetDirectory, "data.adj"), Path.Combine(_wordNetDirectory, "data.adv"), Path.Combine(_wordNetDirectory, "data.noun"), Path.Combine(_wordNetDirectory, "data.verb") }; string[] indexPaths = new string[] { Path.Combine(_wordNetDirectory, "index.adj"), Path.Combine(_wordNetDirectory, "index.adv"), Path.Combine(_wordNetDirectory, "index.noun"), Path.Combine(_wordNetDirectory, "index.verb") }; // make sure all files exist foreach (string path in dataPaths.Union(indexPaths)) if (!System.IO.File.Exists(path)) throw new FileNotFoundException("Failed to find WordNet file: " + path); #region index file sorting string sortFlagPath = Path.Combine(_wordNetDirectory, ".sorted_for_dot_net"); if (!System.IO.File.Exists(sortFlagPath)) { /* make sure the index files are sorted according to the current sort order. the index files in the * wordnet distribution are sorted in the order needed for (presumably) the java api, which uses * a different sort order than the .net runtime. thus, unless we resort the lines in the index * files, we won't be able to do a proper binary search over the data. 
*/ foreach (string indexPath in indexPaths) { // create temporary file for sorted lines string tempPath = Path.GetTempFileName(); StreamWriter tempFile = new StreamWriter(tempPath); // get number of words (lines) in file int numWords = 0; StreamReader indexFile = new StreamReader(indexPath); string line; while (indexFile.TryReadLine(out line)) if (!line.StartsWith(" ")) ++numWords; // get lines in file, sorted by first column (i.e., the word) Dictionary<string, string> wordLine = new Dictionary<string, string>(numWords); indexFile = new StreamReader(indexPath); while (indexFile.TryReadLine(out line)) // write header lines to temp file immediately if (line.StartsWith(" ")) tempFile.WriteLine(line); else { // trim useless blank spaces from line and map line to first column line = line.Trim(); wordLine.Add(line.Substring(0, line.IndexOf(' ')), line); } // get sorted words List<string> sortedWords = new List<string>(wordLine.Count); sortedWords.AddRange(wordLine.Keys); sortedWords.Sort(); // write lines sorted by word foreach (string word in sortedWords) tempFile.WriteLine(wordLine[word]); tempFile.Close(); // replace original index file with properly sorted one System.IO.File.Delete(indexPath); System.IO.File.Move(tempPath, indexPath); } // create flag file, indicating that we've sorted the data StreamWriter sortFlagFile = new StreamWriter(sortFlagPath); sortFlagFile.WriteLine("This file serves no purpose other than to indicate that the WordNet distribution data in the current directory has been sorted for use by the .NET API."); sortFlagFile.Close(); } #endregion #region engine init if (inMemory) { // pass 1: get total number of synsets int totalSynsets = 0; foreach (string dataPath in dataPaths) { // scan synset data file for lines that don't start with a space...these are synset definition lines StreamReader dataFile = new StreamReader(dataPath); string line; while (dataFile.TryReadLine(out line)) { int firstSpace = line.IndexOf(' '); if (firstSpace > 0) ++totalSynsets; } } // pass 2: create synset shells (pos and offset only) _idSynset = new Dictionary<string, SynSet>(totalSynsets); foreach (string dataPath in dataPaths) { POS pos = GetFilePOS(dataPath); // scan synset data file StreamReader dataFile = new StreamReader(dataPath); string line; while (dataFile.TryReadLine(out line)) { int firstSpace = line.IndexOf(' '); if (firstSpace > 0) { // get offset and create synset shell int offset = int.Parse(line.Substring(0, firstSpace)); SynSet synset = new SynSet(pos, offset, null); _idSynset.Add(synset.ID, synset); } } } // pass 3: instantiate synsets (hooks up relations, set glosses, etc.) 
foreach (string dataPath in dataPaths) { POS pos = GetFilePOS(dataPath); // scan synset data file StreamReader dataFile = new StreamReader(dataPath); string line; while (dataFile.TryReadLine(out line)) { int firstSpace = line.IndexOf(' '); if (firstSpace > 0) // instantiate synset defined on current line, using the instantiated synsets for all references _idSynset[pos + ":" + int.Parse(line.Substring(0, firstSpace))].Instantiate(line, _idSynset); } } // organize synsets by pos and words...also set most common synset for word-pos pairs that have multiple synsets _posWordSynSets = new Dictionary<POS, Dictionary<string, Set<SynSet>>>(); foreach (string indexPath in indexPaths) { POS pos = GetFilePOS(indexPath); _posWordSynSets.EnsureContainsKey(pos, typeof(Dictionary<string, Set<SynSet>>)); // scan word index file, skipping header lines StreamReader indexFile = new StreamReader(indexPath); string line; while (indexFile.TryReadLine(out line)) { int firstSpace = line.IndexOf(' '); if (firstSpace > 0) { // grab word and synset shells, along with the most common synset string word = line.Substring(0, firstSpace); SynSet mostCommonSynSet; Set<SynSet> synsets = GetSynSetShells(line, pos, out mostCommonSynSet, null); // set flag on most common synset if it's ambiguous if (synsets.Count > 1) _idSynset[mostCommonSynSet.ID].SetAsMostCommonSynsetFor(word); // use reference to the synsets that we instantiated in our three-pass routine above _posWordSynSets[pos].Add(word, new Set<SynSet>(synsets.Count)); foreach (SynSet synset in synsets) _posWordSynSets[pos][word].Add(_idSynset[synset.ID]); } } } } else { // open binary search streams for index files _posIndexWordSearchStream = new Dictionary<POS, BinarySearchTextStream>(); foreach (string indexPath in indexPaths) { // create binary search stream for index file BinarySearchTextStream searchStream = new BinarySearchTextStream(indexPath, new BinarySearchTextStream.SearchComparisonDelegate(delegate(object searchWord, string currentLine) { // if we landed on the header text, search further down if (currentLine[0] == ' ') return 1; // get word on current line string currentWord = currentLine.Substring(0, currentLine.IndexOf(' ')); // compare searched-for word to the current word return ((string)searchWord).CompareTo(currentWord); })); // add search stream for current POS _posIndexWordSearchStream.Add(GetFilePOS(indexPath), searchStream); } // open readers for synset data files _posSynSetDataFile = new Dictionary<POS, StreamReader>(); foreach (string dataPath in dataPaths) _posSynSetDataFile.Add(GetFilePOS(dataPath), new StreamReader(dataPath)); } #endregion }
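A minimal construction sketch for the engine above (the directory path is illustrative). With inMemory = true the constructor runs the three passes shown and keeps every synset resident; with false it only opens binary-search streams over the re-sorted index files and readers over the data files, deferring all parsing to lookup time:

// In-memory engine: slower start-up and larger footprint, fast subsequent lookups.
WordNetEngine inMemoryEngine = new WordNetEngine(@"C:\wordnet\dict", true);

// Disk-based engine: near-instant start-up; each lookup binary-searches the index files.
WordNetEngine diskEngine = new WordNetEngine(@"C:\wordnet\dict", false);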
private Dictionary<int, float> GetPerClassWeights(StreamReader trainingInstancesReader) { Dictionary<int, int> classCount = new Dictionary<int, int>(); string line; while (trainingInstancesReader.TryReadLine(out line)) { int firstSpace = line.IndexOf(' '); if (firstSpace == -1) firstSpace = line.Length; int classNum = int.Parse(line.Substring(0, firstSpace)); classCount.EnsureContainsKey(classNum, typeof(int)); classCount[classNum]++; } Dictionary<int, float> classWeight = new Dictionary<int, float>(); int total = classCount.Values.Sum(); foreach (int classNum in classCount.Keys) if (_libLinear.GetUnmappedLabel(classNum.ToString()) != PointPrediction.NullLabel) classWeight.Add(classNum, (total - classCount[classNum]) / (float)classCount[classNum]); return classWeight; }
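The weight assigned to each class above is (total - count) / count, so rarer classes receive proportionally larger weights (classes mapping to the null label are skipped). A worked check with hypothetical counts (Sum() requires System.Linq):

// With 100 instances of class 1 and 900 of class 2 (total 1000):
//   class 1 weight = (1000 - 100) / 100 = 9.0
//   class 2 weight = (1000 - 900) / 900 ≈ 0.111
Dictionary<int, int> exampleCounts = new Dictionary<int, int> { { 1, 100 }, { 2, 900 } };
int exampleTotal = exampleCounts.Values.Sum();
foreach (int classNum in exampleCounts.Keys)
    Console.Out.WriteLine(classNum + ": " + (exampleTotal - exampleCounts[classNum]) / (float)exampleCounts[classNum]);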
/// <summary> /// Instantiates the current synset. If idSynset is non-null, related synsets references are set to those from /// idSynset; otherwise, related synsets are created as shells. /// </summary> /// <param name="definition">Definition line of synset from data file</param> /// <param name="idSynset">Lookup for related synsets. If null, all related synsets will be created as shells.</param> internal void Instantiate(string definition, Dictionary<string, SynSet> idSynset) { // don't re-instantiate if (_instantiated) throw new Exception("Synset has already been instantiated"); /* get lexicographer file name...the enumeration lines up precisely with the wordnet spec (see the lexnames file) except that * it starts with None, so we need to add 1 to the definition line's value to get the correct file name */ int lexicographerFileNumber = int.Parse(GetField(definition, 1)) + 1; if (lexicographerFileNumber <= 0) throw new Exception("Invalid lexicographer file name number. Should be >= 1."); _lexicographerFileName = (WordNetEngine.LexicographerFileName)lexicographerFileNumber; // get number of words in the synset and the start character of the word list int wordStart; int numWords = int.Parse(GetField(definition, 3, out wordStart), NumberStyles.HexNumber); wordStart = definition.IndexOf(' ', wordStart) + 1; // get words in synset _words = new List<string>(numWords); for (int i = 0; i < numWords; ++i) { int wordEnd = definition.IndexOf(' ', wordStart + 1) - 1; int wordLen = wordEnd - wordStart + 1; string word = definition.Substring(wordStart, wordLen); if (word.Contains(' ')) throw new Exception("Unexpected space in word: " + word); _words.Add(word); // skip lex_id field wordStart = definition.IndexOf(' ', wordEnd + 2) + 1; } // get gloss _gloss = definition.Substring(definition.IndexOf('|') + 1).Trim(); if (_gloss.Contains('|')) throw new Exception("Unexpected pipe in gloss"); // get number and start of relations int relationCountField = 3 + (_words.Count * 2) + 1; int relationFieldStart; int numRelations = int.Parse(GetField(definition, relationCountField, out relationFieldStart)); relationFieldStart = definition.IndexOf(' ', relationFieldStart) + 1; // grab each related synset _relationSynSets = new Dictionary<WordNetEngine.SynSetRelation, Set<SynSet>>(); _lexicalRelations = new Dictionary<WordNetEngine.SynSetRelation, Dictionary<SynSet, Dictionary<int, Set<int>>>>(); for (int relationNum = 0; relationNum < numRelations; ++relationNum) { string relationSymbol = null; int relatedSynSetOffset = -1; WordNetEngine.POS relatedSynSetPOS = WordNetEngine.POS.None; int sourceWordIndex = -1; int targetWordIndex = -1; // each relation has four columns for (int relationField = 0; relationField <= 3; ++relationField) { int fieldEnd = definition.IndexOf(' ', relationFieldStart + 1) - 1; int fieldLen = fieldEnd - relationFieldStart + 1; string fieldValue = definition.Substring(relationFieldStart, fieldLen); // relation symbol if (relationField == 0) relationSymbol = fieldValue; // related synset offset else if (relationField == 1) relatedSynSetOffset = int.Parse(fieldValue); // related synset POS else if (relationField == 2) relatedSynSetPOS = GetPOS(fieldValue); // source/target word for lexical relation else if (relationField == 3) { sourceWordIndex = int.Parse(fieldValue.Substring(0, 2), NumberStyles.HexNumber); targetWordIndex = int.Parse(fieldValue.Substring(2), NumberStyles.HexNumber); } else throw new Exception(); relationFieldStart = definition.IndexOf(' ', relationFieldStart + 1) + 1; } // get 
related synset...create shell if we don't have a lookup SynSet relatedSynSet; if (idSynset == null) relatedSynSet = new SynSet(relatedSynSetPOS, relatedSynSetOffset, _wordNetEngine); // look up related synset directly else relatedSynSet = idSynset[relatedSynSetPOS + ":" + relatedSynSetOffset]; // get relation WordNetEngine.SynSetRelation relation = WordNetEngine.GetSynSetRelation(_pos, relationSymbol); // add semantic relation if we have neither a source nor a target word index if (sourceWordIndex == 0 && targetWordIndex == 0) { _relationSynSets.EnsureContainsKey(relation, typeof(Set<SynSet>)); _relationSynSets[relation].Add(relatedSynSet); } // add lexical relation else { _lexicalRelations.EnsureContainsKey(relation, typeof(Dictionary<SynSet, Dictionary<int, Set<int>>>)); _lexicalRelations[relation].EnsureContainsKey(relatedSynSet, typeof(Dictionary<int, Set<int>>)); _lexicalRelations[relation][relatedSynSet].EnsureContainsKey(sourceWordIndex, typeof(Set<int>)); if (!_lexicalRelations[relation][relatedSynSet][sourceWordIndex].Contains(targetWordIndex)) _lexicalRelations[relation][relatedSynSet][sourceWordIndex].Add(targetWordIndex); } } // release the wordnet engine if we have one...don't need it anymore if (_wordNetEngine != null) _wordNetEngine = null; _instantiated = true; }
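For reference, the definition line parsed above follows the standard WordNet data-file (wndb) layout; the field positions the code indexes into are summarized below as comments (a restatement of that public format, not project documentation):

// synset_offset  lex_filenum  ss_type  w_cnt  word lex_id [word lex_id ...]  p_cnt  [ptr ...]  | gloss
//   field 1 (lex_filenum): offset by +1 to index the LexicographerFileName enum, which begins with None
//   field 3 (w_cnt): two-digit hex count of words, followed by w_cnt word/lex_id pairs
//   field 3 + 2*w_cnt + 1 (p_cnt): number of relations; each relation is four fields:
//     ptr_symbol  synset_offset  pos  source/target, where source/target is two two-digit hex word
//     indices and 0000 denotes a semantic (synset-to-synset) rather than lexical relation
//   everything after the '|' is the gloss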
private void GetThreatSurfaces(Rectangle bitmapDimensions, bool displayFirstSlice, Dictionary<long, List<Tuple<RectangleF, double, string>>> sliceSquareThreatType = null) { if (_sliceIncidentPointScores == null) return; Set<string> selectedIncidents = new Set<string>(incidentTypeCheckBoxes.Controls.Cast<ColoredCheckBox>().Where(c => c.CheckState != CheckState.Unchecked).Select(c => c.Text).ToArray()); float pixelsPerMeter; float threatRectanglePixelWidth; GetDrawingParameters(bitmapDimensions, out pixelsPerMeter, out threatRectanglePixelWidth); List<long> slices = _sliceIncidentPointScores.Keys.OrderBy(s => s).ToList(); Dictionary<long, Dictionary<int, Dictionary<int, Tuple<double, string>>>> sliceRowColScoreIncident = new Dictionary<long, Dictionary<int, Dictionary<int, Tuple<double, string>>>>(slices.Count); Dictionary<long, Bitmap> newSliceThreatSurface = new Dictionary<long, Bitmap>(slices.Count); double overallMinScore = double.MaxValue; double overallMaxScore = double.MinValue; List<Thread> threads = new List<Thread>(Configuration.ProcessorCount); for (int i = 0; i < Configuration.ProcessorCount; ++i) { Thread t = new Thread(new ParameterizedThreadStart(core => { for (int j = (int)core; j < slices.Count; j += Configuration.ProcessorCount) { long slice = slices[j]; #region create bitmap for current slice's threat surface try { lock (newSliceThreatSurface) { newSliceThreatSurface.Add(slice, new Bitmap(bitmapDimensions.Width, bitmapDimensions.Height, PixelFormat.Format16bppRgb565)); } } catch (ArgumentException) { Console.Out.WriteLine("Maximum zoom exceeded. Reset zoom to refresh display."); return; } #endregion #region get incident scores for each row and column of current slice Dictionary<int, Dictionary<int, Dictionary<string, List<double>>>> rowColIncidentScores = new Dictionary<int, Dictionary<int, Dictionary<string, List<double>>>>(); foreach (string incident in _sliceIncidentPointScores[slice].Keys) if (selectedIncidents.Contains(incident)) foreach (Tuple<PointF, double> pointScore in _sliceIncidentPointScores[slice][incident]) { PointF drawingPoint = ConvertMetersPointToDrawingPoint(pointScore.Item1, _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions); int row, col; GetThreatRectangleRowColumn(drawingPoint, threatRectanglePixelWidth, out row, out col); rowColIncidentScores.EnsureContainsKey(row, typeof(Dictionary<int, Dictionary<string, List<double>>>)); rowColIncidentScores[row].EnsureContainsKey(col, typeof(Dictionary<string, List<double>>)); rowColIncidentScores[row][col].EnsureContainsKey(incident, typeof(List<double>)); rowColIncidentScores[row][col][incident].Add(pointScore.Item2); } #endregion #region get score/incident pairs for each cell, tracking min and max scores Dictionary<int, Dictionary<int, Tuple<double, string>>> rowColScoreIncident = new Dictionary<int, Dictionary<int, Tuple<double, string>>>(); double sliceMinScore = double.MaxValue; double sliceMaxScore = double.MinValue; foreach (int row in rowColIncidentScores.Keys) foreach (int col in rowColIncidentScores[row].Keys) { Dictionary<string, List<double>> incidentScores = rowColIncidentScores[row][col]; string mostLikelyIncident = null; double scoreForMostLikelyIncident = double.MinValue; foreach (string incident in incidentScores.Keys) { double score = incidentScores[incident].Average(); if (score > scoreForMostLikelyIncident) { mostLikelyIncident = incident; scoreForMostLikelyIncident = score; } } if (scoreForMostLikelyIncident < sliceMinScore) sliceMinScore = scoreForMostLikelyIncident; if 
(scoreForMostLikelyIncident > sliceMaxScore) sliceMaxScore = scoreForMostLikelyIncident; rowColScoreIncident.EnsureContainsKey(row, typeof(Dictionary<int, Tuple<double, string>>)); rowColScoreIncident[row].Add(col, new Tuple<double, string>(scoreForMostLikelyIncident, mostLikelyIncident)); } #endregion #region store information from thread lock (sliceRowColScoreIncident) { sliceRowColScoreIncident.Add(slice, rowColScoreIncident); } lock (this) { if (sliceMinScore < overallMinScore) overallMinScore = sliceMinScore; } lock (this) { if (sliceMaxScore > overallMaxScore) overallMaxScore = sliceMaxScore; } #endregion } })); t.Start(i); threads.Add(t); } foreach (Thread t in threads) t.Join(); #region draw threat surfaces double scoreRange = overallMaxScore - overallMinScore; if (scoreRange == 0) scoreRange = float.Epsilon; threads.Clear(); for (int i = 0; i < Configuration.ProcessorCount; ++i) { Thread t = new Thread(new ParameterizedThreadStart(core => { using(Pen pen = new Pen(BackColor, 1)) using(SolidBrush brush = new SolidBrush(BackColor)) { for (int j = (int)core; j < slices.Count; j += Configuration.ProcessorCount) { long slice = slices[j]; Graphics g = Graphics.FromImage(newSliceThreatSurface[slice]); g.Clear(BackColor); #region threat foreach (int row in sliceRowColScoreIncident[slice].Keys) foreach (int col in sliceRowColScoreIncident[slice][row].Keys) { Tuple<double, string> scoreIncident = sliceRowColScoreIncident[slice][row][col]; double scaledScore = (scoreIncident.Item1 - overallMinScore) / scoreRange; double percentTransparent = 1 - scaledScore; Color color = _incidentColor[scoreIncident.Item2]; byte red = (byte)(scaledScore * color.R + percentTransparent * BackColor.R); byte green = (byte)(scaledScore * color.G + percentTransparent * BackColor.G); byte blue = (byte)(scaledScore * color.B + percentTransparent * BackColor.B); brush.Color = Color.FromArgb(red, green, blue); RectangleF threatSquare = new RectangleF(col * threatRectanglePixelWidth, row * threatRectanglePixelWidth, threatRectanglePixelWidth, threatRectanglePixelWidth); g.FillRectangle(brush, threatSquare); if (sliceSquareThreatType != null) { sliceSquareThreatType.EnsureContainsKey(slice, typeof(List<Tuple<RectangleF, double, string>>)); sliceSquareThreatType[slice].Add(new Tuple<RectangleF, double, string>(threatSquare, scoreIncident.Item1, scoreIncident.Item2)); } } #endregion #region overlays foreach (Overlay overlay in Overlays) if (overlay.Displayed) { pen.Color = overlay.Color; brush.Color = overlay.Color; foreach (List<PointF> points in overlay.Points) if (points.Count == 1) { PointF drawingPoint = ConvertMetersPointToDrawingPoint(points[0], _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions); RectangleF circle = GetCircleBoundingBox(drawingPoint, _pointDrawingDiameter); g.FillEllipse(brush, circle); g.DrawEllipse(pen, circle); } else for (int p = 1; p < points.Count; ++p) g.DrawLine(pen, ConvertMetersPointToDrawingPoint(points[p - 1], _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions), ConvertMetersPointToDrawingPoint(points[p], _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions)); } #endregion #region true incidents Set<string> selectedTrueIncidentOverlays = new Set<string>(incidentTypeCheckBoxes.Controls.Cast<ColoredCheckBox>().Where(c => c.CheckState == CheckState.Checked).Select(c => c.Text).ToArray()); DateTime sliceStart = DisplayedPrediction.PredictionStartTime; DateTime sliceEnd = DisplayedPrediction.PredictionEndTime; if (slice != -1) { if (!(DisplayedPrediction.Model is 
TimeSliceDCM)) throw new Exception("Expected TimeSliceDCM since slice != -1"); long sliceTicks = (DisplayedPrediction.Model as TimeSliceDCM).TimeSliceTicks; sliceStart = new DateTime(slice * sliceTicks); sliceEnd = sliceStart + new TimeSpan(sliceTicks); } foreach (string trueIncidentOverlay in selectedTrueIncidentOverlays) { brush.Color = _incidentColor[trueIncidentOverlay]; pen.Color = Color.Black; foreach (Incident incident in Incident.Get(sliceStart, sliceEnd, DisplayedPrediction.PredictionArea, trueIncidentOverlay)) { PointF drawingPoint = ConvertMetersPointToDrawingPoint(new PointF((float)incident.Location.X, (float)incident.Location.Y), _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions); RectangleF circle = GetCircleBoundingBox(drawingPoint, _pointDrawingDiameter); g.FillEllipse(brush, circle); g.DrawEllipse(pen, circle); } } #endregion #region prediction points if (_displayPredictionPoints) { brush.Color = _predictionPointColor; pen.Color = Color.Black; foreach (Point p in DisplayedPrediction.Points) { PointF drawingPoint = ConvertMetersPointToDrawingPoint(new PointF((float)p.Location.X, (float)p.Location.Y), _regionBottomLeftInMeters, pixelsPerMeter, bitmapDimensions); RectangleF circle = GetCircleBoundingBox(drawingPoint, _pointDrawingDiameter); g.FillEllipse(brush, circle); g.DrawEllipse(pen, circle); } } #endregion } } })); t.Start(i); threads.Add(t); } foreach (Thread t in threads) t.Join(); #endregion if (_sliceThreatSurface != null) { foreach (Bitmap threatSurface in _sliceThreatSurface.Values) threatSurface.Dispose(); _sliceThreatSurface.Clear(); } _sliceThreatSurface = newSliceThreatSurface; timeSlice.ValueChanged -= new EventHandler(timeSlice_ValueChanged); timeSlice.Minimum = (int)_sliceThreatSurface.Keys.Min(); timeSlice.Maximum = (int)_sliceThreatSurface.Keys.Max(); if (displayFirstSlice) timeSlice.Value = timeSlice.Minimum; timeSlice.ValueChanged += new EventHandler(timeSlice_ValueChanged); _zoomedImageWidth = CurrentThreatSurface.Width; Invalidate(); }
public override void Display(Prediction prediction, IEnumerable<Overlay> overlays) { base.Display(prediction, overlays); _dragging = false; _draggingStart = System.Drawing.Point.Empty; _panOffset = new Size(0, 0); _panIncrement = 50; DiscreteChoiceModel model = prediction.Model; Dictionary<int, Point> idPoint = new Dictionary<int, Point>(); foreach (Point p in prediction.Points) idPoint.Add(p.Id, p); _incidentColor = new Dictionary<string, Color>(); _sliceIncidentPointScores = new Dictionary<long, Dictionary<string, List<Tuple<PointF, double>>>>(); float minPointX = float.MaxValue; float minPointY = float.MaxValue; float maxPointX = float.MinValue; float maxPointY = float.MinValue; foreach (PointPrediction pointPrediction in prediction.PointPredictions) { long slice = -1; if (model is TimeSliceDCM) slice = (long)(pointPrediction.Time.Ticks / (model as TimeSliceDCM).TimeSliceTicks); _sliceIncidentPointScores.EnsureContainsKey(slice, typeof(Dictionary<string, List<Tuple<PointF, double>>>)); Point point = idPoint[pointPrediction.PointId]; foreach (string incident in pointPrediction.IncidentScore.Keys) { Color color; if (!_incidentColor.TryGetValue(incident, out color)) { color = ColorPalette.GetColor(); _incidentColor.Add(incident, color); } double score = pointPrediction.IncidentScore[incident]; _sliceIncidentPointScores[slice].EnsureContainsKey(incident, typeof(List<Tuple<PointF, double>>)); _sliceIncidentPointScores[slice][incident].Add(new Tuple<PointF, double>(new PointF((float)point.Location.X, (float)point.Location.Y), score)); } float x = (float)point.Location.X; float y = (float)point.Location.Y; if (x < minPointX) minPointX = x; if (x > maxPointX) maxPointX = x; if (y < minPointY) minPointY = y; if (y > maxPointY) maxPointY = y; } if (_sliceIncidentPointScores.Count == 0) { Console.Out.WriteLine("No prediction points were generated for this prediction. There is nothing to display or evaluate."); Clear(); return; } Invoke(new Action(delegate() { incidentTypeCheckBoxes.Controls.Clear(); bool first = true; foreach (string incidentType in _incidentColor.Keys) { ColoredCheckBox cb = new ColoredCheckBox(true, first ? CheckState.Checked : CheckState.Unchecked, incidentType, _incidentColor[incidentType]); cb.CheckBoxCheckStateChanged += new EventHandler(IncidentCheckBox_CheckStateChanged); cb.LabelClicked += new EventHandler(IncidentCheckBox_LabelClicked); incidentTypeCheckBoxes.Controls.Add(cb); first = false; } overlayCheckBoxes.Controls.Clear(); foreach (Overlay overlay in Overlays) { ColoredCheckBox cb = new ColoredCheckBox(false, overlay.Displayed ? 
CheckState.Checked : CheckState.Unchecked, overlay.Name, overlay.Color); cb.CheckBoxCheckedChanged += new EventHandler(OverlayCheckBox_CheckedChanged); cb.LabelClicked += new EventHandler(OverlayCheckBox_LabelClicked); overlayCheckBoxes.Controls.Add(cb); IEnumerable<float> xs = overlay.Points.SelectMany(points => points).Select(point => point.X); IEnumerable<float> ys = overlay.Points.SelectMany(points => points).Select(point => point.Y); float minX = xs.Min(); float maxX = xs.Max(); float minY = ys.Min(); float maxY = ys.Max(); if (minX < minPointX) minPointX = minX; if (maxX > maxPointX) maxPointX = maxX; if (minY < minPointY) minPointY = minY; if (maxY > maxPointY) maxPointY = maxY; } ColoredCheckBox displayPredictionPointsCheckbox = new ColoredCheckBox(false, CheckState.Unchecked, "prediction points", _predictionPointColor); displayPredictionPointsCheckbox.CheckBoxCheckedChanged += new EventHandler(DisplayPredictionPoints_CheckedChanged); displayPredictionPointsCheckbox.LabelClicked += new EventHandler(DisplayPredictionPoints_LabelClicked); overlayCheckBoxes.Controls.Add(displayPredictionPointsCheckbox); _displayPredictionPoints = displayPredictionPointsCheckbox.Checked; _regionBottomLeftInMeters = new PointF(minPointX, minPointY); _regionSizeInMeters = new SizeF(maxPointX - minPointX, maxPointY - minPointY); bool generateThreatSurfaces = threatResolution.Value != prediction.PredictionPointSpacing; // changing the threat resolution will generate new threat surfaces, so only do it here if we won't be changing the current resolution value threatResolution.Value = threatResolution.Minimum = prediction.PredictionPointSpacing; if (!generateThreatSurfaces) GetThreatSurfaces(ClientRectangle, true); GetSliceTimeText(); })); }
/// <summary> /// Gets lexically related words for the current synset. Many of the relations in WordNet are lexical instead of semantic. Whereas /// the latter indicate relations between entire synsets (e.g., hypernym), the former indicate relations between specific /// words in synsets. This method retrieves all lexical relations and the words related thereby. /// </summary> /// <returns>Mapping from relations to mappings from words in the current synset to related words in the related synsets</returns> public Dictionary<SynSetRelation, Dictionary<string, List<string>>> GetLexicallyRelatedWords() { var relatedWords = new Dictionary<SynSetRelation, Dictionary<string, List<string>>>(); foreach (var relation in lexicalRelations.Keys) { relatedWords.EnsureContainsKey(relation, typeof(Dictionary<string, List<string>>)); foreach (var relatedSynSet in lexicalRelations[relation].Keys) { // make sure related synset is initialized if (!relatedSynSet.Instantiated) relatedSynSet.Instantiate(wordNet.Provider); foreach (var sourceWordIndex in lexicalRelations[relation][relatedSynSet].Keys) { var sourceWord = Words[sourceWordIndex - 1]; relatedWords[relation].EnsureContainsKey(sourceWord, typeof(List<string>), false); foreach (var targetWordIndex in lexicalRelations[relation][relatedSynSet][sourceWordIndex]) { var targetWord = relatedSynSet.Words[targetWordIndex - 1]; relatedWords[relation][sourceWord].Add(targetWord); } } } } return relatedWords; }
protected override void Run(Prediction prediction) { List<PostGIS.Point> predictionPoints = new List<PostGIS.Point>(); Area predictionArea = prediction.PredictionArea; double areaMinX = predictionArea.BoundingBox.MinX; double areaMaxX = predictionArea.BoundingBox.MaxX; double areaMinY = predictionArea.BoundingBox.MinY; double areaMaxY = predictionArea.BoundingBox.MaxY; for (double x = areaMinX + prediction.PredictionPointSpacing / 2d; x <= areaMaxX; x += prediction.PredictionPointSpacing) // place points in the middle of the square boxes that cover the region - we get display errors from pixel rounding if the points are exactly on the boundaries for (double y = areaMinY + prediction.PredictionPointSpacing / 2d; y <= areaMaxY; y += prediction.PredictionPointSpacing) predictionPoints.Add(new PostGIS.Point(x, y, predictionArea.Shapefile.SRID)); List<PostGIS.Point> incidentPoints = new List<PostGIS.Point>(Incident.Get(TrainingStart, TrainingEnd, predictionArea, IncidentTypes.ToArray()).Select(i => i.Location)); predictionPoints.AddRange(incidentPoints); Console.Out.WriteLine("Filtering prediction points to prediction area"); predictionPoints = predictionArea.Intersects(predictionPoints, prediction.PredictionPointSpacing / 2f).Select(i => predictionPoints[i]).ToList(); NpgsqlConnection connection = DB.Connection.OpenConnection; try { Console.Out.WriteLine("Inserting points into prediction"); Point.CreateTable(prediction, predictionArea.Shapefile.SRID); List<int> predictionPointIds = Point.Insert(connection, predictionPoints.Select(p => new Tuple<PostGIS.Point, string, DateTime>(p, PointPrediction.NullLabel, DateTime.MinValue)), prediction, predictionArea, false); Console.Out.WriteLine("Running overall KDE for " + IncidentTypes.Count + " incident type(s)"); List<float> density = GetDensityEstimate(incidentPoints, _trainingSampleSize, false, 0, 0, predictionPoints, _normalize); Dictionary<int, float> pointIdOverallDensity = new Dictionary<int, float>(predictionPointIds.Count); int pointNum = 0; foreach (int predictionPointId in predictionPointIds) pointIdOverallDensity.Add(predictionPointId, density[pointNum++]); Dictionary<int, Dictionary<string, double>> pointIdIncidentDensity = new Dictionary<int, Dictionary<string, double>>(pointIdOverallDensity.Count); if (IncidentTypes.Count == 1) { string incident = IncidentTypes.First(); foreach (int pointId in pointIdOverallDensity.Keys) { Dictionary<string, double> incidentDensity = new Dictionary<string, double>(); incidentDensity.Add(incident, pointIdOverallDensity[pointId]); pointIdIncidentDensity.Add(pointId, incidentDensity); } } else foreach (string incidentType in IncidentTypes) { Console.Out.WriteLine("Running KDE for incident \"" + incidentType + "\""); incidentPoints = new List<PostGIS.Point>(Incident.Get(TrainingStart, TrainingEnd, predictionArea, incidentType).Select(i => i.Location)); density = GetDensityEstimate(incidentPoints, _trainingSampleSize, false, 0, 0, predictionPoints, _normalize); if (density.Count > 0) { pointNum = 0; foreach (int predictionPointId in predictionPointIds) { pointIdIncidentDensity.EnsureContainsKey(predictionPointId, typeof(Dictionary<string, double>)); pointIdIncidentDensity[predictionPointId].Add(incidentType, density[pointNum++]); } } } PointPrediction.CreateTable(prediction); PointPrediction.Insert(GetPointPredictionValues(pointIdOverallDensity, pointIdIncidentDensity), prediction, false); Smooth(prediction); } finally { DB.Connection.Return(connection); } }
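As a worked example of the grid construction at the top of Run, a 1000m x 600m bounding box with a 200m prediction-point spacing yields x centers at 100, 300, 500, 700, 900 and y centers at 100, 300, 500, i.e. 15 evenly spaced cell-center points before incident points are appended and the area filter is applied. The same arithmetic in isolation (all values, including the SRID, are illustrative):

// Cell-center grid for an illustrative 1000 x 600 meter box with 200m spacing (15 points).
double spacing = 200, minX = 0, maxX = 1000, minY = 0, maxY = 600;
List<PostGIS.Point> centers = new List<PostGIS.Point>();
for (double x = minX + spacing / 2d; x <= maxX; x += spacing)
    for (double y = minY + spacing / 2d; y <= maxY; y += spacing)
        centers.Add(new PostGIS.Point(x, y, 26918));   // SRID is illustrative
Console.Out.WriteLine(centers.Count);                  // 15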
/// <summary> /// Initializes a new instance of the <see cref="WordNetMemoryProvider"/> class. /// </summary> /// <param name="dataPath">The data path.</param> /// <exception cref="System.ArgumentNullException">dataPath</exception> /// <exception cref="System.IO.DirectoryNotFoundException">The data directory does not exist.</exception> /// <exception cref="System.IO.FileNotFoundException">A required WordNet file does not exist: [filename]</exception> public WordNetMemoryProvider(string dataPath) { if (string.IsNullOrEmpty(dataPath)) throw new ArgumentNullException("dataPath"); var dir = new DirectoryInfo(dataPath); if (!dir.Exists) throw new DirectoryNotFoundException("The data directory does not exist."); var dataPaths = new [] { new FileInfo(Path.Combine(dataPath, "data.adj")), new FileInfo(Path.Combine(dataPath, "data.adv")), new FileInfo(Path.Combine(dataPath, "data.noun")), new FileInfo(Path.Combine(dataPath, "data.verb")) }; var indexPaths = new [] { new FileInfo(Path.Combine(dataPath, "index.adj")), new FileInfo(Path.Combine(dataPath, "index.adv")), new FileInfo(Path.Combine(dataPath, "index.noun")), new FileInfo(Path.Combine(dataPath, "index.verb")) }; foreach (var file in dataPaths.Union(indexPaths).Where(file => !file.Exists)) throw new FileNotFoundException("A required WordNet file does not exist: " + file.Name); // Pass 1: Get total number of synsets var totalSynsets = 0; foreach (var dataInfo in dataPaths) { // scan synset data file for lines that don't start with a space... // these are synset definition lines using (var dataFile = new StreamReader(dataInfo.FullName)) { string line; while ((line = dataFile.ReadLine()) != null) { var firstSpace = line.IndexOf(' '); if (firstSpace > 0) ++totalSynsets; } } } // Pass 2: Create synset shells (pos and offset only) idSynset = new Dictionary<string, SynSet>(totalSynsets); foreach (var dataInfo in dataPaths) { var pos = WordNetFileProvider.GetFilePos(dataInfo.FullName); // scan synset data file using (var dataFile = new StreamReader(dataInfo.FullName)) { string line; while ((line = dataFile.ReadLine()) != null) { var firstSpace = line.IndexOf(' '); if (firstSpace <= 0) continue; // get offset and create synset shell var offset = int.Parse(line.Substring(0, firstSpace)); var synset = new SynSet(pos, offset, null); idSynset.Add(synset.Id, synset); } } } // Pass 3: Instantiate synsets (hooks up relations, set glosses, etc.) foreach (var dataInfo in dataPaths) { var pos = WordNetFileProvider.GetFilePos(dataInfo.FullName); // scan synset data file using (var dataFile = new StreamReader(dataInfo.FullName)) { string line; while ((line = dataFile.ReadLine()) != null) { var firstSpace = line.IndexOf(' '); if (firstSpace > 0) // instantiate synset defined on current line, using the instantiated synsets for all references idSynset[pos + ":" + int.Parse(line.Substring(0, firstSpace))].Instantiate(line, idSynset); } } } // organize synsets by pos and words... 
// also set most common synset for word-pos pairs that have multiple synsets posWordSynSets = new Dictionary<WordNetPos, Dictionary<string, List<SynSet>>>(); foreach (var indexInfo in indexPaths) { var pos = WordNetFileProvider.GetFilePos(indexInfo.FullName); posWordSynSets.EnsureContainsKey(pos, typeof(Dictionary<string, List<SynSet>>)); // scan word index file, skipping header lines using (var indexFile = new StreamReader(indexInfo.FullName)) { string line; while ((line = indexFile.ReadLine()) != null) { var firstSpace = line.IndexOf(' '); if (firstSpace <= 0) continue; // grab word and synset shells, along with the most common synset var word = line.Substring(0, firstSpace); SynSet mostCommonSynSet; var synsets = WordNetFileProvider.GetSynSetShells(line, pos, out mostCommonSynSet, wordNet); // set flag on most common synset if it's ambiguous if (synsets.Count > 1) idSynset[mostCommonSynSet.Id].SetAsMostCommonSynsetFor(word); // use reference to the synsets that we instantiated in our three-pass routine above posWordSynSets[pos].Add(word, new List<SynSet>(synsets.Count)); foreach (var synset in synsets) posWordSynSets[pos][word].Add(idSynset[synset.Id]); } } } }
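A minimal construction sketch for the provider above (the path is illustrative). All three passes run inside the constructor, so the documented exceptions surface immediately if the directory or any of the eight data/index files is missing:

// Build the fully in-memory provider over a standard WordNet "dict" directory.
try
{
    var provider = new WordNetMemoryProvider(@"C:\wordnet\dict");   // path is illustrative
}
catch (DirectoryNotFoundException) { /* data directory missing */ }
catch (FileNotFoundException)      { /* one of the data.* / index.* files missing */ }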