public void AnalyseTest()
        {
            // Build a collection containing four well-separated clusters,
            // each holding 10 generated events.
            EventCollection collection = new EventCollection();

            collection.AddRange(GenerateCluster(51.0, 51.1, -21.0, -21.1, 10));
            collection.AddRange(GenerateCluster(41.0, 41.1, -11.0, -11.1, 10));
            collection.AddRange(GenerateCluster(31.0, 31.1, -1.0, -1.1, 10));
            collection.AddRange(GenerateCluster(21.0, 21.1, 1.0, 1.1, 10));

            // Run DBSCAN over the collection
            DBSCAN scan = new DBSCAN(collection, 5);
            scan.Analyse();

            // The scan should discover exactly the four generated clusters
            Assert.AreEqual(4, scan.Clusters.Count);

            // Each discovered cluster should contain all 10 of its events
            for (int i = 0; i < 4; i++)
            {
                Assert.AreEqual(10, scan.Clusters[i].Count);
            }
        }
Example #2
0
        public void Initialise()
        {
            // Collection of all call-log events across the analysed weeks
            Events = new EventCollection();

            // Read in the drop logs for weeks 32 through 35. The four
            // original copy-pasted stanzas differed only in the week
            // number, so a single loop replaces them.
            for (int week = 32; week <= 35; week++)
            {
                KMLReader reader = new KMLReader(ROOT_DIR + "data\\L_wk" + week + "_drops.kml");
                Events.AddRange(reader.GetCallLogs());
            }

            // Initialise the Multi-Product Analysis Object with every event
            Analysis = new MultiProductAnalysis();
            Analysis.AddRange(Events);

            // Cluster the data
            Analysis.Cluster(EPS, MIN_POINTS);

            // Analyse all weeks
            Analysis.AnalyseProducts();
        }
        /// <summary>
        /// Analyses a given directory. The directory is first scanned for
        /// all KML files. The analysis produces an output KML file and a
        /// number of output JSON analysis files, depending upon the runtime
        /// options supplied.
        /// </summary>
        private static void AnalyseDirectory()
        {
            String directory = InputArguments["source"];

            // Nothing to do if the input directory is missing
            if (!Directory.Exists(directory))
            {
                return;
            }

            // Gather the call logs from every KML file in the directory
            EventCollection collection = new EventCollection();

            foreach (String file in Directory.GetFiles(directory, "*.kml", SearchOption.TopDirectoryOnly))
            {
                KMLReader reader = new KMLReader(file);
                collection.AddRange(reader.GetCallLogs());
            }

            // Perform the analysis
            PerformAnalysis(collection);
        }
Example #4
0
        /// <summary>
        /// Downloads the journal events for every remaining sync item in
        /// <paramref name="sic"/>, accumulating them into <paramref name="ec"/>.
        /// Repeatedly asks the LiveJournal server for events older than one
        /// second before the oldest pending sync item until none remain.
        /// </summary>
        /// <param name="or">User options row supplying the username, hashed password and optional journal to use.</param>
        /// <param name="iLJ">LiveJournal server proxy used for the challenge and getevents calls.</param>
        /// <param name="sic">Sync items still awaiting download; emptied as items are retrieved or found deleted.</param>
        /// <param name="deletedsic">Sync items believed deleted; any that do come back in a response are removed.</param>
        /// <param name="ec">Receives every event returned by the server.</param>
        static private void GetEvents(Journal.OptionsRow or, ILJServer iLJ, ref SyncItemCollection sic,
                                      ref SyncItemCollection deletedsic, ref EventCollection ec)
        {
            // for an explanation of this algorithm, see
            // http://www.livejournal.com/community/lj_clients/143312.html
            // note that this is a very painful algorithm.  it will loop an extra time for each
            // deleted syncitem that getevents doesn't return an event for.  if LJ decides to revise
            // how they return syncitems, this algorithm can be made more efficient.
            int total = sic.Count;

            while (sic.Count > 0)
            {
                // Request everything newer than one second before the oldest
                // pending item, authenticating via challenge/response.
                SyncItem             oldest     = sic.GetOldest();
                DateTime             oldestTime = DateTime.Parse(oldest.time, CultureInfo.InvariantCulture).AddSeconds(-1);
                GetChallengeResponse gcr        = iLJ.GetChallenge();
                string          auth_response   = MD5Hasher.Compute(gcr.challenge + or.HPassword);
                GetEventsParams gep             = new GetEventsParams(or.UserName, "challenge", gcr.challenge,
                                                                      auth_response, 1, 0, 0, 0, "syncitems", oldestTime.ToString(_datetimeformat), 0, 0, 0, 0,
                                                                      string.Empty, 0, "unix", (or.IsUseJournalNull() ? string.Empty : or.UseJournal));
                GetEventsResponse ger;
                // NOTE(review): socb appears to be a progress callback
                // (items done vs. total) — confirm against its declaration.
                socb(new SyncOperationEventArgs(SyncOperation.GetEvents, total - sic.Count, total));
                ger = iLJ.GetEvents(gep);
                // remove this item in case it isn't returned by getevents
                // this signifies that the item has been deleted
                // this also ensures we don't get stuck in an endless loop
                sic.Remove(oldest);
                sic.RemoveDownloaded(ger.events);
                deletedsic.RemoveDownloaded(ger.events);
                ec.AddRange(ger.events);
            }
        }
Example #5
0
 public ActionResult Save(Bam.Net.Logging.Data.Event[] values)
 {
     try
     {
         // Persist the posted events as a single batch
         EventCollection toSave = new EventCollection();
         toSave.AddRange(values);
         toSave.Save();

         // Report success with empty message/dao payloads
         return Json(new { Success = true, Message = "", Dao = "" });
     }
     catch (Exception ex)
     {
         return GetErrorResult(ex);
     }
 }
        public void AddRangeTest()
        {
            // The fixture collection starts with exactly four elements
            Assert.AreEqual(4, collection.Count);

            // Clone the collection, then append the clone to the original
            EventCollection clone = (EventCollection)collection.Clone();
            collection.AddRange(clone);

            // The original should now hold both copies of the data
            Assert.AreEqual(8, collection.Count);
        }
        public void AnalyseTest()
        {
            // Populate a fresh collection with four distinct clusters of
            // 10 events each.
            EventCollection collection = new EventCollection();

            collection.AddRange(GenerateCluster(51.0, 51.1, -21.0, -21.1, 10));
            collection.AddRange(GenerateCluster(41.0, 41.1, -11.0, -11.1, 10));
            collection.AddRange(GenerateCluster(31.0, 31.1, -1.0, -1.1, 10));
            collection.AddRange(GenerateCluster(21.0, 21.1, 1.0, 1.1, 10));

            // Cluster the data with DBSCAN
            DBSCAN scan = new DBSCAN(collection, 5);
            scan.Analyse();

            // Exactly four clusters should have been found
            Assert.AreEqual(4, scan.Clusters.Count);

            // Every cluster should contain its full set of 10 events
            for (int i = 0; i < 4; i++)
            {
                Assert.AreEqual(10, scan.Clusters[i].Count);
            }
        }
        /// <summary>
        /// Analyses a given directory. The directory is first scanned for
        /// all KML files. The analysis produces an output KML file and a
        /// number of output JSON analysis files, depending upon the runtime
        /// options supplied.
        /// </summary>
        private static void AnalyseDirectory()
        {
            String directory = InputArguments["source"];

            // Bail out early when the input directory does not exist
            if (!Directory.Exists(directory))
            {
                return;
            }

            // Accumulate the call logs from each KML file in the directory
            EventCollection collection = new EventCollection();

            foreach (String file in Directory.GetFiles(directory, "*.kml", SearchOption.TopDirectoryOnly))
            {
                KMLReader reader = new KMLReader(file);
                collection.AddRange(reader.GetCallLogs());
            }

            // Perform the analysis
            PerformAnalysis(collection);
        }
        public void Initialise()
        {
            // Collection of all call-log events across the analysed weeks
            Events = new EventCollection();

            // Read in the drop logs for weeks 32 through 35. The original
            // four near-identical stanzas varied only in the week number,
            // so a loop removes the duplication.
            for (int week = 32; week <= 35; week++)
            {
                KMLReader reader = new KMLReader(ROOT_DIR + "data\\L_wk" + week + "_drops.kml");
                Events.AddRange(reader.GetCallLogs());
            }

            // Initialise the Multi-Product Analysis Object with every event
            Analysis = new MultiProductAnalysis();
            Analysis.AddRange(Events);

            // Cluster the data
            Analysis.Cluster(EPS, MIN_POINTS);

            // Analyse all weeks
            Analysis.AnalyseProducts();
        }