public void NonEmptyZip()
{
    PrepareZip("a", "b");

    Assert.True(_sut.HasNext());
    Assert.AreEqual("a", _sut.GetNext<string>());
    Assert.True(_sut.HasNext());
    Assert.AreEqual("b", _sut.GetNext<string>());
    Assert.False(_sut.HasNext());
}
public void Run()
{
    Console.Write("looking (recursively) for events in folder {0}\n", Path.GetFullPath(_eventsDir));

    /*
     * Each .zip that is contained in the eventsDir represents all events
     * that we have collected for a specific user, the folder represents the
     * first day when the user uploaded data.
     */
    var userZips = FindUserZips();

    foreach (var userZip in userZips)
    {
        Console.Write("\n#### processing user zip: {0} #####\n", userZip);

        // open the .zip file ...
        using (IReadingArchive ra = new ReadingArchive(Path.Combine(_eventsDir, userZip)))
        {
            // ... and iterate over content.
            while (ra.HasNext())
            {
                /*
                 * within the userZip, each stored event is contained as a
                 * single file that contains the Json representation of a
                 * subclass of IDEEvent.
                 */
                var e = ra.GetNext<IDEEvent>();

                // the events can then be processed individually
                process(e);
            }
        }
    }
}
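// The Run() example above relies on a FindUserZips() helper that is not part of this
// listing. The sketch below is an assumption about what such a helper could look like:
// it recursively enumerates all *.zip files below _eventsDir and returns paths relative
// to that folder, which is what the Path.Combine(_eventsDir, userZip) call above expects.
// (Requires System.IO, System.Linq, and System.Collections.Generic.)
private List<string> FindUserZips()
{
    var baseDir = Path.GetFullPath(_eventsDir);
    return Directory
           .EnumerateFiles(baseDir, "*.zip", SearchOption.AllDirectories)
           .Select(f => f.Substring(baseDir.Length).TrimStart(Path.DirectorySeparatorChar))
           .ToList();
}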
private void Process(int taskId)
{
    Log("({0}) starting", taskId);

    string relZip;
    while (GetNextZip(taskId, out relZip))
    {
        var zip = Path.Combine(_baseDir, relZip);
        using (var ra = new ReadingArchive(zip))
        {
            while (ra.HasNext())
            {
                try
                {
                    var e = ra.GetNext<IDEEvent>();
                    // ReSharper disable once UnusedVariable
                    var hc = e.GetHashCode();
                }
                catch
                {
                    Log("Exception thrown in {0} ({1}).", zip, ra.CurrentInternalFileName);
                    throw;
                }
            }
        }
    }

    Log("({0}) stopping", taskId);
}
public void Run()
{
    Console.Write("looking (recursively) for events in folder {0}\n", Path.GetFullPath(_eventsDir));

    /*
     * Each .zip that is contained in the eventsDir represents all events
     * that we have collected for a specific user, the folder represents the
     * first day when the user uploaded data.
     */
    var userZips = FindUserZips();
    var zipIterator = 0;

    foreach (var userZip in userZips)
    {
        zipIterator += 1;
        Console.Write("\n#### processing user zip: {0} #####\n", userZip);

        // open the .zip file ...
        using (IReadingArchive ra = new ReadingArchive(Path.Combine(_eventsDir, userZip)))
        {
            // ... and iterate over content.
            while (ra.HasNext())
            {
                /*
                 * within the userZip, each stored event is contained as a
                 * single file that contains the Json representation of a
                 * subclass of IDEEvent.
                 */
                try
                {
                    var e = ra.GetNext<IDEEvent>();

                    // the events can then be processed individually
                    foreach (Process task in _tasks)
                    {
                        task.process(e);
                    }
                }
                catch (System.InvalidOperationException e)
                {
                    Console.WriteLine(e.Message);
                }
                catch (Newtonsoft.Json.JsonReaderException e)
                {
                    Console.WriteLine(e.Message);
                }
                catch (Exception e)
                {
                    Console.WriteLine("unexpected error: " + e.Message);
                }
            }
        }

        // get the result for this zip and store it in the .txt result file
        foreach (Process task in _tasks)
        {
            task.getResult(zipIterator + " / " + userZips.Count);
        }
    }
}
private static IEnumerable<IDEEvent> ReadEventsFromZip(string zip)
{
    using (var ra = new ReadingArchive(zip))
    {
        while (ra.HasNext())
        {
            yield return ra.GetNext<IDEEvent>();
        }
    }
}
public IEnumerable<IDEEvent> ReadEvents(string exportFile)
{
    using (var ra = new ReadingArchive(exportFile))
    {
        while (ra.HasNext())
        {
            yield return ra.GetNext<IDEEvent>();
        }
    }
}
private IEnumerable<IDEEvent> ReadEventsForUser(string userZip)
{
    using (var ra = new ReadingArchive(_root + userZip))
    {
        while (ra.HasNext())
        {
            yield return ra.GetNext<IDEEvent>();
        }
    }
}
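// The three readers above are C# iterator methods, so they are evaluated lazily: the
// ReadingArchive is only opened once enumeration starts, and the using block (and with
// it the archive) is disposed when the foreach loop completes or the enumerator is
// disposed early. A minimal consumption sketch, assuming it sits next to the ReadEvents
// method shown above:
public int CountEvents(string exportFile)
{
    var numEvents = 0;
    foreach (var e in ReadEvents(exportFile))
    {
        numEvents++;
    }
    // the archive has been closed at this point, after the last event was read
    return numEvents;
}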
private EditLocationResults Analyze(string zip)
{
    var res = new EditLocationResults { Zip = zip };

    var file = _io.GetFullPath_In(zip);
    using (var ra = new ReadingArchive(file))
    {
        var locAnal = new RelativeEditLocationAnalysis();
        while (ra.HasNext())
        {
            var @event = ra.GetNext<IDEEvent>();
            res.NumEvents++;

            var complEvent = @event as CompletionEvent;
            if (complEvent == null)
            {
                continue;
            }

            var fileName = complEvent.ActiveDocument.FileName;
            if (fileName != null && !fileName.EndsWith(".cs"))
            {
                continue;
            }
            res.NumCompletionEvents++;

            var loc = locAnal.Analyze(complEvent.Context2.SST);
            if (!loc.HasEditLocation || loc.Size < 2)
            {
                continue;
            }
            res.NumLocations++;

            if (complEvent.TerminatedState == TerminationState.Applied)
            {
                res.AppliedEditLocations.Add(loc);
            }
            else
            {
                res.OtherEditLocations.Add(loc);
            }
        }
    }
    return res;
}
public IEnumerable<CompletionEvent> ReadCce(string zipName)
{
    var fullPath = Path.Combine(_dirCcEvents, zipName);
    using (var ra = new ReadingArchive(fullPath))
    {
        while (ra.HasNext())
        {
            var e = ra.GetNext<IDEEvent>() as CompletionEvent;
            if (e != null)
            {
                yield return e;
            }
        }
    }
}
public IEnumerable<IDEEvent> Read(string zip)
{
    var fullPath = Path.Combine(_dirEvents, zip);
    using (var ra = new ReadingArchive(fullPath))
    {
        while (ra.HasNext())
        {
            var e = ra.GetNext<IDEEvent>();
            if (e != null)
            {
                yield return e;
            }
        }
    }
}
public void Run(string rootDir)
{
    int numRepos = 0;
    int numSolutions = 0;
    int numSSTs = 0;
    long loc = 0;

    foreach (var user in GetSubdirs(rootDir))
    {
        foreach (var repo in GetSubdirs(Path.Combine(rootDir, user)))
        {
            numRepos++;
            Console.Write("##### {0}/{1} ##############################", user, repo);

            var repoPath = Path.Combine(rootDir, user, repo);
            foreach (var zip in GetArchives(repoPath))
            {
                numSolutions++;
                Console.WriteLine();
                Console.WriteLine("@@ {0} @@", zip);

                var zipPath = Path.Combine(repoPath, zip);
                using (var ra = new ReadingArchive(zipPath))
                {
                    while (ra.HasNext())
                    {
                        numSSTs++;
                        Console.Write('.');

                        var ctx = ra.GetNext<Context>();
                        var sstloc = CountLoc(ctx.SST);
                        loc += sstloc;
                    }
                }
            }
        }
    }

    Console.WriteLine("## RESULTS ##");
    Console.WriteLine("#repos: {0}", numRepos);
    Console.WriteLine("#solutions: {0}", numSolutions);
    Console.WriteLine("#types: {0}", numSSTs);
    Console.WriteLine("loc: {0}", loc);
}
public IUserProfileEvent TryGetUserProfile(string zipName)
{
    var fullPath = Path.Combine(_dirAllEvents, zipName);
    using (var ra = new ReadingArchive(fullPath))
    {
        while (ra.HasNext())
        {
            var e = ra.GetNext<IDEEvent>() as IUserProfileEvent;
            if (e != null)
            {
                return e;
            }
        }
    }
    return null;
}
private void Run(int taskId)
{
    string zip;
    while (_zips.TryTake(out zip))
    {
        Console.WriteLine();
        Console.WriteLine(@"({0}) Next Zip", taskId);
        using (var ra = new ReadingArchive(zip))
        {
            while (ra.HasNext())
            {
                var context = ra.GetNext<Context>();
                Console.Write('.');
                context.SST.Accept(new NameFixTester(), -1);
            }
        }
    }
}
public IEnumerable<IDEEvent> ReadAllLazy()
{
    while (_ra.HasNext())
    {
        IDEEvent e = null;
        try
        {
            e = _ra.GetNext<IDEEvent>();
        }
        catch (Exception ex)
        {
            _log(_ra.CurrentInternalFileName, ex);
        }
        if (e != null)
        {
            yield return e;
        }
    }
}
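// ReadAllLazy() reads from an archive field (_ra) and reports entries that fail to
// deserialize through a _log callback instead of aborting the whole read. A minimal
// usage sketch under the assumption that the enclosing reader takes the callback as a
// constructor argument; LazyEventReader and zipPath are illustrative names, not taken
// from the original source.
var broken = new List<string>();
var reader = new LazyEventReader(zipPath, (fileName, ex) => broken.Add(fileName + ": " + ex.Message));
foreach (var e in reader.ReadAllLazy())
{
    // only events that could be deserialized end up here
}
Console.WriteLine("skipped {0} broken entries", broken.Count);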
private IEnumerable<ICompletionEvent> ReadCce(string zipName)
{
    var fullPath = Path.Combine(_dirIn, zipName);
    using (var ra = new ReadingArchive(fullPath))
    {
        while (ra.HasNext())
        {
            var e = ra.GetNext<IDEEvent>();
            var ce = e as CompletionEvent;
            if (ce != null)
            {
                Console.Write('x');
                yield return ce;
            }
            else
            {
                Console.Write('.');
            }
        }
    }
}
private IEnumerable<ICompletionEvent> ReadCce(string zipName)
{
    var fullPath = Path.Combine(_dirIn, zipName);
    using (var ra = new ReadingArchive(fullPath))
    {
        while (ra.HasNext())
        {
            var e = ra.GetNext<IDEEvent>() as CompletionEvent;
            if (e != null)
            {
                if (e.TerminatedState == TerminationState.Applied)
                {
                    var sel = e.LastSelectedProposal;
                    if (sel != null && sel.Name is IMethodName)
                    {
                        yield return e;
                    }
                }
            }
        }
    }
}
public void Run()
{
    int progressPortion;
    Stopwatch watch = new Stopwatch();
    Dictionary<String, Developer> developers = new Dictionary<String, Developer>();
    String developerId;
    Completion comEvent;
    Developer developer;

    btn_run.Enabled = false;
    String eventsDir = tb_eventDir.Text;
    Console.Write("looking (recursively) for events in folder {0}\n", Path.GetFullPath(eventsDir));
    pb_file_progress.Value = 0;
    pb_quick.Value = 0;

    /*
     * Each .zip that is contained in the eventsDir represents all events
     * that we have collected for a specific user, the folder represents the
     * first day when the user uploaded data.
     */
    var userZips = FindUserZips(eventsDir);
    progressPortion = 100 / userZips.Count;
    watch.Start();

    foreach (var userZip in userZips)
    {
        Console.Write("\n#### processing user zip: {0} #####\n", userZip);

        // open the .zip file ...
        using (IReadingArchive ra = new ReadingArchive(Path.Combine(eventsDir, userZip)))
        {
            // ... and iterate over content.
            while (ra.HasNext())
            {
                /*
                 * within the userZip, each stored event is contained as a
                 * single file that contains the Json representation of a
                 * subclass of IDEEvent.
                 */
                var e = ra.GetNext<IDEEvent>();

                // the events can then be processed individually
                if (pb_quick.Value == 100)
                {
                    pb_quick.Value = 0;
                }

                developerId = process(e, out comEvent);
                if (comEvent != null)
                {
                    developers.TryGetValue(developerId, out developer);
                    if (developer == null)
                    {
                        developer = new Developer(developerId);
                        developer.addEvent(comEvent);
                        developers.Add(developerId, developer);
                    }
                    else
                    {
                        developer.addEvent(comEvent);
                    }
                }

                pb_quick.Value += 1;
                lbl_time.Text = watch.Elapsed.ToString(@"hh\:mm\:ss");
                Application.DoEvents();
            }
        }
        pb_file_progress.Value += progressPortion;
    }

    using (StreamWriter writer = new StreamWriter(tb_output.Text + "\\summaries.csv"))
    {
        TimeSpan total_time = new TimeSpan(0, 0, 0);
        TimeSpan total_approved = new TimeSpan(0, 0, 0);
        TimeSpan total_canceled = new TimeSpan(0, 0, 0);
        TimeSpan total_filtered = new TimeSpan(0, 0, 0);
        TimeSpan total_dev_time = new TimeSpan(0, 0, 0);

        foreach (KeyValuePair<string, Developer> entry in developers)
        {
            developer = entry.Value;
            writer.WriteLine("Developer " + developer.session_id);
            developer.runStats();
            developer.writeEvents(tb_output.Text);
            writer.WriteLine("Total time per dev " + developer.total_time.ToString(@"hh\:mm\:ss\:fff"));
            total_time = total_time.Add(developer.total_time);
            total_approved = total_approved.Add(developer.total_approved);
            total_canceled = total_canceled.Add(developer.total_canceled);
            total_filtered = total_filtered.Add(developer.total_filtered);
            total_dev_time = total_dev_time.Add(developer.total_span);
        }

        writer.WriteLine("Approved " + total_approved.ToString(@"hh\:mm\:ss\:fff"));
        writer.WriteLine("Canceled " + total_canceled.ToString(@"hh\:mm\:ss\:fff"));
        writer.WriteLine("Filtered " + total_filtered.ToString(@"hh\:mm\:ss\:fff"));
        writer.WriteLine("Total time " + total_time.ToString(@"hh\:mm\:ss\:fff"));
        writer.WriteLine("Total Dev Time " + total_dev_time.ToString(@"hh\:mm\:ss\:fff"));
    }

    //using(StreamWriter writer = new StreamWriter(tb_output.Text + "\\summaries.csv")){
    //    foreach (KeyValuePair<string, Developer> entry in developers)
    //    {
    //        developer = entry.Value;
    //        if (developer.eventsLists.Count > 0)
    //        {
    //            developer.writeEvents(tb_output.Text);
    //            writer.WriteLine("Developer: " + developer.session_id);
    //            foreach (String summary in developer.summaries)
    //            {
    //                writer.WriteLine("\t" + summary);
    //            }
    //        }
    //    }
    //}

    watch.Stop();
    pb_file_progress.Value = 100;
    lbl_time.Text = watch.Elapsed.ToString(@"hh\:mm\:ss");
    btn_run.Enabled = true;
}
public void Run()
{
    Console.WriteLine("Grab Names from Contexts");
    var ctx = FindInputFiles();
    var numZips = ctx.Count();
    var currentZip = 1;
    var numTotalCtxs = 0;
    var numTotalUsages = 0;
    List<Tuple<string, List<string>>> ssts = new KaVEList<Tuple<string, List<string>>>();

    foreach (var fileName in ctx)
    {
        Log("### processing zip {0}/{1}: {2}", currentZip++, numZips, fileName);
        var fullFileIn = _dirIn + fileName;
        using (var ra = new ReadingArchive(fullFileIn))
        {
            Log("reading contexts...");
            var numCtxs = 0;
            while (ra.HasNext())
            {
                var context = ra.GetNext<Context>();
                var list = new KaVEList<string>();
                // TODO: grab names in a NameToJsonConverter
                numCtxs++;
            }
            Log("found {0} contexts\n\n", numCtxs);
            if (_numMaxZips != -1 && currentZip == _numMaxZips + 1)
            {
                break;
            }
        }
    }

    var typeNameNullCount = 0;
    var typeNameCount = 0;
    var methodNameNullCount = 0;
    var methodNameCount = 0;
    List<Tuple<string, string>> wrongSyntaxTypeName = new KaVEList<Tuple<string, string>>();

    foreach (var t in ssts)
    {
        foreach (var s in t.Item2)
        {
            var type = s.Split(':');
            if (type[0].Equals("CSharp.PropertyName"))
            {
                typeNameCount++;
                var name = s.Deserialize<IName>();
                if (name.Identifier == "?")
                {
                    wrongSyntaxTypeName.Add(new Tuple<string, string>(s, t.Item1));
                    typeNameNullCount++;
                }
            }
        }
    }

    Log("{0} of {1} names are null", typeNameNullCount, typeNameCount);
    Log("{0} of {1} names are null", methodNameNullCount, methodNameCount);

    double percentageTypeNames = typeNameNullCount / (double)typeNameCount;
    double percentageMethodNames = methodNameNullCount / (double)methodNameCount;
    Log("TypeNames not parseable: {0}%\n", percentageTypeNames);
    Log("PropertyName not parseable: {0}%\n\n", percentageMethodNames);

    //showInvalidNames(wrongSyntaxTypeName);
    Log("\n\n");
    //showInvalidNames(wrongSyntaxMethodName);

    if (_writeToFile)
    {
        Log("File with invalid names written to {0}", _dirOut);
        writeToFile(wrongSyntaxTypeName, _dirOut + "/typename.txt");
    }

    //Log(wrongSyntax[0].Item1 + "\n\n");
    //Log(wrongSyntax[0].Item2 + "\n\n");
}
public void Run(string rootDir)
{
    var numRepos = 0;
    var numSolutions = 0;
    var numSSTs = 0;
    long loc = 0;

    var repoSizes = new Dictionary<string, long>();
    var repoCounts = new Dictionary<IAssemblyName, int>();
    var slnCounts = new Dictionary<IAssemblyName, int>();
    var sstCounts = new Dictionary<IAssemblyName, int>();

    foreach (var user in GetSubdirs(rootDir))
    {
        foreach (var repo in GetSubdirs(Path.Combine(rootDir, user)))
        {
            var repoLoc = 0;
            numRepos++;
            Console.Write(
                "({2}) ##### {0}/{1} (repo #{3}) ############################## ",
                user,
                repo,
                DateTime.Now,
                numRepos);

            var repoApis = new HashSet<IAssemblyName>();
            var repoPath = Path.Combine(rootDir, user, repo);
            foreach (var zip in GetArchives(repoPath))
            {
                numSolutions++;
                Console.WriteLine();
                Console.WriteLine("({1}) @@ {0} (sln #{2}) @@", zip, DateTime.Now, numSolutions);

                var slnApis = new HashSet<IAssemblyName>();
                var zipPath = Path.Combine(repoPath, zip);
                using (var ra = new ReadingArchive(zipPath))
                {
                    while (ra.HasNext())
                    {
                        numSSTs++;
                        Console.Write('.');

                        var ctx = ra.GetNext<Context>();
                        var sstloc = CountLoc(ctx.SST);
                        repoLoc += sstloc;

                        var apis = FindAPIs(ctx.SST);
                        foreach (var api in apis)
                        {
                            repoApis.Add(api);
                            slnApis.Add(api);
                        }
                        CountApis(apis, sstCounts);
                    }
                }
                CountApis(slnApis, slnCounts);
            }

            repoSizes[user + "/" + repo] = repoLoc;
            loc += repoLoc;

            Console.WriteLine();
            CountApis(repoApis, repoCounts);
        }
    }

    Console.WriteLine();
    Console.WriteLine("## RESULTS ##");
    Console.WriteLine();
    Console.WriteLine("#repos: {0}", numRepos);
    Console.WriteLine("#solutions: {0}", numSolutions);
    Console.WriteLine("#ssts: {0}", numSSTs);
    Console.WriteLine("loc: {0}", loc);
    Console.WriteLine();
    Console.WriteLine();
    Console.WriteLine("Repo\tLoc");
    foreach (var repo in repoSizes.Keys)
    {
        Console.WriteLine("{0}\t{1}", repo, repoSizes[repo]);
    }
    Console.WriteLine();
    Console.WriteLine();
    Console.WriteLine("Name\tVersion\t#repo\t#sln\t#sst");
    foreach (var api in repoCounts.Keys)
    {
        Console.WriteLine(
            "\"{0}\"\t{1}\t{2}\t{3}\t{4}",
            api.Name,
            api.Version,
            repoCounts[api],
            slnCounts[api],
            sstCounts[api]);
    }
}