/// <summary>Initializes a unit from its token range, optional package doc and members.</summary>
/// <param name="span">Token range covered by this unit.</param>
/// <param name="packageDoc">Package-level documentation text, or null when absent.</param>
/// <param name="items">The member list this unit wraps.</param>
public Unit(TokenSpan span, String? packageDoc, MemberList items)
{
    Span = span;
    PackageDoc = packageDoc;
    Items = items;
    // Members always starts out as a fresh, empty distinct list.
    Members = new DistinctList<INamed, Class>();
}
/// <summary>Adding the same item instance twice must leave exactly one entry in the list.</summary>
public void should_not_add_the_same_item_twice()
{
    // Arrange: a distinct list and a single item instance.
    var basket = new DistinctList<ShopItem>();
    var megaphone = new ShopItem
    {
        Id = 1,
        Name = "Megaphone",
        Price = 200m
    };

    // Act: add the very same instance twice.
    basket.Add(megaphone);
    basket.Add(megaphone);

    // Assert: the duplicate was not stored.
    Assert.That(basket.Count(), Is.EqualTo(1));
}
/// <summary>Verifies that a DistinctList stores a repeated item only once.</summary>
public void should_not_add_the_same_item_twice()
{
    // Arrange
    var list = new DistinctList<ShopItem>();
    var duplicate = new ShopItem { Id = 1, Name = "Megaphone", Price = 200m };

    // Act - same reference added twice.
    list.Add(duplicate);
    list.Add(duplicate);

    // Assert - only one element survives.
    Assert.That(list.Count(), Is.EqualTo(1));
}
/// <summary>
/// Harness exercising a Client against a basket implementation; alternative
/// list types can be swapped in via the commented-out lines.
/// </summary>
public void client_test_harness()
{
    //var basket = new SimpleList<ShopItem>();
    //var basket = new IndexedList<ShopItem>();
    var basket = new DistinctList<ShopItem>();
    var client = new Client(basket);

    var megaphone = new ShopItem { Id = 1, Name = "Megaphone", Price = 200m };
    var trumpet = new ShopItem { Id = 2, Name = "Trumpet", Price = 20m };
    var telescope = new ShopItem { Id = 3, Name = "Telescope", Price = 3200m };

    // Six purchases in total, including repeated instances.
    ShopItem[] purchases = { megaphone, megaphone, megaphone, trumpet, telescope, telescope };

    client.AddAllPurchasesToBasket(purchases);

    // Expects all six purchases to be counted, duplicates included.
    Assert.That(basket.Count(), Is.EqualTo(6));
}
/// <summary>
/// Processes the seed URLs in passes (up to MaxProcessingDepth), collecting
/// every parsed URL into a de-duplicated result set.
/// </summary>
/// <param name="initialUrls">Seed URLs for the first pass.</param>
/// <returns>The accumulated, distinct collection of processed URLs.</returns>
public DistinctList<ParsedUrl> Run(IEnumerable<string> initialUrls)
{
    var manager = new ParsingManager(OnUrlProcessed);
    var invoker = new ParallelProcessInvoker(manager);

    var visited = new DistinctList<ParsedUrl>
    {
        EqualityComparer = new ParsedUrlComparer()
    };

    IEnumerable<string> pending = initialUrls;

    for (var depth = 1; depth <= MaxProcessingDepth; depth++)
    {
        OnNewLoopStarted(depth);

        var batch = invoker.Process(pending, CancellationToken, OnUrlProcessingErrorOccured);
        visited.AddRange(batch);

        // Stop as soon as cancellation has been requested.
        if (CancellationToken.IsCancellationRequested)
        {
            break;
        }

        // Remaining budget so the total never exceeds MaxUrlsToProcess.
        var remainingBudget = MaxUrlsToProcess - visited.Count;

        pending = visited
            .SelectMany(parsed => parsed.FoundUrls)
            .GetValidOnSiteUrls("hiring.monster.com") //TODO - remove the hardcoded value with a dynamic processing based on url
            .Distinct()
            .Except(visited.Select(parsed => parsed.Url))
            .Take(remainingBudget);
    }

    return visited;
}
/// <summary>
/// Drives a Client through six purchases (with repeats) and checks the basket
/// count; other basket implementations can be enabled via the commented lines.
/// </summary>
public void client_test_harness()
{
    //var basket = new SimpleList<ShopItem>();
    //var basket = new IndexedList<ShopItem>();
    var basket = new DistinctList<ShopItem>();
    var shopper = new Client(basket);

    var itemA = new ShopItem { Id = 1, Name = "Megaphone", Price = 200m };
    var itemB = new ShopItem { Id = 2, Name = "Trumpet", Price = 20m };
    var itemC = new ShopItem { Id = 3, Name = "Telescope", Price = 3200m };

    ShopItem[] cart = { itemA, itemA, itemA, itemB, itemC, itemC };

    shopper.AddAllPurchasesToBasket(cart);

    // All six purchases are expected in the basket, repeats included.
    Assert.That(basket.Count(), Is.EqualTo(6));
}
/// <summary>
/// For each match result that has at least one matched order file: reads the
/// action file, appends the order group to the title, merges every matched
/// order into the action data, writes the combined output, then clears the
/// per-match buffers.
/// </summary>
public void Do()
{
    actionTitle = new List<string>();
    actionData = new DistinctList<ActionDataCombinedWithOrderData>();

    foreach (var currentMatch in matchResult)
    {
        // Nothing to combine when no order files matched this action file.
        if (currentMatch.MatchedOrderFiles.Count == 0)
            continue;

        readAction(currentMatch.actionFile.path);
        appendOrderGroupToActionTitle();

        foreach (var currentOrder in currentMatch.MatchedOrderFiles)
        {
            combineOrderToAction(currentOrder.path);
        }

        // NOTE(review): the original defined a deferred LINQ query filtering
        // actionData on hasThisOrder but never enumerated or used it, so it had
        // no runtime effect. Removed as dead code - confirm write() was not
        // intended to consume that filtered set.
        write(currentMatch.actionFile);

        actionData.Clear();
        actionTitle.Clear();
    }
}
/// <summary>
/// Produces the combined patient-based output file, running the CD and DD
/// analyses once per (group, hashGroup) pair found among the input files.
/// </summary>
public void Do()
{
    // Materialise the distinct keys once: the originals were deferred LINQ
    // queries that re-scanned inputFile on every enumeration (the Count() in
    // the message box plus the nested foreach loops below).
    var groupList = (from q in inputFile
                     where q.FileType == "CD" || q.FileType == "DD"
                     select q.@group).Distinct().ToList();
    var hashGroupList = (from q in inputFile
                         where q.FileType == "CD" || q.FileType == "DD"
                         select q.hashGroup).Distinct().ToList();

    System.Windows.MessageBox.Show($"input file count = {inputFile.Count}, group count = {orderGroupList.Count}, dx count = {diagnosisGroupList.Count}, output folder = {outputFolder}, groupList ={groupList.Count}, hashGroupList = {hashGroupList.Count}");

    if (!Directory.Exists(outputFolder))
        Directory.CreateDirectory(outputFolder);

    using (swAllCombined = new StreamWriter(outputFolder + @"\" + "Patient Based Data _All.PBD", false, Encoding.Default))
    {
        swAllCombined.WriteLine(makeTitle());

        foreach (var Rgroup in groupList)
        {
            foreach (var Hgroup in hashGroupList)
            {
                // Fresh accumulator for every (group, hashGroup) combination.
                PatientList = new DistinctList<PatientBasedData>();
                analyzeCDfilesInOneGroup(Rgroup, Hgroup);
                analyzeDDfilesInOneGroup(Rgroup, Hgroup);
                writeFile(Rgroup, Hgroup);
                PatientList.Clear();
            }
        }
    }
}
/// <summary>
/// Starts processing from the given seed URLs, looping until the maximum
/// depth is reached or cancellation is requested; returns all processed URLs.
/// </summary>
/// <param name="initialUrls">The URLs to process on the first iteration.</param>
/// <returns>De-duplicated list of every URL that was processed.</returns>
public DistinctList<ParsedUrl> Run(IEnumerable<string> initialUrls)
{
    var parsingManager = new ParsingManager(OnUrlProcessed);
    var parallelInvoker = new ParallelProcessInvoker(parsingManager);

    var collected = new DistinctList<ParsedUrl> { EqualityComparer = new ParsedUrlComparer() };
    IEnumerable<string> nextUrls = initialUrls;
    var iteration = 1;

    while (iteration <= MaxProcessingDepth)
    {
        OnNewLoopStarted(iteration);

        var processed = parallelInvoker.Process(nextUrls, CancellationToken, OnUrlProcessingErrorOccured);
        collected.AddRange(processed);

        // Bail out of the loop once cancellation has been requested.
        if (CancellationToken.IsCancellationRequested)
        {
            break;
        }

        // Cap the next batch so the grand total stays within MaxUrlsToProcess.
        var budget = MaxUrlsToProcess - collected.Count;

        nextUrls = collected
            .SelectMany(u => u.FoundUrls)
            .GetValidOnSiteUrls("hiring.monster.com") //TODO - remove the hardcoded value with a dynamic processing based on url
            .Distinct()
            .Except(collected.Select(u => u.Url))
            .Take(budget);

        iteration++;
    }

    return collected;
}
/// <summary>
/// Rebuilds the ACTIONLIST criteria from the criteria files that pair with the
/// given file type, year and group, loading each file's key columns into a
/// distinct list of ActionData entries.
/// </summary>
/// <param name="currentFileType">Data file type whose paired criteria type is loaded.</param>
/// <param name="currentYear">Year the criteria files must match.</param>
/// <param name="currentGroup">Group the criteria files must match.</param>
void initializeActionCriteria(string currentFileType, string currentYear, string currentGroup)
{
    var NewActionCriteriaList = new DistinctList<ActionData>();

    // Map a data file type to its paired criteria file type (CD<->OO, DD<->DO,
    // GD<->GO). A switch replaces the original run of six independent ifs.
    string matchFileType;
    switch (currentFileType)
    {
        case "CD": matchFileType = "OO"; break;
        case "DD": matchFileType = "DO"; break;
        case "GD": matchFileType = "GO"; break;
        case "OO": matchFileType = "CD"; break;
        case "DO": matchFileType = "DD"; break;
        case "GO": matchFileType = "GD"; break;
        default: matchFileType = ""; break;
    }

    // Find the criteria files of the same group and year with the paired type.
    var FilesOfTheGroup = from q in CriteriaList.Find(x => x.key == "ACTIONLIST").ActionCriteriaFileList
                          where q.@group == currentGroup && q.year == currentYear && q.FileType == matchFileType
                          select q;

    // Load the key columns of every matching file into the new criteria list.
    foreach (var f in FilesOfTheGroup)
    {
        using (var sr = new StreamReader(f.path, Encoding.Default))
        {
            // First line is the tab-separated header; resolve column indexes by name.
            string[] titles = sr.ReadLine().Split('\t');
            var indexFEE_YM = Array.FindIndex(titles, x => x == "FEE_YM");
            var indexAPPL_TYPE = Array.FindIndex(titles, x => x == "APPL_TYPE");
            var indexHOSP_ID = Array.FindIndex(titles, x => x == "HOSP_ID");
            var indexAPPL_DATE = Array.FindIndex(titles, x => x == "APPL_DATE");
            var indexCASE_TYPE = Array.FindIndex(titles, x => x == "CASE_TYPE");
            var indexSEQ_NO = Array.FindIndex(titles, x => x == "SEQ_NO");

            while (!sr.EndOfStream)
            {
                string[] linesplit = sr.ReadLine().Split('\t');
                NewActionCriteriaList.AddDistinct(new ActionData
                {
                    FEE_YM = linesplit[indexFEE_YM],
                    APPL_TYPE = linesplit[indexAPPL_TYPE],
                    HOSP_ID = linesplit[indexHOSP_ID],
                    APPL_DATE = linesplit[indexAPPL_DATE],
                    CASE_TYPE = linesplit[indexCASE_TYPE],
                    SEQ_NO = linesplit[indexSEQ_NO]
                });
            }
        }
    }

    CriteriaList.Find(x => x.key == "ACTIONLIST").ActionCriteriaList = NewActionCriteriaList;
}
// step 2.1 - 2.6
/// <summary>
/// Rebuilds the IDLIST criteria for one group: reads every criteria file of
/// that group and loads each row's ID + Birthday into a distinct IDData list.
/// </summary>
/// <param name="CurrentGroup">Group whose criteria files are loaded.</param>
void initializeIDCriteria(string CurrentGroup)
{
    var rebuiltList = new DistinctList<IDData>();

    // Locate the criteria files belonging to the requested group.
    var idListEntry = CriteriaList.Find(x => x.key == "IDLIST");
    var groupFiles = idListEntry.IDCriteriaFileList.Where(q => q.@group == CurrentGroup);

    // Load ID + Birthday from every file of the group into the new list.
    foreach (var criteriaFile in groupFiles)
    {
        using (var reader = new StreamReader(criteriaFile.path, Encoding.Default))
        {
            // Header row: find the birthday column (either spelling) and the ID column.
            string[] header = reader.ReadLine().Split('\t');
            var birthdayColumn = Array.FindIndex(header, x => x.IndexOf("BIRTHDAY") >= 0 || x.IndexOf("Birthday") >= 0);
            var idColumn = Array.FindIndex(header, x => x == "ID");

            while (!reader.EndOfStream)
            {
                string[] fields = reader.ReadLine().Split('\t');
                rebuiltList.AddDistinct(new IDData
                {
                    ID = fields[idColumn],
                    Birthday = fields[birthdayColumn]
                });
            }
        }
    }

    CriteriaList.Find(x => x.key == "IDLIST").IDCriteriaList = rebuiltList;
}
// Reads every selected raw ID file, standardises each row into StandarizedIDData,
// de-duplicates rows per hash bucket, and writes one output file per
// (group, hashGroup) combination.
private void ReadAndStandarizeIDFile()
{
    // Distinct group / hashGroup keys among the selected raw data files.
    var distinctGroupQuery = (from q in rawDataFileList where q.selected == true select q.@group).Distinct();
    var distinctHashGroupQuery = (from q in rawDataFileList where q.selected == true select q.hashGroup).Distinct();
    // NOTE(review): counter is incremented when AddDistinct returns a negative
    // index (see below) but is never read afterwards in this view - confirm.
    int counter = 0;
    foreach (string currentDistinctGroup in distinctGroupQuery)
    {
        foreach (string currentDistinctHashGroup in distinctHashGroupQuery)
        {
            var filesInThisGroup = from q in rawDataFileList where (q.@group == currentDistinctGroup && q.hashGroup == currentDistinctHashGroup && q.selected == true) select q;
            // One DistinctList per hash bucket; getIDHash() selects the bucket for a row.
            DistinctList<StandarizedIDData>[] standarizedIDDataTable = new DistinctList<StandarizedIDData>[hashTableElementCount];
            for (int i = 0; i < hashTableElementCount; i++) standarizedIDDataTable[i] = new DistinctList<StandarizedIDData>();
            foreach (File currentFile in filesInThisGroup)
            {
                // Column layouts are year-dependent; fetch the formats for this file's year.
                List<StringDataFormat> stringDataFormatsForCurrentFile = getStringDataFormatsWithRightYear(currentFile);
                List<NumberDataFormat> numberDataFormatsForCurrentFile = getNumberDataFormatsWithRightYear(currentFile);
                // Resolve the columns of interest by key name.
                int indexID = stringDataFormatsForCurrentFile.FindIndex(x => x.key == "ID");
                int indexBirthday = stringDataFormatsForCurrentFile.FindIndex(x => x.key == "ID_BIRTHDAY");
                int indexSex = stringDataFormatsForCurrentFile.FindIndex(x => x.key == "ID_SEX");
                int indexInDate = stringDataFormatsForCurrentFile.FindIndex(x => x.key == "ID_IN_DATE");
                int indexOutDate = stringDataFormatsForCurrentFile.FindIndex(x => x.key == "ID_OUT_DATE");
                using (var sr = new StreamReader(currentFile.path, Encoding.Default))
                {
                    while (!sr.EndOfStream)
                    {
                        var dataRow = ReadRow(sr, stringDataFormatsForCurrentFile, numberDataFormatsForCurrentFile);
                        // Skip rows whose birthday falls outside the configured birth-year range.
                        if (!isMatchBirthYearRange(dataRow.stringData[indexBirthday])) continue;
                        // first*/last* dates all start from this row's in/out dates.
                        var newIDData = new StandarizedIDData() { ID = dataRow.stringData[indexID], Birthday = dataRow.stringData[indexBirthday], isMale = dataRow.stringData[indexSex] == "M", firstInDate = dataRow.stringData[indexInDate].StringToDate(), lastInDate = dataRow.stringData[indexInDate].StringToDate(), firstOutDate = dataRow.stringData[indexOutDate].StringToDate(), lastOutDate = dataRow.stringData[indexOutDate].StringToDate() };
                        uint hash = getIDHash(newIDData);
                        // NOTE(review): index >= 0 appears to mean AddDistinct found an
                        // existing duplicate, whose stored sex/dates are then overwritten
                        // with this row's values - confirm against AddDistinct's contract.
                        int index = standarizedIDDataTable[hash].AddDistinct(newIDData);
                        if (index >= 0)
                        {
                            standarizedIDDataTable[hash][index].isMale = newIDData.isMale;
                            standarizedIDDataTable[hash][index].firstInDate = newIDData.firstInDate;
                            standarizedIDDataTable[hash][index].firstOutDate = newIDData.firstOutDate;
                            standarizedIDDataTable[hash][index].lastInDate = newIDData.lastInDate;
                            standarizedIDDataTable[hash][index].lastOutDate = newIDData.lastOutDate;
                        }
                        else
                        {
                            counter++;
                        }
                    }
                }
            }
            // Write all buckets of this (group, hashGroup) under a single title line.
            var outputFilePath = getOutputFilePath(currentDistinctGroup, currentDistinctHashGroup);
            using (var sw = new StreamWriter(outputFilePath, false, System.Text.Encoding.Default))
            {
                sw.WriteLine(StandarizedIDData.ToTitle());
                foreach (var thisTable in standarizedIDDataTable)
                    foreach (var thisstandarizedIDData in thisTable)
                    {
                        sw.WriteLine(thisstandarizedIDData.ToWriteLine());
                    }
            }
        }
    }
}