/// <summary>
/// Scans <paramref name="dirPath"/> recursively and groups files that have identical
/// contents. Files are first bucketed by size; only same-size files are compared
/// byte-wise (via FilesAreEqual), so most files are never fully read.
/// Progress is reported through the surrounding BackgroundWorker-style <c>worker</c>.
/// </summary>
/// <param name="dirPath">Root directory to scan (all subdirectories included).</param>
/// <returns>A ScanResult listing each duplicate group and a removal suggestion per file.</returns>
public ScanResult FindDuplicateFiles(String dirPath) {
    worker.ReportProgress(0, "Preparing files ...");
    FileInfo[] files = new DirectoryInfo(dirPath).GetFiles("*", SearchOption.AllDirectories);
    // Sort descending by full path (note reversed comparands b/a).
    Array.Sort(files, Comparer<FileInfo>.Create((a,b) => b.FullName.CompareTo(a.FullName)));
    int total = files.Length;
    // progress doubles as the 1-based index of the file being scanned.
    double progress = 0;
    // size -> (representative file -> list of files equal to that representative)
    IDictionary<long, IDictionary<FileInfo, IList<FileInfo>>> byteTable = new Dictionary<long, IDictionary<FileInfo, IList<FileInfo>>>();
    foreach (FileInfo file in files) {
        worker.ReportProgress((int)(++progress/total*100), String.Format("Scanning files... ({0}/{1})", progress, total));
        // Compare size
        long fileSize = file.Length;
        if (!byteTable.ContainsKey(fileSize))
            byteTable.Add(fileSize, new Dictionary<FileInfo, IList<FileInfo>>());
        // Compare contents of the files with the same size
        IDictionary<FileInfo, IList<FileInfo>> fileTable = byteTable[fileSize]; // All files in fileTable have the same size
        bool foundDuplicate = false;
        // Compare the current file to each representative file in fileTable
        foreach (KeyValuePair<FileInfo, IList<FileInfo>> pair in fileTable) {
            // If we find a duplicate, add the file to that group's list and stop searching
            if (FilesAreEqual(pair.Key, file)) {
                foundDuplicate = true;
                pair.Value.Add(file);
                break;
            }
        }
        // No duplicate found: this file becomes the representative of a new group
        if (!foundDuplicate)
            fileTable.Add(file, new List<FileInfo>());
    }
    // Build the result: only groups with at least one duplicate are reported.
    worker.ReportProgress(100, "Build the result ...");
    ScanResult result = new ScanResult();
    int sum = 0; // total number of duplicate (i.e. removable) files
    foreach (IDictionary<FileInfo, IList<FileInfo>> fileTable in byteTable.Values) {
        foreach (KeyValuePair<FileInfo, IList<FileInfo>> pair in fileTable) {
            if (pair.Value.Count > 0) {
                // Group members sorted descending by path, same ordering as the initial sort.
                ISet<FileInfo> list = new SortedSet<FileInfo>(Comparer<FileInfo>.Create((a, b) => b.FullName.CompareTo(a.FullName)));
                list.Add(pair.Key);
                // The representative is kept (false); every duplicate is flagged removable (true).
                result.RemoveList.Add(pair.Key, false);
                foreach (FileInfo file in pair.Value) {
                    list.Add(file);
                    result.RemoveList.Add(file, true);
                }
                result.FileList.Add(pair.Key, list);
                sum += pair.Value.Count;
            }
        }
    }
    result.NumDuplicates = sum;
    return result;
}
public void Add ()
{
	// A freshly constructed set contains nothing.
	var numbers = new SortedSet<int> ();
	Assert.AreEqual (0, numbers.Count);

	// Each distinct value is accepted exactly once.
	foreach (var value in new[] { 2, 4, 3 })
		Assert.IsTrue (numbers.Add (value));
	Assert.AreEqual (3, numbers.Count);

	// Re-adding an existing value must report failure.
	Assert.IsFalse (numbers.Add (2));
}
public void Remove ()
{
	var numbers = new SortedSet<int> ();
	Assert.IsTrue (numbers.Add (2));
	Assert.IsTrue (numbers.Add (4));
	Assert.AreEqual (2, numbers.Count);

	// Removing present elements succeeds and empties the set.
	Assert.IsTrue (numbers.Remove (4));
	Assert.IsTrue (numbers.Remove (2));
	Assert.AreEqual (0, numbers.Count);

	// Removing absent elements must report failure.
	Assert.IsFalse (numbers.Remove (4));
	Assert.IsFalse (numbers.Remove (2));
}
/// <summary>
/// Computes all primes in [2, max] using the Sieve of Atkin.
/// Fixes over the previous version:
///  - Parallel.For's upper bound is exclusive, so x == sqrt was never processed;
///    the loop now runs to sqrt + 1 as the algorithm requires.
///  - The old "isPrime[n] ^= true" toggle raced between parallel x iterations
///    (read-modify-write is not atomic); toggles are now counted with
///    Interlocked.Increment and reduced to a parity afterwards.
///  - An empty catch silently swallowed OutOfMemoryException and could return a
///    truncated result; it has been removed.
///  - The hard-coded seeds {2, 3} are only included when max permits them.
/// </summary>
/// <param name="max">Inclusive upper bound; values below 2 yield an empty set.</param>
/// <returns>All primes p with 2 &lt;= p &lt;= max, in ascending order.</returns>
static SortedSet<int> FindPrimesBySieveOfAtkins(int max)
{
    var primes = new SortedSet<int>();
    if (max >= 2) primes.Add(2);
    if (max >= 3) primes.Add(3);
    if (max < 5) return primes;

    var sqrt = (int)Math.Sqrt(max);

    // Count candidate toggles per n; parity decides candidacy.
    // (Can't use BitArray or bool[] toggles here: concurrent toggles race.)
    var toggles = new int[max + 1];
    Parallel.For(1, sqrt + 1, x =>
    {
        var xx = x * x;
        for (int y = 1; y <= sqrt; y++)
        {
            var yy = y * y;
            var n = 4 * xx + yy;
            if (n <= max && (n % 12 == 1 || n % 12 == 5)) Interlocked.Increment(ref toggles[n]);
            n = 3 * xx + yy;
            if (n <= max && n % 12 == 7) Interlocked.Increment(ref toggles[n]);
            n = 3 * xx - yy;
            if (x > y && n <= max && n % 12 == 11) Interlocked.Increment(ref toggles[n]);
        }
    });

    // A number is a candidate prime iff it was toggled an odd number of times.
    var isPrime = new bool[max + 1];
    for (int n = 5; n <= max; n++)
        isPrime[n] = (toggles[n] & 1) == 1;

    // Eliminate multiples of squares of the primes up to sqrt(max).
    for (int n = 5; n <= sqrt; n++)
    {
        if (isPrime[n])
        {
            primes.Add(n);
            int nn = n * n;
            for (int k = nn; k <= max; k += nn) isPrime[k] = false;
        }
    }
    // Collect the remaining survivors above sqrt(max).
    for (int n = sqrt + 1; n <= max; n++)
        if (isPrime[n]) primes.Add(n);
    return primes;
}
/// <summary>
/// Creates collections that map from a type's full name to a suitable
/// collection name. The resulting name is usually simply the name of the
/// type. When there is more than one type with the same name, FullName
/// segments are progressively prepended to the name until there is no
/// ambiguity. Two collections are produced: a dictionary with pluralized
/// names and a set with the singular (full) names.
/// </summary>
/// <param name="pPersistables">Types to be translated.</param>
/// <param name="pSchema">Schema to add names dictionaries.</param>
private static void CreateNamesDictionary(Type[] pPersistables, ref SchemaInfo pSchema)
{
    Dictionary<string, string> lPlural;
    SortedSet<string> lSingular = new SortedSet<string>();
    // Initially maps FullName to pluralized Name ("s" suffix).
    lPlural = pPersistables.ToDictionary(
        lPersistable => lPersistable.ToGenericTypeString(),
        lPersistable => lPersistable.Name + "s");
    foreach (Type type in pPersistables)
        lSingular.Add(type.ToGenericTypeString());
    // Solve name clashes: for every group of types sharing a simple name,
    // SolveNameClash (project helper) returns disambiguated name segments.
    pPersistables
        .ToLookup(lPersistable => lPersistable.Name)
        .Where(lGroup => lGroup.Count() > 1)
        .Select(lGroup => SolveNameClash(lGroup))
        .ToList()
        .ForEach(delegate(Dictionary<string, string[]> lSub)
        {
            foreach (KeyValuePair<string, string[]> lPair in lSub)
            {
                // Singular names just join names.
                // lSingular[lPair.Key] = String.Join("_", lPair.Value);
                // NOTE(review): the line above is disabled, so clashing singular
                // names are NOT rewritten — confirm whether that is intended.
                // Last segment gets pluralized for plural names; this mutates the
                // array returned by SolveNameClash in place.
                lPair.Value[lPair.Value.Count() - 1] = lPair.Value.Last() + "s";
                lPlural[lPair.Key] = String.Join("_", lPair.Value);
            }
        });
    pSchema.SingularNames = lSingular;
    pSchema.TypesNameToPluralName = lPlural;
}
/// <summary>
/// Loads a PDB through the DIA SDK and returns every source-file name it
/// references, case-insensitively de-duplicated and sorted.
/// </summary>
/// <param name="pdbPath">Path of the PDB file to inspect.</param>
/// <returns>Sorted, case-insensitive set of source file names.</returns>
public static ISet<string> ReadSourceFiles(string pdbPath)
{
    // CLSID of the msdia DiaSource COM coclass.
    var diaSourceClsid = new Guid("3BFCEA48-620F-4B6B-81F7-B9AF75454C7D");
    var comType = Type.GetTypeFromCLSID(diaSourceClsid);
    var dataSource = (DiaSource)Activator.CreateInstance(comType);
    dataSource.loadDataFromPdb(pdbPath);

    IDiaSession session;
    dataSource.openSession(out session);
    IDiaEnumTables tables;
    session.getEnumTables(out tables);

    // Only the source-files table is interesting; other tables are skipped.
    var fileNames = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (IDiaTable table in tables)
    {
        var sourceFileTable = table as IDiaEnumSourceFiles;
        if (sourceFileTable != null)
        {
            foreach (IDiaSourceFile sourceFile in sourceFileTable)
                fileNames.Add(sourceFile.fileName);
        }
    }
    return fileNames;
}
static void Main(string[] args)
{
    // Project Euler 87 style: count numbers below the target expressible as
    // prime^2 + prime^3 + prime^4 (7071^2 > 50,000,000, so primes up to 7071 suffice).
    long[] basePrimes = ESieve(7071);
    int limit = 50000000;

    // exponentTables[e] holds every prime raised to the power e + 2
    // (index 0 = squares, 1 = cubes, 2 = fourth powers).
    long[][] exponentTables = new long[3][];
    var running = new List<long>(basePrimes);
    for (int exp = 0; exp < 3; exp++)
    {
        for (int idx = 0; idx < basePrimes.Length; idx++)
            running[idx] *= basePrimes[idx];
        exponentTables[exp] = running.ToArray();
    }

    // Collect the distinct sums; the set removes duplicates automatically.
    var sums = new SortedSet<long>();
    foreach (long square in exponentTables[0])
    {
        foreach (long cube in exponentTables[1])
        {
            foreach (long fourth in exponentTables[2])
            {
                long candidate = square + cube + fourth;
                // Fourth powers grow monotonically, so the rest of this row is too big.
                if (candidate > limit) break;
                sums.Add(candidate);
            }
        }
    }

    Console.Write(sums.Count);
    Console.WriteLine();
}
/// <summary>
/// Recursively reconstructs every longest common subsequence of the prefixes
/// s1[0..i) and s2[0..j) from a precomputed LCS length matrix.
/// </summary>
/// <param name="s1">First string.</param>
/// <param name="s2">Second string.</param>
/// <param name="i">Prefix length into s1.</param>
/// <param name="j">Prefix length into s2.</param>
/// <param name="matrix">(s1.Length+1) x (s2.Length+1) LCS length DP table.</param>
/// <returns>All distinct LCS strings for the given prefixes.</returns>
private static SortedSet<string> BackTrack(string s1, string s2, int i, int j, int[,] matrix)
{
    // Base case: an empty prefix contributes only the empty subsequence.
    if (i == 0 || j == 0)
        return new SortedSet<string> { "" };

    if (s1[i - 1] == s2[j - 1])
    {
        // Matching tail characters: extend every LCS of the shorter prefixes.
        var extended = new SortedSet<string>();
        foreach (string prefix in BackTrack(s1, s2, i - 1, j - 1, matrix))
            extended.Add(prefix + s1[i - 1]);
        return extended;
    }

    // Mismatch: follow whichever neighbor(s) preserve the LCS length;
    // ties explore both directions so every LCS is found.
    var merged = new SortedSet<string>();
    if (matrix[i - 1, j] >= matrix[i, j - 1])
        merged.UnionWith(BackTrack(s1, s2, i - 1, j, matrix));
    if (matrix[i, j - 1] >= matrix[i - 1, j])
        merged.UnionWith(BackTrack(s1, s2, i, j - 1, matrix));
    return merged;
}
/// <summary>
/// Builds a module that provides its own name and, unless it is the init
/// module itself, depends on the init module.
/// </summary>
/// <param name="name">Module name; validated by Helpers.RequireName.</param>
public Module(string name)
{
    Name = Helpers.RequireName(name);
    Script = new StringBuilder();
    WorkerId = Worker.Any;

    // Every module provides itself.
    Provides = new SortedSet<string> { name };

    // Every module except the init module implicitly requires the init module.
    Requires = new SortedSet<string>();
    if (name != InitModuleName)
        Requires.Add(InitModuleName);
}
/// <summary>
/// Demonstrates the generic Stack, Queue and SortedSet collections with
/// Product instances, printing a peek from each and the sorted contents.
/// </summary>
public void GenericStack()
{
    // LIFO: the most recently pushed product sits on top.
    var productStack = new Stack<Product>();
    productStack.Push(new Product(001, "mouse stack", 12));
    productStack.Push(new Product(002, "cookies", 20));
    Console.WriteLine("\npeek: " + productStack.Peek());

    // FIFO: the first product enqueued is at the head.
    var productQueue = new Queue<Product>();
    productQueue.Enqueue(new Product(001, "mouse q", 12));
    productQueue.Enqueue(new Product(002, "coffee", 4.78));
    Console.WriteLine(productQueue.Peek());

    // Product itself acts as the comparer that orders the set.
    var sortedProducts = new SortedSet<Product>(new Product());
    var samples = new[]
    {
        new Product(001, "mouse", 19.78),
        new Product(002, "hello", 200),
        new Product(004, "cup", 4.6),
        new Product(003, "cake", 1.56),
        new Product(005, "tv", 120),
    };
    foreach (var sample in samples)
        sortedProducts.Add(sample);

    Console.WriteLine("\nsorted set");
    foreach (Product item in sortedProducts)
        Console.WriteLine(item);
}
/// <summary>
/// Loads the XML catalogue and returns the distinct artist names in
/// alphabetical order.
/// </summary>
/// <returns>Sorted set of artist names found under each album node.</returns>
private static SortedSet<string> GetArtists()
{
    var artistNames = new SortedSet<string>();

    var catalogueDocument = new XmlDocument();
    catalogueDocument.Load(XmlCatalogueFilePath);

    XmlNode catalogueRoot = catalogueDocument.DocumentElement;
    foreach (XmlNode albumNode in catalogueRoot.ChildNodes)
    {
        // Albums without an <artist> child element are skipped.
        var artistElement = albumNode[@"artist"];
        if (artistElement != null)
            artistNames.Add(artistElement.InnerText);
    }

    return artistNames;
}
/// <summary>
/// Verifies that a case-insensitive SortedSet enumerates its elements in the
/// same order as a list sorted with the same comparer.
/// Fixes: the legacy non-generic ArrayList is replaced by List&lt;string&gt;
/// (the rest of the file uses generics), and Assert.AreEqual's arguments were
/// swapped — it takes (expected, actual), so failures reported misleading
/// messages.
/// </summary>
public void OrderedCaseInsensitiveEnumeration()
{
    var expectedOrder = new List<string>(3) { "ONE", "two", "tHree" };

    SortedSet<string> theSet = new SortedSet<string>(StringComparer.CurrentCultureIgnoreCase);
    foreach (string str in expectedOrder)
        theSet.Add(str);

    // Sort the reference list with the identical comparer used by the set.
    expectedOrder.Sort(StringComparer.CurrentCultureIgnoreCase);

    int index = 0;
    foreach (string str in theSet)
    {
        Assert.AreEqual(expectedOrder[index], str, index.ToString() + " did not have same value");
        index++;
    }
}
/// <summary>
/// Returns up to <paramref name="maxResults"/> products whose price falls in
/// the inclusive range [minPrice, maxPrice], ordered by the product comparer.
/// The previous version hard-coded the cap at 10; it is now a parameter whose
/// default preserves the old behavior.
/// </summary>
/// <param name="minPrice">Inclusive lower price bound.</param>
/// <param name="maxPrice">Inclusive upper price bound.</param>
/// <param name="maxResults">Maximum number of products to return (default 10).</param>
/// <returns>Sorted set of matching products, capped at <paramref name="maxResults"/>.</returns>
public IEnumerable<Product> FilterByPrice(double minPrice, double maxPrice, int maxResults = 10)
{
    // GetViewBetween yields the distinct prices inside the range without copying.
    var prices = this.allPrices.GetViewBetween(minPrice, maxPrice);
    var filteredProducts = new SortedSet<Product>();
    int taken = 0;
    foreach (var price in prices)
    {
        foreach (var product in this.productsByPrice[price])
        {
            // Stop as soon as the cap is reached.
            if (taken == maxResults)
            {
                return filteredProducts;
            }
            filteredProducts.Add(product);
            taken++;
        }
    }
    return filteredProducts;
}
/// <summary>
/// Opens (or creates) the page-based database file (".hnd"), recovers from a
/// previous crash using the ".hlg" log when present, then rebuilds the
/// in-memory table/field/page maps by scanning every page of the file, and
/// finally ensures the built-in sequence and alter-table bookkeeping tables
/// exist. Serialized via the TableBlocking lock.
/// </summary>
/// <param name="fname">Database path; a ".hnd" extension is optional.</param>
public void Open(string fname)
{
	lock(this.TableBlocking)
	{
		RaiseExceptionIfOpened();
		// Normalize: strip a trailing ".hnd" then re-append it to the full path.
		if(fname.ToLower().EndsWith(".hnd")) fname=fname.Substring(0,fname.Length-4);
		DatabaseFilePath=System.IO.Path.GetFullPath(fname)+".hnd";
		// Initial values
		if(!File.Exists(this.DatabaseFilePath))
		{
			try { fsDB = new FileStream(this.DatabaseFilePath,FileMode.Create,FileAccess.ReadWrite,FileShare.None,8*1024); }
			catch { throw new Exception("Can't create file."); }
		}
		else
		{
			try { fsDB = new FileStream(this.DatabaseFilePath,FileMode.Open,FileAccess.ReadWrite,FileShare.None,8*1024); }
			catch { throw new Exception("Database in use."); }
		}
		// Truncate any trailing partial page so the length is a whole page multiple.
		long len = (fsDB.Length/PageSize);
		len*=PageSize;
		if(fsDB.Length>len)
		{
			this.LogToFile("Warning","File size fixed.");
			fsDB.SetLength(len);
		}
		slFID2Pages = new SortedList();
		TableName2TID = new Hashtable();
		TID2Def = new Hashtable();
		pcInit();
		//PagesInUse = new SortedSet();
		DeletedPages = new SortedSet();
		br = new BinaryReader(fsDB,System.Text.Encoding.Unicode);
		bw = new BinaryWriter(fsDB,System.Text.Encoding.Unicode);
		// Crash recovery: replay the ".hlg" log file if one was left behind.
		if(true)
		{
			string lfn = DatabaseFilePath+".hlg";
			if(File.Exists(lfn))
			{
				FileStream lf = new FileStream(lfn,FileMode.Open,FileAccess.ReadWrite,FileShare.None);
				BinaryReader lfr = new BinaryReader(lf,System.Text.Encoding.Unicode);
				try
				{
					// Leading boolean marks the log as pending (crash happened mid-operation).
					if((lfr.BaseStream.Length>0)&&lfr.ReadBoolean())
					{// recover from last crash
						byte logtype = lfr.ReadByte();
						if(logtype==0)
						{// delete-pages op: re-apply the "deleted" flag to each logged page
							this.LogToFile("Warning","Deleted pages fixed.");
							ArrayList al = new ArrayList();
							int cnt = lfr.ReadInt32();
							for(int n=0;n<cnt;n++)
							{
								al.Add( lfr.ReadInt32() );
							}
							for(int n=0;n<cnt;n++)
							{
								bw.BaseStream.Position=PageSize*( (int)al[n] );
								bw.Write( true ); // deleted
							}
							bw.Flush();
							lf.SetLength(0);
							lf.Flush();
						}
						if(logtype==1)
						{// rollback op: restore the saved page images
							this.LogToFile("Warning","Rollback modified pages.");
							int pcount = lfr.ReadInt32(); // num of pages
							for(int p=0;p<pcount;p++)
							{
								int page = lfr.ReadInt32();
								fsDB.Position=PageSize*page;
								byte[] buf = lfr.ReadBytes( Database.PageSize );
								bw.Write( buf );
							}
							bw.Flush();
							lf.SetLength(0);
							lf.Flush();
						}
					}
				}
				catch
				{
					Close();
					throw new Exception("Can't recover from last crash.");
				}
				finally
				{
					lf.Close();
				}
			}
		}
		// Pages that turned out invalid/duplicated; flagged deleted at the end.
		ArrayList pagePurgatory = new ArrayList();
		Hashtable htFieldsByTID = new Hashtable();// contains Hashtables of field defs keyed by field seq
		Hashtable htDataByTID = new Hashtable(); // key: tid + fieldseq + dataseq = pageno
		Set ProcessedPages = new SortedSet();
		#region 1st Pass: Scan deleted pages and table pages
		NextFID=-1;
		try
		{
			int pos=0; // page counter
			fsDB.Position=0;
			while(fsDB.Position<fsDB.Length)
			{
				// read this page's header
				long ptr = br.BaseStream.Position;
				bool bPageIsDeleted = br.ReadBoolean();
				if(bPageIsDeleted)
				{
					ProcessedPages.Add(pos);
					this.DeletedPages.Add(pos);
				}
				else
				{
					byte bPageType = br.ReadByte();
					int fid = br.ReadInt32();
					if(bPageType==TablePageType)
					{
						// Table definition page: register the table under its fid.
						ProcessedPages.Add(pos);
						TableNameDef tnd = new TableNameDef(fid,pos);
						tnd.fseq=br.ReadInt32();
						tnd.rownum=br.ReadInt64();
						tnd.tname = br.ReadString();
						TID2Def[fid]=tnd;
						TableName2TID[tnd.tname]=fid;
					}
					else if(bPageType==FieldPageType)
					{// Page is a field def, store it for further processing
						ProcessedPages.Add(pos);
						int tid = fid;
						//TableNameDef tnd = TID2Def[tid] as TableNameDef;
						Field fld = new Field();
						fld.Read(br);// 4-field
						fld.tid=tid;
						fld.PageOfFieldSef=pos;
						if(!htFieldsByTID.ContainsKey(tid))
							htFieldsByTID[tid]=new Hashtable();
						Hashtable htFieldsBySeq = htFieldsByTID[tid] as Hashtable;
						// avoid repeated fields: a second def with the same name is purged
						bool bAvoid=false;
						foreach(Field f in htFieldsBySeq.Values)
						{
							if(f.Name==fld.Name)
							{
								bAvoid=true;
								break;
							}
						}
						if(!bAvoid)
						{
							htFieldsBySeq[fld.seq]=fld;
							//tnd.fseq2FieldDef[fld.seq]=fld;
						}
						else
						{
							pagePurgatory.Add(pos);
						}
					}
					else if(bPageType==ContentPageType)
					{
						// Data page: remember its position indexed by (tid, field seq, order).
						int tid = fid;
						if(!htDataByTID.ContainsKey(tid))
							htDataByTID[tid]=new Hashtable();
						Hashtable htDataByFSeq = htDataByTID[tid] as Hashtable;
						long fseq = br.ReadInt32(); // 4th header item: seq of field
						if(!htDataByFSeq.ContainsKey(fseq))
							htDataByFSeq[fseq]=new ArrayList();
						ArrayList alDataByOrder = htDataByFSeq[fseq] as ArrayList;
						int seq = br.ReadInt32(); // 5th header item: data page order
						// Pad with -1 placeholders so the page lands at index seq.
						while(alDataByOrder.Count<=seq)
							alDataByOrder.Add(-1);
						alDataByOrder[seq]=pos;
					}
					NextFID = Math.Max( NextFID, fid );
					PeekPagesByFID(fid).Add(pos);
				}
				// Jump to the next page boundary regardless of how much was read.
				fsDB.Position = Database.PageSize + ptr;
				pos++;
			}
			NextFID++;
		}
		catch(Exception ex)
		{
			this.LogToFile(ex.Message,ex.StackTrace);
			this.Close();
			throw new Exception("Database corrupted.");
		}
		#endregion
		#region 2nd Pass: Field integration
		// Attach the field definitions collected in pass 1 to their tables.
		foreach(int tid in htFieldsByTID.Keys)
		{
			TableNameDef tnd = TID2Def[tid] as TableNameDef;
			Hashtable htFieldsBySeq = htFieldsByTID[tid] as Hashtable;
			foreach(long seq in htFieldsBySeq.Keys)
			{
				tnd.fseq2FieldDef[seq]=htFieldsBySeq[seq];
			}
		}
		// (A superseded single-pass implementation that re-scanned the file here
		// was left commented out in the original; removed as dead code.)
		#endregion
		#region 3nd Pass: Locate data for fields
		try
		{
			// Attach the data-page lists collected in pass 1 to their fields;
			// data for unknown field seqs goes to the purgatory list.
			foreach(int tid in htDataByTID.Keys)
			{
				TableNameDef tnd = TID2Def[tid] as TableNameDef;
				Hashtable htDataByFSeq = htDataByTID[tid] as Hashtable;
				foreach(long seq in htDataByFSeq.Keys)
				{
					ArrayList alDataByOrder = htDataByFSeq[seq] as ArrayList;
					if(!tnd.fseq2FieldDef.ContainsKey(seq))
					{
						pagePurgatory.AddRange( alDataByOrder );
					}
					else
					{
						Field fld = tnd.fseq2FieldDef[seq] as Field;
						fld.DataFID=alDataByOrder;
					}
				}
			}
			// (A superseded re-scan implementation was left commented out here in
			// the original; removed as dead code.)
			// Any -1 placeholder left means a data page is missing: corruption.
			foreach(TableNameDef tnd in TID2Def.Values)
				foreach(Field f in tnd.fseq2FieldDef.Values)
					foreach(int page in f.DataFID)
						if(page==-1)
							throw new Exception("Database corrupted.");
		}
		catch(Exception ex)
		{
			this.LogToFile(ex.Message,ex.StackTrace);
			this.Close();
			throw new Exception("Database corrupted.");
		}
		#endregion
		// Grow each field's data pages until it can hold the table's row count.
		foreach(TableNameDef tnd in TID2Def.Values)
			foreach(Field f in tnd.fseq2FieldDef.Values)
			{
				// grow if it is needed
				if(tnd.rownum>0)
				{
					int valSize = (int)f.DataSize();
					long Capacity = (PageSize-ContentPageDataOffset)/valSize;
					ArrayList pages = f.DataFID;
					while((pages.Count*Capacity)<tnd.rownum)
					{
						int datapage = this.LockAvaiblePage();
						// Write the page marked deleted first; it is only flagged live
						// (false) after its header and default data are fully written.
						bw.BaseStream.Position = (datapage*PageSize);
						bw.Write( true );
						bw.Flush();
						bw.Write( (byte)Database.ContentPageType );
						bw.Write( tnd.TableFID );
						bw.Write( (int)f.seq );
						bw.Write( f.DataFID.Count );
						bw.Flush();
						for(int c=0;c<Capacity;c++)
						{
							bw.BaseStream.Position = (datapage*PageSize)+ContentPageDataOffset+c*valSize;
							f.WriteDefaultData(bw,false);
						}
						bw.Flush();
						bw.BaseStream.Position = (datapage*PageSize);
						bw.Write( (bool)false );
						bw.Flush();
						pages.Add(datapage);
						PeekPagesByFID(tnd.TableFID).Add(datapage);
						this.InvalidatePage(datapage);
					}
				}
			}
		// Built-in sequences table.
		this.AddTableIfNotExist(tblSequences);
		this.AddFieldIfNotExist(tblSequences, new Field("SEQNAME","",FieldIndexing.Unique,40));
		this.AddFieldIfNotExist(tblSequences, new Field("SEQVALUE",(long)0,FieldIndexing.None));
		this.AddFieldIfNotExist(tblSequences, new Field("SEQINCREMENT",(long)1,FieldIndexing.None));
		this.AddFieldIfNotExist(tblSequences, new Field("SEQLOOP",false,FieldIndexing.None));
		this.AddFieldIfNotExist(tblSequences, new Field("SEQMAXVALUE",long.MaxValue,FieldIndexing.None));
		// Built-in alter-table bookkeeping table.
		this.AddTableIfNotExist(tblAlterTbl);
		this.AddFieldIfNotExist(tblAlterTbl, new Field("TNAME","",FieldIndexing.None,80));
		this.AddFieldIfNotExist(tblAlterTbl, new Field("FSRC","",FieldIndexing.None,80));
		this.AddFieldIfNotExist(tblAlterTbl, new Field("FTMP","",FieldIndexing.None,80));
		this.AddFieldIfNotExist(tblAlterTbl, new Field("STATE",(int)1,FieldIndexing.None));
		// Purge pages: mark every page collected in purgatory as deleted.
		foreach(int i in pagePurgatory)
		{
			if(i==-1) continue; // placeholder entries carry no page number
			bw.BaseStream.Position = (i*PageSize);
			bw.Write( true );
			bw.Flush();
		}
	}
	GC.Collect();
	GC.WaitForPendingFinalizers();
}
/// <summary>
/// Returns k unique random numbers in the range [1, n], inclusive — equivalent
/// to taking the first k entries of a uniformly random permutation of 1..n.
/// Runs in O(k^2) time.
/// </summary>
/// <param name="rand">Source of randomness.</param>
/// <param name="n">Maximum number possible.</param>
/// <param name="k">How many numbers to return.</param>
/// <returns>Array of k distinct integers in [1, n].</returns>
public static int[] Permutation(this Random rand, int n, int k)
{
    var chosen = new List<int>();
    var alreadyTaken = new SortedSet<int>();
    for (var draw = 0; draw < k; draw++)
    {
        // Draw from the range shrunk by the values already taken, then shift the
        // candidate past each taken value <= it. Iterating in ascending order
        // guarantees every applicable shift is applied exactly once.
        var candidate = rand.Next(1, n + 1 - draw);
        foreach (var used in alreadyTaken)
        {
            if (candidate >= used)
                candidate++;
        }
        chosen.Add(candidate);
        alreadyTaken.Add(candidate);
    }
    return chosen.ToArray();
}
/// <summary>
/// Collects the joint types referenced by the starting criteria of the given
/// skeleton stamp. NOTE(review): the final loop body is empty and the method
/// always returns null — this looks like an unfinished stub; confirm intent
/// before relying on the return value.
/// </summary>
/// <param name="skeletonStamp">Stamp providing the tracked skeleton and criterion matching.</param>
/// <returns>Always null in the current implementation.</returns>
public Skeleton createStartingSkeleton(SkeletonStamp skeletonStamp)
{
    // skeleton is fetched but not used below — presumably intended for the
    // unfinished path-finding step; verify.
    Skeleton skeleton = skeletonStamp.GetTrackedSkeleton();
    /* Determine the joint types to calculate */
    ISet<JointType> jointTypes = new SortedSet<JointType>();
    foreach (Criterion criterion in StartingCriteria)
    {
        foreach (Joint jointType in criterion.MatchSkeletonToCriterion(skeletonStamp))
        {
            jointTypes.Add(jointType.JointType);
        }
    }
    /* find path to take from parent to child */
    foreach (JointType jointType in jointTypes)
    {
        // TODO(review): loop body was left empty in the original.
    }
    return null;
}
/// <summary>
/// Evaluates a conjunction of WHERE conditions against a table and returns the
/// set of matching row ids. Conditions are reordered so index-friendly ones run
/// first (unique "=" lookups, then indexed "=", then indexed range ops), and any
/// condition that cannot be fully resolved by an index falls back to a linear
/// scan of the candidate rows' stored values.
/// </summary>
/// <param name="From_TableName">Table to search.</param>
/// <param name="Where_NameCondValue">n x 3 array of (column, operator, value); null means no conditions.</param>
/// <param name="ROWIDS">Receives the set of row ids satisfying all conditions.</param>
internal void ExecuteWhere(string From_TableName, object[,] Where_NameCondValue, out Set ROWIDS)
{
	string TableName=From_TableName;
	if(Where_NameCondValue==null) Where_NameCondValue=new object[0,0];
	Field[] flds = GetFields(TableName);
	// Assertion: every secondary index must agree with the primary (deleted-flag)
	// index on the number of live rows.
	if(true)
	{
		Index DELNDX = GetIndex(From_TableName,flds[0]);
		int __rows1 = DELNDX.GetRowCountForKey(false); // non-deleted rows count
		for(int k=1;k<flds.Length;k++)
		{
			if(flds[k].bIndexed)
			{
				Index _NDX = GetIndex(From_TableName,flds[k]);
				if(_NDX.ReverseCount()!=__rows1) throw new Exception("Corruption error.");
			}
		}
	}
	// map of fields in table
	Hashtable FieldMap = new Hashtable();
	foreach(Field f in flds)
	{
		FieldMap[f.Name]=f;
	}
	// Validate each condition and convert its value to the field's type.
	ArrayList Where = new ArrayList();
	Hashtable UsedFieldMap = new Hashtable(); // tracked but not read below
	for(int n=0;n<Where_NameCondValue.GetLength(0);n++)
	{
		string fieldname = Where_NameCondValue[n,0].ToString();
		string op = Where_NameCondValue[n,1].ToString();
		object val = Where_NameCondValue[n,2];
		bool oprec = (op=="=")||(op==">")||(op=="<")||(op=="!=")||(op==">=")||(op=="<=");
		if(!oprec) throw new Exception("Operand '"+op+"' unrecognized");
		if(!FieldMap.ContainsKey(fieldname)) throw new Exception("Column "+fieldname+" unrecognized");
		Field f = FieldMap[fieldname] as Field;
		UsedFieldMap[f]=f;
		// Lower priority value = evaluated earlier (cheaper via index).
		int priority=0;
		if(f.bIndexed&&f.bUnique&&(op=="=")) priority=-4;
		else if(f.bIndexed&&(op=="=")) priority=-3;
		else if(f.bIndexed&&f.bUnique&&((op=="<")||(op==">")||(op==">=")||(op=="<="))) priority=-2;
		else if(f.bIndexed&&((op=="<")||(op==">")||(op==">=")||(op=="<="))) priority=-1;
		object v=Variant.Object2Variant(val,f.type).obj;
		Where.Add(new object[]{f,op,v,priority});
	}
	// Sorting of conditions to make search faster
	if(true)
	{
		// Order:
		//   Indexed-unique with = condition       -4
		//   Indexed with = condition              -3
		//   Indexed-unique with < or > condition  -2
		//   Indexed with < or > condition         -1
		//   The rest                               0
		Where.Sort(new WhereComp());
	}
	// Let's do the search: ROWIDS narrows as each condition is applied.
	if(Where.Count>0)
	{
		ROWIDS = null;
		for(int n=0;(n<Where.Count);n++)
		{
			Field f = (Where[n] as object[])[0] as Field;
			string op = (Where[n] as object[])[1] as string;
			object val = (Where[n] as object[])[2] as object;
			if(f.bIndexed)
			{
				if(f.bUnique)
				{
					if(op=="=")
					{
						// Unique index equality: at most one row can match.
						Index ndx = this.GetIndex(TableName,f);
						if(ndx.ExistsKey(val))
						{
							long row = ndx.PeekOne(val);
							if(ROWIDS==null)
							{
								ROWIDS = new SortedSet();
								ROWIDS.Add( row );
								continue;
							}
							else if(ROWIDS.Contains( row ))
							{
								ROWIDS = new SortedSet();
								ROWIDS.Add( row );
								continue;
							}
							else
							{
								// Row excluded by earlier conditions: empty result.
								ROWIDS = new SortedSet();
								break;
							}
						}
						else
						{
							ROWIDS = new SortedSet();
							break;
						}
					}
					// (An unfinished "beta" block handling != on unique indexes was
					// left commented out here in the original; removed as dead code.
					// Unique-index conditions with other operators fall through to
					// the linear search below.)
				}
				else
				{// non-unique key
					// Seed the candidate set with all live rows on first use.
					if(ROWIDS==null)
					{
						Index ndx = this.GetIndex(TableName,flds[0]);
						ROWIDS = new SortedSet( ndx.GetRowSet(false) );
					}
					// begin op =
					if(op=="=")
					{
						Index ndx = this.GetIndex(TableName,f);
						Set hs = ndx.GetRowSet(val);
						// Intersect by iterating the smaller set for speed.
						if(hs.Count<ROWIDS.Count)
						{// hs is smaller than ROWIDS
							Set newSet = new SortedSet();
							foreach(long row in hs)
							{
								if(ROWIDS.Contains(row)) newSet.Add(row);
							}
							ROWIDS=newSet;
						}
						else
						{// ROWIDS is smaller than hs
							Set newSet = new SortedSet();
							foreach(long row in ROWIDS)
							{
								if(hs.Contains(row)) newSet.Add(row);
							}
							ROWIDS=newSet;
						}
						//ROWIDS = ROWIDS.Intersect( hs);
						if(ROWIDS.Count==0) break;
						else continue;
					}
					// end of op =
					// begin op !=
					if(op=="!=")
					{
						Index ndx = this.GetIndex(TableName,f);
						Set hs = ndx.GetRowSet(val);
						ROWIDS = ROWIDS.Minus( hs);
						if(ROWIDS.Count==0) break;
						else continue;
					}
					// end of op !=
					// begin op >
					if((op=="<")||(op==">")||(op=="<=")||(op==">="))
					{
						Index ndx = this.GetIndex(TableName,f);
						// Collect the row ids of every index key satisfying the
						// range condition, then intersect with the candidates.
						Set nSet = new SortedSet();
						IComparable v = (IComparable)val;
						foreach(object key in ndx.Keys)
						{
							IComparable o = (IComparable)key;
							bool bAdd=false;
							if((op==">")&&(o.CompareTo(v)>0)) bAdd=true;
							else if((op=="<")&&(o.CompareTo(v)<0)) bAdd=true;
							else if((op==">=")&&(o.CompareTo(v)>=0)) bAdd=true;
							else if((op=="<=")&&(o.CompareTo(v)<=0)) bAdd=true;
							if(bAdd) nSet = nSet.Union( ndx.GetRowSet(key) );
						}
						ROWIDS = ROWIDS.Intersect( nSet );
						if(ROWIDS.Count==0) break;
						continue;
					}
					else
					{
						throw new Exception("Unsupported operator");
					}
					// end of op >
				}
			}
			// Linear search: reached for non-indexed fields and for unique-index
			// conditions with operators other than "=".
			if(ROWIDS==null)
			{
				Index ndx = this.GetIndex(TableName,flds[0]);
				ROWIDS = new SortedSet( ndx.GetRowSet(false) );
			}
			if(true)
			{
				int tid = (int)TableName2TID[TableName];
				TableNameDef tnd = TID2Def[tid] as TableNameDef;
				int valSize = (int)f.DataSize();
				int Capacity = (PageSize-ContentPageDataOffset)/valSize;
				ArrayList pages = f.DataFID;
				if((pages.Count*Capacity)<tnd.rownum) throw new Exception("Row num corrupted.");
				ArrayList new_rowids = new ArrayList(); // NOTE(review): unused in the original
				for(int row=0;row<tnd.rownum;row++)
				{
					long rowid = (long)row;
					if(ROWIDS.Contains(rowid))
					{
						// Locate the stored value: page number + offset within page.
						int npage = row / Capacity;
						int offset = row % Capacity;
						int page = (int)pages[npage];
						//br.BaseStream.Position = (page*PageSize)+ContentPageDataOffset+offset*valSize;
						//object data = f.ReadData(br);
						object data = f.ReadData( this.PageReader(page,ContentPageDataOffset+offset*valSize) );
						IComparable o = (IComparable)data;
						IComparable v = (IComparable)val;
						// Keep the row when the condition holds; otherwise drop it.
						if((op=="=")&&(o.CompareTo(v)==0)) {}
						else if((op==">")&&(o.CompareTo(v)>0)) {}
						else if((op=="<")&&(o.CompareTo(v)<0)) {}
						else if((op=="!=")&&(o.CompareTo(v)!=0)) {}
						else if((op==">=")&&(o.CompareTo(v)>=0)) {}
						else if((op=="<=")&&(o.CompareTo(v)<=0)) {}
						else ROWIDS.Remove( rowid );
					}
				}
				if(ROWIDS.Count==0) break;
			}
		}
	}
	else
	{
		// No conditions: every live row matches.
		Index ndx = this.GetIndex(TableName,flds[0]);
		ROWIDS = new SortedSet( ndx.GetRowSet(false) );
	}
	// Assertion: re-check index consistency after the search.
	if(true)
	{
		Index DELNDX = GetIndex(From_TableName,flds[0]);
		int __rows1 = DELNDX.GetRowCountForKey(false); // non-deleted rows count
		for(int k=1;k<flds.Length;k++)
		{
			if(flds[k].bIndexed)
			{
				Index _NDX = GetIndex(From_TableName,flds[k]);
				if(_NDX.ReverseCount()!=__rows1) throw new Exception("Corruption error.");
			}
		}
	}
}
/// <summary>
/// Recursively collects all the types this object represents: each element type
/// for enumerables, the generic arguments plus the constructed type for generic
/// values, and the marshaled public type otherwise.
/// Fix: System.Type does not implement IComparable, so the previous
/// parameterless SortedSet&lt;Type&gt; threw InvalidOperationException as soon
/// as it had to compare two elements; an explicit ordinal comparer on
/// AssemblyQualifiedName is supplied instead.
/// </summary>
/// <param name="value">Object whose represented types are gathered.</param>
/// <returns>Sorted set of the types the value represents.</returns>
private SortedSet<Type> GetTypesForImport(Object value)
{
    Type valueType = value.GetType();
    // AssemblyQualifiedName is unique per constructed type; fall back to Name
    // for open generic parameters, whose qualified name is null.
    SortedSet<Type> set = new SortedSet<Type>(Comparer<Type>.Create(
        (a, b) => string.CompareOrdinal(a.AssemblyQualifiedName ?? a.Name,
                                        b.AssemblyQualifiedName ?? b.Name)));
    IEnumerable collection = value as IEnumerable;
    if (collection != null)
    {
        // Enumerable: gather the types of every contained item.
        foreach (var item in collection)
        {
            set.UnionWith(GetTypesForImport(item));
        }
    }
    else if (valueType.IsGenericType)
    {
        Type generictype = valueType.GetGenericTypeDefinition();
        set.UnionWith(generictype.GetGenericArguments());
        //also add the valueType to be used as raw data
        set.Add(valueType);
    }
    else
        set.Add(CLRObjectMarshler.GetPublicType(valueType));
    return set;
}
/// <summary>
/// Pushes the version taken as a parameter.
/// Will return any changes that conflict with the server's current files.
/// IMPORTANT: The version is not committed at the server before null/empty is returned!
/// Fix: the complex-changes loop iterated to <c>complexChanges.Length</c>, which
/// for a 2-D array is rows * 2 and indexed out of range whenever at least one
/// complex change existed; it now uses <c>GetLength(0)</c> (the row count).
/// </summary>
/// <param name="path">The path to the repository to push</param>
/// <param name="number">The version number used to identify the right version</param>
/// <param name="log">The changes given by a number of text lines.</param>
/// <param name="user">The user to submit changes.</param>
/// <returns>
/// Any changes that conflict, or an empty array if the version is committed
/// </returns>
/// <exception cref="System.ArgumentNullException">Log can't be null</exception>
/// <exception cref="System.ArgumentException">Log can't be empty</exception>
public string[][] PushVersion(String path, int number, string[] log, User user)
{
    // NOTE(review): these messages are passed as ArgumentNullException's
    // paramName argument in the original; preserved to keep behavior identical.
    if (log == null) throw new ArgumentNullException("Log can't be null");
    if (log.Length < 1) throw new ArgumentException("Log can't be empty");
    if (user == null) throw new ArgumentNullException("User can't be null");
    if (String.IsNullOrEmpty(user.Name)) throw new ArgumentException("User's name is invalid: " + user.Name);
    if (String.IsNullOrEmpty(user.Password)) throw new ArgumentException("User's password is invalid");
    if (user.MAC == null) throw new ArgumentNullException("User's MAC address is invalid (" + user.MAC + " and can therefore not push");
    Console.WriteLine("[" + DateTime.Now + "]: " + "User is pushing to the server: " + user.MAC);
    //See if the user is known - if not: Throw an exception
    //Connect(user);
    var returnArray = new string[0][];
    // Test that the version number is correct - otherwise we'll just store things at the wrong places.
    var vcs = new ServerVersionControlSystem(path, _fileSystem);
    var currentVersion = vcs.GetLatestVersionNumber();
    if (currentVersion != number - 1)
    {
        throw new ArgumentException("Can not push version " + number + " to the server version " + currentVersion);
    }
    // If the last user that attempted to push is the same as the current user, no conflicts will occur.
    if (PushedLastVersion(user))
    {
        // We can assume that everything has been resolved client side and execute all the changes made
        IList<AbstractChange> changeList = ChangeParser.ParseChanges(log).Where(item => item != null).ToList();
        // Store the version
        SaveChanges(number, user, vcs, changeList);
    }
    else
    {
        String[] simpleChanges = SearchForSimpleChanges(log);
        String[,] complexChanges = SearchForComplexChanges(log);
        ISet<String> conflictingPathsOnServerSet = new SortedSet<string>();
        ISet<String> conflictingPathsOnClientSet = new SortedSet<string>();
        //Get paths to all text files in project directory and all sub directories.
        String absolutePath = vcs.GetFileSystemRoot() + path;
        IList<String> projectFilesList = new List<string>(Directory.GetFileSystemEntries(absolutePath, "*", SearchOption.AllDirectories).Where(s => s.Contains(FileSystem.META_DIR) == false));
        // Normalize to repository-relative paths before conflict discovery.
        for (int i = 0; i < projectFilesList.Count; i++)
        {
            projectFilesList[i] = projectFilesList[i].Substring(vcs.GetFileSystemRoot().Length);
        }
        // Discover potential conflicts: a changed path that already exists on the
        // server is a conflict candidate.
        foreach (String filePath in simpleChanges)
        {
            if (projectFilesList.Contains(filePath))
            {
                conflictingPathsOnServerSet.Add(filePath);
                conflictingPathsOnClientSet.Add(filePath);
            }
        }
        // Complex changes pair a server path [i, 0] with a client path [i, 1];
        // GetLength(0) iterates the rows (Length would be rows * 2 and overrun).
        for (int i = 0; i < complexChanges.GetLength(0); i++)
        {
            if (projectFilesList.Contains(complexChanges[i, 0]))
            {
                conflictingPathsOnServerSet.Add(complexChanges[i, 0]);
                conflictingPathsOnClientSet.Add(complexChanges[i, 1]);
            }
        }
        // If no conflicts arise we can save the change to the file system
        if (conflictingPathsOnServerSet.Count == 0)
        {
            // NOTE(review): unlike the fast path above, nulls from ParseChanges are
            // not filtered here in the original — confirm whether that is intended.
            SaveChanges(number, user, vcs, ChangeParser.ParseChanges(log));
        }
        else
        {
            // Otherwise we find the conflicting paths and return the contents of
            // the file on the server for the client to merge
            IList<String> conflictingPathsOnServer = new List<string>(conflictingPathsOnServerSet);
            IList<String> conflictingPathsOnClient = new List<string>(conflictingPathsOnClientSet);
            var list = new List<string[]>();
            for (int i = 0; i < conflictingPathsOnServer.Count; i++)
            {
                // Each entry: client path first, followed by the server file's lines.
                var fileList = new List<string>() { conflictingPathsOnClient[i] };
                fileList.AddRange(vcs.ReadAllLines(conflictingPathsOnServer[i]));
                list.Add(fileList.ToArray());
            }
            returnArray = list.ToArray();
        }
    }
    // Remember who pushed last so their next push skips conflict detection.
    _lastPushUser = user.Name;
    //AddToPushHistory(user);
    // Return the conflicts to the client
    return returnArray;
}
/// <summary>
/// Verifies batch initialization of lazy collections: three Baz entities are
/// persisted with collections of distinct sizes, reloaded as uninitialized
/// proxies, and the assertions check that touching ONE entity's collection
/// initializes the corresponding collection of ALL three loaded entities.
/// </summary>
public void BatchLoad()
{
	Baz baz, baz2, baz3;

	// --- Session 1: persist the fixture data. ---
	using (ISession s = OpenSession())
	{
		// baz: 3 foos and a 2-element string set.
		baz = new Baz();
		var stringSet = new SortedSet<string> { "foo", "bar" };
		var fooSet = new HashSet<FooProxy>();
		for (int i = 0; i < 3; i++)
		{
			Foo foo = new Foo();
			s.Save(foo);
			fooSet.Add(foo);
		}
		baz.FooSet = fooSet;
		baz.StringSet = stringSet;
		s.Save(baz);

		// baz2: 2 foos, no string set (so its StringSet loads empty below).
		baz2 = new Baz();
		fooSet = new HashSet<FooProxy>();
		for (int i = 0; i < 2; i++)
		{
			Foo foo = new Foo();
			s.Save(foo);
			fooSet.Add(foo);
		}
		baz2.FooSet = fooSet;
		s.Save(baz2);

		// baz3: only a 2-element string set, no foos.
		baz3 = new Baz();
		stringSet = new SortedSet<string>();
		stringSet.Add("foo");
		stringSet.Add("baz");
		baz3.StringSet = stringSet;
		s.Save(baz3);
		s.Flush();
	}

	// --- Session 2: reload and assert batch-loading behavior. ---
	using (ISession s = OpenSession())
	{
		baz = (Baz) s.Load(typeof(Baz), baz.Code);
		baz2 = (Baz) s.Load(typeof(Baz), baz2.Code);
		baz3 = (Baz) s.Load(typeof(Baz), baz3.Code);

		// Nothing has been touched yet: every collection must still be lazy.
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz.FooSet));
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz2.FooSet));
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz3.FooSet));
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz.StringSet));
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz2.StringSet));
		Assert.IsFalse(NHibernateUtil.IsInitialized(baz3.StringSet));

		// Accessing baz.FooSet.Count forces initialization; the following
		// asserts show the FooSet of ALL three entities got initialized in
		// the same (batched) round-trip.
		Assert.AreEqual(3, baz.FooSet.Count);
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz.FooSet));
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz2.FooSet));
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz3.FooSet));
		Assert.AreEqual(2, baz2.FooSet.Count);

		// Same pattern for the string sets: one Contains() call batch-loads all.
		Assert.IsTrue(baz3.StringSet.Contains("baz"));
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz.StringSet));
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz2.StringSet));
		Assert.IsTrue(NHibernateUtil.IsInitialized(baz3.StringSet));
		Assert.AreEqual(2, baz.StringSet.Count);
		Assert.AreEqual(0, baz2.StringSet.Count);

		// Clean up: delete the Baz entities, then every Foo reachable from
		// the two foo sets (joined into a single enumeration).
		s.Delete(baz);
		s.Delete(baz2);
		s.Delete(baz3);
		IEnumerable en = new JoinedEnumerable(
			new IEnumerable[] {baz.FooSet, baz2.FooSet});
		foreach (object obj in en)
		{
			s.Delete(obj);
		}
		s.Flush();
	}
}
/// <summary>
/// Returns the subset of automation assets whose runtime type name matches
/// the requested type name.
/// </summary>
/// <param name="type">Simple (unqualified) type name to filter on.</param>
/// <returns>A sorted set containing only the matching assets.</returns>
public async Task<SortedSet<AutomationAsset>> getAssetsOfType(String type)
{
    var allAssets = await getAssetsInfo();

    var matching = new SortedSet<AutomationAsset>();
    foreach (var candidate in allAssets)
    {
        // Skip anything that is not of the requested concrete type.
        if (candidate.GetType().Name != type)
            continue;
        matching.Add(candidate);
    }

    return matching;
}
/// <summary>
/// Refreshes the runbook list view model against the latest cloud metadata:
/// runbooks that vanished from the account are removed, existing entries are
/// updated in place, and newly discovered runbooks are appended.
/// </summary>
private async Task refreshRunbooks()
{
    ISet<AutomationRunbook> runbooks = await AutomationRunbookManager.GetAllRunbookMetadata(iseClient.automationManagementClient,
            iseClient.currWorkspace, iseClient.accountResourceGroups[iseClient.currAccount].Name,
            iseClient.currAccount.Name);

    // Index the freshly fetched runbooks by name for O(1) lookup.
    IDictionary<String, AutomationRunbook> runbookWithName = new Dictionary<String, AutomationRunbook>(runbooks.Count);
    foreach (AutomationRunbook runbook in runbooks)
    {
        runbookWithName.Add(runbook.Name, runbook);
    }

    // Deletions are collected first: we must not mutate runbookListViewModel
    // while enumerating it.
    ISet<AutomationRunbook> runbooksToDelete = new SortedSet<AutomationRunbook>();
    foreach (AutomationRunbook curr in runbookListViewModel)
    {
        // Single lookup instead of the previous ContainsKey + five indexer
        // accesses per runbook.
        AutomationRunbook updated;
        if (!runbookWithName.TryGetValue(curr.Name, out updated))
        {
            // No longer present in the cloud account: schedule for removal.
            runbooksToDelete.Add(curr);
            continue;
        }

        // Refresh the mutable metadata of the existing view-model entry.
        curr.AuthoringState = updated.AuthoringState;
        curr.Parameters = updated.Parameters;
        curr.Description = updated.Description;
        curr.LastModifiedCloud = updated.LastModifiedCloud;
        curr.LastModifiedLocal = updated.LastModifiedLocal;
        curr.UpdateSyncStatus();

        // Remove handled entries so only brand-new runbooks remain afterwards.
        runbookWithName.Remove(curr.Name);
    }

    foreach (AutomationRunbook runbook in runbooksToDelete)
    {
        runbookListViewModel.Remove(runbook);
    }

    // Whatever is left in the dictionary was not in the view model yet.
    foreach (AutomationRunbook runbook in runbookWithName.Values)
    {
        runbookListViewModel.Add(runbook);
    }
}
/// <summary>
/// Removes the element stored under <paramref name="key"/> from the open-addressed
/// hash table, then re-inserts the elements that followed it in the probe chain so
/// they remain reachable.
/// </summary>
/// <param name="key">The key of the element to remove; must be a valid key.</param>
public void Remove(byte[] key)
{
    AssertValidKey(key);

    // Walk the probe chain for the key, collecting every element visited.
    // The last element visited is the one holding the key being removed.
    SortedSet<HashTableElement> moveCandidates = new SortedSet<HashTableElement>();
    HashTableElement removingElement = null;
    foreach (var hashTableElement in GetElementWithKeyEnumerable(key))
    {
        // BUG FIX: the Add call used to live inside Debug.Assert(...). Debug.Assert
        // is [Conditional("DEBUG")], so in release builds the whole argument
        // expression is compiled out and moveCandidates stayed empty, leaving
        // chain elements unreachable after a removal. Perform the Add
        // unconditionally and only assert on its result.
        bool added = moveCandidates.Add(hashTableElement);
        Debug.Assert(added, "GetElementWithKeyEnumerable() should not return the same element twice");
        removingElement = hashTableElement;
    }
    Debug.Assert(removingElement != null, "GetElementWithKeyEnumberable() should have returned at least one element or thrown an exception");

    // The element being removed must not be re-inserted below.
    moveCandidates.Remove(removingElement);

    //null out the element
    WipeArrayAt(removingElement.Index);

    //rehash the elements between the hash and the removed element
    foreach (var moveCandidate in moveCandidates)
    {
        Debug.Assert(!moveCandidate.Key.EqualsBytes(NullKey), "GetElementWithKeyEnumerable() should not return any null elements");
        WipeArrayAt(moveCandidate.Index);
        Put(moveCandidate.Key, moveCandidate.Value);
    }
}
/// <summary>
/// Filters the cached asset collection down to assets whose runtime type
/// name equals the requested type name.
/// </summary>
/// <param name="type">Simple (unqualified) type name to filter on.</param>
/// <returns>A sorted set containing only the matching assets.</returns>
public SortedSet<AutomationAsset> getAssetsOfType(String type)
{
    var matching = new SortedSet<AutomationAsset>();

    foreach (var candidate in assets)
    {
        // Only keep assets of the requested concrete type.
        if (candidate.GetType().Name != type)
            continue;
        matching.Add(candidate);
    }

    return matching;
}
/// <summary>
/// Copies the user-editable settings from the dialog's controls into the
/// given profile, including the rebuilt ignore list.
/// </summary>
/// <param name="p">The profile to write the control values into.</param>
void SaveModifiables( Profile p ) {
	// NOTE(review): throws FormatException/OverflowException on non-numeric
	// input — presumably the text box is validated before this is called; confirm.
	p.SteamID64 = Int64.Parse( txtUserID.Text );

	p.AutoUpdate = chkAutoUpdate.Checked;
	p.AutoImport = chkAutoImport.Checked;
	p.LocalUpdate = chkLocalUpdate.Checked;
	// NOTE(review): WebUpdate is read from chkLocalUpdate, not a web-update
	// checkbox — this looks like a copy/paste bug; verify whether a
	// chkWebUpdate control exists and should be used here instead.
	p.WebUpdate = chkLocalUpdate.Checked;
	p.ExportDiscard = chkExportDiscard.Checked;
	p.IncludeShortcuts = chkIncludeShortcuts.Checked;
	p.OverwriteOnDownload = chkOverwriteNames.Checked;
	p.AutoIgnore = chkAutoIgnore.Checked;
	p.IncludeUnknown = chkIncludeUnknown.Checked;
	p.BypassIgnoreOnImport = chkBypassIgnoreOnImport.Checked;

	// Rebuild the ignore list from the list view; rows whose text does not
	// parse as an int are silently skipped.
	SortedSet<int> ignoreSet = new SortedSet<int>();
	foreach( ListViewItem item in lstIgnored.Items ) {
		int id;
		if( int.TryParse( item.Text, out id ) ) {
			ignoreSet.Add( id );
		}
	}
	p.IgnoreList = ignoreSet;
}
/// <summary>
/// Checks that a view produced by GetViewBetween tracks mutations of the
/// underlying set: its Count changes only for elements inside [4, 8].
/// </summary>
public void ViewCount ()
{
	// Underlying set: 1,3,4,5,6,7,8,9 — five of them fall within [4, 8].
	var numbers = new SortedSet<int> ();
	foreach (var n in new int [] { 1, 3, 4, 5, 6, 7, 8, 9 })
		numbers.Add (n);

	var window = numbers.GetViewBetween (4, 8);
	Assert.AreEqual (5, window.Count);

	// Removing an in-range element shrinks the view.
	numbers.Remove (5);
	Assert.AreEqual (4, window.Count);

	// Adding an out-of-range element leaves the view untouched.
	numbers.Add (10);
	Assert.AreEqual (4, window.Count);

	// Re-adding an element that is already present changes nothing.
	numbers.Add (6);
	Assert.AreEqual (4, window.Count);

	// Re-adding the removed in-range element grows the view again.
	numbers.Add (5);
	Assert.AreEqual (5, window.Count);
}
/// <summary>
/// Verifies the inline debugger display produced for a SortedSet&lt;int&gt;.
/// </summary>
public void DebuggerProxy_FrameworkTypes_SortedSet()
{
    // Two elements, added via collection initializer.
    var sortedSet = new SortedSet<int> { 1, 2 };

    var actual = CSharpObjectFormatter.Instance.FormatObject(sortedSet, s_inline);

    Assert.Equal("SortedSet<int>(2) { 1, 2 }", actual);
}
/// <summary>
/// Adds or removes a track from the library collection
/// </summary>
/// <param name="sender">The sender of the event (the timer)</param>
/// <param name="e">The event data</param>
private void SourceModifiedDelay_Tick(object sender, EventArgs e)
{
	sourceModifiedDelay.Stop();

	// Show a "busy" cursor while the background scan runs.
	Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
	{
		//ScanProgressBar.IsIndeterminate = true;
		//ScanProgress.Visibility = System.Windows.Visibility.Visible;
		Cursor = Cursors.AppStarting;
	}));

	// All heavy work happens on a dedicated low-priority background thread;
	// GUI mutations are marshalled back through Dispatcher.Invoke.
	ThreadStart GUIScanThread = delegate()
	{
		try
		{
			// copy the tracks into two lists
			// Partition the pending modifications into add/remove/update work
			// lists; tracksToRemove is keyed by path to de-duplicate removals.
			SortedSet<string> trackPaths = new SortedSet<string>();
			List<TrackData> tracksToAdd = new List<TrackData>();
			SortedList<string, TrackData> tracksToRemove = new SortedList<string, TrackData>();
			List<TrackData> tracksToUpdate = new List<TrackData>();
			foreach (DictionaryEntry de in sourceModifiedTracks)
			{
				SourceModificationType modType = (SourceModificationType)de.Value;
				TrackData track = (TrackData)de.Key;
				if (modType == SourceModificationType.Added)
					tracksToAdd.Add(track);
				else if (modType == SourceModificationType.Removed)
				{
					if (!tracksToRemove.ContainsKey(track.Path))
						tracksToRemove.Add(track.Path, track);
				}
				else
					tracksToUpdate.Add(de.Key as TrackData);
			}
			sourceModifiedTracks.Clear();

			// copy the observable collections so we can work on them
			// outside the gui thread
			ObservableCollection<TrackData> files = new ObservableCollection<TrackData>();
			foreach (TrackData t in SettingsManager.FileTracks)
			{
				files.Add(t);
				trackPaths.Add(t.Path);
			}

			// add tracks
			// Skip tracks already present (by path); note RemoveAt(j--) keeps
			// the loop index consistent while shrinking tracksToAdd in place.
			for (int j = 0; j < tracksToAdd.Count; j++)
			{
				TrackData track = tracksToAdd[j];
				if (trackPaths.Contains(track.Path))
					tracksToAdd.RemoveAt(j--);
				else
					files.Add(track);
			}

			// update source for file list
			U.L(LogLevel.Debug, "MAIN", "Adding tracks to GUI list");
			//DateTime start = DateTime.Now;
			Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
			{
				// Swap in the rebuilt collection and re-attach the change handler.
				SettingsManager.FileTracks = files;
				SettingsManager.FileTracks.CollectionChanged
					+= new NotifyCollectionChangedEventHandler(LibraryTracks_CollectionChanged);
				if (FileTracks != null)
					FileTracks.ItemsSource = files;
				if (SettingsManager.CurrentSelectedNavigation == "Files")
					InfoPaneTracks.Text = String.Format(U.T("HeaderTracks"), SettingsManager.FileTracks.Count);
				//ScanProgressBar.IsIndeterminate = false;
				//ScanProgressBar.Value = 0;
			}));

			// remove tracks
			//int numTracks = tracksToRemove.Count + tracksToAdd.Count + tracksToUpdate.Count;
			//double progressDelta = 100.0 / numTracks;
			//if (Double.IsInfinity(progressDelta)) progressDelta = 0;
			//double progress = 0;
			//double removeDelta = progressDelta * tracksToRemove.Count;
			if (tracksToRemove.Count > 0)
			{
				// remove if current track
				// NOTE(review): SettingsManager.CurrentTrack is dereferenced
				// without a null check here — if no track is current this
				// throws; confirm CurrentTrack is guaranteed non-null.
				for (int i = 0; i < tracksToRemove.Count; i++)
				{
					TrackData track = tracksToRemove.Values[i];
					if (SettingsManager.CurrentTrack.Path == track.Path)
						SettingsManager.CurrentTrack = null;
				}

				//double lists = SettingsManager.Playlists.Count + 3;
				//double trackDelta = progressDelta / lists;
				//double listDelta = removeDelta / lists;
				double listDelta = 1;

				// Purge the removed tracks from every list that may hold them.
				foreach (PlaylistData p in SettingsManager.Playlists)
					RemoveTracks(tracksToRemove, p.Tracks, listDelta);
				RemoveTracks(tracksToRemove, SettingsManager.QueueTracks, listDelta);
				RemoveTracks(tracksToRemove, SettingsManager.HistoryTracks, listDelta);
				RemoveTracks(tracksToRemove, SettingsManager.FileTracks, listDelta);
			}
			//progress = removeDelta;
			//if (showBar)
			//    Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
			//    {
			//        ScanProgressBar.Value = progress;
			//    }));

			// update tracks
			//U.L(LogLevel.Debug, "MAIN", "Updating tracks");
			// Re-read metadata for both newly added and modified tracks;
			// bail out quickly if the application is shutting down.
			for (int j = 0; j < tracksToAdd.Count; j++)
			{
				TrackData track = tracksToAdd[j];
				if (U.IsClosing) return;
				FilesystemManager.UpdateTrack(track, false);
				//if (showBar && j % 100 == 0)
				//{
				//    Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
				//    {
				//        ScanProgressBar.Value = progress;
				//    }));
				//}
				//progress += progressDelta;
			}
			for (int j = 0; j < tracksToUpdate.Count; j++)
			{
				TrackData track = tracksToUpdate[j];
				if (U.IsClosing) return;
				FilesystemManager.UpdateTrack(track, false);
				//if (j % 100 == 0)
				//{
				//    Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
				//    {
				//        ScanProgressBar.Value = progress;
				//    }));
				//}
				//progress += progressDelta;
			}
			//TimeSpan ts = (DateTime.Now - start);
			//double time = Math.Round(ts.TotalMilliseconds / numTracks, 2);
			//U.L(LogLevel.Debug, "FILESYSTEM", String.Format("Scanning took {0} seconds, an average of {1} ms/track", Math.Round(ts.TotalSeconds, 2), time));

			// Restore the normal cursor now that scanning is finished.
			Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
			{
				Cursor = Cursors.Arrow;
				//ScanProgress.Visibility = System.Windows.Visibility.Collapsed;
			}));

			// call callbacks
			Dispatcher.Invoke(DispatcherPriority.Background, new Action(delegate()
			{
				foreach (KeyValuePair<ScannerCallback, object> pair in sourceModifiedCallbacks)
				{
					ScannerCallback callback = pair.Key;
					object callbackParams = pair.Value;
					if (callback != null)
						callback(callbackParams);
				}
				sourceModifiedCallbacks.Clear();
			}));
		}
		catch (Exception exc)
		{
			// NOTE(review): on any failure the whole handler re-invokes itself;
			// a persistent error would retry indefinitely — confirm intended.
			U.L(LogLevel.Warning, "MAIN", "Error occured in meta scanner: " + exc.Message);
			U.L(LogLevel.Warning, "MAIN", "Restarting meta scanner.");
			SourceModifiedDelay_Tick(sender, e);
		}
	};
	Thread gs_thread = new Thread(GUIScanThread);
	gs_thread.Name = "GUI scan updater";
	gs_thread.IsBackground = true;
	gs_thread.Priority = ThreadPriority.Lowest;
	gs_thread.Start();
}
/// <summary>
/// Builds the list of shows/movies the logged-in user is entitled to watch,
/// derived from the user's active package, show and episode entitlements,
/// and returns it as JSON (sorted by show title). Returns a null payload when
/// Synapse is disabled or the user is not logged in.
/// </summary>
/// <returns>JSON array of ShowLookUpObject, or null.</returns>
public ActionResult GetAllVideosBasedOnEntitlements()
{
    List<ShowLookUpObject> list = null;
    // Feature gate: short-circuit with a null payload when disabled.
    if (!GlobalConfig.IsSynapseEnabled)
        return Json(list, JsonRequestBehavior.AllowGet);
    var registDt = DateTime.Now;
    if (MyUtility.isUserLoggedIn())
    {
        try
        {
            //var cache = DataCache.Cache;
            //string cacheKey = "MOBILEGAV:U:" + User.Identity.Name;
            //list = (List<ShowLookUpObject>)cache[cacheKey];
            if (list == null)
            {
                list = new List<ShowLookUpObject>();
                var context = new IPTV2Entities();
                var UserId = new Guid(User.Identity.Name);
                var user = context.Users.FirstOrDefault(u => u.UserId == UserId);
                if (user != null)
                {
                    // Tracks category ids already emitted (or otherwise handled)
                    // so the same show is not added twice across entitlements.
                    SortedSet<Int32> ShowIds = new SortedSet<int>();
                    var service = context.Offerings.Find(GlobalConfig.offeringId).Services.FirstOrDefault(s => s.StatusId == GlobalConfig.Visible);

                    // Only entitlements that have not yet expired are considered.
                    foreach (var entitlement in user.Entitlements.Where(e => e.EndDate > registDt))
                    {
                        if (entitlement is PackageEntitlement)
                        {
                            // Package entitlement: expand the package's categories
                            // and sub-categories into their visible shows.
                            var packageEntitlement = (PackageEntitlement)entitlement;
                            var pkgCat = context.PackageCategories.Where(p => p.PackageId == packageEntitlement.PackageId).Select(p => p.Category);
                            var pkgCatSubCategories = pkgCat.Select(p => p.SubCategories);
                            foreach (var categories in pkgCatSubCategories)
                            {
                                foreach (var category in categories)
                                {
                                    var listOfIds = service.GetAllMobileShowIds(MyUtility.GetCurrentCountryCodeOrDefault(), category);
                                    var showList = context.CategoryClasses.Where(c => listOfIds.Contains(c.CategoryId) && c.StatusId == GlobalConfig.Visible);
                                    foreach (var show in showList)
                                    {
                                        if (show != null)
                                        {
                                            if (!(ShowIds.Contains(show.CategoryId)))
                                            {
                                                // Only shows currently within their airing window.
                                                if (show.StartDate < registDt && show.EndDate > registDt)
                                                {
                                                    if (show is Show)
                                                    {
                                                        // Live events are excluded from this listing.
                                                        if (!(show is LiveEvent))
                                                        {
                                                            ShowLookUpObject data = new ShowLookUpObject();
                                                            data.Show = show.Description;
                                                            data.ShowId = show.CategoryId;
                                                            data.MainCategory = category.Description;
                                                            data.MainCategoryId = category.CategoryId;
                                                            data.ShowType = (show is Movie) ? "MOVIE" : "SHOW";
                                                            // NOTE(review): movies are deliberately skipped here
                                                            // (only non-movies are added) — confirm this is the
                                                            // intended behavior for package entitlements.
                                                            if (!(show is Movie))
                                                                list.Add(data);
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                    ShowIds.UnionWith(listOfIds); //For checking
                                }
                            }
                        }
                        else if (entitlement is ShowEntitlement)
                        {
                            // Show entitlement: add the single entitled show,
                            // resolving its main category from its parents.
                            var showEntitlement = (ShowEntitlement)entitlement;
                            if (!(ShowIds.Contains(showEntitlement.CategoryId)))
                            {
                                if (!(showEntitlement.Show is LiveEvent))
                                {
                                    ShowLookUpObject data = new ShowLookUpObject();
                                    var show = showEntitlement.Show;
                                    if (show != null)
                                    {
                                        if (show.StartDate < registDt && show.EndDate > registDt)
                                        {
                                            var CacheDuration = new TimeSpan(0, GlobalConfig.GetParentCategoriesCacheDuration, 0);
                                            var parentCategories = show.GetAllParentCategories(CacheDuration);
                                            var parent = context.CategoryClasses.Where(c => parentCategories.Contains(c.CategoryId) && c.StatusId == GlobalConfig.Visible && c is Category);
                                            data.Show = show.Description;
                                            data.ShowId = show.CategoryId;
                                            // NOTE(review): parent.First() throws if the show has no
                                            // visible parent category — presumably guaranteed by data; confirm.
                                            data.MainCategory = parent.First().Description;
                                            data.MainCategoryId = parent.First().CategoryId;
                                            data.ShowType = (show is Movie) ? "MOVIE" : "SHOW";
                                            // Unlike the package branch, movies ARE added here;
                                            // only the de-dup marker is skipped for movies.
                                            if (!(show is Movie))
                                                ShowIds.Add(show.CategoryId);
                                            list.Add(data);
                                        }
                                    }
                                }
                            }
                        }
                        else if (entitlement is EpisodeEntitlement)
                        {
                            // Episode entitlement: surface the episode's parent show,
                            // annotated with the episode id/name.
                            var episodeEntitlement = (EpisodeEntitlement)entitlement;
                            var eCacheDuration = new TimeSpan(0, GlobalConfig.GetParentShowsForEpisodeCacheDuration, 0);
                            var listOfShows = episodeEntitlement.Episode.GetParentShows(eCacheDuration);
                            var parentShow = context.CategoryClasses.FirstOrDefault(c => listOfShows.Contains(c.CategoryId) && c.StatusId == GlobalConfig.Visible && c is Show);
                            if (parentShow != null)
                            {
                                if (!(ShowIds.Contains(parentShow.CategoryId)))
                                {
                                    if (!(parentShow is LiveEvent))
                                    {
                                        if (parentShow.StartDate < registDt && parentShow.EndDate > registDt)
                                        {
                                            ShowLookUpObject data = new ShowLookUpObject();
                                            var CacheDuration = new TimeSpan(0, GlobalConfig.GetParentCategoriesCacheDuration, 0);
                                            var parentCategories = ((Show)parentShow).GetAllParentCategories(CacheDuration);
                                            var parent = context.CategoryClasses.Where(c => parentCategories.Contains(c.CategoryId) && c.StatusId == GlobalConfig.Visible && c is Category);
                                            data.EpisodeId = episodeEntitlement.Episode.EpisodeId;
                                            data.EpisodeName = episodeEntitlement.Episode.Description + ", " + episodeEntitlement.Episode.DateAired.Value.ToString("MMMM d, yyyy");
                                            data.Show = parentShow.Description;
                                            data.ShowId = parentShow.CategoryId;
                                            data.MainCategory = parent.First().Description;
                                            data.MainCategoryId = parent.First().CategoryId;
                                            data.ShowType = (parentShow is Movie) ? "MOVIE" : "SHOW";
                                            // Episodes of movies are skipped entirely.
                                            if (!(parentShow is Movie))
                                            {
                                                ShowIds.Add(parentShow.CategoryId);
                                                list.Add(data);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // Alphabetical by show title for display.
                    list = list.OrderBy(c => c.Show).ToList();
                    //cache.Put(cacheKey, list, DataCache.CacheDuration);
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort endpoint: log and fall through to return whatever
            // was accumulated (possibly an empty or partial list).
            MyUtility.LogException(e);
        }
    }
    return Json(list, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Provides all the function calls related to this namespace
/// </summary>
/// <param name="system">The system in which the calls should be gathered</param>
/// <param name="container">If provided, indicates that the calls should be limited to a given container</param>
/// <returns>All procedure/function calls and variable accesses, as a flat list of access modes</returns>
public static List<AccessMode> getAccesses(EFSSystem system, IEnclosesNameSpaces container = null)
{
    SortedSet<ProcedureOrFunctionCall> procedureCalls = new SortedSet<ProcedureOrFunctionCall>();
    SortedSet<AccessToVariable> accessesToVariables = new SortedSet<AccessToVariable>();

    foreach (Usage usage in system.FindReferences(IsCallableOrIsVariable.INSTANCE))
    {
        ModelElement target = (ModelElement) usage.Referenced;
        ModelElement source = usage.User;
        NameSpace sourceNameSpace = getCorrespondingNameSpace(source, container, true);
        NameSpace targetNameSpace = getCorrespondingNameSpace(target, container, false);

        if (IsCallable.Predicate(usage.Referenced))
        {
            if (considerCall(usage, container, sourceNameSpace, targetNameSpace))
            {
                procedureCalls.Add(new ProcedureOrFunctionCall(sourceNameSpace, targetNameSpace, (ICallable) target));
            }
        }
        else
        {
            // IsVariable(usage.Referenced)
            if (considerVariableReference(usage, container, sourceNameSpace, targetNameSpace))
            {
                Usage.ModeEnum mode = (Usage.ModeEnum) usage.Mode;

                // Find a corresponding access to variable (same source and target namespaces, and same variable)
                AccessToVariable otherAccess = null;
                foreach (AccessToVariable access in accessesToVariables)
                {
                    // BUG FIX: the original condition compared access.Target both to the
                    // referenced variable AND to the target namespace, which can never both
                    // hold, so an existing access was never found and duplicate entries were
                    // produced instead of being merged into ReadAndWrite.
                    // (Variable is the third ctor argument of AccessToVariable below —
                    // verify the property name matches the class declaration.)
                    if (access.Variable == usage.Referenced && access.Source == sourceNameSpace && access.Target == targetNameSpace)
                    {
                        otherAccess = access;
                        break;
                    }
                }

                if (otherAccess != null)
                {
                    if (otherAccess.AccessMode != mode)
                    {
                        // Since the access mode is different, one of them is either Read or ReadWrite and the other is ReadWrite or Write.
                        // So, in any case, the resulting access mode is ReadWrite
                        accessesToVariables.Remove(otherAccess);
                        accessesToVariables.Add(new AccessToVariable(sourceNameSpace, targetNameSpace, (IVariable) target,
                            Usage.ModeEnum.ReadAndWrite));
                    }
                    else
                    {
                        // Already exists, do nothing
                    }
                }
                else
                {
                    // Does not already exists, insert it in the list
                    accessesToVariables.Add(new AccessToVariable(sourceNameSpace, targetNameSpace, (IVariable) target, mode));
                }
            }
        }
    }

    // Build the results based on the intermediate results
    List<AccessMode> retVal = new List<AccessMode>();
    retVal.AddRange(procedureCalls);
    retVal.AddRange(accessesToVariables);
    return retVal;
}