/// <summary>
/// Create a new buffer.
/// </summary>
/// <remarks>This is the shared constructor code; it is never meant to be invoked on its own.</remarks>
/// <param name="manager">The associated serializer.</param>
/// <param name="progress_callback">A callback used to report progress of the current task.</param>
/// <param name="types_map">Optional existing map of types used by this buffer. DO NOT USE IT, except when resizing existing buffers.</param>
protected Buffer(Binary manager, Action<float> progress_callback, IDMap<Type> types_map = null)
{
    serializer = manager;
    progress = progress_callback;

    // When resizing an existing buffer the caller hands us its type map so IDs
    // stay stable; otherwise build a fresh map backed by this buffer's own
    // type-serialization callbacks.
    if (types_map != null)
    {
        types = types_map;
    }
    else
    {
        types = new IDMap<Type>(manager, ToBytes, FromBytes);
    }
}
/// <summary>
/// Construct a compressor bound to a single map block.
/// </summary>
/// <param name="rx">Block X coordinate (presumably in real/world units — confirm against caller).</param>
/// <param name="ry">Block Y coordinate.</param>
/// <param name="zoom">Zoom level of the block being compressed.</param>
/// <param name="provinces">Province lookup used during compression.</param>
/// <param name="adjacent">Adjacency table for neighboring provinces.</param>
/// <param name="idmap">ID mapping table.</param>
public DefaultCompressor( int rx, int ry, int zoom, ProvinceList provinces, AdjacencyTable adjacent, IDMap idmap )
{
    // Plain field capture; no validation happens here.
    this.provinces = provinces;
    this.adjacent = adjacent;
    this.idmap = idmap;
    this.rx = rx;
    this.ry = ry;
    this.zoom = zoom;
}
/// <summary>
/// Construct a terrain shader and precompute its brightness range table.
/// </summary>
/// <param name="scales">Source of the shade values used by this shader.</param>
/// <param name="provinces">Province lookup kept for later shading.</param>
/// <param name="idmap">Unused here beyond the signature; kept for interface parity.</param>
public TerrainShader( ColorScales scales, ProvinceList provinces, IDMap idmap )
{
    this.shades = scales.Shades;
    this.provinces = provinces;

    // Precompute a 512-entry clamp table: index r maps to (r - 128) limited to
    // the range [4, 127]. Written as ascending guards instead of descending ones.
    rangecheck = new byte[512];
    for ( int r = 0; r < 512; ++r )
    {
        if ( r < 132 )
            rangecheck[r] = 4;            // below 132, clamp to the floor value 4
        else if ( r < 256 )
            rangecheck[r] = (byte)(r - 128); // linear region: 4 .. 127
        else
            rangecheck[r] = 127;          // 256 and above, clamp to the ceiling 127
    }
}
/// <summary>
/// Redistribute repeatedly until a previously seen configuration recurs, then
/// record how many cycles ran and how long the detected loop is.
/// </summary>
public void RedistributeUntilLoop()
{
    // Seed the history with the starting configuration. IDMap<UInt128> is a
    // project collection; the collection initializer calls its Add method.
    var seenConfigurations = new IDMap<UInt128> { GetStateCode() };
    while (true)
    {
        Redistribute();
        // NOTE(review): TryAdd presumably returns false when the state code was
        // already present and reports that entry's original insertion index via
        // loopStart — TODO confirm against IDMap's implementation.
        if (!seenConfigurations.TryAdd(GetStateCode(), out int loopStart))
        {
            // Total distinct configurations seen = cycles executed before looping.
            RedistributionCyclesUntilLoop = seenConfigurations.Count;
            // Loop length = total cycles minus the index where the repeat first appeared.
            LoopStart = RedistributionCyclesUntilLoop - loopStart;
            return;
        }
    }
}
/// <summary>
/// Release this buffer's resources.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from a finalizer path,
/// in which case managed objects must not be touched.</param>
public virtual void Dispose(bool disposing)
{
    // Idempotent: a second call is a no-op.
    if (disposed)
        return;

    // Managed cleanup only happens on an explicit dispose — the serializer
    // takes the data block back into its pool.
    if (disposing)
        serializer.Release(data);

    // Null out references so large state becomes collectible promptly.
    resizeCount = 0;
    data = null;
    types = null;
    progress = null;
    serializer = null;

    disposed = true;
}
// Fetch account data for the given session key.
// Returns null (after logging) when no row matches; otherwise a dictionary with
// keys AccountID, Account, PlayerID, UpdateDate, IsStop, StopDate taken from the
// first row of the result set.
Dictionary<string, object> GetAccountInfoBySessionKey(string SessionKey)
{
    Dictionary<string, object> dictResult = new Dictionary<string, object>();

    // SECURITY: SessionKey used to be interpolated into the SQL verbatim, which
    // allows SQL injection. DoQueryCommand only accepts a raw SQL string, so as
    // a minimal mitigation we double any embedded single quotes; switch to a
    // parameterized query as soon as the DB layer supports one.
    string safeKey = SessionKey == null ? "" : SessionKey.Replace("'", "''");
    string strCommand = string.Format(
        "select AccountID, Account, PlayerID, UpdateDate, IsStop, StopDate from a_account where SessionKey='{0}'",
        safeKey);

    // Query the game DB for the player's row.
    List<List<object>> listDBResult = UseDB.GameDB.DoQueryCommand(strCommand);
    if (listDBResult.Count == 0)
    {
        // No matching session: log the error and signal failure with null.
        ReportDBLog(JsonConvert.SerializeObject(ErrorID.SessionError), IDMap.GetEnumAttribute(ErrorID.SessionError));
        return(null);
    }

    // Copy each column of the first row into the result dictionary, one by one.
    dictResult["AccountID"] = listDBResult[0][0];
    dictResult["Account"] = listDBResult[0][1];
    dictResult["PlayerID"] = listDBResult[0][2];
    dictResult["UpdateDate"] = listDBResult[0][3];
    dictResult["IsStop"] = listDBResult[0][4];
    dictResult["StopDate"] = listDBResult[0][5];
    return(dictResult);
}
/// <summary>
/// Compress this block by walking its tree, then serializing the ID table,
/// tree bytes, per-leaf owner indices (bit-packed by ID count) and 6-bit
/// per-leaf color values into a single byte array.
/// </summary>
/// <param name="xreal">Block X position passed through to the compressor.</param>
/// <param name="yreal">Block Y position passed through to the compressor.</param>
/// <param name="zoom">Zoom level passed through to the compressor.</param>
/// <param name="provinces">Province lookup; when this (or adjacent/idmap) is null a LesserCompressor is used instead.</param>
/// <param name="adjacent">Adjacency table for the default compressor.</param>
/// <param name="idmap">ID mapping for the default compressor.</param>
/// <returns>A CompressedBlock wrapping the packed bytes.</returns>
public CompressedBlock Compress( int xreal, int yreal, int zoom, ProvinceList provinces, AdjacencyTable adjacent, IDMap idmap )
{
    // Walk the tree, storing the info along the way
    MapBlockHandling.Compressor compressor;
    if ( provinces == null || adjacent == null || idmap == null )
        compressor = new MapBlockHandling.LesserCompressor();
    else
        compressor = new MapBlockHandling.DefaultCompressor( xreal, yreal, zoom, provinces, adjacent, idmap );
    WalkTree( compressor );

    // Write away idtable
    // Size budget: 2 bytes per ID entry + the raw tree bytes + 3 bytes per 4
    // leaves of 6-bit color data, plus an owner-index section whose width
    // depends on how many distinct IDs there are (1/2/4/8 bits per leaf).
    // NOTE(review): the leaf-packing loops below assume LeafCount is a multiple
    // of 8/4/2 respectively — presumably guaranteed by the tree walk; confirm.
    int datasize = compressor.IDCount*2 + compressor.Tree.Length + (compressor.LeafCount+3)/4*3;
    if ( compressor.IDCount == 2 ) datasize += (compressor.LeafCount+7)/8;                          // 1 bit per leaf
    else if ( compressor.IDCount == 3 || compressor.IDCount == 4 ) datasize += (compressor.LeafCount+3)/4;  // 2 bits per leaf
    else if ( compressor.IDCount > 4 && compressor.IDCount <= 16 ) datasize += (compressor.LeafCount+1)/2;  // 4 bits per leaf
    else if ( compressor.IDCount > 16 ) datasize += compressor.LeafCount;                           // full byte per leaf
    byte[] data = new byte[datasize];
    int dataindex = 0;

    // Copy IDTable
    /* for ( int i=0; i<compressor.IDCount; ++i ) { data[dataindex++] = (byte)(compressor.IDTable[i] & 255); data[dataindex++] = (byte)(compressor.IDTable[i] >> 8); } */
    // NOTE(review): IDTable appears to be an inverse map — indexed by raw id,
    // holding the compact index — so each compact index i is recovered by a
    // linear scan and written out little-endian. The scan stops one short of
    // ushort.MaxValue (t < 65535); presumably id 65535 cannot occur — confirm.
    byte[] idtable = compressor.IDTable;
    for ( int i=0; i<compressor.IDCount; ++i )
    {
        for ( int t=0; t<ushort.MaxValue; ++t )
        {
            if ( idtable[t] == i )
            {
                data[dataindex++] = (byte)(t & 255);
                data[dataindex++] = (byte)(t >> 8);
                break;
            }
        }
    }
    // Flag the last ID-table byte so the decoder knows where the table ends.
    data[dataindex-1] |= Terminator;

    // Raw tree bytes follow the ID table verbatim.
    byte[] tree = compressor.Tree;
    for ( int i=0; i<tree.Length; ++i )
    {
        data[dataindex++] = tree[i];
    }

    // Owner indices, bit-packed at the width chosen above.
    if ( compressor.IDCount == 2 )
    {
        // 8 one-bit owner indices per byte.
        for ( int i=0; i<compressor.LeafCount; i+=8 )
        {
            data[dataindex++] = (byte)( (compressor.Owners[i]) | (compressor.Owners[i+1] << 1) | (compressor.Owners[i+2] << 2) | (compressor.Owners[i+3] << 3) | (compressor.Owners[i+4] << 4) | (compressor.Owners[i+5] << 5) | (compressor.Owners[i+6] << 6) | (compressor.Owners[i+7] << 7) );
        }
    }
    else if ( compressor.IDCount == 3 || compressor.IDCount == 4 )
    {
        // 4 two-bit owner indices per byte.
        for ( int i=0; i<compressor.LeafCount; i+=4 )
        {
            data[dataindex++] = (byte)( (compressor.Owners[i]) | (compressor.Owners[i+1] << 2) | (compressor.Owners[i+2] << 4) | (compressor.Owners[i+3] << 6) );
        }
    }
    else if ( compressor.IDCount > 4 && compressor.IDCount <= 16 )
    {
        // 2 four-bit owner indices per byte.
        for ( int i=0; i<compressor.LeafCount; i+=2 )
        {
            data[dataindex++] = (byte)( (compressor.Owners[i]) | (compressor.Owners[i+1] << 4) );
        }
    }
    else if ( compressor.IDCount > 16 )
    {
        // One full byte per owner index.
        for ( int i=0; i<compressor.LeafCount; ++i )
        {
            data[dataindex++] = (byte)(compressor.Owners[i]);
        }
    }

    // Finally, the 6-bit color values
    // Four 6-bit colors are packed into three bytes: byte0 = c0 + low 2 bits of
    // c1, byte1 = high 4 bits of c1 + low 4 bits of c2, byte2 = high 2 bits of
    // c2 + all 6 bits of c3.
    for ( int i=0; i<compressor.LeafCount; i+=4)
    {
        data[dataindex++] = (byte)((compressor.Colors[i] & 63) | ((compressor.Colors[i+1] & 63) << 6));
        data[dataindex++] = (byte)(((compressor.Colors[i+1] & 63) >> 2) | ((compressor.Colors[i+2] & 63) << 4));
        data[dataindex++] = (byte)(((compressor.Colors[i+2] & 63) >> 4) | ((compressor.Colors[i+3] & 63) << 2));
    }
    return new CompressedBlock( data );
}
/// <summary>
/// Grid specialization that also builds an ID collection for its important locations.
/// </summary>
/// <param name="importantLocations">Locations handed to the base grid.</param>
public DangerousLocationGrid(Location2D[] importantLocations) : base(importantLocations)
{
    // Target-typed new: locationIDs is constructed from the base class's
    // ImportantLocations property rather than the raw constructor argument —
    // presumably the base normalizes or copies the array; TODO confirm.
    locationIDs = new(ImportantLocations);
}
/// <summary>
/// Create a new buffer of at least min_size bytes.
/// </summary>
/// <remarks>The buffer may end up larger, depending on the available allocated resources.</remarks>
/// <param name="manager">The associated serializer.</param>
/// <param name="min_size">The minimal size of the buffer.</param>
/// <param name="progress_callback">A callback used to report progress of the current task.</param>
/// <param name="types_map">Optional existing map of types used by this buffer. DO NOT USE IT, except when resizing existing buffers.</param>
/// <param name="resize_count">How many times this buffer has been resized so far.</param>
public Buffer(Binary manager, uint min_size, Action<float> progress_callback, IDMap<Type> types_map = null, uint resize_count = 0)
    : this(manager, progress_callback, types_map)
{
    // Acquire backing storage from the serializer's pool, then record which
    // resize generation this buffer belongs to.
    data = serializer.Grab(min_size);
    resizeCount = resize_count;
}
// Unified completion handler for protocol responses: logs the outcome,
// unwraps the JSON payload, surfaces server-reported errors, and dispatches
// any ClientAction entries contained in the response.
public static void ProtocolCompleteCallback(ErrorType errorCode, object result, object userState, CReqState state)
{
    LogMgr.DebugLog("[ClientService][ProtocolCompleteCallback] errorCode:{0}, result:{1}, userState:{2}, state:{3}", errorCode, result, userState, state);
    // Check whether the call succeeded.
    if (errorCode == ErrorType.Error)
    {
        LogMgr.ErrorLog("[ClientService][ProtocolCompleteCallback] Error!! errorCode:{0}, result:{1}, userState:{2}, state:{3}", errorCode, result, userState, state);
        return;
    }
    else if (errorCode == ErrorType.Timeout)
    {
        // Retry/resend here, maybe?!
        LogMgr.ErrorLog("[ClientService][ProtocolCompleteCallback] Timeout!! errorCode:{0}, result:{1}, userState:{2}, state:{3}", errorCode, result, userState, state);
        return;
    }
    else
    {
        // Extract the payload (from the network path): a JSON string whose "d"
        // field holds the actual dictionary.
        Dictionary<string, object> dictResult = null;
        if (result is string)
        {
            dictResult = JsonConvert.DeserializeObject<Dictionary<string, object>>(result.ToString());
            dictResult = JsonConvert.DeserializeObject<Dictionary<string, object>>(dictResult["d"].ToString());
        }
        // From a client-only server: round-trip through JSON to normalize the object.
        else
        {
            dictResult = JsonConvert.DeserializeObject<Dictionary<string, object>>(JsonConvert.SerializeObject(result));
        }
        //Debug.Log (JsonConvert.SerializeObject( dictResult));

        // Surface a server-reported error code, if any, as a ShowMessage action.
        if (dictResult.ContainsKey("Result") == true)
        {
            ErrorID eid = (ErrorID)System.Convert.ToInt32(dictResult["Result"]);
            if (eid != ErrorID.Success)
            {
                LogMgr.Log("[ClientService][ProtocolCompleteCallback] Parser Error, ErrorID = {0}, Msg={1}", eid, IDMap.GetEnumAttribute(eid));
                //GameUtility.ShowMessageBox (IDMap.GetEnumAttribute (eid), "錯誤"); //[problem]
                PushClientAction(ClientActionID.ShowMessage, IDMap.GetEnumAttribute(eid), dictResult, userState);
            }
        }

        // Fetch the ClientAction list; nothing to dispatch when it's absent.
        if (dictResult.ContainsKey("ClientAction") == false)
        {
            return;
        }
        //string jsonClientAction = dictResult["ClientAction"].ToString();
        List<KeyValuePair<string, object>> listAction = null;
        listAction = JsonConvert.DeserializeObject<List<KeyValuePair<string, object>>>(dictResult["ClientAction"].ToString());
        // Dispatch each action independently so one failure doesn't stop the rest.
        foreach (var ChildAction in listAction)
        {
            try
            {
                PushClientAction(ChildAction.Key, ChildAction.Value, dictResult, userState);
            }
            catch (Exception e)
            {
                LogMgr.ErrorLog("[Error!!] {0}", e.ToString());
            }
        }
    }
}
/// <summary>
/// Initialize parser state for a fresh read.
/// </summary>
/// <param name="outputEncoding">Encoding applied to produced output text.</param>
public ParseContext(Encoding outputEncoding)
{
    _encoding = outputEncoding;

    // Fresh containers: parsed documents, the entity stack, and the ID map.
    _documents = new Dictionary<string, WEMDocument>();
    _entity = new Stack<Entity>();
    _idMap = new IDMap();

    // Reader starts at line 1, column 1 in the Initial state; skip-back is
    // marked Error until it becomes valid.
    Status = ReadStatus.Initial;
    SkipBackStatus = ReadStatus.Error;
    LN = 1;
    COL = 1;
    Message = "未知错误状态";
}
/// <summary>
/// Build a serialization buffer context bound to the given serializer.
/// </summary>
/// <param name="serializer">Serializer backing the type map.</param>
/// <param name="progress">Optional callback used to report progress.</param>
public SerializationBufferContext(Binary serializer, Action<float> progress = null)
{
    // The type map serializes System.Type values via the shared helper methods.
    types = new IDMap<Type>(serializer, SystemTypeSerializer.ToBytes, SystemTypeSerializer.FromBytes);
    this.progress = progress;
}
/// <summary>
/// Import a GXL model graph: first pass maps each node element to a typed
/// Thing (domains, enums, attribute/node/edge classes), second pass wires the
/// relationships between them, then the textual model is built and written to
/// "&lt;modelname&gt;__gxl.gm".
/// </summary>
/// <param name="modelgraph">The GXL graph element holding node/edge declarations.</param>
/// <param name="modelname">Base name for the emitted model file.</param>
/// <returns>The filename the model was written to.</returns>
private static String ImportModel(XmlElement modelgraph, String modelname)
{
    IDMap idmap = new IDMap();
    // Fallback numbering for enum values that don't encode one in their id.
    int nextenumval = 1000;

    // Pass 1: nodes — each recognized GXL type becomes a Thing keyed by its id.
    foreach (XmlElement nodeelem in modelgraph.GetElementsByTagName("node"))
    {
        String nodetype = GetTypeName(nodeelem);
        // skip unknown elements
        int hashchar = nodetype.IndexOf('#');
        if (hashchar == -1 || !nodetype.Substring(0, hashchar).EndsWith("gxl-1.0.gxl"))
        {
            continue;
        }
        String id = nodeelem.GetAttribute("id");
        nodetype = nodetype.Substring(hashchar + 1);
        switch (nodetype)
        {
        case "Bool": idmap[id] = new Thing(id, ThingKind.Domain, "boolean"); break;
        case "Int": idmap[id] = new Thing(id, ThingKind.Domain, "int"); break;
        case "Float": idmap[id] = new Thing(id, ThingKind.Domain, "double"); break;
        case "String": idmap[id] = new Thing(id, ThingKind.Domain, "string"); break;
        case "Enum":
        {
            // "DM_enum_<name>" ids carry the enum name after the 8-char prefix.
            String name;
            if (id.StartsWith("DM_enum_"))
            {
                name = id.Substring(8);
            }
            else
            {
                name = id;
            }
            idmap[id] = new Thing(id, ThingKind.EnumDomain, new EnumDomain(name));
            break;
        }
        case "EnumVal":
        {
            // "EV_<num>_..." encodes the numeric value; "EV__<num>_..." (double
            // underscore) encodes a negative value. Otherwise take the next
            // synthetic value from the 1000+ counter.
            int val;
            if (id.StartsWith("EV_"))
            {
                int ind = id.IndexOf('_', 4);
                if (id[3] == '_')
                {
                    val = -int.Parse(id.Substring(4, ind - 4));
                }
                else
                {
                    val = int.Parse(id.Substring(3, ind - 3));
                }
            }
            else
            {
                val = nextenumval++;
            }
            String name = GetGXLAttr(nodeelem, "value", "string");
            idmap[id] = new Thing(id, ThingKind.EnumValue, new EnumMember(val, name));
            break;
        }
        case "AttributeClass":
        {
            String name = GetGXLAttr(nodeelem, "name", "string");
            idmap[id] = new Thing(id, ThingKind.AttributeClass, new AttributeClass(name));
            break;
        }
        case "NodeClass":
        {
            String name = GetGXLAttr(nodeelem, "name", "string");
            bool isabstract = GetGXLAttr(nodeelem, "isabstract", "bool") == "true";
            idmap[id] = new Thing(id, ThingKind.NodeClass, new NodeClass(name, isabstract));
            break;
        }
        case "EdgeClass":
        {
            String name = GetGXLAttr(nodeelem, "name", "string");
            bool isabstract = GetGXLAttr(nodeelem, "isabstract", "bool") == "true";
            bool isdirected = GetGXLAttr(nodeelem, "isdirected", "bool") == "true";
            idmap[id] = new Thing(id, ThingKind.EdgeClass, new EdgeClass(name, isabstract, isdirected));
            break;
        }
        }
    }

    // Pass 2: edges — connect the Things created above.
    foreach (XmlElement edgeelem in modelgraph.GetElementsByTagName("edge"))
    {
        String edgetype = GetTypeName(edgeelem);
        // skip unknown elements
        int hashchar = edgetype.IndexOf('#');
        if (hashchar == -1 || !edgetype.Substring(0, hashchar).EndsWith("gxl-1.0.gxl"))
        {
            continue;
        }
        String fromid = edgeelem.GetAttribute("from");
        String toid = edgeelem.GetAttribute("to");
        edgetype = edgetype.Substring(hashchar + 1);
        switch (edgetype)
        {
        case "hasDomain":
        {
            // Attribute class gets its value type from the referenced domain.
            AttributeClass attrClass = idmap[fromid].AttributeClass;
            String attrKind = idmap[toid].AttributeKind;
            attrClass.Type = attrKind;
            break;
        }
        case "containsValue":
        {
            // Enum domain collects its member values.
            EnumDomain enumDomain = idmap[fromid].EnumDomain;
            EnumMember enumMember = idmap[toid].EnumValue;
            enumDomain.Members.Add(enumMember);
            break;
        }
        case "isA":
        {
            // Inheritance: store the raw super-class id for later resolution.
            NodeClass nodeClass = idmap[fromid].NodeOrEdgeClass;
            nodeClass.SuperClasses.Add(toid);
            break;
        }
        case "hasAttribute":
        {
            NodeClass nodeClass = idmap[fromid].NodeOrEdgeClass;
            AttributeClass attrClass = idmap[toid].AttributeClass;
            nodeClass.AttrList.Add(attrClass);
            break;
        }
        }
    }

    // Render the model text and write it next to the process working directory.
    String model = BuildModel(idmap);
    String modelfilename = modelname + "__gxl.gm";
    using (StreamWriter writer = new StreamWriter(modelfilename))
        writer.Write(model);
    return(modelfilename);
}
/// <summary>
/// Render the imported model as text: enum declarations first, then node
/// classes (skipping the root "Node" type), then edge classes (skipping the
/// root "Edge" plus the built-in "AEdge"/"UEdge" types).
/// </summary>
/// <param name="idmap">The populated id-to-Thing map produced by the import.</param>
/// <returns>The generated model source as a single string.</returns>
private static String BuildModel(IDMap idmap)
{
    StringBuilder sb = new StringBuilder();

    // Enum declarations: "enum Name { A=1, B=2 }" with comma separators.
    foreach (EnumDomain enumdomain in idmap.EnumDomains)
    {
        sb.Append("enum " + enumdomain.Name + " { ");
        String separator = "";
        foreach (EnumMember enummember in enumdomain.Members)
        {
            sb.Append(separator);
            sb.Append(enummember.Name + "=" + enummember.Value);
            separator = ", ";
        }
        sb.Append(" }\n");
    }

    // TODO: Find the root node type!
    String rootnodetype = "Node";
    foreach (NodeClass nodeclass in idmap.NodeClasses)
    {
        if (nodeclass.Name == rootnodetype)
        {
            continue;
        }
        if (nodeclass.IsAbstract)
        {
            sb.Append("abstract ");
        }
        sb.Append("node class " + nodeclass.Name);
        BuildInheritance(sb, nodeclass, rootnodetype);
        BuildBody(sb, nodeclass);
    }

    // TODO: Find the root edge type!
    String rootedgetype = "Edge";
    foreach (EdgeClass edgeclass in idmap.EdgeClasses)
    {
        if (edgeclass.Name == rootedgetype || edgeclass.Name == "AEdge" || edgeclass.Name == "UEdge")
        {
            continue;
        }
        if (edgeclass.IsAbstract)
        {
            sb.Append("abstract ");
        }
        if (!edgeclass.IsDirected)
        {
            sb.Append("undirected ");
        }
        sb.Append("edge class " + edgeclass.Name);
        BuildInheritance(sb, edgeclass, rootedgetype);
        BuildBody(sb, edgeclass);
    }

    return sb.ToString();
}
/// <summary>
/// Heuristically grow cliques from a seed using bit-array friendship data.
/// Builds a "two-way only" relation map around the seed, then repeatedly pops
/// candidate cliques from a work list and tries to extend each by the members
/// with the most friends in common, publishing a clique when it can grow no
/// further. Runs under a sliding timeout that is extended on each publish.
/// </summary>
/// <param name="eachToOthers">Non-generic SortedList: member id -> BitArray of who that member reads (bit positions index into <paramref name="everyone"/>).</param>
/// <param name="seed">The member every clique must start from.</param>
/// <param name="everyone">Ordered list of all member ids; defines bit positions.</param>
/// <param name="hints">Optional: member id -> BitArray of clique slots from a previous run, revalidated and used as starting points.</param>
/// <returns>Published cliques, sorted by size descending.</returns>
static public List <List <int> > CliquesFromBits(SortedList eachToOthers, int seed, List <int> everyone, SortedList <int, BitArray> hints)
{
    // and if i could easily determine two-way relationships, i'm left with a derivative bitfield set that
    // i can use to hueristically determine the cliques. like so...
    Dictionary <int, BitArray> onlyTwoWayEachToOthers = new Dictionary <int, BitArray>();
    BitArray baSeed = (BitArray)eachToOthers[seed];
    // in the interest of bullshit prevention, seed reads self.
    // NOTE(review): this mutates the caller's BitArray in place.
    baSeed.Set(everyone.IndexOf(seed), true);
    foreach (int dood in eachToOthers.Keys)
    {
        // weed out doods who i don't have two-way relations with?
        if (baSeed[everyone.IndexOf(dood)])
        {
            BitArray baDoodReadsThese = (BitArray)eachToOthers[dood];
            // weed out doods who i don't have two-way relations with?
            if (baDoodReadsThese[everyone.IndexOf(seed)])
            {
                Debug.Assert(false == onlyTwoWayEachToOthers.ContainsKey(dood));
                onlyTwoWayEachToOthers.Add(dood, new BitArray(everyone.Count));
                // for each bit, is that other dood's bit set 4 me?
                for (int iPos = 0; iPos < baDoodReadsThese.Length; iPos++)
                {
                    if (baDoodReadsThese[iPos])
                    {
                        // we read someone. dop they read us?
                        BitArray otherDoodReads = (BitArray)eachToOthers[everyone[iPos]];
                        if (otherDoodReads.Get(everyone.IndexOf(dood)))
                        {
                            onlyTwoWayEachToOthers[dood].Set(iPos, true);
                            // Console.WriteLine("Two-way: " + dood + " " + everyone[iPos]);
                        }
                    }
                }
            }
        }
    }
    // now we have the two-way static bitarray called onlyTwoWayEachToOthers
    // produce clique knowledge. this is static for the last frame at this point.
    // i'm going to try a queue or stack or something.
    // LOOK THROUGH TWO-WAY DATASET. ALL F****D UP?!
    List <List <int> > publishedCliques = new List <List <int> >();
    // Work list of candidate (partially grown) cliques; treated as a stack below.
    List <List <int> > leafyLeafs = new List <List <int> >();
    List <int> starter = new List <int>();
    starter.Add(seed);
    leafyLeafs.Add(starter);
    // if there are hints, we need to examine them and determine if they are currently accurate.
    // perhaps some names are not part of the clique hint anymore... and they must be removed.
    if (hints != null)
    {
        // this is a bit of an exercise, but the hints have a key of a name, and a bitarray of what cliques they were in.
        // so i fetch up the members of each bitfield, and re-construct them into lists. then i must confirm they're valid now.
        // so the bit value (or position) is the key, and i populate a List<string> there.
        SortedList <int, List <int> > possibleHints = new SortedList <int, List <int> >();
        foreach (int name in hints.Keys)
        {
            BitArray ba = hints[name];
            // Walk bits until Get() throws past the end — the exception is the loop exit.
            for (int iPos = 0; ; iPos++)
            {
                try
                {
                    if (ba.Get(iPos))
                    {
                        if (false == possibleHints.ContainsKey(iPos))
                        {
                            possibleHints.Add(iPos, new List <int>());
                        }
                        possibleHints[iPos].Add(name);
                    }
                }
                catch (ArgumentOutOfRangeException)
                {
                    break; // we're done!
                }
            }
        }
        // now we tackle those possibleHints... are they still valid? or is a subset valid? and what if i derive the same subset two different ways?
        foreach (int iKey in possibleHints.Keys)
        {
            bool validHint = true;
            // for now, if it's valid now, we add it to our starter list. no subset hassle.
            foreach (int aMember in possibleHints[iKey])
            {
                // is this member e-friends with every other member?
                BitArray whoIRead = (BitArray)eachToOthers[aMember];
                foreach (int otherMembers in possibleHints[iKey])
                {
                    // i hate the reads-self question
                    if (aMember == otherMembers)
                    {
                        continue;
                    }
                    if (whoIRead[everyone.IndexOf(otherMembers)] == false)
                    {
                        validHint = false;
                        break;
                    }
                }
                if (false == validHint)
                {
                    break;
                }
            }
            if (validHint)
            {
                leafyLeafs.Add(possibleHints[iKey]);
            }
        }
    }
    Console.WriteLine("");
    Console.WriteLine("#STARTING " + DateTime.Now.ToString());
    DateTime weTimeout = DateTime.Now.AddMinutes(5);
    // ACTUALLY WHENEVER I SENSE A CLIQUE, I GIVE 4 MORE MINUTES TOWARD TRYING...
    while (leafyLeafs.Count > 0)
    {
        // if (_shouldStop)
        //     return;
        if (DateTime.Now > weTimeout)
        {
            Console.WriteLine("Timed out.");
            break;
        }
        // pop one off and advance it. if it doesn't advance, then it's maximally grown
        // POP OFF THE END SO I GROW MAXIMALLY FAST, SO I QUICKLY HAVE
        List <int> doinMe = leafyLeafs[leafyLeafs.Count - 1];
        leafyLeafs.RemoveAt(leafyLeafs.Count - 1);
        Dictionary <int, int> numInCommon = new Dictionary <int, int>();
        // construct its membership
        // Intersection of every member's two-way friend set = who is compatible
        // with the whole clique so far.
        BitArray baThisClique = new BitArray(everyone.Count, true);
        // and away
        foreach (int s in doinMe)
        {
            baThisClique.And(onlyTwoWayEachToOthers[s]);
        }
        // now see how this advances...
        foreach (int thatCat in onlyTwoWayEachToOthers.Keys)
        {
            // perhaps here i need to check if thatCat is anyone ALREADY IN THE CLIQUE, not just the seed.
            // if (thatCat == args[0])
            //     continue;
            if (doinMe.Contains(thatCat))
            {
                continue;
            }
            BitArray thatCatsCats = onlyTwoWayEachToOthers[thatCat];
            // i need to work with a copy, cuz .And screws the original
            BitArray baInCommon = new BitArray(thatCatsCats);
            // BitArray baInCommon = thatCatsCats.And(ba2WaySeed);
            baInCommon.And(baThisClique);
            int inCommon = 0;
            for (int i = 0; i < baInCommon.Count; i++)
            {
                if (baInCommon.Get(i))
                {
                    inCommon++;
                }
            }
            // Console.WriteLine(args[0] + " and " + thatCat + " have this many e-friends in common: " + inCommon);
            numInCommon.Add(thatCat, inCommon);
        }
        // NOTE(review): TopDown presumably returns the distinct in-common counts
        // sorted descending (see the mothballed LINQ below) — confirm it always
        // has at least 3 entries, since indexes 0..2 are read unconditionally.
        List <int> topdown = TopDown(numInCommon.Values);
        /*
         * var topdown = (from theVals in numInCommon.Values
         *                orderby theVals descending
         *                select theVals).Distinct();
         */
        // all membership at the top three levelz are considered as the next member in the clique
        // and i push on a new, "proposed" clique based on this...
        bool maximallyGrown = true;
        foreach (int joker in numInCommon.Keys)
        {
            // just try the bulky shit first.
            if ( (numInCommon[joker] == topdown[0]) || (numInCommon[joker] == topdown[1]) || (numInCommon[joker] == topdown[2]) )
            {
                // is the joker e-friends with all the clique members we're doin?
                bool success = true;
                foreach (int alreadyIn in doinMe)
                {
                    if (false == onlyTwoWayEachToOthers[alreadyIn].Get(everyone.IndexOf(joker)))
                    {
                        success = false;
                        break;
                    }
                }
                if (false == success)
                {
                    continue; // next joker please
                }
                List <int> aNewCliqueToTry = new List <int>(doinMe);
                aNewCliqueToTry.Add(joker);
                // if I only push sorted lists, i can more easily preven duplicates
                aNewCliqueToTry.Sort();
                // is this already in my list? look fast.
                bool heyItsNew = true;
                // it's probably same as previous in the queue, so examine backwards, but for now just this...
                for (int iSlotOfLeafyLeaf = 0; iSlotOfLeafyLeaf < leafyLeafs.Count; iSlotOfLeafyLeaf++)
                // foreach (List<string> existing in leafyLeafs)
                {
                    List <int> existing = leafyLeafs[iSlotOfLeafyLeaf];
                    if (aNewCliqueToTry.Count == existing.Count)
                    {
                        bool theyDiffer = false;
                        for (int iEachDood = 0; iEachDood < aNewCliqueToTry.Count; iEachDood++)
                        {
                            if (aNewCliqueToTry[iEachDood] != existing[iEachDood])
                            {
                                theyDiffer = true;
                                break;
                            }
                        }
                        if (false == theyDiffer)
                        {
                            // this should get trigglered... but if not, that's curious
                            Console.WriteLine("This proposed entry is already present in the queue, at slot " + iSlotOfLeafyLeaf);
                            heyItsNew = false;
                            break;
                        }
                    }
                }
                if (heyItsNew)
                {
                    maximallyGrown = false;
                    // Console.WriteLine("");
                    // foreach (string sInIt in aNewCliqueToTry)
                    // {
                    //     Console.Write(sInIt + " ");
                    // }
                    leafyLeafs.Add(aNewCliqueToTry);
                }
            }
        }
        if (maximallyGrown)
        {
            bool unique = true;
            // is this equal to or a subset of a set we have already published?
            // (A SUBSET SHOULD NEVER APPEAR TO BE MAXIMALLY GROWN... SO THAT'S A STOP!)
            foreach (List <int> set in publishedCliques)
            {
                // can i get some help here?
                if (set.Count == doinMe.Count)
                {
                    bool aMatch = true;
                    // proove me wrong...
                    for (int iPos = 0; iPos < set.Count; iPos++)
                    {
                        // thank heaven for alpha sort
                        if (set[iPos] != doinMe[iPos])
                        {
                            aMatch = false;
                            break;
                        }
                    }
                    if (aMatch == false)
                    {
                        continue;
                    }
                    // if it's a match, then we don't re-add it.
                    unique = false;
                    break;
                }
            }
            if (unique)
            {
                publishedCliques.Add(doinMe);
                Console.Write("#PUBLISHED AT " + DateTime.Now.ToString() + " " + doinMe.Count + ": ");
                foreach (int s in doinMe)
                {
                    Console.Write(IDMap.IDToName(s) + " ");
                }
                Console.WriteLine("");
                weTimeout = DateTime.Now.AddMinutes(1); // We will try for 1 minute
            }
        }
    }
    Console.WriteLine("#FINISHED AT " + DateTime.Now.ToString());
    Console.WriteLine("");
    SortByCountDesc(publishedCliques);
    return(publishedCliques);
    /* THE FOLLOWING CODE IS VERY LOVELY BUT MOTHBALLED.
     * // i love this data just as it is, skip this later work, which remains cool
     * SortedList<int, BitArray> groupMemberships = null;
     *
     *
     * // from here, every one in these top three crossover points needs consideration, recursively
     * // and perhaps i will choose to go deeper,
     * // but first i implement this...
     * // so capture... each name-set, and the current .And result?
     * if (null != publishedCliques)
     * {
     *     // sort them.
     *     // publishedSets.Sort(delegate(string s1, string s2) { return (s1 > s2) ? s1 : s2; ; });
     *     SortByCountDesc(publishedCliques);
     *
     *     // so i clobber any clique below the 63rd
     *     // while (publishedCliques.Count > 64)
     *     //     publishedCliques.RemoveAt(64); // 0 to 63 are valid.
     *
     *     // now they're ordered. i want to place by top-down as size, but...
     *     // really all the size breakdown administers is
     *     // font information.
     *     // so i will have a bitfield for each name in everyone...
     *     groupMemberships = new SortedList<int, BitArray>();
     *     for (int iClique = 0; iClique < publishedCliques.Count; iClique++)
     *     {
     *         Debug.Assert(iClique < 64);
     *
     *         foreach (int name in publishedCliques[iClique])
     *         {
     *             // assign membership. do i know this name yet?
     *             BitArray ba = null;
     *             if (groupMemberships.TryGetValue(name, out ba))
     *             {
     *                 ba.Set(iClique, true);
     *             }
     *             else
     *             {
     *                 BitArray bitArrayOfCliques = new BitArray(publishedCliques.Count);
     *
     *                 bitArrayOfCliques.Set(iClique, true);
     *                 groupMemberships.Add(name, bitArrayOfCliques);
     *             }
     *         }
     *     }
     * }
     *
     * // published cliques, if ever valid, are now trashed, and all we have is groupMemberships, if those.
     * publishedCliques = null;
     *
     * return groupMemberships;
     *
     */
}
/// <summary>
/// Create a buffer that wraps already-existing data.
/// </summary>
/// <param name="manager">The associated serializer.</param>
/// <param name="existing_data">The existing data.</param>
/// <param name="progress_callback">A callback used to report progress of the current task.</param>
/// <param name="types_map">Optional existing map of types used by this buffer. DO NOT USE IT, except when resizing existing buffers.</param>
public Buffer(Binary manager, byte[] existing_data, Action<float> progress_callback, IDMap<Type> types_map = null)
    : this(manager, progress_callback, types_map)
{
    // Adopt the caller's bytes directly; a freshly wrapped buffer has no resizes yet.
    data = existing_data;
    resizeCount = 0;
}
/// <summary>
/// Rebuild the "watches" table from the friend data of every considered user,
/// then for each watcher find recently-added journals among the people they
/// read (excluding renames, offline accounts, anyone they ever read before),
/// and publish a recommendation post plus an inbox notification.
/// NOTE(review): all SQL here is built with string.Format; the interpolated
/// values are ints from our own tables, so injection exposure is limited, but
/// parameterized queries would still be safer.
/// </summary>
/// <param name="considered">User ids whose friend data seeds the watches table.</param>
public static void RefreshBasedOnRadarSignals_AndPublish(HashSet <Int32> considered)
{
    // SendPostToLJ();
    // SendPostToLJ("For <lj user=mcfnord>", "some kibble");
    MMDB.MakeSureDBIsOpen();
    // first trash watches entirely? so no need to drop people.
    // then populate watches entirely. build it up! readers of ljreader. not who ljreader reads!
    new MyNpgsqlCommand("delete from watches;", MMDB.DBConnection).ExecuteNonQuery();
    // string fdataLJReader = FData.GetFData("ljfinder", true);
    // string fdataLJReader = FData.GetFData("mcfnord", true);
    // HashSet<Int32> whoReadsTheRobot = FData.IDsInTheyReadMeFData(fdataLJReader);
    considered.Remove(5731095); // no self!
    // ok, kill all adds that are oldder than 119 days. cuz i don't want to pblish offlines. or something.
    // Dates are stored as day counts relative to Extras.TwoK (a fixed epoch).
    MMDB.ExecuteNonQuery(string.Format("delete from adds where daydetected < {0};", DateTime.Now.AddDays(-119).Subtract(Extras.TwoK).Days));
    // MAYBE SOMEDAY I EXCLUDE SOME PEOPLE WHO READ THE ROBOT BECAUSE THEY SUCK ASS AND DON'T PICK UP THE SHIT.
    // Populate watches: one row per (reader, person-they-read) pair.
    foreach (var jokerWhoReadsTheRobot in considered)
    {
        foreach (var someoneJokerReads in FData.IDsInIReadFData(FData.GetFData(IDMap.IDToName(jokerWhoReadsTheRobot))))
        {
            MMDB.ExecuteNonQuery(string.Format("INSERT INTO watches (watcher, watched) Values({0}, {1});", jokerWhoReadsTheRobot, someoneJokerReads), false);
        }
    }
    // gather all actors in add events
    HashSet <Int32> watchers = new HashSet <int>();
    NpgsqlCommand cmd = new NpgsqlCommand("select distinct(watcher) from watches where watched IN (select distinct(actor) from adds) ;", MMDB.DBConnection);
    cmd.CommandTimeout = 0; // forever!
    NpgsqlDataReader myReader = cmd.ExecuteReader();
    while (myReader.Read())
    {
        watchers.Add(myReader.GetInt32(0));
    }
    myReader.Close();
    HashSet <Int32> everyoneImmaUpdate = new HashSet <int>();
    // I HATE RECOMMENDING RENAMES. I HATE IT SO MUCH THAT I WANT TO MAXIMALLY DETECT THEM IN A PRE-STEP.
    // Pre-warm friend data for everyone the watchers read (minus the watchers
    // themselves) so rename/offline detection has fresh data to work with.
    foreach (var watcher in watchers)
    {
        everyoneImmaUpdate.UnionWith(FData.IDsInIReadFData(FData.GetFData(IDMap.IDToName(watcher))));
    }
    everyoneImmaUpdate.ExceptWith(watchers);
    foreach (var someParty in everyoneImmaUpdate)
    {
        if (false == FData.FDataConfirmedCurrentEnufOn(IDMap.IDToName(someParty)))
        {
            FData.GetFData(IDMap.IDToName(someParty));
        }
    }
    everyoneImmaUpdate = null;
    // imma gonna order watchers into orderedWatchers, based on last-published-to date
    // but first it's ok i'll run this through...
    string sqlset = "";
    foreach (var dude in watchers)
    {
        sqlset += dude.ToString() + ",";
    }
    sqlset = sqlset.Substring(0, sqlset.Length - 1);
    myReader = new NpgsqlCommand(string.Format("select id from nameidmap where id in ({0}) order by inbox_hit_week ;", sqlset), MMDB.DBConnection).ExecuteReader();
    List <Int32> sortedWatchers = new List <int>();
    while (myReader.Read())
    {
        sortedWatchers.Add(myReader.GetInt32(0));
    }
    myReader.Close();
    watchers = null;
    foreach (var watcher in sortedWatchers)
    {
        if (watcher == 1179796)
        {
            Console.WriteLine("The bastardo.");
        }
        HashSet <Int32> whoWatcherReads = FData.IDsInIReadFData(FData.GetFData(IDMap.IDToName(watcher)));
        if (whoWatcherReads.Contains(watcher)) // ditch read-self!
        {
            whoWatcherReads.Remove(watcher);
        }
        // tell me who was added by all the parties we watch
        // Map of candidate target -> set of actors (friends) who added them.
        Dictionary <Int32, HashSet <Int32> > everyTarget = new Dictionary <Int32, HashSet <Int32> >();
        string whoWatcherReadsInSQLSet = "";
        foreach (var someone in whoWatcherReads)
        {
            whoWatcherReadsInSQLSet += someone.ToString() + ",";
        }
        whoWatcherReadsInSQLSet = whoWatcherReadsInSQLSet.Substring(0, whoWatcherReadsInSQLSet.Length - 1);
        // rule out anyone if i ever ever read them before.
        // my encyclopedic archive rules i guess!
        List <int> daysOfTheFData = FData.GetFDataDates(IDMap.IDToName(watcher));
        HashSet <Int32> everyoneIEverRead = new HashSet <Int32>();
        foreach (var day in daysOfTheFData)
        {
            string anFD = FData.FDataBy2kDay(IDMap.IDToName(watcher), day);
            everyoneIEverRead.UnionWith(FData.IDsInIReadFData(anFD));
        }
        if (everyoneIEverRead.Contains(watcher))
        {
            everyoneIEverRead.Remove(watcher); // remove watcher manually
        }
        if (watcher == 1)
        {
            if (everyoneIEverRead.Contains(578))
            {
                Console.WriteLine("I read scribble before.");
            }
        }
        cmd = new NpgsqlCommand(string.Format("select target, actor from adds where actor IN ({0});", whoWatcherReadsInSQLSet), MMDB.DBConnection);
        myReader = cmd.ExecuteReader();
        while (myReader.Read())
        {
            Int32 iTarget = myReader.GetInt32(0);
            // skip self-whatevers.
            if (iTarget == watcher)
            {
                continue;
            }
            // skip ljfinder!
            if (iTarget == 5731095)
            {
                continue;
            }
            // detected as offline in last 120 days?
            NpgsqlDataReader innerReader = new MyNpgsqlCommand(
                string.Format("select offline_last_detected_on from nameidmap where id={0};", iTarget), MMDB.DBConnection).ExecuteReader();
            innerReader.Read();
            Int16? offline_detected_on = MMDB.MaybeNullInt16(innerReader, 0); // overload with a no-local-reader option! rolls royce db!
            innerReader.Close();
            if (null != offline_detected_on)
            {
                if (offline_detected_on > DateTime.Now.AddDays(-120).Subtract(Extras.TwoK).Days)
                {
                    continue;
                }
            }
            if (everyoneIEverRead.Contains(iTarget))
            {
                continue;
            }
            // MORITORIUM ALSO ON ANYTHING THAT IS DEAD WTF
            // KILL IT WITH ANY F*****G FEATURE... UPDATEPPY? 404!
            // moritorium on any target that has been detected as a rename in the last 120 days
            innerReader = new MyNpgsqlCommand(
                string.Format("select made_by_rename_detected_on from nameidmap where id={0};", iTarget), MMDB.DBConnection).ExecuteReader();
            innerReader.Read();
            Int16? day_rename_detected = MMDB.MaybeNullInt16(innerReader, 0); // overload with a no-local-reader option! rolls royce db!
            innerReader.Close();
            if (null != day_rename_detected)
            {
                if (day_rename_detected > DateTime.Now.AddDays(-120).Subtract(Extras.TwoK).Days)
                {
                    continue;
                }
            }
            // create and add
            if (false == everyTarget.ContainsKey(iTarget))
            {
                // if it's known offline i'm gonna hit this way too hard. i just need to remember somehow that i hit it.
                // Extras.CheckForRenameOrOffline(iTarget);
                everyTarget.Add(iTarget, new HashSet <Int32>());
            }
            int iActor = myReader.GetInt32(1);
            ////////////////////// diagnose how actor == watcher, ever.
            // Console.WriteLine("Target: {0}, Actor: {1}, Watcher: {2}", IDMap.IDToName(iTarget), IDMap.IDToName(iActor), IDMap.IDToName(watcher));
            ////////////////////// diagnose how actor == watcher, ever.
            System.Diagnostics.Debug.Assert(iActor != watcher); // watcher should not be in the IN set that does this query!
            everyTarget[iTarget].Add(iActor); // add the actor. this is why we can't just use a big IN sql statement. or can we?
        }
        myReader.Close();
        // only publish if i've never published to this user, or barring that, at least one target must have more than one actor.
        cmd = new NpgsqlCommand(string.Format("select count(*) from radarpicks where userid={0} ;", watcher), MMDB.DBConnection);
        myReader = cmd.ExecuteReader();
        myReader.Read();
        Int64? any = MMDB.MaybeNullInt64(myReader, 0);
        myReader.Close();
        if (everyTarget.Count > 0) // else we crash so f**k this situation.
        {
            if (any == 0 || everyTarget.Max(targ => targ.Value.Count) > 1)
            {
                // actors[i] and targets[i] are parallel: each slot pairs a set of
                // recommenders with the set of journals they all added.
                List <HashSet <Int32> > actors = new List <HashSet <Int32> >();
                List <HashSet <Int32> > targets = new List <HashSet <Int32> >();
                var sortedGroups = (from g in everyTarget
                                    where g.Value.Count > ((any == 0) ? 0 : 1) // if we've never published anything, lower the standard to just 1
                                    orderby g.Value.Count descending
                                    select g).Take(12);
                // fold the sorted groups into the two dictionaries.
                foreach (var thisG in sortedGroups)
                {
                    // if these actors are already in my actor list, then we skip
                    foreach (var existing in actors)
                    {
                        if (thisG.Value.SetEquals(existing)) // wha'ts the right way??? cuz it's not this.
                        {
                            goto ALREADY;
                        }
                    }
                    // i know its key, so i can look for equal sets that aren't it.
                    var sameActors = from x in everyTarget
                                     where (x.Key != thisG.Key && x.Value.SetEquals(thisG.Value)) // so same actors, different key, so excludes thisG!
                                     select x;
                    // All of them are added as a same 'entry' slot into actors and targets sets.
                    actors.Add(thisG.Value);
                    System.Diagnostics.Debug.Assert(false == thisG.Value.Contains(watcher)); // watchers aren't actors in their deeds!
                    HashSet <Int32> targs = new HashSet <int>();
                    targs.Add(thisG.Key);
                    foreach (var same in sameActors)
                    {
                        targs.Add(same.Key);
                    }
                    targets.Add(targs);
                    ALREADY: ;
                }
                // divide into two groups, based on whether i've published this name to this user before.]
                // builds a logic array of whether anyone in this set has been published before.
                List <bool> publishedBefore = new List <bool>();
                foreach (var targSet in targets)
                {
                    bool fPublishedBefore = true;
                    foreach (var t in targSet)
                    {
                        cmd = new NpgsqlCommand(string.Format("select count(*) from radarpicks where userid={0} and userid_recommended={1};", watcher, t), MMDB.DBConnection);
                        myReader = cmd.ExecuteReader();
                        myReader.Read();
                        try
                        {
                            fPublishedBefore = (myReader.GetInt64(0) > 0);
                            if (false == fPublishedBefore)
                            {
                                break; // just one "news" in the set means NO, not published before!
                            }
                        }
                        finally
                        {
                            myReader.Close();
                        }
                    }
                    publishedBefore.Add(fPublishedBefore);
                }
                //// TOTAL EXPERIMENT BEFORE THE PUBLISHED B4 CHECK.
                //// TOTAL EXPERIMENT BEFORE THE PUBLISHED B4 CHECK.
                /*
                 * foreach (var set in actors)
                 * {
                 *     List<List<Int32>> bunches = TCliquesClass.CustomGroupMain(watcher, set); // love this but need specialized "top set matters" timeout mode.
                 *     // that clique finder puts the watcher into the set. i take it out!
                 *     set.Remove(watcher);
                 *
                 *     Console.WriteLine("Largest actor clique: ");
                 *     foreach (var party in bunches[0])
                 *     {
                 *         Console.Write(IDMap.IDToName(party) + " ");
                 *     }
                 *     Console.WriteLine();
                 * }
                 */
                //// TOTAL EXPERIMENT BEFORE THE PUBLISHED B4 CHECK.
                //// TOTAL EXPERIMENT BEFORE THE PUBLISHED B4 CHECK.
                // is there anything new to publish?
                if (any > 0) // if any == 0, then we publish cuz we never published b4
                {
                    if (false == publishedBefore.Contains(false))
                    {
                        Console.WriteLine("Nothin new to publish, just stuff I already found.");
                        continue;
                    }
                }
                // NOTE(review): the "******" fragments below look like redacted
                // literals from the original source — this line and the <lj user...>
                // fragment further down would not compile as-is; recover the real
                // strings from version control before building.
                string title = "Hey <lj user="******">";
                string archiveContent = "";
                string newContent = "";
                string names = "";
                foreach (var t in targets)
                {
                    foreach (var item in t)
                    {
                        names += IDMap.IDToName(item) + "|";
                    }
                }
                names = names.Substring(0, names.Length - 1);
                // in conclusion, a link to the visual
                // string content = "<ul><li><a href='http://ljmindmap.com/h.php?n=" + IDMap.IDToName(watcher) + "'>Тесен Мир / MindMap</a>";
                string content = "<table><tr><td><center><a href='http://ljmindmap.com/h.php?n=" + IDMap.IDToName(watcher) + "'><img width='500' src='http://ljmindmap.com/s/?f=" + IDMap.IDToName(watcher) + ".gif'><br>Тесен Мир / MindMap</a></td></tr></table><br>";
                /*
                 *
                 *
                 *
                 * <table><tr><td><center><a href="http://ljmindmap.com/h.php?n=micaturtle"><img width="500" src="http://ljmindmap.com/s/?f=micaturtle.gif"><br>Тесен Мир / MindMap</a></td></tr></table><br>
                 */
                // content += string.Format("<li><a href='http://ljmindmap.com/mass_add.php?a={0}'>Surf these LJs</a></ul>", names);
                content += "<table border='1'><tr><th><u>New LJs</u><th><u>Found by</u></tr>";
                System.Diagnostics.Debug.Assert(actors.Count == targets.Count);
                for (int iPos = 0; iPos < actors.Count; iPos++)
                {
                    string contentFragment = "<tr><td>";
                    foreach (var t in targets[iPos])
                    {
                        contentFragment += "<lj user="******"> ";
                    }
                    contentFragment += "<td><b>";
                    foreach (var a in actors[iPos])
                    {
                        System.Diagnostics.Debug.Assert(a != watcher);
                        contentFragment += IDMap.IDToName(a) + " ";
                    }
                    contentFragment += "</b></tr>";
                    if (publishedBefore[iPos])
                    {
                        archiveContent += contentFragment;
                    }
                    else
                    {
                        newContent += contentFragment;
                    }
                }
                content += newContent;
                content += "</table>";
                if (archiveContent.Length > 0)
                {
                    content += "<br><h3>Archive</h3><table>";
                    content += archiveContent;
                    content += "</table>";
                }
                string url = AddOrUpdateLJPost(watcher, title, content);
                if (null == url)
                {
                    // need to just re-post.
                }
                // NOTE(review): url is dereferenced below even though the null
                // branch above is empty — a null url will throw here.
                System.Diagnostics.Debug.Assert(url.Length > 0);
                Console.WriteLine(url);
                string emailcontent = "\r\nI've made your own custom MindMap and found some LJs your friends are adding. Yay!\r\n\r\n" + url + "\r\n\r\n- lil ljfinder";
                NotifyViaLJInbox(watcher, emailcontent, url);
                NoteRecommendeds(watcher, targets);
                //NotifyViaLJBanner(watcher, emailcontent, url);
            }
        }
    }
    // still need to kill old events somewhere! at 110 days (rename prevention stops at 120)
}
// Stamp the error code onto the result dictionary, write a DB log entry for it,
// and hand the same dictionary back so the caller can return it directly.
Dictionary <string, object> ReportTheResultDict(Dictionary <string, object> dictResult, ErrorID IErrorID, int LogID)
{
    // Record the outcome on the payload that goes back to the caller.
    dictResult["Result"] = IErrorID;

    // Log both the enum's attribute text and its JSON-serialized value.
    string attributeText = IDMap.GetEnumAttribute(IErrorID);
    string serializedError = JsonConvert.SerializeObject(IErrorID);
    ReportDBLog(attributeText, serializedError, LogID);

    return dictResult;
}
/// <summary>
/// Builds the column maps for <typeparamref name="T"/> by reflecting over its public
/// read/write properties, populating AutonumberMap, PrimaryKeyMaps and StandardMaps.
/// </summary>
private void GetColumnMappingUsingReflection()
{
    Type domainType = typeof(T);

    foreach (PropertyInfo prop in domainType.GetProperties())
    {
        // Explicitly unmapped properties are never persisted.
        if (prop.IsDefined(typeof(NotMappedAttribute)))
        {
            continue;
        }

        // Optionally skip navigation-style helper properties by naming convention.
        // Ordinal comparison: these are code identifiers, not user-facing text.
        if (IgnoreObjectMaps && (prop.Name.EndsWith("_Object", StringComparison.Ordinal) || prop.Name.EndsWith("_Objects", StringComparison.Ordinal)))
        {
            continue;
        }

        // Only properties with a public getter AND a public setter are mapped.
        if (prop.GetMethod != null && prop.GetMethod.IsPublic && prop.SetMethod != null && prop.SetMethod.IsPublic)
        {
            // Column name defaults to the property name unless [Column] overrides it.
            string columnName = prop.Name;
            if (prop.IsDefined(typeof(ColumnAttribute)))
            {
                ColumnAttribute colAttribute = prop.GetCustomAttribute<ColumnAttribute>();
                if (!string.IsNullOrWhiteSpace(colAttribute.Name))
                {
                    columnName = colAttribute.Name;
                }
            }

            if (prop.IsDefined(typeof(AutonumberAttribute), true))
            {
                // Autonumber map: at most one per entity, and it doubles as a primary key.
                if (AutonumberMap != null)
                {
                    // InvalidOperationException (was bare Exception): more specific,
                    // still caught by any existing catch (Exception) handlers.
                    throw new InvalidOperationException("Entity " + domainType.Name + " cannot have more than one autonumber map.");
                }
                AutonumberMap = new AutonumberMap(prop.Name, columnName);
                PrimaryKeyMaps.Add(AutonumberMap);
            }
            else if (prop.IsDefined(typeof(KeyAttribute)))
            {
                // [Key] properties become ID (primary key) maps.
                IDMap idMap = new IDMap(prop.Name, columnName);
                PrimaryKeyMaps.Add(idMap);
            }
            else if (prop.IsDefined(typeof(CacheFilemanDateAttribute)))
            {
                // Cache Fileman dates need their specialized converter map.
                CacheFilemanDateMap filemanDateMap = new CacheFilemanDateMap(prop.Name, columnName);
                StandardMaps.Add(filemanDateMap);
            }
            else if (prop.IsDefined(typeof(CacheHorologDateAttribute)))
            {
                // Cache Horolog dates need their specialized converter map.
                CacheHorologDateMap horologDateMap = new CacheHorologDateMap(prop.Name, columnName);
                StandardMaps.Add(horologDateMap);
            }
            else
            {
                // Everything else is a plain one-to-one column map.
                StandardMaps.Add(new Map(prop.Name, columnName));
            }
        }
    }
}
// Find readership cliques within `members`, seeded on `iSeed`: load each member's
// read-list from FData, build a bitfield grid of in-set readership, and pass it to
// CliquesFromBits. Mutates `members` (re-adds the seed if absent).
public static List <List <int> > CustomGroupMain(Int32 iSeed, HashSet <Int32> members)
{
    MMDB.MakeSureDBIsOpen();

    // The seed must be part of the member set; put it back if the caller left it out.
    if (false == members.Contains(iSeed))
    {
        Console.WriteLine("I crammed the seed back into the set.");
        members.Add(iSeed);
    }

    // `everyone` fixes a slot (list index) per user; bitfield positions refer to these slots.
    List <int> everyone = new List <int>(members);

    // master[user] = the set of user IDs that `user` reads (from their FData snapshot).
    Dictionary <int, HashSet <int> > master = new Dictionary <int, HashSet <int> >();
    foreach (int iUserIRead in members)
    {
        string fd2 = FData.GetFData(IDMap.IDToName(iUserIRead));
        if (fd2 == null)
        {
            continue; // no FData (e.g. offline user) -- they get no entry in master
        }
        HashSet <int> whoTheyRead = FData.IDsInIReadFData(fd2);
        if (null == whoTheyRead)
        {
            whoTheyRead = new HashSet <int> { };
        }
        // Store the snapshot so the bitfields can be built below.
        master.Add(iUserIRead, whoTheyRead);
    }

    // Diagnostic: report members that hold a slot in `everyone` but have no readership data.
    // NOTE(review): such members still get a bitfield below, and master[iKey] will throw
    // KeyNotFoundException for them -- confirm whether this can actually occur in practice.
    foreach (int i in everyone)
    {
        if (false == master.ContainsKey(i))
        {
            Console.WriteLine("whoAllRead has no key: " + i);
        }
    }

    // Build one BitArray per user: bit j is set when the user reads everyone[j].
    SortedList eachToOthers = new SortedList(); // userid -> BitArray
    foreach (int i in everyone)
    {
        eachToOthers.Add(i, new BitArray(everyone.Count()));
    }

    foreach (int iKey in eachToOthers.Keys)
    {
        BitArray ba = (BitArray)eachToOthers[iKey];
        foreach (int iSomeUser in master[iKey]) // everyone iKey reads
        {
            // Only people inside the member set get a bit; outsiders are ignored.
            if (everyone.IndexOf(iSomeUser) != -1)
            {
                ba.Set(everyone.IndexOf(iSomeUser), true);
            }
        }
    }

    // Derive the cliques from the readership grid. The output could later be cached as hints.
    List <List <int> > sl = CliquesFromBits(eachToOthers, iSeed, everyone, null);
    return(sl);
}
}
// Find readership cliques for `seed`: fetch fresh FData for the seed, load the read-lists
// of everyone the seed reads (minus known-offline users), build a bitfield grid of in-set
// readership, and hand it to CliquesFromBits. Returns null when no FData exists for seed.
public static List <List <int> > TCliquesMain(string seed)
{
    MMDB.MakeSureDBIsOpen();
    int iUser = IDMap.NameToID(seed);

    string fd = FData.GetFData(seed, true); // force absolutely fresh fdata for the seed
    if (null == fd)
    {
        return(null); // no data, nothing we can do
    }

    HashSet <int> whoIRead = FData.IDsInIReadFData(fd);
    if (false == whoIRead.Contains(iUser)) // the seed always counts as reading themself
    {
        whoIRead.Add(iUser);
    }

    // Drop anyone known to be offline; the original list is clobbered by the trimmed one.
    HashSet <int> whoIReadTrimmed = new HashSet <int>();
    foreach (var v in whoIRead)
    {
        if (false == FData.KnownOffline(IDMap.IDToName(v)))
        {
            whoIReadTrimmed.Add(v);
        }
    }
    whoIRead = whoIReadTrimmed;
    whoIReadTrimmed = null;

    // `everyone` fixes a slot (list index) per user; bitfield positions refer to these slots.
    List <int> everyone = new List <int>(whoIRead);

AGAIN_WITHOUT_THE_DINKS: // re-entry point after the experimental culling pass below

    Debug.Assert(everyone.IndexOf(iUser) != -1); // the seed must always keep a slot

    // whoAllRead[user] = the set of user IDs that `user` reads.
    Dictionary <int, HashSet <int> > whoAllRead = new Dictionary <int, HashSet <int> >();
    whoAllRead.Add(iUser, whoIRead);
    foreach (int iUserIRead in whoIRead)
    {
        if (iUserIRead == iUser) // the seed was already added above
        {
            continue;
        }
        string fd2 = FData.GetFData(IDMap.IDToName(iUserIRead));
        if (fd2 == null)
        {
            continue; // no FData (e.g. offline) -- they get no whoAllRead entry
        }
        HashSet <int> whoTheyRead = FData.IDsInIReadFData(fd2);
        if (null == whoTheyRead)
        {
            whoTheyRead = new HashSet <int> { };
        }
        // Store the snapshot so the bitfields can be built below.
        whoAllRead.Add(iUserIRead, whoTheyRead);
    }

    Debug.Assert(-1 != everyone.IndexOf(iUser)); // sanity: seed still present

    // Diagnostic: report members that hold a slot in `everyone` but have no readership data.
    // NOTE(review): whoAllRead[iKey] below throws KeyNotFoundException for exactly these
    // members -- confirm whether this can actually occur in practice.
    foreach (int i in everyone)
    {
        if (false == whoAllRead.ContainsKey(i))
        {
            Console.WriteLine("whoAllRead has no key: " + i);
        }
    }

    Console.WriteLine("of course i only want this for f*****g biggies not for me!");

    // Experimental culling for very large sets: drop members who read fewer than two
    // people, then rebuild from the label above. The Debug.Assert(false) marks this path
    // as never-approved experimental code.
    if (whoAllRead.Count > 5000)
    {
        foreach (int iDude in whoAllRead.Keys)
        {
            HashSet <int> check = whoAllRead[iDude];
            // NOTE(review): the original intent was to repeat this until the subset is a
            // manageable size; a single pass may still leave the set too large.
            if (check.Count < 2)
            {
                everyone.Remove(iDude);
            }
        }
        Debug.Assert(false); // experimental two-way-er removal optimization, never approved
        goto AGAIN_WITHOUT_THE_DINKS;
    }

    // Build one BitArray per user: bit j is set when the user reads everyone[j].
    SortedList eachToOthers = new SortedList(); // userid -> BitArray
    foreach (int i in everyone)
    {
        eachToOthers.Add(i, new BitArray(everyone.Count()));
    }

    foreach (int iKey in eachToOthers.Keys)
    {
        BitArray ba = (BitArray)eachToOthers[iKey];
        foreach (int iSomeUser in whoAllRead[iKey]) // everyone iKey reads
        {
            // Only people inside the member set get a bit; outsiders are ignored.
            if (everyone.IndexOf(iSomeUser) != -1)
            {
                ba.Set(everyone.IndexOf(iSomeUser), true);
            }
        }
    }

    // Derive the cliques from the readership grid. The output could later be cached as hints.
    List <List <int> > sl = CliquesFromBits(eachToOthers, iUser, everyone, null);
    return(sl);
}
// Send a LiveJournal inbox message to the given user via the XML-RPC "sendmessage"
// API, then record (in the nameidmap table) the week this user's inbox was last hit.
// Tolerates a "fault" response only when caused by the recipient's privacy options.
static void NotifyViaLJInbox(Int32 toWhom, string content, string url)
{
    // Sender credentials: the account name is fixed, the password comes from the registry.
    // Fail loudly if the value is missing instead of NullReferenceException-ing on ToString().
    object pwd = Registry.GetValue("HKEY_CURRENT_USER\\Software\\MindMap", "LJFinderPwd", null);
    if (null == pwd)
    {
        throw new InvalidOperationException("LJFinderPwd registry value is missing; cannot send LJ inbox message.");
    }

    // Build the XML-RPC request document.
    XDocument xd = new XDocument(
        new XElement("methodCall",
            new XElement("methodName", "LJ.XMLRPC.sendmessage"),
            new XElement("params",
                new XElement("param",
                    new XElement("value",
                        new XElement("struct",
                            new XElement("member",
                                new XElement("name", "username"),
                                new XElement("value", new XElement("string", "ljfinder"))),
                            new XElement("member",
                                new XElement("name", "password"),
                                new XElement("value", new XElement("string", pwd.ToString()))),
                            new XElement("member",
                                new XElement("name", "subject"),
                                new XElement("value", new XElement("string", "New LJs and your MindMap at " + url))),
                            new XElement("member",
                                new XElement("name", "body"),
                                new XElement("value", new XElement("string", content))),
                            new XElement("member",
                                new XElement("name", "to"),
                                new XElement("value", new XElement("string", IDMap.IDToName(toWhom))))))))));

    // UTF-8 (was ASCII): the declaration "<?xml version=\"1.0\"?>" carries no encoding
    // attribute, so UTF-8 is the XML default, and ASCII would mangle any non-ASCII
    // subject/body text. No BOM -- the declaration is concatenated in front of the bytes.
    byte[] data = new UTF8Encoding(false).GetBytes("<?xml version=\"1.0\"?>" + xd.ToString());

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://www.livejournal.com/interface/xmlrpc");
    request.UserAgent = "http://ljmindmap.com/; [email protected]";
    request.Method = "POST";
    request.ContentType = "text/xml";
    request.ContentLength = data.Length;

    // Dispose the request and response streams deterministically (the original leaked them).
    using (Stream newStream = request.GetRequestStream())
    {
        newStream.Write(data, 0, data.Length);
    }

    string fd;
    using (WebResponse response = request.GetResponse())
    using (StreamReader sr = new StreamReader(response.GetResponseStream()))
    {
        fd = sr.ReadToEnd();
    }
    Console.WriteLine(fd);

    // "ault" matches both "fault" and "Fault". A fault is tolerated only when the
    // recipient's privacy options block messages; any other fault is unexpected.
    if (fd.Contains("ault"))
    {
        if (false == fd.Contains("privacy options"))
        {
            System.Diagnostics.Debug.Assert(false); // any other fault is unexpected
        }
    }

    // Remember when we last hit this user's inbox, in ~weeks since 2000-01-01.
    DateTime TwoK = new DateTime(2000, 1, 1); // 0x7d0 == 2000 in the original
    int weeks = (int)(DateTime.Now.Subtract(TwoK).Days / 7.02);
    MMDB.ExecuteNonQuery(string.Format("update nameidmap set inbox_hit_week={1} where id={0};", toWhom, weeks));
}
public ColorMapRenderer(IDMap map, Dictionary <int, Color> dict, SatelliteRenderer sat) { this.idmap = map; this.dictionary = dict; this.sat = sat; }