static void CheckModifiedFiles ()
{
    // Check databases following a bottom-up strategy in the dependency
    // tree. This will help resolving parsed classes.
    Set<ProjectDom> list = new Set<ProjectDom> ();
    lock (databases) {
        // There may be several uris for the same db
        foreach (ProjectDom ob in databases.Values)
            list.Add (ob);
    }

    Set<ProjectDom> done = new Set<ProjectDom> ();
    while (list.Count > 0) {
        ProjectDom readydb = null;
        ProjectDom bestdb = null;
        int bestRefCount = int.MaxValue;

        // Look for a db with all references resolved
        foreach (ProjectDom db in list) {
            bool allDone = true;
            foreach (ProjectDom refdb in db.References) {
                if (!done.Contains (refdb)) {
                    allDone = false;
                    break;
                }
            }
            if (allDone) {
                readydb = db;
                break;
            }
            else if (db.References.Count < bestRefCount) {
                // Keep scanning so we end up with the db that has the fewest references
                bestdb = db;
                bestRefCount = db.References.Count;
            }
        }

        // There may be no db with all references resolved if there are
        // circular dependencies. In that case, take the one with the
        // fewest references.
        if (readydb == null)
            readydb = bestdb;

        readydb.CheckModifiedFiles ();
        list.Remove (readydb);
        done.Add (readydb);
    }
}
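// The method above is essentially a topological traversal with a fallback for cycles.
// Below is a minimal, self-contained sketch of the same strategy against plain BCL
// collections; the names (DepNode, ProcessBottomUp) are hypothetical and not part of
// the original sources.
using System;
using System.Collections.Generic;
using System.Linq;

class DepNode
{
    public string Name;
    public List<DepNode> References = new List<DepNode>();
    public void Check() { Console.WriteLine("Checking " + Name); }
}

static class BottomUpSketch
{
    // Process nodes whose references have all been processed first; when a cycle
    // blocks progress, fall back to the pending node with the fewest references.
    public static void ProcessBottomUp(IEnumerable<DepNode> nodes)
    {
        var pending = new HashSet<DepNode>(nodes);
        var done = new HashSet<DepNode>();
        while (pending.Count > 0)
        {
            DepNode ready = pending.FirstOrDefault(n => n.References.All(done.Contains))
                ?? pending.OrderBy(n => n.References.Count).First();
            ready.Check();
            pending.Remove(ready);
            done.Add(ready);
        }
    }
}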
/// <summary>
/// Gets the set of ids of environments that are valid.
/// </summary>
/// <param name="cache">The FDO cache holding the language project to query.</param>
/// <returns>Set of ids of environments that are valid.</returns>
public static Set<int> ValidEnvironments(FdoCache cache)
{
    // Start with every environment in the phonological data.
    Set<int> set = new Set<int>(cache.LangProject.PhonologicalDataOA.EnvironmentsOS.HvoArray);

    // Remove any that have problem annotations.
    string sql = "SELECT env.Id "
        + "FROM CmBaseAnnotation_ ann "
        + "JOIN PhEnvironment env ON ann.BeginObject = env.Id";
    foreach (int id in DbOps.ReadIntsFromCommand(cache, sql, null))
        set.Remove(id);

    return set;
}
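// The pattern here is "start with every candidate, subtract the flagged ones".
// A minimal sketch with plain BCL types and no FDO or database dependencies;
// the names and data are illustrative only.
using System.Collections.Generic;

static class SetDifferenceSketch
{
    public static HashSet<int> Valid(IEnumerable<int> allIds, IEnumerable<int> problemIds)
    {
        var valid = new HashSet<int>(allIds); // every candidate
        valid.ExceptWith(problemIds);         // drop anything flagged with a problem
        return valid;
    }
}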
/// <summary>
/// Confirm that the set of cache rules in the given execution context includes the
/// expected ones; assertion failure if not.
/// </summary>
/// <param name="ctx">The execution context whose cache rules are verified.</param>
/// <param name="rules">The cache rules expected to be present.</param>
void VerifyCacheRules(ExecutionContext ctx, IList rules)
{
    // Flatten the context's rules into a set of leaf rules.
    Set set = new Set();
    foreach (CacheRule rule in ctx.CacheRules)
        set.AddRange(rule.AllLeafRules);

    foreach (CacheRule r in rules)
    {
        Assert.IsTrue(set.Contains(r), "Unable to find expected CacheRule (" + r.Description + ")");
        set.Remove(r); // each expected rule must be matched by a distinct leaf rule
    }
}
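// A minimal usage sketch of the contains-then-remove verification idiom above, using
// BCL types; the rule names are illustrative only. Removing each match means an
// expectation listed twice must be backed by two distinct entries, so accidental
// double-counting in the expectations fails fast.
using System.Collections.Generic;
using System.Diagnostics;

static class VerifySketch
{
    public static void VerifyAll(IEnumerable<string> actualLeafRules, IEnumerable<string> expected)
    {
        var set = new HashSet<string>(actualLeafRules);
        foreach (string rule in expected)
        {
            Debug.Assert(set.Contains(rule), "Unable to find expected rule (" + rule + ")");
            set.Remove(rule);
        }
    }
}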
protected override IEnumerable<FeatureVectorList> ExtractFeatureVectors(Prediction prediction, bool training, DateTime start, DateTime end)
{
    Dictionary<TimeSliceFeature, string> featureId = new Dictionary<TimeSliceFeature, string>();
    foreach (Feature feature in Features.Where(f => f.EnumType == typeof(TimeSliceFeature)))
        featureId.Add((TimeSliceFeature)feature.EnumValue, feature.Id);

    List<TimeSliceFeature> threeHourIntervals = new List<TimeSliceFeature>(new TimeSliceFeature[] {
        TimeSliceFeature.LateNight, TimeSliceFeature.EarlyMorning, TimeSliceFeature.Morning, TimeSliceFeature.MidMorning,
        TimeSliceFeature.Afternoon, TimeSliceFeature.MidAfternoon, TimeSliceFeature.Evening, TimeSliceFeature.Night });

    int processorCount = Configuration.ProcessorCount;
    Configuration.ProcessorCount = 1; // all sub-threads (e.g., those in FeatureBasedDCM) should use 1 core, since we're multi-threading here
    Set<Thread> threads = new Set<Thread>(processorCount);

    long firstSlice = (long)((training ? prediction.Model.TrainingStart.Ticks : prediction.PredictionStartTime.Ticks) / _timeSliceTicks);
    long lastSlice = (long)((training ? prediction.Model.TrainingEnd.Ticks : prediction.PredictionEndTime.Ticks) / _timeSliceTicks);
    long ticksPerHour = new TimeSpan(1, 0, 0).Ticks;

    List<FeatureVectorList> completeFeatureVectorLists = new List<FeatureVectorList>();
    List<FeatureVectorList> incompleteFeatureVectorLists = new List<FeatureVectorList>();
    AutoResetEvent emitCompleteFeatureVectorLists = new AutoResetEvent(false);
    IFeatureExtractor externalFeatureExtractor = InitializeExternalFeatureExtractor(typeof(TimeSliceDCM));

    for (int i = 0; i < processorCount; ++i)
    {
        Thread t = new Thread(new ParameterizedThreadStart(core =>
            {
                for (long slice = firstSlice + (int)core; slice <= lastSlice; slice += processorCount)
                {
                    Console.Out.WriteLine("Processing slice " + (slice - firstSlice + 1) + " of " + (lastSlice - firstSlice + 1));

                    DateTime sliceStart = new DateTime(slice * _timeSliceTicks);
                    DateTime sliceEnd = sliceStart.Add(new TimeSpan(_timeSliceTicks - 1));
                    DateTime sliceMid = new DateTime((sliceStart.Ticks + sliceEnd.Ticks) / 2L);

                    #region get interval features that are true for all points in the current slice
                    Dictionary<NumericFeature, int> threeHourIntervalFeatureValue = new Dictionary<NumericFeature, int>();
                    int startingThreeHourInterval = sliceStart.Hour / 3;                                                 // which 3-hour interval does the current slice start in?
                    int threeHourIntervalsTouched = (int)(((sliceEnd.Ticks - sliceStart.Ticks) / ticksPerHour) / 3) + 1; // how many 3-hour intervals does the current slice touch?
                    int endingThreeHourInterval = startingThreeHourInterval + threeHourIntervalsTouched - 1;             // which 3-hour interval does the current slice end in?
                    for (int k = 0; k < threeHourIntervals.Count; ++k)
                    {
                        TimeSliceFeature threeHourInterval = threeHourIntervals[k];
                        string id;
                        if (featureId.TryGetValue(threeHourInterval, out id)) // if the current model uses the current 3-hour interval as a feature
                        {
                            bool covered = false;
                            for (int interval = startingThreeHourInterval; !covered && interval <= endingThreeHourInterval; ++interval)
                                if (interval % 8 == k)
                                    covered = true;

                            threeHourIntervalFeatureValue.Add(IdNumericFeature[id], covered ? 1 : 0);
                        }
                    }
                    #endregion

                    #region extract feature vectors
                    foreach (FeatureVectorList featureVectors in base.ExtractFeatureVectors(prediction, training, sliceStart, sliceEnd))
                    {
                        if (!featureVectors.Complete)
                            throw new Exception("Incomplete feature vectors received from base class extractor");

                        Console.Out.WriteLine("Extracting " + featureId.Count + " time slice features for " + featureVectors.Count + " points.");

                        foreach (FeatureVector featureVector in featureVectors)
                        {
                            Point point = featureVector.DerivedFrom as Point;
                            if (point.Time == DateTime.MinValue)
                                point.Time = sliceMid;
                            else if ((long)(point.Time.Ticks / _timeSliceTicks) != slice)
                                throw new Exception("Point should not be in slice: " + point);

                            foreach (LAIR.MachineLearning.NumericFeature threeHourIntervalFeature in threeHourIntervalFeatureValue.Keys)
                                featureVector.Add(threeHourIntervalFeature, threeHourIntervalFeatureValue[threeHourIntervalFeature]);

                            double percentThroughPeriod = (slice % _periodTimeSlices) / (double)(_periodTimeSlices - 1);
                            double radians = 2 * Math.PI * percentThroughPeriod;
                            foreach (TimeSliceFeature feature in featureId.Keys)
                                if (feature == TimeSliceFeature.CosinePeriodPosition)
                                    featureVector.Add(IdNumericFeature[featureId[feature]], Math.Cos(radians));
                                else if (feature == TimeSliceFeature.SinePeriodPosition)
                                    featureVector.Add(IdNumericFeature[featureId[feature]], Math.Sin(radians));
                        }

                        if (externalFeatureExtractor == null)
                            lock (completeFeatureVectorLists)
                            {
                                completeFeatureVectorLists.Add(featureVectors);
                                emitCompleteFeatureVectorLists.Set();
                            }
                        else
                            foreach (FeatureVectorList externalFeatureVectors in externalFeatureExtractor.ExtractFeatures(prediction, featureVectors, training, sliceStart, sliceEnd, false))
                                if (externalFeatureVectors.Complete)
                                    lock (completeFeatureVectorLists)
                                    {
                                        completeFeatureVectorLists.Add(externalFeatureVectors);
                                        emitCompleteFeatureVectorLists.Set();
                                    }
                                else
                                    lock (incompleteFeatureVectorLists)
                                        incompleteFeatureVectorLists.Add(externalFeatureVectors);
                    }
                    #endregion
                }

                lock (threads)
                    threads.Remove(Thread.CurrentThread);

                emitCompleteFeatureVectorLists.Set();
            }));

        lock (threads) { threads.Add(t); }

        t.Start(i);
    }

    while (emitCompleteFeatureVectorLists.WaitOne())
    {
        lock (completeFeatureVectorLists)
        {
            foreach (FeatureVectorList completeFeatureVectors in completeFeatureVectorLists)
                yield return completeFeatureVectors;

            completeFeatureVectorLists.Clear();
        }

        lock (threads)
            if (threads.Count == 0)
                break;
    }

    // emit any remaining completed vectors, which might have arrived just before the last thread was removed (breaking out of the loop above)
    foreach (FeatureVectorList completeFeatureVectors in completeFeatureVectorLists)
        yield return completeFeatureVectors;

    completeFeatureVectorLists.Clear();

    Configuration.ProcessorCount = processorCount; // reset system-wide processor count since we're done with threads here

    foreach (FeatureVectorList incompleteFeatureVectors in incompleteFeatureVectorLists)
        foreach (FeatureVectorList externalFeatureVectors in externalFeatureExtractor.ExtractFeatures(prediction, incompleteFeatureVectors, training, start, end, true))
            yield return externalFeatureVectors;
}
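// The concurrency skeleton above is worth isolating: worker threads append results to a
// shared list and pulse an AutoResetEvent; the iterator thread drains the list on each
// pulse and stops once all workers have removed themselves. Below is a stripped-down
// sketch of that pattern (not from the original sources; names are hypothetical).
using System.Collections.Generic;
using System.Threading;

static class ProducerConsumerSketch
{
    public static IEnumerable<int> Produce(int workerCount, int itemsPerWorker)
    {
        var results = new List<int>();
        var workers = new HashSet<Thread>();
        var emit = new AutoResetEvent(false);

        for (int i = 0; i < workerCount; ++i)
        {
            var t = new Thread(core =>
            {
                for (int item = 0; item < itemsPerWorker; ++item)
                {
                    lock (results) { results.Add((int)core * itemsPerWorker + item); }
                    emit.Set();
                }
                lock (workers) { workers.Remove(Thread.CurrentThread); }
                emit.Set(); // wake the consumer so it can notice this worker is done
            });
            lock (workers) { workers.Add(t); }
            t.Start(i);
        }

        while (emit.WaitOne())
        {
            lock (results)
            {
                foreach (int r in results)
                    yield return r;
                results.Clear();
            }
            lock (workers)
                if (workers.Count == 0)
                    break;
        }

        // drain anything that arrived between the last pulse and the loop exit
        lock (results)
            foreach (int r in results)
                yield return r;
    }
}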
void Analyze()
{
    // Build the ocean
    //	- an ocean (set) of islands (set)
    //	- also a hash for TopicAnalysis (topic -> {island set, refcount}) for a quick check if already present

    ocean = new Set();
    topicToTopicAnalysis = new Hashtable();
    referenceMap = _namespaceManager.GetReferenceMap(ExistencePolicy.ExistingOnly);

    foreach (string outerTopic in referenceMap.Keys)
    {
        // Response.Write("Consider: " + outerTopic + "<br>");
        Set islands = new Set();
        QualifiedTopicRevisionCollection linkedTopics = referenceMap[outerTopic];
        // Response.Write("Linked topics count: " + linkedTopics.Count + "<br>");

        TopicAnalysis outerTopicAnalysis = (TopicAnalysis)(topicToTopicAnalysis[outerTopic]);
        if (outerTopicAnalysis == null)
        {
            outerTopicAnalysis = new TopicAnalysis();
            topicToTopicAnalysis[outerTopic] = outerTopicAnalysis;
            // Response.Write("Creating info for " + outerTopic.Name + "<br>");
        }
        else
        {
            // Response.Write("Found existing info for " + outerTopic.Name + "<br>");
            // Response.Write("[island = " + outerTopicAnalysis.Island + "<br>");
        }

        if (outerTopicAnalysis.Island != null)
            islands.Add(outerTopicAnalysis.Island);

        //	- foreach outer topic
        //		islands = new set
        //		foreach linked topic
        //			increment refcount for linked topic
        //			if (linkedtopic is on an island)
        //				islands add that island

        Set inNamespaceLinks = new Set();
        foreach (QualifiedTopicRevision linkedTopic in linkedTopics)
        {
            // Only analyze links in this namespace
            if (linkedTopic.Namespace != _namespaceManager.Namespace)
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because namespace doesn't match<br>");
                continue;
            }
            // Only do each topic once; have we seen this one?
            if (inNamespaceLinks.Contains(linkedTopic))
            {
                // Response.Write("Skipping linked topic (" + linkedTopic.Name + ") because seen before<br>");
                continue;
            }
            // Skip self-references
            if (linkedTopic.Equals(outerTopic))
            {
                continue;
            }

            inNamespaceLinks.Add(linkedTopic);
            TopicAnalysis linkedTopicAnalysis = (TopicAnalysis)(topicToTopicAnalysis[linkedTopic]);
            if (linkedTopicAnalysis == null)
            {
                linkedTopicAnalysis = new TopicAnalysis();
                topicToTopicAnalysis[linkedTopic] = linkedTopicAnalysis;
                // Response.Write("Creating info for " + linkedTopic.Name + "<br>");
            }
            else
            {
                // Response.Write("Found existing info for " + linkedTopic.Name + "<br>");
            }
            linkedTopicAnalysis.RefCount++;
            if (linkedTopicAnalysis.Island != null)
                islands.Add(linkedTopicAnalysis.Island);
        }

        //	- if (islands is empty)
        //		create a new island
        //		add the outer topic and all linked topics
        //	- else if (islands size == 1)
        //		add all links and the outer topic to that island
        //	- else
        //		// need to merge islands
        //		newset = merged set of all islands
        //		update all the TopicAnalyses and replace each of the old islands with the new island

        Set newIsland;
        if (islands.Count == 1)
            newIsland = (Set)(islands.First); // if there's only one, we can just use that one
        else
        {
            newIsland = new Set();
            ocean.Add(newIsland);
        }
        // Add the outer topic and the linkedTopics
        newIsland.Add(outerTopic);
        outerTopicAnalysis.Island = newIsland;
        foreach (QualifiedTopicRevision linkedTopic in inNamespaceLinks)
        {
            newIsland.Add(linkedTopic);
            ((TopicAnalysis)(topicToTopicAnalysis[linkedTopic])).Island = newIsland;
            // Response.Write("Placing " + linkedTopic.Name + "<br>");
        }
        // Now merge if there was originally more than one island
        if (islands.Count > 1)
        {
            foreach (Set eachIsland in islands)
            {
                foreach (object o in eachIsland)
                    newIsland.Add(o);
                ocean.Remove(eachIsland);
                // Now update all the pointers from the TopicAnalyses
                // (islands hold both topic-name strings and revisions, so iterate as object)
                foreach (object eachTopic in eachIsland)
                    ((TopicAnalysis)(topicToTopicAnalysis[eachTopic])).Island = newIsland;
            }
        }
    }
}
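// The island logic above is an incremental connected-components computation over the
// link graph. A minimal sketch (not from the original sources) of the same merge step
// with BCL types; the TopicAnalysis bookkeeping is omitted and the names are hypothetical.
using System.Collections.Generic;

static class IslandSketch
{
    // islandOf maps each seen topic to its island; ocean is the set of islands.
    public static void Place(string topic, IEnumerable<string> links,
        Dictionary<string, HashSet<string>> islandOf, List<HashSet<string>> ocean)
    {
        // Collect every existing island touched by this topic or its links.
        var touched = new List<HashSet<string>>();
        HashSet<string> own;
        if (islandOf.TryGetValue(topic, out own) && !touched.Contains(own))
            touched.Add(own);
        foreach (string link in links)
        {
            HashSet<string> island;
            if (islandOf.TryGetValue(link, out island) && !touched.Contains(island))
                touched.Add(island);
        }

        // Reuse the single touched island, or start a fresh one and merge the rest into it.
        HashSet<string> merged = touched.Count == 1 ? touched[0] : new HashSet<string>();
        if (touched.Count != 1)
            ocean.Add(merged);
        merged.Add(topic);
        islandOf[topic] = merged;
        foreach (string link in links)
        {
            merged.Add(link);
            islandOf[link] = merged;
        }
        if (touched.Count > 1)
            foreach (var island in touched)
            {
                foreach (string member in island)
                {
                    merged.Add(member);
                    islandOf[member] = merged; // repoint members of the absorbed island
                }
                ocean.Remove(island);
            }
    }
}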
private static void DumpGroups(IGraph graph, Set<INode> nodes, DumpContext dc)
{
    // Compute the nesting hierarchy (groups)
    Dictionary<INode, DumpGroupNode> groupNodes = new Dictionary<INode, DumpGroupNode>();
    Dictionary<INode, INode> containedIn = new Dictionary<INode, INode>();
    Set<INode> groupedNodes = new Set<INode>();

    // (by iterating the group node types in order of dump declaration and removing the
    // iterated nodes from the available nodes, the conflict resolution priorities of
    // debug enable are taken care of)
    foreach(GroupNodeType groupNodeType in dc.DumpInfo.GroupNodeTypes)
    {
        foreach(INode node in graph.GetCompatibleNodes(groupNodeType.NodeType))
        {
            if(nodes.Contains(node))
            {
                if(!groupNodes.ContainsKey(node))
                    groupNodes.Add(node, new DumpGroupNode()); // todo: is the check needed?
                nodes.Remove(node);
            }
            if(dc.DumpInfo.IsExcludedNodeType(node.Type))
                continue;
            foreach(IEdge edge in node.Incoming)
            {
                GroupMode grpMode = groupNodeType.GetEdgeGroupMode(edge.Type, edge.Source.Type);
                if((grpMode & GroupMode.GroupIncomingNodes) == 0)
                    continue;
                if(!dc.Nodes.Contains(edge.Source))
                    continue;
                groupNodes[node].groupedNodes.Add(edge.Source);
                if(!containedIn.ContainsKey(edge.Source))
                    containedIn.Add(edge.Source, node); // without the check this crashes on multiple containment caused by a dump misspecification by the user
                groupedNodes.Add(edge.Source);
                if((grpMode & GroupMode.Hidden) != 0)
                    dc.ExcludedEdges.Add(edge);
            }
            foreach(IEdge edge in node.Outgoing)
            {
                GroupMode grpMode = groupNodeType.GetEdgeGroupMode(edge.Type, edge.Target.Type);
                if((grpMode & GroupMode.GroupOutgoingNodes) == 0)
                    continue;
                if(!dc.Nodes.Contains(edge.Target))
                    continue;
                groupNodes[node].groupedNodes.Add(edge.Target);
                if(!containedIn.ContainsKey(edge.Target))
                    containedIn.Add(edge.Target, node); // without the check this crashes on multiple containment caused by a dump misspecification by the user
                groupedNodes.Add(edge.Target);
                if((grpMode & GroupMode.Hidden) != 0)
                    dc.ExcludedEdges.Add(edge);
            }
        }
    }

    // Dump the groups (begin at the roots of the group trees)
    foreach(KeyValuePair<INode, DumpGroupNode> groupNode in groupNodes)
    {
        if(!containedIn.ContainsKey(groupNode.Key))
        {
            DumpGroupTree(groupNode.Key, groupNodes, dc);
            DumpEdgesFromNode(groupNode.Key, dc);
        }
    }

    // Dump the rest, which has not been grouped
    nodes.Remove(groupedNodes);
    foreach(INode node in nodes)
    {
        DumpNodeAndEdges(node, dc);
    }
}
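// Dumping begins at the roots of the containment forest, i.e. group nodes that are not
// themselves contained in another group. A minimal sketch of that root selection over a
// containedIn map; the names here are hypothetical and the graph types are reduced to strings.
using System.Collections.Generic;

static class GroupRootsSketch
{
    public static IEnumerable<string> Roots(IEnumerable<string> groups,
        Dictionary<string, string> containedIn)
    {
        foreach (string group in groups)
            if (!containedIn.ContainsKey(group)) // no parent group: a root of a group tree
                yield return group;
    }
}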