Example #1
        /// <summary>
        /// Checks whether this weighted network is identical to a given one (in terms of node names, edges and weights)
        /// </summary>
        /// <param name="obj">The object to compare this weighted network to</param>
        /// <returns>True if both networks contain the same edges with identical weights, false otherwise</returns>
        public override bool Equals(object obj)
        {
            if (obj == null || obj.GetType() != this.GetType())
            {
                return(false);
            }

            WeightedNetwork temp = obj as WeightedNetwork;

            foreach (var edge in temp.Edges)
            {
                if (!this.ContainsKey(edge))
                {
                    return(false);
                }
                if (this[edge] != temp[edge])
                {
                    return(false);
                }
            }

            foreach (var edge in this.Edges)
            {
                if (!temp.ContainsKey(edge))
                {
                    return(false);
                }
                if (temp[edge] != this[edge])
                {
                    return(false);
                }
            }

            return(true);
        }
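For reference, a minimal usage sketch of this equality check. It assumes the parameterless WeightedNetwork constructor and the AddEdge(string, string) overload that appear in the FromTemporalNetwork example further below; the node names are hypothetical.

        // Usage sketch: two networks with identical edges and weights compare equal
        WeightedNetwork a = new WeightedNetwork();
        WeightedNetwork b = new WeightedNetwork();
        a.AddEdge("x", "y");
        b.AddEdge("x", "y");
        Console.WriteLine(a.Equals(b));   // True: same edge, same aggregated weight
        b.AddEdge("y", "z");
        Console.WriteLine(a.Equals(b));   // False: b contains an edge that a lacks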
        /// <summary>
        /// Computes a normalized version P of a given betweenness preference matrix B.
        /// </summary>
        /// <param name="x">The node for which the normalized matrix is computed</param>
        /// <param name="aggregate_net">The weighted aggregate network</param>
        /// <param name="B">The betweenness preference matrix that shall be normalized</param>
        /// <returns>A normalized version of the betweenness preference matrix B</returns>
        public static double[,] NormalizeMatrix(string x, WeightedNetwork aggregate_net, double[,] B)
        {
            // Normalize the matrix (i.e. this is equation (3))
            double[,] P = new double[aggregate_net.GetIndeg(x), aggregate_net.GetOutdeg(x)];

            double sum = 0d;

            for (int s = 0; s < aggregate_net.GetIndeg(x); s++)
            {
                for (int d = 0; d < aggregate_net.GetOutdeg(x); d++)
                {
                    sum += B[s, d];
                }
            }

            if (sum > 0d)
            {
                for (int s = 0; s < aggregate_net.GetIndeg(x); s++)
                {
                    for (int d = 0; d < aggregate_net.GetOutdeg(x); d++)
                    {
                        P[s, d] = B[s, d] / sum;
                    }
                }
            }
            return(P);
        }
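To make the normalization in equation (3) concrete: every entry of B is divided by the sum of all entries, so the entries of P sum to one. A self-contained sketch with a hypothetical 2x2 matrix:

        // Self-contained illustration of the normalization step (hypothetical values)
        double[,] B = { { 2d, 1d }, { 0d, 1d } };    // raw betweenness preference matrix
        double sum = 0d;
        foreach (double b in B)                      // iterates over all four entries, sum = 4
        {
            sum += b;
        }
        double[,] P = new double[2, 2];
        for (int s = 0; s < 2; s++)
        {
            for (int d = 0; d < 2; d++)
            {
                P[s, d] = B[s, d] / sum;             // e.g. P[0,0] = 0.5; the entries of P sum to 1
            }
        }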
Example #3
        /// <summary>
        /// Computes the scalar betweenness preference of a node based on its normalized betweenness preference matrix
        /// </summary>
        /// <param name="aggregate_net">The weighted aggregate network based on which betweenness preference is computed</param>
        /// <param name="x">The node for which to compute betweenness preference</param>
        /// <param name="P">The betweenness preference matrix based on which betw. pref. will be computed</param>
        /// <param name="normalized">Whether to normalize the mutual information by the minimum of the marginal entropies H_s and H_d</param>
        /// <returns>The betweenness preference, defined as the mutual information of the source and target of two-paths</returns>
        public static double GetBetweennessPref(WeightedNetwork aggregate_net, string x, double[,] P, bool normalized = false)
        {
            // If the network is empty, just return zero
            if (aggregate_net.VertexCount == 0)
            {
                return(0d);
            }

            // Compute the mutual information (i.e. betweenness preference)
            double I = 0;

            int indeg  = aggregate_net.GetIndeg(x);
            int outdeg = aggregate_net.GetOutdeg(x);

            double[] marginal_s = new double[indeg];
            double[] marginal_d = new double[outdeg];

            // Marginal probabilities P_d = \sum_s'{P_{s'd}}
            for (int d = 0; d < outdeg; d++)
            {
                double P_d = 0d;
                for (int s_prime = 0; s_prime < indeg; s_prime++)
                {
                    P_d += P[s_prime, d];
                }
                marginal_d[d] = P_d;
            }

            // Marginal probabilities P_s = \sum_d'{P_{sd'}}
            for (int s = 0; s < indeg; s++)
            {
                double P_s = 0d;
                for (int d_prime = 0; d_prime < outdeg; d_prime++)
                {
                    P_s += P[s, d_prime];
                }
                marginal_s[s] = P_s;
            }

            double H_s = Entropy(marginal_s);
            double H_d = Entropy(marginal_d);

            // Here we just compute equation (4) of the paper ...
            for (int s = 0; s < indeg; s++)
            {
                for (int d = 0; d < outdeg; d++)
                {
                    // Mutual information; entries with P[s, d] == 0 contribute nothing since 0 * log(0) = 0
                    if (P[s, d] != 0)
                    {
                        I += P[s, d] * Math.Log(P[s, d] / (marginal_s[s] * marginal_d[d]), 2d);
                    }
                }
            }

            return(normalized ? I / Math.Min(H_s, H_d) : I);
        }
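Equation (4) is the mutual information of the source and target of two-paths, so it vanishes whenever the normalized matrix factorizes into its marginals. A self-contained check with hypothetical marginals:

        // Hypothetical matrix that factorizes as P[s,d] = P_s * P_d, so I must be 0
        double[] marginal_s = { 0.25, 0.75 };
        double[] marginal_d = { 0.5, 0.5 };
        double I = 0d;
        for (int s = 0; s < 2; s++)
        {
            for (int d = 0; d < 2; d++)
            {
                double p = marginal_s[s] * marginal_d[d];
                if (p != 0)
                {
                    I += p * Math.Log(p / (marginal_s[s] * marginal_d[d]), 2d);   // log2(1) = 0 for every term
                }
            }
        }
        // I == 0: the source and target of two-paths are statistically independent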
        /// <summary>
        /// Computes the baseline betweenness preference matrix of a node under the assumption
        /// that the temporal network does not contain a betweenness preference correlation. This corresponds to
        /// equation (5) in the paper.
        /// </summary>
        /// <param name="v">The node to compute the baseline betweenness preference for</param>
        /// <param name="aggregate_net">The weighted, aggregate ego network of node v based on which the matrix will be computed</param>
        /// <param name="index_pred">Indices of predecessor nodes in the betweenness preference matrix</param>
        /// <param name="index_succ">Indices of successor nodes in the betweenness preference matrix</param>
        /// <returns>The normalized baseline betweenness preference matrix P; its entries sum to one by construction</returns>
        public static double[,] GetUncorrelatedBetweennessPrefMatrix(WeightedNetwork aggregate_net, string v, out Dictionary <string, int> index_pred, out Dictionary <string, int> index_succ)
        {
            // Use a mapping of indices to node labels
            index_pred = new Dictionary <string, int>();
            index_succ = new Dictionary <string, int>();

            // Create an empty matrix
            double[,] P = new double[aggregate_net.GetIndeg(v), aggregate_net.GetOutdeg(v)];

            // Create the index-to-node mapping
            int i = 0;

            foreach (string u in aggregate_net.GetPredecessors(v))
            {
                index_pred[u] = i++;
            }

            i = 0;
            foreach (string w in aggregate_net.GetSuccessors(v))
            {
                index_succ[w] = i++;
            }

            // Sum over the weights of all source nodes
            double sum_source_weights = 0d;

            foreach (string s_prime in aggregate_net.GetPredecessors(v))
            {
                sum_source_weights += aggregate_net.GetWeight(s_prime, v);
            }

            // Normalization factor for d
            double sum_dest_weights = 0d;

            foreach (string d_prime in aggregate_net.GetSuccessors(v))
            {
                sum_dest_weights += aggregate_net.GetWeight(v, d_prime);
            }

            double min_p = double.MaxValue;

            // Equation (5) in the paper
            foreach (string s in aggregate_net.GetPredecessors(v))
            {
                foreach (string d in aggregate_net.GetSuccessors(v))
                {
                    P[index_pred[s], index_succ[d]] = (aggregate_net.GetWeight(s, v) / sum_source_weights) * (aggregate_net.GetWeight(v, d) / sum_dest_weights);

                    min_p = Math.Min(P[index_pred[s], index_succ[d]], min_p);
                }
            }
            return(P);
        }
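A brief usage sketch combining this baseline with GetBetweennessPref above. It assumes that both static methods live on the BetweennessPref class referenced in Example #9 below, and that a WeightedNetwork aggregate_net containing a node "v" is already in scope.

        // Usage sketch: betweenness preference of the uncorrelated baseline
        Dictionary<string, int> index_pred, index_succ;
        double[,] P0 = BetweennessPref.GetUncorrelatedBetweennessPrefMatrix(aggregate_net, "v", out index_pred, out index_succ);
        double I0 = BetweennessPref.GetBetweennessPref(aggregate_net, "v", P0);
        // P0 factorizes into its marginals by construction, so I0 is zero up to floating-point error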
Example #5
        /// <summary>
        /// Saves a weighted aggregate network to a file in the edge format
        /// </summary>
        /// <param name="path">The path of the file to write to</param>
        /// <param name="net">The weighted aggregate network to save</param>
        public static void SaveToFile(string path, WeightedNetwork net)
        {
            StringBuilder sb = new StringBuilder();

            sb.AppendLine("source target weight");

            foreach (var edge in net.Edges)
            {
                sb.AppendLine(string.Format("{0} {1} {2}", edge.Item1, edge.Item2, string.Format(System.Globalization.CultureInfo.GetCultureInfo("en-US").NumberFormat, "{0:0.000000}", net[edge])));
            }

            System.IO.File.WriteAllText(path, sb.ToString());
        }
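A minimal usage sketch; it assumes that SaveToFile and FromTemporalNetwork (Example #6) are both static members of WeightedNetwork, which this excerpt does not show, and the file name is hypothetical. The resulting file is a whitespace-separated edge list starting with the header line "source target weight".

        // Usage sketch: aggregate a temporal network and write it to disk
        WeightedNetwork net = WeightedNetwork.FromTemporalNetwork(temp_net);
        WeightedNetwork.SaveToFile("aggregate.edges", net);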
Example #6
        /// <summary>
        /// Creates a weighted network representation of a temporal network by aggregating edge occurrences
        /// (and thus discarding information on the temporal ordering of edges)
        /// </summary>
        /// <param name="temp_net">The temporal network that shall be aggregated</param>
        /// <returns>An instance of a weighted aggregate network</returns>
        public static WeightedNetwork FromTemporalNetwork(TemporalNetwork temp_net)
        {
            WeightedNetwork weighted_net = new WeightedNetwork();

            foreach (var t in temp_net.Keys)
            {
                foreach (Tuple <string, string> edge in temp_net[t])
                {
                    weighted_net.AddEdge(edge.Item1, edge.Item2);
                }
            }
            return(weighted_net);
        }
Example #7
        /// <summary>
        /// Adds a single edge between two nodes to the temporal network at the given time step
        /// </summary>
        /// <param name="time">the time step at which the edge occurs</param>
        /// <param name="v">the source node of an edge</param>
        /// <param name="w">the target node of an edge</param>
        /// <param name="weight">the weight of the temporal edge (defaults to 1)</param>
        public void AddTemporalEdge(int time, string v, string w, int weight = 1)
        {
            if (!this.ContainsKey(time))
            {
                this[time] = new List <Tuple <string, string> >();
            }
            this[time].Add(new Tuple <string, string>(v, w));
            _tempEdgeWeights[time] = weight;

            // Invalidate previously preprocessed data
            _cachedWeightedNetwork = null;
            _twoPathsByNode        = null;
        }
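A small sketch tying Examples #6 and #7 together: build a toy temporal network and aggregate it. It assumes a parameterless TemporalNetwork constructor; node names and time stamps are hypothetical.

        // Toy example: a -> b at t = 1 and b -> c at t = 2, so (a,b,c) forms a two-path
        TemporalNetwork temp_net = new TemporalNetwork();
        temp_net.AddTemporalEdge(1, "a", "b");
        temp_net.AddTemporalEdge(2, "b", "c");
        WeightedNetwork aggregate = WeightedNetwork.FromTemporalNetwork(temp_net);
        // aggregate now contains the edges (a,b) and (b,c)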
Example #8
        /// <summary>
        /// This method extracts all two-paths from the sequence of edges in the temporal network (two-paths according to eq. (1) of the paper).
        /// It will also extract the correct (statistical) weights for both the two-paths and the edges in the aggregate network.
        /// After this method returns, the weighted TwoPaths list as well as the weighted AggregateNetwork are available.
        /// If an explicit call to this preprocessing is omitted, it will be triggered whenever the AggregateNetwork or the TwoPaths dictionary
        /// is used for the first time. Changing the temporal network (i.e. adding or removing an edge) invalidates both, so this method has to be called
        /// again.
        /// </summary>
        /// <seealso cref="TwoPathsByNode"/>
        /// <seealso cref="AggregateNetwork"/>
        public void ReduceToTwoPaths()
        {
            _twoPathsByNode      = new Dictionary <string, Dictionary <int, List <Tuple <string, string> > > >();
            _twoPathWeights      = new Dictionary <string, double>();
            _twoPathsByStartTime = new Dictionary <int, List <string> >();
            var two_path_edges = new Dictionary <int, List <Tuple <string, string> > >();

            int prev_t = -1;

            var ordered_time = Keys.OrderBy(k => k, new CompareInts());

            // Walk through time ...
            foreach (int t in ordered_time)
            {
                if (prev_t == -1)
                {
                    prev_t = t; // We skip the first time step and just set the prev_t index ...
                }
                else
                {
                    // N.B.: Only two-paths consisting of edges in time steps immediately following each other are found
                    // N.B.: We also account for multiple edges happening at the same time, i.e. multiple two-paths can pass through a node at a given time t!
                    // N.B.: For three consecutive edges (a,b), (b,c), (c,d) , two two-paths (a,b,c) and (b,c,d) will be found
                    foreach (var in_edge in this[prev_t])
                    {
                        foreach (var out_edge in this[t])
                        {
                            // In this case, we found the two_path (in_edge) -> (out_edge) = (s,v) -> (v,d)
                            if (in_edge.Item2 == out_edge.Item1)
                            {
                                // Use notation from the paper
                                string s = in_edge.Item1;
                                string v = in_edge.Item2;
                                string d = out_edge.Item2;

                                string two_path = s + "," + v + "," + d;

                                double indeg_v  = 0d;
                                double outdeg_v = 0d;

                                indeg_v = (from x in this[prev_t].AsParallel() where x.Item2 == v select x).Count();

                                //foreach (var edge in this[prev_t])
                                //   if (edge.Item2 == v)
                                //       indeg_v++;

                                outdeg_v = (from x in this[t].AsParallel() where x.Item1 == v select x).Count();

                                //foreach (var edge in this[t])
                                //   if (edge.Item1 == v)
                                //      outdeg_v++;

                                if (!_twoPathWeights.ContainsKey(two_path))
                                {
                                    _twoPathWeights[two_path] = 0d;
                                }

                                _twoPathWeights[two_path] += 1d / (indeg_v * outdeg_v);

                                if (!two_path_edges.ContainsKey(prev_t))
                                {
                                    two_path_edges[prev_t] = new List <Tuple <string, string> >();
                                }
                                if (!two_path_edges.ContainsKey(t))
                                {
                                    two_path_edges[t] = new List <Tuple <string, string> >();
                                }

                                // Important: In the reduced temporal network, we only use edges belonging to two-paths. Each edge is added only once,
                                // even if it belongs to several two-paths (this is the case for continued two-paths as well as for two-paths with
                                // multiple edges in one time step).
                                if (!two_path_edges[prev_t].Contains(in_edge))
                                {
                                    two_path_edges[prev_t].Add(in_edge);
                                }
                                if (!two_path_edges[t].Contains(out_edge))
                                {
                                    two_path_edges[t].Add(out_edge);
                                }

                                // Add the identified two paths to the list of two paths passing through v at time t
                                if (!_twoPathsByNode.ContainsKey(v))
                                {
                                    _twoPathsByNode[v] = new Dictionary <int, List <Tuple <string, string> > >();
                                }
                                if (!_twoPathsByNode[v].ContainsKey(t))
                                {
                                    _twoPathsByNode[v][t] = new List <Tuple <string, string> >();
                                }

                                if (!_twoPathsByStartTime.ContainsKey(prev_t))
                                {
                                    _twoPathsByStartTime[prev_t] = new List <string>();
                                }

                                _twoPathsByNode[v][t].Add(new Tuple <string, string>(s, d));
                                _twoPathsByStartTime[prev_t].Add(s + "," + v + "," + d);
                            }
                        }
                    }
                    prev_t = t;
                }
            }

            // Replace the edges of the temporal network by those contributing to two paths
            if (_stripEdges)
            {
                this.Clear();
                foreach (int t in two_path_edges.Keys)
                {
                    this[t] = two_path_edges[t];
                }
            }

            // Build the aggregate networks with the correct weights ...
            _cachedWeightedNetwork            = new WeightedNetwork();
            _cachedWeightedNetworkSecondOrder = new WeightedNetwork();

            foreach (var two_path in _twoPathWeights.Keys)
            {
                string[] split = two_path.Split(',');
                _cachedWeightedNetwork.AddEdge(split[0], split[1], EdgeType.Directed, _twoPathWeights[two_path]);
                _cachedWeightedNetwork.AddEdge(split[1], split[2], EdgeType.Directed, _twoPathWeights[two_path]);

                _cachedWeightedNetworkSecondOrder.AddEdge(string.Format("({0};{1})", split[0], split[1]), string.Format("({0};{1})", split[1], split[2]), EdgeType.Directed, _twoPathWeights[two_path]);
            }

            foreach (string v in _cachedWeightedNetwork.Vertices)
            {
                if (!_twoPathsByNode.ContainsKey(v))
                {
                    _twoPathsByNode[v] = new Dictionary <int, List <Tuple <string, string> > >();
                }
            }
        }
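Continuing the toy network from the sketch after Example #7, a short note on what the preprocessing produces (names hypothetical):

        // Usage sketch: extract two-paths and their statistical weights
        temp_net.ReduceToTwoPaths();
        // The single two-path "a,b,c" receives weight 1 / (indeg_b * outdeg_b) = 1 / (1 * 1) = 1, and
        // temp_net.AggregateNetwork / temp_net.TwoPathsByNode are now available without re-running the preprocessing.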
Example #9
        /// <summary>
        /// Runs a betweenness-preference-biased random walk on the aggregate network of a temporal network and records,
        /// for every step, the total variation distance (TVD) between the walker's visitation frequencies and the
        /// stationary distribution read from the file stationary_dist_RM.dat.
        /// </summary>
        /// <param name="temp_net">The temporal network whose aggregate network the walk is performed on</param>
        /// <param name="max_steps">The number of random walk steps to perform</param>
        /// <param name="null_model">Whether to use the uncorrelated (null model) betweenness preference matrices instead of the empirical ones</param>
        /// <returns>A dictionary mapping each step to the total variation distance from the stationary distribution</returns>
        public static IDictionary <int, double> RunRW_BWP(TemporalNetwork temp_net, int max_steps = 100000, bool null_model = false)
        {
            Random r = new Random();

            var cumulatives = new Dictionary <Tuple <string, string>, Dictionary <double, string> >();
            var sums        = new Dictionary <Tuple <string, string>, double>();

            // Dictionary<string, Dictionary<double, string>> cumulatives =
            // Dictionary<string, double> sums = new Dictionary<string, double>();

            Dictionary <string, Dictionary <string, int> > indices_pred = new Dictionary <string, Dictionary <string, int> >();
            Dictionary <string, Dictionary <string, int> > indices_succ = new Dictionary <string, Dictionary <string, int> >();
            Dictionary <string, double[, ]> matrices = new Dictionary <string, double[, ]>();

            Dictionary <string, int>    visitations = new Dictionary <string, int>();
            Dictionary <string, double> stationary  = new Dictionary <string, double>();

            Dictionary <Tuple <string, string>, int>    edge_visitations = new Dictionary <Tuple <string, string>, int>();
            Dictionary <Tuple <string, string>, double> edge_stationary  = new Dictionary <Tuple <string, string>, double>();

            Dictionary <int, double> tvd = new Dictionary <int, double>();

            // Aggregate network
            WeightedNetwork network = temp_net.AggregateNetwork;

            // Read analytical stationary distribution (i.e. flow-corrected edge weights) from disk
            string[] lines = System.IO.File.ReadAllLines("stationary_dist_RM.dat");
            foreach (string x in lines)
            {
                string[] split = x.Split(' ');
                string[] nodes = split[0].Split('.');
                var      edge  = new Tuple <string, string>(nodes[0], nodes[1]);

                // Extract stationary dist, set visitations to zero and adjust edge weights ...
                edge_stationary[edge]  = double.Parse(split[1], System.Globalization.CultureInfo.GetCultureInfo("en-US").NumberFormat);
                edge_visitations[edge] = 0;
                network[edge]          = edge_stationary[edge];
            }

            // Compute stationary dist of vertices ...
            double total = 0d;

            foreach (string x in network.Vertices)
            {
                stationary[x] = 0d;
                foreach (string s in network.GetPredecessors(x))
                {
                    stationary[x] += edge_stationary[new Tuple <string, string>(s, x)];
                }
                total += stationary[x];
            }
            foreach (string x in network.Vertices)
            {
                stationary[x] = stationary[x] / total;
            }


            // Compute betweenness preference matrices
            if (!null_model)
            {
                Console.Write("Computing betweenness preference in temporal network ...");
            }
            else
            {
                Console.Write("Calculating null model betweenness preference ...");
            }
            foreach (string x in network.Vertices)
            {
                var ind_p = new Dictionary <string, int>();
                var ind_s = new Dictionary <string, int>();
                if (!null_model)
                {
                    matrices[x] = BetweennessPref.GetBetweennessPrefMatrix(temp_net, x, out ind_p, out ind_s, false);
                }
                else
                {
                    matrices[x] = BetweennessPref.GetUncorrelatedBetweennessPrefMatrix(temp_net, x, out ind_p, out ind_s);
                }

                indices_pred[x] = ind_p;
                indices_succ[x] = ind_s;
            }
            Console.WriteLine("done.");

            // Initialize visitations, stationary distribution and cumulatives ...
            foreach (string x in network.Vertices)
            {
                visitations[x] = 0;
                stationary[x]  = 0d;

                foreach (string s in indices_pred[x].Keys)
                {
                    Tuple <string, string> key = new Tuple <string, string>(s, x);

                    stationary[x] += network.GetWeight(s, x);

                    // Compute the transition probability for an edge (x,t) given that we are in (s,x)
                    cumulatives[key] = new Dictionary <double, string>();
                    double sum = 0d;

                    foreach (string t in indices_succ[x].Keys)
                    {
                        double transition_prob = 0d;

                        string two_path = s + "," + x + "," + t;
                        transition_prob = matrices[x][indices_pred[x][s], indices_succ[x][t]];

                        if (transition_prob > 0)
                        {
                            sum += transition_prob;
                            cumulatives[key][sum] = t;
                        }
                    }
                    sums[key] = sum;
                }
            }

            // Draw two initial nodes ...
            string pred    = network.RandomNode;
            string current = network.GetRandomSuccessor(pred);

            visitations[pred]    = 1;
            visitations[current] = 1;
            edge_visitations[new Tuple <string, string>(pred, current)] = 1;

            // Run the random walk (over edges)
            for (int t = 0; t < max_steps; t++)
            {
                // The edge via which we arrived at the current node
                Tuple <string, string> current_edge = new Tuple <string, string>(pred, current);

                // If this happens, we are stuck in a sink, i.e. there is no out edge
                System.Diagnostics.Debug.Assert(sums[current_edge] > 0, string.Format("Network not strongly connected! RW stuck after passing through edge {0}", current_edge));

                // Draw a sample uniformly from [0,1] and multiply it with the cumulative sum for the current edge ...
                double sample = r.NextDouble() * sums[current_edge];

                // Determine the next transition ...
                string next_node = null;
                for (int i = 0; i < cumulatives[current_edge].Count; i++)
                {
                    if (cumulatives[current_edge].Keys.ElementAt(i) > sample)
                    {
                        next_node = cumulatives[current_edge].Values.ElementAt(i);
                        break;
                    }
                }
                pred    = current;
                current = next_node;

                visitations[current] = visitations[current] + 1;
                edge_visitations[new Tuple <string, string>(pred, current)] = edge_visitations[new Tuple <string, string>(pred, current)] + 1;

                tvd[t] = TVD(visitations, stationary);
            }
            return(tvd);
        }
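A usage sketch for the random walk routine. Note that it expects a file named stationary_dist_RM.dat in the working directory, with lines of the form node1.node2 weight (see the parsing code above); the call below assumes it is made from within the same class, since the excerpt does not show the class name.

        // Usage sketch: compare the biased walk against the null model
        IDictionary<int, double> tvd_real = RunRW_BWP(temp_net, max_steps: 50000, null_model: false);
        IDictionary<int, double> tvd_null = RunRW_BWP(temp_net, max_steps: 50000, null_model: true);
        // Each dictionary maps a step index to the total variation distance from the stationary distribution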
Example #10
        /// <summary>
        /// Creates a TikZ representation of the temporal unfolding of the temporal network
        /// </summary>
        /// <param name="path">The path to which to write the tikz file</param>
        /// <param name="between_node">The node to highlight in the unfolding (drawn in black)</param>
        /// <param name="temp_net">The temporal network that shall be exported</param>
        public static void CreateTikzUnfolding(string path, string between_node, TemporalNetwork temp_net)
        {
            WeightedNetwork net = WeightedNetwork.FromTemporalNetwork(temp_net);

            StringBuilder strB = new StringBuilder();

            strB.AppendLine("\\newcounter{a}");

            strB.AppendLine("\\begin{tikzpicture}[->,>=stealth',auto,scale=0.5, every node/.style={scale=0.9}]");
            strB.AppendLine("\\tikzstyle{node} = [fill=lightgray,text=black,circle]");
            strB.AppendLine("\\tikzstyle{v} = [fill=black,text=white,circle]");
            strB.AppendLine("\\tikzstyle{dst} = [fill=lightgray,text=black,circle]");
            strB.AppendLine("\\tikzstyle{lbl} = [fill=white,text=black,circle]");

            string last = "";

            foreach (string v in net.Vertices.OrderBy(s => s))
            {
                if (last == "")
                {
                    strB.AppendLine("\\node[lbl]                     (" + v + "-0)   {$" + v + "$};");
                }
                else
                {
                    strB.AppendLine("\\node[lbl,right=0.5cm of " + last + "-0] (" + v + "-0)   {$" + v + "$};");
                }
                last = v;
            }

            strB.AppendLine("\\setcounter{a}{0}");
            strB.AppendLine("\\foreach \\number in {1,...," + (temp_net.Length + 1) + "}{");
            strB.AppendLine("\\setcounter{a}{\\number}");
            strB.AppendLine("\\addtocounter{a}{-1}");
            strB.AppendLine("\\pgfmathparse{\\thea}");
            foreach (string v in net.Vertices)
            {
                if (v != between_node)
                {
                    strB.AppendLine("\\node[node,below=0.3cm of " + v + "-\\pgfmathresult]   (" + v + "-\\number) {};");
                }
                else
                {
                    strB.AppendLine("\\node[v,below=0.3cm of " + v + "-\\pgfmathresult]     (" + v + "-\\number) {};");
                }
            }
            strB.AppendLine("\\node[lbl,left=0.5cm of " + net.Vertices.OrderBy(s => s).ElementAt(0) + "-\\number]    (col-\\pgfmathresult) {$t=$\\number};");
            strB.AppendLine("}");
            strB.AppendLine("\\path[->,thick]");
            int i = 1;

            foreach (var t in temp_net.Keys)
            {
                foreach (var edge in temp_net[t])
                {
                    strB.AppendLine("(" + edge.Item1 + "-" + (t + 1) + ") edge (" + edge.Item2 + "-" + (t + 2) + ")");
                    i++;
                }
            }
            strB.AppendLine(";");
            strB.AppendLine("\\end{tikzpicture} ");

            System.IO.File.WriteAllText(path, strB.ToString());
        }
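A minimal usage sketch; the file name and the highlighted node are hypothetical, and the call assumes it is made from within the class that defines CreateTikzUnfolding.

        // Usage sketch: export the time-unfolded network, highlighting node "b"
        CreateTikzUnfolding("unfolding.tex", "b", temp_net);
        // The generated file contains a tikzpicture environment that can be \input into a LaTeX document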