/// <summary>
/// Computes a normalized version P of a given betweenness preference matrix B.
/// This corresponds to equation (3) in the paper: every entry of B is divided by the
/// sum of all entries, so that the entries of P sum to one.
/// </summary>
/// <param name="x">The node for which the normalized matrix is computed</param>
/// <param name="aggregate_net">The weighted aggregate network</param>
/// <param name="B">The betweenness preference matrix that shall be normalized</param>
/// <returns>A normalized version of the betweenness preference matrix B; an all-zero
/// matrix if the entries of B sum to zero (avoids division by zero)</returns>
public static double[,] NormalizeMatrix(string x, WeightedNetwork aggregate_net, double[,] B)
{
    // Hoist the degree lookups: they are loop-invariant and were previously
    // re-evaluated on every iteration of both loop nests.
    int indeg = aggregate_net.GetIndeg(x);
    int outdeg = aggregate_net.GetOutdeg(x);

    double[,] P = new double[indeg, outdeg];

    // Total mass of B, used as the normalization constant in equation (3)
    double sum = 0d;
    for (int s = 0; s < indeg; s++)
    {
        for (int d = 0; d < outdeg; d++)
        {
            sum += B[s, d];
        }
    }

    // If B is all zeros there is nothing to normalize; return the zero matrix.
    if (sum > 0d)
    {
        for (int s = 0; s < indeg; s++)
        {
            for (int d = 0; d < outdeg; d++)
            {
                P[s, d] = B[s, d] / sum;
            }
        }
    }
    return P;
}
/// <summary>
/// Computes the scalar betweenness preference of a node based on its normalized
/// betweenness preference matrix. This is the mutual information between the source
/// and target of two-paths through the node, i.e. equation (4) in the paper.
/// </summary>
/// <param name="aggregate_net">The weighted aggregate network for which to compute betweenness preference</param>
/// <param name="x">The node for which to compute betweenness preference</param>
/// <param name="P">The (normalized) betweenness preference matrix based on which betw. pref. will be computed</param>
/// <param name="normalized">If true, the mutual information is divided by min(H_s, H_d),
/// yielding a value in [0, 1]</param>
/// <returns>The betweenness preference, defined as the mutual information of the source
/// and target of two-paths; 0 for an empty network or when the normalization entropy is zero</returns>
public static double GetBetweennessPref(WeightedNetwork aggregate_net, string x, double[,] P, bool normalized = false)
{
    // If the network is empty, just return zero
    if (aggregate_net.VertexCount == 0)
    {
        return 0d;
    }

    int indeg = aggregate_net.GetIndeg(x);
    int outdeg = aggregate_net.GetOutdeg(x);

    double[] marginal_s = new double[indeg];
    double[] marginal_d = new double[outdeg];

    // Marginal probabilities P_d = \sum_s'{P_{s'd}}
    for (int d = 0; d < outdeg; d++)
    {
        double P_d = 0d;
        for (int s_prime = 0; s_prime < indeg; s_prime++)
        {
            P_d += P[s_prime, d];
        }
        marginal_d[d] = P_d;
    }

    // Marginal probabilities P_s = \sum_d'{P_{sd'}}
    for (int s = 0; s < indeg; s++)
    {
        double P_s = 0d;
        for (int d_prime = 0; d_prime < outdeg; d_prime++)
        {
            P_s += P[s, d_prime];
        }
        marginal_s[s] = P_s;
    }

    double H_s = Entropy(marginal_s);
    double H_d = Entropy(marginal_d);

    // Mutual information, equation (4) of the paper
    double I = 0;
    for (int s = 0; s < indeg; s++)
    {
        for (int d = 0; d < outdeg; d++)
        {
            if (P[s, d] != 0) // 0 * Log(0) = 0
            {
                I += P[s, d] * Math.Log(P[s, d] / (marginal_s[s] * marginal_d[d]), 2d);
            }
        }
    }

    if (!normalized)
    {
        return I;
    }

    // Since I <= min(H_s, H_d), a zero minimum entropy implies I == 0; returning 0
    // here avoids the division by zero (NaN) the unguarded I / min(H_s, H_d) produced.
    double H_min = Math.Min(H_s, H_d);
    return H_min > 0d ? I / H_min : 0d;
}
/// <summary>
/// Computes the baseline betweenness preference matrix of a node under the assumption
/// that the temporal network does not contain a betweenness preference correlation.
/// This corresponds to equation (5) in the paper: each entry is the product of the
/// relative in-weight of the source and the relative out-weight of the destination.
/// </summary>
/// <param name="aggregate_net">The weighted, aggregate ego network of node v based on which the matrix will be computed</param>
/// <param name="v">The node to compute the baseline betweenness preference for</param>
/// <param name="index_pred">Output: indices of predecessor nodes (rows) in the betweenness preference matrix</param>
/// <param name="index_succ">Output: indices of successor nodes (columns) in the betweenness preference matrix</param>
/// <returns>The baseline (uncorrelated) betweenness preference matrix P</returns>
public static double[,] GetUncorrelatedBetweennessPrefMatrix(WeightedNetwork aggregate_net, string v, out Dictionary<string, int> index_pred, out Dictionary<string, int> index_succ)
{
    // Use a mapping of indices to node labels
    index_pred = new Dictionary<string, int>();
    index_succ = new Dictionary<string, int>();

    // Create an empty matrix
    double[,] P = new double[aggregate_net.GetIndeg(v), aggregate_net.GetOutdeg(v)];

    // Create the index-to-node mappings: predecessors become rows, successors columns
    int i = 0;
    foreach (string u in aggregate_net.GetPredecessors(v))
    {
        index_pred[u] = i++;
    }
    i = 0;
    foreach (string w in aggregate_net.GetSuccessors(v))
    {
        index_succ[w] = i++;
    }

    // Normalization factor for sources: total weight of incoming edges
    double sum_source_weights = 0d;
    foreach (string s_prime in aggregate_net.GetPredecessors(v))
    {
        sum_source_weights += aggregate_net.GetWeight(s_prime, v);
    }

    // Normalization factor for destinations: total weight of outgoing edges
    double sum_dest_weights = 0d;
    foreach (string d_prime in aggregate_net.GetSuccessors(v))
    {
        sum_dest_weights += aggregate_net.GetWeight(v, d_prime);
    }

    // Equation (5) in the paper: assume statistical independence of source and
    // destination, so P[s,d] factorizes into the two relative weights.
    // (The original code also tracked the minimum entry in an unused local
    // 'min_p'; that dead code has been removed.)
    foreach (string s in aggregate_net.GetPredecessors(v))
    {
        foreach (string d in aggregate_net.GetSuccessors(v))
        {
            P[index_pred[s], index_succ[d]] =
                (aggregate_net.GetWeight(s, v) / sum_source_weights) *
                (aggregate_net.GetWeight(v, d) / sum_dest_weights);
        }
    }
    return P;
}