Mercurial > repos > shellac > sam_consensus_v3
comparison env/lib/python3.9/site-packages/networkx/algorithms/centrality/load.py @ 0:4f3585e2f14b draft default tip
"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author | shellac |
---|---|
date | Mon, 22 Mar 2021 18:12:50 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:4f3585e2f14b |
---|---|
1 """Load centrality.""" | |
2 from operator import itemgetter | |
3 | |
4 import networkx as nx | |
5 | |
6 __all__ = ["load_centrality", "edge_load_centrality"] | |
7 | |
8 | |
def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
    """Compute load centrality for nodes.

    The load centrality of a node is the fraction of all shortest
    paths that pass through that node.

    Parameters
    ----------
    G : graph
      A networkx graph.

    v : node, optional (default=None)
      If specified, return only the load centrality of this node
      (a single float instead of a dictionary).

    cutoff : int or float, optional (default=None)
      If specified, only consider paths of length <= cutoff.

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : None or string, optional (default=None)
      If None, edge weights are ignored.
      Otherwise holds the name of the edge attribute used as weight.

    Returns
    -------
    nodes : dictionary
      Dictionary of nodes with centrality as the value, or a single
      float if ``v`` was given.

    See Also
    --------
    betweenness_centrality()

    Notes
    -----
    Load centrality is slightly different than betweenness. It was originally
    introduced by [2]_. For this load algorithm see [1]_.

    References
    ----------
    .. [1] Mark E. J. Newman:
       Scientific collaboration networks. II.
       Shortest paths, weighted networks, and centrality.
       Physical Review E 64, 016132, 2001.
       http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
    .. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
       Universal behavior of Load Distribution in Scale-Free Networks.
       Physical Review Letters 87(27):1–4, 2001.
       http://phya.snu.ac.kr/~dkim/PRL87278701.pdf
    """
    if v is not None:
        # Single-node query: accumulate this node's load over all sources.
        betweenness = 0.0
        for source in G:
            ubetween = _node_betweenness(G, source, cutoff, False, weight)
            betweenness += ubetween.get(v, 0)
        if normalized:
            order = G.order()
            if order <= 2:
                return betweenness  # no normalization: b=0 for all nodes
            betweenness *= 1.0 / ((order - 1) * (order - 2))
        return betweenness
    else:
        # Full dictionary: sum per-source contributions for every node.
        betweenness = dict.fromkeys(G, 0.0)
        for source in betweenness:
            ubetween = _node_betweenness(G, source, cutoff, False, weight)
            for node, value in ubetween.items():
                betweenness[node] += value
        if normalized:
            order = G.order()
            if order <= 2:
                return betweenness  # no normalization: b=0 for all nodes
            scale = 1.0 / ((order - 1) * (order - 2))
            for node in betweenness:
                betweenness[node] *= scale
        return betweenness  # all nodes
82 | |
83 | |
def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
    """Single-source load helper.

    Computes, for paths starting at ``source``, the "load" of every
    reachable node: the (fractional) number of shortest paths from
    ``source`` that pass through it.  Summing this over all sources
    gives the load centrality (see ``newman_betweenness_centrality``).

    When ``weight`` is given, Dijkstra is used to find shortest paths;
    otherwise unweighted BFS predecessors are used.
    """
    # Predecessor lists and distances from the source.
    if weight is None:
        pred, length = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        pred, length = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)

    # Reachable nodes other than the source, ordered by distance; the
    # farthest sit at the end so popping processes them first.
    ordered = sorted((dist, node) for node, dist in length.items())
    stack = [node for dist, node in ordered if dist > 0]

    # Every seen node starts with one unit of load (itself as endpoint).
    between = dict.fromkeys(length, 1.0)

    while stack:
        node = stack.pop()
        if node in pred:
            share = len(pred[node])  # split credit across equal-length paths
            for parent in pred[node]:
                if parent == source:
                    # All remaining nodes also have pred == [source].
                    break
                between[parent] += between[node] / share
    # Remove the endpoint's own unit of load.
    for node in between:
        between[node] -= 1
    # Rescale to lie between 0 and 1.
    if normalized:
        count = len(between)
        if count > 2:
            # Scale by 1 / the number of possible (ordered) endpoint pairs.
            factor = 1.0 / ((count - 1) * (count - 2))
            for node in between:
                between[node] *= factor
    return between
133 | |
134 | |
135 load_centrality = newman_betweenness_centrality | |
136 | |
137 | |
def edge_load_centrality(G, cutoff=False):
    """Compute edge load.

    WARNING: This concept of edge load has not been analysed
    or discussed outside of NetworkX that we know of.
    It is based loosely on load_centrality in the sense that
    it counts the number of shortest paths which cross each edge.
    This function is for demonstration and testing purposes.

    Parameters
    ----------
    G : graph
        A networkx graph

    cutoff : bool, optional (default=False)
        If specified, only consider paths of length <= cutoff.

    Returns
    -------
    A dict keyed by edge 2-tuple to the number of shortest paths
    which use that edge. Where more than one path is shortest
    the count is divided equally among paths.
    """
    # Both orientations of every edge start at zero.
    betweenness = {}
    for u, v in G.edges():
        betweenness[u, v] = betweenness[v, u] = 0.0

    # Accumulate the per-source edge contributions.
    for source in G:
        contributions = _edge_betweenness(G, source, cutoff=cutoff)
        for edge, amount in contributions.items():
            betweenness[edge] += amount  # cumulative total
    return betweenness
171 | |
172 | |
def _edge_betweenness(G, source, nodes=None, cutoff=False):
    """Single-source edge betweenness helper.

    Pushes shortest-path counts from ``source`` back along predecessor
    chains, crediting each traversed edge (both orientations are kept
    as separate keys).
    """
    # Predecessor lists and distances from the source.
    pred, length = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # Nodes sorted by distance; popping processes farthest first.
    by_distance = [node for node, _ in sorted(length.items(), key=itemgetter(1))]
    # Every edge (in both directions) starts with one unit; this does not
    # account for any edge weights.
    between = {}
    for u, v in G.edges(nodes):
        between[u, v] = between[v, u] = 1.0

    # Work back through all paths, farthest node first.
    while by_distance:
        v = by_distance.pop()
        if v in pred:
            for w in pred[v]:
                if w in pred:
                    # Discount when w is reached by multiple shortest paths.
                    share = len(pred[w])
                    for x in pred[w]:
                        between[w, x] += between[v, w] / share
                        between[x, w] += between[w, v] / share
    return between