comparison env/lib/python3.9/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py @ 0:4f3585e2f14b draft default tip

"planemo upload commit 60cee0fc7c0cda8592644e1aad72851dec82c959"
author shellac
date Mon, 22 Mar 2021 18:12:50 +0000
import networkx as nx
from networkx.testing import almost_equal


class TestLoadCentrality:
    @classmethod
    def setup_class(cls):

        G = nx.Graph()
        G.add_edge(0, 1, weight=3)
        G.add_edge(0, 2, weight=2)
        G.add_edge(0, 3, weight=6)
        G.add_edge(0, 4, weight=4)
        G.add_edge(1, 3, weight=5)
        G.add_edge(1, 5, weight=5)
        G.add_edge(2, 4, weight=1)
        G.add_edge(3, 4, weight=2)
        G.add_edge(3, 5, weight=1)
        G.add_edge(4, 5, weight=4)
        cls.G = G
        cls.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0}
        cls.K = nx.krackhardt_kite_graph()
        cls.P3 = nx.path_graph(3)
        cls.P4 = nx.path_graph(4)
        cls.K5 = nx.complete_graph(5)

        cls.C4 = nx.cycle_graph(4)
        cls.T = nx.balanced_tree(r=2, h=2)
        cls.Gb = nx.Graph()
        cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
        cls.F = nx.florentine_families_graph()
        cls.LM = nx.les_miserables_graph()
        cls.D = nx.cycle_graph(3, create_using=nx.DiGraph())
        cls.D.add_edges_from([(3, 0), (4, 3)])

    def test_not_strongly_connected(self):
        b = nx.load_centrality(self.D)
        result = {0: 5.0 / 12, 1: 1.0 / 4, 2: 1.0 / 12, 3: 1.0 / 4, 4: 0.000}
        for n in sorted(self.D):
            assert almost_equal(result[n], b[n], places=3)
            assert almost_equal(result[n], nx.load_centrality(self.D, n), places=3)

    def test_weighted_load(self):
        b = nx.load_centrality(self.G, weight="weight", normalized=False)
        for n in sorted(self.G):
            assert b[n] == self.exact_weighted[n]

    def test_k5_load(self):
        G = self.K5
        c = nx.load_centrality(G)
        d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000}
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_p3_load(self):
        G = self.P3
        c = nx.load_centrality(G)
        d = {0: 0.000, 1: 1.000, 2: 0.000}
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)
        c = nx.load_centrality(G, v=1)
        assert almost_equal(c, 1.0)
        c = nx.load_centrality(G, v=1, normalized=True)
        assert almost_equal(c, 1.0)

    def test_p2_load(self):
        G = nx.path_graph(2)
        c = nx.load_centrality(G)
        d = {0: 0.000, 1: 0.000}
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_krackhardt_load(self):
        G = self.K
        c = nx.load_centrality(G)
        d = {
            0: 0.023,
            1: 0.023,
            2: 0.000,
            3: 0.102,
            4: 0.000,
            5: 0.231,
            6: 0.231,
            7: 0.389,
            8: 0.222,
            9: 0.000,
        }
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_florentine_families_load(self):
        G = self.F
        c = nx.load_centrality(G)
        d = {
            "Acciaiuoli": 0.000,
            "Albizzi": 0.211,
            "Barbadori": 0.093,
            "Bischeri": 0.104,
            "Castellani": 0.055,
            "Ginori": 0.000,
            "Guadagni": 0.251,
            "Lamberteschi": 0.000,
            "Medici": 0.522,
            "Pazzi": 0.000,
            "Peruzzi": 0.022,
            "Ridolfi": 0.117,
            "Salviati": 0.143,
            "Strozzi": 0.106,
            "Tornabuoni": 0.090,
        }
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_les_miserables_load(self):
        G = self.LM
        c = nx.load_centrality(G)
        d = {
            "Napoleon": 0.000,
            "Myriel": 0.177,
            "MlleBaptistine": 0.000,
            "MmeMagloire": 0.000,
            "CountessDeLo": 0.000,
            "Geborand": 0.000,
            "Champtercier": 0.000,
            "Cravatte": 0.000,
            "Count": 0.000,
            "OldMan": 0.000,
            "Valjean": 0.567,
            "Labarre": 0.000,
            "Marguerite": 0.000,
            "MmeDeR": 0.000,
            "Isabeau": 0.000,
            "Gervais": 0.000,
            "Listolier": 0.000,
            "Tholomyes": 0.043,
            "Fameuil": 0.000,
            "Blacheville": 0.000,
            "Favourite": 0.000,
            "Dahlia": 0.000,
            "Zephine": 0.000,
            "Fantine": 0.128,
            "MmeThenardier": 0.029,
            "Thenardier": 0.075,
            "Cosette": 0.024,
            "Javert": 0.054,
            "Fauchelevent": 0.026,
            "Bamatabois": 0.008,
            "Perpetue": 0.000,
            "Simplice": 0.009,
            "Scaufflaire": 0.000,
            "Woman1": 0.000,
            "Judge": 0.000,
            "Champmathieu": 0.000,
            "Brevet": 0.000,
            "Chenildieu": 0.000,
            "Cochepaille": 0.000,
            "Pontmercy": 0.007,
            "Boulatruelle": 0.000,
            "Eponine": 0.012,
            "Anzelma": 0.000,
            "Woman2": 0.000,
            "MotherInnocent": 0.000,
            "Gribier": 0.000,
            "MmeBurgon": 0.026,
            "Jondrette": 0.000,
            "Gavroche": 0.164,
            "Gillenormand": 0.021,
            "Magnon": 0.000,
            "MlleGillenormand": 0.047,
            "MmePontmercy": 0.000,
            "MlleVaubois": 0.000,
            "LtGillenormand": 0.000,
            "Marius": 0.133,
            "BaronessT": 0.000,
            "Mabeuf": 0.028,
            "Enjolras": 0.041,
            "Combeferre": 0.001,
            "Prouvaire": 0.000,
            "Feuilly": 0.001,
            "Courfeyrac": 0.006,
            "Bahorel": 0.002,
            "Bossuet": 0.032,
            "Joly": 0.002,
            "Grantaire": 0.000,
            "MotherPlutarch": 0.000,
            "Gueulemer": 0.005,
            "Babet": 0.005,
            "Claquesous": 0.005,
            "Montparnasse": 0.004,
            "Toussaint": 0.000,
            "Child1": 0.000,
            "Child2": 0.000,
            "Brujon": 0.000,
            "MmeHucheloup": 0.000,
        }
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_unnormalized_k5_load(self):
        G = self.K5
        c = nx.load_centrality(G, normalized=False)
        d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000}
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_unnormalized_p3_load(self):
        G = self.P3
        c = nx.load_centrality(G, normalized=False)
        d = {0: 0.000, 1: 2.000, 2: 0.000}
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_unnormalized_krackhardt_load(self):
        G = self.K
        c = nx.load_centrality(G, normalized=False)
        d = {
            0: 1.667,
            1: 1.667,
            2: 0.000,
            3: 7.333,
            4: 0.000,
            5: 16.667,
            6: 16.667,
            7: 28.000,
            8: 16.000,
            9: 0.000,
        }

        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_unnormalized_florentine_families_load(self):
        G = self.F
        c = nx.load_centrality(G, normalized=False)

        d = {
            "Acciaiuoli": 0.000,
            "Albizzi": 38.333,
            "Barbadori": 17.000,
            "Bischeri": 19.000,
            "Castellani": 10.000,
            "Ginori": 0.000,
            "Guadagni": 45.667,
            "Lamberteschi": 0.000,
            "Medici": 95.000,
            "Pazzi": 0.000,
            "Peruzzi": 4.000,
            "Ridolfi": 21.333,
            "Salviati": 26.000,
            "Strozzi": 19.333,
            "Tornabuoni": 16.333,
        }
        for n in sorted(G):
            assert almost_equal(c[n], d[n], places=3)

    def test_load_betweenness_difference(self):
        # Difference Between Load and Betweenness
        # ---------------------------------------
        # The smallest graph that shows the difference between load and
        # betweenness is G = ladder_graph(3) (Graph B below).

        # Graphs A and B are from Tao Zhou, Jian-Guo Liu, Bing-Hong
        # Wang: Comment on "Scientific collaboration
        # networks. II. Shortest paths, weighted networks, and
        # centrality". https://arxiv.org/pdf/physics/0511084

        # Notice that, unlike here, their calculation adds 1 to the
        # betweenness of every node i for every path from i to every
        # other node. This is exactly what it should be, based on
        # Eqn. (1) in their paper: the eqn is B(v) = \sum_{s\neq t,
        # s\neq v}{\frac{\sigma_{st}(v)}{\sigma_{st}}}, therefore
        # they allow v to be the target node.

        # We follow Brandes 2001, who follows Freeman 1977 in making
        # the sum for the betweenness of v exclude paths where v is
        # either the source or the target node. To agree with their
        # numbers, we must additionally remove edge (4, 8) from the
        # graph; see the AC example following (there is a mistake in
        # the figure in their paper - personal communication).
        # (An illustrative sketch of the load/betweenness difference
        # follows this test method.)

        # A = nx.Graph()
        # A.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4),
        #                   (3, 5), (4, 6), (4, 7), (4, 8),
        #                   (5, 8), (6, 9), (7, 9), (8, 9)])
        B = nx.Graph()  # ladder_graph(3)
        B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
        c = nx.load_centrality(B, normalized=False)
        d = {0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 1.750, 5: 1.750}
        for n in sorted(B):
            assert almost_equal(c[n], d[n], places=3)
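
    # A minimal illustrative sketch, not part of the upstream test suite:
    # it makes the comment above concrete by checking that normalized load
    # and betweenness centrality genuinely disagree on ladder_graph(3)
    # (roughly 0.325 vs. 0.333 at the two degree-3 nodes). The method name
    # below is hypothetical; only the public nx.load_centrality and
    # nx.betweenness_centrality APIs are assumed.
    def test_load_differs_from_betweenness_sketch(self):
        B = nx.ladder_graph(3)
        load = nx.load_centrality(B, normalized=True)
        btw = nx.betweenness_centrality(B, normalized=True)
        # The two measures must differ at at least one node of this graph.
        assert any(abs(load[n] - btw[n]) > 1e-6 for n in B)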

    def test_c4_edge_load(self):
        G = self.C4
        c = nx.edge_load_centrality(G)
        d = {(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000}
        for n in G.edges():
            assert almost_equal(c[n], d[n], places=3)

    def test_p4_edge_load(self):
        G = self.P4
        c = nx.edge_load_centrality(G)
        d = {(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000}
        for n in G.edges():
            assert almost_equal(c[n], d[n], places=3)

    def test_k5_edge_load(self):
        G = self.K5
        c = nx.edge_load_centrality(G)
        d = {
            (0, 1): 5.000,
            (0, 2): 5.000,
            (0, 3): 5.000,
            (0, 4): 5.000,
            (1, 2): 5.000,
            (1, 3): 5.000,
            (1, 4): 5.000,
            (2, 3): 5.000,
            (2, 4): 5.000,
            (3, 4): 5.000,
        }
        for n in G.edges():
            assert almost_equal(c[n], d[n], places=3)

    def test_tree_edge_load(self):
        G = self.T
        c = nx.edge_load_centrality(G)
        d = {
            (0, 1): 24.000,
            (0, 2): 24.000,
            (1, 3): 12.000,
            (1, 4): 12.000,
            (2, 5): 12.000,
            (2, 6): 12.000,
        }
        for n in G.edges():
            assert almost_equal(c[n], d[n], places=3)