Revision 3c6ce57c

View differences:

fiddle/heuristic-betweenness-centrality/betweenness_centrality.py
21 21
           'edge_betweenness']
22 22

  
23 23

  
24
def betweenness_centrality(G, traffic_matrix, k=None, normalized=True, weight=None,
25
                           endpoints=False,
24
def weight_betweenness_centrality(G, traffic_matrix=None, k=None, normalized=True, weight=None, rescale=False,
26 25
                           seed=None):
27 26
    r"""Compute the shortest-path betweenness centrality for nodes.
28 27

  
......
74 73

  
75 74
    Notes
76 75
    -----
77
    The algorithm is from Ulrik Brandes [1]_.
76
    The algorithm is from Rami Puzis [1]_.
78 77
    See [4]_ for the original first published version and [2]_ for details on
79 78
    algorithms for variations and related metrics.
80 79

  
81
    For approximate betweenness calculations set k=#samples to use
82
    k nodes ("pivots") to estimate the betweenness values. For an estimate
83
    of the number of pivots needed see [3]_.
80
    For a faster way to calculate BC using combinatorial path counting, see [3]_.
84 81

  
85 82
    For weighted graphs the edge weights must be greater than zero.
86 83
    Zero edge weights can produce an infinite number of equal length
87 84
    paths between pairs of nodes.
88 85

  
86
    See [5]_ for details on how to implement ``_accumulate_endpoints``.
87

  
89 88
    References
90 89
    ----------
91
    .. [1] Ulrik Brandes:
92
       A Faster Algorithm for Betweenness Centrality.
93
       Journal of Mathematical Sociology 25(2):163-177, 2001.
94
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
90
    .. [1] Rami Puzis:
91
       Optimization of NIDS Placement for Protection of Intercommunicating Critical Infrastructures.
  
95 92
    .. [2] Ulrik Brandes:
96 93
       On Variants of Shortest-Path Betweenness
97 94
       Centrality and their Generic Computation.
98 95
       Social Networks 30(2):136-145, 2008.
99 96
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
100 97
    .. [3] Ulrik Brandes and Christian Pich:
101
       Centrality Estimation in Large Networks.
102
       International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007.
103
       http://www.inf.uni-konstanz.de/algo/publications/bp-celn-06.pdf
98
       A Faster Algorithm for Betweenness Centrality.
99
       Journal of Mathematical Sociology 25(2):163-177, 2001.
100
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
104 101
    .. [4] Linton C. Freeman:
105 102
       A set of measures of centrality based on betweenness.
106 103
       Sociometry 40: 35–41, 1977
107 104
       http://moreno.ss.uci.edu/23.pdf
108 105

  
106
    .. [5] Rami Puzis:
107
       Optimization of NIDS Placement for Protection of Intercommunicating Critical Infrastructures.
108

  
109 109
    """
110 110
    betweenness = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
111 111
    if k is None:
......
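
For orientation, a minimal usage sketch of the renamed entry point above. This is my illustration, not part of the revision: the toy graph is arbitrary, and with traffic_matrix left as None the call falls through to the _accumulate_weight_basic branch shown below.

    # Illustrative sketch only -- Python 2, matching this module.
    # Assumes the module imports as in utility.py.
    import networkx as nx
    import betweenness_centrality as centrality

    G = nx.path_graph(4)  # toy graph 0-1-2-3

    # No traffic matrix: every ordered pair (s, t) with s != t carries
    # demand h(s, t) = 1, i.e. the _accumulate_weight_basic code path.
    score = centrality.weight_betweenness_centrality(G, rescale=True)
    print score
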
115 115
        nodes = random.sample(G.nodes(), k)
116 116
    vertices = G.nodes()
117 117

  
118
    print 'nodes = %s' % nodes
119
    print 'vertices = %s' % vertices
118
    # print 'nodes = %s' % nodes
119
    # print 'vertices = %s' % vertices
120 120
    for s in nodes:
121 121
        # single source shortest paths
122
        # print '\nxxx _single_source_shortest_path'
122 123
        if weight is None:  # use BFS
123 124
            S, P, sigma = _single_source_shortest_path_basic(G, s)
124 125
        else:  # use Dijkstra's algorithm
125 126
            S, P, sigma = _single_source_dijkstra_path_basic(G, s, weight)
126 127

  
127
        print '\nxxx _single_source_shortest_path'
128
        print s
129
        print S
130
        print P
131
        print sigma
128
        # print s
129
        # print S
130
        # print P
131
        # print sigma
132 132
        # accumulation
133
        if endpoints:
134
            betweenness = _accumulate_endpoints(betweenness, S, P, sigma, s, traffic_matrix, vertices)
133
        if traffic_matrix:
134
            betweenness = _accumulate_weight(betweenness, S, P, sigma, s, traffic_matrix, vertices)
135 135
        else:
136
            betweenness = _accumulate_basic(betweenness, S, P, sigma, s, traffic_matrix, vertices)
136
            betweenness = _accumulate_weight_basic(betweenness, S, P, sigma, s)
137

  
138
    print 'betweenness = %s' % betweenness
139
    if rescale:
140
        betweenness = _rescale(betweenness, len(G),
141
                   normalized=normalized,
142
                   directed=G.is_directed(),
143
                   k=k)
137 144
    # Dec 9, 2015 - try to remove rescaling
138 145
    # rescaling
139 146
    # betweenness = _rescale(betweenness, len(G),
......
143 150
    return betweenness
144 151

  
145 152

  
146
def edge_betweenness_centrality(G, k=None, normalized=True, weight=None,
147
                                seed=None):
148
    r"""Compute betweenness centrality for edges.
149

  
150
    Betweenness centrality of an edge `e` is the sum of the
151
    fraction of all-pairs shortest paths that pass through `e`:
152

  
153
    .. math::
154

  
155
       c_B(e) = \sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)}
156

  
157
    where `V` is the set of nodes, `\sigma(s, t)` is the number of
158
    shortest `(s, t)`-paths, and `\sigma(s, t|e)` is the number of
159
    those paths passing through edge `e` [2]_.
160

  
161
    Parameters
162
    ----------
163
    G : graph
164
      A NetworkX graph
165

  
166
    k : int, optional (default=None)
167
      If k is not None use k node samples to estimate betweenness.
168
      The value of k <= n where n is the number of nodes in the graph.
169
      Higher values give better approximation.
170

  
171
    normalized : bool, optional
172
      If True the betweenness values are normalized by `2/(n(n-1))`
173
      for graphs, and `1/(n(n-1))` for directed graphs where `n`
174
      is the number of nodes in G.
175

  
176
    weight : None or string, optional
177
      If None, all edge weights are considered equal.
178
      Otherwise holds the name of the edge attribute used as weight.
179

  
180
    Returns
181
    -------
182
    edges : dictionary
183
       Dictionary of edges with betweenness centrality as the value.
184

  
185
    See Also
186
    --------
187
    betweenness_centrality
188
    edge_load
189

  
190
    Notes
191
    -----
192
    The algorithm is from Ulrik Brandes [1]_.
193

  
194
    For weighted graphs the edge weights must be greater than zero.
195
    Zero edge weights can produce an infinite number of equal length
196
    paths between pairs of nodes.
197

  
198
    References
199
    ----------
200
    .. [1]  A Faster Algorithm for Betweenness Centrality. Ulrik Brandes,
201
       Journal of Mathematical Sociology 25(2):163-177, 2001.
202
       http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
203
    .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
204
       Centrality and their Generic Computation.
205
       Social Networks 30(2):136-145, 2008.
206
       http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
207
    """
208
    betweenness = dict.fromkeys(G, 0.0)  # b[v]=0 for v in G
209
    # b[e]=0 for e in G.edges()
210
    betweenness.update(dict.fromkeys(G.edges(), 0.0))
211
    if k is None:
212
        nodes = G
213
    else:
214
        random.seed(seed)
215
        nodes = random.sample(G.nodes(), k)
216
    for s in nodes:
217
        # single source shortest paths
218
        if weight is None:  # use BFS
219
            S, P, sigma = _single_source_shortest_path_basic(G, s)
220
        else:  # use Dijkstra's algorithm
221
            S, P, sigma = _single_source_dijkstra_path_basic(G, s, weight)
222
        # accumulation
223
        betweenness = _accumulate_edges(betweenness, S, P, sigma, s)
224

  
225
    # rescaling
226
    for n in G:  # remove nodes to only return edges
227
        del betweenness[n]
228

  
229
    betweenness = _rescale_e(betweenness, len(G),
230
                             normalized=normalized,
231
                             directed=G.is_directed())
232
    return betweenness
233

  
234
# obsolete name
235

  
236

  
237
def edge_betweenness(G, k=None, normalized=True, weight=None, seed=None):
238
    return edge_betweenness_centrality(G, k, normalized, weight, seed)
239

  
240

  
241
# helpers for betweenness centrality
242

  
243 153
def _single_source_shortest_path_basic(G, s):
244 154
    S = []
245 155
    P = {}
......
251 161
    D[s] = 0
252 162
    Q = [s]
253 163
    while Q:   # use BFS to find shortest paths
164
        # print "sigma = %s" % sigma
254 165
        v = Q.pop(0)
255 166
        S.append(v)
256 167
        Dv = D[v]
......
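
The _single_source_shortest_path_basic helper above is standard Brandes bookkeeping: S records nodes in non-decreasing distance from s, P holds shortest-path predecessors, and sigma counts shortest paths. A self-contained restatement of the same idea, written here over a plain adjacency dict (my sketch, independent of this module):

    # Standalone sketch of BFS shortest-path counting (Brandes-style).
    def bfs_path_counts(G, s):
        S = []                        # nodes in non-decreasing distance from s
        P = dict((v, []) for v in G)  # shortest-path predecessors
        sigma = dict.fromkeys(G, 0.0) # number of shortest s-v paths
        sigma[s] = 1.0
        D = {s: 0}                    # BFS distance
        Q = [s]
        while Q:
            v = Q.pop(0)
            S.append(v)
            for w in G[v]:
                if w not in D:            # w discovered for the first time
                    D[w] = D[v] + 1
                    Q.append(w)
                if D[w] == D[v] + 1:      # edge (v, w) is on a shortest path
                    sigma[w] += sigma[v]
                    P[w].append(v)
        return S, P, sigma

    # Example: 4-cycle with two equal-length paths from 0 to 3.
    G = {0: [1, 2], 1: [0, 3], 2: [0, 3], 3: [1, 2]}
    S, P, sigma = bfs_path_counts(G, 0)
    # sigma[3] == 2.0 and P[3] == [1, 2]
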
274 185
    sigma = dict.fromkeys(G, 0.0)    # sigma[v]=0 for v in G
275 186
    D = {}
276 187
    sigma[s] = 1.0
188
    print sigma
277 189
    push = heappush
278 190
    pop = heappop
279 191
    seen = {s: 0}
......
325 237
        if w != s:
326 238
            # print h
327 239
            # betweenness[w] += delta[w] * h
328
            betweenness[w] += delta[w] * 1
240
            betweenness[w] += delta[w]
329 241
            print 'betweenness = %s' % betweenness
330 242
    return betweenness
331 243

  
332 244

  
333
def _accumulate_endpoints(betweenness, S, P, sigma, s, traffic_matrix, nodes):
334
    betweenness[s] += len(S) - 1
245
def _accumulate_weight_basic(betweenness, S, P, sigma, s):
246
    r"""Accumlate the BC score.
247

  
248
    Implements Algorithm 1 in [1]_ (lines 15-21).
249

  
250
    Here the traffic_matrix is not provided, but its values can be deduced:
251
        h(s, v) = 1  if s != v
252
        h(s, v) = 0  if s == v
253

  
254
    .. [1] Rami Puzis:
255
       Optimization of NIDS Placement for Protection of Intercommunicating Critical Infrastructures.
256
    """
257
    # betweenness[s] += len(S) - 1
335 258
    delta = dict.fromkeys(S, 0)
336 259
    while S:
337 260
        w = S.pop()
338
        coeff = (1.0 + delta[w]) / sigma[w]
339
        # coeff = (delta[w]) / sigma[w]
340 261
        if w != s:
341
            h = _get_value_from_traffic_matrix(nodes, traffic_matrix, w, s)
342
            # h = 1
343
            # betweenness[w] += (delta[w] + 1) + h
344
            betweenness[w] += delta[w] + h
345
            print 'betweenness = %s' % betweenness
262
            delta[w] += 1
263
        coeff = delta[w] / sigma[w]
264
        for v in P[w]:
265
            delta[v] += sigma[v] * coeff
266
        if w != s:
267
            betweenness[w] += delta[w]
268
            # print 'out betweenness = %s' % betweenness
269
    return betweenness
270

  
271
def _accumulate_weight(betweenness, S, P, sigma, s, traffic_matrix, nodes):
272
    r"""Accumlate the BC score.
273

  
274
    Implements Algorithm 1 in [1]_ (lines 15-21).
275

  
276
    .. [1] Rami Puzis:
277
       Optimization of NIDS Placement for Protection of Intercommunicating Critical Infrastructures.
278
    """
279
    # betweenness[s] += len(S) - 1
280
    delta = dict.fromkeys(S, 0)
281
    while S:
282
        # print 'in betweenness = %s' % betweenness
283
        w = S.pop()
284
        h = _get_value_from_traffic_matrix(nodes, traffic_matrix, s, w)
285
        delta[w] += h
286
        coeff = delta[w] / sigma[w]
287
        # print "  coeff = %s" % coeff
346 288
        for v in P[w]:
289
            # print "  v = %s" % v
290
            # print "    h = %s" % h
347 291
            delta[v] += sigma[v] * coeff
292
            # print '  delta = %s' % delta
293
        if w != s:
294
            betweenness[w] += delta[w]
295
            # print 'betweenness = %s' % betweenness
348 296
    return betweenness
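
Both accumulation routines fold the demand h(s, w) into Brandes' dependency recursion by seeding delta[w] with h(s, w) before propagating coeff = delta[w] / sigma[w] to the predecessors in P[w]. As a sanity check, the same quantity can be computed by brute force over all shortest paths. A sketch, under two stated assumptions: h(s, t) = 1 for s != t (the value _accumulate_weight_basic deduces), and each node is credited as an interior vertex plus once as the target endpoint, mirroring the seeding of delta[w]:

    # Brute-force cross-check for small connected graphs
    # (my sketch, not module code).
    import networkx as nx

    def brute_force_weight_bc(G, h=lambda s, t: 1.0):
        bc = dict.fromkeys(G, 0.0)
        for s in G:
            for t in G:
                if s == t:
                    continue
                paths = list(nx.all_shortest_paths(G, s, t))
                for path in paths:
                    # path[1:] = interior nodes plus the target endpoint,
                    # matching how delta[w] is seeded with h(s, w).
                    for v in path[1:]:
                        bc[v] += h(s, t) / len(paths)
        return bc
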
349 297

  
350 298

  
......
377 325
        else:
378 326
            scale = None
379 327
    if scale is not None:
328
        print 'scale = %s' % scale
380 329
        if k is not None:
381 330
            scale = scale * n / k
382 331
        for v in betweenness:
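
For reference, the rescaling convention the _rescale helper follows; this restates the standard networkx factors the file derives from, and is not new logic in this revision:

    # Standard betweenness rescaling factors (networkx convention).
    def rescale_factor(n, normalized, directed, k=None):
        if normalized:
            scale = None if n <= 2 else 1.0 / ((n - 1) * (n - 2))
        else:
            scale = None if directed else 0.5  # undirected pairs counted twice
        if scale is not None and k is not None:
            scale = scale * n / k              # correct for sampled pivots
        return scale
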
fiddle/heuristic-betweenness-centrality/centrality_biconnected.py
2 2
# Heuristic Betweenness Centrality - Partitioning into Bi-connected Components
3 3

  
4 4
import os
5
import pprint
5 6
import networkx as nx
6 7
import betweenness_centrality as centrality
7 8
import utility
......
27 28
        self.finalize()
28 29

  
29 30
    def calculate_bc_non_cutpoint(self):
30
        """BCen for non cutpoint
31
        """BC for non cutpoint
31 32
        """
32 33
        for i, subgraphs in enumerate(self.subgraphs):
33 34
            traffic_matrix = self.traffic_matrix[i]
34
            results = centrality.betweenness_centrality(subgraphs, traffic_matrix, endpoints=True)
35
            results = centrality.weight_betweenness_centrality(subgraphs, traffic_matrix)
35 36
            self.bc_components.append(results)
36 37

  
37 38
            # print results
......
58 59
                    # debugger()
59 60
                    bc_locally += self.bc_components[i][v]
60 61
            print 'locally = %s' % bc_locally
61
            self.bc_cutpoints[v] = bc_locally - bc_inter[v]
62
            # self.bc_cutpoints[v] = bc_locally - bc_inter[v]
63
            # TODO: do not subtract bc_inter
64
            self.bc_cutpoints[v] = bc_locally
62 65

  
63 66
    def finalize(self):
64 67
        # Add the bc for non cutpoint vertices
......
71 74
        for key, value in self.bc_cutpoints.iteritems():
72 75
            self.bc[key] = value
73 76

  
77
        print '*** betweenness = %s' % self.bc
74 78
        # Rescale the bc according to the original graph
75
        factor = 2.0 / (self.num_vertices*self.num_vertices - 3 * self.num_vertices + 2)
79
        factor = 1.0 / ((self.num_vertices - 1) * (self.num_vertices - 2))
80
        # factor = 2.0 / (self.num_vertices*self.num_vertices - 3 * self.num_vertices + 2)
76 81
        for key, value in self.bc.iteritems():
77 82
            self.bc[key] = value * factor
78 83

  
......
84 89

  
85 90
    def __str__(self):
86 91
        return str(self.bc)
87
        # for bc_component in self.bc_components:
88
        #     print bc_component
89 92

  
90 93

  
91 94
class TrafficMatrix():
......
167 170

  
168 171
        self.Dv_B = [dict() for i in range(self.num_components)] # link weight
169 172
        self.generate_link_weight()
173
        # self.compute_component_tree_weight()
170 174

  
171 175
        self.reverse_Dv_B = [dict() for i in range(self.num_components)] # reverse link weight
172 176
        self.generate_reverse_link_weight()
......
181 185
        return indices
182 186

  
183 187
    def get_link_weight(self, comp_index, point):
184
        print "Dv_B = %s" % str(self.Dv_B)
185

  
186 188
        Dv_B_comp = self.Dv_B[comp_index]
187 189

  
188 190
        if point in Dv_B_comp:
......
190 192
        else:
191 193
            return 0
192 194

  
195
    def set_link_weight(self, comp_index, point, value):
196
        self.Dv_B[comp_index][point] = value
197

  
193 198
    def get_reverse_link_weight(self, comp_index, point):
194 199
        reverse_Dv_B_comp = self.reverse_Dv_B[comp_index]
195 200

  
......
207 212
            B_plus_v.append(comp.difference(points))
208 213
            B_cutpoints.append(points)
209 214

  
210
        print 'B_cutpoints %s' % str(B_cutpoints)
211 215
        len_B_plus_v = [len(x) for x in B_plus_v]
212 216
        len_B_cutpoints = [len(x) for x in B_cutpoints]
213 217

  
......
220 224
                point = list(cp)[0] # there is only 1 element
221 225
                self.Dv_B[i][point] = len_B_plus_v[i]
222 226

  
223
        print 'B_cutpoints %s' % str(B_cutpoints)
227
        print "Link Weight - For all the leaves: %s" % self.Dv_B
224 228

  
225 229
        # For other nodes in the block-cut tree
226 230
        level += 1
227 231
        while level <= max(len_B_cutpoints):
232
            if level == 3:
233
                debugger()
228 234
            for (i, cp) in enumerate(B_cutpoints):
229 235
                if len_B_cutpoints[i] == level:
230 236

  
231 237
                    for point in cp:
232
                        print 'cutpoints = %s' % cp
238
                        # 1st way
233 239
                        shared_comp_indices = self._components_sharing_cutpoint(B_cutpoints, point)
234 240
                        shared_comp_indices.remove(i)
235
                        print 'shared indices %s' % shared_comp_indices
236 241
                        weight_shared_comp = list()
237 242

  
238 243
                        for index in shared_comp_indices:
239 244
                            weight_shared_comp.append(self.get_link_weight(index, point))
240 245

  
241
                        print 'weight shared comp %s' % str(weight_shared_comp)
242
                        weight = num_vertices - 1 - sum(weight_shared_comp)
246
                        weight = self.num_vertices - 1 - sum(weight_shared_comp)
243 247

  
244 248
                        self.Dv_B[i][point] = weight
245 249

  
250
                        # # 2nd way
251
                        # sum_of_connected_vertices = 0
252
                        # for point_temp in cp:
253
                        #     if point_temp != point:
254
                        #         sum_of_connected_vertices += self.num_vertices -
255
            print "Link Weight - For level %s: %s" % (level, self.Dv_B)
256

  
246 257
            level += 1
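
As a concrete check of the leaf rule above, worked on a toy "bowtie" of two triangles sharing one cutpoint (the numbers are mine, not from this revision):

    # Bowtie: components B0 = {0, 1, 2} and B1 = {2, 3, 4},
    # cutpoint v = 2, n = 5 vertices in total.
    # Both components are leaves (exactly one cutpoint each), so:
    #   Dv_B[0][2] = |B0 - {2}| = 2   # vertices "behind" 2 inside B0
    #   Dv_B[1][2] = |B1 - {2}| = 2
    # Reverse link weight: n - 1 - Dv_B[i][2] = 5 - 1 - 2 = 2 for both.
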
247 258

  
248 259
    def generate_reverse_link_weight(self):
......
250 261
            for key, value in Dv_B_i.iteritems():
251 262
                self.reverse_Dv_B[i][key] = self.num_vertices - 1 - self.get_link_weight(i, key)
252 263

  
253
        print 'reverse = %s' % self.reverse_Dv_B
264
    def compute_component_tree_weight(self):
265
        """Follows exactly the Algorithm 1 [Puzis 2012]
266
        """
267
        B_cutpoints = list()  # cutpoints contained in each component B
268
        for comp in self.bicomponents:
269
            points = comp.intersection(self.cutpoints)
270
            B_cutpoints.append(points)
271

  
272
        Q = self._initialize_component_tree_weight()
273
        while Q:
274
            print self.Dv_B
275
            pair = Q.pop(0)
276
            if pair['type'] == 'component_vertex_pair':
277
                B_index = pair['value'][0]
278
                v = pair['value'][1]
279
                print '  CV: %s %s' % (B_index, v)
280
                size = len(self.bicomponents[B_index]) - 1
281
                all_cutpoints = self.bicomponents[B_index].intersection(self.cutpoints)
282
                all_cutpoints.remove(v)
283

  
284
                for cp in all_cutpoints:
285
                    if self.get_link_weight(B_index, cp) != -1:
286
                        size += self.num_vertices - self.get_link_weight(B_index, cp)
287

  
288
                self.set_link_weight(B_index, v, size)
289

  
290
                # update Q
291
                Q = self._find_unknown_weight_wrt_cutpoint(v, Q)
292

  
293
            if pair['type'] == 'vertex_component_pair':
294
                size = 1
295
                B_index = pair['value'][0]
296
                v = pair['value'][1]
297
                print '  vc: %s %s' % (B_index, v)
298
                shared_comp_indices = self._components_sharing_cutpoint(B_cutpoints, v)
299
                shared_comp_indices.remove(B_index)
300

  
301

  
302
                for i in shared_comp_indices:
303
                    if self.get_link_weight(i, v) != -1:
304
                        size += self.get_link_weight(i, v)
305

  
306
                self.set_link_weight(B_index, v, self.num_vertices - 1 - size)
307

  
308
                # update Q
309
                Q = self._find_unknown_weight_wrt_component(B_index, Q)
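
compute_component_tree_weight walks the block-cut tree implied by networkx's biconnected decomposition. A small sketch of the primitives it builds on; the toy "bowtie" graph is mine, the calls are standard networkx:

    # Decomposition primitives behind the component tree.
    import networkx as nx

    # Bowtie: two triangles sharing node 2, the only cutpoint.
    G = nx.Graph([(0, 1), (1, 2), (2, 0), (2, 3), (3, 4), (4, 2)])

    bicomponents = list(nx.biconnected_components(G))  # [{0, 1, 2}, {2, 3, 4}]
    cutpoints = set(nx.articulation_points(G))         # {2}

    # Each component's cutpoints are its attachment points in the tree.
    for i, comp in enumerate(bicomponents):
        print i, sorted(comp), sorted(comp & cutpoints)
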
310

  
311
    def _inititalize_component_tree_weight(self):
312
        Q = []
313
        for i, comp in enumerate(self.bicomponents):
314
            current_cutpoints = self.cutpoints.intersection(comp)
315
            if len(current_cutpoints) == 1:
316
                Q.append({
317
                    'type': 'component_vertex_pair',
318
                    'value': (i, list(current_cutpoints)[0]) # (B_i, cutpoint) = (i-th component, the cutpoint name)
319
                    })
320
            for cp in current_cutpoints:
321
                self.set_link_weight(i, cp, -1)
322
        return Q
323

  
324
    def _find_unknown_weight_wrt_cutpoint(self, cutpoint, Q):
325
        for i, Dv_B_comp in enumerate(self.Dv_B):
326
            for cp, value in Dv_B_comp.iteritems():
327
                if value == -1:
328
                    pair = {
329
                        'type': 'vertex_component_pair',
330
                        'value': (i, cp)
331
                    }
332
                    Q.append(pair)
333
        return Q
334

  
335
    def _find_unknown_weight_wrt_component(self, comp_index, Q):
336
        Dv_B_comp = self.Dv_B[comp_index]
337
        for cp, value in Dv_B_comp.iteritems():
338
            if value == -1:
339
                pair = {
340
                    'type': 'component_vertex_pair',
341
                    'value': (comp_index, cp)
342
                }
343
                Q.append(pair)
344
        return Q
345

  
254 346

  
255 347
    def __str__(self):
256 348
        return str(self.Dv_B)
......
260 352
    print bicomponents
261 353
    print [len(x) for x in bicomponents]
262 354

  
263
if __name__ == '__main__':
264
    filepath = MAIN_CODE_DIR + '/input/simple.edges'
265
    file_suffix = 'edge_list'
355
def find_heuristic_bc(filepath):
356
    pp = pprint.PrettyPrinter(indent=4)
266 357

  
267
    # Brandes betweenness centrality
268
    utility.brandes_betweenness_centrality(filepath, file_suffix)
269

  
270
    # Heuristic betweenness centrality
271 358
    graph = nx.read_weighted_edgelist(filepath)
272 359

  
273 360
    # Find biconnected components
......
276 363
    subgraphs = list(nx.biconnected_component_subgraphs(graph))
277 364
    bicomponents = list(nx.biconnected_components(graph))
278 365
    component_edges = list(nx.biconnected_component_edges(graph))
279
    # cutpoints = list(nx.articulation_points(graph))
280 366
    cutpoints = set(nx.articulation_points(graph))
281 367

  
282 368
    num_vertices = nx.number_of_nodes(graph)
283 369

  
284 370
    lw = LinkWeight(graph, bicomponents, cutpoints)
285 371
    print 'link weight: '
286
    print lw
372
    print(lw)
373

  
374
    # tm = TrafficMatrix(bicomponents, cutpoints, lw)
375
    # print 'traffic matrix:'
376
    # pp.pprint(tm.h)
287 377

  
288
    tm = TrafficMatrix(bicomponents, cutpoints, lw)
289
    print tm
378
    # bc = HeuristicBetweennessCentrality(subgraphs, bicomponents, cutpoints, num_vertices, lw, tm)
379
    # bc.write(file_suffix)
380
    # # print bc
290 381

  
382
if __name__ == '__main__':
383
    # filepath = MAIN_CODE_DIR + '/input/simple_house.edges'
384
    # filepath = MAIN_CODE_DIR + '/input/simple2.edges'
385
    # filepath = MAIN_CODE_DIR + '/input/simple.edges'
386
    # filepath = MAIN_CODE_DIR + '/input/ninux_30_1.edges'
387
    # filepath = MAIN_CODE_DIR + '/input/simple3.edges'
388
    filepath = MAIN_CODE_DIR + '/input/simple4.edges'
389
    file_suffix = 'edge_list'
390

  
391
    # Weight betweenness centrality
392
    utility.weight_betweenness_centrality(filepath, file_suffix)
393

  
394
    # Brandes betweenness centrality
395
    utility.brandes_betweenness_centrality(filepath, file_suffix)
291 396

  
292
    bc = HeuristicBetweennessCentrality(subgraphs, bicomponents, cutpoints, num_vertices, lw, tm)
293
    bc.write(file_suffix)
294
    print bc
397
    # Heuristic BC
398
    find_heuristic_bc(filepath)
fiddle/heuristic-betweenness-centrality/gnuplot_script
5 5
set xlabel "Router (each integer corresponds to one router)"
6 6
set ylabel "Betweenness Centrality"
7 7
plot  "./output/score_summary_edge_list.txt" using 2 title 'networkx' pointtype 7 pointsize 0.7, \
8
      "./output/score_summary_edge_list.txt" using 3 title 'heuristic'
8
      "./output/score_summary_edge_list.txt" using 3 title 'weight basic' pointtype 5 pointsize 1,\
9
      "./output/score_summary_edge_list.txt" using 4 title 'networkx - endpoints' pointtype 8 pointsize 0.5,\
fiddle/heuristic-betweenness-centrality/utility.py
1 1
import os
2 2
import networkx as nx
3
import betweenness_centrality as centrality
3 4

  
4 5
MAIN_CODE_DIR = os.environ.get('MAIN_CODE_DIR', '')
5 6

  
7
def weight_betweenness_centrality(filepath, file_suffix):
8
    graph = nx.read_weighted_edgelist(filepath)
9

  
10
    score = centrality.weight_betweenness_centrality(graph, rescale=True)
11
    filepath = '/output/weight_basic_%s.csv'  % file_suffix
12

  
13
    with open(MAIN_CODE_DIR + filepath, 'w') as output:
14
        for node, bc in score.iteritems():
15
            output.write('%s, %s\n' % (node, bc))
16

  
6 17
def brandes_betweenness_centrality(filepath, file_suffix):
7 18
    graph = nx.read_weighted_edgelist(filepath)
8 19

  
9
    score = nx.betweenness_centrality(graph, endpoints=True)
20
    score = nx.betweenness_centrality(graph)
10 21
    filepath = '/output/networkx_%s.csv'  % file_suffix
11 22

  
12 23
    with open(MAIN_CODE_DIR + filepath, 'w') as output:
13
        for node, centrality in score.iteritems():
14
            output.write('%s, %s\n' % (node, centrality))
24
        for node, bc in score.iteritems():
25
            output.write('%s, %s\n' % (node, bc))
26

  
27
def brandes_betweenness_centrality_endpoints(filepath, file_suffix):
28
    graph = nx.read_weighted_edgelist(filepath)
29

  
30
    score = nx.betweenness_centrality(graph, endpoints=True)
31
    filepath = '/output/heuristic_%s.csv'  % file_suffix
32

  
33
    with open(MAIN_CODE_DIR + filepath, 'w') as output:
34
        for node, bc in score.iteritems():
35
            output.write('%s, %s\n' % (node, bc))
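
For completeness, a hedged sketch of how these helpers are driven; the input path is one of the examples referenced in centrality_biconnected.py, and MAIN_CODE_DIR must be exported, as the module expects:

    # Hypothetical driver for the helpers above (Python 2, like the module).
    import os
    import utility

    MAIN_CODE_DIR = os.environ.get('MAIN_CODE_DIR', '')
    filepath = MAIN_CODE_DIR + '/input/simple4.edges'  # illustrative input
    file_suffix = 'edge_list'

    utility.weight_betweenness_centrality(filepath, file_suffix)
    utility.brandes_betweenness_centrality(filepath, file_suffix)
    utility.brandes_betweenness_centrality_endpoints(filepath, file_suffix)
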
