Statistics
| Branch: | Revision:

iof-tools / mrai_setter / milaniBGPLoad.py @ cf1932f7

History | View | Annotate | Download (4.86 KB)

1
#!/usr/bin/env python
2
# -*- coding: utf-8 -*-
3

    
4
# This program is free software: you can redistribute it and/or modify
5
# it under the terms of the GNU General Public License as published by
6
# the Free Software Foundation, either version 3 of the License, or
7
# (at your option) any later version.
8
#
9
# This program is distributed in the hope that it will be useful,
10
# but WITHOUT ANY WARRANTY; without even the implied warranty of
11
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12
# GNU General Public License for more details.
13
#
14
# You should have received a copy of the GNU General Public License
15
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
16
#
17
# Copyright (C) 2019  Mattia Milani <mattia.milani@studenti.unitn.it>
18
"""Milani Centrality."""
19
from operator import itemgetter
20

    
21
import networkx as nx
22

    
23

    
24

    
25
def mice_centrality(G, cutoff=None, normalized=True, weight=None, destIdentifier='destinations'):
    """Compute distributed partial centrality (dpc load) for nodes.

    The centrality of a node is the fraction of all shortest
    paths, between pairs of stub nodes, that pass through that node.

    Parameters
    ----------
    G : graph
      A networkx graph.

    cutoff : int or None, optional (default=None)
      Depth limit forwarded to the shortest-path search; None means no limit.

    normalized : bool, optional (default=True)
      If True the dpc values are scaled by 1/(n*(n-1)) where
      n is the number of stub (destination) nodes.

    weight : None or string, optional (default=None)
      If None, edge weights are ignored.
      Otherwise holds the name of the edge attribute used as weight.

    destIdentifier : string, (identifier for the stub nodes)
      If specified is a parameter used to discern between stub nodes and transit nodes

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with centrality as the value.

    See Also
    --------
    betweenness_centrality()
    """
    # Initially every node carries a load of 0
    dpc_load = dict.fromkeys(G, 0.0)
    # Stub nodes are exactly those carrying the destIdentifier attribute;
    # only they act as sources for the load dispersion below.
    stub_nodes = [node for node in G.nodes if destIdentifier in G.nodes[node]]

    # Accumulate the load dispersion between each couple of stub nodes:
    # every stub source contributes a partial centrality to each node.
    for source in stub_nodes:
        ubetween = _node_betweenness(G, source, cutoff, weight, destIdentifier=destIdentifier)
        for node, contribution in ubetween.items():
            dpc_load[node] += contribution

    if normalized:
        order = len(stub_nodes)
        if order <= 1:
            return dpc_load  # no normalization with fewer than 2 stub nodes
        # 1.0 (not 1) guards against integer truncation if this file is ever
        # run under Python 2, which the bare "python" shebang permits.
        scale = 1.0 / (order * (order - 1))  # 1 / two times the summarized input load
        for node in dpc_load:
            dpc_load[node] *= scale
    return dpc_load  # all nodes
79

    
80

    
81
def _node_betweenness(G, source, cutoff=False, weight=None, destIdentifier='destinations'):
    """Single-source partial-centrality helper.

    See betweenness_centrality for what you probably want.
    This actually computes "partial centrality" and not betweenness:
    the fraction of shortest paths rooted at ``source`` that traverse
    each node, where only stub nodes (those carrying the
    ``destIdentifier`` attribute) inject a unit contribution.

    To get the load for a node you need to do all-pairs shortest paths.
    If weight is not None then Dijkstra is used for the shortest paths.
    """
    # NOTE(review): default cutoff=False differs from the caller's None —
    # confirm it is intentional (the only visible caller always passes cutoff).
    # Shortest-path predecessors and distances from the source.
    if weight is not None:
        pred, length = nx.dijkstra_predecessor_and_distance(G, source,
                                                            cutoff, weight)
    else:
        pred, length = nx.predecessor(G, source, cutoff=cutoff,
                                      return_seen=True)

    # Deterministic tie-break: prune each predecessor list down to the one
    # parent with the smallest numeric label (labels assumed int-convertible).
    for node, parents in pred.items():
        if parents:
            parents[:] = [min(parents, key=int)]

    # Nodes sorted by increasing distance; the source itself (distance 0)
    # and anything at distance <= 0 is dropped.
    by_distance = sorted((dist, node) for node, dist in length.items())
    ordered = [node for dist, node in by_distance if dist > 0]

    # Seed contributions: stub nodes propagate 1.0, transit nodes 0.0.
    between = dict.fromkeys(length, 1.0)
    for node in G.nodes:
        between[node] = 1.0 if destIdentifier in G.nodes[node] else 0.0

    # Walk from the farthest node inward, splitting each node's accumulated
    # contribution evenly among its (pruned) predecessors.
    for node in reversed(ordered):
        if node not in pred:
            continue
        parents = pred[node]
        num_paths = len(parents)  # discount if more than one shortest path
        for parent in parents:
            if parent == source:  # stop if hit source; pred may be [source]
                break
            between[parent] += between[node] / float(num_paths)

    # Remove the unit each stub node injected on its own behalf.
    for node in G.nodes:
        if destIdentifier in G.nodes[node]:
            between[node] -= 1.0

    return between