Revision 1ef4948a

View differences:

autoPlot.py
1
import glob
2
import code  # code.interact(local=dict(globals(), **locals()))
3
from pprint import pprint
4
import sys
5
import glob
6
from pprint import pprint
7
import code
8
import subprocess
9
import os
10
import time
11
import shutil
12

  
13
interval = 100
14
if len(sys.argv)>1:
15
    interval = int(sys.argv[1])
16

  
17
#code.interact(local=dict(globals(), **locals()))
18

  
19
for fold in sorted(glob.glob('out/rwp*/')):
20
    pass
21
    #cmd = "python plotterBCrealization.py "+fold+" "+str(interval)
22
    cmd = "python timeAnalysis.py "+fold+" "+str(interval)
23
    print cmd
24
    #p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
25
    p = subprocess.Popen(cmd, shell=True)
26
    p.communicate()
27
    time.sleep(1)
documents/reportInizialeRWP-TCVM/rwpTVCM.md
1
---
2
title: "Autocorrelation of BC with RandomWayPoint and TimeVariantCommunityModel"
3
subtitle: "Results for different Communication Ranges and Number of Nodes"
4
author: [Lorenzo Ghiro]
5
date: \today
6
#subject: "Network algorithms"
7
#keywords: [Markdown, Example]
8
titlepage: true
9
#titlepage-color: "06386e"
10
#titlepage-text-color: "FFFFFF"
11
#titlepage-rule-color: "FFFFFF"
12
#titlepage-rule-height: 1
13

  
14
header-includes:
15
    - \usepackage{xspace}
16
    - \usepackage{amsmath}
17
    - \usepackage{amssymb}
18
    - \usepackage{cleveref}
19
    - \usepackage{hyperref}
20
    - \hypersetup{colorlinks=true, linkcolor=blue, filecolor=magenta, urlcolor=cyan}
21
...
22

  
23
# Methodology
24

  
25
 ![](images/workflow.pdf){ width=100% }
26

  
27
**NB: over time the graph may become disconnected; the BC of a disconnected node is 0.**
28

  
29
\newpage
30

  
31
# Results
32

  
33
## Random Way Point
34

  
35
### Number of Nodes = 30, Radius in [10,20,30]
36

  
37
![Radius = 10, NNodes = 30](images/rwp/r10_30n_autoBC-3d.pdf){ width=80% }
38

  
39
![Radius = 20, NNodes = 30](images/rwp/r20_30n_autoBC-3d.pdf){ width=80% }
40

  
41
![Radius = 30, NNodes = 30](images/rwp/r30_30n_autoBC-3d.pdf){ width=80% }
42

  
43
<!---
44
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr.
45

  
46
![](images/adorf.eps){ width=80% }
47

  
48
![](images/sintExp.pdf){ width=80% }
49

  
50
\begin{longtable}[]{llllllll}
51
\caption[Nam liber tempor cum soluta nobis eleifend option congue.]{Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.} \\
52
\toprule
53
Test Nr. & Position & Radius & Rot & Grün & Blau &
54
beste Fitness & Abweichung\tabularnewline
55
\midrule
56
\endhead
57
1 & 20 \% & 20 \% & 20 \% & 20 \% & 20 \% & 7,5219 &
58
0,9115\tabularnewline
59
2 & 0 \% & 25 \% & 25 \% & 25 \% & 25 \% & 8,0566 &
60
1,4462\tabularnewline
61
3 & 0 \% & 0 \% & 33 \% & 33 \% & 33 \% & 8,7402 & 2,1298\tabularnewline
62
4 & 50 \% & 20 \% & 10 \% & 10 \% & 10 \% & 6,6104 &
63
0,0000\tabularnewline
64
5 & 70 \% & 0 \% & 10 \% & 10 \% & 10 \% & 7,0696 &
65
0,4592\tabularnewline
66
6 & 20 \% & 50 \% & 10 \% & 10 \% & 10 \% & 7,0034 &
67
0,3930\tabularnewline
68
\bottomrule
69
\end{longtable}
70

  
71
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr.
72

  
73
## Image with Caption
74

  
75
![Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.](image.png)
76

  
77
## Markdown Table without Caption
78

  
79
Lorem markdownum Letoia, et alios: figurae flectentem annis aliquid Peneosque abesse, obstat gravitate. Obscura atque coniuge, per de coniunx, sibi medias
80
commentaque virgine anima tamen comitemque petis, sed. In Amphion vestros
81
hamos ire arceor mandere spicula, in licet aliquando.
82

  
83
Test Nr. | Position | Radius | Rot | Grün | Blau | beste Fitness | Abweichung |
84
|---|---|---|---|---|---|---|---|
85
1 |  20 % |  20 % |  20 % |  20 % |  20 % |  7,5219 |  0,9115 |
86
2 |   0 % |  25 % |  25 % |  25 % |  25 % |  8,0566 |  1,4462 |
87
3 |   0 % |   0 % |  33 % |  33 % |  33 % |  8,7402 |  2,1298 |
88
4 |  50 % |  20 % |  10 % |  10 % |  10 % |  6,6104 |  0,0000 |
89
5 |  70 % |   0 % |  10 % |  10 % |  10 % |  7,0696 |  0,4592 |
90
6 |  20 % |  50 % |  10 % |  10 % |  10 % |  7,0034 |  0,3930 |
91
7 |  40 % |  15 % |  15 % |  15 % |  15 % |  6,9122 |  0,3018 |
92

  
93
Porrigitur et Pallas nuper longusque cratere habuisse sepulcro pectore fertur.
94
Laudat ille auditi; vertitur iura tum nepotis causa; motus. Diva virtus! Acrota destruitis vos iubet quo et classis excessere Scyrumve spiro subitusque mente Pirithoi abstulit, lapides.
95

  
96
## Image without Caption
97

  
98
![](image.png)
99
-->
plotterBCrealization.py
1
"""plotterBCrealization.py -- per-folder plots of betweenness-centrality (BC)
time series.

Usage: python plotterBCrealization.py <folder> [interval]

Reads every BC*.csv snapshot in <folder> (one column of per-node BC values
per time instant) and writes, under <folder>/plots<nick>/:
  * windowed line plots of node 0's BC realization (BCreal/),
  * windowed BC heatmaps over all nodes (TimeFramesHeatmaps/),
  * a "core resistance" membership heatmap and persistence histogram.
"""
import pandas as pd
from pprint import pprint
import numpy as np
import glob
from matplotlib import pyplot
import seaborn as sns; sns.set()
from statsmodels.graphics.tsaplots import plot_acf, acf
import sys
import os
from collections import defaultdict
import matplotlib.pyplot as plt
from scipy import stats
import operator
import code  # code.interact(local=dict(globals(), **locals()))

folder = sys.argv[1]
# Width (in time instants) of each plotted window.
interval = 100
if len(sys.argv)>2:
    interval = int(sys.argv[2])
# Output-file prefix derived from the folder name
# (e.g. "out/r10_30n/" -> "r10_").
nick = folder.split('/')[-2].split('_')[0]+"_"


os.chdir(folder)

dfn = pd.DataFrame() #rows=nodes columns=BC at column-index time-instant 
print "Loading data from", folder, "..."
for snap in sorted(glob.glob('./BC*')):
    # print snap
    # Each snapshot holds one BC value per node; concatenating side-by-side
    # makes column t the BC vector at time instant t.
    df = pd.read_csv(snap, names=['BC'], skiprows=1)
    dfn = pd.concat([dfn,df], axis=1)

print "Processing and plotting..."
if not os.path.exists("plots"+nick):
    os.makedirs("plots"+nick)
os.chdir("plots"+nick)

nodes = dfn.index.tolist()
# BC of every node at t=0.  NOTE(review): computed but never used below.
initialCentrality = {}
for n in nodes:
    initialCentrality[n] = dfn.iloc[n][0]
# Full BC time series of node 0, used for the realization plots below.
n0 = dfn.iloc[0]
y = n0.values

'''
#Batch Means of ACF
print "Bacth Means of ACF..."
nlg=15
memo=50
batMeans = []
for i in range(0, len(y)-memo, memo):
    bacf = acf(y[i:i+memo], nlags=nlg)
    batMeans.append(np.mean(bacf))

pd.Series(batMeans).plot()
plt.ylabel("Mean ACF for lags [0...15]")
plt.xlabel("Batches of 50 samples")
plt.savefig(nick+"batchMeansACF.pdf", format='pdf')
plt.clf()'''

# BC realization of a random node
print "BC realization of a random node..."
if not os.path.exists("BCreal"):
    os.makedirs("BCreal")
os.chdir("BCreal")

# One line plot per `interval`-wide window of node 0's BC series; a common
# y-range keeps the windows visually comparable.
for i in range(0, len(y)-interval, interval):
    plt.plot(range(i, i+interval, 1), y[i:i+interval])
    plt.ylim(min(y), max(y))
    plt.xlabel("Time [s]")
    plt.ylabel("Betweenness Centrality (NON-norm)")
    plt.savefig(nick+"BCrealization["+str(i)+"-"+str(i+interval)+"].pdf", format='pdf')
    plt.clf()
os.chdir("./..")

# BC Heatmaps for consecutive time-frames
print "BC Heatmaps for consecutive time-frames"
if not os.path.exists("TimeFramesHeatmaps"):
    os.makedirs("TimeFramesHeatmaps")
os.chdir("TimeFramesHeatmaps")
sns.set(font_scale=0.5)  # shrink fonts so per-instant tick labels fit
for i in range(0, len(y)-interval, interval):
    xticks=range(i, i+interval)
    #yticks=range(0, len(dfn),5)
    sns.heatmap(dfn.iloc[:,xticks],cmap="Spectral", xticklabels = xticks, cbar_kws={'label': 'BC'})
    #ax.set_xticks(range(i, i+interval))
    plt.xlabel("Time [sec]")
    plt.ylabel("Nodes")
    plt.yticks(rotation=0)
    plt.savefig(nick+"BCrealization["+str(i)+"-"+str(i+interval)+"].pdf", format='pdf')
    plt.clf()
os.chdir("./..")
sns.set(font_scale=1)  # restore the default font scale for later plots
93

  
94

  
95
def coreNodesAtTime(t, perc):
    """Return the top-`perc` percent of nodes by BC at time instant `t`.

    Reads the module-level `dfn` (rows = nodes, columns = time instants)
    and returns a tuple (coreDict, coreRank, coreNodes):
      coreDict  -- node -> BC value, core nodes only
      coreRank  -- node -> 0-based rank inside the core (0 = highest BC)
      coreNodes -- core node ids, in decreasing-BC order
    """
    centrality = dict(dfn.iloc[:, t])
    ranking = sorted(centrality.items(),
                     key=operator.itemgetter(1), reverse=True)
    cutoff = int(len(ranking) * (perc / 100.0))
    top = ranking[:cutoff]
    coreNodes = [node for node, _ in top]
    coreDict = dict(top)
    coreRank = {node: rank for rank, (node, _) in enumerate(top)}
    return coreDict, coreRank, coreNodes
105

  
106
print "CoreResistence..."
'''dfCoreResist = pd.DataFrame()
for t in range(len(dfn.iloc[0])):
    coreT, coreRankT, coreNodes = coreNodesAtTime(t, 20)
    corePD = pd.DataFrame(coreNodes)
    dfCoreResist = pd.concat([dfCoreResist, corePD], axis=1)'''
# activeMap[n] is True while node n is currently considered part of the
# "core"; coreResistMap[t] records per-node membership at time t.
activeMap = defaultdict(bool)
coreResistMap = [{}]
# Seed membership with the top-20% nodes at t=0.
firstCore = coreNodesAtTime(0, 20)[2]
for n in nodes:
    flag = n in firstCore
    activeMap[n] = flag
    coreResistMap[0][n] = flag

# Hysteresis rule: a node ENTERS the core only by reaching the upper half
# of the current top-20% ranking, but LEAVES only by dropping out of the
# top-20% entirely.
for t in range(1, len(dfn.iloc[0])):
    coreNodes = coreNodesAtTime(t, 20)[2]
    old_Actives = [k for k, v in activeMap.items() if v]
    # drop whoever is no longer in the top 20%
    for n in old_Actives:
        if n not in coreNodes:
            activeMap[n] = False
    # admit newcomers found in the upper half of the ranking
    for n in coreNodes[:len(coreNodes)/2]:
        activeMap[n] = True
    # update coreResistMap
    # NOTE(review): an active node that is NOT in coreNodes gets no entry
    # in `resistings`, which becomes NaN in resDF below -- confirm that
    # this gap is intended rather than an omitted True.
    resistings = {}
    for n in nodes:
        if activeMap[n]:
            if n in coreNodes:
                resistings[n] = True
        else:
            resistings[n] = False
    coreResistMap.append(resistings)

from matplotlib.colors import LinearSegmentedColormap

# Two-level colormap: white = out of the core, blue = in the core.
cmap1 = LinearSegmentedColormap.from_list('mycmap1', ['white', 'blue'], 2)
resDF = pd.DataFrame(coreResistMap).transpose()
xticks = range(0, len(resDF.iloc[0]),1)
sns.heatmap(resDF, cmap=cmap1, xticklabels = xticks, cbar_kws={'label': '\"Core Or Not\" (Blue or White)'})#

plt.ylabel("Nodes")
plt.xlabel("Time")
plt.savefig(nick+"coreResistMap-EntryTOP10LeavingTOP20.pdf", format='pdf')
plt.clf()
151

  
152
def activeIntervals(v):
    """Return the lengths of the completed runs of truthy values in `v`.

    A run that is still open when the sequence ends is intentionally not
    reported, since its total length is unknown.
    """
    lengths = []
    run = 0
    for flag in v:
        if flag:
            run += 1
        elif run > 0:
            # A run just closed: record its length and start over.
            lengths.append(run)
            run = 0
    return lengths
171

  
172

  
173
# Per-node lists of completed core-permanence interval lengths.
nodes2interval = {}
for n in nodes:
    nodes2interval[n] = activeIntervals(resDF.iloc[n])

# Flatten every node's intervals into one sample.
allint = []
for e in nodes2interval.values():
    allint = allint+e
np.mean(allint)  # NOTE(review): result is discarded -- dead statement?

#code.interact(local=dict(globals(), **locals()))
pd.DataFrame(allint).hist(bins=50, normed=True)
plt.xlabel("Intervals of Persistence in the core [sec]")
plt.ylabel("Normalized Frequency")
plt.savefig(nick+"PersistenceDistributionEntryTOP10LeavingTOP20.pdf", format='pdf')
plt.clf()

# Dump summary statistics (count/mean/std/quartiles) of the intervals.
f=open(nick +"stats.txt",'w')
f.write(str(pd.DataFrame(allint).describe()))
f.close()
settings.ini
11 11
max_wait_time = 0.0
12 12

  
13 13
[baseTVCM]
14
duration = 100s
14
duration = 10000s
15 15
mobility_timer = 1s
16 16
mob_model = TimeVariantCommunity
17 17
nodes_x_group = 5
......
23 23
radius = 20.0
24 24

  
25 25
# RandomWayPoint exps
26

  
27
# Molti nodi Raggio 30
28
[rwpR30N250:baseRWP]
29
nodes_number = 250
30
radius = 30.0
31

  
32
[rwpR30N500:baseRWP]
33
nodes_number = 500
34
radius = 30.0
35

  
36
[rwpR30N750:baseRWP]
37
nodes_number = 750
38
radius = 30.0
39

  
40
[rwpR30N1000:baseRWP]
41
nodes_number = 1000
42
radius = 30.0
43

  
26 44
# 50 nodi
27 45
[rwpR30N50:baseRWP]
28 46
nodes_number = 50
......
62 80
nodes_number = 30
63 81
radius = 10.0
64 82

  
83
[rwpR5N30:baseRWP]
84
nodes_number = 30
85
radius = 5.0
86

  
65 87

  
66 88
# Time-Variant-Community Model
67 89

  
simulator.py
10 10

  
11 11
import networkx as nx
12 12
import util.MyUtil as myu
13
import util.nx2gt as nx2gt
13 14
from time import sleep
14 15
import matplotlib.pyplot as plt
15 16
plt.ion()
......
45 46

  
46 47
                if self.gui:
47 48
                    self.draw(G, positions)
48
                stats = ['BC', 'LC', 'DEG']
49
                #stats = ['BC', 'LC', 'DEG']
50
                stats = ['BC']
49 51

  
50 52
                self.logStatistics(G, stats, sched.processed_events())
51 53
                # schedule next DV
......
58 60
        tag = '%08d' % tag
59 61
        for stat in whatLog:
60 62
            if stat == 'BC':
61
                bw_centrality = nx.betweenness_centrality(
62
                    G, normalized=False,  weight='weight', endpoints=True)
63
                df = pd.DataFrame(bw_centrality.items(),
64
                                  columns=['Node', 'BC'])
63
                '''bw_centrality = nx.betweenness_centrality(G, normalized=False,  weight='weight', endpoints=True)'''
64
                #code.interact(local=dict(globals(), **locals()))
65
                import graph_tool.centrality as gtc
66
                gtG = nx2gt.nx2gt(G)
67
                ws=gtG.edge_properties["weight"]
68
                vp, ep = gtc.betweenness(gtG, norm=False, weight=ws)
69
                btw = {i: vp.a[i] for i in range(len(vp.a))}
70
                df = pd.DataFrame(btw.items(), columns=['Node', 'BC'])
65 71
                df.to_csv(self.OP+"/BC"+tag+".csv", sep=',',
66 72
                          encoding='utf-8', index=False)
67 73
            if stat == 'LC':
timeAnalysis.py
1
import code  # code.interact(local=dict(globals(), **locals()))
2
from collections import deque
3
from scipy import stats
4
import matplotlib.pyplot as plt
5
from collections import defaultdict
6
import os
7
import sys
8
from statsmodels.graphics.tsaplots import plot_acf, acf
9
import operator
10
from mpl_toolkits import mplot3d
1 11
import pandas as pd
2 12
from pprint import pprint
3 13
import numpy as np
4 14
import glob
5
from matplotlib import pyplot
6
from statsmodels.graphics.tsaplots import plot_acf, acf
7
import sys
8
import os
9
from collections import defaultdict
10
import matplotlib.pyplot as plt
11
import code  # code.interact(local=dict(globals(), **locals()))
15
import matplotlib
16
import seaborn as sns
17
sns.set()
18

  
12 19

  
13 20
folder = sys.argv[1]
14 21
lags = int(sys.argv[2])
15
nick = sys.argv[3]
22
nick = folder.split('/')[-2].split('_')[0]+"_"
16 23
os.chdir(folder)
17 24

  
18
dfn = pd.DataFrame() #rows=nodes columns=BC at column-index time-instant 
25
dfn = pd.DataFrame()  # rows=nodes columns=BC at column-index time-instant
19 26
print "Loading data from", folder, "..."
20 27
for snap in sorted(glob.glob('./BC*')):
21 28
    # print snap
22 29
    df = pd.read_csv(snap, names=['BC'], skiprows=1)
23
    dfn = pd.concat([dfn,df], axis=1)
30
    dfn = pd.concat([dfn, df], axis=1)
24 31

  
25 32
nodes = dfn.index.tolist()
26 33

  
......
28 35
for n in nodes:
29 36
    initialCentrality[n] = dfn.iloc[n][0]
30 37

  
31
from mpl_toolkits import mplot3d
32
import operator
33 38

  
34
sorted_x = sorted(initialCentrality.items(), key=operator.itemgetter(1), reverse=True)
39
sorted_x = sorted(initialCentrality.items(),
40
                  key=operator.itemgetter(1), reverse=True)
35 41
srtNodes = [e[0] for e in sorted_x]
36 42

  
37
dfACF = pd.DataFrame() # rows=Time-Lags, columns = nodes
43
dfACF = pd.DataFrame()  # rows=Time-Lags, columns = nodes
38 44
print "Processing data..."
39 45
for node in nodes:
40
    nodeACF =[pd.Series(dfn.iloc[node]).autocorr(lag) for lag in range(lags)]
41
    #code.interact(local=dict(globals(), **locals()))
46
    nodeACF = [pd.Series(dfn.iloc[node]).autocorr(lag) for lag in range(lags)]
42 47
    nodeACF = pd.DataFrame(nodeACF)
43 48
    dfACF = pd.concat([dfACF, nodeACF], axis=1)
44 49

  
......
49 54
Z ==> l'acf del nodo y al time-lag x
50 55
'''
51 56

  
52
os.chdir("./..")
57
if not os.path.exists("plots"+nick):
58
    os.makedirs("plots"+nick)
59
os.chdir("plots"+nick)
53 60
# Plotting
54

  
55
#lags=20
61
# Mean AutoCorrelation and Rank-Correlation
62
# lags=20
63
firstRank = dfn.iloc[:, 0]
56 64
x = range(0, lags)
65
meanACF = []
66
rankCorr = []
67
weightedRankCorr = []
68
for i in x:
69
    meanACF.append(np.mean(dfACF.iloc[i]))
70
    rankCorr.append(stats.spearmanr(firstRank, dfn.iloc[:, i])[0])
71
    weightedRankCorr.append(stats.weightedtau(firstRank, dfn.iloc[:, i])[0])
72
plt.plot(x, meanACF, lw="1.5", label='Mean Autocorrelation')
73
plt.plot(x, rankCorr, lw="1.5", label='Rank-Correlation (with rank at t_0)')
74
plt.plot(x, weightedRankCorr, lw="1.5",
75
         label='Weighted-Rank-Correlation (with rank at t_0)')
76
plt.ylabel('Corr coeff: [ACF, Spearman rho]')
77
plt.xlabel('Time-lags / Time')
78
plt.grid()
79
plt.legend()
80
# plt.ylim(-1.0,1.0)
81
plt.xlim(0, lags)
82
plt.savefig(nick+"autoCorrMean-RankSpearman.pdf", format='pdf')
83
plt.clf()
84
'''
85

  
86

  
87

  
88
nodes2coreInst = defaultdict(list)
89
#nodes2rankInst = defaultdict(list)
90

  
91
for t in range(len(dfn.iloc[0])):
92
    coreT, coreRankT = coreNodesAtTime(dfn, t, 5)
93
    for n in coreT:
94
        nodes2coreInst[n].append((t,coreT[n],coreRankT[n]))
95

  
96

  
97

  
98
for n in [5,38,59,92]:
99
    points = nodes2coreInst[n]
100
    x = [p[0] for p in points]
101
    y = [len(nodes2coreInst)-p[2] for p in points]
102
    color = n / float(len(nodes2coreInst.keys()))
103
    #rgba = cmap(color)
104
    #plt.scatter(x,y, rgba)
105
    plt.plot(x,y, 'o')
106
plt.ylim(0, len(nodes2coreInst))
107
plt.show()
108
code.interact(local=dict(globals(), **locals()))
109
plt.show()
110
exit()
111
# Core Persitence
112
plags=100
113
x = range(0, plags)
57 114
y = []
115

  
58 116
for i in x:
59
    y.append(np.mean(dfACF.iloc[i])) #[:,i] ==> all values of column i, where a column
60

  
61
plt.plot(x, y, lw="1.5")
62
plt.ylabel('Mean Autocorrelation')
63
plt.xlabel('Time-lags')
64
#plt.ylim(-1.0,1.0)
65
plt.xlim(0,lags)
66
plt.savefig(nick+"autoCorrMean.pdf", format='pdf')
117
    print "cacca"
118

  
67 119
plt.clf()
68 120

  
69
#code.interact(local=dict(globals(), **locals()))
121
code.interact(local=dict(globals(), **locals()))'''
70 122

  
71
X,Y,Z = [], [], []
123
X, Y, Z = [], [], []
72 124
for node in srtNodes:
73 125
    for lag in range(lags):
74 126
        X.append(lag)
75 127
        Y.append(node)
76 128
        Z.append(list(dfACF.iloc[lag])[node])
77
        
129

  
78 130

  
79 131
fig = plt.figure()
80 132
ax = plt.axes(projection='3d')
81 133
ax.set_xlabel('Time-Lag')
82 134
ax.set_ylabel('Nodes sorted by BC at t_0')
83
ax.set_zlabel('ACF at time-lag x of node y');
84
ax.plot_trisurf(X,Y,Z,linewidth=0.2, antialiased=True)
135
ax.set_zlabel('ACF at time-lag x of node y')
136
ax.plot_trisurf(X, Y, Z, linewidth=0.2, antialiased=True)
85 137
ax.set_xlim(0, lags)
86 138
ax.set_ylim(0, len(srtNodes))
87 139
#ax.set_zlim(-1.0, 1.0)
88 140
plt.savefig(nick+"autoBC-3d.pdf", format="pdf")
89 141

  
90
print "Plot saved in", folder+".."
91 142
print "THE END"
92

  
util/UnitDiskGraph.py
1 1
# https://stackoverflow.com/questions/32424604/find-all-nearest-neighbors-within-a-specific-distance
2 2

  
3 3
import networkx as nx
4
from scipy import spatial
5
import code # code.interact(local=dict(globals(), **locals()))
6

  
4
from scipy.spatial import KDTree
5
from scipy.spatial.distance import cdist
6
import code  # code.interact(local=dict(globals(), **locals()))
7
import graph_tool as gt
7 8

  
8 9
class UnitDiskGraph:
    """Unit-disk graph over a point set.

    Nodes are the point indices 0..len(points)-1; an edge of weight 1.0
    connects every pair of points within `radius` of each other.
    """

    def __init__(self, points, radius):
        self.G = nx.Graph()
        self.G.add_nodes_from(range(len(points)))
        self.generateGraphCDIST(points, radius)

    def genereateGraphFromKDtree(self, points, radius):
        """Alternative edge builder using a KD-tree pair query.

        Fix: the previous body stored weights in self.edge_weights, a
        graph-tool property map that is never created on the nx.Graph
        path, so any call raised AttributeError.  Edges are now added to
        self.G directly with weight 1.0.  (Misspelled method name kept
        for backward compatibility with existing callers.)
        """
        tree = KDTree(points)
        edges = tree.query_pairs(r=radius)
        self.G.add_weighted_edges_from(
            [e + (1.0,) for e in edges], weight='weight')

    def generateGraphCDIST(self, points, radius):
        """Build edges from the dense pairwise Euclidean distance matrix.

        Scans only c > r: the graph is undirected, so each unordered pair
        is visited once -- same resulting graph as the previous full
        r x c scan, at half the cost and without duplicate edge inserts.
        """
        distM = cdist(points, points, 'euclidean')
        edges = []
        for r in range(len(points)):
            for c in range(r + 1, len(points)):
                if distM[r][c] <= radius:
                    edges.append((r, c, 1.0))
        self.G.add_weighted_edges_from(edges, weight='weight')

    def getGraph(self):
        """Return the underlying networkx Graph."""
        return self.G
util/nx2gt.py
1

  
2
import networkx as nx
3
import graph_tool as gt
4
import code  # code.interact(local=dict(globals(), **locals()))
5

  
6

  
7

  
8
def get_prop_type(value, key=None):
    """
    Performs typing and value conversion for the graph_tool PropertyMap class.
    If a key is provided, it also ensures the key is in a format that can be
    used with the PropertyMap. Returns a tuple, (type name, value, key).

    (Python 2: `unicode` keys and values are re-encoded to ASCII.)
    """
    # Normalize the key first so it is usable as a PropertyMap name.
    if isinstance(key, unicode):
        key = key.encode('ascii', errors='replace')

    # Pick the PropertyMap type from the value's Python type.  bool must
    # be tested before int (bool is a subclass of int in Python).
    if isinstance(value, bool):
        tname = 'bool'
    elif isinstance(value, int):
        # Integers are promoted to doubles for graph_tool storage.
        tname, value = 'float', float(value)
    elif isinstance(value, float):
        tname = 'float'
    elif isinstance(value, unicode):
        tname, value = 'string', value.encode('ascii', errors='replace')
    elif isinstance(value, dict):
        tname = 'object'
    else:
        # Anything else is stringified.
        tname, value = 'string', str(value)

    return tname, value, key
41

  
42

  
43
def nx2gt(nxG):
    """
    Converts a networkx graph to a graph-tool graph.

    Graph-, vertex- and edge-level attributes of `nxG` are copied into
    correspondingly typed graph-tool PropertyMaps (typed via
    get_prop_type).  Original node identifiers are preserved in a vertex
    property named 'id'.
    """
    # Phase 0: Create a directed or undirected graph-tool Graph
    gtG = gt.Graph(directed=nxG.is_directed())

    # Add the Graph properties as "internal properties"
    for key, value in nxG.graph.items():
        # Convert the value and key into a type for graph-tool
        tname, value, key = get_prop_type(value, key)

        prop = gtG.new_graph_property(tname) # Create the PropertyMap
        gtG.graph_properties[key] = prop     # Set the PropertyMap
        gtG.graph_properties[key] = value    # Set the actual value

    # Phase 1: Add the vertex and edge property maps
    # Go through all nodes and edges and add seen properties
    # Add the node properties first
    nprops = set() # cache keys to only add properties once
    for node, data in nxG.nodes(data=True):

        # Go through all the properties if not seen and add them.
        for key, val in data.items():
            if key in nprops: continue # Skip properties already added

            # Convert the value and key into a type for graph-tool
            tname, _, key  = get_prop_type(val, key)

            prop = gtG.new_vertex_property(tname) # Create the PropertyMap
            gtG.vertex_properties[key] = prop     # Set the PropertyMap

            # Add the key to the already seen properties
            nprops.add(key)

    # Also add the node id: in NetworkX a node can be any hashable type, but
    # in graph-tool node are defined as indices. So we capture any strings
    # in a special PropertyMap called 'id' -- modify as needed!
    gtG.vertex_properties['id'] = gtG.new_vertex_property('string')

    # Add the edge properties second
    eprops = set() # cache keys to only add properties once
    for src, dst, data in nxG.edges(data=True):

        # Go through all the edge properties if not seen and add them.
        for key, val in data.items():
            if key in eprops: continue # Skip properties already added

            # Convert the value and key into a type for graph-tool
            tname, _, key = get_prop_type(val, key)

            prop = gtG.new_edge_property(tname) # Create the PropertyMap
            gtG.edge_properties[key] = prop     # Set the PropertyMap

            # Add the key to the already seen properties
            eprops.add(key)

    # Phase 2: Actually add all the nodes and vertices with their properties
    # Add the nodes
    vertices = {} # vertex mapping for tracking edges later
    for node, data in nxG.nodes(data=True):

        # Create the vertex and annotate for our edges later
        v = gtG.add_vertex()
        vertices[node] = v

        # Set the vertex properties, not forgetting the id property
        # NOTE(review): this mutates the caller's node-attribute dict.
        data['id'] = str(node)
        for key, value in data.items():
            gtG.vp[key][v] = value # vp is short for vertex_properties

    # Add the edges
    for src, dst, data in nxG.edges(data=True):

        # Look up the vertex structs from our vertices mapping and add edge.
        e = gtG.add_edge(vertices[src], vertices[dst])

        # Add the edge properties
        for key, value in data.items():
            gtG.ep[key][e] = value # ep is short for edge_properties

    # Done, finally!
    return gtG
126

  
127

  
128
if __name__ == '__main__':
    # Smoke test: build a small attributed networkx graph, convert it to
    # graph-tool, and compare betweenness centrality from both libraries.

    # Create the networkx graph
    nxG = nx.Graph(name="Undirected Graph")
    nxG.add_node("v1", name="alpha", color="red")
    nxG.add_node("v2", name="bravo", color="blue")
    nxG.add_node("v3", name="charlie", color="blue")
    nxG.add_node("v4", name="hub", color="purple")
    nxG.add_node("v5", name="delta", color="red")
    nxG.add_node("v6", name="echo", color="red")

    nxG.add_edge("v1", "v2", weight=0.5, label="follows")
    nxG.add_edge("v1", "v3", weight=0.25, label="follows")
    nxG.add_edge("v2", "v4", weight=0.05, label="follows")
    nxG.add_edge("v3", "v4", weight=0.35, label="follows")
    nxG.add_edge("v5", "v4", weight=0.65, label="follows")
    nxG.add_edge("v6", "v4", weight=0.53, label="follows")
    nxG.add_edge("v5", "v6", weight=0.21, label="follows")

    for item in nxG.edges(data=True):
        print item

    # Convert to graph-tool graph
    gtG = nx2gt(nxG)
    gtG.list_properties()

    bw_centrality = nx.betweenness_centrality(nxG, normalized=False,  weight='weight', endpoints=False)
    import graph_tool.centrality as gtc
    ws=gtG.edge_properties["weight"]
    vp, ep = gtc.betweenness(gtG, norm=False, weight=ws)
    #code.interact(local=dict(globals(), **locals()))
    # NOTE(review): dict.values() ordering is arbitrary here, so this
    # elementwise comparison is only meaningful if the insertion order of
    # bw_centrality happens to match the graph-tool vertex indices --
    # confirm before trusting the printed result.
    print vp.a==bw_centrality.values()

Also available in: Unified diff