nepatest_popbabel / scripts / router.py @ 60ba786f
import glob
import json
from pprint import pprint
import sys, select
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
import code
import pdb
import networkx as nx
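
# This script computes, for each node, a reference "centrality" (the number of
# Babel-selected routes that cross the node) from the routing table dumps found
# in the given folder, then compares it with the centrality reported by the
# distributed protocol (read from the *_cdump.csv logs) and writes the merged
# results to a csv file and a plot.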

def outputTopologyGraph(noderoutes,node2RT):
    print "You have 5 seconds to write a filename!"
    fn="out"
    i, o, e = select.select( [sys.stdin], [], [], 5 )
    if (i):
        fn = sys.stdin.readline().strip()
    else:
        print "You said nothing! I choose 'out' as filename for you"

    #map nodeid to numbers...
    nid2int={}
    v=0
    for k in node2RT:
        nid2int[k]=v
        v+=1

    G=nx.Graph()
    for path in noderoutes:
        mypath=[]
        for nid in path:
            mypath.append(nid2int[nid])
        G.add_path(mypath)
    nx.write_edgelist(G, fn+".edges", data=False)

#the folder where to look for dumps and logs
#usage: python router.py <results_folder>/   (the trailing slash matters: the
#path is concatenated directly with glob patterns and output file names)
folder=sys.argv[1]
print folder

#ROUTE CALCULATOR BASED ON R.TABLE DUMPS PROVIDED BY BABEL
node2RT={}       #keys are node_ids, values are their RTs; each RT maps a destination to its next hop
ip2node={}       #maps an IP address to the node_id of the node that owns it
nodes2counter={} #per-node counter of crossing routes (reference centrality)
rid2hname={}     #maps a router_id to the hostname taken from the dump file name
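
# Each topo*.json file is expected to hold the routing table dumps of one node;
# only the fields read below are assumed, e.g. (hypothetical layout):
#   [ ..., {"router_id": "...", "routes": [{"destination": "10.0.0.1/32", "next": "..."}, ...]} ]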
for fname in glob.glob(folder+'topo*'):
    print "Loading json from " + fname
    f=open(fname, "r")
    j=json.load(f)
    #we choose the last routes dump for each node
    ld=j[1]
    routes=ld['routes']
    router_id=ld['router_id']
    nodes2counter[router_id]=0.0
    #the hostname is embedded in the file name: topo<hostname>.json
    hname=fname.split("/")[-1]
    if hname.startswith("topo"):
        hname=hname[len("topo"):]
    if hname.endswith(".json"):
        hname=hname[:-len(".json")]
    rid2hname[router_id]=hname
    rt={}
    for r in routes:
        nh=r['next']
        dest=r['destination']
        rt[dest]=nh
    node2RT[router_id]=rt
    #routes whose next hop is "me" are local addresses: remember their owner
    for r in routes:
        if (r['next']=="me"):
            ip=r['destination'].split("/")[0]
            ip2node[ip]=router_id
    f.close()

ips=ip2node.keys()

class DestNotFound(Exception):
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)

#helper that resolves the next hop for a destination address
#currently this works only because ALL routes are /32 addresses, so a plain
#startswith() on the table keys is enough instead of a real longest prefix match
def longestPrefixMatchNextHop(rt,dest):
    for d in rt.keys():
        if d.startswith(dest):
            return rt[d]
    raise DestNotFound("DEST 404: "+str(dest))

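# Build the route between every ordered pair of addresses by walking the routing
# tables hop by hop: start from the source's table, look up the next hop towards
# the destination, move to the next hop's table, and repeat until the next hop
# belongs to the destination node.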
routes=[]
for s in ips:
    for d in ips:
        #print "looking for route from "+str(s)+"..->.."+str(d)
        #r=route->list of IPs crossed
        source_node=ip2node[s]
        target_node=ip2node[d]
        if(source_node==target_node):
            continue
        r = []
        r.append(s)
        try:
            target_ip=longestPrefixMatchNextHop(node2RT[ip2node[s]],d)
            target=ip2node[target_ip]
        except:
            print "Unexpected error:", sys.exc_info()[0], "while resolving", d
            code.interact(local=dict(globals(), **locals()))
            continue
        while(target!=target_node):
            r.append(target_ip)
            target_ip=longestPrefixMatchNextHop(node2RT[ip2node[target_ip]],d)
            target=ip2node[target_ip]
        r.append(d)
        routes.append(r)
        '''for i in range(len(r)):
            if (i!=0):
                sys.stdout.write("->")
            sys.stdout.write("c"+str(r[i]))
        print ""'''

#print all routes for offline analysis
'''for r in routes:
    for i in range(len(r)):
        if (i!=0):
            sys.stdout.write("->")
        sys.stdout.write(str(r[i]))
    print ""'''

#nrs=NodeRoutes, where a NodeRoute is the list of nodes crossed by a route
nrs = []
print "Transform routes from IPlist to NodeList"
for r in routes:
    nr = []
    for crossed in r:
        node_owner=ip2node[crossed]
        nr.append(node_owner)
    nrs.append(nr)
    '''for i in range(len(nr)):
        if (i!=0):
            sys.stdout.write("->")
        sys.stdout.write("c"+str(nr[i]))
    print ""'''

#There are many identical NodeRoutes, because routes come from the combination of
#all the IPs in the network, but we only want the routes between each pair of
#nodes, so we remove duplicates
noderoutes=set()
for nr in nrs:
    noderoutes.add(tuple(nr))

#NB: routes between addresses of the same node were already skipped above
print "#routes="+str(len(routes))+" #noderoutes="+str(len(noderoutes))
print "Different node routes"
for nr in noderoutes:
    for i in range(len(nr)):
        if (i!=0):
            sys.stdout.write(" -> ")
        sys.stdout.write(str(nr[i]))
    print ""

#outputTopologyGraph(noderoutes,node2RT)
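# The per-node count computed below is essentially an unnormalized,
# betweenness-like centrality, measured on the routes Babel actually selected
# rather than on ideal shortest paths.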
#Computing how many routes cross each node; endpoints are not counted
for nr in noderoutes:
    nr=list(nr)
    del nr[0]
    del nr[-1]
    for crossed in nr:
        c=nodes2counter[crossed]
        nodes2counter[crossed]=c+1.0

tot=0
for n in nodes2counter:
    tot=tot+nodes2counter[n]
    print "Centrality of "+str(n)+" = "+str(nodes2counter[n])
print "tot="+str(tot)

#Now nodes2counter stores the reference results from the routing table dumps provided by Babel

#Now the results of the distributed protocol are collected from the csv dumps and compared with the reference values just computed
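# Assumed format of the *_cdump.csv logs: one sample per line, with the node's
# current centrality estimate in the last comma-separated field; the file name
# starts with the node name (e.g. <nodename>_cdump.csv). Only the last 30
# samples are averaged.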
import csv
res={}
tot=0
for fname in glob.glob(folder+'*_cdump.csv'):
    text=os.popen("tail -n 30 "+fname).read()
    f=fname.split("/")[-1]
    f=f.split("_")
    nodename=f[0]
    lines=text.splitlines()
    csum = 0
    for l in lines:
        csum += int(l.split(",")[-1])
    avgc = csum / float(len(lines))  #average over the samples actually read
    res[nodename] = avgc
    #print "Node ",n,":",cent
    tot = tot + avgc
for k in res:
    print "Node",k,":",res[k]
print "tot =",tot

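# The merge below assumes that the hostname extracted from topo<hostname>.json
# matches the node name taken from <nodename>_cdump.csv; a node that appears in
# only one of the two sources makes the merge (or the print below) fail.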
#Merging the results in a single dict before writing them to file for later elaboration/plotting

towrite={}

for k in nodes2counter:
    v=nodes2counter[k]
    toAdd=(v,)
    towrite[rid2hname[k]]=toAdd
    #code.interact(local=dict(globals(), **locals()))

for k in res:
    v=res[k]
    toAdd=(v,)
    towrite[k]+=toAdd
for k in towrite:
    cent=towrite[k]
    print "Node: "+str(k)+"\t(theoretical = "+str(cent[0])+", proto = "+str(cent[1])+")"

import operator
sorted_by_theor_centr = sorted(towrite.items(), key=operator.itemgetter(1), reverse=True)
pprint(sorted_by_theor_centr)

#write the sorted results to a csv file to plot them later
fldname=os.path.basename(folder.rstrip("/"))
outf = open(folder+fldname+"-results.csv", "w")
outf.write("NodeID,theoretical,proto\n")
for el in sorted_by_theor_centr:
    outf.write(str(el[0])+","+str(el[1][0])+","+str(el[1][1])+"\n")
outf.close()

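# Plot the theoretical vs the protocol centrality for every node, sorted by
# decreasing theoretical value, and save the figure next to the csv file.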
xticks = []
y1 = []
y2 = []
for el in sorted_by_theor_centr:
    xticks.append(el[0])
    y1.append(el[1][0])
    y2.append(el[1][1])

x = np.arange(len(sorted_by_theor_centr))
plt.plot(x, y1, 'ro', label="theoretical")
plt.plot(x, y2, 'b*', label="proto")
plt.xticks(x, xticks, rotation='vertical', fontsize=8)
plt.xlabel('Nodes by id')
plt.ylabel('Babel Centrality index')
plt.legend(loc='upper right', fontsize=10, numpoints=1)
# Pad margins so that markers don't get clipped by the axes
plt.margins(0.2)
# Tweak spacing to prevent clipping of tick-labels
plt.subplots_adjust(bottom=0.3)
#fig = plt.gcf()
#plt.show()
plt.savefig(folder+fldname+"plot.pdf")