Revision c39a768e py_logs_analizer.py

View differences:

py_logs_analizer.py
15 15
from utilities import *
16 16

  
17 17

  
18
START_ANALYSIS_TIME = 150
19
STOP_ANALYSIS_TIME = 450
18
START_ANALYSIS_TIME = 0
19
STOP_ANALYSIS_TIME = 300
20 20

  
21 21
def rescale_log_time(logdata,offsetdata,server_addr):
22 22
  if (offsetdata is None) or 'addr' not in offsetdata.columns:
......
48 48
        print "Loading source file: "+logfile
49 49
        session = exp.getSession(time_from_filename(logfile),creation=True)
50 50
        peer_data = read_csv(folder+"/"+logfile)
51
        try:
52
          offset_data = read_csv(timespread_filename(folder,hostname_from_filename(logfile)))
53
        except IOError:
51
        
52
        if timespread_filename(folder,hostname_from_filename(logfile)) is not None:
53
          try:
54
            offset_data = read_csv(timespread_filename(folder,hostname_from_filename(logfile)))
55
          except IOError:
56
            offset_data = None
57
        else:
54 58
          offset_data = None
55 59

  
56 60
        peer = Peer(hostname_from_filename(logfile),is_source_from_filename(logfile), \
......
65 69
      if session is not None:
66 70
          source = session.getSource()
67 71
          peer_data = read_csv(folder+"/"+logfile)
68
          try:
69
                  offset_data = read_csv(timespread_filename(folder,hostname_from_filename(logfile)))
70
          except IOError:
71
                  offset_data = None
72
          if timespread_filename(folder,hostname_from_filename(logfile)) is not None:
73
            try:
74
                    offset_data = read_csv(timespread_filename(folder,hostname_from_filename(logfile)))
75
            except IOError:
76
                    offset_data = None
77
          else:
78
            offset_data = None
72 79

  
73 80
          peer = Peer(hostname_from_filename(logfile),is_source_from_filename(logfile), \
74 81
                          rescale_log_time(peer_data,offset_data,source))
......
78 85
    if logfile.endswith(".neighlog.csv"):
79 86
      print "Loading peer file: "+logfile
80 87
      hostname = hostname_from_filename(logfile)
81
      peer = exp.getPeer(hostname)
82
      if peer:
83
        peer.setNeighbours(read_csv(folder+"/"+logfile))
84
      else:
85
        print "WARNING: peer " + hostname + " not found"
88
      session = exp.getSession(time_from_filename(logfile))
89
      if session is not None:
90
        peer = session.getPeer(hostname)
91
        if peer:
92
          peer.setNeighbours(read_csv(folder+"/"+logfile))
93
        else:
94
          print "WARNING: peer " + hostname + " not found"
86 95

  
87 96
# prune deviating sessions
88 97
  for session in exp.sessions:
......
346 355
        if time <= session_id:
347 356
          paths[time] = logfile
348 357
  if len(paths.keys()) > 0:
349
    return folder+'/'+paths[min(paths.keys())]
358
    return folder+'/'+paths[max(paths.keys())]
350 359
  else:
351 360
    return None
352 361

  
......
404 413
    paths_file = seekPathFile(folder,session.time)
405 414
    if paths_file:
406 415
      paths,names = loadShortestPaths(paths_file)
416

  
407 417
      for instant in range(begin_sec,end_sec,time_sensibility): 
408 418
        edges = []
409 419
        for peer in session.peers:
410 420
          logs = peer.neighbours_interval_sec(session.time + instant,session.time + instant+time_sensibility)
411
          if len(logs) > 0:
421
          if logs is not None and len(logs) > 0:
412 422
            maxtime = max(logs['logTime'])
413 423
            logs = logs[logs['logTime'] == maxtime]
414 424
            edges = edges +  logs2edges(names,logs)
415 425
          else:
416
            print "WARNING: no neighbourhood data for interval " + str(session.time+begin_sec) + "-" + str(session.time+end_sec)
426
            print "WARNING: no neighbourhood data for interval " + str(session.time ) + "-" + str(session.time + instant+time_sensibility) + ", instant " + str(instant) + ", peer " + peer.hostname
417 427
        
418 428
        sum_edges = {}
419 429
        for e in edges:
......
424 434
      print "WARNING: shortest paths file not found for session "+str(session.time)
425 435
  out_file.close()
426 436

  
437
def janeFairness(b):
  """Return Jain's fairness index of the values of mapping *b*.

  The index is (sum x_i)^2 / (n * sum x_i^2): it is 1.0 when all values
  are equal and approaches 1/n when a single value dominates.
  ("jane" looks like a historical misspelling of "Jain"; the name is kept
  because callers reference it.)

  b -- mapping (e.g. per-path link-usage counters) with numeric values

  Returns 0.0 for an empty or all-zero mapping: the index is undefined
  there and the caller can legitimately produce an empty dict (an
  interval with no neighbourhood data), which previously crashed with
  ZeroDivisionError.
  """
  n = len(b)
  den = sum([x ** 2 for x in b.values()])
  # Guard the degenerate cases instead of dividing by zero.
  if n == 0 or den == 0:
    return 0.0
  num = (sum(b.values())) ** 2
  return float(num) / (n * den)
442

  
443
def neighJaneAnalyse(folder,exp,begin_sec,end_sec):
444
  time_sensibility = 10
445
  out_file = open(folder+"/network_fairness.exp","w")
446
  out_file.write("info_type,session_id,complete_overlay_fairness,complete_overlay_link_count,time,fairness,link_count\n")
447

  
448
  for session in exp.sessions:
449
    paths_file = seekPathFile(folder,session.time)
450
    if paths_file:
451
      paths,names = loadShortestPaths(paths_file)
452

  
453
      complete_overlay = {}
454
      overlay_nodes = set()
455
      for peer in session.peers:
456
        logs = peer.neighbours
457
        overlay_nodes = overlay_nodes.union(set(logs['logger']))
458
        overlay_nodes = overlay_nodes.union(set(logs['peer']))
459
      n = len(names.keys())
460
      for p1 in overlay_nodes:
461
        n1 = names[p1.split(':')[0]]
462
        for p2 in overlay_nodes:
463
          n2 = names[p2.split(':')[0]]
464
          if p1<p2:
465
            complete_overlay = add_spath_id(complete_overlay,paths[triel(n,n1,n2)])
466
      complete_overlay_link_count = sum(complete_overlay.values())
467
      complete_overlay_fairness =  janeFairness(complete_overlay)
468

  
469
      for instant in range(begin_sec,end_sec,time_sensibility): 
470
        edges = []
471
        for peer in session.peers:
472
          logs = peer.neighbours_interval_sec(session.time + instant,session.time + instant+time_sensibility)
473
          if len(logs) > 0:
474
            maxtime = max(logs['logTime'])
475
            logs = logs[logs['logTime'] == maxtime]
476
            edges = edges +  logs2edges(names,logs)
477
          else:
478
            print "WARNING: no neighbourhood data for interval " + str(session.time ) + "-" + str(session.time + instant+time_sensibility) + ", instant " + str(instant) + ", peer " + peer.hostname
479
        
480
        sum_edges = {}
481
        for e in edges:
482
          sum_edges = add_spath_id(sum_edges,paths[e])
483

  
484
        link_count = sum(sum_edges.values())
485
        fairness = janeFairness(sum_edges)
486
        out_file.write("NEIGHJANE" +','+ str(session.time)+','+str(complete_overlay_fairness)+','+str(complete_overlay_link_count) \
487
            +','+str(instant)+','+str(fairness)+","+str(link_count)+'\n')
488
    else:
489
      print "WARNING: shortest paths file not found for session "+str(session.time)
490
  out_file.close()
491

  
492

  
427 493
def lossAnalyse(folder,exp,begin_sec,end_sec):
428 494
  out_file = open(folder+"/packets_loss.exp","w")
429 495
  out_file.write("info_type,session_id,chunks,peer_hostname,losts\n")
......
490 556
  dataPopulate(folder,exp)
491 557
  #pm.launchProcess(rttAnalyse,[folder,exp])
492 558
  pm.launchProcess(lossAnalyse,[folder,exp,START_ANALYSIS_TIME,STOP_ANALYSIS_TIME])
493
  pm.launchProcess(neighAnalyse,[folder,exp,0,600])
559
  pm.launchProcess(neighAnalyse,[folder,exp,0,300])
560
  pm.launchProcess(neighJaneAnalyse,[folder,exp,0,300])
494 561
  #pm.launchProcess(ICMPLossAnalyse,[folder,exp,START_ANALYSIS_TIME,STOP_ANALYSIS_TIME])
495 562
  #pm.launchProcess(delayAnalyse,[folder,exp,START_ANALYSIS_TIME,STOP_ANALYSIS_TIME])
496 563
  #pm.launchProcess(receptionAnalyse,[folder,exp,START_ANALYSIS_TIME,STOP_ANALYSIS_TIME])

Also available in: Unified diff