#!/usr/bin/python
from __future__ import division
import getopt,os,sys
import numpy as nm
from pandas import *
from py_logs_analizer import *
import matplotlib as mpl
mpl.use( "agg" )
import matplotlib.pyplot as plt
#import matplotlib
import networkx as nx
import pygraphviz  as pgz
import numpy as np

sys.path.insert(0,'lib')
import process_manager as pmgr
from plot_utilities import *
from utilities import *
from peerstreamer_logs import *
import inspect # only for DEBUG

ImageExtension=".png"

def get_params(argv):
	"""Parse the command line arguments.

	Supported options:
		-h, --help            print nothing and exit
		-s, --save            save plots to file instead of showing them
		-f, --folder <path>   folder containing the .exp log files (required)

	Returns [folder, save]; exits when the mandatory folder is missing.
	"""
	save = False
	try:
		# "folder" takes a mandatory argument, hence the trailing '=';
		# "save" was previously missing from the long options list.
		opts,args = getopt.getopt(argv,"shf:",["help","save","folder="])
	except getopt.GetoptError:
		sys.exit(2)
	for opt,arg in opts:
		if opt in ("-h","--help"):
			sys.exit()
		elif opt in ("-s","--save"):
			save = True
		elif opt in ("-f","--folder"):
			folder = arg
	
	try:
		return [folder, save]
	except NameError:
		# 'folder' was never bound: the mandatory option is missing
		print "[Error] folder name parameter required."
		sys.exit()

def delayVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/packets_delay.exp"
	if os.path.isfile(filename):
		delay = read_csv(filename)
		data= DataFrame({'session' : delay['session_id'], 'chunk avg delay(s)' : delay['avg_delay']/1000000})
		#print data
		data.boxplot(by='session',column='chunk avg delay(s)')
		plt.xticks(rotation=90)
		if (save):
			plt.savefig("delayVisual"+ImageExtension,bbox_inches='tight')
	else:
			print "[Error] "+filename+" file not found."

def peer_accuracy_series(s):
	"""Return the receiving accuracy: 1 - losts/chunks."""
	loss_ratio = s['losts'] / s['chunks']
	return 1 - loss_ratio

def perPeerDelayVisualize(folder,save):
	"""Per-peer chunk delay visualizations.

	For every peer (source excluded) produces three figures: median delay
	versus number of chunk hops, overall delay histogram (log scale) and
	overlaid per-hop delay histograms.  A final figure shows, for every
	peer, the number of chunks received per hop count.  Figures are saved
	to file only when 'save' is true.
	"""
	print "My name is", inspect.stack()[0][3]
	session_data = []
	source = purify_hostname(source_hostname(folder))
	for elm in os.listdir(folder):
		if elm.endswith("_session_delay.exp"):
			session_data.append(read_csv(folder+"/"+elm))

	data = concat(session_data)
	# the source does not receive chunks: drop its rows
	data = data[data['peer_hostname'] != source]
	received_chunks = {}
	for peer in set(data['peer_hostname']):
		plt.figure()
		plt.grid()
		peer_data = data[data['peer_hostname'] == peer]
		n_samples = len(peer_data)
#		data = data[data['delay'] < 400]

#		peer_data.boxplot(by='hops',column='delay')
		# median delay (msec) per hop count, in groupby (sorted) key order
		# NOTE(review): the x axis assumes hop counts are contiguous and
		# start at 1 -- confirm against the log exporter
		median_delays = (DataFrame({'hops':peer_data['hops'],'delay':peer_data['delay']}).groupby('hops').median()['delay']/1000).values
		plt.plot(range(1,len(median_delays)+1),median_delays,marker='o',linestyle='dashed',label='All data')
		plt.ylabel('Median delay (msec)')
		plt.xlabel('Number of chunk hops')
		plt.gca().set_ylim(bottom=0)
		#plt.legend()
#		plt.title('Average delay per number of hops, peer '+str(peer))
		if save:
			plt.savefig("delayPerHopPeer"+peer+ImageExtension,bbox_inches='tight')
		# number of received chunks per hop count, reused for the last plot
		received_chunks[peer] = DataFrame({'hops':peer_data['hops'],'delay':peer_data['delay']}).groupby('hops').count()['delay'].values

		plt.figure()
		# delays converted from microseconds to milliseconds
		delay_series = data[data['peer_hostname'] == peer]['delay']/1000
#		maxv = delay_series.quantile(0.8)
#		delay_series = delay_series[delay_series < maxv]
		h,b = np.histogram(delay_series,bins=5000)
		plotHistFrequencies(h,b,"Delay (msec)",log_scale=True)#,title="Delay distribution for sliver "+peer)
		if save:
			plt.savefig("overallPeerDelay_"+peer+ImageExtension,bbox_inches='tight')
		# overlaid per-hop histograms ("sbrodoli colorati")
		plt.figure()
		for hop in set(peer_data['hops']):
			delay_series = peer_data[(peer_data['hops'] == hop)]['delay']/1000
			# share of the peer's samples having this hop count, in percent
			weigth = round((len(delay_series)*100/n_samples),1)
			h,b = np.histogram(delay_series,bins=5000)
			plotHistogram(h,b,log_scale=True,label=str(hop)+" hops ("+str(weigth)+"%)")
	#		print 'Peer '+peer+' samples '+str(n_samples)+' / '+str(len(delay_series))
	#	plt.title('Delay distribution of peer '+peer+\
	#			' clustered per number of chunk hops')
		ax = plt.gca()
		plt.text(0.5, 1.1,'Delay distribution of peer '+peer+' clustered per number of chunk hops'\
				, horizontalalignment='center',  fontsize=20,  transform = ax.transAxes)
		plt.legend()

		if save:
			plt.savefig("overallPeerDelayHops_"+peer+ImageExtension,bbox_inches='tight')

#		plt.figure()
#		plotBarGraph(DataFrame({'hops':peer_data['hops'],'delay':peer_data['delay']}).groupby('hops').count()['delay'],xlab='Number of chunk hops',ylab='Number of received chunks',tit='Distribution of chunks with respect to number of hops for peer '+str(peer))

	plt.figure()
	for peer in received_chunks.keys():
		plt.plot(range(1,len(received_chunks[peer])+1),received_chunks[peer]/10000,marker='*',color='black',label='Peer '+str(peer))
#	plt.legend()
	plt.ylabel('Number of chunks received x$10^4$')
	plt.xlabel('Number of chunk hops')
#	plt.title('Chunks received per peer, versus chunk hops')
	if save:
		plt.savefig("ChunksPerHopPeer"+ImageExtension,bbox_inches='tight')

def perPeerLossVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	session_data = []
	source = purify_hostname(source_hostname(folder))
	for elm in os.listdir(folder):
		if elm.endswith("_session_loss.exp"):
			session_data.append(read_csv(folder+"/"+elm))

	data = concat(session_data)
	data = data[data['peer_hostname'] != source]
	for peer in set(data['peer_hostname']):
		plt.figure()
		plt.grid()
		data = data[data['chunks'] > 0]
		peer_series = (peer_accuracy_series(data[data['peer_hostname'] == peer].set_index('time')))
#		peer_series.plot()
		h,b = np.histogram(peer_series.tolist(),bins=500)
		plotHistFrequencies(h,b,"Accuracy (chunk received %)",title="Accuracy distribution for sliver "+peer)
		if save:
			plt.savefig("overallPeerLoss_"+peer+"_"+ImageExtension,bbox_inches='tight')

def sessionLossVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	plt.figure()
	for elm in os.listdir(folder):
		if elm.endswith("_session_loss.exp"):
			plt.figure()
			plt.grid()
			session_data = read_csv(folder+"/"+elm)
			mintime = session_data['time'].min()
			session_data['time'] -= mintime
			for peer in set(session_data['peer_hostname']):
				peer_series = (peer_accuracy_series(session_data[session_data['peer_hostname'] == peer].set_index('time')))
				peer_series.plot()
#			plt.title("Peers' chunk loss for the session "+session_id_shortner(session_data['session_id'][0]))
			plt.xlabel("Unix time (s)")# - starting from "+str(round(mintime)))
			plt.ylabel("Receiving rate (received/sent)")
			if save:
				plt.savefig("sessionLoss_"+session_id_shortner(session_data['session_id'][0])+ImageExtension,bbox_inches='tight')

def sessionHopsVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/packets_hops.exp"
	if os.path.isfile(filename):
		data = read_csv(filename)
		for session in data['session_id'].unique():
			plt.figure()
			plt.grid()
			session_data = data[data['session_id'] == session]
			session_data['time'] = session_data['time'] - session_data['time'].min()
			for peer in set(session_data['peer_hostname']):
				peer_series = ((session_data[session_data['peer_hostname'] == peer]).set_index('time'))['hops_avg']
				peer_series.plot()
#			plt.title("Chunk mean number of hops per peer, session: "+session_id_shortner(session))
			plt.xlabel("Unix time (s)")
			plt.ylabel("Number of hops from the source")
			if save:
				plt.savefig("sessionHops_"+session_id_shortner(session)+ImageExtension,bbox_inches='tight')
	else:
			print "[Error] "+filename+" file not found."

def delayPerPeer(folder,save,filename):
	print "My name is", inspect.stack()[0][3]
	try:
		session_data = read_csv(folder+"/"+filename)
		for peer in session_data['peer_hostname'].unique():
			plt.figure()
			plt.grid()
			data = session_data[session_data['peer_hostname'] == peer]
			data= DataFrame({ 'delay (msec)' : data['delay']/1000})
			data = data[data['delay (msec)'] < 1500]
			h,b = np.histogram(data['delay (msec)'],bins=500)
			plotHistFrequencies(h,b,"Delay (msec)",title="Delay distribution sliver "+peer+", session "+session_id_shortner(session_data['session_id'][0]),log_scale=True)

			if save:
				plt.savefig("peerDelay_"+session_id_shortner(session_data['session_id'][0])+"_"+peer+"_"+ImageExtension,bbox_inches='tight')
	except:
		print "[ERROR] file "+filename+" not found"


def sessionDelayVisualize(folder,save):
	"""Boxplot of chunk delays (msec) per peer, one figure per session file.

	Scans <folder> for *_session_delay.exp files; empty files are skipped.
	"""
	print "My name is", inspect.stack()[0][3]
	for elm in os.listdir(folder):
		if elm.endswith("_session_delay.exp"):
#			print "Now visualizing: "+str(elm)
			session_data = read_csv(folder+"/"+elm)
			if session_data['peer_hostname'].size > 0:
#				plt.figure()
				# delays converted from microseconds to milliseconds
				data= DataFrame({'hostname' : session_data['peer_hostname'], 'delay (msec)' : session_data['delay']/1000})
				data.boxplot(by='hostname',column='delay (msec)')
				plt.xticks(rotation=90)
#				plt.title("Chunks delay for session "+str(session_data['session_id'][0]))
				# blank out the automatic title added by boxplot()
				plt.title("")
				plt.ylabel("Delay (msec)")
				plt.xlabel("Peer hostname")
				if save:
					plt.savefig("sessionDelay_"+session_id_shortner(session_data['session_id'][0])+ImageExtension,bbox_inches='tight')

def delayVisualize2(folder,save):
	print "My name is", inspect.stack()[0][3]
	plt.figure()
	for elm in os.listdir(folder):
		if elm.endswith("_session_delay.exp"):
#			print "Now visualizing: "+str(elm)
			try:
				data = data.append(read_csv(folder+"/"+elm),ignore_index=True)
			except:
				data = read_csv(folder+"/"+elm)
		
	data1 = DataFrame({'hostname' : data['peer_hostname'], 'delay (msec)' : data['delay']/1000, 'session' : data['session_id']})
	data1 = data1[data1['delay (msec)'] < 3000]
#	print "num: "+str(data1['delay (msec)'].size)
	h,b = np.histogram(data1['delay (msec)'],bins=500)
	plotHistFrequencies(h,b,"Delay (msec)",log_scale=True,title="Delay distribution (frequency, ECDF)")

	if save:
		plt.savefig("delayVisual2"+ImageExtension,bbox_inches='tight')

def lossVisualize2(folder,save,sorting=None):
	"""Bar plots of the chunk loss percentage per peer.

	From <folder>/packets_loss.exp plots the overall mean loss per peer
	and one bar plot per session. An optional sorting dict
	({'name': ..., 'list': ...}) fixes the bar order and its name is
	embedded in the output file name.
	"""
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/packets_loss.exp"
	if os.path.isfile(filename):
		loss = read_csv(filename)
		loss = loss[loss['chunks'] > 0] # drop useless columns (sessions aborted..)

		# overall loss bars
		# NOTE(review): .sort() is the pre-0.20 pandas API (later removed
		# in favour of sort_index/sort_values) -- this script appears to
		# target an old pandas release, confirm the pinned version
		overall = DataFrame({'hostname' : loss['peer_hostname'], '% chunk loss' : loss['losts']*100/loss['chunks']}).groupby('hostname').mean().sort()
		if sorting:
			overall = overall.reindex(sorting['list'])
		#plotBarGraph(overall,"Peers' hostnames","Percentage of chunk loss","Overall peers chunk loss",ylim=[0,50])
		plotBarGraph(overall,"Peers' hostnames","Percentage of chunk loss (%)",ylim=[0,10])
		if save:
			filename = "lossVisualize2_overall"
			if sorting:
				filename += "_"+sorting['name']+"_"
			filename += ImageExtension
			plt.savefig(filename,bbox_inches='tight')

		# perSession loss bars
		for session in loss['session_id'].unique():
			data=loss[loss['session_id'] == session]
			data= DataFrame({'hostname' : data['peer_hostname'], '% chunk loss' : data['losts']*100/data['chunks']})
			data = data.set_index('hostname').sort()
			if sorting:
				data = data.reindex(sorting['list'])

			plotBarGraph(data,"Peers' hostnames","Percentage of chunk loss","Peers chunk loss for session "+session_id_shortner(session),ylim=[0,100])
			
			if save:
				filename = "lossVisualize2_"+session_id_shortner(session)
				if sorting:
					filename += "_"+sorting['name']+"_"
				filename += ImageExtension
				plt.savefig(filename,bbox_inches='tight')
	else:
			print "[Error] "+filename+" file not found."

def topologyVisualize(folder,save,sorting=None):
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/edges.exp"
	if os.path.isfile(filename):
		edges = read_csv(filename)
		maxv = edges['weight'].max()
		for session in edges['session_id'].unique():
			records=edges[edges['session_id'] == session]
	# heatmap part
			plt.figure()
			if sorting:
				chunkmat = correlateColumns(records,'peer_sender','peer_receiver','weight',defaultValue=0,autoValue=0,columns=sorting['list'],rows=sorting['list'])
			else:
				chunkmat = correlateColumns(records,'peer_sender','peer_receiver','weight',defaultValue=0,autoValue=0)
			chunkmat.sort_index()
			chunkmat.sort_index(axis=1)
			plotHeatMap(chunkmat,maxv=maxv,min_color='w',xlab="Chunks sender",ylab="Chunks receiver",tit="Chunk exchanged among slivers in session "+str(session_id_shortner(session)))
			if save:
				plt.savefig("chunkExchangedHeatMap_session_"+str(session_id_shortner(session))+ImageExtension,bbox_inches='tight')

#		# graph part
#			levels=7
#		# OCCHIO STO TAGLIANDO VIA UN SACCO DI ROBA!!
#			allweight = sum(records['weight'])
#			records=records[records['weight'] > records['weight'].median()*1.4]
#			shownweight = sum(records['weight'])
#			sender=records['peer_sender']
#			receiver=records['peer_receiver']
#			weight=records['weight']
#
#			weight_max= max(weight)
#			weight_min= min(weight)
#			legend = make_legend(weight_min,weight_max,levels,"Chunk sent:")
#			G = pgz.AGraph(directed=True,strict=False,label=legend)
#			
#			for rec in sender.iteritems(): 
#				i = rec[0]
#				G.add_edge(sender[i],receiver[i])
#				e = G.get_edge(sender[i],receiver[i])
#				e.attr['weight'] = weight[i]
##				e.attr['label'] = weight[i]
#				e.attr['nodesep'] = 5000
#				w = rescale(weight[i],weight_min,weight_max,1,levels)
#				e.attr['penwidth'] = int((weight[i]/weight_max)*levels)
#				e.attr['color'] = rgb_gradient_color(w,1,levels)
#
#			G.layout()
#			G.draw('topology_'+session_id_shortner(session)+'_rate'+str(round(shownweight/allweight,2))+ImageExtension,prog='dot')
			#twopi, gvcolor, wc, ccomps, tred, sccmap, fdp, circo, neato, acyclic, nop, gvpr, dot, sfdp

def allDelayPerPeer(folder,save):
	print "My name is", inspect.stack()[0][3]
	pm = pmgr.ProcessManager()
	for logfile in os.listdir(folder):
		if logfile.endswith("_session_delay.exp"):
			pm.launchProcess(delayPerPeer,[folder,save,logfile])
	pm.joinAll()

def lossVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/packets_loss.exp"
	if os.path.isfile(filename):
		loss = read_csv(filename)
		data= DataFrame({'session' : loss['session_id'], '% chunk loss' : loss['losts']*100/loss['chunks']})
		data['session'] = data['session'].map(lambda x: session_id_shortner(x))
		data.boxplot(by='session',column='% chunk loss')
		plt.xticks(rotation=90)
		if save:
			plt.savefig("packetsLoss"+ImageExtension,bbox_inches='tight')
	else:
			print "[Error] "+filename+" file not found."

def rttVisualize(folder,save,sorting=None):
	print "My name is", inspect.stack()[0][3]
	filename=folder+"/slivers_rtts.exp"
	if os.path.isfile(filename):
		rtts = read_csv(filename)

		# Pruning outliers
		rtts = rtts[(rtts['SRC'] != 's15') & (rtts['DST'] != 's15')]
		if 's15' in sorting['list']:
			sorting['list'].remove('s15')

		rttmax = rtts[rtts['RTT_TYPE'] == 'RTT_MAX']
		maxv = rttmax['MSEC'].max() #quantile(0.8)
		if sorting:
			rttmat = correlateColumns(rttmax,'SRC','DST','MSEC',defaultValue=maxv,autoValue=0,columns=sorting['list'],rows=sorting['list'])
		else:
			rttmat = correlateColumns(rttmax,'SRC','DST','MSEC',defaultValue=maxv,autoValue=0)
		plotHeatMap(rttmat,tit="Max Round Trip Time among slivers (msec)\nTruncated, max value is "+str(round(rttmax['MSEC'].max()))+"ms",maxv=maxv,min_color='w')
		if save:
			plt.savefig("maxrtt"+ImageExtension,bbox_inches='tight')

		rttavg = rtts[rtts['RTT_TYPE'] == 'RTT_AVG']
		maxv = rttavg['MSEC'].max() #quantile(0.8)
		maxd = rttavg['MDEV'].max() #quantile(0.8)
		if sorting:
			rttmat = correlateColumns(rttavg,'SRC','DST','MSEC',defaultValue=maxv,autoValue=0,columns=sorting['list'],rows=sorting['list'])
		else:
			rttmat = correlateColumns(rttavg,'SRC','DST','MSEC',defaultValue=maxv,autoValue=0)
		plotHeatMap(rttmat,maxv=maxv,xlab="Echo sender",ylab="Echo receiver",min_color='w')#,tit="Average Round Trip Time among slivers (msec)")
#		plotHeatMap(rttmat,tit="Average Round Trip Time among slivers (msec)\nTruncated, max value is "+str(round(rttavg['MSEC'].max()))+"ms",maxv=maxv,min_color='w')
		if save:
			plt.savefig("avgrtt"+ImageExtension,bbox_inches='tight')
		if sorting:
			rttmat = correlateColumns(rttavg,'SRC','DST','MDEV',defaultValue=maxd,autoValue=0,columns=sorting['list'],rows=sorting['list'])
		else:
			rttmat = correlateColumns(rttavg,'SRC','DST','MDEV',defaultValue=maxd,autoValue=0)
		plotHeatMap(rttmat,maxv=maxd,xlab="Echo sender",ylab="Echo receiver",min_color='w')#,tit="Average Round Trip Time Deviation among slivers (msec)",)
#		plotHeatMap(rttmat,tit="Average Round Trip Time Deviation among slivers (msec)\nTruncated, max value is "+str(round(rttavg['MDEV'].max()))+"ms",maxv=maxd,min_color='w')
		if save:
			plt.savefig("avgrttmdev"+ImageExtension,bbox_inches='tight')

		rttreach = rtts[rtts['RTT_TYPE'] == 'RTT_MAX']
		rttreach['MSEC'] = 0 
		if sorting:
			rttmat = correlateColumns(rttreach,'SRC','DST','MSEC',defaultValue=1,autoValue=0,columns=sorting['list'],rows=sorting['list'])
		else:
			rttmat = correlateColumns(rttreach,'SRC','DST','MSEC',defaultValue=1,autoValue=0)
		plotHeatMap(rttmat,tit="ICMP reachability among slivers")
		if save:
			plt.savefig("ping_reachability"+ImageExtension,bbox_inches='tight')
		else:
			plt.show()

	else:
			print "[Error] "+filename+" file not found."

def rttPerPeerVisualize(folder,save):
	"""Error-bar plots of the ping RTT per sliver.

	Reads the *_rtt_*.csv files and plots (a) overall mean RTT +/- std per
	sliver and (b) mean RTT towards the source per sliver (when the source
	address is known). Returns [ar_sort, sr_sort]: the sliver orderings
	produced by the two plots (sr_sort is None when the source is unknown).
	"""
	print "My name is", inspect.stack()[0][3]
	names = []
	allRTTMeans = []
	allRTTStd = []
	srcRTTMeans = []
	srcRTTStd = []
	source = source_addr(folder)
	rtts = []

	for elm in os.listdir(folder):
		if "_rtt_" in elm and elm.endswith(".csv") and not emptyFile(folder+"/"+elm): 
			rtt = read_csv(folder+"/"+elm)
			# rtt == -1 marks a failed ping: drop those samples
			rtt = rtt[rtt['rtt'] != -1]
			rtts.append(rtt)
			# NOTE(review): elm[:17] is presumably the sliver hostname
			# prefix of the file name -- confirm against the naming scheme
			names.append(purify_hostname(elm[:17]))
			allRTTMeans.append(rtt['rtt'].mean())
			allRTTStd.append(rtt['rtt'].std())
			if source:
				if len(rtt[rtt['addr'] == source]['rtt'])>0:
					srcRTTMeans.append(rtt[rtt['addr'] == source]['rtt'].mean())
					srcRTTStd.append(rtt[rtt['addr'] == source]['rtt'].std())
				else:
					# no samples towards the source: the source itself gets
					# 0, any other sliver gets NaN (no measurement)
					if purify_hostname(elm[:17]) == purify_hostname(source_hostname(folder)):
						srcRTTMeans.append(0)
						srcRTTStd.append(0)
					else:
						srcRTTMeans.append(np.nan)
						srcRTTStd.append(np.nan)

	plt.figure()
	ar_sort = plotOrderedErrorBar(names,allRTTMeans,allRTTStd,"Slivers","RTT (msec)","Overall RoundTripTime per sliver")
	if save:
		plt.savefig("allRTTInterval"+ImageExtension,bbox_inches='tight')
	else:
		plt.show()

	if source:
		plt.figure()
		sr_sort = plotOrderedErrorBar(names,srcRTTMeans,[0]*len(names),"Slivers","RTT (msec)")##,"RoundTripTime wrt the source ("+purify_address(folder,source)+")")
		#sr_sort = plotOrderedErrorBar(names,srcRTTMeans,srcRTTStd,"Slivers","RTT (msec)","RoundTripTime wrt the source ("+purify_address(folder,source)+")")
		if save:
			plt.savefig("srcRTTInterval"+ImageExtension,bbox_inches='tight')
		else:
			plt.show()
#		plt.figure()
#		
#		data = concat(rtts)
#		data = data[data['addr'] == source]
#		data = DataFrame({'sliver': data['hostname'], 'RTT to the source (ms)': data['rtt']})
#		data.boxplot(by='sliver',column='RTT to the source (ms)')
#		if save:
#			plt.savefig("srcRTTBoxplot"+ImageExtension,bbox_inches='tight')
		
	else:
		sr_sort = None
	return [ar_sort, sr_sort]

def ICMPLossPerPeerVisualize(folder,save,sorting=None):
	"""ICMP (ping) packet loss visualizations.

	From the per-sliver *_rtt_*.csv files plots: overall ICMP loss
	percentage per sliver, loss towards the source per sliver (when the
	source address is known), mean per-host loss bars and the loss time
	evolution with respect to the source. Returns [al_sort, sl_sort]:
	the sliver orderings of the two error-bar plots (sl_sort is None
	when the source is unknown).
	"""
	print "My name is", inspect.stack()[0][3]
	names = []
	allLost = []
	srcLost = []
	source = source_addr(folder)
	srcLossDFs = []

	for elm in os.listdir(folder):
		if "_rtt_" in elm and elm.endswith(".csv") and not emptyFile(folder+"/"+elm):
			rtt = read_csv(folder+"/"+elm)
			# NOTE(review): elm[:17] is presumably the sliver hostname
			# prefix of the file name -- confirm against the naming scheme
			names.append(purify_hostname(elm[:17]))
			# loss % = (echoes sent - echoes answered) / sent
			allLost.append(100*sum(rtt['sent'] - rtt['answers'])/sum(rtt['sent']))
			if source:
				src_rtt = rtt[rtt['addr'] == source]
				if len(src_rtt) > 0:
					srcLossDFs.append(src_rtt)
					srcLost.append(100*sum(src_rtt['sent'] - src_rtt['answers'])/sum(src_rtt['sent']))
				else:
					# no echo records towards the source: the source itself
					# loses nothing, any other sliver counts as 100% loss
					if purify_hostname(elm[:17]) == purify_hostname(source_hostname(folder)):
						srcLost.append(0)
					else:
						srcLost.append(100)


	plt.figure()
	al_sort = plotOrderedErrorBar(names,allLost,xlab="Slivers",ylab="Lost ICMP packets (%)")#,tit="Overall ICMP packets loss percentage")
	if save:
		plt.savefig("allLostICMP"+ImageExtension,bbox_inches='tight')
	else:
		plt.show()

	if source:
		plt.figure()
		sl_sort = plotOrderedErrorBar(names,srcLost,xlab="Slivers",ylab="Lost ICMP packets (%)")#,tit="Lost ICMP packets sent to the source ("+purify_address(folder,source)+")")
		if save:
			plt.savefig("srcLostICMP"+ImageExtension,bbox_inches='tight')
		else:
			plt.show()

# in deep analysis		
		# NOTE(review): concat fails if srcLossDFs is empty (every file
		# lacked records towards the source) -- assumed not to happen
		src_data = concat(srcLossDFs)
		src_data['loss'] = 100*(src_data['sent'] - src_data['answers'])/(src_data['sent'])
# peer bars, all data

		data_peers = DataFrame({'hostname':src_data['hostname'].map(purify_hostname),'loss':src_data['loss']})
		data_peers = data_peers.groupby('hostname').mean()
		if sorting:
			data_peers = data_peers.reindex(sorting['list'])
		plotBarGraph(data_peers,"Peers' hostnames","Percentage of ICMP ECHO loss","Overall peers ICMP loss",ylim=[0,50])
		if save:
			plt.savefig("icmplossbars"+ImageExtension,bbox_inches='tight')

# time evolution wrt source
		plt.figure()
		data_mean = src_data
		# rebase timestamps to the beginning of the measurement
		minv =  data_mean['unixtime'].min()
		data_mean['unixtime'] = data_mean['unixtime'] - minv 
		data_mean = data_mean.set_index('unixtime').sort_index()
		ax = plt.gca()
		ax.set_color_cycle(get_colors(20))

		for hostname in set(data_mean['hostname']):
			data_mean[data_mean['hostname'] == hostname]['loss'].plot()
		plt.xlabel("time (s)")# - starting from unixtime "+str(round(minv)))
		plt.ylabel("Loss (%)")
		plt.title("Percentage of ICMP ECHOs lost with the source")
		if save:
			plt.savefig("icmploss2source"+ImageExtension,bbox_inches='tight')
	else:
		sl_sort = None
	return [al_sort,sl_sort]

def get_colors(n):
	"""Return n '#rrggbb' color strings fading from blue to red.

	The red channel grows with the index, blue shrinks accordingly and
	green cycles as (n*i) mod 255.
	"""
	def _two_digits(value):
		# hexadecimal digits of 'value', left-padded to two characters
		digits = str(hex(value))[2:]
		if len(digits) < 2:
			digits = '0' + digits
		return digits

	palette = []
	for i in range(n):
		v = int(round(256 * i / n))
		red = _two_digits(v)
		green = _two_digits(int((n * i) % 255))
		blue = _two_digits(255 - v)
		palette.append('#' + red + green + blue)
	return palette

def sourceRTTVisualize(folder,save):
	print "My name is", inspect.stack()[0][3]
	rtts = []
	source = source_hostname(folder)
	for elm in os.listdir(folder):
		if "_rtt_" in elm and elm.endswith(".csv") and not emptyFile(folder+"/"+elm):
			rtt = read_csv(folder+"/"+elm)
			rtts.append(rtt[rtt['rtt'] != -1])
	data = concat(rtts)
	if len(data[data['rtt'] < 0]) > 0:
		raise Exception('Negative Ping RTTs!!')
	data = data[(data['hostname'] != source) & (data['addr'] == source_addr(folder))]
	# frequencies
	plt.figure()
#	data = data[(data['rtt'] > 10 & data['rtt'] < 1000)]
	h,b = np.histogram(data['rtt'],bins=1000)
#	print [i for i in b] 
#	print h
	plotHistFrequencies(h,b,"(msec)",title="Mean RTT to source distribution (frequency, ECDF)",log_scale=True)
	if save:
		plt.savefig("rtt2source_distribution"+ImageExtension,bbox_inches='tight')
	# time evolution
	plt.figure()

	data['neotime'] = zip(data['unixtime'].map(lambda x: round(x/3)))
	data_mean = data.groupby('neotime').mean()
	minv =  data_mean['unixtime'].min()
	data_mean['unixtime'] = data_mean['unixtime'] - minv 
	data_mean = data_mean.set_index('unixtime')

	data_mean['rtt'].plot()
#	plt.xlim([min(data_mean.index),max(data_mean.index)])
	plt.xlabel("time (s)")# - starting from unixtime "+str(round(minv)))
	plt.ylabel("rtt (ms)")
	plt.title("Mean RTT to the source")
	if save:
		plt.savefig("rtt2source"+ImageExtension,bbox_inches='tight')
	

def lossVisualizer(folder,save,sorting=None,procman=None):
	"""Run lossVisualize2 once per sorting entry (or once, unsorted).

	When 'procman' is given the runs are dispatched as child processes,
	otherwise they execute synchronously.
	"""
	if sorting:
		for sort in sorting:
			if procman:
				procman.launchProcess(lossVisualize2,[folder,save,sort])
			else:
				lossVisualize2(folder,save,sort)
	else:
		# no sorting given: run once without one.  (This branch used to
		# reference an undefined name 'sort', raising NameError.)
		if procman:
			procman.launchProcess(lossVisualize2,[folder,save,None])
		else:
			lossVisualize2(folder,save,None)

def main(argv):
	[folder, save] = get_params(argv)
	print "folder is " + folder
	
	mpl.rcParams.update({'font.size': 16})
	pm = pmgr.ProcessManager()

	[ar_sort, sr_sort] = rttPerPeerVisualize(folder,save)

	sorts = [{'name': "ARSORT",'list':ar_sort},\
				{'name': "SRSORT",'list':sr_sort}]#,\
##					{'name': "ALSORT",'list':al_sort},\
##					{'name': "SLSORT",'list':sl_sort} ]
	[al_sort, sl_sort] = ICMPLossPerPeerVisualize(folder,save,sorts[1])

	pm.launchProcess(perPeerLossVisualize,[folder,save])
	pm.launchProcess(perPeerDelayVisualize,[folder,save])
	lossVisualizer(folder,save,sorting=[sorts[1]],procman=pm)
	pm.launchProcess(sessionLossVisualize,[folder,save])

	pm.launchProcess(sourceRTTVisualize,[folder,save])
	pm.launchProcess(rttVisualize,[folder,save,sorts[1]])
	pm.launchProcess(topologyVisualize,[folder,save,sorts[1]])
	pm.launchProcess(lossVisualize,[folder,save])
	pm.launchProcess(sessionLossVisualize,[folder,save])
	pm.launchProcess(sessionHopsVisualize,[folder,save])
	pm.launchProcess(sessionDelayVisualize,[folder,save])
	pm.launchProcess(delayVisualize,[folder,save])
	pm.launchProcess(delayVisualize2,[folder,save])
#	allDelayPerPeer(folder,save)
	pm.joinAll()

	if not save:
		plt.show()


if __name__ == "__main__":
	# script entry point: forward the CLI arguments (program name stripped)
	main(sys.argv[1:])	
