This repository was archived by the owner on Jan 7, 2023. It is now read-only.
47 changes: 47 additions & 0 deletions analyzer/analyzer.py
@@ -486,6 +486,9 @@ def _process_data(self, node_name):
                self.workpool.schedule( self.process_sar_data, "%s/%s/%s" % (dest_dir, node_name, dir_name))
            if 'totals.html' in dir_name:
                self.workpool.schedule( self.process_vdbench_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), "%s_%s" % (node_name, dir_name))
            if '_fio.json' in dir_name:
                if '.tmp' not in dir_name:
                    self.workpool.schedule( self.process_json_fio_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), dir_name)
            if '_fio.txt' in dir_name:
                self.workpool.schedule( self.process_fio_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), dir_name)
            if '_fio_iops.1.log' in dir_name or '_fio_bw.1.log' in dir_name or '_fio_lat.1.log' in dir_name:
@@ -844,6 +847,47 @@ def get_lat_persent_dict(self,fio_str):
                lat_percent_dict[key[0]] = value[0]
        return lat_percent_dict

    def process_json_fio_data(self, path, dirname):
        result = {}
        try:
            # fio emits a warning/banner line ahead of the JSON body in this
            # setup, so copy everything after the first line into a .tmp file
            # and parse that instead.
            path_tmp = path + '.tmp'
            dir_name = os.path.dirname(path)
            os.system('rm -f %s/*.tmp' % dir_name)
            with open(path, 'r') as f:
                lines = f.readlines()
            with open(path_tmp, 'w') as f_tmp:
                f_tmp.write(''.join(lines[1:]))
            js = {}
            with open(path_tmp, 'r') as f_json:
                js.update(json.load(f_json, object_pairs_hook=OrderedDict))
            job = js['jobs'][0]
            output_fio_data = OrderedDict()
            output_fio_data['read_lat'] = job['read']['lat']['mean']
            output_fio_data['read_iops'] = job['read']['iops']
            output_fio_data['read_bw'] = job['read']['bw']
            output_fio_data['read_runtime'] = job['read']['runtime']
            output_fio_data['write_lat'] = job['write']['lat']['mean']
            output_fio_data['write_iops'] = job['write']['iops']
            output_fio_data['write_bw'] = job['write']['bw']
            output_fio_data['write_runtime'] = job['write']['runtime']
            # A write-only job reports zeroed read percentiles; fall back to
            # the write-side completion-latency percentiles in that case.
            if job['read']['clat']['percentile']['95.000000'] == 0:
                output_fio_data['95.00th%_lat'] = job['write']['clat']['percentile']['95.000000']
            if job['read']['clat']['percentile']['99.000000'] == 0:
                output_fio_data['99.00th%_lat'] = job['write']['clat']['percentile']['99.000000']
            if job['read']['clat']['percentile']['99.990000'] == 0:
                output_fio_data['99.99th%_lat'] = job['write']['clat']['percentile']['99.990000']
            output_fio_data['lat_unit'] = 'msec'
            output_fio_data['runtime_unit'] = 'sec'
            output_fio_data['bw_unit'] = 'MB/s'
            result[dirname] = {}
            result[dirname]["fio"] = output_fio_data
            self.workpool.enqueue_data( ["process_json_fio_data", result] )
            return result
        except Exception:
            # Malformed or truncated fio output; return the empty result
            # rather than taking the whole worker down.
            return result

    def process_fio_data(self, path, dirname):
        result = {}
        stdout, stderr = common.bash("grep \" *io=.*bw=.*iops=.*runt=.*\|^ *lat.*min=.*max=.*avg=.*stdev=.*\" "+path, True)
@@ -916,6 +960,9 @@ def process_fio_data(self, path, dirname):
result[dirname]["fio"] = output_fio_data

self.workpool.enqueue_data( ["process_fio_data", result] )
print '---------------------------------'
print path,dirname
print result
return result

    def process_lttng_data(self, path):
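For reference, the new `process_json_fio_data` path expects fio's JSON report to carry a `jobs` array whose first entry has `read`/`write` sections with `lat.mean`, `iops`, `bw`, `runtime`, and string-keyed `clat.percentile` maps. The sketch below is illustrative only — the field values and the `sample_fio.json` filename are invented, not taken from this PR — but it exercises the same `object_pairs_hook=OrderedDict` load and the write-side percentile fallback the parser relies on:

```python
import json
from collections import OrderedDict

# Invented sample holding only the fields the parser reads; a write-only
# job reports zeroed read-side stats, which triggers the write fallback.
sample = {
    "jobs": [{
        "read":  {"lat": {"mean": 0.0}, "iops": 0, "bw": 0, "runtime": 0,
                  "clat": {"percentile": {"95.000000": 0,
                                          "99.000000": 0,
                                          "99.990000": 0}}},
        "write": {"lat": {"mean": 1.84}, "iops": 5120, "bw": 20480,
                  "runtime": 60000,
                  "clat": {"percentile": {"95.000000": 3.2,
                                          "99.000000": 5.6,
                                          "99.990000": 12.1}}},
    }]
}

with open("sample_fio.json", "w") as f:   # hypothetical capture file
    json.dump(sample, f)

with open("sample_fio.json", "r") as f_json:
    js = json.load(f_json, object_pairs_hook=OrderedDict)

job = js["jobs"][0]
assert job["read"]["clat"]["percentile"]["95.000000"] == 0  # write-only job
print(json.dumps(job["write"]["clat"]["percentile"]))
```

Keying the percentile map by strings such as `'95.000000'` mirrors how fio names those fields in its JSON output, which is why the parser indexes them as string literals rather than floats.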
46 changes: 46 additions & 0 deletions analyzer/analyzer_remote.py
@@ -367,6 +367,9 @@ def _process_data(self):
            if 'totals.html' in dir_name:
                self.common.printout("LOG","Processing %s_%s" % (self.whoami, dir_name))
                self.workpool.schedule( self.process_vdbench_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), "%s_%s" % (node_name, dir_name))
            if '_fio.json' in dir_name:
                if '.tmp' not in dir_name:
                    self.workpool.schedule( self.process_json_fio_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), dir_name)
            if '_fio.txt' in dir_name:
                self.common.printout("LOG","Processing %s_%s" % (self.whoami, dir_name))
                self.workpool.schedule( self.process_fio_data, "%s/%s/%s" % (dest_dir, node_name, dir_name), dir_name)
@@ -725,6 +728,47 @@ def get_lat_persent_dict(self,fio_str):
                lat_percent_dict[key[0]] = value[0]
        return lat_percent_dict

    def process_json_fio_data(self, path, dirname):
        result = {}
        try:
            # fio emits a warning/banner line ahead of the JSON body in this
            # setup, so copy everything after the first line into a .tmp file
            # and parse that instead.
            path_tmp = path + '.tmp'
            dir_name = os.path.dirname(path)
            os.system('rm -f %s/*.tmp' % dir_name)
            with open(path, 'r') as f:
                lines = f.readlines()
            with open(path_tmp, 'w') as f_tmp:
                f_tmp.write(''.join(lines[1:]))
            js = {}
            with open(path_tmp, 'r') as f_json:
                js.update(json.load(f_json, object_pairs_hook=OrderedDict))
            job = js['jobs'][0]
            output_fio_data = OrderedDict()
            output_fio_data['read_lat'] = job['read']['lat']['mean']
            output_fio_data['read_iops'] = job['read']['iops']
            output_fio_data['read_bw'] = job['read']['bw']
            output_fio_data['read_runtime'] = job['read']['runtime']
            output_fio_data['write_lat'] = job['write']['lat']['mean']
            output_fio_data['write_iops'] = job['write']['iops']
            output_fio_data['write_bw'] = job['write']['bw']
            output_fio_data['write_runtime'] = job['write']['runtime']
            # A write-only job reports zeroed read percentiles; fall back to
            # the write-side completion-latency percentiles in that case.
            if job['read']['clat']['percentile']['95.000000'] == 0:
                output_fio_data['95.00th%_lat'] = job['write']['clat']['percentile']['95.000000']
            if job['read']['clat']['percentile']['99.000000'] == 0:
                output_fio_data['99.00th%_lat'] = job['write']['clat']['percentile']['99.000000']
            if job['read']['clat']['percentile']['99.990000'] == 0:
                output_fio_data['99.99th%_lat'] = job['write']['clat']['percentile']['99.990000']
            output_fio_data['lat_unit'] = 'msec'
            output_fio_data['runtime_unit'] = 'sec'
            output_fio_data['bw_unit'] = 'MB/s'
            result[dirname] = {}
            result[dirname]["fio"] = output_fio_data
            self.workpool.enqueue_data( ["process_json_fio_data", result] )
            return result
        except Exception:
            # Malformed or truncated fio output; return the empty result
            # rather than taking the whole worker down.
            return result

    def process_fio_data(self, path, dirname):
        result = {}
        stdout = self.common.bash("grep \" *io=.*bw=.*iops=.*runt=.*\|^ *lat.*min=.*max=.*avg=.*stdev=.*\" "+path)
@@ -796,6 +840,8 @@ def process_fio_data(self, path, dirname):
        result[dirname] = {}
        result[dirname]["fio"] = output_fio_data
        self.workpool.enqueue_data(["process_fio_data", result])
        print '======================================='
        print result
        return result

    def process_lttng_data(self, path):
2 changes: 1 addition & 1 deletion benchmarking/mod/bblock/fiorbd.py
@@ -93,7 +93,7 @@ def run(self):
        poolname = self.benchmark["poolname"]
        for client in self.benchmark["distribution"]:
            rbdlist = ' '.join(self.benchmark["distribution"][client])
-           res = common.pdsh(user, [client], "for rbdname in %s; do POOLNAME=%s RBDNAME=${rbdname} fio --output %s/`hostname`_${rbdname}_fio.txt --write_bw_log=%s/`hostname`_${rbdname}_fio --write_lat_log=%s/`hostname`_${rbdname}_fio --write_iops_log=%s/`hostname`_${rbdname}_fio --section %s %s/fio.conf 2>%s/`hostname`_${rbdname}_fio_errorlog.txt & done" % (rbdlist, poolname, dest_dir, dest_dir, dest_dir, dest_dir, self.benchmark["section_name"], dest_dir, dest_dir), option = "force")
+           res = common.pdsh(user, [client], "for rbdname in %s; do POOLNAME=%s RBDNAME=${rbdname} fio --output-format=json --output %s/`hostname`_${rbdname}_fio.json --write_bw_log=%s/`hostname`_${rbdname}_fio --write_lat_log=%s/`hostname`_${rbdname}_fio --write_iops_log=%s/`hostname`_${rbdname}_fio --section %s %s/fio.conf 2>%s/`hostname`_${rbdname}_fio_errorlog.txt & done" % (rbdlist, poolname, dest_dir, dest_dir, dest_dir, dest_dir, self.benchmark["section_name"], dest_dir, dest_dir), option = "force")
            fio_job_num_total += len(self.benchmark["distribution"][client])
        self.chkpoint_to_log("fio start")
        time.sleep(1)
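With `--output-format=json`, each client now writes `<hostname>_<rbdname>_fio.json`, which the `_fio.json` branch added to both analyzers picks up (the stderr redirect stays a plain-text error log). A minimal standalone checker for such a capture might look like the sketch below — a hypothetical helper, assuming only that the file holds fio's JSON report, possibly preceded by one warning line, which is the same assumption behind the `lines[1:]` strip in `process_json_fio_data`:

```python
import json
import sys

def load_fio_json(path):
    # Parse an fio JSON capture, tolerating one leading non-JSON line
    # (fio sometimes prints a warning/banner before the report).
    with open(path, 'r') as f:
        text = f.read()
    try:
        return json.loads(text)
    except ValueError:
        # Retry with the first line dropped, as the analyzer does.
        return json.loads(text.split('\n', 1)[1])

if __name__ == '__main__':
    js = load_fio_json(sys.argv[1])
    job = js['jobs'][0]
    print('read_iops=%s write_iops=%s'
          % (job['read']['iops'], job['write']['iops']))
```

Run it as `python check_fio_json.py <hostname>_<rbdname>_fio.json` (script name illustrative); a parse failure here is a quick signal that fio emitted extra non-JSON output.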
2 changes: 2 additions & 0 deletions benchmarking/mod/benchmark.py
@@ -259,6 +259,7 @@ def archive(self):
common.bash("mkdir -p %s/raw/%s" % (dest_dir, node))
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.txt" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.csv" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.json" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/conf/" % (dest_dir), "%s/*.csv" % self.cluster["tmp_dir"])
if "blktrace" in self.cluster["collector"]:
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*blktrace*" % self.cluster["tmp_dir"])
@@ -270,6 +271,7 @@ def archive(self):
common.bash( "mkdir -p %s/raw/%s" % (dest_dir, node))
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.txt" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.csv" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/raw/%s/" % (dest_dir, node), "%s/*.json" % self.cluster["tmp_dir"])
common.rscp(user, node, "%s/conf/" % (dest_dir), "%s/*.csv" % self.cluster["tmp_dir"])

        # save real runtime