From fc92ae3d73e18088afa0360854c27fbf07584646 Mon Sep 17 00:00:00 2001
From: Martin Ritchie
Date: Mon, 12 Apr 2010 15:42:21 +0000
Subject: QPID-2424 : Augment processing scripts to generate min/max/avg stats
 file for CPU/MEM, GC-Freq, GC-Duration, Heap-Size.

Merged from 0.5.x-dev rev 917452

git-svn-id: https://svn.apache.org/repos/asf/qpid/trunk/qpid@933282 13f79535-47bb-0310-9956-ffa450edef68
---
 java/perftests/bin/processing/process.sh      |  84 ++++++++-
 java/perftests/bin/processing/processTests.py | 239 +++++++++++++++++++++++---
 2 files changed, 297 insertions(+), 26 deletions(-)

diff --git a/java/perftests/bin/processing/process.sh b/java/perftests/bin/processing/process.sh
index cb2181622c..f8e13cd5cd 100755
--- a/java/perftests/bin/processing/process.sh
+++ b/java/perftests/bin/processing/process.sh
@@ -51,7 +51,11 @@ processCMSGCFile()
     cat parnew.gc.log | awk '{print $8}' | cut -d 'K' -f 2 | cut -d '>' -f 2 > HEAP_POST_GC.log
     cat parnew.gc.log | awk '{print $1}' | cut -d ':' -f 1 > GC_TIME.log
 
-
+    calculateStats "MEM_PRE" HEAP_PRE_GC.log
+    calculateStats "MEM_POST" HEAP_POST_GC.log
+    calculateStatsBC "GC_FREQ" GC_FREQ.log
+
     # Calculate ParNew GC Cumulative total
     cat parnew.gc.log |awk '{sum=sum+$6; print $6, sum}' > GC_DUR.log
     # Give a count of GC occurances
@@ -129,6 +133,82 @@ processG1GCFile()
 
 }
 
+calculateStatsBC()
+{
+    label=$1
+    file=$2
+
+    # Calculate GC Frequencies
+    prevFreq=0
+    countFreq=0
+    gcFreqTotal=0
+    minFreq=99999999
+    maxFreq=0
+
+    echo "" > GC_FREQ.log
+    for time in `cat $file` ; do
+        if [ $prevFreq == 0 ] ; then
+            prevFreq=$time
+        else
+            freq=`echo $time - $prevFreq | bc`
+            echo $freq >> GC_FREQ.log
+
+            prevFreq=$time
+            gcFreqTotal=`echo $gcFreqTotal + $freq | bc`
+            countFreq=$[$countFreq + 1]
+
+            if [ `echo "$freq > $maxFreq" | bc` == 1 ] ; then
+                maxFreq=$freq
+            fi
+
+            if [ `echo "$freq < $minFreq" | bc` == 1 ] ; then
+                minFreq=$freq
+            fi
+        fi
+    done
+
+    # Extract Min/Max/Avg
+    echo "#type:min/max/avg" > $file.stats
+    avgFreq=`echo $gcFreqTotal / $countFreq | bc -l`
+    echo "$label:$minFreq/$maxFreq/$avgFreq" >> $file.stats
+}
+
+calculateStats()
+{
+    label=$1
+    file=$2
+
+    # Calculate min/max/avg of the values in $file
+    count=0
+    gcTotal=0
+    min=0
+    max=0
+
+    for item in `cat $file` ; do
+        if [ $min == 0 ] ; then
+            min=$item
+        fi
+
+        gcTotal=$[$gcTotal + $item]
+        count=$[$count + 1]
+
+        if [ $item -gt $max ] ; then
+            max=$item
+        fi
+
+        if [ $item -lt $min ] ; then
+            min=$item
+        fi
+    done
+
+    # Extract Min/Max/Avg
+    echo "#type:min/max/avg" > $file.stats
+    avg=`echo $gcTotal / $count | bc -l`
+
+    echo "$label:$min/$max/$avg" >> $file.stats
+    echo "Done Stat generation for $file"
+}
+
 #
 # Take an input file ($1) of lines
 #
@@ -282,4 +362,4 @@ set format x "%H:%M"
 plot "CPU.data" using 1:2 with lines
 EOGNUPLOT
 
-popd &> /dev/null
+popd &> /dev/null
\ No newline at end of file
diff --git a/java/perftests/bin/processing/processTests.py b/java/perftests/bin/processing/processTests.py
index 7142692d14..57c8cc7668 100755
--- a/java/perftests/bin/processing/processTests.py
+++ b/java/perftests/bin/processing/processTests.py
@@ -29,7 +29,7 @@ import shutil
 
 def showUsage():
-    log("./pT.py [-b|--broker-log-dir] [-t|--test-dir] ")
+    log("./processTests.py [-b|--broker-log-dir] [-t|--test-dir] ")
 
 ACCESS="Access"
@@ -39,6 +39,7 @@ BROKER_LOG="broker.log"
 BROKER_PID="broker.pid"
 BROKER_CPU="broker_cpu.log"
 BROKER_CPU_DATED="broker_cpu.log.dated"
+BROKER_STATS="broker.stats"
 BROKER_GC="gc.log"
 
 GRAPH_DATA="graph.data"
@@ -124,7 +125,7 @@ def processCPUUsage(resultDir):
     # Calulate addition required per process line output
     #
     if topRate.find(".") == -1:
-        second = topRate
+        seconds = topRate
         millis = 0
     else:
         split = topRate.split('.')
@@ -142,33 +143,103 @@ def processCPUUsage(resultDir):
     logfile = open(logfile, "r")
 
     # Open the output file, erasing any existing version
+    # Keep track of the min/max, sum and entry count.
+    minCPU=float(sys.maxint)
+    maxCPU=0.0
+    minMem=float(sys.maxint)
+    maxMem=0.0
+
+    entries=0
+    sumCPU=0.0
+    sumMem=0.0
+
+    entries = 0
     output= open(datedFile, "w")
-    for line in logfile:
-        if line.find(pid) != -1:
-            # Split line on whitespace
-            data = line.split()
-
+    for line in logfile:
             #
            # Data format
            #  0    1     2   3   4     5      6     7    8   9   10    11
-           # PID  USER   PR  NI  %CPU  TIME+  %MEM  VIRT RES SHR S     COMMAND
+           # PID  USER   PR  NI  %CPU  TIME+  %MEM  VIRT RES SHR S     COMMAND
+           # PID  USER   PR  NI  VIRT  RES    SHR   S    %CPU %MEM TIME+ COMMAND
            #
-
-            #Write out the date time (ISO-8601 format)
-            output.write(str(start))
-            # Output the %CPU value
-            output.write(" "+str(data[4]))
-            # Output the %MEM value
-            output.write(" "+str(data[5]))
-            output.write('\n')
-
-            # Add the offset based on the logging rate
-            start = start + offset
+           # Where %CPU and %MEM appear varies, probably based on the os/version of top,
+           # so auto-detect which column holds each value.
+           #
+           # Index is 0 based for array usage.
+        index = 0
+        if line.find("PID") != -1:
+            for key in line.split(" "):
+                strippedKey = key.lstrip()
+                if len(strippedKey) > 0:
+                    # Record the key index
+                    if (strippedKey == "%CPU"):
+                        cpuIndex=index
+                    if (strippedKey == "%MEM"):
+                        memIndex=index
+                    # Increase count for next key
+                    index = index + 1
+
+
+        # Find lines that contain our broker process
+        if line.find("QPBRKR") != -1:
+
+            # Split line on whitespace
+            data = line.split()
+
+            #Write out the date time (ISO-8601 format)
+            output.write(str(start))
+            # Output the %CPU value
+            output.write(" "+str(data[cpuIndex]))
+            # Output the %MEM value
+            output.write(" "+str(data[memIndex]))
+            output.write('\n')
+
+            # Add the offset based on the logging rate
+            start = start + offset
+
+            # Record entries
+            entries = entries + 1
+
+            # Record Metrics
+            # Record CPU data
+            cpu = float(data[cpuIndex])
+            if (cpu < minCPU):
+                minCPU = cpu
+
+            if (cpu > maxCPU):
+                maxCPU = cpu
+
+            sumCPU = sumCPU + cpu
+
+            # Record Mem data
+            mem = float(data[memIndex])
+
+            if (mem < minMem):
+                minMem = mem
+
+            if (mem > maxMem):
+                maxMem = mem
+
+            sumMem = sumMem + mem
+
+
+    #end for
 
     # Close the files
     logfile.close
     output.close
 
+    # Output stats file
+    statFile=resultDir+os.sep+BROKER_CPU+".stats"
+    output= open(statFile, "w")
+    output.write("#type:min/max/avg")
+    output.write('\n')
+    output.write("CPU:"+str(minCPU)+"/"+str(maxCPU)+"/"+str(float(sumCPU)/float(entries)))
+    output.write('\n')
+    output.write("MEM:"+str(minMem)+"/"+str(maxMem)+"/"+str(float(sumMem)/float(entries)))
+    output.write('\n')
+    output.close()
+
     log("Pre Process of CPU Log file '"+BROKER_CPU+"' complete")
@@ -306,11 +377,23 @@ def sliceCPULog(resultDir, start, end):
 
     # Open log CPU file for reading
     logFile = open(logfilePath, "r")
 
+    # Open the output file, erasing any existing version
+    # Keep track of the min/max, sum and entry count.
+    minCPU=float(sys.maxint)
+    maxCPU=0.0
+    minMem=float(sys.maxint)
+    maxMem=0.0
+
+    entries=0
+    sumCPU=0.0
+    sumMem=0.0
+
+    entries = 0
+
     #
     # Create outputfile
     #
-    cpuslice = open(cpuSliceFile,"w")
-
+    cpuslice = open(cpuSliceFile,"w")
     for line in logFile:
         data = line.split()
         #
@@ -320,11 +403,63 @@
 
         if lineTime > start:
            if lineTime < end:
+                # Write the data through to the new file
                cpuslice.writelines(line)
+
+                # Perform stat processing for the min/max/avg
+
+                data = line.split()
+
+                #
+                # Data format is
+                #
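
For reference, the sketch below is not part of the patch; it is a minimal Python illustration, with invented sample top output, of what the processCPUUsage() hunk above now does: locate the %CPU column from top's header row, accumulate min/max/avg over the broker (QPBRKR) samples, and emit the same "#type:min/max/avg" layout the new .stats files use.

    # Minimal sketch, NOT part of the patch: the sample `top` lines below are
    # invented. It mirrors the processCPUUsage() approach above: locate the
    # %CPU column from the header row, accumulate min/max/avg over the broker
    # (QPBRKR) samples, and print the "#type:min/max/avg" stats layout.
    sample = [
        "  PID USER      PR  NI  VIRT  RES  SHR S %CPU %MEM    TIME+  COMMAND",
        " 1234 qpid      20   0 2048m 512m  12m S 42.0  6.5   1:02.33 QPBRKR",
        " 1234 qpid      20   0 2048m 514m  12m S 38.5  6.6   1:02.40 QPBRKR",
    ]

    cpuIndex = None
    minCPU, maxCPU, sumCPU, entries = float("inf"), 0.0, 0.0, 0

    for line in sample:
        if "PID" in line:
            # Header row: record which whitespace-separated column holds %CPU
            for index, key in enumerate(line.split()):
                if key == "%CPU":
                    cpuIndex = index
        elif "QPBRKR" in line:
            # Broker sample: pull the %CPU field and update the running stats
            cpu = float(line.split()[cpuIndex])
            minCPU = min(minCPU, cpu)
            maxCPU = max(maxCPU, cpu)
            sumCPU = sumCPU + cpu
            entries = entries + 1

    # Same layout the patch writes to broker_cpu.log.stats
    print("#type:min/max/avg")
    print("CPU:" + str(minCPU) + "/" + str(maxCPU) + "/" + str(sumCPU / entries))

The calculateStats shell functions added to process.sh write the identical layout for the heap and GC figures, so downstream consumers can read every .stats file the same way.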
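Similarly, the calculateStatsBC() function added to process.sh treats each GC "frequency" as the gap between consecutive GC timestamps and records min/max/avg of those gaps via bc. A rough Python equivalent of that logic, again with invented timestamps, is:

    # Rough Python equivalent of calculateStatsBC(), NOT part of the patch; the
    # timestamps are invented. A GC "frequency" is the gap between consecutive
    # GC timestamps, and the stats line holds min/max/avg of those gaps.
    gc_times = [12.5, 14.0, 16.5, 20.0, 21.0]

    # Pairwise gaps between consecutive GC events, as GC_FREQ.log would hold them
    gaps = [later - earlier for earlier, later in zip(gc_times, gc_times[1:])]

    print("#type:min/max/avg")
    print("GC_FREQ:%s/%s/%s" % (min(gaps), max(gaps), sum(gaps) / len(gaps)))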