author | Martin Ritchie <ritchiem@apache.org> | 2010-04-12 15:42:21 +0000
---|---|---
committer | Martin Ritchie <ritchiem@apache.org> | 2010-04-12 15:42:21 +0000
commit | fc92ae3d73e18088afa0360854c27fbf07584646 (patch) |
tree | f1846787a9dee60835d7219708fe7395fa32a41e /java |
parent | 483ab71751bc7c7cd8270ebf5ac30866b95972bd (diff) |
download | qpid-python-fc92ae3d73e18088afa0360854c27fbf07584646.tar.gz |
QPID-2424: Augment processing scripts to generate min/max/avg stats files for CPU/MEM, GC-Freq, GC-Duration and Heap-Size.
Merged from 0.5.x-dev rev 917452
git-svn-id: https://svn.apache.org/repos/asf/qpid/trunk/qpid@933282 13f79535-47bb-0310-9956-ffa450edef68
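Each of the `.stats` files this change generates uses the same layout: a `#type:min/max/avg` header line followed by one `LABEL:min/max/avg` record per metric. As a sketch, a generated `broker_cpu.log.stats` would look like this (the figures are illustrative, not from a real run):

    #type:min/max/avg
    CPU:1.0/157.0/48.25
    MEM:10.2/28.1/22.7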
Diffstat (limited to 'java')
-rwxr-xr-x | java/perftests/bin/processing/process.sh | 84 |
-rwxr-xr-x | java/perftests/bin/processing/processTests.py | 239 |
2 files changed, 297 insertions, 26 deletions
diff --git a/java/perftests/bin/processing/process.sh b/java/perftests/bin/processing/process.sh
index cb2181622c..f8e13cd5cd 100755
--- a/java/perftests/bin/processing/process.sh
+++ b/java/perftests/bin/processing/process.sh
@@ -51,7 +51,11 @@ processCMSGCFile()
     cat parnew.gc.log | awk '{print $8}' | cut -d 'K' -f 2 | cut -d '>' -f 2 > HEAP_POST_GC.log
     cat parnew.gc.log | awk '{print $1}' | cut -d ':' -f 1 > GC_TIME.log
-
+    calculateStats "MEM_PRE" HEAP_PRE_GC.log
+    calculateStats "MEM_POST" HEAP_POST_GC.log
+    calculateStatsBC "GC_FREQ" GC_FREQ.log
+
+    # Calculate ParNew GC Cumulative total
     cat parnew.gc.log |awk '{sum=sum+$6; print $6, sum}' > GC_DUR.log
 
     # Give a count of GC occurances
@@ -129,6 +133,82 @@ processG1GCFile()
 }
 
+calculateStatsBC()
+{
+  label=$1
+  file=$2
+
+  # Calculate GC Frequencies
+  prevFreq=0
+  countFreq=0
+  gcFreqTotal=0
+  minFreq=99999999
+  maxFreq=0
+
+  echo "" > GC_FREQ.log
+  for time in `cat $file` ; do
+    if [ $prevFreq == 0 ] ; then
+      prevFreq=$time
+    else
+      freq=`echo $time - $prevFreq | bc`
+      echo $freq >> GC_FREQ.log
+
+      prevFreq=$time
+      gcFreqTotal=`echo $gcFreqTotal + $freq | bc`
+      countFreq=$[$countFreq + 1]
+
+      if [ `echo "$freq > $maxFreq" | bc` == 1 ] ; then
+        maxFreq=$freq
+      fi
+
+      if [ `echo "$freq < $minFreq" | bc` == 1 ] ; then
+        minFreq=$freq
+      fi
+    fi
+  done
+
+  # Extract Min/Max/Avg
+  echo "#type:min/max/avg" > $file.stats
+  avgFreq=`echo $gcFreqTotal / $countFreq | bc -l`
+  echo "$label:$minFreq/$maxFreq/$avgFreq" >> $file.stats
+}
+
+calculateStats()
+{
+  label=$1
+  file=$2
+
+  # Calculate min/max/total over the integer values in the file
+  count=0
+  gcTotal=0
+  min=0
+  max=0
+
+  for item in `cat $file` ; do
+    if [ $min == 0 ] ; then
+      min=$item
+    fi
+
+    gcTotal=$[$gcTotal + $item]
+    count=$[$count + 1]
+
+    if [ $item -gt $max ] ; then
+      max=$item
+    fi
+
+    if [ $item -lt $min ] ; then
+      min=$item
+    fi
+  done
+
+  # Extract Min/Max/Avg
+  echo "#type:min/max/avg" > $file.stats
+  avg=`echo $gcTotal / $count | bc -l`
+
+  echo "$label:$min/$max/$avg" >> $file.stats
+  echo "Done Stat generation for $file"
+}
+
 #
 # Take an input file ($1) of lines
 # <value><K|M>
@@ -282,4 +362,4 @@
 set format x "%H:%M"
 plot "CPU.data" using 1:2 with lines
 EOGNUPLOT
-popd &> /dev/null
+popd &> /dev/null
\ No newline at end of file
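A note on the arithmetic in `calculateStatsBC` above: bash's `[ -gt ]` test and `$[...]` expansion are integer-only, so every comparison and sum over the fractional GC timestamps is piped through `bc` instead. A minimal standalone sketch of that technique, with illustrative values:

    a=0.0026610
    b=0.0027560
    # bc prints 1 when the relation holds, 0 otherwise
    if [ `echo "$b > $a" | bc` == 1 ] ; then
        echo "$b is larger"
    fi
    # -l loads bc's math library so the division keeps its decimal places
    echo "($a + $b) / 2" | bc -l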
diff --git a/java/perftests/bin/processing/processTests.py b/java/perftests/bin/processing/processTests.py
index 7142692d14..57c8cc7668 100755
--- a/java/perftests/bin/processing/processTests.py
+++ b/java/perftests/bin/processing/processTests.py
@@ -29,7 +29,7 @@ import shutil
 
 def showUsage():
-    log("./pT.py [-b|--broker-log-dir] <dir> [-t|--test-dir] <dir>")
+    log("./processTests.py [-b|--broker-log-dir] <dir> [-t|--test-dir] <dir>")
 
 
 ACCESS="Access"
@@ -39,6 +39,7 @@ BROKER_LOG="broker.log"
 BROKER_PID="broker.pid"
 BROKER_CPU="broker_cpu.log"
 BROKER_CPU_DATED="broker_cpu.log.dated"
+BROKER_STATS="broker.stats"
 BROKER_GC="gc.log"
 
 GRAPH_DATA="graph.data"
@@ -124,7 +125,7 @@ def processCPUUsage(resultDir):
     # Calulate addition required per process line output
     #
     if topRate.find(".") == -1:
-        second = topRate
+        seconds = topRate
         millis = 0
     else:
         split = topRate.split('.')
@@ -142,33 +143,103 @@ def processCPUUsage(resultDir):
     logfile = open(logfile, "r")
 
     # Open the output file, erasing any existing version
+    # Keep track of the min/max, sum and entries.
+    minCPU=float(sys.maxint)
+    maxCPU=0.0
+    minMem=float(sys.maxint)
+    maxMem=0.0
+
+    entries=0
+    sumCPU=0.0
+    sumMem=0.0
+
     output= open(datedFile, "w")
 
-    for line in logfile:
-        if line.find(pid) != -1:
-            # Split line on whitespace
-            data = line.split()
-
+    for line in logfile:
         #
         # Data format
         # 0   1    2  3  4    5     6    7    8   9    10    11
-        # PID USER PR NI %CPU TIME+ %MEM VIRT RES SHR  S     COMMAND
+        # PID USER PR NI %CPU TIME+ %MEM VIRT RES SHR  S     COMMAND
+        # PID USER PR NI VIRT RES   SHR  S    %CPU %MEM TIME+ COMMAND
         #
-
-        #Write out the date time (ISO-8601 format)
-        output.write(str(start))
-        # Output the %CPU value
-        output.write(" "+str(data[4]))
-        # Output the %MEM value
-        output.write(" "+str(data[5]))
-        output.write('\n')
-
-        # Add the offset based on the logging rate
-        start = start + offset
+        # The positions of %CPU and %MEM vary, probably based on the
+        # os/version of top, so auto-detect where they are.
+        #
+        # Index is 0 based for array usage.
+        index = 0
+        if line.find("PID") != -1:
+            for key in line.split(" "):
+                strippedKey = key.lstrip()
+                if len(strippedKey) > 0:
+                    # Record the key index
+                    if (strippedKey == "%CPU"):
+                        cpuIndex=index
+                    if (strippedKey == "%MEM"):
+                        memIndex=index
+                    # Increase count for next key
+                    index = index + 1
+
+        # Find lines that contain our broker process
+        if line.find("QPBRKR") != -1:
+
+            # Split line on whitespace
+            data = line.split()
+
+            #Write out the date time (ISO-8601 format)
+            output.write(str(start))
+            # Output the %CPU value
+            output.write(" "+str(data[cpuIndex]))
+            # Output the %MEM value
+            output.write(" "+str(data[memIndex]))
+            output.write('\n')
+
+            # Add the offset based on the logging rate
+            start = start + offset
+
+            # Record entries
+            entries = entries + 1
+
+            # Record Metrics
+            # Record CPU data
+            cpu = float(data[cpuIndex])
+            if (cpu < minCPU):
+                minCPU = cpu
+
+            if (cpu > maxCPU):
+                maxCPU = cpu
+
+            sumCPU = sumCPU + cpu
+
+            # Record Mem data
+            mem = float(data[memIndex])
+
+            if (mem < minMem):
+                minMem = mem
+
+            if (mem > maxMem):
+                maxMem = mem
+
+            sumMem = sumMem + mem
+
+    #end for
 
     # Close the files
     logfile.close
     output.close
 
+    # Output stats file
+    statFile=resultDir+os.sep+BROKER_CPU+".stats"
+    output= open(statFile, "w")
+    output.write("#type:min/max/avg")
+    output.write('\n')
+    output.write("CPU:"+str(minCPU)+"/"+str(maxCPU)+"/"+str(float(sumCPU)/float(entries)))
+    output.write('\n')
+    output.write("MEM:"+str(minMem)+"/"+str(maxMem)+"/"+str(float(sumMem)/float(entries)))
+    output.write('\n')
+    output.close()
+
     log("Pre Process of CPU Log file '"+BROKER_CPU+"' complete")
@@ -306,11 +377,23 @@ def sliceCPULog(resultDir, start, end):
     # Open log CPU file for reading
     logFile = open(logfilePath, "r")
 
+    # Keep track of the min/max, sum and entries.
+    minCPU=float(sys.maxint)
+    maxCPU=0.0
+    minMem=float(sys.maxint)
+    maxMem=0.0
+
+    entries=0
+    sumCPU=0.0
+    sumMem=0.0
+
     #
     # Create outputfile
     #
-    cpuslice = open(cpuSliceFile,"w")
-
+    cpuslice = open(cpuSliceFile,"w")
     for line in logFile:
         data = line.split()
         #
@@ -320,11 +403,63 @@ def sliceCPULog(resultDir, start, end):
 
         if lineTime > start:
             if lineTime < end:
+                # Write the data through to the new file
                 cpuslice.writelines(line)
+
+                # Perform stat processing for the min/max/avg
+                data = line.split()
+
+                #
+                # Data format is
+                # <Date> <Time> <%CPU> <%MEM>
+                # 2010-02-19 10:16:17 157 28.1
+                #
+                cpuIndex = 2
+                memIndex = 3
+
+                # Record entries
+                entries = entries + 1
+
+                # Record Metrics
+                # Record CPU data
+                cpu = float(data[cpuIndex])
+                if (cpu < minCPU):
+                    minCPU = cpu
+
+                if (cpu > maxCPU):
+                    maxCPU = cpu
+
+                sumCPU = sumCPU + cpu
+
+                # Record Mem data
+                mem = float(data[memIndex])
+
+                if (mem < minMem):
+                    minMem = mem
+
+                if (mem > maxMem):
+                    maxMem = mem
+
+                sumMem = sumMem + mem
 
     logFile.close()
     cpuslice.close()
     log("Sliced CPU log")
+
+    # Output stats file
+    statFile=cpuSliceFile+".stats"
+    output= open(statFile, "w")
+    output.write("#type:min/max/avg")
+    output.write('\n')
+    output.write("CPU:"+str(minCPU)+"/"+str(maxCPU)+"/"+str(float(sumCPU)/float(entries)))
+    output.write('\n')
+    output.write("MEM:"+str(minMem)+"/"+str(maxMem)+"/"+str(float(sumMem)/float(entries)))
+    output.write('\n')
+    output.close()
+
+    log("Generated stat data from CPU Log file")
+
 
 def sliceGCLog(resultDir, start, end):
     global _brokerLogs
@@ -333,6 +468,14 @@
 
     gcstart = extractTime(ACCESS, logfilePath+".stat")
 
+    # Keep track of the min/max, sum and entries.
+    minGCDuration=float(sys.maxint)
+    maxGCDuration=0.0
+    sumGCDuration=0.0
+    entriesGCDuration = 0
+
+
     # Open log GC file for reading
     logFile = open(logfilePath, "r")
 
@@ -357,11 +500,59 @@
         if lineTime > start:
             if lineTime < end:
                 output.writelines(line)
+
+                # Perform stat processing for the min/max/avg
+
+                # Process GC Duration lines in ParNew gc;
+                # ensure we do not have CMS printed, as that means the line has been corrupted
+                if line.find("ParNew") != -1 and line.find("CMS") == -1:
+
+                    #
+                    # Example data line:
+                    # 7.646: [GC 7.646: [ParNew: 14778K->461K(14784K), 0.0026610 secs] 49879K->36609K(73288K), 0.0027560 secs] [Times: user=0.01 sys=0.00, real=0.01 secs]
+                    #
+                    # So entry 5 is the ParNew time and 8 is the whole GC cycle; 15 entries total.
+
+                    data = line.split()
+
+                    gcTime = 0
+                    # Check we have a valid ParNew Line
+                    if (len(data) == 15):
+
+                        # Record GC Duration data
+                        entriesGCDuration = entriesGCDuration + 1
+                        gcTime = float(data[8])
+
+                        if (gcTime < minGCDuration):
+                            minGCDuration = gcTime
+
+                        if (gcTime > maxGCDuration):
+                            maxGCDuration = gcTime
+
+                        sumGCDuration = sumGCDuration + gcTime
+
 
     # Close the files
     logFile.close
     output.close()
 
     log("Sliced gc log")
+
+    # Output stats file
+    statFile=sliceFile+".stats"
+    output= open(statFile, "w")
+    output.write("#type:min/max/avg")
+    output.write('\n')
+    #
+    # Only provide GCDuration if it was processed
+    #
+    if (entriesGCDuration > 0):
+        output.write("GC_DUR:%.14f/%.14f/%.14f" % (minGCDuration, maxGCDuration, (sumGCDuration/float(entriesGCDuration))))
+        output.write('\n')
+    output.close()
+
+    log("Generated stat data from GC Log file")
 
 
 def sliceLog4j(resultDir, start, end):
@@ -437,8 +628,7 @@ def hasTime(data):
     timeRE = re.compile('[0-9][0-9]:[0-9][0-9]:[0-9][0-9]?[0-9]*')
 
     return dateRE.match(date) and timeRE.match(time)
-
-
+
 def createGraphData(resultDir, testName):
     # Create graph.data file for process.sh
     # Format two lines : Title and filename
@@ -564,4 +754,5 @@ def mkdir(dir):
         os.mkdir(dir)
 
 if __name__ == "__main__":
-    main()
+
+    main()
\ No newline at end of file
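Because the stats files are plain `LABEL:min/max/avg` text, downstream tooling can pull a single figure out with standard shell utilities. A hypothetical one-liner (the path is illustrative) that extracts the average CPU value from a generated file:

    grep '^CPU:' results/broker_cpu.log.stats | cut -d ':' -f 2 | cut -d '/' -f 3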