#!/bin/bash
# Copyright IBM Corp. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# This script runs benchmarks in the specified package or in all the fabric-ca
# packages except vendor, api, mocks, dbutil, ldap, test/fabric-ca-load-tester.
# It also allows you to create memory or cpu profiles of the benchmarks in each
# package.
#
# If the script is invoked without -C or -M option, the bench.results file containing
# benchmark results is created in the current working directory.
#
# If -C or -M option is specified, bench.results, profile files (bench-cpu.prof,
# bench-mem.prof) and the test binary are created in the specified package
# (if -P option is specified) or in all the packages (if -P option is not specified).
# Refer to the Profiling section in the README.md file for more information on
# analyzing profile files.
#
# Run the script with -h option to print all the valid arguments.

# Silent wrappers so directory changes do not clutter benchmark output.
pushd() {
  command pushd "$@" > /dev/null
}

popd() {
  command popd "$@" > /dev/null
}

# Print usage information for all supported flags.
usage() {
  echo "ARGS:"
  echo "  -C -- Create CPU profile"
  echo "  -M -- Create memory profile"
  echo "  -P <package name> -- Run benchmark for the specified package. Default benchmarks are run on all the packages"
  echo "  -R -- Remove benchmark files"
  echo "  -h -- Print help"
}

# Report the overall result and terminate the script.
# $1 - exit code of the (last) benchmark command; non-zero means failure.
exitrc() {
  if [ "$1" -ne 0 ]; then
    echo "ERROR: Failure occurred while running benchmarks" >&2
    exit "$1"
  fi
  echo "Finished running all benchmarks"
  exit 0
}

# Remove all benchmark-related files (results, profiles, compiled test binary)
# from every package in $PKGS.
removefiles() {
  local pkg
  for pkg in $PKGS; do   # intentional word-splitting: one Go import path per word
    pushd "$GOPATH/src/$pkg"
    rm -f "$BENCHMARK_FILE_NAME" bench-cpu.prof bench-mem.prof "${PWD##*/}.test"
    popd
  done
}

# Initialize option flags so later tests never read unset variables.
CPU_PROFILE=""
MEM_PROFILE=""
PACKAGE_NAME=""
REMOVE_BENCHMARKS=""
CREATE_PROFILE=""

while getopts "hRCMP:" option; do
  case "$option" in
    C) CPU_PROFILE="true" ;;
    M) MEM_PROFILE="true" ;;
    P) PACKAGE_NAME="$OPTARG" ;;
    R) REMOVE_BENCHMARKS="true" ;;
    \?|h) usage
          exit 1
          ;;
  esac
done

BENCHMARK_FILE_NAME=bench.results
export PATH=$PATH:$GOPATH/bin
PKGS=$(go list github.com/hyperledger/fabric-ca/... | grep -Ev '/vendor/|/api|/mocks|/dbutil|/ldap|/test/fabric-ca-load-tester')
if [ -n "$PACKAGE_NAME" ]; then
  PKGS=$PACKAGE_NAME
fi

if [ -n "$REMOVE_BENCHMARKS" ]; then
  removefiles
  exit
fi

# Build the profiling flags as an array so empty options expand to nothing
# without relying on unquoted word-splitting.
PROFILE_OPTS=()
if [ -n "$CPU_PROFILE" ]; then
  PROFILE_OPTS+=("-cpuprofile=bench-cpu.prof")
  CREATE_PROFILE="true"
fi
if [ -n "$MEM_PROFILE" ]; then
  PROFILE_OPTS+=("-memprofile=bench-mem.prof")
  CREATE_PROFILE="true"
fi

echo "Running all benchmarks ..."
if [ -n "$CREATE_PROFILE" ]; then
  # go test does not allow specifying multiple packages when benchmarks are
  # profiled, so run go test in each package with profiling options. This
  # creates benchmark and profile files in each package directory.
  #
  # NOTE: exit only on failure inside the loop; the previous version called
  # exitrc unconditionally here, which exited after the FIRST package and
  # silently skipped the rest.
  for pkg in $PKGS; do   # intentional word-splitting: one Go import path per word
    pushd "$GOPATH/src/$pkg"
    (go test -run='^$' -bench=. -benchmem -timeout=20m "${PROFILE_OPTS[@]}" 2> /dev/null) | tee "$BENCHMARK_FILE_NAME"
    rc=${PIPESTATUS[0]}
    if [ "$rc" -ne 0 ]; then
      exitrc "$rc"
    fi
    popd
  done
  exitrc 0
else
  # No profiling requested: a single go test invocation may cover all packages.
  (go test -run='^$' -bench=. -benchmem -timeout=20m $PKGS 2> /dev/null) | tee "$BENCHMARK_FILE_NAME"
  exitrc "${PIPESTATUS[0]}"
fi