github.com/unigraph-dev/dgraph@v1.1.1-0.20200923154953-8b52b426f765/systest/21million/test-21million.sh (about)

     1  #!/bin/bash -e
     2  
     3  readonly ME=${0##*/}
     4  readonly SRCDIR=$(dirname $0)
     5  
     6  QUERY_DIR=$SRCDIR/queries
     7  BENCHMARKS_REPO="$GOPATH/src/github.com/dgraph-io/benchmarks"
     8  SCHEMA_FILE="$BENCHMARKS_REPO/data/21million.schema"
     9  DATA_FILE="$BENCHMARKS_REPO/data/21million.rdf.gz"
    10  
    11  # this may be used to load a smaller data set when testing the test itself
    12  #DATA_FILE="$BENCHMARKS_REPO/data/goldendata.rdf.gz"
    13  
    14  function Info {
    15      echo -e "INFO: $*"
    16  }
    17  
    18  function DockerCompose {
    19      docker-compose -p dgraph "$@"
    20  }
    21  
# Option defaults: bulk loader; cleanup/savedir decided later; verbose diffs.
HELP= LOADER=bulk CLEANUP= SAVEDIR= LOAD_ONLY= QUIET=

# Enhanced (util-linux) getopt gives long-option support and re-quotes the
# arguments so 'eval set' can safely reload them into the positional params.
ARGS=$(/usr/bin/getopt -n$ME -o"h" -l"help,loader:,cleanup:,savedir:,load-only,quiet" -- "$@") || exit 1
eval set -- "$ARGS"
while true; do
    case "$1" in
        -h|--help)      HELP=yes;              ;;
        --loader)       LOADER=${2,,}; shift   ;;  # ${2,,} lower-cases the value
        --cleanup)      CLEANUP=${2,,}; shift  ;;
        --savedir)      SAVEDIR=${2,,}; shift  ;;
        --load-only)    LOAD_ONLY=yes          ;;
        --quiet)        QUIET=yes              ;;
        --)             shift; break           ;;  # end of options marker from getopt
    esac
    shift
done
    38  
    39  if [[ $HELP ]]; then
    40      cat <<EOF
    41  usage: $ME [-h|--help] [--loader=<bulk|live|none>] [--cleanup=<all|none|servers>] [--savedir=path]
    42  
    43  options:
    44  
    45      --loader        bulk = use dgraph bulk (default)
    46                      live = use dgraph live
    47                      none = use data loaded by previous run
    48      --cleanup       all = take down containers and data volume (default)
    49                      servers = take down dgraph zero and alpha but leave data volume up
    50                      none = leave up containers and data volume
    51      --savedir=path  specify a directory to save test failure json in
    52                      for easier post-test review
    53      --load-only     load data but do not run tests
    54      --quiet         just report which queries differ, without a diff
    55  EOF
    56      exit 0
    57  fi
    58  
    59  if [[ $LOADER != bulk && $LOADER != live && $LOADER != none ]]; then
    60      echo >&2 "$ME: loader must be 'bulk' or 'live' or 'none' -- $LOADER"
    61      exit 1
    62  fi
    63  
    64  # default to leaving the data around for another run
    65  # if already re-using it from a previous run
    66  if [[ $LOADER == none && -z $CLEANUP ]]; then
    67      CLEANUP=servers
    68  fi
    69  
    70  # default to cleaning up both services and volume
    71  if [[ -z $CLEANUP  ]]; then
    72      CLEANUP=all
    73  elif [[ $CLEANUP != all && $CLEANUP != servers && $CLEANUP != none ]]; then
    74      echo >&2 "$ME: cleanup must be 'all' or 'servers' or 'none' -- $LOADER"
    75      exit 1
    76  fi
    77  
    78  # default to quiet mode if diffs are being saved in a directory
    79  if [[ -n $SAVEDIR ]]; then
    80      QUIET=yes
    81  fi
    82  
    83  Info "entering directory $SRCDIR"
    84  cd $SRCDIR
    85  
    86  if [[ $LOADER != none ]]; then
    87      Info "removing old data (if any)"
    88      DockerCompose down -v --remove-orphans
    89  else
    90      Info "using previously loaded data"
    91  fi
    92  
    93  Info "bringing up zero container"
    94  DockerCompose up -d --remove-orphans --force-recreate zero1
    95  
    96  Info "waiting for zero to become leader"
    97  DockerCompose logs -f zero1 | grep -q -m1 "I've become the leader"
    98  
if [[ $LOADER == bulk ]]; then
    Info "bulk loading data set"
    # Run the bulk loader in a one-off alpha1 container with the benchmarks
    # repo bind-mounted at the same path, so $SCHEMA_FILE/$DATA_FILE resolve
    # identically inside the container. The heredoc is unquoted, so those
    # variables expand on the host before the script reaches the container.
    # The resulting 'p' directory is moved to /data/alpha1 so the alpha
    # started later picks up the prebuilt posting lists.
    DockerCompose run -v $BENCHMARKS_REPO:$BENCHMARKS_REPO --name bulk_load --rm alpha1 \
        bash -s <<EOF
            /gobin/dgraph bulk --schema=$SCHEMA_FILE --files=$DATA_FILE \
                               --format=rdf --zero=zero1:5180 --out=/data/alpha1/bulk
            mv /data/alpha1/bulk/0/p /data/alpha1
EOF
fi
   108  
   109  Info "bringing up alpha container"
   110  DockerCompose up -d --force-recreate alpha1
   111  
   112  Info "waiting for alpha to be ready"
   113  DockerCompose logs -f alpha1 | grep -q -m1 "Server is ready"
   114  # after the server prints the log "Server is ready", it may be still loading data from badger
   115  Info "sleeping for 10 seconds for the server to be ready"
   116  sleep 10
   117  
if [[ $LOADER == live ]]; then
    Info "live loading data set"
    # Live-load with the host's dgraph binary against the published ports
    # (zero on :5180, alpha on :9180).
    dgraph live --schema=$SCHEMA_FILE --files=$DATA_FILE \
                --format=rdf --zero=:5180 --alpha=:9180 --logtostderr
fi

if [[ $LOAD_ONLY ]]; then
    Info "exiting after data load"
    exit 0
fi
   128  
   129  # replace variables if set with the corresponding option
   130  SAVEDIR=${SAVEDIR:+-savedir=$SAVEDIR}
   131  QUIET=${QUIET:+-quiet}
   132  
   133  Info "running benchmarks/regression queries"
   134  
   135  if [[ ! -z "$TEAMCITY_VERSION" ]]; then
   136      # Make TeamCity aware of Go tests
   137      export GOFLAGS="-json"
   138  fi
   139  go test -v -tags standalone $SAVEDIR $QUIET || FOUND_DIFFS=1
   140  
   141  if [[ $CLEANUP == all ]]; then
   142      Info "bringing down zero and alpha and data volumes"
   143      DockerCompose down -v
   144  elif [[ $CLEANUP == none ]]; then
   145      Info "leaving up zero and alpha"
   146  else
   147      Info "bringing down zero and alpha only"
   148      DockerCompose down
   149  fi
   150  
   151  if [[ $FOUND_DIFFS -eq 0 ]]; then
   152      Info "no diffs found in query results"
   153  else
   154      Info "found some diffs in query results"
   155  fi
   156  
   157  exit $FOUND_DIFFS