github.com/eikeon/docker@v1.5.0-rc4/docs/release.sh

#!/usr/bin/env bash
set -e

set -o pipefail

usage() {
	cat >&2 <<'EOF'
To publish the Docker documentation you need to set your access_key and secret_key
in the docs/awsconfig file (with the keys in a [profile $AWS_S3_BUCKET] section,
so you can keep more than one set of keys in the file) and set the AWS_S3_BUCKET
env var to the name of your bucket.

If you're publishing the current release's documentation, also set `BUILD_ROOT=yes`.

	make AWS_S3_BUCKET=docs-stage.docker.com docs-release

will then push the documentation site to your S3 bucket.

Note: add `OPTIONS=--dryrun` to see what would be done without sending anything to the server.
EOF
	exit 1
}

[ "$AWS_S3_BUCKET" ] || usage

VERSION=$(cat VERSION)

if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
	if [ "${VERSION%-dev}" != "$VERSION" ]; then
		echo "Please do not push '-dev' documentation to docs.docker.com ($VERSION)"
		exit 1
	fi
	cat > ./sources/robots.txt <<'EOF'
User-agent: *
Allow: /
EOF

else
	cat > ./sources/robots.txt <<'EOF'
User-agent: *
Disallow: /
EOF
fi

# Strip the patch level from the version and prefix "v": e.g. 1.0.2-dev -> v1.0
MAJOR_MINOR="v${VERSION%.*}"
export MAJOR_MINOR

export BUCKET=$AWS_S3_BUCKET

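# For reference, an awsconfig file for this setup might look like this
# (illustrative values only; the section name must match $AWS_S3_BUCKET):
#   [profile docs-stage.docker.com]
#   aws_access_key_id = AKIAEXAMPLEEXAMPLE
#   aws_secret_access_key = wJalrEXAMPLEsecretEXAMPLE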
export AWS_CONFIG_FILE="$(pwd)/awsconfig"
[ -e "$AWS_CONFIG_FILE" ] || usage
export AWS_DEFAULT_PROFILE=$BUCKET
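# With AWS_CONFIG_FILE and AWS_DEFAULT_PROFILE exported, the aws commands
# below read their credentials from the matching [profile ...] section.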

echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"

setup_s3() {
	echo "Create $BUCKET"
	# Try creating the bucket. Ignore errors (it might already exist).
	aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
	# Check access to the bucket.
	echo "test $BUCKET exists"
	aws s3 --profile $BUCKET ls s3://$BUCKET
	# Make the bucket accessible through website endpoints.
	echo "make $BUCKET accessible as a website"
	#aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
	s3conf=$(envsubst < s3_website.json)
	echo
	echo "$s3conf"
	echo
	aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
}
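
# For reference, s3_website.json holds the website configuration that
# put-bucket-website expects. Judging from the commented-out `aws s3 website`
# line above, it is presumably something like the sketch below (a sketch only;
# the real file may add RoutingRules, and envsubst expands any $BUCKET /
# $MAJOR_MINOR references embedded in it):
#   {
#       "IndexDocument": { "Suffix": "index.html" },
#       "ErrorDocument": { "Key": "jsearch/index.html" }
#   }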

build_current_documentation() {
	mkdocs build
	cd site/
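	# -9: best compression; -k: keep the original search_content.json;
	# -f: overwrite any stale .gz left over from a previous build.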
	gzip -9k -f search_content.json
	cd ..
}

upload_current_documentation() {
	src=site/
	dst=s3://$BUCKET$1

	cache=max-age=3600
	if [ "$NOCACHE" ]; then
		cache=no-cache
	fi

	echo
	echo "Uploading $src"
	echo "  to $dst"
	echo

	# A really complicated way to send only the files we want: if there are
	# too many in any one set, aws s3 sync seems to fall over with 2 files to
	# go (e.g. versions.html_fragment), so copy one file type at a time.
	# NOTE: a `for` header was missing here; this list of endings is a
	# reconstruction inferred from the "*.$i" references in the loop body.
	endings=( json txt html xml css js gif png JPG ttf svg woff html_fragment )
	for i in "${endings[@]}"; do
		echo "uploading *.$i"
		run="aws s3 cp $src $dst $OPTIONS --profile $BUCKET --cache-control $cache --acl public-read --recursive --exclude '*' --include '*.$i'"
		echo "======================="
		echo "$run"
		echo "======================="
		# Run the command directly rather than expanding $run, so the quoted
		# glob patterns reach the AWS CLI intact.
		aws s3 cp "$src" "$dst" $OPTIONS --profile "$BUCKET" --cache-control "$cache" \
			--acl public-read --recursive --exclude "*" --include "*.$i"
	done

	# Make sure the search_content.json.gz file has the right content-encoding
	aws s3 cp --profile $BUCKET --cache-control $cache --content-encoding="gzip" --acl public-read "site/search_content.json.gz" "$dst"
}

invalidate_cache() {
	if [ -z "$DISTRIBUTION_ID" ]; then
		echo "Skipping CloudFront cache invalidation"
		return
	fi

	dst=$1

	#aws cloudfront  create-invalidation --profile docs.docker.com --distribution-id $DISTRIBUTION_ID --invalidation-batch '{"Paths":{"Quantity":1, "Items":["'+$file+'"]},"CallerReference":"19dec2014sventest1"}'
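	# CloudFront support was still a preview feature in the AWS CLI at the
	# time, so it has to be enabled explicitly before create-invalidation works.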
	aws configure set preview.cloudfront true

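	# Map each changed Markdown source under docs/sources to the HTML path
	# that CloudFront serves:
	#   .../docs/sources/foo/index.md -> /foo/index.html
	#   .../docs/sources/foo/bar.md   -> /foo/bar/index.html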
	files=($(cat changed-files | grep 'sources/.*$' | sed -E 's#.*docs/sources##' | sed -E 's#index\.md#index.html#' | sed -E 's#\.md#/index.html#'))
	files+=("/index.html")
	files+=("/versions.html_fragment")

	len=${#files[@]}

	echo "aws cloudfront  create-invalidation --profile $AWS_S3_BUCKET --distribution-id $DISTRIBUTION_ID --invalidation-batch '" > batchfile
	echo "{\"Paths\":{\"Quantity\":$len," >> batchfile
	echo "\"Items\": [" >> batchfile

	for file in "${files[@]}"
	do
		# Omit the comma after the last element of the JSON array.
		if [ "$file" == "${files[${#files[@]}-1]}" ]; then
			comma=""
		else
			comma=","
		fi
		echo "\"$dst$file\"$comma" >> batchfile
	done

	echo "]}, \"CallerReference\":" >> batchfile
	echo "\"$(date)\"}'" >> batchfile

	echo "-----"
	cat batchfile
	echo "-----"
	sh batchfile
	echo "-----"
}
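
# The generated batchfile holds one multi-line shell command; with a single
# changed file it would look roughly like this (illustrative values):
#   aws cloudfront  create-invalidation --profile docs.docker.com --distribution-id EXAMPLEID --invalidation-batch '
#   {"Paths":{"Quantity":3,
#   "Items": [
#   "/installation/index.html",
#   "/index.html",
#   "/versions.html_fragment"
#   ]}, "CallerReference":
#   "Mon Feb  9 12:00:00 UTC 2015"}'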

if [ "$OPTIONS" != "--dryrun" ]; then
	setup_s3
fi

# Default to building only the version-specific docs so we don't clobber the
# latest docs by accident with old versions.
if [ "$BUILD_ROOT" == "yes" ]; then
	echo "Building root documentation"
	build_current_documentation
	upload_current_documentation
	[ "$NOCACHE" ] || invalidate_cache
fi

# Build again with the /$MAJOR_MINOR/ prefix (e.g. /v1.5/) as the site_url.
sed -i "s/^site_url:.*/site_url: \/$MAJOR_MINOR\//" mkdocs.yml
echo "Building the /$MAJOR_MINOR/ documentation"
build_current_documentation
upload_current_documentation "/$MAJOR_MINOR/"
[ "$NOCACHE" ] || invalidate_cache "/$MAJOR_MINOR"