github.com/opiuman/docker@v1.6.0/docs/release.sh (about)

     1  #!/bin/bash
     2  set -e
     3  
     4  set -o pipefail
     5  
# Print configuration instructions to stderr and abort the script.
# Called whenever required setup is missing (AWS_S3_BUCKET unset, or no
# awsconfig file next to the script); always exits with status 1.
# The here-doc delimiter is quoted ('EOF') so $AWS_S3_BUCKET and the
# backtick-quoted examples are printed literally, not expanded.
usage() {
	cat >&2 <<'EOF'
To publish the Docker documentation you need to set your access_key and secret_key in the docs/awsconfig file 
(with the keys in a [profile $AWS_S3_BUCKET] section - so you can have more than one set of keys in your file)
and set the AWS_S3_BUCKET env var to the name of your bucket.

If you're publishing the current release's documentation, also set `BUILD_ROOT=yes`

make AWS_S3_BUCKET=docs-stage.docker.com docs-release

will then push the documentation site to your s3 bucket.

 Note: you can add `OPTIONS=--dryrun` to see what will be done without sending to the server
 You can also add NOCACHE=1 to publish without a cache, which is what we do for the master docs.
EOF
	exit 1
}
    23  
    24  create_robots_txt() {
    25  	if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
    26  		cat > ./sources/robots.txt <<-'EOF'
    27  		User-agent: *
    28  		Allow: /
    29  		EOF
    30  	else
    31  		cat > ./sources/robots.txt <<-'EOF'
    32  		User-agent: *
    33  		Disallow: /
    34  		EOF
    35  	fi
    36  }
    37  
    38  setup_s3() {
    39  	# Try creating the bucket. Ignore errors (it might already exist).
    40  	echo "create $BUCKET if it does not exist"
    41  	aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
    42  
    43  	# Check access to the bucket.
    44  	echo "test $BUCKET exists"
    45  	aws s3 --profile $BUCKET ls s3://$BUCKET
    46  
    47  	# Make the bucket accessible through website endpoints.
    48  	echo "make $BUCKET accessible as a website"
    49  	#aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
    50  	local s3conf=$(cat s3_website.json | envsubst)
    51  	aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
    52  }
    53  
    54  build_current_documentation() {
    55  	mkdocs build
    56  	cd site/
    57  	gzip -9k -f search_content.json
    58  	cd ..
    59  }
    60  
# Copy the freshly built site/ directory to the S3 bucket.
# Arguments: $1 - optional key prefix (e.g. "/v1.6/"); empty for the root.
# Globals: BUCKET (read), NOCACHE (read - disables caching when set),
#          OPTIONS (read - extra aws flags, e.g. --dryrun).
upload_current_documentation() {
	src=site/
	dst=s3://$BUCKET$1

	# One-hour cache by default; master docs are published with NOCACHE=1.
	cache=max-age=3600
	if [ "$NOCACHE" ]; then
		cache=no-cache
	fi

	printf "\nUploading $src to $dst\n"

	# a really complicated way to send only the files we want
	# if there are too many in any one set, aws s3 sync seems to fall over with 2 files to go
	#  versions.html_fragment
	# NOTE(review): $i is never set in this function, so the pattern expands
	# to "*." -- presumably a leftover from a per-extension loop; confirm
	# against history before "fixing". $run below is deliberately executed
	# unquoted so it word-splits into the aws argument list (the escaped
	# quotes therefore reach aws as literal characters).
	include="--recursive --include \"*.$i\" "
	run="aws s3 cp $src $dst $OPTIONS --profile $BUCKET --cache-control $cache --acl public-read $include"
	printf "\n=====\n$run\n=====\n"
	$run

	# Make sure the search_content.json.gz file has the right content-encoding
	aws s3 cp --profile $BUCKET --cache-control $cache --content-encoding="gzip" --acl public-read "site/search_content.json.gz" "$dst"
}
    83  
# Generate and run an "aws cloudfront create-invalidation" request that
# covers every file in site/, so CloudFront serves the newly uploaded
# docs. Silently skipped unless DISTRIBUTION_ID is set.
# Arguments: $1 - optional path prefix for the invalidated keys.
# Globals: DISTRIBUTION_ID (read), AWS_S3_BUCKET (read - aws profile name).
invalidate_cache() {
	if [[ -z "$DISTRIBUTION_ID" ]]; then
		echo "Skipping Cloudfront cache invalidation"
		return
	fi

	dst=$1

	# NOTE(review): cloudfront was a preview feature of the aws CLI when
	# this was written; this opt-in may be a no-op on modern CLIs.
	aws configure set preview.cloudfront true

	# Get all the files
	# not .md~ files
	# replace spaces w %20 so urlencoded
	# NOTE(review): relies on word-splitting the find output into the
	# array; spaces are pre-encoded as %20 first, but other whitespace or
	# glob characters in filenames would still split or expand here.
	files=( $(find site/ -not -name "*.md*" -type f | sed 's/site//g' | sed 's/ /%20/g') )

	len=${#files[@]}
	last_file=${files[$((len-1))]}

	# Build the invalidation as a small shell script ("batchfile"): the
	# JSON batch is emitted line by line, with a comma after every path
	# except the last so the Items array stays valid JSON.
	echo "aws cloudfront  create-invalidation --profile $AWS_S3_BUCKET --distribution-id $DISTRIBUTION_ID --invalidation-batch '" > batchfile
	echo "{\"Paths\":{\"Quantity\":$len," >> batchfile
	echo "\"Items\": [" >> batchfile

	for file in "${files[@]}" ; do
		if [[ $file == $last_file ]]; then
			comma=""
		else
			comma=","
		fi
		echo "\"$dst$file\"$comma" >> batchfile
	done

	# CallerReference must be unique per invalidation; the timestamp serves.
	echo "]}, \"CallerReference\":\"$(date)\"}'" >> batchfile

	sh batchfile
}
   119  
   120  main() {
   121  	[ "$AWS_S3_BUCKET" ] || usage
   122  
   123  	# Make sure there is an awsconfig file
   124  	export AWS_CONFIG_FILE=$(pwd)/awsconfig
   125  	[ -f "$AWS_CONFIG_FILE" ] || usage
   126  
   127  	# Get the version
   128  	VERSION=$(cat VERSION)
   129  
   130  	# Disallow pushing dev docs to master
   131  	if [ "$AWS_S3_BUCKET" == "docs.docker.com" ] && [ "${VERSION%-dev}" != "$VERSION" ]; then
   132  		echo "Please do not push '-dev' documentation to docs.docker.com ($VERSION)"
   133  		exit 1
   134  	fi
   135  
   136  	# Clean version - 1.0.2-dev -> 1.0
   137  	export MAJOR_MINOR="v${VERSION%.*}"
   138  
   139  	export BUCKET=$AWS_S3_BUCKET
   140  	export AWS_DEFAULT_PROFILE=$BUCKET
   141  
   142  	# debug variables
   143  	echo "bucket: $BUCKET, full version: $VERSION, major-minor: $MAJOR_MINOR"
   144  	echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"
   145  
   146  	# create the robots.txt
   147  	create_robots_txt
   148  
   149  	if [ "$OPTIONS" != "--dryrun" ]; then
   150  		setup_s3
   151  	fi
   152  
   153  	# Default to only building the version specific docs
   154  	# so we don't clober the latest by accident with old versions
   155  	if [ "$BUILD_ROOT" == "yes" ]; then
   156  		echo "Building root documentation"
   157  		build_current_documentation
   158  
   159  		echo "Uploading root documentation"
   160  		upload_current_documentation
   161  		[ "$NOCACHE" ] || invalidate_cache
   162  	fi
   163  
   164  	#build again with /v1.0/ prefix
   165  	sed -i "s/^site_url:.*/site_url: \/$MAJOR_MINOR\//" mkdocs.yml
   166  	echo "Building the /$MAJOR_MINOR/ documentation"
   167  	build_current_documentation
   168  
   169  	echo "Uploading the documentation"
   170  	upload_current_documentation "/$MAJOR_MINOR/"
   171  
   172  	# Invalidating cache
   173  	[ "$NOCACHE" ] || invalidate_cache "/$MAJOR_MINOR"
   174  }
   175  
   176  main