From a3a48e90788eaf0a8c46c4e6d6e99fd31a96206b Mon Sep 17 00:00:00 2001 From: Justin SB Date: Thu, 20 Dec 2018 10:55:19 -0500 Subject: [PATCH] Create dev-upload tasks, for a faster upload during dev builds --- Makefile | 44 ++++++++++++++++++++++++++-- docs/development/adding_a_feature.md | 5 ++-- docs/development/release.md | 2 +- docs/development/testing.md | 19 +++++++++--- hack/dev-build.sh | 2 +- hack/upload | 28 ++++++++++++++++++ 6 files changed, 88 insertions(+), 12 deletions(-) create mode 100755 hack/upload diff --git a/Makefile b/Makefile index f61ae6df35..7a73fb2b56 100644 --- a/Makefile +++ b/Makefile @@ -15,6 +15,7 @@ DOCKER_REGISTRY?=gcr.io/must-override S3_BUCKET?=s3://must-override/ +UPLOAD_DEST?=$(S3_BUCKET) GCS_LOCATION?=gs://must-override GCS_URL=$(GCS_LOCATION:gs://%=https://storage.googleapis.com/%) LATEST_FILE?=latest-ci.txt @@ -48,6 +49,9 @@ GCFLAGS?= # See http://stackoverflow.com/questions/18136918/how-to-get-current-relative-directory-of-your-makefile MAKEDIR:=$(strip $(shell dirname "$(realpath $(lastword $(MAKEFILE_LIST)))")) +UPLOAD=$(MAKEDIR)/hack/upload + + # Unexport environment variables that can affect tests and are not used in builds unexport AWS_ACCESS_KEY_ID AWS_REGION AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN CNI_VERSION_URL DNS_IGNORE_NS_CHECK DNSCONTROLLER_IMAGE DO_ACCESS_TOKEN GOOGLE_APPLICATION_CREDENTIALS unexport KOPS_BASE_URL KOPS_CLUSTER_NAME KOPS_RUN_OBSOLETE_VERSION KOPS_STATE_STORE KOPS_STATE_S3_ACL KUBE_API_VERSIONS NODEUP_URL OPENSTACK_CREDENTIAL_FILE PROTOKUBE_IMAGE SKIP_PACKAGE_UPDATE @@ -80,7 +84,7 @@ ifndef VERSION # we never cross versions. 
# # We expect that if you are uploading nodeup/protokube, you will set - # VERSION (along with S3_BUCKET), either directly or by setting CI=1 + # VERSION (along with UPLOAD_DEST), either directly or by setting CI=1 ifndef CI VERSION=${KOPS_RELEASE_VERSION} else @@ -747,7 +751,7 @@ push-node-authorizer: bazel-protokube-export: mkdir -p ${BAZELIMAGES} bazel build --action_env=PROTOKUBE_TAG=${PROTOKUBE_TAG} --platforms=@io_bazel_rules_go//go/toolchain:linux_amd64 //images:protokube.tar - cp bazel-bin/images/protokube.tar ${BAZELIMAGES}/protokube.tar + cp -fp bazel-bin/images/protokube.tar ${BAZELIMAGES}/protokube.tar gzip --force --fast ${BAZELIMAGES}/protokube.tar (${SHASUMCMD} ${BAZELIMAGES}/protokube.tar.gz | cut -d' ' -f1) > ${BAZELIMAGES}/protokube.tar.gz.sha1 @@ -773,7 +777,7 @@ bazel-version-dist: bazel-crossbuild-nodeup bazel-crossbuild-kops bazel-protokub bazel-upload: bazel-version-dist # Upload kops to S3 aws s3 sync --acl public-read ${BAZELUPLOAD}/ ${S3_BUCKET} -#----------------------------------------------------------- +#----------------------------------------------------------- # static html documentation .PHONY: live-docs @@ -789,3 +793,37 @@ update-machine-types: #Update machine_types.go go build -o hack/machine_types/machine_types ${KOPS_ROOT}/hack/machine_types/ hack/machine_types/machine_types --out upup/pkg/fi/cloudup/awsup/machine_types.go go fmt upup/pkg/fi/cloudup/awsup/machine_types.go + +#----------------------------------------------------------- +# development targets + +# dev-upload-nodeup uploads nodeup to GCS +.PHONY: dev-upload-nodeup +dev-upload-nodeup: bazel-crossbuild-nodeup + mkdir -p ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/ + cp -fp bazel-bin/cmd/nodeup/linux_amd64_pure_stripped/nodeup ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/nodeup + (${SHASUMCMD} ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/nodeup | cut -d' ' -f1) > ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/nodeup.sha1 + ${UPLOAD} ${BAZELUPLOAD}/ ${UPLOAD_DEST} + +# 
dev-upload-protokube uploads protokube to GCS +.PHONY: dev-upload-protokube +dev-upload-protokube: bazel-protokube-export # Upload kops to GCS + mkdir -p ${BAZELUPLOAD}/kops/${VERSION}/images/ + cp -fp ${BAZELIMAGES}/protokube.tar.gz ${BAZELUPLOAD}/kops/${VERSION}/images/protokube.tar.gz + cp -fp ${BAZELIMAGES}/protokube.tar.gz.sha1 ${BAZELUPLOAD}/kops/${VERSION}/images/protokube.tar.gz.sha1 + ${UPLOAD} ${BAZELUPLOAD}/ ${UPLOAD_DEST} + +# dev-copy-utils copies utils from a recent release +# We don't currently have a bazel build for them, and the build is pretty slow, but they change rarely. +.PHONY: dev-copy-utils +dev-copy-utils: + mkdir -p ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/ + cd ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/; wget -N https://kubeupv2.s3.amazonaws.com/kops/1.11.0-alpha.1/linux/amd64/utils.tar.gz + cd ${BAZELUPLOAD}/kops/${VERSION}/linux/amd64/; wget -N https://kubeupv2.s3.amazonaws.com/kops/1.11.0-alpha.1/linux/amd64/utils.tar.gz.sha1 + ${UPLOAD} ${BAZELUPLOAD}/ ${UPLOAD_DEST} + +# dev-upload does a faster build and uploads to GCS / S3 +# It copies utils instead of building it +.PHONY: dev-upload +dev-upload: dev-upload-nodeup dev-upload-protokube dev-copy-utils + echo "Done" diff --git a/docs/development/adding_a_feature.md b/docs/development/adding_a_feature.md index 368cc927d7..1de28b2759 100644 --- a/docs/development/adding_a_feature.md +++ b/docs/development/adding_a_feature.md @@ -186,10 +186,9 @@ and then push nodeup using: ``` export S3_BUCKET_NAME= -make kops-install upload S3_BUCKET=s3://${S3_BUCKET_NAME} VERSION=dev - -export KOPS_BASE_URL=https://${S3_BUCKET_NAME}.s3.amazonaws.com/kops/dev/ +make kops-install dev-upload UPLOAD_DEST=s3://${S3_BUCKET_NAME} +export KOPS_BASE_URL=https://${S3_BUCKET_NAME}.s3.amazonaws.com/kops/${KOPS_VERSION}/ kops create cluster --zones us-east-1b 
``` diff --git a/docs/development/release.md b/docs/development/release.md index 2abdc9f66d..afe4f1399f 100644 --- a/docs/development/release.md +++ b/docs/development/release.md @@ -56,7 +56,7 @@ make dns-controller-push DOCKER_REGISTRY=kope ``` # export AWS_PROFILE=??? # If needed -make upload S3_BUCKET=s3://kubeupv2 +make upload UPLOAD_DEST=s3://kubeupv2 ``` ## Tag new version diff --git a/docs/development/testing.md b/docs/development/testing.md index 9a773e36bb..82efb63c10 100644 --- a/docs/development/testing.md +++ b/docs/development/testing.md @@ -71,16 +71,27 @@ This isn't yet terribly useful, though - it just shows how to replicate the existing job, but not with your custom code. To test a custom `kops` build, you can do the following: +To use S3: ``` # cd to your kops repo export S3_BUCKET_NAME= -make kops-install upload S3_BUCKET=s3://${S3_BUCKET_NAME} VERSION=dev +make kops-install dev-upload UPLOAD_DEST=s3://${S3_BUCKET_NAME} -export KOPS_BASE_URL=https://${S3_BUCKET_NAME}.s3.amazonaws.com/kops/dev/ - -kops create cluster --zones us-east-1b +KOPS_VERSION=`bazel run //cmd/kops version | cut -f2 -d ' '` +export KOPS_BASE_URL=https://${S3_BUCKET_NAME}.s3.amazonaws.com/kops/${KOPS_VERSION}/ ``` +To use GCS: +``` +export GCS_BUCKET_NAME=kops-dev-${USER} +make kops-install dev-upload UPLOAD_DEST=gs://${GCS_BUCKET_NAME} + +KOPS_VERSION=`bazel run //cmd/kops version | cut -f2 -d ' '` +export KOPS_BASE_URL=https://${GCS_BUCKET_NAME}.storage.googleapis.com/kops/${KOPS_VERSION}/ +``` + +You can create a cluster using `kops create cluster --zones us-east-1b` + Then follow the test directions above. 
To override the test list for the job, you need to familiar with the diff --git a/hack/dev-build.sh b/hack/dev-build.sh index 450ea835ce..03711b7028 100755 --- a/hack/dev-build.sh +++ b/hack/dev-build.sh @@ -95,7 +95,7 @@ echo "Starting build" # removing CI=1 because it forces a new upload every time # export CI=1 -make && S3_BUCKET=s3://${NODEUP_BUCKET} make upload +make && UPLOAD_DEST=s3://${NODEUP_BUCKET} make upload # removing make test since it relies on the files in the bucket # && make test diff --git a/hack/upload b/hack/upload new file mode 100755 index 0000000000..3d746fbb54 --- /dev/null +++ b/hack/upload @@ -0,0 +1,28 @@ +#!/bin/bash -e + +SRC=$1 +DEST=$2 + +if [[ -z "${SRC}" ]]; then + echo "syntax: $0 <src> <dest>" + exit 1 +fi + +if [[ -z "${DEST}" ]]; then + echo "syntax: $0 <src> <dest>" + exit 1 +fi + +if [[ "${DEST:0:5}" == "s3://" ]]; then + aws s3 sync --acl public-read ${SRC} ${DEST} + exit 0 +fi + +if [[ "${DEST:0:5}" == "gs://" ]]; then + gsutil -h "Cache-Control:private,max-age=0" rsync -r -a public-read ${SRC} ${DEST} + exit 0 +fi + +echo "Unsupported destination - supports s3:// and gs:// urls: ${DEST}" +exit 1