chore(components): Update scripts to use public ecr instead of docker (#8264)

* Update scripts to use public ecr instead of docker

* other codebuild specs

* run black on non-formatted files

* login to general ecr

* change default image for generate_components

* use public ecr amazon linux

* use :2 tag

* add arg for kfp v1 or v2 build version

* change whitespace and add docker login back for integration tests

* enable buildkit

* use v2 license file if in v2 build-mode

* make build_version mandatory
This commit is contained in:
ryansteakley 2022-09-14 19:16:40 -07:00 committed by GitHub
parent 166d6bb917
commit 0368fc6174
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 42 additions and 18 deletions

View File

@@ -11,12 +11,12 @@
# limitations under the License.
FROM amazonlinux:2
FROM public.ecr.aws/amazonlinux/amazonlinux:2
ENV PYTHONPATH /app
RUN yum update -y \
&& yum install -y -q \
&& yum install -y -q \
ca-certificates \
python3 \
wget \

View File

@@ -7,9 +7,7 @@ import os
import random
# Training job component, path is relative from this directory.
sagemaker_training_op = components.load_component_from_file(
"../../component.yaml"
)
sagemaker_training_op = components.load_component_from_file("../../component.yaml")
# This section initializes complex data structures that will be used for the pipeline.
# S3 bucket where dataset is uploaded

View File

@@ -1,12 +1,16 @@
version: 0.2
env:
variables:
DOCKER_BUILDKIT: "1"
phases:
pre_build:
commands:
# Log in to Dockerhub
- docker login -u $DOCKER_CONFIG_USERNAME -p $DOCKER_CONFIG_PASSWORD
- aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws/kubeflow-on-aws
build:
commands:
- cd components/aws/sagemaker
- ./codebuild/scripts/deploy.sh -d "${DRY_RUN}"
- ./codebuild/scripts/deploy.sh -d "${DRY_RUN}" -b "${BUILD_VERSION}"

View File

@@ -13,12 +13,13 @@
set -e
REMOTE_REPOSITORY="amazon/aws-sagemaker-kfp-components"
REMOTE_REPOSITORY="public.ecr.aws/kubeflow-on-aws/aws-sagemaker-kfp-components"
DRYRUN="true"
FULL_VERSION_TAG=""
BUILD_VERSION=""
DOCKER_CONFIG_PATH=${DOCKER_CONFIG_PATH:-"/root/.docker"}
while getopts ":d:v:" opt; do
while getopts ":d:v:b:" opt; do
case ${opt} in
d)
if [[ "${OPTARG}" = "false" ]]; then
@@ -30,9 +31,18 @@ while getopts ":d:v:" opt; do
v)
FULL_VERSION_TAG="${OPTARG}"
;;
b)
BUILD_VERSION="${OPTARG}"
;;
esac
done
# Check that build version is not empty
if [ -z "$BUILD_VERSION" ]; then
>&2 echo "BUILD_VERSION is required, please provide the variable in codebuild or -b <BUILD_VERSION> if running locally"
exit 1
fi
function docker_tag_exists() {
curl --silent -f -lSL https://index.docker.io/v1/repositories/$1/tags/$2 > /dev/null 2> /dev/null
}
@@ -43,7 +53,12 @@ if [[ ! -z "${FULL_VERSION_TAG}" && ! "${FULL_VERSION_TAG}" =~ ^[0-9]+\.[0-9]+\.
fi
# Check version does not already exist
VERSION_LICENSE_FILE="THIRD-PARTY-LICENSES.txt"
if [ "${BUILD_VERSION}" == "v2" ]; then
VERSION_LICENSE_FILE="THIRD-PARTY-LICENSES.v2.txt"
else
VERSION_LICENSE_FILE="THIRD-PARTY-LICENSES.txt"
fi
if [[ -z "${FULL_VERSION_TAG}" ]]; then
FULL_VERSION_TAG="$(cat ${VERSION_LICENSE_FILE} | head -n1 | grep -Po '(?<=version )\d.\d.\d')"
fi
@@ -62,7 +77,14 @@ echo "Deploying version ${FULL_VERSION_TAG}"
# Build the image
FULL_VERSION_IMAGE="${REMOTE_REPOSITORY}:${FULL_VERSION_TAG}"
docker build . -f Dockerfile -t "${FULL_VERSION_IMAGE}"
if [ "${BUILD_VERSION}" == "v2" ]; then
echo "Building V2 image"
docker build . -f v2.Dockerfile -t "${FULL_VERSION_IMAGE}"
else
echo "Building V1 image"
docker build . -f Dockerfile -t "${FULL_VERSION_IMAGE}"
fi
# Get the minor and major versions
[[ $FULL_VERSION_TAG =~ ^[0-9]+\.[0-9]+ ]] && MINOR_VERSION_IMAGE="${REMOTE_REPOSITORY}:${BASH_REMATCH[0]}"

View File

@@ -2,7 +2,7 @@ version: 0.2
phases:
pre_build:
commands:
- docker login -u $DOCKER_CONFIG_USERNAME -p $DOCKER_CONFIG_PASSWORD
- aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws
build:
commands:
- docker build . -f ./components/aws/sagemaker/tests/unit_tests/Dockerfile -t amazon/unit-test-image --quiet

View File

@@ -46,7 +46,7 @@ def parse_arguments():
"--image",
type=str,
required=False,
default="amazon/aws-sagemaker-kfp-components",
default="public.ecr.aws/kubeflow-on-aws/aws-sagemaker-kfp-components",
help="The component container image.",
)
parser.add_argument(

View File

@@ -24,9 +24,9 @@ def get_artifact_in_minio(workflow_json, step_name, artifact_name, output_dir):
for artifact in node["outputs"]["artifacts"]:
if artifact["name"] == artifact_name:
s3_data = artifact["s3"]
s3_bucket = workflow_json["status"]["artifactRepositoryRef"][
"artifactRepository"
]["s3"]["bucket"]
s3_bucket = workflow_json["status"]["artifactRepositoryRef"]["artifactRepository"][
"s3"
]["bucket"]
minio_client = Minio(
"localhost:{}".format(minio_port),
access_key=minio_access_key,

View File

@@ -1,9 +1,9 @@
FROM amazonlinux:2
FROM public.ecr.aws/amazonlinux/amazonlinux:2
ENV PYTHONPATH /app
RUN yum update -y \
&& yum install -y -q \
&& yum install -y -q \
python3
# requirements.txt is copied separately to preserve cache