refactor component build codes (#260)

* refactor codes

* change name handler to handle
nealgao 2018-11-20 20:03:34 -08:00 committed by k8s-ci-robot
parent 7a89c98f0a
commit 6937e780c3
2 changed files with 39 additions and 46 deletions
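
In short, the refactor extracts the GCS upload, Kaniko job run, and blob cleanup that both build paths repeated into one private helper, _build_image_from_tarball, and has build_image_from_func and build_image_from_dockerfile delegate to it. Below is a minimal, self-contained sketch of that structure; the storage class is a hypothetical stand-in for the SDK's GCSHelper, and the Kaniko/K8sHelper details are elided rather than reproduced.

import logging
import os
import tempfile

logging.basicConfig(level=logging.INFO)


class FakeStorage:
    """Hypothetical stand-in for GCSHelper: stores blobs in a dict, not in GCS."""

    def __init__(self):
        self.blobs = {}

    def upload(self, local_path, remote_path):
        with open(local_path, 'rb') as f:
            self.blobs[remote_path] = f.read()

    def remove(self, remote_path):
        self.blobs.pop(remote_path, None)


class ImageBuilderSketch:
    """Illustrates the extracted-helper structure only; not the real ImageBuilder."""

    def __init__(self, storage, gcs_path):
        self._storage = storage
        self._gcs_path = gcs_path

    def _build_image_from_tarball(self, local_tarball_path, namespace, timeout):
        # Shared tail of both build paths: upload the build context, run the
        # build job (elided here), then clean up the uploaded blob. The timeout
        # parameter is kept only to mirror the real signature.
        self._storage.upload(local_tarball_path, self._gcs_path)
        logging.info('Start a (pretend) kaniko job for build in namespace %s.', namespace)
        logging.info('Kaniko job complete.')
        self._storage.remove(self._gcs_path)

    def build_image_from_dockerfile(self, dockerfile_path, timeout, namespace):
        # Each public entry point now only prepares its own tarball and delegates.
        with tempfile.TemporaryDirectory() as local_build_dir:
            local_tarball_path = os.path.join(local_build_dir, 'docker.tmp.tar.gz')
            # The real code packages the dockerfile via DockerfileHelper; a plain
            # copy keeps this sketch self-contained.
            with open(dockerfile_path, 'rb') as src, open(local_tarball_path, 'wb') as dst:
                dst.write(src.read())
            self._build_image_from_tarball(local_tarball_path, namespace, timeout)

Keeping the shared upload/run/cleanup sequence in one place means a change to that sequence only has to be made once.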

@@ -328,11 +328,25 @@ class ImageBuilder(object):
complete_component_code = dedecorated_component_src + '\n' + wrapper_code + '\n' + codegen.end()
return complete_component_code
def _build_image_from_tarball(self, local_tarball_path, namespace, timeout):
GCSHelper.upload_gcs_file(local_tarball_path, self._gcs_path)
kaniko_spec = self._generate_kaniko_spec(namespace=namespace,
arc_dockerfile_name=self._arc_dockerfile_name,
gcs_path=self._gcs_path,
target_image=self._target_image)
# Run kaniko job
logging.info('Start a kaniko job for build.')
k8s_helper = K8sHelper()
k8s_helper.run_job(kaniko_spec, timeout)
logging.info('Kaniko job complete.')
# Clean up
GCSHelper.remove_gcs_blob(self._gcs_path)
def build_image_from_func(self, component_func, namespace, base_image, timeout, dependency):
""" build_image builds an image for the given python function"""
# Generate entrypoint and serialization python codes
with tempfile.TemporaryDirectory() as local_build_dir:
# Generate entrypoint and serialization python codes
local_python_filepath = os.path.join(local_build_dir, self._arc_python_filepath)
logging.info('Generate entrypoint and serialization codes.')
complete_component_code = self._generate_entrypoint(component_func)
@@ -341,45 +355,24 @@ class ImageBuilder(object):
# Prepare build files
logging.info('Generate build files.')
local_tarball_path = os.path.join(local_build_dir, 'docker.tmp.tar.gz')
docker_helper = DockerfileHelper(arc_dockerfile_name=self._arc_dockerfile_name)
local_tarball_file = os.path.join(local_build_dir, 'docker.tmp.tar.gz')
docker_helper.prepare_docker_tarball_with_py(python_filepath=local_python_filepath,
arc_python_filename=self._arc_python_filepath,
base_image=base_image, local_tarball_path=local_tarball_file,
base_image=base_image,
local_tarball_path=local_tarball_path,
dependency=dependency)
GCSHelper.upload_gcs_file(local_tarball_file, self._gcs_path)
kaniko_spec = self._generate_kaniko_spec(namespace=namespace,
arc_dockerfile_name=self._arc_dockerfile_name,
gcs_path=self._gcs_path,
target_image=self._target_image)
# Run kaniko job
logging.info('Start a kaniko job for build.')
k8s_helper = K8sHelper()
k8s_helper.run_job(kaniko_spec, timeout)
logging.info('Kaniko job complete.')
# Clean up
GCSHelper.remove_gcs_blob(self._gcs_path)
self._build_image_from_tarball(local_tarball_path, namespace, timeout)
def build_image_from_dockerfile(self, dockerfile_path, timeout, namespace):
""" build_image_from_dockerfile builds an image directly """
""" build_image_from_dockerfile builds an image based on the dockerfile """
with tempfile.TemporaryDirectory() as local_build_dir:
# Prepare build files
logging.info('Generate build files.')
local_tarball_path = os.path.join(local_build_dir, 'docker.tmp.tar.gz')
docker_helper = DockerfileHelper(arc_dockerfile_name=self._arc_dockerfile_name)
local_tarball_file = os.path.join(local_build_dir, 'docker.tmp.tar.gz')
docker_helper.prepare_docker_tarball(dockerfile_path, local_tarball_path=local_tarball_file)
GCSHelper.upload_gcs_file(local_tarball_file, self._gcs_path)
kaniko_spec = self._generate_kaniko_spec(namespace=namespace, arc_dockerfile_name=self._arc_dockerfile_name,
gcs_path=self._gcs_path, target_image=self._target_image)
logging.info('Start a kaniko job for build.')
k8s_helper = K8sHelper()
k8s_helper.run_job(kaniko_spec, timeout)
logging.info('Kaniko job complete.')
# Clean up
GCSHelper.remove_gcs_blob(self._gcs_path)
docker_helper.prepare_docker_tarball(dockerfile_path, local_tarball_path=local_tarball_path)
self._build_image_from_tarball(local_tarball_path, namespace, timeout)
def _configure_logger(logger):
""" _configure_logger configures the logger such that the info level logs


@@ -131,8 +131,8 @@ class TestDockerfileHelper(unittest.TestCase):
docker_helper = DockerfileHelper(arc_dockerfile_name='')
docker_helper._wrap_files_in_tarball(temp_tarball, {'dockerfile':temp_file_one, 'main.py':temp_file_two})
self.assertTrue(os.path.exists(temp_tarball))
temp_tarball_handler = tarfile.open(temp_tarball)
temp_files = temp_tarball_handler.getmembers()
temp_tarball_handle = tarfile.open(temp_tarball)
temp_files = temp_tarball_handle.getmembers()
self.assertTrue(len(temp_files) == 2)
for temp_file in temp_files:
self.assertTrue(temp_file.name in ['dockerfile', 'main.py'])
@@ -183,21 +183,21 @@ ENTRYPOINT ["python3", "/ml/main.py"]'''
# prepare
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
python_filepath = os.path.join(test_data_dir, 'basic.py')
generated_tarball = os.path.join(test_data_dir, 'test_docker.tar.gz')
local_tarball_path = os.path.join(test_data_dir, 'test_docker.tar.gz')
# check
docker_helper = DockerfileHelper(arc_dockerfile_name='dockerfile')
docker_helper.prepare_docker_tarball_with_py(arc_python_filename='main.py', python_filepath=python_filepath,
base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.8.0',
local_tarball_path=generated_tarball)
temp_tarball_handler = tarfile.open(generated_tarball)
temp_files = temp_tarball_handler.getmembers()
local_tarball_path=local_tarball_path)
temp_tarball_handle = tarfile.open(local_tarball_path)
temp_files = temp_tarball_handle.getmembers()
self.assertTrue(len(temp_files) == 2)
for temp_file in temp_files:
self.assertTrue(temp_file.name in ['dockerfile', 'main.py'])
# clean up
os.remove(generated_tarball)
os.remove(local_tarball_path)
def test_prepare_docker_with_py_and_dependency(self):
""" Test the whole prepare docker from python function and dependencies """
@@ -216,8 +216,8 @@ ENTRYPOINT ["python3", "/ml/main.py"]'''
docker_helper.prepare_docker_tarball_with_py(arc_python_filename='main.py', python_filepath=python_filepath,
base_image='gcr.io/ngao-mlpipeline-testing/tensorflow:1.8.0',
local_tarball_path=local_tarball_path, dependency=dependencies)
temp_tarball_handler = tarfile.open(local_tarball_path)
temp_files = temp_tarball_handler.getmembers()
temp_tarball_handle = tarfile.open(local_tarball_path)
temp_files = temp_tarball_handle.getmembers()
self.assertTrue(len(temp_files) == 3)
for temp_file in temp_files:
self.assertTrue(temp_file.name in ['dockerfile', 'main.py', 'requirements.txt'])
@@ -232,19 +232,19 @@ ENTRYPOINT ["python3", "/ml/main.py"]'''
test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
dockerfile_path = os.path.join(test_data_dir, 'component.target.dockerfile')
Path(dockerfile_path).touch()
generated_tarball = os.path.join(test_data_dir, 'test_docker.tar.gz')
local_tarball_path = os.path.join(test_data_dir, 'test_docker.tar.gz')
# check
docker_helper = DockerfileHelper(arc_dockerfile_name='dockerfile')
docker_helper.prepare_docker_tarball(dockerfile_path=dockerfile_path, local_tarball_path=generated_tarball)
temp_tarball_handler = tarfile.open(generated_tarball)
temp_files = temp_tarball_handler.getmembers()
docker_helper.prepare_docker_tarball(dockerfile_path=dockerfile_path, local_tarball_path=local_tarball_path)
temp_tarball_handle = tarfile.open(local_tarball_path)
temp_files = temp_tarball_handle.getmembers()
self.assertTrue(len(temp_files) == 1)
for temp_file in temp_files:
self.assertTrue(temp_file.name in ['dockerfile'])
# clean up
os.remove(generated_tarball)
os.remove(local_tarball_path)
os.remove(dockerfile_path)
# hello function is used by the TestCodeGenerator to verify the auto generated python function
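
The renamed handle variables above all follow the same pattern: open the generated tarball, list its members, and assert every member name is expected. A minimal standalone version of that check, using only the standard library (file names and contents here are made up for the example):

import os
import tarfile
import tempfile
import unittest


class TestTarballContents(unittest.TestCase):
    """Standalone illustration of the membership check used in the tests above."""

    def test_tarball_members(self):
        with tempfile.TemporaryDirectory() as tmp:
            dockerfile = os.path.join(tmp, 'dockerfile')
            with open(dockerfile, 'w') as f:
                f.write('FROM python:3.7\n')  # arbitrary content for the example
            local_tarball_path = os.path.join(tmp, 'test_docker.tar.gz')
            with tarfile.open(local_tarball_path, 'w:gz') as tar:
                tar.add(dockerfile, arcname='dockerfile')
            # Same pattern as the refactored tests: open a handle, list the
            # members, and assert each member name is one we expect.
            temp_tarball_handle = tarfile.open(local_tarball_path)
            temp_files = temp_tarball_handle.getmembers()
            self.assertEqual(len(temp_files), 1)
            for temp_file in temp_files:
                self.assertIn(temp_file.name, ['dockerfile'])
            temp_tarball_handle.close()


if __name__ == '__main__':
    unittest.main()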