test(components): Check the Sagemaker component output rather than Controller (#9402)

* update tests to check output

bugfix

fix another bug

* address PR comments

* bug fix

* test fix

* namefix
ananth102 2023-05-18 17:27:46 -07:00 committed by GitHub
parent 5b680a2bb2
commit 7de50a5839
2 changed files with 84 additions and 5 deletions
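The change shifts the assertions from checking only the status that the ACK controller reports through the Kubernetes API to also checking the KFP component's own output artifacts, downloaded from MinIO via the pipeline run's workflow JSON. A rough sketch of the pattern, using the helper functions that appear in the diffs below; workflow_json, download_dir, k8s_client, and input_job_name stand in for values produced by the test setup:

import utils
from utils import ack_utils
from utils import minio_utils

# Declare which outputs of the sagemaker-trainingjob component to download.
outputs = {
    "sagemaker-trainingjob": ["training_job_status", "ack_resource_metadata"]
}
output_files = minio_utils.artifact_download_iterator(
    workflow_json, outputs, download_dir
)
# Read the component's reported status from the downloaded artifact ...
output_status = utils.read_from_file_in_tar(
    output_files["sagemaker-trainingjob"]["training_job_status"]
)
# ... and compare it with what the ACK controller reports for the same resource.
train_response = ack_utils._get_resource(k8s_client, input_job_name, "trainingjobs")
assert train_response["status"]["trainingJobStatus"] == output_status == "Completed"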


@@ -4,6 +4,8 @@ import utils
from utils import kfp_client_utils
from utils import ack_utils
from utils import sagemaker_utils
from utils import minio_utils
import json
@@ -23,7 +25,7 @@ def test_create_v2_endpoint(kfp_client, experiment_id, boto3_session, test_file_
utils.replace_placeholders(
os.path.join(test_file_dir, "config.yaml"),
os.path.join(download_dir, "config.yaml"),
shallow_canary=True
shallow_canary=True,
)
)
k8s_client = ack_utils.k8s_client()
@@ -50,7 +52,7 @@ def test_create_v2_endpoint(kfp_client, experiment_id, boto3_session, test_file_
] = input_model_name
try:
_, _, _ = kfp_client_utils.compile_run_monitor_pipeline(
_, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline(
kfp_client,
experiment_id,
test_params["PipelineDefinition"],
@@ -64,7 +66,65 @@ def test_create_v2_endpoint(kfp_client, experiment_id, boto3_session, test_file_
k8s_client, input_endpoint_name, "endpoints"
)
endpoint_describe["status"]["endpointStatus"] == "InService"
outputs = {
"sagemaker-endpoint": [
"endpoint_status",
"sagemaker_resource_name",
"ack_resource_metadata",
],
"sagemaker-endpointconfig": [
"sagemaker_resource_name",
"ack_resource_metadata",
],
"sagemaker-model": [
"sagemaker_resource_name",
"ack_resource_metadata",
]
}
DESIRED_COMPONENT_STATUS = "InService"
# Get output data
output_files = minio_utils.artifact_download_iterator(
workflow_json, outputs, download_dir
)
output_endpoint_status = utils.read_from_file_in_tar(
output_files["sagemaker-endpoint"]["endpoint_status"]
)
output_ack_resource_metadata_endpoint = kfp_client_utils.get_output_ack_resource_metadata(
output_files, "sagemaker-endpoint"
)
output_ack_resource_metadata_endpoint_config = kfp_client_utils.get_output_ack_resource_metadata(
output_files, "sagemaker-endpointconfig"
)
output_ack_resource_metadata_model = kfp_client_utils.get_output_ack_resource_metadata(
output_files, "sagemaker-model"
)
output_endpoint_name = utils.read_from_file_in_tar(
output_files["sagemaker-endpoint"]["sagemaker_resource_name"]
)
output_endpoint_config_name = utils.read_from_file_in_tar(
output_files["sagemaker-endpointconfig"]["sagemaker_resource_name"]
)
output_model_name = utils.read_from_file_in_tar(
output_files["sagemaker-model"]["sagemaker_resource_name"]
)
assert (
endpoint_describe["status"]["endpointStatus"]
== output_endpoint_status
== DESIRED_COMPONENT_STATUS
)
assert output_endpoint_name in output_ack_resource_metadata_endpoint["arn"]
assert output_endpoint_config_name in output_ack_resource_metadata_endpoint_config["arn"]
assert output_model_name in output_ack_resource_metadata_model["arn"]
# Verify that the update was successful by checking that the endpoint config name is the same as the second one.
if "ExpectedEndpointConfig" in test_params.keys():


@@ -5,6 +5,7 @@ from utils import kfp_client_utils
from utils import minio_utils
from utils import ack_utils
import ast
import json
@pytest.mark.parametrize(
@@ -12,7 +13,7 @@ import ast
[
pytest.param(
"resources/config/ack-training-job",
marks=[pytest.mark.canary_test, pytest.mark.shallow_canary,pytest.mark.v2],
marks=[pytest.mark.canary_test, pytest.mark.shallow_canary, pytest.mark.v2],
)
],
)
@@ -42,9 +43,13 @@ def test_trainingjobV2(kfp_client, experiment_id, test_file_dir):
outputs = {
"sagemaker-trainingjob": [
"model_artifacts",
"ack_resource_metadata",
"training_job_status",
]
}
DESIRED_COMPONENT_STATUS = "Completed"
# Get output data
output_files = minio_utils.artifact_download_iterator(
workflow_json, outputs, download_dir
@@ -52,11 +57,24 @@ def test_trainingjobV2(kfp_client, experiment_id, test_file_dir):
model_artifact = utils.read_from_file_in_tar(
output_files["sagemaker-trainingjob"]["model_artifacts"]
)
output_ack_resource_metadata = json.loads(
utils.read_from_file_in_tar(
output_files["sagemaker-trainingjob"]["ack_resource_metadata"]
).replace("'", '"')
)
output_training_job_status = utils.read_from_file_in_tar(
output_files["sagemaker-trainingjob"]["training_job_status"]
)
# Verify Training job was successful on SageMaker
print(f"training job name: {input_job_name}")
train_response = ack_utils._get_resource(k8s_client, input_job_name, "trainingjobs")
assert train_response["status"]["trainingJobStatus"] == "Completed"
assert (
train_response["status"]["trainingJobStatus"]
== output_training_job_status
== DESIRED_COMPONENT_STATUS
)
assert input_job_name in output_ack_resource_metadata["arn"]
# Verify model artifacts output was generated from this run
model_uri = ast.literal_eval(model_artifact)["s3ModelArtifacts"]
@@ -66,6 +84,7 @@ def test_trainingjobV2(kfp_client, experiment_id, test_file_dir):
utils.remove_dir(download_dir)
@pytest.mark.v2
def test_terminate_trainingjob(kfp_client, experiment_id):
k8s_client = ack_utils.k8s_client()