11 changes: 9 additions & 2 deletions test/canary/canary.buildspec.yaml
@@ -19,8 +19,15 @@ phases:
    commands:
      # Run tests
      - docker run --name ack-canary $(env | cut -f1 -d= | sed 's/^/-e /') --mount type=bind,source="$(pwd)/",target="/${SERVICE}-controller/" ${ECR_CACHE_URI}:latest


  post_build:
    commands:
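      # Copy the junit report out of the canary container; do not fail the build if the file is missing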
      - docker cp ack-canary:/sagemaker-controller/test/canary/integration_tests.xml /tmp/results.xml || true
      # Push test image to cache ECR repo
      - docker push ${ECR_CACHE_URI}:latest || true


reports:
  IntegrationTestReport:
    files:
      - "results.xml"
    base-directory: "/tmp"
91 changes: 91 additions & 0 deletions test/canary/scripts/push_stats_to_cloudwatch.py
@@ -0,0 +1,91 @@
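# Reads the canary pytest junit report and publishes the failure/success/total
# counts to CloudWatch as custom metrics in the Canary_Metrics namespace.
# The report path below is relative to the scripts directory, where run_test.sh
# invokes this script from its cleanup handler.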
import boto3
from datetime import datetime
import xml.etree.ElementTree as ET
import os


xml_path = "../integration_tests.xml"

def readXML_and_publish_metrics_to_cw():
    if os.path.isfile(xml_path):
        tree = ET.parse(xml_path)
        testsuite = tree.find("testsuite")
        failures = testsuite.attrib["failures"]
        tests = testsuite.attrib["tests"]
        successes = int(tests) - int(failures)
    else:
        print(f"{xml_path} does not exist.")
        print(os.getcwd())
        failures = 0
        successes = 0
        tests = 23

    timestamp = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")

    print(f"Failures: {failures}")
    print(f"Total tests: {tests}")
    print(f"Success: {successes}")

    # push to cloudwatch
    cw_client = boto3.client("cloudwatch")
    project_name = os.getenv("PROJECT_NAME")

    # Define the metric data
    metric_data = [
        {
            "MetricName": "failures",
            "Timestamp": timestamp,
            "Dimensions": [
                {"Name": "CodeBuild Project Name", "Value": project_name},
            ],
            "Value": int(failures),
            "Unit": "Count",
        },
        {
            "MetricName": "total_tests",
            "Timestamp": timestamp,
            "Dimensions": [
                {"Name": "CodeBuild Project Name", "Value": project_name},
            ],
            "Value": int(tests),
            "Unit": "Count",
        },
        {
            "MetricName": "successes",
            "Timestamp": timestamp,
            "Dimensions": [
                {"Name": "CodeBuild Project Name", "Value": project_name},
            ],
            "Value": int(successes),
            "Unit": "Count",
        },
    ]

    # Use the put_metric_data method to push the metric data to CloudWatch
    try:
        response = cw_client.put_metric_data(
            Namespace="Canary_Metrics", MetricData=metric_data
        )
        if response["ResponseMetadata"]["HTTPStatusCode"] == 200:
            print("Successfully pushed data to CloudWatch")
            # return 200 status code if successful
            return 200
        else:
            # raise exception if the status code is not 200
            raise Exception(
                "Unexpected response status code: {}".format(
                    response["ResponseMetadata"]["HTTPStatusCode"]
                )
            )
    except Exception as e:
        print("Error pushing data to CloudWatch: {}".format(e))
        # raise exception if there was an error pushing data to CloudWatch
        raise


def main():
    readXML_and_publish_metrics_to_cw()


if __name__ == "__main__":
    main()
8 changes: 7 additions & 1 deletion test/canary/scripts/run_test.sh
@@ -28,6 +28,11 @@ function print_controller_logs() {
}

function cleanup {
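    # This handler runs on every exit (see `trap cleanup EXIT` below), so test
    # stats are published even when the test run fails.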
    # push metrics to CloudWatch
    echo "Pushing CodeBuild stats to CloudWatch."
    cd $SCRIPTS_DIR
    python push_stats_to_cloudwatch.py

    echo "Cleaning up resources"
    set +e
    kubectl delete monitoringschedules --all
@@ -66,6 +71,7 @@ function cleanup {
}
trap cleanup EXIT


# Update kubeconfig
aws --region $CLUSTER_REGION eks update-kubeconfig --name $CLUSTER_NAME

@@ -87,7 +93,7 @@ pushd $E2E_DIR

# run tests
echo "Run Tests"
pytest_args=( -n 15 --dist loadfile --log-cli-level INFO )
pytest_args=( -n 15 --dist loadfile --log-cli-level INFO --junitxml ../canary/integration_tests.xml)
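# The junit report written above is read by push_stats_to_cloudwatch.py and copied
# into the CodeBuild report group by canary.buildspec.yaml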
if [[ $SERVICE_REGION =~ ^(eu-north-1|eu-west-3)$ ]]; then
    # If select_regions_1 is true we run the notebook_instance test
    pytest_args+=(-m "canary or select_regions_1")