---
# Buildspec for PyTorch 1.12 inference Docker images (EC2 and SageMaker targets).
# account_id / region are intentionally left null here; the build tooling injects
# them at runtime, and the &ACCOUNT_ID / &REGION anchors are aliased below when
# composing the ECR repository URI.
account_id: &ACCOUNT_ID
region: &REGION
framework: &FRAMEWORK pytorch
version: &VERSION 1.12.1
short_version: &SHORT_VERSION "1.12"
arch_type: x86

repository_info:
  inference_repository: &INFERENCE_REPOSITORY
    image_type: &INFERENCE_IMAGE_TYPE inference
    root: !join [ *FRAMEWORK, "/", *INFERENCE_IMAGE_TYPE ]
    repository_name: &REPOSITORY_NAME !join [ pr, "-", *FRAMEWORK, "-", *INFERENCE_IMAGE_TYPE ]
    repository: &REPOSITORY !join [ *ACCOUNT_ID, .dkr.ecr., *REGION, .amazonaws.com/, *REPOSITORY_NAME ]

context:
  inference_context: &INFERENCE_CONTEXT
    torchserve-ec2-entrypoint:
      source: docker/build_artifacts/torchserve-ec2-entrypoint.py
      target: torchserve-ec2-entrypoint.py
    torchserve-entrypoint:
      source: docker/build_artifacts/torchserve-entrypoint.py
      target: torchserve-entrypoint.py
    config:
      source: docker/build_artifacts/config.properties
      target: config.properties
    deep_learning_container:
      source: ../../src/deep_learning_container.py
      target: deep_learning_container.py

# NOTE: anchors such as &DEVICE_TYPE, &DOCKER_PYTHON_VERSION, &TAG_PYTHON_VERSION,
# &CUDA_VERSION, and &OS_VERSION are deliberately redefined in each image entry.
# YAML aliases resolve to the most recent definition at the point of use, so the
# tag/docker_file joins inside each image pick up that image's own values.
images:
  BuildEC2CPUPTInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &PYTORCH_CPU_INFERENCE_PY3 false
    image_size_baseline: 4899
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py38
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION, "-ec2" ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    target: ec2
    context:
      <<: *INFERENCE_CONTEXT
  BuildEC2GPUPTInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &PYTORCH_GPU_INFERENCE_PY3 false
    image_size_baseline: 14000
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py38
    cuda_version: &CUDA_VERSION cu116
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION, "-ec2" ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    target: ec2
    context:
      <<: *INFERENCE_CONTEXT
  BuildSageMakerCPUPTInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &PYTORCH_CPU_INFERENCE_PY3 false
    image_size_baseline: 4899
    device_type: &DEVICE_TYPE cpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py38
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *OS_VERSION, "-sagemaker" ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /Dockerfile., *DEVICE_TYPE ]
    target: sagemaker
    context:
      <<: *INFERENCE_CONTEXT
  BuildSageMakerGPUPTInferencePy3DockerImage:
    <<: *INFERENCE_REPOSITORY
    build: &PYTORCH_GPU_INFERENCE_PY3 false
    image_size_baseline: 14000
    device_type: &DEVICE_TYPE gpu
    python_version: &DOCKER_PYTHON_VERSION py3
    tag_python_version: &TAG_PYTHON_VERSION py38
    # SageMaker GPU image pins CUDA 11.3 (EC2 GPU image above uses cu116).
    cuda_version: &CUDA_VERSION cu113
    os_version: &OS_VERSION ubuntu20.04
    tag: !join [ *VERSION, "-", *DEVICE_TYPE, "-", *TAG_PYTHON_VERSION, "-", *CUDA_VERSION, "-", *OS_VERSION, "-sagemaker" ]
    docker_file: !join [ docker/, *SHORT_VERSION, /, *DOCKER_PYTHON_VERSION, /, *CUDA_VERSION, /Dockerfile., *DEVICE_TYPE ]
    target: sagemaker
    context:
      <<: *INFERENCE_CONTEXT