########################################################
#  _____ ____ ____    ___
# | ____/ ___|___ \  |_ _|_ __ ___   __ _  __ _  ___
# |  _|| |     __) |  | || '_ ` _ \ / _` |/ _` |/ _ \
# | |__| |___ / __/   | || | | | | | (_| | (_| |  __/
# |_____\____|_____| |___|_| |_| |_|\__,_|\__, |\___|
#                                         |___/
#  ____           _
# |  _ \ ___  ___(_)_ __   ___
# | |_) / _ \/ __| | '_ \ / _ \
# |  _ <  __/ (__| | |_) |  __/
# |_| \_\___|\___|_| .__/ \___|
#                  |_|
########################################################

FROM ubuntu:20.04 AS base_image

ENV DEBIAN_FRONTEND=noninteractive \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib:/opt/conda/lib" \
    PATH="${PATH}:/opt/conda/bin"

RUN apt-get update \
 && apt-get upgrade -y \
 && apt-get autoremove -y \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

FROM base_image AS ec2

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

ARG PYTHON_VERSION=3.8.10
ARG MAMBA_VERSION=4.12.0-2
ARG MMS_VERSION=1.1.8
ARG MX_URL=https://framework-binaries.s3.us-west-2.amazonaws.com/mxnet/v1.9.0_mkl_blas/aws_mx-1.9.0b20221209-py3-none-manylinux2014_x86_64.whl

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" \
    PYTHONIOENCODING=UTF-8 \
    LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    TEMP=/home/model-server/tmp \
    DEBIAN_FRONTEND=noninteractive \
    DLC_CONTAINER_TYPE=inference

# Set MKL_THREADING_LAYER=GNU to prevent issues between torch and numpy/mkl
ENV MKL_THREADING_LAYER=GNU

RUN apt-get update \
 && apt-get -y upgrade --only-upgrade systemd \
 && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    emacs \
    git \
    libopencv-dev \
    openjdk-8-jdk-headless \
    openjdk-8-jdk \
    openjdk-8-jre \
    vim \
    wget \
    unzip \
    zlib1g-dev \
    libreadline-gplv2-dev \
    libncursesw5-dev \
    libssl-dev \
    libsqlite3-dev \
    libgdbm-dev \
    libc6-dev \
    libbz2-dev \
    tk-dev \
    libffi-dev \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

# Install Python
RUN apt-get update \
 && apt-get install -y python3 python3-pip \
 && rm -rf /usr/bin/python /usr/bin/pip \
 && ln -s /usr/bin/python3 /usr/bin/python \
 && ln -s /usr/bin/pip3 /usr/bin/pip

WORKDIR /

RUN curl -L -o ~/mambaforge.sh https://github.com/conda-forge/miniforge/releases/download/${MAMBA_VERSION}/Mambaforge-${MAMBA_VERSION}-Linux-x86_64.sh \
 && chmod +x ~/mambaforge.sh \
 && ~/mambaforge.sh -b -p /opt/conda \
 && rm ~/mambaforge.sh \
 && /opt/conda/bin/conda install -c conda-forge \
    mkl \
    mkl-include \
 # Upstream conda has moved to 4.13, which is incompatible with mamba 0.22.1 and fails the conda-forge installs.
 # Running "conda update conda" before the "conda install -c conda-forge" command would automatically update conda to 4.13.
 # Running it after the conda-forge installs keeps conda at 4.12 while still updating other packages with conda 4.12.
 && /opt/conda/bin/conda update -y conda \
 && /opt/conda/bin/conda clean -ya

RUN pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && pip install --no-cache-dir \
    "awscli<2" \
    boto3 \
    ${MX_URL} \
    gluonnlp==0.10.0 \
    gluoncv==0.8.0 \
    multi-model-server==$MMS_VERSION \
    "numpy<1.20" \
    "onnx>=1.13.0" \
    # Protobuf >=3.21 results in protobuf version mismatch errors with the framework and other packages.
    "protobuf>=3.20.2,<3.21" \
    "wheel>=0.38.0" \
    # setuptools is already installed as part of the Python installation; pin a newer version here.
    "setuptools>=66.0.0" \
    # Upper-bound pin on matplotlib, as newer releases create a dependency conflict with numpy.
    "matplotlib<3.7"
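
# (Optional sanity check, not part of the original recipe: uncommenting the line below
# would fail the build early if the AWS MXNet wheel above did not install cleanly.)
# RUN python -c "import mxnet; print(mxnet.__version__)"
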
# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
# TODO: Should we be installing OpenCV in our image like this? Is there another way we can fix this?
RUN ln -s /dev/null /dev/raw1394

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-ec2-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

COPY deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/deep_learning_container.py

RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
 && rm -rf ${HOME_DIR}/oss_compliance* \
 && rm -iRf ${HOME_DIR}/.cache

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/aws-mx-1.9.0/license.txt -o /license.txt

EXPOSE 8080 8081

ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["multi-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]

#################################################################
#  ____                   __  __       _
# / ___|  __ _  __ _  ___|  \/  | __ _| | _____ _ __
# \___ \ / _` |/ _` |/ _ \ |\/| |/ _` | |/ / _ \ '__|
#  ___) | (_| | (_| |  __/ |  | | (_| |   <  __/ |
# |____/ \__,_|\__, |\___|_|  |_|\__,_|_|\_\___|_|
#              |___/
#  ___                              ____           _
# |_ _|_ __ ___   __ _  __ _  ___  |  _ \ ___  ___(_)_ __   ___
#  | || '_ ` _ \ / _` |/ _` |/ _ \ | |_) / _ \/ __| | '_ \ / _ \
#  | || | | | | | (_| | (_| |  __/ |  _ <  __/ (__| | |_) |  __/
# |___|_| |_| |_|\__,_|\__, |\___| |_| \_\___|\___|_| .__/ \___|
#                      |___/                        |_|
#################################################################

FROM ec2 AS sagemaker

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="1"

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true

# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG PYTHON_VERSION=3.8.10

WORKDIR /

RUN pip install --no-cache-dir \
    sagemaker-mxnet-inference

RUN pip install --no-cache-dir \
    "sagemaker-inference==1.6.1"

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} python \
 && rm -rf ${HOME_DIR}/oss_compliance* \
 && rm -iRf ${HOME_DIR}/.cache

EXPOSE 8080 8081

ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["multi-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
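
# ----------------------------------------------------------------------
# Example usage (illustrative only; the image tags below are placeholders,
# not names used by the original build system):
#
#   # Build the EC2-serving image
#   docker build --target ec2 -t aws-mx-inference:1.9.0-cpu-py38-ec2 .
#
#   # Build the SageMaker-compatible image
#   docker build --target sagemaker -t aws-mx-inference:1.9.0-cpu-py38-sagemaker .
#
#   # Run locally; MMS serves inference on 8080 and management on 8081 by default
#   docker run --rm -p 8080:8080 -p 8081:8081 aws-mx-inference:1.9.0-cpu-py38-ec2
#
#   # Health check against the running server
#   curl http://localhost:8080/ping
# ----------------------------------------------------------------------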