FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu16.04
# NCCL_VERSION=2.4.7, CUDNN_VERSION=7.6.2.24

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="3"
# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true

# Build arguments to select the Python, PyTorch, TorchVision, and MMS versions
ARG PYTHON_VERSION=3.6.13
ARG PYTORCH_VERSION=1.4.0
ARG TORCHVISION_VERSION=0.5.0
ARG MMS_VERSION=1.1.2

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

RUN apt-get update \
 && apt-get install -y --allow-downgrades --allow-change-held-packages --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    emacs \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libgomp1 \
    libibverbs-dev \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    unzip \
    zlib1g-dev

# Install OpenSSH. Allow OpenSSH to talk to containers without asking for confirmation
RUN apt-get install -y --no-install-recommends \
    openssh-client \
    openssh-server \
 && mkdir -p /var/run/sshd \
 && cat /etc/ssh/ssh_config | grep -v StrictHostKeyChecking > /etc/ssh/ssh_config.new \
 && echo "    StrictHostKeyChecking no" >> /etc/ssh/ssh_config.new \
 && mv /etc/ssh/ssh_config.new /etc/ssh/ssh_config

RUN curl -L -o ~/miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -c conda-forge \
    python=$PYTHON_VERSION \
 && /opt/conda/bin/conda install -y \
    cython==0.29.12 \
    ipython==7.7.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.3.0 \
    typing==3.6.4 \
 && /opt/conda/bin/conda clean -ya

RUN conda install -c \
    pytorch magma-cuda101==2.5.1 \
 && conda install -c \
    conda-forge \
    # install PyYAML>=5.4,<5.5 to avoid conflict with latest awscli
    "PyYAML>=5.4,<5.5" \
    "awscli<2" \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.21.2 \
    pandas==0.25.0 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install -c \
    pytorch \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION \
    cudatoolkit=10.1 \
 && conda clean -ya \
 && /opt/conda/bin/conda config --set ssl_verify False \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
 && pip install multi-model-server==$MMS_VERSION

RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt

RUN pip install -U \
    pillow==7.1.0 \
    awscli \
    ruamel-yaml \
    # pyopenssl requires cryptography>3.2
    "cryptography>3.2"

RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} ${PYTHON} \
 && rm -rf ${HOME_DIR}/oss_compliance*

EXPOSE 8080 8081

ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["multi-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
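
# Example usage (illustrative sketch only, not part of the build): building the
# image and running the multi-model-server container locally. The image tag
# "pytorch-inference:1.4.0-gpu-py36" and the host directory "./model_store" are
# hypothetical placeholders; running with GPUs assumes the NVIDIA Container
# Toolkit is installed. The /opt/ml/model mount follows the SageMaker
# convention; the actual model store location is governed by config.properties.
#
#   docker build -t pytorch-inference:1.4.0-gpu-py36 .
#   docker run --rm --gpus all \
#       -p 8080:8080 -p 8081:8081 \
#       -v "$(pwd)/model_store:/opt/ml/model" \
#       pytorch-inference:1.4.0-gpu-py36
#
# Port 8080 serves inference requests and 8081 serves the MMS management API,
# matching the EXPOSE directive above.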