FROM ubuntu:16.04

LABEL maintainer="Amazon AI"
LABEL dlc_major_version="3"
# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG PYTHON_VERSION=3.6.13
ARG PYTORCH_VERSION=1.4.0
ARG TORCHVISION_VERSION=0.5.0
ARG MMS_VERSION=1.1.2

# See http://bugs.python.org/issue19846
ENV LANG C.UTF-8
ENV LD_LIBRARY_PATH /opt/conda/lib/:$LD_LIBRARY_PATH
ENV PATH /opt/conda/bin:$PATH
ENV SAGEMAKER_SERVING_MODULE sagemaker_pytorch_serving_container.serving:main
ENV TEMP=/home/model-server/tmp

# Build tools, runtime libraries, and the headless JDK required by Multi Model Server
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    ca-certificates \
    cmake \
    curl \
    emacs \
    git \
    jq \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    unzip \
    zlib1g-dev

# Install Miniconda, pin the Python version, and install core scientific packages
RUN curl -L -o ~/miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
 && chmod +x ~/miniconda.sh \
 && ~/miniconda.sh -b -p /opt/conda \
 && rm ~/miniconda.sh \
 && /opt/conda/bin/conda update conda \
 && /opt/conda/bin/conda install -c conda-forge \
    python=$PYTHON_VERSION \
 && /opt/conda/bin/conda install -y \
    cython==0.29.12 \
    ipython==7.7.0 \
    mkl-include==2019.4 \
    mkl==2019.4 \
    numpy==1.16.4 \
    scipy==1.3.0 \
    typing==3.6.4 \
 && /opt/conda/bin/conda clean -ya

# Install awscli and opencv from conda-forge, data-science packages, CPU-only PyTorch,
# and the Multi Model Server
RUN conda install -c conda-forge \
    # install PyYAML>=5.4,<5.5 to avoid conflict with latest awscli
    "PyYAML>=5.4,<5.5" \
    "awscli<2" \
    opencv==4.0.1 \
 && conda install -y \
    scikit-learn==0.21.2 \
    pandas==0.25.0 \
    h5py==2.9.0 \
    requests==2.22.0 \
 && conda install \
    pytorch==$PYTORCH_VERSION \
    torchvision==$TORCHVISION_VERSION cpuonly -c pytorch \
 && conda clean -ya \
 && pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
 && ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
 && pip install multi-model-server==$MMS_VERSION

# Create the unprivileged model-server user and its temp directory
RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py

# SageMaker PyTorch inference toolkit
RUN pip install --no-cache-dir "sagemaker-pytorch-inference<2"

# Fetch the PyTorch license shipped with this image
RUN curl https://aws-dlc-licenses.s3.amazonaws.com/pytorch-1.4.0/license.txt -o /license.txt

RUN pip install -U \
    pillow==7.1.0 \
    awscli \
    ruamel-yaml \
    # pyopenssl requires cryptography>3.2
    "cryptography>3.2"

# Run the OSS compliance scan and generate license attribution files
RUN HOME_DIR=/root \
 && curl -o ${HOME_DIR}/oss_compliance.zip https://aws-dlinfra-utilities.s3.amazonaws.com/oss_compliance.zip \
 && unzip ${HOME_DIR}/oss_compliance.zip -d ${HOME_DIR}/ \
 && cp ${HOME_DIR}/oss_compliance/test/testOSSCompliance /usr/local/bin/testOSSCompliance \
 && chmod +x /usr/local/bin/testOSSCompliance \
 && chmod +x ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh \
 && ${HOME_DIR}/oss_compliance/generate_oss_compliance.sh ${HOME_DIR} ${PYTHON} \
 && rm -rf ${HOME_DIR}/oss_compliance*

EXPOSE 8080 8081

ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["multi-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
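
# A minimal sketch of building and smoke-testing this image locally. The image tag
# below is an assumption (not part of this file), and the /ping check assumes the
# copied config.properties keeps the default MMS inference port of 8080:
#
#   docker build -t pytorch-inference:1.4.0-cpu-py36 .
#   docker run -p 8080:8080 -p 8081:8081 pytorch-inference:1.4.0-cpu-py36
#   curl http://localhost:8080/ping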