Skip to content

Release 1.6.0 Dockerfiles #82

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 24 commits on
Jan 4, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions buildspec-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ version: 0.2

env:
variables:
FRAMEWORK_FULL_VERSION: '1.4.1'
FRAMEWORK_SHORT_VERSION: '1.4'
FRAMEWORK_FULL_VERSION: '1.6.0'
FRAMEWORK_SHORT_VERSION: '1.6'
AWS_DEFAULT_REGION: 'us-west-2'
ECR_REPO: 'sagemaker-mxnet-serving'
GITHUB_REPO: 'sagemaker-mxnet-serving-container'
Expand Down
13 changes: 9 additions & 4 deletions buildspec.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ version: 0.2

env:
variables:
FRAMEWORK_FULL_VERSION: '1.4.1'
FRAMEWORK_FULL_VERSION: '1.6.0'
EI_FRAMEWORK_VERSION: '1.4.1'
AWS_DEFAULT_REGION: 'us-west-2'
ECR_REPO: 'sagemaker-test'
GITHUB_REPO: 'sagemaker-mxnet-serving-container'
Expand Down Expand Up @@ -31,14 +32,18 @@ phases:
- mv dist/sagemaker_mxnet_serving_container-*.tar.gz dist/sagemaker_mxnet_serving_container.tar.gz
- cp dist/sagemaker_mxnet_serving_container.tar.gz docker/$FRAMEWORK_FULL_VERSION/py3/sagemaker_mxnet_serving_container.tar.gz
- cp dist/sagemaker_mxnet_serving_container.tar.gz docker/$FRAMEWORK_FULL_VERSION/py2/sagemaker_mxnet_serving_container.tar.gz
- cp dist/sagemaker_mxnet_serving_container.tar.gz docker/$EI_FRAMEWORK_VERSION/py3/sagemaker_mxnet_serving_container.tar.gz
- cp dist/sagemaker_mxnet_serving_container.tar.gz docker/$EI_FRAMEWORK_VERSION/py2/sagemaker_mxnet_serving_container.tar.gz
- cp src/sagemaker_mxnet_serving_container/deep_learning_container.py docker/$FRAMEWORK_FULL_VERSION/py3/deep_learning_container.py
- cp src/sagemaker_mxnet_serving_container/deep_learning_container.py docker/$FRAMEWORK_FULL_VERSION/py2/deep_learning_container.py

- CPU_PY2_TAG=$FRAMEWORK_FULL_VERSION-cpu-py2
- CPU_PY3_TAG=$FRAMEWORK_FULL_VERSION-cpu-py3
- GPU_PY2_TAG=$FRAMEWORK_FULL_VERSION-gpu-py2
- GPU_PY3_TAG=$FRAMEWORK_FULL_VERSION-gpu-py3

# build images
- python3 scripts/build_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO
- python3 scripts/build_all.py --version $FRAMEWORK_FULL_VERSION --eia-version $EI_FRAMEWORK_VERSION --account $ACCOUNT --repo $ECR_REPO

# run cpu local integration tests
- |
Expand All @@ -49,7 +54,7 @@ phases:
fi

# push docker images to ECR
- python3 scripts/publish_all.py --version $FRAMEWORK_FULL_VERSION --account $ACCOUNT --repo $ECR_REPO
- python3 scripts/publish_all.py --version $FRAMEWORK_FULL_VERSION --eia-version $EI_FRAMEWORK_VERSION --account $ACCOUNT --repo $ECR_REPO

# launch remote gpu instance
- create-key-pair
Expand Down Expand Up @@ -77,7 +82,7 @@ phases:
# run eia tests
- |
if has-matching-changes "test/" "tests/" "src/*.py" "docker/*" "buildspec.yml"; then
IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py -n 2 --py-version 2,3 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $FRAMEWORK_FULL_VERSION --reruns 3
IGNORE_COVERAGE=- tox -e py36 -- test/integration/sagemaker/test_elastic_inference.py -n 2 --py-version 2,3 --processor cpu --accelerator-type $EI_ACCELERATOR_TYPE --region $AWS_DEFAULT_REGION --docker-base-name "$ECR_REPO-eia" --aws-id $ACCOUNT --framework-version $EI_FRAMEWORK_VERSION
else
echo "skipping sagemaker eia tests"
fi
Expand Down
1 change: 1 addition & 0 deletions docker/1.4.1/py2/Dockerfile.eia
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ RUN pip install --no-cache-dir \
mxnet-model-server==$MMS_VERSION \
keras-mxnet==2.2.4.1 \
onnx==1.4.1 \
sagemaker-inference==1.1.0 \
/sagemaker_mxnet_serving_container.tar.gz \
&& rm /sagemaker_mxnet_serving_container.tar.gz

Expand Down
1 change: 1 addition & 0 deletions docker/1.4.1/py3/Dockerfile.eia
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ RUN pip install --no-cache-dir \
mxnet-model-server==$MMS_VERSION \
keras-mxnet==2.2.4.1 \
onnx==1.4.1 \
sagemaker-inference==1.1.0 \
/sagemaker_mxnet_serving_container.tar.gz \
&& rm /sagemaker_mxnet_serving_container.tar.gz

Expand Down
82 changes: 82 additions & 0 deletions docker/1.6.0/py2/Dockerfile.cpu
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG MMS_VERSION=1.0.8
# AWS-built MXNet 1.6.0 (MKL, CPU) wheel; URL pinned for reproducible builds
ARG MX_URL=https://aws-mxnet-pypi.s3-us-west-2.amazonaws.com/1.6.0/aws_mxnet_mkl-1.6.0rc0-py2.py3-none-manylinux1_x86_64.whl
ARG PYTHON=python
ARG PYTHON_PIP=python-pip
ARG PIP=pip

# TEMP points MMS scratch space at a directory writable by the model-server user
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" \
    PYTHONIOENCODING=UTF-8 \
    LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    TEMP=/home/model-server/tmp

# Base system packages; apt lists are removed in the same layer to keep the image small
RUN apt-get update \
 && apt-get -y install --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    git \
    libopencv-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

# Python runtime + pip. Skip recommended packages and clean the apt lists in
# the same layer, matching the hygiene of the layer above.
RUN apt-get update \
 && apt-get install -y --no-install-recommends \
    ${PYTHON} \
    ${PYTHON_PIP} \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

RUN ${PIP} --no-cache-dir install --upgrade \
    pip \
    setuptools

WORKDIR /

COPY sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

# Install MXNet, the model server, and the serving container package; the
# sdist is removed afterwards in the same layer so it never bloats the image.
RUN ${PIP} install --no-cache-dir \
    ${MX_URL} \
    mxnet-model-server==$MMS_VERSION \
    keras-mxnet==2.2.4.1 \
    numpy==1.16.5 \
    onnx==1.4.1 \
    /sagemaker_mxnet_serving_container.tar.gz \
 && rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
# TODO: Should we be installing OpenCV in our image like this? Is there another way we can fix this?
RUN ln -s /dev/null /dev/raw1394

# Unprivileged user that owns the MMS scratch directory (see ENV TEMP above)
RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server
COPY deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/mxnet/license.txt -o /license.txt

# 8080 = inference, 8081 = management (documentation only; publish at run time)
EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
79 changes: 79 additions & 0 deletions docker/1.6.0/py2/Dockerfile.gpu
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu16.04

LABEL maintainer="Amazon AI"

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true

ARG MMS_VERSION=1.0.8
# AWS-built MXNet 1.6.0 (MKL, CUDA 10.1) wheel; URL pinned for reproducible builds
ARG MX_URL=https://aws-mxnet-pypi.s3-us-west-2.amazonaws.com/1.6.0/aws_mxnet_cu101mkl-1.6.0rc0-py2.py3-none-manylinux1_x86_64.whl
ARG PYTHON=python
ARG PYTHON_PIP=python-pip
ARG PIP=pip

# TEMP points MMS scratch space at a directory writable by the model-server user
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" \
    PYTHONIOENCODING=UTF-8 \
    LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    TEMP=/home/model-server/tmp

# Base system packages; apt lists are removed in the same layer to keep the image small
RUN apt-get update \
 && apt-get -y install --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    git \
    libopencv-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

# Python runtime + pip. Skip recommended packages and clean the apt lists in
# the same layer, matching the hygiene of the layer above.
RUN apt-get update \
 && apt-get install -y --no-install-recommends \
    ${PYTHON} \
    ${PYTHON_PIP} \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

RUN ${PIP} --no-cache-dir install --upgrade \
    pip \
    setuptools

WORKDIR /

COPY sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

# Install MXNet, the model server, and the serving container package; the
# sdist is removed afterwards in the same layer so it never bloats the image.
RUN ${PIP} install --no-cache-dir \
    ${MX_URL} \
    mxnet-model-server==$MMS_VERSION \
    keras-mxnet==2.2.4.1 \
    numpy==1.16.5 \
    onnx==1.4.1 \
    /sagemaker_mxnet_serving_container.tar.gz \
 && rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
# TODO: Should we be installing OpenCV in our image like this? Is there another way we can fix this?
RUN ln -s /dev/null /dev/raw1394

# Unprivileged user that owns the MMS scratch directory (see ENV TEMP above)
RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server
COPY deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/mxnet/license.txt -o /license.txt

# 8080 = inference, 8081 = management (documentation only; publish at run time)
EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
5 changes: 5 additions & 0 deletions docker/1.6.0/py2/config.properties
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# MXNet Model Server (MMS) configuration shipped into the serving image.
# JVM flags: small fixed heap/metaspace so the JVM frontend leaves memory to
# the MXNet workers, and exit (rather than hang) on OOM.
vmargs=-Xmx128m -XX:-UseLargePages -XX:+UseG1GC -XX:MaxMetaspaceSize=32M -XX:MaxDirectMemorySize=10m -XX:+ExitOnOutOfMemoryError
# Serve every model found under the SageMaker model directory.
model_store=/opt/ml/model
load_models=ALL
# Bind on all interfaces: 8080 = inference API, 8081 = management API
# (matches the EXPOSE directives in the Dockerfiles).
inference_address=http://0.0.0.0:8080
management_address=http://0.0.0.0:8081
29 changes: 29 additions & 0 deletions docker/1.6.0/py2/mms-entrypoint.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Container entrypoint.

Starts the serving stack when invoked as ``serve``, otherwise executes the
given command line. Outside of SageMaker (no /opt/ml/input/config present),
it also launches the deep-learning-container telemetry script.
"""

import os.path
import shlex
import subprocess
import sys

if not os.path.exists("/opt/ml/input/config"):
    # Fire-and-forget the telemetry script with its output discarded.
    # NOTE: the previous subprocess.call(['python', ..., '&>/dev/null', '&'])
    # passed the shell redirection/backgrounding tokens as literal arguments
    # and blocked until the child exited; Popen + devnull does what was meant.
    with open(os.devnull, 'wb') as devnull:
        subprocess.Popen(
            ['python', '/usr/local/bin/deep_learning_container.py'],
            stdout=devnull,
            stderr=devnull,
        )

if sys.argv[1] == 'serve':
    from sagemaker_mxnet_serving_container import serving
    serving.main()
else:
    # Treat the remaining argv entries as a shell-style command line.
    subprocess.check_call(shlex.split(' '.join(sys.argv[1:])))

# prevent docker exit
subprocess.call(['tail', '-f', '/dev/null'])
101 changes: 101 additions & 0 deletions docker/1.6.0/py3/Dockerfile.cpu
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
FROM ubuntu:16.04

LABEL maintainer="Amazon AI"

# Specify accept-bind-to-port LABEL for inference pipelines to use SAGEMAKER_BIND_TO_PORT
# https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-real-time.html
LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
# Specify multi-models LABEL to indicate container is capable of loading and serving multiple models concurrently
# https://docs.aws.amazon.com/sagemaker/latest/dg/build-multi-model-build-container.html
LABEL com.amazonaws.sagemaker.capabilities.multi-models=true

ARG MMS_VERSION=1.0.8
# AWS-built MXNet 1.6.0 (MKL, CPU) wheel; URL pinned for reproducible builds
ARG MX_URL=https://aws-mxnet-pypi.s3-us-west-2.amazonaws.com/1.6.0/aws_mxnet_mkl-1.6.0rc0-py2.py3-none-manylinux1_x86_64.whl
ARG PYTHON=python3
ARG PYTHON_PIP=python3-pip
ARG PIP=pip3
ARG PYTHON_VERSION=3.6.8

# TEMP points MMS scratch space at a directory writable by the model-server user
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib" \
    PYTHONIOENCODING=UTF-8 \
    LANG=C.UTF-8 \
    LC_ALL=C.UTF-8 \
    TEMP=/home/model-server/tmp

# Base system packages; apt lists are removed in the same layer to keep the image small
RUN apt-get update \
 && apt-get -y install --no-install-recommends \
    build-essential \
    ca-certificates \
    curl \
    git \
    libopencv-dev \
    openjdk-8-jdk-headless \
    vim \
    wget \
    zlib1g-dev \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

# Build CPython $PYTHON_VERSION from source. The optional-module headers
# (ssl, sqlite3, readline, bz2, tk, gdbm, curses) are installed BEFORE
# configure/make so the interpreter only has to be built once — previously
# Python was compiled, the headers installed, then make/make install run a
# second time to pick the modules up. Sources are removed in the same layer.
RUN apt-get update \
 && apt-get install -y --no-install-recommends \
    libbz2-dev \
    libc6-dev \
    libgdbm-dev \
    libncursesw5-dev \
    libreadline-gplv2-dev \
    libsqlite3-dev \
    libssl-dev \
    tk-dev \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/* \
 && wget https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tgz \
 && tar -xf Python-$PYTHON_VERSION.tgz \
 && cd Python-$PYTHON_VERSION \
 && ./configure \
 && make \
 && make install \
 && rm -rf ../Python-$PYTHON_VERSION* \
 && ln -s /usr/local/bin/pip3 /usr/bin/pip

# Expose the freshly built interpreter as "python" (used by the ENTRYPOINT)
RUN ln -s $(which ${PYTHON}) /usr/local/bin/python

RUN ${PIP} --no-cache-dir install --upgrade \
    pip \
    setuptools

WORKDIR /

COPY sagemaker_mxnet_serving_container.tar.gz /sagemaker_mxnet_serving_container.tar.gz

# Install MXNet, the model server, and the serving container package; the
# sdist is removed afterwards in the same layer so it never bloats the image.
RUN ${PIP} install --no-cache-dir \
    ${MX_URL} \
    mxnet-model-server==$MMS_VERSION \
    keras-mxnet==2.2.4.1 \
    numpy==1.17.4 \
    onnx==1.4.1 \
    /sagemaker_mxnet_serving_container.tar.gz \
 && rm /sagemaker_mxnet_serving_container.tar.gz

# This is here to make our installed version of OpenCV work.
# https://stackoverflow.com/questions/29274638/opencv-libdc1394-error-failed-to-initialize-libdc1394
# TODO: Should we be installing OpenCV in our image like this? Is there another way we can fix this?
RUN ln -s /dev/null /dev/raw1394

# Unprivileged user that owns the MMS scratch directory (see ENV TEMP above)
RUN useradd -m model-server \
 && mkdir -p /home/model-server/tmp \
 && chown -R model-server /home/model-server

COPY mms-entrypoint.py /usr/local/bin/dockerd-entrypoint.py
COPY config.properties /home/model-server
COPY deep_learning_container.py /usr/local/bin/deep_learning_container.py

RUN chmod +x /usr/local/bin/dockerd-entrypoint.py \
 && chmod +x /usr/local/bin/deep_learning_container.py

RUN curl https://aws-dlc-licenses.s3.amazonaws.com/mxnet/license.txt -o /license.txt

# 8080 = inference, 8081 = management (documentation only; publish at run time)
EXPOSE 8080 8081
ENTRYPOINT ["python", "/usr/local/bin/dockerd-entrypoint.py"]
CMD ["mxnet-model-server", "--start", "--mms-config", "/home/model-server/config.properties"]
Loading