# google-research: runtime image serving a ScaNN-enabled TensorFlow Serving build.
# Devel image that contains the prebuilt ScaNN-enabled model server binary.
# NOTE(review): the default tag is `latest-devel` — pin a versioned tag (or a
# digest) via --build-arg for reproducible builds.
ARG TF_SERVING_BUILD_IMAGE=google/tf-serving-scann:latest-devel

# Stage 1: source of the prebuilt binary. Stage 2: minimal Ubuntu runtime.
FROM ${TF_SERVING_BUILD_IMAGE} AS build_image
FROM ubuntu:20.04

# Provenance of the TF Serving sources the binary was built from
# (recorded as labels below; not used during the build itself).
ARG TF_SERVING_VERSION_GIT_BRANCH=r2.11
ARG TF_SERVING_VERSION_GIT_COMMIT=7c9bd6b224061a9d4e2df8d80100807ba80f8f4b

LABEL maintainer="sunphil@google.com"
LABEL tensorflow_serving_github_branchtag=${TF_SERVING_VERSION_GIT_BRANCH}
LABEL tensorflow_serving_github_commit=${TF_SERVING_VERSION_GIT_COMMIT}
12
# g++-9 from the ubuntu-toolchain-r PPA — presumably pulled in for the newer
# libstdc++ runtime the ScaNN-enabled binary links against (TODO: confirm;
# if only libstdc++6 is needed, installing the full compiler is avoidable).
# update + install stay in one layer so the apt cache is never stale, and the
# lists are removed in the same layer to keep the image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
      software-properties-common \
    && add-apt-repository -y ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
      g++-9 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
18
# Install TF Serving pkg: copy only the server binary out of the devel stage
# so none of its build toolchain reaches this image.
COPY --from=build_image /usr/local/bin/tensorflow_model_server /usr/bin/tensorflow_model_server

# Expose ports (documentation only — publish with `docker run -p` at run time)
# gRPC
EXPOSE 8500

# REST
EXPOSE 8501

# Set where models should be stored in the container
ENV MODEL_BASE_PATH=/models
RUN mkdir -p ${MODEL_BASE_PATH}

# The only required piece is the model name in order to differentiate endpoints
ENV MODEL_NAME=model
35
# Create a wrapper script so MODEL_NAME / MODEL_BASE_PATH can be overridden
# with `docker run -e …` while extra command-line flags ("$@") are still
# forwarded to the server.
# - printf '%s\n' emits each argument on its own line (portable, unlike
#   relying on dash-echo's `\n` escape interpretation).
# - `exec` replaces bash so tensorflow_model_server becomes PID 1 and
#   receives SIGTERM from `docker stop` directly.
RUN printf '%s\n' \
      '#!/bin/bash' \
      '' \
      'exec tensorflow_model_server --port=8500 --rest_api_port=8501 \' \
      '  --model_name=${MODEL_NAME} --model_base_path=${MODEL_BASE_PATH}/${MODEL_NAME} \' \
      '  "$@"' \
      > /usr/bin/tf_serving_entrypoint.sh \
    && chmod +x /usr/bin/tf_serving_entrypoint.sh

ENTRYPOINT ["/usr/bin/tf_serving_entrypoint.sh"]
45