Last active
January 31, 2021 14:21
-
-
Save RetrieverJo/c4aa824a345cd59a135bc24a95b48180 to your computer and use it in GitHub Desktop.
Dockerfile and entrypoint.sh for tf serving model api
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Deployment for the TF Serving category-classification API.
# Serves the "hybrid_classifier" model over REST (8501) and gRPC (8500).
apiVersion: apps/v1
kind: Deployment
metadata:
  name: category-api-serving
  labels:
    app: category-api-serving
spec:
  replicas: 1
  selector:
    matchLabels:
      app: category-api-serving
  strategy:
    rollingUpdate:
      maxSurge: 25%
      maxUnavailable: 25%
    type: RollingUpdate
  template:
    metadata:
      labels:
        app: category-api-serving
    spec:
      containers:
        # NOTE(review): ":latest" is not reproducible — pin a tag or digest
        # so rollbacks and HPA scale-ups pull a known image.
        - image: asia.gcr.io/storage-273502/buzzni-ai-category-api-serving:latest
          name: category-serving-container
          ports:
            - containerPort: 8501  # REST (matches EXPOSE 8501 in the Dockerfile)
            - containerPort: 8500  # gRPC (matches EXPOSE 8500 in the Dockerfile)
          lifecycle:
            preStop:
              # Short sleep lets the endpoint be removed from the Service
              # before SIGTERM, so in-flight requests drain.
              exec:
                command:
                  - sleep
                  - "5"
          resources:
            # NOTE(review): only a CPU request is set — no limits and no
            # memory request; confirm this is intentional for the HPA below.
            requests:
              cpu: 1
          readinessProbe:
            # TF Serving model-status endpoint: returns 200 once the
            # model version is loaded and AVAILABLE.
            httpGet:
              path: /v1/models/hybrid_classifier
              port: 8501
      # Pod identity with read access to the model bucket
      # (used by prepare_serving.py in the entrypoint).
      serviceAccountName: gcs-reader
---
# Autoscale the Deployment on CPU (requires the CPU request above).
apiVersion: autoscaling/v1
kind: HorizontalPodAutoscaler
metadata:
  name: category-api-serving-hpa
  namespace: default
spec:
  maxReplicas: 10
  minReplicas: 1
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: category-api-serving
  targetCPUUtilizationPercentage: 80
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# ClusterIP Service: exposes the TF Serving REST port (8501) as port 80
# inside the cluster, selecting the Deployment's pods by label.
apiVersion: v1
kind: Service
metadata:
  name: category-api-serving-service
  labels:
    app: category-api-serving-service
  namespace: default
spec:
  ports:
    - port: 80
      protocol: TCP
      targetPort: 8501
  selector:
    app: category-api-serving
  sessionAffinity: None
  type: ClusterIP
---
# Traefik IngressRoute: routes /category-serving/* from the "vpn-web"
# entrypoint to the Service. The "api-strip" middleware is expected to
# strip the /category-serving prefix before forwarding
# (presumably a StripPrefix middleware — confirm its definition).
apiVersion: traefik.containo.us/v1alpha1
kind: IngressRoute
metadata:
  name: vpn-web-category-serving-api
  namespace: default
spec:
  entryPoints:
    - vpn-web
  routes:
    - kind: Rule
      match: PathPrefix(`/category-serving/`)
      middlewares:
        - name: api-strip
      services:
        - name: category-api-serving-service
          port: 80
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# syntax=docker/dockerfile:1
# Serving image: copies the tensorflow_model_server binary out of the
# official TF Serving image and layers the Python 3.7 app code on top.
# Upstream reference:
# https://github.com/tensorflow/serving/blob/master/tensorflow_serving/tools/docker/Dockerfile

# NOTE(review): pin a concrete TF Serving version — "latest" is not reproducible.
ARG TF_SERVING_VERSION=latest
ARG TF_SERVING_BUILD_IMAGE=tensorflow/serving:${TF_SERVING_VERSION}
FROM ${TF_SERVING_BUILD_IMAGE} AS build_image

FROM python:3.7

# OS packages: update + install + clean in ONE layer, so the apt index
# can never go stale against the install and the cache is not baked
# into the image (a later `rm` would not shrink an earlier layer).
RUN apt-get update && apt-get install -y --no-install-recommends \
      ca-certificates \
      curl \
      git \
      software-properties-common \
    && rm -rf /var/lib/apt/lists/*

# Pull only the model-server binary from the TF Serving stage.
COPY --from=build_image /usr/bin/tensorflow_model_server /usr/bin/tensorflow_model_server

# Poetry for dependency management; --no-cache-dir keeps pip's wheel
# cache out of the layer. NOTE(review): unpinned — consider poetry==x.y.z.
RUN pip3 install --no-cache-dir poetry

WORKDIR /clf_api

# Copy only the lockfiles first so the dependency layer stays cached
# until the dependencies themselves change.
COPY poetry.lock pyproject.toml ./
RUN poetry config virtualenvs.create false \
    && poetry install -n

# Application source — changes most often, so it comes last.
COPY . .

# gRPC
EXPOSE 8500
# REST
EXPOSE 8501

# Entrypoint stages the model config and execs tensorflow_model_server.
RUN chmod +x scripts/serving_api_entrypoint.sh
ENTRYPOINT ["scripts/serving_api_entrypoint.sh"]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Entrypoint for the TF Serving category API container.
# Stages the model config, runs model preparation, then execs the server.
#
# set -euo pipefail: abort the container start if model preparation or the
# config copy fails, instead of launching the server with missing models.
set -euo pipefail

export PYTHONPATH=/clf_api
export MODEL_BASE_PATH=/models

mkdir -p "${MODEL_BASE_PATH}"
cp ./models.config "${MODEL_BASE_PATH}/"

# Prepares the served models before startup (presumably downloads them
# from GCS via the gcs-reader service account — confirm against
# prepare_serving.py).
python3.7 buzzni/ai/category_api/serving/prepare_serving.py

# exec so tensorflow_model_server replaces the shell as PID 1 and
# receives SIGTERM directly from `docker stop` / Kubernetes termination.
exec tensorflow_model_server \
  --port=8500 \
  --rest_api_port=8501 \
  --model_config_file="${MODEL_BASE_PATH}/models.config" \
  "$@"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment