GPU-enabled inference container for Transformers

The Dockerfile below builds a GPU-ready FastAPI inference image for Hugging Face Transformers on top of the tiangolo/uvicorn-gunicorn-machine-learning CUDA base image.
# CUDA 9.1 base image with Python 3.7 and a Gunicorn/Uvicorn stack preinstalled
FROM tiangolo/uvicorn-gunicorn-machine-learning:cuda9.1-python3.7

# Port the service listens on and the ASGI application the base image should serve
ENV PORT 40003
ENV APP_MODULE app.api:app
ENV LOG_LEVEL debug
# Number of Gunicorn worker processes
ENV WEB_CONCURRENCY 2

# Install dependencies before copying the app so this layer is cached across code changes
COPY ./requirements.txt ./requirements.txt
RUN pip install --no-cache-dir --upgrade pip
RUN pip install --no-cache-dir --upgrade torch transformers
RUN pip install --no-cache-dir fastapi

# Copy the application code into the image
COPY ./app /app/app
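
The base image launches the module named in APP_MODULE under Gunicorn/Uvicorn, so the Dockerfile implies an ASGI app at app/api.py exposing an object called app. The app code itself is not included in the gist; what follows is a minimal sketch of what such a module might look like, assuming a Hugging Face sentiment-analysis pipeline and a /predict endpoint (the task, model, and route are illustrative, not taken from the gist):

# app/api.py -- hypothetical sketch; only "APP_MODULE app.api:app" is implied by the Dockerfile
import torch
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI()

# Put the pipeline on the GPU when CUDA is available (device=0), otherwise fall back to CPU (device=-1).
# The sentiment-analysis task is an illustrative choice, not specified in the gist.
device = 0 if torch.cuda.is_available() else -1
classifier = pipeline("sentiment-analysis", device=device)

class PredictRequest(BaseModel):
    text: str

@app.post("/predict")
def predict(req: PredictRequest):
    # A transformers pipeline returns a list of {"label": ..., "score": ...} dicts
    return classifier(req.text)[0]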
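
Once built (for example, docker build -t transformers-inference .), the container needs GPU access at run time. On Docker 19.03+ with the NVIDIA Container Toolkit that is docker run --gpus all -p 40003:40003 transformers-inference; setups contemporary with this CUDA 9.1 base typically used the older nvidia-docker runtime instead. The image name and port mapping here are illustrative; only PORT 40003 comes from the Dockerfile.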