OpenAI API nginx timeout proxy
#
# Sometimes the OpenAI API hangs, and the openai-python lib doesn't make it easy to set timeouts via llama-index.
# This nginx config lets you spin up an nginx proxy that forcibly kills long-running requests.
# llama-index, for example, has a good request retry policy, so cutting off hung requests makes the whole thing faster.
#
# Put this file in /etc/nginx/conf.d/default.conf
#
# For example, with Docker: `docker run -p 8080:8080 -v $PWD/nginx.conf:/etc/nginx/conf.d/default.conf nginx:alpine3.18`
#
# Then point the client at the proxy, next to your imports:
# import openai
# openai.api_base = "http://127.0.0.1:8080"  # where your nginx proxy runs
# openai.verify_ssl_certs = False
#
# Send SNI so the TLS handshake with api.openai.com succeeds
proxy_ssl_server_name on;

server {
    listen 8080;

    # Larger buffers to accommodate big completion payloads
    proxy_busy_buffers_size 512k;
    proxy_buffers 4 512k;
    proxy_buffer_size 256k;

    location / {
        proxy_pass https://api.openai.com/v1/;
        proxy_set_header Host api.openai.com;

        # Cut off any request that takes longer than 15s so the client can retry
        proxy_connect_timeout 15s;
        proxy_read_timeout 15s;
        proxy_send_timeout 15s;
    }
}
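
For reference, a minimal client-side sketch of using the proxy (assuming the pre-1.0 openai Python SDK, as implied by the `openai.api_base` setting in the comments above; the model name and prompt are only illustrative):

import openai

openai.api_base = "http://127.0.0.1:8080"  # the nginx proxy defined in this gist
openai.verify_ssl_certs = False            # the proxy is plain HTTP on localhost

# Any call that exceeds the 15s nginx timeouts is cut off by the proxy,
# so the client (or llama-index's retry policy) can retry quickly.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response["choices"][0]["message"]["content"])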