├── README.md
├── docker-compose.yml
└── nginx.conf

/README.md:
--------------------------------------------------------------------------------
## 1. Prerequisites

Before getting started, we need the following:
* Docker: an open-source platform for containerizing applications.

## 2. Prepare the Nginx configuration file
The full configuration is in **nginx.conf**; its core part looks like this:
```conf
server {
    listen 80;  # Listen on port 80 for HTTP requests
    location / {
        proxy_pass https://api.openai.com/;              # Reverse-proxy requests to https://api.openai.com/
        proxy_ssl_server_name on;                        # Send the server name via SNI so the TLS handshake with the upstream succeeds
        proxy_set_header Host api.openai.com;            # Set the Host header of the proxied request to api.openai.com
        chunked_transfer_encoding off;                   # Disable chunked transfer encoding to avoid potential proxying issues
        proxy_buffering off;                             # Disable proxy buffering so responses are not delayed
        proxy_cache off;                                 # Disable proxy caching so responses are always fetched fresh
        #proxy_set_header X-Forwarded-For $remote_addr;  # Pass the client's real IP in the X-Forwarded-For header for logging
    }
}
```

## 3. Start the service

### 3.1 docker run
```
docker run -itd -p 80:80 -v $PWD/nginx.conf:/etc/nginx/nginx.conf --name my-nginx nginx
```

### 3.2 docker compose up
```
docker compose up -d
```

## 4. Usage
```python
# Note: you need to be using OpenAI Python v0.27.0 for the code below to work
import openai
openai.api_key = api_key            # your OpenAI API key
openai.api_base = "your_proxy_url"  # proxy address, e.g. "http://www.test.com/v1"
openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Who won the world series in 2020?"},
        {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."},
        {"role": "user", "content": "Where was it played?"}
    ]
)
```
(A sketch for the newer openai v1.x SDK is included at the end of this listing.)
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
services:
  proxy:
    image: nginx
    volumes:
      - "$PWD/nginx.conf:/etc/nginx/nginx.conf"
    ports:
      - "80:80"
      - "443:443"
    restart: unless-stopped
--------------------------------------------------------------------------------
/nginx.conf:
--------------------------------------------------------------------------------
user  nginx;
worker_processes  auto;

error_log  /var/log/nginx/error.log notice;
pid        /var/run/nginx.pid;


events {
    worker_connections  1024;
}


http {
    include       /etc/nginx/mime.types;
    default_type  application/octet-stream;

    log_format  main  '$remote_addr - $remote_user [$time_local] "$request" '
                      '$status $body_bytes_sent "$http_referer" '
                      '"$http_user_agent" "$http_x_forwarded_for"';

    access_log  /var/log/nginx/access.log  main;

    sendfile        on;
    #tcp_nopush     on;

    keepalive_timeout  65;

    #gzip  on;

    # include /etc/nginx/conf.d/*.conf;

    server {
        listen 80;
        location / {
            proxy_pass https://api.openai.com/;             # Reverse-proxy to the OpenAI API
            proxy_ssl_server_name on;                       # Send the server name via SNI for the upstream TLS handshake
            proxy_set_header Host api.openai.com;           # Rewrite the Host header for the upstream
            chunked_transfer_encoding off;                  # Avoid chunked-encoding issues when proxying
            proxy_buffering off;                            # Do not buffer responses (important for streaming)
            proxy_cache off;                                # Never cache API responses
            proxy_set_header X-Forwarded-For $remote_addr;  # Log the client's real IP upstream
        }
    }
}
--------------------------------------------------------------------------------
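The README's Python snippet targets the legacy openai v0.27 SDK. For the current openai Python package (v1.x), the same proxy can be used by pointing the client's `base_url` at it instead of setting `openai.api_base`. A minimal sketch, assuming the proxy is reachable at the placeholder address `http://your_proxy_url` (substitute your own domain or IP):

```python
# Sketch for openai>=1.0: route requests through the Nginx reverse proxy above.
# "http://your_proxy_url/v1" and "your_api_key" are placeholders, not values from this repo.
from openai import OpenAI

client = OpenAI(
    api_key="your_api_key",               # your OpenAI API key
    base_url="http://your_proxy_url/v1",  # proxy address, in place of https://api.openai.com/v1
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Where was the 2020 World Series played?"},
    ],
)
print(response.choices[0].message.content)
```

Because `proxy_buffering` is turned off in nginx.conf, this setup should also work with `stream=True` in either SDK version.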