一键部署人工智能中的 OPEN-WEBUI、OLLAMA、NGINX,也就是类似 OPEN-AI 的对话机器人
docker-compose.yaml
# Compose stack: Ollama (LLM backend, GPU) + Open WebUI (frontend) + Nginx (reverse proxy).
# NOTE(review): `version` is ignored by Compose v2; kept for backward compatibility.
version: '3.9'

services:
  ollama:
    image: ollama/ollama
    container_name: ollama
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities:
                - gpu  # use GPU acceleration
    volumes:
      - ollama-volume:/root/.ollama  # persist Ollama models/config on the host
      - /etc/localtime:/etc/localtime:ro
    networks:
      - isolated  # internal-only Docker network

  open-webui:  # service name must be unique; also used as DNS name inside the network
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    restart: unless-stopped
    environment:
      # Open WebUI reaches Ollama by service name instead of an IP (Docker DNS)
      - 'OLLAMA_BASE_URL=http://ollama:11434'
    volumes:
      - open-webui-volume:/app/backend/data  # persist Open WebUI data on the host
      - /etc/localtime:/etc/localtime:ro
    depends_on:
      - ollama
    networks:
      - isolated

  nginx-webui:
    image: nginx:latest
    # image: quay.io/ricardbejarano/nginx
    container_name: nginx-webui
    restart: unless-stopped
    ports:
      - "81:81"  # quoted to avoid YAML sexagesimal parsing of port mappings
    volumes:
      - /opt/tool/ai/nginx/data/html:/usr/share/nginx/html:ro
      - /opt/tool/ai/nginx/data/conf/nginx.conf:/etc/nginx/nginx.conf:ro
      - /opt/tool/ai/nginx/data/conf/conf.d/default.conf:/etc/nginx/conf.d/default.conf:ro
      - /opt/tool/ai/nginx/data/conf/.htpasswd:/etc/nginx/.htpasswd:ro
      - /etc/localtime:/etc/localtime:ro
      - /opt/tool/ai/nginx/data/log/access.log:/var/log/nginx/access.log
      - /opt/tool/ai/nginx/data/log/error.log:/var/log/nginx/error.log
    depends_on:
      - open-webui
    networks:
      - isolated  # to reach open-webui
      - internet  # to accept outside traffic

# Named volumes backed by host directories via bind mounts.
# NOTE(review): the host paths must exist before `docker compose up`,
# otherwise the local driver bind mount fails — confirm on the target host.
volumes:
  ollama-volume:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /opt/tool/ai/ollama/data
  open-webui-volume:
    driver: local
    driver_opts:
      type: none
      o: bind
      device: /opt/tool/ai/open-webui/data

networks:
  isolated:
    driver: bridge
    internal: true  # no external connectivity; only nginx bridges to the outside
  internet:
    driver: bridge