# File: llm/docker-compose.yml
# Compose stack for the llm-chat service: a single GPU-backed container
# plus a dedicated bridge network ("apps") shared with sibling stacks.
services:
  llm-chat:
    build:
      context: .
      dockerfile: Dockerfile
    # Force amd64 image even on arm hosts (e.g. Apple Silicon build machines).
    platform: linux/amd64
    # NOTE(review): privileged grants the container full host-device access;
    # confirm it is actually required beyond the NVIDIA device reservation below.
    privileged: true
    container_name: llm-chat
    restart: always
    deploy:
      resources:
        reservations:
          devices:
            # Reserve all host NVIDIA GPUs for this container.
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - ./app:/app
      - ./models:/models
      # Persist HF/pip caches across container rebuilds.
      - ./cache/cache:/root/.cache
      # Persisted site-packages pins the image to Python 3.10 — keep in sync
      # with the Dockerfile's base image.
      - ./cache/site-packages:/usr/local/lib/python3.10/site-packages
      - ./offline_packages:/offline_packages
    # Debug override: keep the container alive instead of running the app.
    # entrypoint: sleep 1000000  # ./app/entrypoint.sh
    ports:
      - "8080:8080"
    networks:
      - apps

networks:
  apps:
    name: apps
    driver: bridge
    ipam:
      driver: default
      config:
        # NOTE(review): 172.100.0.0/24 is NOT an RFC 1918 private range
        # (172.16.0.0/12 ends at 172.31.255.255) — traffic to real hosts in
        # 172.100.x.x would be shadowed. Consider e.g. 172.30.0.0/24; left
        # unchanged because other stacks may join this named network.
        - subnet: "172.100.0.0/24"
          gateway: "172.100.0.1"