Files
llm/docker-compose.yml
2025-08-03 10:46:06 +00:00

33 lines
664 B
YAML

services:
  llm-chat:
    build:
      context: .
      dockerfile: Dockerfile
    platform: linux/amd64
    # NOTE(review): privileged grants the container full host-device access;
    # for GPU-only workloads the nvidia device reservation below is usually
    # sufficient — confirm whether privileged is actually required.
    privileged: true
    container_name: llm-chat
    restart: always
    deploy:
      resources:
        reservations:
          devices:
            # Reserve all NVIDIA GPUs for this service (requires the
            # NVIDIA Container Toolkit on the host).
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - ./app:/app
      - ./models:/models
    # NOTE(review): debug override — keeps the container alive instead of
    # running the real entrypoint script; restore ./app/entrypoint.sh for
    # normal operation.
    entrypoint: sleep 1000000  # ./app/entrypoint.sh
    ports:
      - "8080:8080"
    networks:
      - apps
networks:
  apps:
    name: apps
    driver: bridge
    ipam:
      driver: default
      config:
        # NOTE(review): 172.100.0.0/24 is PUBLIC address space — the RFC 1918
        # private block 172.16.0.0/12 ends at 172.31.255.255. Traffic to real
        # hosts in 172.100.0.0/24 will be shadowed by this bridge. Consider a
        # private subnet such as 172.28.0.0/24 (kept as-is to avoid breaking
        # anything pinned to these addresses).
        - subnet: "172.100.0.0/24"
          gateway: "172.100.0.1"