From 919710a07059fb67a5a77e263c387b71133f8e1e Mon Sep 17 00:00:00 2001
From: Gormery Kombo Wanjiru
Date: Mon, 5 Feb 2024 16:30:52 +0100
Subject: [PATCH] with gpu

---
 docker-compose-ollama-gpu.yaml | 39 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 docker-compose-ollama-gpu.yaml

diff --git a/docker-compose-ollama-gpu.yaml b/docker-compose-ollama-gpu.yaml
new file mode 100644
index 0000000..59b2715
--- /dev/null
+++ b/docker-compose-ollama-gpu.yaml
@@ -0,0 +1,39 @@
+version: '3.8'
+
+services:
+  # Ollama LLM server with NVIDIA GPU passthrough via the deploy/devices spec.
+  ollama:
+    volumes:
+      - ./ollama/ollama:/root/.ollama
+    container_name: ollama
+    pull_policy: always
+    tty: true
+    restart: unless-stopped
+    image: ollama/ollama:latest
+    ports:
+      # Quoted to avoid YAML implicit-typing surprises on HOST:CONTAINER pairs.
+      - "11434:11434"
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
+
+  # Web front-end; talks to the ollama service over the compose network.
+  ollama-webui:
+    image: ghcr.io/ollama-webui/ollama-webui:main
+    container_name: ollama-webui
+    volumes:
+      - ./ollama/ollama-webui:/app/backend/data
+    depends_on:
+      - ollama
+    ports:
+      - "3000:8080"
+    environment:
+      # The webui reads OLLAMA_API_BASE_URL; a key of '/ollama/api' is not a
+      # valid environment-variable name and would leave the UI unconfigured.
+      - 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
+    extra_hosts:
+      - 'host.docker.internal:host-gateway'
+    restart: unless-stopped