My local AI config: Ollama with Open WebUI via Docker Compose

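This is the docker-compose.yml I use: Ollama serves the model API, Open WebUI provides the chat frontend, and both persist their data under /docker/ollama-openwebui/data on the host.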
services:
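  # Ollama backend: serves the model API; downloaded models persist in the volume below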
  ollama:
    volumes:
      - /docker/ollama-openwebui/data/ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}

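  # Open WebUI frontend: reaches Ollama over the compose network at http://ollama:11434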
  open-webui:
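    # optional: build Open WebUI from source instead of pulling the prebuilt image
    # (requires the upstream Dockerfile in this directory)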
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main}
    container_name: open-webui
    volumes:
      - /docker/ollama-openwebui/data/open-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      - ${OPEN_WEBUI_PORT-3000}:8080
    environment:
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY=' # left empty: Open WebUI generates and persists a key on first start
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
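
Usage (a minimal sketch; the ${...} variables referenced above all have defaults baked into the compose file, so a .env file is optional):

# optional .env in the same directory, overriding the defaults
OLLAMA_DOCKER_TAG=latest
WEBUI_DOCKER_TAG=main
OPEN_WEBUI_PORT=3000

# bring the stack up in the background
docker compose up -d

# pull a model into the ollama container (llama3 is just an example model name)
docker exec -it ollama ollama pull llama3

Open WebUI is then reachable at http://localhost:3000 (or whatever OPEN_WEBUI_PORT is set to).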