From 604285355deb373cb0758414a78c1a93488d8ac2 Mon Sep 17 00:00:00 2001
From: kevin
Date: Sun, 9 Mar 2025 09:09:43 +0800
Subject: [PATCH] add open-webui

---
 README.md                     |  1 +
 open-webui/README.md          |  9 +++++++++
 open-webui/docker-compose.yml | 15 +++++++++++++++
 3 files changed, 25 insertions(+)
 create mode 100644 open-webui/README.md
 create mode 100644 open-webui/docker-compose.yml

diff --git a/README.md b/README.md
index 0bf7e8c..f5878ea 100644
--- a/README.md
+++ b/README.md
@@ -474,6 +474,7 @@ A collection of delicious docker recipes.
 - [x] openresty/openresty
 - [x] opensearchproject/opensearch :bucket:
 - [x] kylemanna/openvpn
+- [x] ghcr.io/open-webui/open-webui
 - [x] campbellsoftwaresolutions/osticket
 - [x] outlinewiki/outline
 - [x] gabekangas/owncast
diff --git a/open-webui/README.md b/open-webui/README.md
new file mode 100644
index 0000000..0645e22
--- /dev/null
+++ b/open-webui/README.md
@@ -0,0 +1,9 @@
+open-webui
+==========
+
+[Open WebUI][1] is an extensible, feature-rich, and user-friendly self-hosted
+AI platform designed to operate entirely offline. It supports various LLM
+runners like Ollama and OpenAI-compatible APIs, with a built-in inference
+engine for RAG, making it a powerful AI deployment solution.
+
+[1]: https://github.com/open-webui/open-webui
diff --git a/open-webui/docker-compose.yml b/open-webui/docker-compose.yml
new file mode 100644
index 0000000..98e663f
--- /dev/null
+++ b/open-webui/docker-compose.yml
@@ -0,0 +1,15 @@
+services:
+  open-webui:
+    image: ghcr.io/open-webui/open-webui:main
+    volumes:
+      - ./data:/app/backend/data
+    ports:
+      - "3000:8080"
+    environment:
+      - HF_HUB_OFFLINE=1
+      - WEBUI_SECRET_KEY=t0p-s3cr3t
+      - OLLAMA_BASE_URL=http://ollama:11434
+      # OPENAI_API_KEY=sk-xxxxxx
+    extra_hosts:
+      - ollama:x.x.x.x
+    restart: unless-stopped
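
A minimal bring-up sketch for the compose file added above, assuming `x.x.x.x`
in `extra_hosts` has been replaced with the address of a reachable Ollama
instance and that port 3000 is free on the Docker host:

    $ docker compose up -d
    $ docker compose logs -f open-webui

Once the container is up, the UI is served on http://localhost:3000 (mapped
from container port 8080) and forwards model requests to Ollama at the
address given by OLLAMA_BASE_URL; persistent data lands in ./data on the host.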