
update ollama

kev 2024-08-09 12:19:01 +08:00
parent 64b17a6d4b
commit 1fcf1f50ba
2 changed files with 92 additions and 0 deletions

ollama/README.md Normal file

@@ -0,0 +1,46 @@
ollama
======

[Ollama][1] is an open-source project that serves as a powerful and
user-friendly platform for running LLMs on your local machine.

```bash
$ docker compose up -d
$ docker compose exec ollama bash
>>> ollama pull tinyllama
pulling manifest
pulling 2af3b81862c6... 100% ▕████████████████▏ 637 MB
pulling af0ddbdaaa26... 100% ▕████████████████▏ 70 B
pulling c8472cd9daed... 100% ▕████████████████▏ 31 B
pulling fa956ab37b8c... 100% ▕████████████████▏ 98 B
pulling 6331358be52a... 100% ▕████████████████▏ 483 B
verifying sha256 digest
writing manifest
removing any unused layers
success
>>> exit
$ curl http://localhost:11434/api/show -d '{"model": "tinyllama"}'
$ curl -s http://localhost:11434/api/generate -d '{
  "model": "tinyllama",
  "prompt": "What is 1+1?",
  "stream": false
}' | jq -r .response
$ curl -s http://localhost:11434/api/chat -d '{
  "model": "tinyllama",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    }
  ],
  "stream": false
}' | jq -r .message.content
```
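
Without `"stream": false`, the generate and chat endpoints return newline-delimited
JSON with one object per generated token. A minimal sketch of stitching the streamed
fragments back together with jq (same tinyllama model as above):

```bash
# streaming is the default: each line is a JSON object carrying a .response fragment;
# jq -j joins the fragments without inserting newlines
$ curl -s http://localhost:11434/api/generate -d '{
  "model": "tinyllama",
  "prompt": "What is 1+1?"
}' | jq -j .response; echo
```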
See the API docs: https://github.com/ollama/ollama/blob/main/docs/api.md
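
Models can also be managed with the stock ollama CLI inside the container; a small
sketch of the day-to-day commands (model name reuses the pull example above):

```bash
$ docker compose exec ollama bash
>>> ollama list            # models available locally
>>> ollama run tinyllama   # interactive chat, /bye to quit
>>> ollama rm tinyllama    # remove the model and free its disk space
>>> exit
```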
[1]: https://github.com/ollama/ollama

ollama/docker-compose.yml Normal file

@@ -0,0 +1,46 @@
#
# - https://hub.docker.com/r/ollama/ollama
# - https://docs.openwebui.com/getting-started/env-configuration
#
version: "3.8"
services:
  ollama:
    image: ollama/ollama:0.3.4
    ports:
      - "11434:11434"
    volumes:
      - ./data/ollama:/root/.ollama
    tty: true
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped

  open-webui:
    image: ghcr.io/open-webui/open-webui:cuda
    ports:
      - "8080:8080"
    volumes:
      - ./data/open-webui:/app/backend/data
    environment:
      - OLLAMA_BASE_URL=http://ollama:11434
      - WEBUI_SECRET_KEY=t0p-s3cr3t
      - DATABASE_URL=sqlite:///app/backend/data/webui.db
      - PORT=8080
    depends_on:
      - ollama
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
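
Once the stack is up (`docker compose up -d`), a quick sanity check against the
ports mapped above; Open WebUI is then reachable at http://localhost:8080 in a browser:

```bash
$ curl -s http://localhost:11434/api/version                      # Ollama answers with its version
$ curl -s http://localhost:11434/api/tags | jq '.models[].name'   # models pulled so far
```

The `deploy:` blocks only matter for NVIDIA GPU acceleration via the NVIDIA Container
Toolkit; on a CPU-only host they can be dropped and the plain `main` open-webui image
tag used instead of `cuda`.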