Mirror of https://github.com/vimagick/dockerfiles.git

add litellm

commit 390955387f (parent a5b87d2324)
README.md

@@ -416,6 +416,7 @@ A collection of delicious docker recipes.
 - [x] mastodon
 - [x] piwigo
 - [x] wireguard
+- [x] ghcr.io/berriai/litellm
 - [x] ghcr.io/ggerganov/llama.cpp
 - [x] lldap/lldap
 - [x] mailhog/mailhog
litellm/README.md (new file, 28 lines)
litellm
=======

OpenAI Proxy Server (LLM Gateway) to call 100+ LLMs through a unified interface, track spend, and set budgets per virtual key/user.

```bash
$ LITELLM_KEY=sk-xxxxxx

$ curl -H "Authorization: Bearer $LITELLM_KEY" http://127.0.0.1:4000/model/info

$ curl http://127.0.0.1:4000/v1/chat/completions \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer $LITELLM_KEY" \
    -d '{
      "model": "claude-3.5",
      "response_format": { "type": "json_object" },
      "messages": [
        {
          "role": "system",
          "content": "You are a helpful assistant designed to output JSON."
        },
        {
          "role": "user",
          "content": "Who won the world series in 2020?"
        }
      ]
    }'
```
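The description above mentions budgets per virtual key/user. As a hedged illustration that is not part of this commit, virtual keys are normally issued through the proxy's key-management API; the `/key/generate` route and request fields below follow the LiteLLM proxy docs and are an assumption here, and key management generally requires the proxy to be backed by a database, which this compose setup does not configure.

```bash
# Sketch (assumption): mint a virtual key scoped to two models with a spend cap,
# authenticating with the master_key from config.yaml.
# Note: this typically requires the proxy to be connected to a database
# (e.g. DATABASE_URL for Postgres); the plain compose file in this commit sets none up.
$ curl -X POST http://127.0.0.1:4000/key/generate \
    -H "Authorization: Bearer sk-xxxxxx" \
    -H "Content-Type: application/json" \
    -d '{"models": ["gpt-4o", "claude-3.5"], "max_budget": 10}'
```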
litellm/data/config.yaml (new file, 20 lines)
#
# https://docs.litellm.ai/docs/proxy/configs
#

model_list:
  - model_name: gpt-4o
    litellm_params:
      model: openai/gpt-4o
      api_key: os.environ/OPENAI_API_KEY
  - model_name: claude-3.5
    litellm_params:
      model: anthropic/claude-3-5-sonnet-latest
      api_key: os.environ/ANTHROPIC_API_KEY
  - model_name: llama3.2
    litellm_params:
      model: ollama/llama3.2
      api_base: http://localhost:11434

general_settings:
  master_key: sk-xxxxxx
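Each `model_name` in `model_list` is the identifier clients pass in the request's `model` field, and `litellm_params.model` selects the backing provider. As a quick sketch (not part of the commit), the Ollama-backed entry would be called through the same OpenAI-compatible endpoint shown in the README, assuming an Ollama server is actually reachable at the configured `api_base`:

```bash
# Sketch: route a request to the ollama/llama3.2 backend by its model_name.
# Assumes $LITELLM_KEY matches the master_key (or a virtual key) from config.yaml.
# Caveat: inside the container, localhost:11434 points at the container itself;
# reaching an Ollama server on the host may need host networking or host.docker.internal.
$ curl http://127.0.0.1:4000/v1/chat/completions \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer $LITELLM_KEY" \
    -d '{"model": "llama3.2", "messages": [{"role": "user", "content": "Say hello."}]}'
```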
litellm/docker-compose.yml (new file, 12 lines)
services:
  litellm:
    image: ghcr.io/berriai/litellm:main-stable
    command: --config /app/config.yaml --detailed_debug
    ports:
      - "4000:4000"
    volumes:
      - ./data/config.yaml:/app/config.yaml
    environment:
      - OPENAI_API_KEY=sk-xxxxxx
      - ANTHROPIC_API_KEY=sk-xxxxxx
    restart: unless-stopped
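For completeness, a minimal sketch of bringing the stack up and verifying it, reusing the `/model/info` route from the README above (the directory name is assumed from the file paths in this commit):

```bash
# Sketch: start the proxy and confirm it is serving the configured models.
$ cd litellm
$ docker compose up -d
$ docker compose logs -f litellm    # --detailed_debug makes the startup logs verbose
$ curl -H "Authorization: Bearer sk-xxxxxx" http://127.0.0.1:4000/model/info
```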