diff --git a/README.md b/README.md
index 6a29f48..d185cb3 100644
--- a/README.md
+++ b/README.md
@@ -416,6 +416,7 @@ A collection of delicious docker recipes.
 - [x] mastodon
 - [x] piwigo
 - [x] wireguard
+- [x] ghcr.io/berriai/litellm
 - [x] ghcr.io/ggerganov/llama.cpp
 - [x] lldap/lldap
 - [x] mailhog/mailhog
diff --git a/litellm/README.md b/litellm/README.md
new file mode 100644
index 0000000..8baed0b
--- /dev/null
+++ b/litellm/README.md
@@ -0,0 +1,28 @@
+litellm
+=======
+
+OpenAI-compatible proxy server (LLM gateway) to call 100+ LLMs through a unified interface, track spend, and set budgets per virtual key/user.
+
+```bash
+$ LITELLM_KEY=sk-xxxxxx
+
+$ curl -H "Authorization: Bearer $LITELLM_KEY" http://127.0.0.1:4000/model/info
+
+$ curl http://127.0.0.1:4000/v1/chat/completions \
+    -H "Content-Type: application/json" \
+    -H "Authorization: Bearer $LITELLM_KEY" \
+    -d '{
+      "model": "claude-3.5",
+      "response_format": { "type": "json_object" },
+      "messages": [
+        {
+          "role": "system",
+          "content": "You are a helpful assistant designed to output JSON."
+        },
+        {
+          "role": "user",
+          "content": "Who won the world series in 2020?"
+        }
+      ]
+    }'
+```
diff --git a/litellm/data/config.yaml b/litellm/data/config.yaml
new file mode 100644
index 0000000..cbd6d33
--- /dev/null
+++ b/litellm/data/config.yaml
@@ -0,0 +1,20 @@
+#
+# https://docs.litellm.ai/docs/proxy/configs
+#
+
+model_list:
+- model_name: gpt-4o
+  litellm_params:
+    model: openai/gpt-4o
+    api_key: os.environ/OPENAI_API_KEY
+- model_name: claude-3.5
+  litellm_params:
+    model: anthropic/claude-3-5-sonnet-latest
+    api_key: os.environ/ANTHROPIC_API_KEY
+- model_name: llama3.2
+  litellm_params:
+    model: ollama/llama3.2
+    api_base: http://localhost:11434
+
+general_settings:
+  master_key: sk-xxxxxx
diff --git a/litellm/docker-compose.yml b/litellm/docker-compose.yml
new file mode 100644
index 0000000..7a49d85
--- /dev/null
+++ b/litellm/docker-compose.yml
@@ -0,0 +1,12 @@
+services:
+  litellm:
+    image: ghcr.io/berriai/litellm:main-stable
+    command: --config /app/config.yaml --detailed_debug
+    ports:
+      - "4000:4000"
+    volumes:
+      - ./data/config.yaml:/app/config.yaml
+    environment:
+      - OPENAI_API_KEY=sk-xxxxxx
+      - ANTHROPIC_API_KEY=sk-xxxxxx
+    restart: unless-stopped
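
For reference, the proxy speaks the OpenAI API, so it can also be driven with the official `openai` Python SDK instead of `curl`. A minimal sketch, assuming the compose stack above is running on 127.0.0.1:4000 and reusing the `master_key` from `data/config.yaml` as the API key:

```python
# Minimal sketch: call the LiteLLM proxy through its OpenAI-compatible endpoint.
# Assumes the compose stack above is up on 127.0.0.1:4000 and that the
# master_key from data/config.yaml ("sk-xxxxxx") is used as the bearer token.
from openai import OpenAI

client = OpenAI(
    base_url="http://127.0.0.1:4000/v1",  # the LiteLLM proxy, not api.openai.com
    api_key="sk-xxxxxx",                  # master_key / virtual key from config.yaml
)

# "llama3.2" is one of the model_name entries declared in config.yaml;
# LiteLLM routes the request to the matching backend defined there.
response = client.chat.completions.create(
    model="llama3.2",
    messages=[{"role": "user", "content": "Who won the world series in 2020?"}],
)
print(response.choices[0].message.content)
```

Swapping `model` for any other `model_name` from `config.yaml` (e.g. `gpt-4o` or `claude-3.5`) routes the same request to a different provider without changing client code.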