AI
ByteBot
LiteLLM Integration
$ cd bytebot
$ nano packages/bytebot-llm-proxy/litellm-config.yaml
Edit the config as follows:
model_list:
  # LM Studio backend (exposes an OpenAI-compatible API on port 1234)
  - model_name: qwen2.5-vl-7b-llmstudio
    litellm_params:
      model: openai/qwen2.5-vl-7b
      api_base: http://xxxxxxxx:1234/v1  # replace xxxxxxxx with the LM Studio host/IP
      # NOTE: the key is "supports_function_calling" (plural "supports");
      # LiteLLM silently ignores unrecognized keys such as "support_function_calling".
      supports_function_calling: true
      drop_params: true                  # drop params the backend does not accept
  # Ollama backend (default Ollama port 11434)
  - model_name: qwen3-8b-ollama
    litellm_params:
      model: ollama/qwen3:8b
      api_base: http://xxxxxxxx:11434    # replace xxxxxxxx with the Ollama host/IP
      supports_function_calling: true
      drop_params: true
$ docker-compose -f docker/docker-compose.proxy.yml up -d --build