# llm-hub/config/continue/config.yaml
---
# Continue IDE extension configuration for the LLM Hub.
# Chat/edit models point at the agent-core endpoint (port 8080);
# autocomplete and embeddings point at the litellm endpoint (port 4000).
# NOTE(review): `sk-agent` looks like a shared placeholder key committed to
# config — confirm it is not a real secret.
name: LLM Hub IDE
version: 1.0.0
schema: v1

models:
  # Fast tier.
  - name: Groq Llama 3.3 70B
    provider: openai  # OpenAI-compatible API shape, custom apiBase
    model: fast-tier
    apiBase: http://agent-core:8080/v1
    apiKey: sk-agent
    roles: [chat, edit, apply]

  # Quality tier.
  - name: Claude 3.5 Sonnet
    provider: openai
    model: quality-tier
    apiBase: http://agent-core:8080/v1
    apiKey: sk-agent
    roles: [chat, edit, apply]

  # Reasoning tier.
  - name: Kimi K2
    provider: openai
    model: reasoning-tier
    apiBase: http://agent-core:8080/v1
    apiKey: sk-agent
    roles: [chat, edit, apply]

  # Volume tier — chat and edit only (no apply role).
  - name: Mistral Small
    provider: openai
    model: volume-tier
    apiBase: http://agent-core:8080/v1
    apiKey: sk-agent
    roles: [chat, edit]

# Inline tab-completion model, served by litellm rather than agent-core.
# NOTE(review): Continue's config.yaml schema v1 generally expresses
# autocomplete/embeddings as `autocomplete`/`embed` roles on `models`
# entries; confirm these top-level keys are honored by the installed
# extension version.
tabAutocompleteModel:
  name: Mistral Autocomplete
  provider: openai
  model: volume-tier
  apiBase: http://litellm:4000/v1
  apiKey: sk-agent

# Embeddings model for codebase indexing, also served by litellm.
embeddingsProvider:
  provider: openai
  model: embeddings
  apiBase: http://litellm:4000/v1
  apiKey: sk-agent

# Context providers exposed to the assistant.
context:
  - provider: code
  - provider: docs
  - provider: diff
  - provider: terminal
  - provider: problems
  - provider: folder
  - provider: codebase