add configuration for copilot/continue integration
This commit is contained in:
32
.continue/agents/ollama.yaml
Normal file
32
.continue/agents/ollama.yaml
Normal file
@@ -0,0 +1,32 @@
---
# This is an example configuration file
# To learn more, see the full config.yaml reference: https://docs.continue.dev/reference
name: ollama
version: 1.0.0
schema: v1

# Define which models can be used
# https://docs.continue.dev/customization/models
models:
  # Local autocomplete model served by Ollama.
  - name: StarCoder2 Local
    provider: ollama
    model: starcoder2:7b
    modelTimeout: "5s"
    roles:
      - autocomplete
    autocompleteOptions:
      useCache: true
      useImports: true
      useRecentlyEdited: true

  # Local embedding model for codebase indexing.
  - name: Nomic Embed Local
    provider: ollama
    model: nomic-embed-text:latest
    roles:
      - embed

  # Let Continue discover whatever models the Ollama server exposes.
  - name: Autodetect
    provider: ollama
    model: AUTODETECT
    defaultCompletionOptions:
      contextLength: 64000

# MCP Servers that Continue can access
# https://docs.continue.dev/customization/mcp-tools
mcpServers:
  - uses: anthropic/memory-mcp
Reference in New Issue
Block a user