# This is an example configuration file
# To learn more, see the full config.yaml reference: https://docs.continue.dev/reference
name: ollama
version: 1.0.0
schema: v1

# Define which models can be used
# https://docs.continue.dev/customization/models
models:
  # Local autocomplete model served by Ollama
  - name: StarCoder2 Local
    provider: ollama
    model: starcoder2:7b
    # NOTE(review): "modelTimeout" is not in the documented config.yaml model
    # schema — confirm against the Continue reference before relying on it
    modelTimeout: "5s"
    roles:
      - autocomplete
    autocompleteOptions:
      useCache: true
      useImports: true
      useRecentlyEdited: true

  # Local embeddings model for codebase indexing
  - name: Nomic Embed Local
    provider: ollama
    model: nomic-embed-text:latest
    roles:
      - embed

  # Let Continue discover whatever models the local Ollama server exposes
  - name: Autodetect
    provider: ollama
    model: AUTODETECT
    defaultCompletionOptions:
      contextLength: 64000

# MCP Servers that Continue can access
# https://docs.continue.dev/customization/mcp-tools
mcpServers:
  - uses: anthropic/memory-mcp