|
| 1 | +import Tabs from '@theme/Tabs'; |
| 2 | +import TabItem from '@theme/TabItem'; |
| 3 | +import useBaseUrl from '@docusaurus/useBaseUrl'; |
| 4 | +import ThemedImage from '@theme/ThemedImage'; |
| 5 | + |
| 6 | +import LocalModelRecommendation from './_local-model-recommendation.md'; |
| 7 | + |
| 8 | +<Tabs groupId="cline-provider"> |
| 9 | +<TabItem value="anthropic" label="Anthropic" default> |
| 10 | + |
| 11 | +You need an [Anthropic API](https://www.anthropic.com/api) account to use this |
| 12 | +provider. |
| 13 | + |
| 14 | +In the Cline settings, choose **Anthropic** as your provider, enter your |
| 15 | +Anthropic API key, and choose your preferred model (we recommend |
| 16 | +`claude-3-5-sonnet-<latest>`). |
| 17 | + |
| 18 | +To enable CodeGate, enable **Use custom base URL** and enter |
| 19 | +`http://localhost:8989/anthropic`. |
| 20 | + |
| 21 | +<ThemedImage |
| 22 | + alt='Cline settings for Anthropic' |
| 23 | + sources={{ |
| 24 | + light: useBaseUrl('/img/how-to/cline-provider-anthropic-light.webp'), |
| 25 | + dark: useBaseUrl('/img/how-to/cline-provider-anthropic-dark.webp'), |
| 26 | + }} |
| 27 | + width={'540px'} |
| 28 | +/> |
| 29 | + |
| 30 | +</TabItem> |
| 31 | +<TabItem value="openai" label="OpenAI"> |
| 32 | + |
| 33 | +You need an [OpenAI API](https://openai.com/api/) account to use this provider. |
| 34 | +To use a different OpenAI-compatible endpoint, set the `CODEGATE_OPENAI_URL` |
| 35 | +[configuration parameter](/how-to/configure.md) when you launch CodeGate. |
| 36 | + |
| 37 | +In the Cline settings, choose **OpenAI Compatible** as your provider, enter your |
| 38 | +OpenAI API key, and set your preferred model (example: `gpt-4o-mini`). |
| 39 | + |
| 40 | +To enable CodeGate, set the **Base URL** to `http://localhost:8989/openai`. |
| 41 | + |
| 42 | +<ThemedImage |
| 43 | + alt='Cline settings for OpenAI' |
| 44 | + sources={{ |
| 45 | + light: useBaseUrl('/img/how-to/cline-provider-openai-light.webp'), |
| 46 | + dark: useBaseUrl('/img/how-to/cline-provider-openai-dark.webp'), |
| 47 | + }} |
| 48 | + width={'540px'} |
| 49 | +/> |
| 50 | + |
| 51 | +</TabItem> |
| 52 | +<TabItem value="ollama" label="Ollama"> |
| 53 | + |
| 54 | +You need Ollama installed on your local system with the server running |
| 55 | +(`ollama serve`) to use this provider. |
| 56 | + |
| 57 | +CodeGate connects to `http://host.docker.internal:11434` by default. If you |
| 58 | +changed the default Ollama server port or want to connect to a remote Ollama |
| 59 | +instance, launch CodeGate with the `CODEGATE_OLLAMA_URL` environment variable |
| 60 | +set to the correct URL. See [Configure CodeGate](/how-to/configure.md). |
| 61 | + |
| 62 | +In the Cline settings, choose **Ollama** as your provider and set the **Base |
| 63 | +URL** to `http://localhost:8989/ollama`. |
| 64 | + |
| 65 | +For the **Model ID**, provide the name of a coding model you have installed |
| 66 | +locally using `ollama pull`. |
| 67 | + |
| 68 | +<LocalModelRecommendation /> |
| 69 | + |
| 70 | +<ThemedImage |
| 71 | + alt='Cline settings for Ollama' |
| 72 | + sources={{ |
| 73 | + light: useBaseUrl('/img/how-to/cline-provider-ollama-light.webp'), |
| 74 | + dark: useBaseUrl('/img/how-to/cline-provider-ollama-dark.webp'), |
| 75 | + }} |
| 76 | + width={'540px'} |
| 77 | +/> |
| 78 | + |
| 79 | +</TabItem> |
| 80 | +<TabItem value="lmstudio" label="LM Studio"> |
| 81 | + |
| 82 | +You need LM Studio installed on your local system with a server running from LM |
| 83 | +Studio's **Developer** tab to use this provider. See the |
| 84 | +[LM Studio docs](https://lmstudio.ai/docs/api/server) for more information. |
| 85 | + |
| 86 | +Cline uses large prompts, so you will likely need to increase the context length |
| 87 | +for the model you've loaded in LM Studio. In the Developer tab, select the model |
| 88 | +you'll use with CodeGate, open the **Load** tab on the right and increase the |
| 89 | +**Context Length** to _at least_ 18k (18,432) tokens, then reload the model. |
| 90 | + |
| 91 | +<ThemedImage |
| 92 | + alt='LM Studio dev server' |
| 93 | + sources={{ |
| 94 | + light: useBaseUrl('/img/how-to/lmstudio-server-light.webp'), |
| 95 | + dark: useBaseUrl('/img/how-to/lmstudio-server-dark.webp'), |
| 96 | + }} |
| 97 | + width={'800px'} |
| 98 | +/> |
| 99 | + |
| 100 | +In the Cline settings, choose **LM Studio** as your provider and set the **Base |
| 101 | +URL** to `http://localhost:8989/openai`. |
| 102 | + |
| 103 | +Set the **Model ID** to `lm_studio/<MODEL_NAME>`, where `<MODEL_NAME>` is the |
| 104 | +name of the model you're serving through LM Studio (shown in the Developer tab), |
| 105 | +for example `lm_studio/qwen2.5-coder-7b-instruct`. |
| 106 | + |
| 107 | +<LocalModelRecommendation /> |
| 108 | + |
| 109 | +<ThemedImage |
| 110 | + alt='Cline settings for LM Studio' |
| 111 | + sources={{ |
| 112 | + light: useBaseUrl('/img/how-to/cline-provider-lmstudio-light.webp'), |
| 113 | + dark: useBaseUrl('/img/how-to/cline-provider-lmstudio-dark.webp'), |
| 114 | + }} |
| 115 | + width={'635px'} |
| 116 | +/> |
| 117 | + |
| 118 | +</TabItem> |
| 119 | +</Tabs> |