diff --git a/core/llm/autodetect.ts b/core/llm/autodetect.ts
index 4085d3798b..6a29b18db9 100644
--- a/core/llm/autodetect.ts
+++ b/core/llm/autodetect.ts
@@ -72,6 +72,7 @@ const PROVIDER_HANDLES_TEMPLATING: string[] = [
   "docker",
   "nous",
   "zAI",
+  "avian",
   // TODO add these, change to inverted logic so only the ones that need templating are hardcoded
   // Asksage.ts
   // Azure.ts
diff --git a/core/llm/llms/Avian.ts b/core/llm/llms/Avian.ts
new file mode 100644
index 0000000000..0a9b69d06a
--- /dev/null
+++ b/core/llm/llms/Avian.ts
@@ -0,0 +1,13 @@
+import { LLMOptions } from "../../index.js";
+
+import OpenAI from "./OpenAI.js";
+
+class Avian extends OpenAI {
+  static providerName = "avian";
+  static defaultOptions: Partial<LLMOptions> = {
+    apiBase: "https://api.avian.io/v1",
+    useLegacyCompletionsEndpoint: false,
+  };
+}
+
+export default Avian;
diff --git a/core/llm/llms/index.ts b/core/llm/llms/index.ts
index 04f58e393d..f487f3971c 100644
--- a/core/llm/llms/index.ts
+++ b/core/llm/llms/index.ts
@@ -11,6 +11,7 @@ import { renderTemplatedString } from "../../util/handlebars/renderTemplatedStri
 import { BaseLLM } from "../index";
 import Anthropic from "./Anthropic";
 import Asksage from "./Asksage";
+import Avian from "./Avian";
 import Azure from "./Azure";
 import Bedrock from "./Bedrock";
 import BedrockImport from "./BedrockImport";
@@ -128,6 +129,7 @@ export const LLMClasses = [
   LlamaStack,
   TARS,
   zAI,
+  Avian,
 ];
 
 export async function llmFromDescription(
diff --git a/core/llm/toolSupport.ts b/core/llm/toolSupport.ts
index 099424c61a..a18d882b23 100644
--- a/core/llm/toolSupport.ts
+++ b/core/llm/toolSupport.ts
@@ -363,6 +363,15 @@ export const PROVIDER_TOOL_SUPPORT: Record<string, (model: string) => boolean> =
     const lower = model.toLowerCase();
     return lower.startsWith("glm-4") || lower.startsWith("glm-5");
   },
+  avian: (model) => {
+    const lower = model.toLowerCase();
+    return (
+      lower.includes("deepseek") ||
+      lower.includes("glm") ||
+      lower.includes("kimi") ||
+      lower.includes("minimax")
+    );
+  },
   moonshot: (model) => {
     // support moonshot models
     // https://platform.moonshot.ai/docs/pricing/chat#concepts
diff --git a/docs/customize/model-providers/more/avian.mdx b/docs/customize/model-providers/more/avian.mdx
new file mode 100644
index 0000000000..defae22a31
--- /dev/null
+++ b/docs/customize/model-providers/more/avian.mdx
@@ -0,0 +1,61 @@
+---
+title: "Avian"
+description: "Configure Avian's AI models with Continue, including DeepSeek V3.2, Kimi K2.5, GLM-5, and MiniMax M2.5"
+---
+
+[Avian](https://avian.io/) provides an OpenAI-compatible API with access to leading AI models at competitive pricing.
+
+Get an API key from the [Avian dashboard](https://avian.io)
+
+## Configuration
+
+<Tabs>
+  <Tab title="YAML">
+    ```yaml title="config.yaml"
+    name: My Config
+    version: 0.0.1
+    schema: v1
+
+    models:
+      - name: DeepSeek V3.2
+        provider: avian
+        model: deepseek/deepseek-v3.2
+        apiKey: <YOUR_AVIAN_API_KEY>
+    ```
+  </Tab>
+  <Tab title="JSON">
+    ```json title="config.json"
+    {
+      "models": [
+        {
+          "title": "DeepSeek V3.2",
+          "provider": "avian",
+          "model": "deepseek/deepseek-v3.2",
+          "apiKey": "<YOUR_AVIAN_API_KEY>"
+        }
+      ]
+    }
+    ```
+  </Tab>
+</Tabs>
+
+## Available Models
+
+| Model | Context Length | Input Price | Output Price |
+| ----- | ------------- | ----------- | ------------ |
+| `deepseek/deepseek-v3.2` | 164K | $0.26/M | $0.38/M |
+| `moonshotai/kimi-k2.5` | 131K | $0.45/M | $2.20/M |
+| `z-ai/glm-5` | 131K | $0.30/M | $2.55/M |
+| `minimax/minimax-m2.5` | 1M | $0.30/M | $1.10/M |
+
+## Configuration Options
+
+| Option | Description | Default |
+| --------- | -------------------- | ----------------------------- |
+| `apiKey` | Avian API key | Required |
+| `apiBase` | API base URL | `https://api.avian.io/v1` |
+| `model` | Model name to use | - |
+
+<Info>
+  You can set the `AVIAN_API_KEY` environment variable instead of specifying the API key directly in the configuration file.
+</Info>
diff --git a/docs/docs.json b/docs/docs.json
index 379837394d..f64edbee2d 100644
--- a/docs/docs.json
+++ b/docs/docs.json
@@ -169,6 +169,7 @@
             "group": "More Providers",
             "pages": [
               "customize/model-providers/more/asksage",
+              "customize/model-providers/more/avian",
               "customize/model-providers/more/deepseek",
               "customize/model-providers/more/deepinfra",
               "customize/model-providers/more/groq",
diff --git a/gui/public/logos/avian.png b/gui/public/logos/avian.png
new file mode 100644
index 0000000000..0a5fcfbc42
Binary files /dev/null and b/gui/public/logos/avian.png differ
diff --git a/gui/src/pages/AddNewModel/configs/models.ts b/gui/src/pages/AddNewModel/configs/models.ts
index d15db3f3d1..dabe9124fd 100644
--- a/gui/src/pages/AddNewModel/configs/models.ts
+++ b/gui/src/pages/AddNewModel/configs/models.ts
@@ -563,6 +563,59 @@ export const models: { [key: string]: ModelPackage } = {
     providerOptions: ["zAI"],
     isOpenSource: false,
   },
+  avianDeepseekV32: {
+    title: "DeepSeek V3.2",
+    description: "DeepSeek V3.2 with 164K context, available through Avian",
+    refUrl: "https://avian.io",
+    params: {
+      title: "DeepSeek V3.2",
+      model: "deepseek/deepseek-v3.2",
+      contextLength: 164_000,
+    },
+    icon: "avian.png",
+    providerOptions: ["avian"],
+    isOpenSource: false,
+  },
+  avianKimiK25: {
+    title: "Kimi K2.5",
+    description:
+      "Moonshot AI's Kimi K2.5 with 131K context, available through Avian",
+    refUrl: "https://avian.io",
+    params: {
+      title: "Kimi K2.5",
+      model: "moonshotai/kimi-k2.5",
+      contextLength: 131_000,
+    },
+    icon: "avian.png",
+    providerOptions: ["avian"],
+    isOpenSource: false,
+  },
+  avianGlm5: {
+    title: "GLM-5",
+    description: "Z.ai's GLM-5 with 131K context, available through Avian",
+    refUrl: "https://avian.io",
+    params: {
+      title: "GLM-5",
+      model: "z-ai/glm-5",
+      contextLength: 131_000,
+    },
+    icon: "avian.png",
+    providerOptions: ["avian"],
+    isOpenSource: false,
+  },
+  avianMinimaxM25: {
+    title: "MiniMax M2.5",
+    description: "MiniMax M2.5 with 1M context window, available through Avian",
+    refUrl: "https://avian.io",
+    params: {
+      title: "MiniMax M2.5",
+      model: "minimax/minimax-m2.5",
+      contextLength: 1_000_000,
+    },
+    icon: "avian.png",
+    providerOptions: ["avian"],
+    isOpenSource: false,
+  },
   mistralOs: {
     title: "Mistral",
     description:
diff --git a/gui/src/pages/AddNewModel/configs/providers.ts b/gui/src/pages/AddNewModel/configs/providers.ts
index 5dfb7220b1..141aedcbd8 100644
--- a/gui/src/pages/AddNewModel/configs/providers.ts
+++ b/gui/src/pages/AddNewModel/configs/providers.ts
@@ -286,6 +286,32 @@ export const providers: Partial<Record<string, ProviderInfo>> = {
     ],
     apiKeyUrl: "https://z.ai/manage-apikey/apikey-list",
   },
+  avian: {
+    title: "Avian",
+    provider: "avian",
+    description: "Access top AI models at low cost through Avian's API",
+    longDescription:
+      "Avian provides an OpenAI-compatible API with access to leading AI models including DeepSeek V3.2, Kimi K2.5, GLM-5, and MiniMax M2.5. Get your API key from the [Avian dashboard](https://avian.io).",
+    icon: "avian.png",
+    tags: [ModelProviderTags.RequiresApiKey],
+    packages: [
+      models.avianDeepseekV32,
+      models.avianKimiK25,
+      models.avianGlm5,
+      models.avianMinimaxM25,
+    ],
+    collectInputFor: [
+      {
+        inputType: "text",
+        key: "apiKey",
+        label: "API Key",
+        placeholder: "Enter your Avian API key",
+        required: true,
+      },
+      ...completionParamsInputsConfigs,
+    ],
+    apiKeyUrl: "https://avian.io",
+  },
   "function-network": {
     title: "Function Network",
     provider: "function-network",
diff --git a/packages/llm-info/src/index.ts b/packages/llm-info/src/index.ts
index 52ca9f211c..abffcb0208 100644
--- a/packages/llm-info/src/index.ts
+++ b/packages/llm-info/src/index.ts
@@ -1,4 +1,5 @@
 import { Anthropic } from "./providers/anthropic.js";
+import { Avian } from "./providers/avian.js";
 import { Azure } from "./providers/azure.js";
 import { Bedrock } from "./providers/bedrock.js";
 import { Cohere } from "./providers/cohere.js";
@@ -27,6 +28,7 @@ export const allModelProviders: ModelProvider[] = [
   CometAPI,
   xAI,
   zAI,
+  Avian,
 ];
 
 export const allLlms: LlmInfoWithProvider[] = allModelProviders.flatMap(
diff --git a/packages/llm-info/src/providers/avian.ts b/packages/llm-info/src/providers/avian.ts
new file mode 100644
index 0000000000..574fae5d43
--- /dev/null
+++ b/packages/llm-info/src/providers/avian.ts
@@ -0,0 +1,36 @@
+import { ModelProvider } from "../types.js";
+
+export const Avian: ModelProvider = {
+  models: [
+    {
+      model: "deepseek/deepseek-v3.2",
+      displayName: "DeepSeek V3.2",
+      contextLength: 164000,
+      recommendedFor: ["chat"],
+      regex: /deepseek\/deepseek-v3\.2/,
+    },
+    {
+      model: "moonshotai/kimi-k2.5",
+      displayName: "Kimi K2.5",
+      contextLength: 131000,
+      recommendedFor: ["chat"],
+      regex: /moonshotai\/kimi-k2\.5/,
+    },
+    {
+      model: "z-ai/glm-5",
+      displayName: "GLM-5",
+      contextLength: 131000,
+      recommendedFor: ["chat"],
+      regex: /z-ai\/glm-5/,
+    },
+    {
+      model: "minimax/minimax-m2.5",
+      displayName: "MiniMax M2.5",
+      contextLength: 1000000,
+      recommendedFor: ["chat"],
+      regex: /minimax\/minimax-m2\.5/,
+    },
+  ],
+  id: "avian",
+  displayName: "Avian",
+};
diff --git a/packages/openai-adapters/src/index.ts b/packages/openai-adapters/src/index.ts
index acb0c580e3..7c251c3a27 100644
--- a/packages/openai-adapters/src/index.ts
+++ b/packages/openai-adapters/src/index.ts
@@ -103,6 +103,8 @@ export function constructLlmApi(config: LLMConfig): BaseLlmApi | undefined {
       return openAICompatible("https://api.x.ai/v1/", config);
     case "zAI":
       return openAICompatible("https://api.z.ai/api/paas/v4/", config);
+    case "avian":
+      return openAICompatible("https://api.avian.io/v1/", config);
     case "voyage":
       return openAICompatible("https://api.voyageai.com/v1/", config);
     case "mistral":
diff --git a/packages/openai-adapters/src/types.ts b/packages/openai-adapters/src/types.ts
index 868a6e8dfe..e80b28185d 100644
--- a/packages/openai-adapters/src/types.ts
+++ b/packages/openai-adapters/src/types.ts
@@ -56,6 +56,7 @@ export const OpenAIConfigSchema = BasePlusConfig.extend({
     z.literal("vllm"),
     z.literal("xAI"),
     z.literal("zAI"),
+    z.literal("avian"),
     z.literal("scaleway"),
     z.literal("ncompass"),
     z.literal("relace"),