diff --git a/providers/ai-enabler/models/glm-5-fp8.toml b/providers/ai-enabler/models/glm-5-fp8.toml
new file mode 100644
index 000000000..48cdcd07b
--- /dev/null
+++ b/providers/ai-enabler/models/glm-5-fp8.toml
@@ -0,0 +1,24 @@
+name = "GLM-5 FP8"
+family = "glm"
+release_date = "2026-02-11"
+last_updated = "2026-02-11"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = true
+
+[interleaved]
+field = "reasoning_content"
+
+[cost]
+input = 1.00
+output = 3.20
+
+[limit]
+context = 130_000
+output = 30_000
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/ai-enabler/models/minimax-m2.5.toml b/providers/ai-enabler/models/minimax-m2.5.toml
new file mode 100644
index 000000000..31a51d0d2
--- /dev/null
+++ b/providers/ai-enabler/models/minimax-m2.5.toml
@@ -0,0 +1,21 @@
+name = "MiniMax M2.5"
+family = "minimax"
+release_date = "2026-02-12"
+last_updated = "2026-02-12"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+open_weights = true
+
+[cost]
+input = 0.30
+output = 1.20
+
+[limit]
+context = 200_000
+output = 45_760
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/ai-enabler/provider.toml b/providers/ai-enabler/provider.toml
new file mode 100644
index 000000000..2f5c45761
--- /dev/null
+++ b/providers/ai-enabler/provider.toml
@@ -0,0 +1,5 @@
+name = "AI Enabler by Cast AI"
+env = ["AI_ENABLER_API_KEY"]
+npm = "@ai-sdk/openai-compatible"
+api = "https://api.cast.ai/v1/spec/#/AIEnablerAPI"
+doc = "https://docs.cast.ai/docs/ai-enabler-serverless-endpoints"