Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ GOOGLE_CLOUD_LOCATION=global
GOOGLE_CLOUD_PROJECT=$(op read "op://RubyLLM/Google Cloud/project")
GPUSTACK_API_BASE=http://localhost:11444/v1
GPUSTACK_API_KEY=$(op read "op://RubyLLM/GPUStack/credential")
MINIMAX_API_KEY=$(op read "op://RubyLLM/MiniMax/credential")
MISTRAL_API_KEY=$(op read "op://RubyLLM/Mistral/credential")
OLLAMA_API_BASE=http://localhost:11434/v1
OPENAI_API_KEY=$(op read "op://RubyLLM/OpenAI/credential")
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ response = chat.with_schema(ProductSchema).ask "Analyze this product", with: "pr
* **Async:** Fiber-based concurrency
* **Model registry:** 800+ models with capability detection and pricing
* **Extended thinking:** Control, view, and persist model deliberation
* **Providers:** OpenAI, xAI, Anthropic, Gemini, VertexAI, Bedrock, DeepSeek, Mistral, Ollama, OpenRouter, Perplexity, GPUStack, and any OpenAI-compatible API
* **Providers:** OpenAI, xAI, Anthropic, Gemini, VertexAI, Bedrock, DeepSeek, MiniMax, Mistral, Ollama, OpenRouter, Perplexity, GPUStack, and any OpenAI-compatible API

## Installation

Expand Down
8 changes: 8 additions & 0 deletions docs/_getting_started/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,10 @@ RubyLLM.configure do |config|
config.gpustack_api_base = ENV['GPUSTACK_API_BASE']
config.gpustack_api_key = ENV['GPUSTACK_API_KEY']

# MiniMax
config.minimax_api_key = ENV['MINIMAX_API_KEY']
config.minimax_api_base = ENV['MINIMAX_API_BASE'] # Optional custom MiniMax endpoint (defaults to https://api.minimax.io/v1)

# Mistral
config.mistral_api_key = ENV['MISTRAL_API_KEY']

Expand Down Expand Up @@ -477,6 +481,10 @@ RubyLLM.configure do |config|
config.gpustack_api_base = String
config.gpustack_api_key = String

# MiniMax
config.minimax_api_key = String
config.minimax_api_base = String

# Mistral
config.mistral_api_key = String

Expand Down
2 changes: 2 additions & 0 deletions lib/ruby_llm.rb
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
'deepseek' => 'DeepSeek',
'gpustack' => 'GPUStack',
'llm' => 'LLM',
'minimax' => 'MiniMax',
'mistral' => 'Mistral',
'openai' => 'OpenAI',
'openrouter' => 'OpenRouter',
Expand Down Expand Up @@ -99,6 +100,7 @@ def logger
RubyLLM::Provider.register :deepseek, RubyLLM::Providers::DeepSeek
RubyLLM::Provider.register :gemini, RubyLLM::Providers::Gemini
RubyLLM::Provider.register :gpustack, RubyLLM::Providers::GPUStack
RubyLLM::Provider.register :minimax, RubyLLM::Providers::MiniMax
RubyLLM::Provider.register :mistral, RubyLLM::Providers::Mistral
RubyLLM::Provider.register :ollama, RubyLLM::Providers::Ollama
RubyLLM::Provider.register :openai, RubyLLM::Providers::OpenAI
Expand Down
41 changes: 41 additions & 0 deletions lib/ruby_llm/providers/minimax.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    # MiniMax API integration.
    # MiniMax exposes an OpenAI-compatible chat completions API, so this
    # provider inherits the OpenAI wire protocol and only overrides the
    # endpoint, auth headers, and temperature handling.
    class MiniMax < OpenAI
      include MiniMax::Chat
      include MiniMax::Models

      # Endpoint used when config.minimax_api_base is not set.
      DEFAULT_API_BASE = 'https://api.minimax.io/v1'

      class << self
        # Static capability/pricing tables for MiniMax models.
        def capabilities
          MiniMax::Capabilities
        end

        # Every configuration key this provider reads.
        def configuration_options
          %i[minimax_api_key minimax_api_base]
        end

        # Keys that must be present for the provider to be usable.
        def configuration_requirements
          %i[minimax_api_key]
        end
      end

      def api_base
        @config.minimax_api_base || DEFAULT_API_BASE
      end

      def headers
        {
          'Authorization' => "Bearer #{@config.minimax_api_key}",
          'Content-Type' => 'application/json'
        }
      end

      # MiniMax only accepts temperature in [0.0, 1.0]; delegate clamping.
      def maybe_normalize_temperature(temperature, _model)
        MiniMax::Temperature.normalize(temperature)
      end
    end
  end
end
129 changes: 129 additions & 0 deletions lib/ruby_llm/providers/minimax/capabilities.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class MiniMax
      # Determines capabilities and pricing for MiniMax models.
      # All lookups key off the model id string (e.g. "MiniMax-M2.7-highspeed").
      module Capabilities
        module_function

        # USD prices per million tokens, keyed by model family symbol
        # (see #model_family). Highspeed variants are cheaper.
        PRICES = {
          m2_7: { input: 0.10, output: 0.10 },
          m2_7_highspeed: { input: 0.07, output: 0.07 },
          m2_5: { input: 0.10, output: 0.10 },
          m2_5_highspeed: { input: 0.07, output: 0.07 }
        }.freeze

        # Maximum prompt context in tokens. Only the M2.7 family extends
        # past the 204k default.
        def context_window_for(model_id)
          model_id.match?(/M2\.7/) ? 1_000_000 : 204_000
        end

        # Maximum completion tokens per request.
        def max_tokens_for(model_id)
          model_id.match?(/M2\.[57]/) ? 16_384 : 8_192
        end

        # @return [Float] input price in USD per million tokens
        def input_price_for(model_id)
          PRICES.dig(model_family(model_id), :input) || default_input_price
        end

        # @return [Float] output price in USD per million tokens
        def output_price_for(model_id)
          PRICES.dig(model_family(model_id), :output) || default_output_price
        end

        # MiniMax chat models are text-only; no image input support.
        def supports_vision?(_model_id)
          false
        end

        # Tool/function calling is available on the M2.5 and M2.7 families.
        def supports_functions?(model_id)
          model_id.match?(/M2\.[57]/)
        end

        def supports_tool_choice?(_model_id)
          true
        end

        def supports_tool_parallel_control?(_model_id)
          false
        end

        # JSON mode availability mirrors function-calling support.
        def supports_json_mode?(model_id)
          model_id.match?(/M2\.[57]/)
        end

        # Model ids are already human-readable; display them verbatim.
        def format_display_name(model_id)
          model_id
        end

        def model_type(_model_id)
          'chat'
        end

        # Maps a model id to its pricing family. Order matters: the
        # "-highspeed" patterns must be tested before the base family
        # patterns they contain.
        def model_family(model_id)
          case model_id
          when /M2\.7-highspeed/ then :m2_7_highspeed
          when /M2\.7/ then :m2_7
          when /M2\.5-highspeed/ then :m2_5_highspeed
          when /M2\.5/ then :m2_5
          else :default
          end
        end

        # Fallback prices for model ids not present in PRICES.
        def default_input_price
          0.10
        end

        def default_output_price
          0.10
        end

        def modalities_for(_model_id)
          {
            input: ['text'],
            output: ['text']
          }
        end

        # Capability tags surfaced in the model registry.
        def capabilities_for(model_id)
          capabilities = ['streaming']
          capabilities << 'function_calling' if supports_functions?(model_id)
          capabilities << 'json_mode' if supports_json_mode?(model_id)
          capabilities
        end

        # Pricing hash in the shape the model registry expects. Reuses
        # the per-family lookups so the default-price fallback lives in
        # exactly one place.
        def pricing_for(model_id)
          {
            text_tokens: {
              standard: {
                input_per_million: input_price_for(model_id),
                output_per_million: output_price_for(model_id)
              }
            }
          }
        end
      end
    end
  end
end
16 changes: 16 additions & 0 deletions lib/ruby_llm/providers/minimax/chat.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class MiniMax
      # Chat-formatting overrides for the MiniMax API integration.
      module Chat
        module_function

        # Converts a message role to the wire format MiniMax expects.
        # MiniMax follows the OpenAI role vocabulary, so the role is
        # simply stringified (:user -> "user").
        def format_role(role) = role.to_s
      end
    end
  end
end
49 changes: 49 additions & 0 deletions lib/ruby_llm/providers/minimax/models.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class MiniMax
      # Model listing for the MiniMax API integration.
      # MiniMax does not provide a /v1/models endpoint, so the catalogue
      # is statically defined here.
      module Models
        # Chat models currently served by MiniMax. Frozen and hoisted to
        # a constant so the list is built once rather than re-allocated
        # on every call.
        MODEL_IDS = %w[
          MiniMax-M2.7
          MiniMax-M2.7-highspeed
          MiniMax-M2.5
          MiniMax-M2.5-highspeed
        ].freeze

        # Returns Model::Info entries for every known MiniMax model.
        # Keyword arguments are accepted (and ignored) for interface
        # compatibility with providers that hit a real endpoint.
        def list_models(**)
          slug = 'minimax'
          capabilities = MiniMax::Capabilities
          parse_list_models_response(nil, slug, capabilities)
        end

        # There is no HTTP response to parse; builds the static list.
        def parse_list_models_response(_response, slug, capabilities)
          model_ids.map do |model_id|
            create_model_info(model_id, slug, capabilities)
          end
        end

        def model_ids
          MODEL_IDS
        end

        # Assembles one Model::Info from the static capability tables.
        def create_model_info(model_id, slug, capabilities)
          Model::Info.new(
            id: model_id,
            name: capabilities.format_display_name(model_id),
            provider: slug,
            family: capabilities.model_family(model_id).to_s,
            # NOTE(review): Time.now fabricates a fresh timestamp on every
            # call for a static catalogue — consider nil if Model::Info
            # accepts it; verify against other providers.
            created_at: Time.now,
            context_window: capabilities.context_window_for(model_id),
            max_output_tokens: capabilities.max_tokens_for(model_id),
            modalities: capabilities.modalities_for(model_id),
            capabilities: capabilities.capabilities_for(model_id),
            pricing: capabilities.pricing_for(model_id),
            metadata: {}
          )
        end
      end
    end
  end
end
23 changes: 23 additions & 0 deletions lib/ruby_llm/providers/minimax/temperature.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# frozen_string_literal: true

module RubyLLM
  module Providers
    class MiniMax
      # Temperature handling for MiniMax requests.
      # The API only accepts temperatures in [0.0, 1.0]; out-of-range
      # values are clamped (with a debug log) rather than rejected.
      module Temperature
        module_function

        # @param temperature [Numeric, nil] requested sampling temperature
        # @return [Float, nil] value clamped into [0.0, 1.0]; nil passes
        #   through untouched so the request can omit the field
        def normalize(temperature)
          return if temperature.nil?

          requested = temperature.to_f
          clamped = requested.clamp(0.0, 1.0)
          unless (clamped - requested).abs <= Float::EPSILON
            RubyLLM.logger.debug { "MiniMax requires temperature in [0.0, 1.0], clamping #{temperature} to #{clamped}" }
          end
          clamped
        end
      end
    end
  end
end
Loading
Loading