From d171fa41edf29a30200af43d01b96385c147402e Mon Sep 17 00:00:00 2001 From: Octopus Date: Mon, 23 Mar 2026 21:29:30 +0800 Subject: [PATCH] feat: add MiniMax as first-class LLM provider Add MiniMax cloud models (M2.7, M2.5, M2.5-highspeed) as a built-in provider following the established ProviderTrait pattern. Uses OpenAI- compatible chat completions API at api.minimax.io. - New MiniMaxProvider with full ProviderTrait implementation - API key management with env var support and masked display - Model pricing for M2.7, M2.5, and M2.5-highspeed - Provider registration in factory and PROVIDER_NAMES - 20 unit tests covering all provider functionality - README updated to mention MiniMax in feature list --- README.md | 6 +- refact-agent/engine/src/providers/minimax.rs | 361 ++++++++++++++++++ refact-agent/engine/src/providers/mod.rs | 1 + refact-agent/engine/src/providers/pricing.rs | 25 ++ refact-agent/engine/src/providers/registry.rs | 3 + 5 files changed, 393 insertions(+), 3 deletions(-) create mode 100644 refact-agent/engine/src/providers/minimax.rs diff --git a/README.md b/README.md index 25b47132f..606486494 100644 --- a/README.md +++ b/README.md @@ -35,8 +35,8 @@ Refact Agent works effortlessly with the tools and databases you already use: ### ⚡ Why Choose Refact Agent? - ✅ **Deploy On-Premise:** For maximum security, choose our self-hosted AI Agent version and run it on your own infrastructure. -- 🧠 **Access State-of-the-Art Models:** Use GPT-5, Claude 4.5, Gemini 3.0, DeepSeek, and more with AI Agent or for chat queries. -- 🔑 **Bring Your Own Key (BYOK):** Connect your API key and use any LLM: OpenAI, Anthropic, Google, DeepSeek, Qwen, and others. +- 🧠 **Access State-of-the-Art Models:** Use GPT-5, Claude 4.5, Gemini 3.0, DeepSeek, MiniMax, and more with AI Agent or for chat queries. +- 🔑 **Bring Your Own Key (BYOK):** Connect your API key and use any LLM: OpenAI, Anthropic, Google, DeepSeek, MiniMax, Qwen, and others. 
- 💬 **Integrated IDE Chat:** Integrate with GitHub, PostgreSQL, Docker, and more. Refact.ai Agent accesses your resources and handles related operations autonomously, mimicking your workflow. - ⚡ **Free, Unlimited, Context-Aware Auto-Completion:** Code faster with smart AI suggestions powered by Qwen2.5-Coder-1.5B with RAG. - 🛠️ **Supports 25+ Programming Languages:** Python, JavaScript, Java, Rust, TypeScript, PHP, C++, C#, Go, and many more! @@ -87,7 +87,7 @@ Our Ambassadors shared remarkable stories of how they transform weeks of coding ![integrations](https://lh7-rt.googleusercontent.com/docsz/AD_4nXc4DWYXF73AgPWAaFFGLTqEprWwA0im8R_A1QMo4QW4pTnSi1MCoP9L8udMZb5FPyN-CdgefaxJFGpX2ndn5nkjGBF2b_hZBNHogM7IM6SPvUIvUd9iE1lYIq7q-TB2qKzSGLk00A?key=zllGjEBckkx13bRZ6JIqX6qr) -✅ **State-of-the-Art Models** – Use GPT-5, Claude 4.5, Gemini 3.0, DeepSeek Reasoner, and more with AI Agent or for chat queries. +✅ **State-of-the-Art Models** – Use GPT-5, Claude 4.5, Gemini 3.0, DeepSeek Reasoner, MiniMax M2.7, and more with AI Agent or for chat queries. ✅ **Bring Your Own Key (BYOK)** – Use your own API keys for external LLMs. 
diff --git a/refact-agent/engine/src/providers/minimax.rs b/refact-agent/engine/src/providers/minimax.rs new file mode 100644 index 000000000..bdfb419c8 --- /dev/null +++ b/refact-agent/engine/src/providers/minimax.rs @@ -0,0 +1,361 @@ +use std::any::Any; +use std::collections::HashMap; + +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use serde_json::json; + +use crate::llm::adapter::WireFormat; +use crate::providers::config::resolve_env_var; +use crate::providers::traits::{CustomModelConfig, ModelPricing, ModelSource, ProviderRuntime, ProviderTrait, parse_enabled_models, parse_custom_models, set_model_enabled_impl}; +use crate::providers::pricing::minimax_pricing; + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct MiniMaxProvider { + pub api_key: String, + pub enabled: bool, + #[serde(default)] + pub enabled_models: Vec, + #[serde(default)] + pub custom_models: HashMap, +} + +#[async_trait] +impl ProviderTrait for MiniMaxProvider { + fn name(&self) -> &'static str { + "minimax" + } + + fn display_name(&self) -> &'static str { + "MiniMax" + } + + fn as_any(&self) -> &dyn Any { + self + } + + fn as_any_mut(&mut self) -> &mut dyn Any { + self + } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } + + fn default_wire_format(&self) -> WireFormat { + WireFormat::OpenaiChatCompletions + } + + fn model_filter_regex(&self) -> Option<&'static str> { + Some(r"(?i)^minimax-") + } + + fn provider_schema(&self) -> &'static str { + r#" +fields: + api_key: + f_type: string_long + f_desc: "MiniMax API key from platform.minimaxi.com" + f_placeholder: "eyJhbG..." + f_label: "API Key" + smartlinks: + - sl_label: "Get API Key" + sl_goto: "https://platform.minimaxi.com/user-center/basic-information/interface-key" +description: | + MiniMax cloud models (M2.7, M2.5, M2.5-highspeed with 204K context). 
+available: + on_your_laptop_possible: true + when_isolated_possible: true +"# + } + + fn provider_settings_apply(&mut self, yaml: serde_yaml::Value) -> Result<(), String> { + if let Some(api_key) = yaml.get("api_key").and_then(|v| v.as_str()) { + if api_key != "***" { + self.api_key = api_key.to_string(); + } + } + if let Some(enabled) = yaml.get("enabled").and_then(|v| v.as_bool()) { + self.enabled = enabled; + } + parse_enabled_models(&yaml, &mut self.enabled_models); + parse_custom_models(&yaml, &mut self.custom_models); + Ok(()) + } + + fn provider_settings_as_json(&self) -> serde_json::Value { + json!({ + "api_key": if self.api_key.is_empty() { "" } else { "***" }, + "enabled": self.enabled, + "enabled_models": self.enabled_models, + "custom_models": self.custom_models + }) + } + + fn build_runtime(&self) -> Result { + let api_key = resolve_env_var(&self.api_key, "", "minimax api_key"); + + Ok(ProviderRuntime { + name: self.name().to_string(), + display_name: self.display_name().to_string(), + enabled: self.enabled && !api_key.is_empty() && !self.enabled_models.is_empty(), + readonly: false, + wire_format: self.default_wire_format(), + chat_endpoint: "https://api.minimax.io/v1/chat/completions".to_string(), + completion_endpoint: String::new(), + embedding_endpoint: String::new(), + api_key, + auth_token: String::new(), + tokenizer_api_key: String::new(), + extra_headers: HashMap::new(), + support_metadata: false, + chat_models: Vec::new(), + completion_models: Vec::new(), + embedding_model: None, + }) + } + + fn has_credentials(&self) -> bool { + let key = resolve_env_var(&self.api_key, "", "minimax api_key"); + !key.is_empty() + } + + fn model_source(&self) -> ModelSource { + ModelSource::ModelCaps + } + + fn enabled_models(&self) -> &[String] { + &self.enabled_models + } + + fn custom_models(&self) -> &HashMap { + &self.custom_models + } + + fn set_model_enabled(&mut self, model_id: &str, enabled: bool) { + set_model_enabled_impl(&mut self.enabled_models, 
model_id, enabled); + } + + fn add_custom_model(&mut self, model_id: String, config: CustomModelConfig) { + self.custom_models.insert(model_id, config); + } + + fn remove_custom_model(&mut self, model_id: &str) -> bool { + self.custom_models.remove(model_id).is_some() + } + + fn model_pricing(&self, model_id: &str) -> Option { + if let Some(config) = self.custom_models.get(model_id) { + if config.pricing.is_some() { + return config.pricing.clone(); + } + } + minimax_pricing(model_id) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_provider_name() { + let provider = MiniMaxProvider::default(); + assert_eq!(provider.name(), "minimax"); + assert_eq!(provider.display_name(), "MiniMax"); + } + + #[test] + fn test_wire_format() { + let provider = MiniMaxProvider::default(); + assert!(matches!(provider.default_wire_format(), WireFormat::OpenaiChatCompletions)); + } + + #[test] + fn test_model_filter_regex() { + let provider = MiniMaxProvider::default(); + let pattern = provider.model_filter_regex().unwrap(); + let re = regex::Regex::new(pattern).unwrap(); + assert!(re.is_match("MiniMax-M2.7")); + assert!(re.is_match("MiniMax-M2.5-highspeed")); + assert!(re.is_match("minimax-m2.5")); + assert!(!re.is_match("gpt-4o")); + assert!(!re.is_match("deepseek-chat")); + } + + #[test] + fn test_settings_apply() { + let mut provider = MiniMaxProvider::default(); + let yaml: serde_yaml::Value = serde_yaml::from_str(r#" + api_key: "test-key-123" + enabled: true + enabled_models: + - "MiniMax-M2.7" + - "MiniMax-M2.5-highspeed" + "#).unwrap(); + provider.provider_settings_apply(yaml).unwrap(); + assert_eq!(provider.api_key, "test-key-123"); + assert!(provider.enabled); + assert_eq!(provider.enabled_models.len(), 2); + assert!(provider.enabled_models.contains(&"MiniMax-M2.7".to_string())); + assert!(provider.enabled_models.contains(&"MiniMax-M2.5-highspeed".to_string())); + } + + #[test] + fn test_settings_as_json_masks_api_key() { + let provider = 
MiniMaxProvider { + api_key: "secret-key".to_string(), + enabled: true, + enabled_models: vec!["MiniMax-M2.7".to_string()], + custom_models: HashMap::new(), + }; + let json = provider.provider_settings_as_json(); + assert_eq!(json["api_key"], "***"); + assert_eq!(json["enabled"], true); + } + + #[test] + fn test_settings_as_json_empty_key() { + let provider = MiniMaxProvider::default(); + let json = provider.provider_settings_as_json(); + assert_eq!(json["api_key"], ""); + } + + #[test] + fn test_build_runtime_endpoint() { + let provider = MiniMaxProvider { + api_key: "test-key".to_string(), + enabled: true, + enabled_models: vec!["MiniMax-M2.7".to_string()], + custom_models: HashMap::new(), + }; + let runtime = provider.build_runtime().unwrap(); + assert_eq!(runtime.chat_endpoint, "https://api.minimax.io/v1/chat/completions"); + assert!(runtime.completion_endpoint.is_empty()); + assert!(runtime.embedding_endpoint.is_empty()); + assert_eq!(runtime.name, "minimax"); + assert_eq!(runtime.display_name, "MiniMax"); + } + + #[test] + fn test_build_runtime_disabled_without_key() { + let provider = MiniMaxProvider { + api_key: String::new(), + enabled: true, + enabled_models: vec!["MiniMax-M2.7".to_string()], + custom_models: HashMap::new(), + }; + let runtime = provider.build_runtime().unwrap(); + assert!(!runtime.enabled); + } + + #[test] + fn test_build_runtime_disabled_without_models() { + let provider = MiniMaxProvider { + api_key: "test-key".to_string(), + enabled: true, + enabled_models: Vec::new(), + custom_models: HashMap::new(), + }; + let runtime = provider.build_runtime().unwrap(); + assert!(!runtime.enabled); + } + + #[test] + fn test_model_source() { + let provider = MiniMaxProvider::default(); + assert!(matches!(provider.model_source(), ModelSource::ModelCaps)); + } + + #[test] + fn test_set_model_enabled() { + let mut provider = MiniMaxProvider::default(); + provider.set_model_enabled("MiniMax-M2.7", true); + 
assert!(provider.enabled_models.contains(&"MiniMax-M2.7".to_string())); + provider.set_model_enabled("MiniMax-M2.7", false); + assert!(!provider.enabled_models.contains(&"MiniMax-M2.7".to_string())); + } + + #[test] + fn test_custom_model_management() { + let mut provider = MiniMaxProvider::default(); + let config = CustomModelConfig::default(); + provider.add_custom_model("custom-minimax".to_string(), config); + assert!(provider.custom_models().contains_key("custom-minimax")); + assert!(provider.remove_custom_model("custom-minimax")); + assert!(!provider.custom_models().contains_key("custom-minimax")); + assert!(!provider.remove_custom_model("nonexistent")); + } + + #[test] + fn test_pricing_m27() { + let pricing = minimax_pricing("MiniMax-M2.7").unwrap(); + assert!(pricing.prompt > 0.0); + assert!(pricing.generated > 0.0); + } + + #[test] + fn test_pricing_m25_highspeed() { + let pricing = minimax_pricing("MiniMax-M2.5-highspeed").unwrap(); + assert!(pricing.prompt > 0.0); + assert!(pricing.generated > 0.0); + } + + #[test] + fn test_pricing_m25() { + let pricing = minimax_pricing("MiniMax-M2.5").unwrap(); + assert!(pricing.prompt > 0.0); + assert!(pricing.generated > 0.0); + } + + #[test] + fn test_pricing_unknown() { + assert!(minimax_pricing("unknown-model").is_none()); + } + + #[test] + fn test_has_credentials_empty() { + let provider = MiniMaxProvider::default(); + assert!(!provider.has_credentials()); + } + + #[test] + fn test_schema_is_valid_yaml() { + let provider = MiniMaxProvider::default(); + let schema = provider.provider_schema(); + let parsed: serde_yaml::Value = serde_yaml::from_str(schema).unwrap(); + assert!(parsed.get("fields").is_some()); + assert!(parsed.get("description").is_some()); + assert!(parsed.get("available").is_some()); + } + + #[test] + fn test_clone_box() { + let provider = MiniMaxProvider { + api_key: "key".to_string(), + enabled: true, + enabled_models: vec!["MiniMax-M2.7".to_string()], + custom_models: HashMap::new(), + }; + let 
cloned = provider.clone_box(); + assert_eq!(cloned.name(), "minimax"); + assert_eq!(cloned.display_name(), "MiniMax"); + } + + #[test] + fn test_settings_apply_masked_key_not_overwritten() { + let mut provider = MiniMaxProvider { + api_key: "original-key".to_string(), + enabled: false, + enabled_models: Vec::new(), + custom_models: HashMap::new(), + }; + let yaml: serde_yaml::Value = serde_yaml::from_str(r#" + api_key: "***" + enabled: true + "#).unwrap(); + provider.provider_settings_apply(yaml).unwrap(); + assert_eq!(provider.api_key, "original-key"); + assert!(provider.enabled); + } +} diff --git a/refact-agent/engine/src/providers/mod.rs b/refact-agent/engine/src/providers/mod.rs index 136de0f2c..b161d2469 100644 --- a/refact-agent/engine/src/providers/mod.rs +++ b/refact-agent/engine/src/providers/mod.rs @@ -16,6 +16,7 @@ mod lmstudio; mod vllm; mod groq; mod deepseek; +mod minimax; mod xai; mod xai_responses; mod google_gemini; diff --git a/refact-agent/engine/src/providers/pricing.rs b/refact-agent/engine/src/providers/pricing.rs index 0da50fe66..0c515b833 100644 --- a/refact-agent/engine/src/providers/pricing.rs +++ b/refact-agent/engine/src/providers/pricing.rs @@ -389,3 +389,28 @@ pub fn groq_pricing(model_id: &str) -> Option { _ => None, } } + +pub fn minimax_pricing(model_id: &str) -> Option { + let id = model_id.to_lowercase(); + match id.as_str() { + s if s.contains("m2.7") || s.contains("m2-7") => Some(ModelPricing { + prompt: 1.00, + generated: 4.00, + cache_read: None, + cache_creation: None, + }), + s if (s.contains("m2.5") || s.contains("m2-5")) && s.contains("highspeed") => Some(ModelPricing { + prompt: 0.80, + generated: 3.00, + cache_read: None, + cache_creation: None, + }), + s if s.contains("m2.5") || s.contains("m2-5") => Some(ModelPricing { + prompt: 0.80, + generated: 3.00, + cache_read: None, + cache_creation: None, + }), + _ => None, + } +} diff --git a/refact-agent/engine/src/providers/registry.rs 
b/refact-agent/engine/src/providers/registry.rs index 349924d90..fc19e0da3 100644 --- a/refact-agent/engine/src/providers/registry.rs +++ b/refact-agent/engine/src/providers/registry.rs @@ -13,6 +13,7 @@ use crate::providers::{ vllm::VLLMProvider, groq::GroqProvider, deepseek::DeepseekProvider, + minimax::MiniMaxProvider, xai::XAIProvider, xai_responses::XAIResponsesProvider, google_gemini::GoogleGeminiProvider, @@ -32,6 +33,7 @@ pub const PROVIDER_NAMES: &[&str] = &[ "vllm", "groq", "deepseek", + "minimax", "xai", "xai_responses", "google_gemini", @@ -52,6 +54,7 @@ pub fn create_provider(name: &str) -> Option> { "vllm" => Some(Box::new(VLLMProvider::default())), "groq" => Some(Box::new(GroqProvider::default())), "deepseek" => Some(Box::new(DeepseekProvider::default())), + "minimax" => Some(Box::new(MiniMaxProvider::default())), "xai" => Some(Box::new(XAIProvider::default())), "xai_responses" => Some(Box::new(XAIResponsesProvider::default())), "google_gemini" => Some(Box::new(GoogleGeminiProvider::default())),