Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions Agent/AgentViewModel/Core/AgentViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -292,6 +292,19 @@ final class AgentViewModel {
var qwenModels: [OpenAIModelInfo] = []
var isFetchingQwenModels = false

// MARK: - MiniMax

/// MiniMax API key; every change is written straight through to the keychain.
var miniMaxAPIKey: String = KeychainService.shared.getMiniMaxAPIKey() ?? "" {
didSet { KeychainService.shared.setMiniMaxAPIKey(miniMaxAPIKey) }
}

/// Selected MiniMax model id, persisted under the "miniMaxModel" UserDefaults key.
/// Falls back to "MiniMax-M2.7" when nothing has been saved yet.
var miniMaxModel: String = UserDefaults.standard.string(forKey: "miniMaxModel") ?? "MiniMax-M2.7" {
didSet { UserDefaults.standard.set(miniMaxModel, forKey: "miniMaxModel") }
}

/// Models offered in pickers; seeded with the static defaults until a live fetch replaces them.
var miniMaxModels: [OpenAIModelInfo] = Self.defaultMiniMaxModels
/// True while fetchMiniMaxModels() has a request in flight (drives spinner / disables the refresh button).
var isFetchingMiniMaxModels = false

// MARK: - Google Gemini

var geminiAPIKey: String = KeychainService.shared.getGeminiAPIKey() ?? "" {
Expand Down Expand Up @@ -420,6 +433,9 @@ final class AgentViewModel {
/// Sampling temperature for Grok requests, persisted to UserDefaults; defaults to 0.2.
var grokTemperature: Double = UserDefaults.standard.object(forKey: "grokTemperature") as? Double ?? 0.2 {
didSet { UserDefaults.standard.set(grokTemperature, forKey: "grokTemperature") }
}
/// Sampling temperature for MiniMax requests, persisted to UserDefaults; defaults to 1.0
/// (same value the provider config registers in LLMProviderSetup).
var miniMaxTemperature: Double = UserDefaults.standard.object(forKey: "miniMaxTemperature") as? Double ?? 1.0 {
didSet { UserDefaults.standard.set(miniMaxTemperature, forKey: "miniMaxTemperature") }
}

/// Max output tokens per provider. 0 = let provider decide (omit from request).
/// Claude API requires max_tokens so 0 defaults to 16384 at the service level.
Expand Down
1 change: 1 addition & 0 deletions Agent/AgentViewModel/Core/Colors.swift
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ extension AgentViewModel {
case .lmStudio: return lmStudioTemperature
case .zAI: return zAITemperature
case .bigModel: return zAITemperature
case .miniMax: return miniMaxTemperature
case .qwen: return openAITemperature
case .gemini: return geminiTemperature
case .grok: return grokTemperature
Expand Down
7 changes: 7 additions & 0 deletions Agent/AgentViewModel/Features/DefaultModels.swift
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,13 @@ extension AgentViewModel {
OpenAIModelInfo(id: "mistralai/Mistral-Small-24B-Instruct-2501", name: "Mistral Small 24B"),
]

// MARK: - MiniMax

/// Bundled fallback list, shown until (or instead of) a live /models fetch succeeds.
/// The display name intentionally mirrors the raw model id.
nonisolated static let defaultMiniMaxModels: [OpenAIModelInfo] =
    ["MiniMax-M2.7", "MiniMax-M2.7-highspeed"].map { OpenAIModelInfo(id: $0, name: $0) }

// MARK: - Ollama (Cloud)

nonisolated static let defaultOllamaModels: [OllamaModelInfo] = [
Expand Down
26 changes: 26 additions & 0 deletions Agent/AgentViewModel/Features/ModelFetching.swift
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,31 @@ extension AgentViewModel {
}
}

/// Refreshes the MiniMax model list from the OpenAI-compatible /models endpoint.
/// Falls back to `defaultMiniMaxModels` when no key is set, the response is empty,
/// or the request fails; keeps `miniMaxModel` pointing at an available model.
func fetchMiniMaxModels() {
    // Without an API key there is nothing to query — restore the bundled defaults.
    guard !miniMaxAPIKey.isEmpty else {
        miniMaxModels = Self.defaultMiniMaxModels
        return
    }
    isFetchingMiniMaxModels = true
    Task {
        // Clear the in-flight flag on every exit path, including throws.
        defer { isFetchingMiniMaxModels = false }
        do {
            let fetched = try await Self.fetchOpenAICompatibleModels(
                baseURL: "https://api.minimax.io/v1",
                apiKey: miniMaxAPIKey
            )
            miniMaxModels = fetched.isEmpty ? Self.defaultMiniMaxModels : fetched
            // Keep the current selection only if it still exists in the new list.
            let availableIDs = miniMaxModels.map(\.id)
            let selectionIsValid = !miniMaxModel.isEmpty
                && (availableIDs.isEmpty || availableIDs.contains(miniMaxModel))
            if !selectionIsValid {
                miniMaxModel = availableIDs.first ?? "MiniMax-M2.7"
            }
        } catch {
            appendLog("Failed to fetch MiniMax models: \(error.localizedDescription)")
            miniMaxModels = Self.defaultMiniMaxModels
        }
    }
}

// MARK: - Static API Fetch Helpers

private nonisolated static func fetchOpenAIModelsFromAPI(apiKey: String) async throws -> [OpenAIModelInfo] {
Expand Down Expand Up @@ -775,6 +800,7 @@ extension AgentViewModel {
case .mistral: if force || mistralModels.isEmpty { fetchMistralModels() }
case .codestral: if force || codestralModels.isEmpty { fetchCodestralModels() }
case .vibe: if force || vibeModels.isEmpty { fetchVibeModels() }
case .miniMax: if force || miniMaxModels.isEmpty { fetchMiniMaxModels() }
case .bigModel: break
default: break
}
Expand Down
5 changes: 5 additions & 0 deletions Agent/AgentViewModel/Features/ScriptTabs.swift
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ extension AgentViewModel {
case .lmStudio: return lmStudioModel
case .zAI: return zAIModel.replacingOccurrences(of: ":v", with: "")
case .bigModel: return bigModelModel.replacingOccurrences(of: ":v", with: "")
case .miniMax: return miniMaxModel
case .qwen: return qwenModel
case .gemini: return geminiModel
case .grok: return grokModel
Expand All @@ -95,6 +96,7 @@ extension AgentViewModel {
case .lmStudio: return lmStudioAPIKey
case .zAI: return zAIAPIKey
case .bigModel: return bigModelAPIKey
case .miniMax: return miniMaxAPIKey
case .qwen: return qwenAPIKey
case .gemini: return geminiAPIKey
case .grok: return grokAPIKey
Expand Down Expand Up @@ -141,6 +143,9 @@ extension AgentViewModel {
?? Self.defaultZAIModels.first(where: { $0.id == modelId })?.name ?? modelId
case .bigModel:
return modelId
case .miniMax:
return miniMaxModels.first(where: { $0.id == modelId })?.name
?? Self.defaultMiniMaxModels.first(where: { $0.id == modelId })?.name ?? modelId
case .qwen:
return modelId
case .gemini:
Expand Down
3 changes: 3 additions & 0 deletions Agent/AgentViewModel/TaskExecution/Setup.swift
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ extension AgentViewModel {
case .bigModel:
isVision = bigModelModel.hasSuffix(":v")
modelName = bigModelModel.replacingOccurrences(of: ":v", with: "")
case .miniMax:
modelName = miniMaxModel
isVision = false
case .qwen:
modelName = qwenModel
isVision = Self.isVisionModel(qwenModel)
Expand Down
4 changes: 4 additions & 0 deletions Agent/Services/KeychainService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,10 @@ final class KeychainService: Sendable {
func setQwenAPIKey(_ key: String) { set(key: Self.qwenAPIKeyId, value: key) }
func getQwenAPIKey() -> String? { get(key: Self.qwenAPIKeyId) }

// Keychain identifier for the stored MiniMax API key.
// Renamed to the `<provider>APIKeyId` suffix used by the sibling providers
// (e.g. `qwenAPIKeyId`) so the constant is not confused with the key value itself.
private static let miniMaxAPIKeyId = "com.agent.minimax-api-key"
/// Persists the MiniMax API key to the keychain (overwrites any existing entry).
func setMiniMaxAPIKey(_ key: String) { set(key: Self.miniMaxAPIKeyId, value: key) }
/// Returns the stored MiniMax API key, or nil when none has been saved.
func getMiniMaxAPIKey() -> String? { get(key: Self.miniMaxAPIKeyId) }

private func set(key: String, value: String) {
guard let data = value.data(using: .utf8) else { return }
delete(key: key)
Expand Down
13 changes: 12 additions & 1 deletion Agent/Services/LLMProviderSetup.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ enum LLMProviderSetup {

static func registerAllProviders() {
LLMRegistry.shared.registerAll([
claude, openAI, gemini, grok, mistral, codestral, vibe, deepSeek, huggingFace, zAI, bigModel, qwen,
claude, openAI, gemini, grok, mistral, codestral, vibe, deepSeek, huggingFace, miniMax, zAI, bigModel, qwen,
ollama, localOllama, vLLM, lmStudio, appleIntelligence
])
}
Expand Down Expand Up @@ -57,6 +57,17 @@ enum LLMProviderSetup {
capabilities: [.streaming, .tools, .systemPrompt]
)

/// MiniMax: cloud provider speaking the OpenAI-compatible wire protocol.
/// Registered with temperature 1.0, matching the view-model's miniMaxTemperature default.
static let miniMax = LLMProviderConfig(
id: "miniMax", displayName: "MiniMax",
kind: .cloudAPI, apiProtocol: .openAI,
endpoint: LLMEndpoint(
chatURL: "https://api.minimax.io/v1/chat/completions",
modelsURL: "https://api.minimax.io/v1/models"
),
capabilities: [.streaming, .tools, .systemPrompt],
temperature: 1.0
)

static let zAI = LLMProviderConfig(
id: "zAI", displayName: "Z.ai",
kind: .cloudAPI, apiProtocol: .openAI,
Expand Down
1 change: 1 addition & 0 deletions Agent/Views/Output/ThinkingIndicatorView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,7 @@ struct ThinkingIndicatorView: View {
case .grok: return 2_000_000
case .zAI: return 128_000
case .bigModel: return 128_000
case .miniMax: return 1_000_000
case .qwen: return 131_072
case .mistral: return 256_000
case .codestral: return 256_000
Expand Down
1 change: 1 addition & 0 deletions Agent/Views/Settings/FallbackChainView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ struct FallbackChainView: View {
case .codestral: if !viewModel.codestralModel.isEmpty { return viewModel.codestralModel }
case .vibe: if !viewModel.vibeModel.isEmpty { return viewModel.vibeModel }
case .bigModel: if !viewModel.bigModelModel.isEmpty { return viewModel.bigModelModel }
case .miniMax: if !viewModel.miniMaxModel.isEmpty { return viewModel.miniMaxModel }
case .foundationModel: return "Apple Intelligence"
}
// Fall back to the first dynamically-fetched model for this provider
Expand Down
43 changes: 43 additions & 0 deletions Agent/Views/Settings/SettingsView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ struct SettingsView: View {
case .lmStudio: return $viewModel.lmStudioTemperature
case .zAI: return $viewModel.zAITemperature
case .bigModel: return $viewModel.zAITemperature
case .miniMax: return $viewModel.miniMaxTemperature
case .qwen: return $viewModel.openAITemperature
case .gemini: return $viewModel.geminiTemperature
case .grok: return $viewModel.grokTemperature
Expand Down Expand Up @@ -275,6 +276,48 @@ struct SettingsView: View {
.textFieldStyle(.roundedBorder)
}
}
} else if viewModel.selectedProvider == .miniMax {
VStack(alignment: .leading, spacing: 10) {
Text("MiniMax API")
.font(.headline)

VStack(alignment: .leading, spacing: 4) {
Text("API Key").font(.caption).foregroundStyle(.secondary)
LockedSecureField(text: $viewModel.miniMaxAPIKey, placeholder: "MiniMax API key", lockKey: "lock.miniMaxAPIKey")
}

VStack(alignment: .leading, spacing: 4) {
Text("Model").font(.caption).foregroundStyle(.secondary)
HStack {
if viewModel.miniMaxModels.isEmpty {
TextField("Model name", text: $viewModel.miniMaxModel)
.textFieldStyle(.roundedBorder)
} else {
Picker("Model", selection: $viewModel.miniMaxModel) {
ForEach(viewModel.miniMaxModels) { model in
Text(model.name).tag(model.id)
}
}
.labelsHidden()
}

Button {
viewModel.fetchMiniMaxModels()
} label: {
if viewModel.isFetchingMiniMaxModels {
ProgressView()
.controlSize(.small)
} else {
Image(systemName: "arrow.clockwise")
}
}
.buttonStyle(.bordered)
.controlSize(.small)
.disabled(viewModel.isFetchingMiniMaxModels)
.help("Fetch available models")
}
}
}
} else if viewModel.selectedProvider == .qwen {
VStack(alignment: .leading, spacing: 10) {
Text("Qwen (DashScope)")
Expand Down
9 changes: 9 additions & 0 deletions Agent/Views/Tabs/NewMainTabSheet.swift
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,14 @@ struct NewMainTabSheet: View {
TextField("Model (e.g. glm-4.7)", text: $selectedModelId)
.textFieldStyle(.roundedBorder)

case .miniMax:
modelPickerWithFetch(
models: viewModel.miniMaxModels,
fallbackBinding: $selectedModelId,
isFetching: viewModel.isFetchingMiniMaxModels,
fetch: { viewModel.fetchModelsIfNeeded(for: .miniMax, force: true) }
)

case .qwen:
TextField("Model (e.g. qwen-plus)", text: $selectedModelId)
.textFieldStyle(.roundedBorder)
Expand Down Expand Up @@ -286,6 +294,7 @@ struct NewMainTabSheet: View {
case .lmStudio: return viewModel.lmStudioModel
case .zAI: return viewModel.zAIModel
case .bigModel: return "glm-4.7"
case .miniMax: return viewModel.miniMaxModel.isEmpty ? "MiniMax-M2.7" : viewModel.miniMaxModel
case .qwen: return "qwen-plus"
case .gemini: return viewModel.geminiModel
case .grok: return viewModel.grokModel
Expand Down