diff --git a/.env.template b/.env.template
index 96dedddc8aa..1f52d425052 100644
--- a/.env.template
+++ b/.env.template
@@ -76,6 +76,13 @@ ANTHROPIC_URL=
### (optional)
WHITE_WEBDAV_ENDPOINTS=
+### MiniMax Api key (optional)
+MINIMAX_API_KEY=
+
+### MiniMax Api url (optional)
+# Default: https://api.minimax.io
+MINIMAX_URL=
+
### siliconflow Api key (optional)
SILICONFLOW_API_KEY=
diff --git a/README.md b/README.md
index 01a223d3b63..25bfe3ae29d 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ English / [简体中文](./README_CN.md)
-✨ Light and Fast AI Assistant,with Claude, DeepSeek, GPT4 & Gemini Pro support.
+✨ Light and Fast AI Assistant, with Claude, DeepSeek, GPT4, Gemini Pro & MiniMax support.
[![Saas][Saas-image]][saas-url]
[![Web][Web-image]][web-url]
@@ -282,6 +282,14 @@ DeepSeek Api Key.
DeepSeek Api Url.
+### `MINIMAX_API_KEY` (optional)
+
+MiniMax Api Key. Available at [MiniMax Platform](https://platform.minimaxi.com/).
+
+### `MINIMAX_URL` (optional)
+
+MiniMax Api Url.
+
### `HIDE_USER_API_KEY` (optional)
> Default: Empty
@@ -367,6 +375,14 @@ SiliconFlow API Key.
SiliconFlow API URL.
+### `MINIMAX_API_KEY` (optional)
+
+MiniMax API Key, available from [MiniMax Platform](https://platform.minimaxi.com/).
+
+### `MINIMAX_URL` (optional)
+
+MiniMax API URL.
+
### `AI302_API_KEY` (optional)
302.AI API Key.
diff --git a/README_CN.md b/README_CN.md
index f4c441ad006..6ed085fa63d 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -209,6 +209,14 @@ DeepSeek Api Key.
DeepSeek Api Url.
+### `MINIMAX_API_KEY` (可选)
+
+MiniMax Api Key,可在 [MiniMax 开放平台](https://platform.minimaxi.com/) 获取。
+
+### `MINIMAX_URL` (可选)
+
+MiniMax Api Url。
+
### `HIDE_USER_API_KEY` (可选)
如果你不想让用户自行填入 API Key,将此环境变量设置为 1 即可。
diff --git a/app/api/[provider]/[...path]/route.ts b/app/api/[provider]/[...path]/route.ts
index e8af34f29f8..642127684f0 100644
--- a/app/api/[provider]/[...path]/route.ts
+++ b/app/api/[provider]/[...path]/route.ts
@@ -15,6 +15,7 @@ import { handle as siliconflowHandler } from "../../siliconflow";
import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";
+import { handle as minimaxHandler } from "../../minimax";
import { handle as ai302Handler } from "../../302ai";
async function handle(
@@ -51,6 +52,8 @@ async function handle(
return chatglmHandler(req, { params });
case ApiPath.SiliconFlow:
return siliconflowHandler(req, { params });
+ case ApiPath.MiniMax:
+ return minimaxHandler(req, { params });
case ApiPath.OpenAI:
return openaiHandler(req, { params });
case ApiPath["302.AI"]:
diff --git a/app/api/auth.ts b/app/api/auth.ts
index 8c78c70c865..145e3344fb8 100644
--- a/app/api/auth.ts
+++ b/app/api/auth.ts
@@ -104,6 +104,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.SiliconFlow:
systemApiKey = serverConfig.siliconFlowApiKey;
break;
+ case ModelProvider.MiniMax:
+ systemApiKey = serverConfig.minimaxApiKey;
+ break;
case ModelProvider.GPT:
default:
if (req.nextUrl.pathname.includes("azure/deployments")) {
diff --git a/app/api/minimax.ts b/app/api/minimax.ts
new file mode 100644
index 00000000000..e9a3bb452eb
--- /dev/null
+++ b/app/api/minimax.ts
@@ -0,0 +1,127 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+ MINIMAX_BASE_URL,
+ ApiPath,
+ ModelProvider,
+ ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelNotavailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+ req: NextRequest,
+ { params }: { params: { path: string[] } },
+) {
+ console.log("[MiniMax Route] params ", params);
+
+ if (req.method === "OPTIONS") {
+ return NextResponse.json({ body: "OK" }, { status: 200 });
+ }
+
+ const authResult = auth(req, ModelProvider.MiniMax);
+ if (authResult.error) {
+ return NextResponse.json(authResult, {
+ status: 401,
+ });
+ }
+
+ try {
+ const response = await request(req);
+ return response;
+ } catch (e) {
+ console.error("[MiniMax] ", e);
+ return NextResponse.json(prettyObject(e));
+ }
+}
+
+async function request(req: NextRequest) {
+ const controller = new AbortController();
+
+ let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.MiniMax, "");
+
+ let baseUrl = serverConfig.minimaxUrl || MINIMAX_BASE_URL;
+
+ if (!baseUrl.startsWith("http")) {
+ baseUrl = `https://${baseUrl}`;
+ }
+
+ if (baseUrl.endsWith("/")) {
+ baseUrl = baseUrl.slice(0, -1);
+ }
+
+ console.log("[Proxy] ", path);
+ console.log("[Base Url]", baseUrl);
+
+ const timeoutId = setTimeout(
+ () => {
+ controller.abort();
+ },
+ 10 * 60 * 1000,
+ );
+
+ const fetchUrl = `${baseUrl}${path}`;
+ const fetchOptions: RequestInit = {
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: req.headers.get("Authorization") ?? "",
+ },
+ method: req.method,
+ body: req.body,
+ redirect: "manual",
+ // @ts-ignore
+ duplex: "half",
+ signal: controller.signal,
+ };
+
+ // #1815 try to refuse some request to some models
+ if (serverConfig.customModels && req.body) {
+ try {
+ const clonedBody = await req.text();
+ fetchOptions.body = clonedBody;
+
+ const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+ // not undefined and is false
+ if (
+ isModelNotavailableInServer(
+ serverConfig.customModels,
+ jsonBody?.model as string,
+ ServiceProvider.MiniMax as string,
+ )
+ ) {
+ return NextResponse.json(
+ {
+ error: true,
+ message: `you are not allowed to use ${jsonBody?.model} model`,
+ },
+ {
+ status: 403,
+ },
+ );
+ }
+ } catch (e) {
+ console.error(`[MiniMax] filter`, e);
+ }
+ }
+ try {
+ const res = await fetch(fetchUrl, fetchOptions);
+
+ // to prevent browser prompt for credentials
+ const newHeaders = new Headers(res.headers);
+ newHeaders.delete("www-authenticate");
+ // to disable nginx buffering
+ newHeaders.set("X-Accel-Buffering", "no");
+
+ return new Response(res.body, {
+ status: res.status,
+ statusText: res.statusText,
+ headers: newHeaders,
+ });
+ } finally {
+ clearTimeout(timeoutId);
+ }
+}
diff --git a/app/client/api.ts b/app/client/api.ts
index f60b0e2ad71..abf403536de 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -24,6 +24,7 @@ import { DeepSeekApi } from "./platforms/deepseek";
import { XAIApi } from "./platforms/xai";
import { ChatGLMApi } from "./platforms/glm";
import { SiliconflowApi } from "./platforms/siliconflow";
+import { MiniMaxApi } from "./platforms/minimax";
import { Ai302Api } from "./platforms/ai302";
export const ROLES = ["system", "user", "assistant"] as const;
@@ -174,6 +175,9 @@ export class ClientApi {
case ModelProvider.SiliconFlow:
this.llm = new SiliconflowApi();
break;
+ case ModelProvider.MiniMax:
+ this.llm = new MiniMaxApi();
+ break;
case ModelProvider["302.AI"]:
this.llm = new Ai302Api();
break;
@@ -269,6 +273,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
const isSiliconFlow =
modelConfig.providerName === ServiceProvider.SiliconFlow;
+ const isMiniMax = modelConfig.providerName === ServiceProvider.MiniMax;
const isAI302 = modelConfig.providerName === ServiceProvider["302.AI"];
const isEnabledAccessControl = accessStore.enabledAccessControl();
const apiKey = isGoogle
@@ -291,6 +296,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
? accessStore.chatglmApiKey
: isSiliconFlow
? accessStore.siliconflowApiKey
+ : isMiniMax
+ ? accessStore.minimaxApiKey
: isIflytek
? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
@@ -311,6 +318,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isXAI,
isChatGLM,
isSiliconFlow,
+ isMiniMax,
isAI302,
apiKey,
isEnabledAccessControl,
@@ -340,6 +348,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isXAI,
isChatGLM,
isSiliconFlow,
+ isMiniMax,
isAI302,
apiKey,
isEnabledAccessControl,
@@ -391,6 +400,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
return new ClientApi(ModelProvider.ChatGLM);
case ServiceProvider.SiliconFlow:
return new ClientApi(ModelProvider.SiliconFlow);
+ case ServiceProvider.MiniMax:
+ return new ClientApi(ModelProvider.MiniMax);
case ServiceProvider["302.AI"]:
return new ClientApi(ModelProvider["302.AI"]);
default:
diff --git a/app/client/platforms/minimax.ts b/app/client/platforms/minimax.ts
new file mode 100644
index 00000000000..f29281c3cda
--- /dev/null
+++ b/app/client/platforms/minimax.ts
@@ -0,0 +1,233 @@
+"use client";
+// MiniMax exposes an OpenAI-compatible chat completions API, so this client mirrors the OpenAI-style LLMApi implementation.
+import { ApiPath, MINIMAX_BASE_URL, MiniMax } from "@/app/constant";
+import {
+ useAccessStore,
+ useAppConfig,
+ useChatStore,
+ ChatMessageTool,
+ usePluginStore,
+} from "@/app/store";
+import { streamWithThink } from "@/app/utils/chat";
+import {
+ ChatOptions,
+ getHeaders,
+ LLMApi,
+ LLMModel,
+ SpeechOptions,
+} from "../api";
+import { getClientConfig } from "@/app/config/client";
+import {
+ getMessageTextContent,
+ getMessageTextContentWithoutThinking,
+ getTimeoutMSByModel,
+} from "@/app/utils";
+import { RequestPayload } from "./openai";
+import { fetch } from "@/app/utils/stream";
+
+export class MiniMaxApi implements LLMApi {
+ private disableListModels = true;
+
+ path(path: string): string {
+ const accessStore = useAccessStore.getState();
+
+ let baseUrl = "";
+
+ if (accessStore.useCustomConfig) {
+ baseUrl = accessStore.minimaxUrl;
+ }
+
+ if (baseUrl.length === 0) {
+ const isApp = !!getClientConfig()?.isApp;
+ const apiPath = ApiPath.MiniMax;
+ baseUrl = isApp ? MINIMAX_BASE_URL : apiPath;
+ }
+
+ if (baseUrl.endsWith("/")) {
+ baseUrl = baseUrl.slice(0, baseUrl.length - 1);
+ }
+ if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.MiniMax)) {
+ baseUrl = "https://" + baseUrl;
+ }
+
+ console.log("[Proxy Endpoint] ", baseUrl, path);
+
+ return [baseUrl, path].join("/");
+ }
+
+ extractMessage(res: any) {
+ return res.choices?.at(0)?.message?.content ?? "";
+ }
+
+  speech(options: SpeechOptions): Promise<ArrayBuffer> {
+ throw new Error("Method not implemented.");
+ }
+
+ async chat(options: ChatOptions) {
+ const messages: ChatOptions["messages"] = [];
+ for (const v of options.messages) {
+ if (v.role === "assistant") {
+ const content = getMessageTextContentWithoutThinking(v);
+ messages.push({ role: v.role, content });
+ } else {
+ const content = getMessageTextContent(v);
+ messages.push({ role: v.role, content });
+ }
+ }
+
+ const modelConfig = {
+ ...useAppConfig.getState().modelConfig,
+ ...useChatStore.getState().currentSession().mask.modelConfig,
+ ...{
+ model: options.config.model,
+ providerName: options.config.providerName,
+ },
+ };
+
+ // MiniMax requires temperature in (0.0, 1.0], ensure it's not zero
+ const temperature =
+ modelConfig.temperature <= 0 ? 0.01 : modelConfig.temperature;
+
+ const requestPayload: RequestPayload = {
+ messages,
+ stream: options.config.stream,
+ model: modelConfig.model,
+ temperature: temperature,
+ presence_penalty: modelConfig.presence_penalty,
+ frequency_penalty: modelConfig.frequency_penalty,
+ top_p: modelConfig.top_p,
+ };
+
+ console.log("[Request] minimax payload: ", requestPayload);
+
+ const shouldStream = !!options.config.stream;
+ const controller = new AbortController();
+ options.onController?.(controller);
+
+ try {
+ const chatPath = this.path(MiniMax.ChatPath);
+ const chatPayload = {
+ method: "POST",
+ body: JSON.stringify(requestPayload),
+ signal: controller.signal,
+ headers: getHeaders(),
+ };
+
+ const requestTimeoutId = setTimeout(
+ () => controller.abort(),
+ getTimeoutMSByModel(options.config.model),
+ );
+
+ if (shouldStream) {
+ const [tools, funcs] = usePluginStore
+ .getState()
+ .getAsTools(
+ useChatStore.getState().currentSession().mask?.plugin || [],
+ );
+ return streamWithThink(
+ chatPath,
+ requestPayload,
+ getHeaders(),
+ tools as any,
+ funcs,
+ controller,
+ // parseSSE
+ (text: string, runTools: ChatMessageTool[]) => {
+ const json = JSON.parse(text);
+ const choices = json.choices as Array<{
+ delta: {
+ content: string | null;
+ tool_calls: ChatMessageTool[];
+ reasoning_content: string | null;
+ };
+ }>;
+ const tool_calls = choices[0]?.delta?.tool_calls;
+ if (tool_calls?.length > 0) {
+ const index = tool_calls[0]?.index;
+ const id = tool_calls[0]?.id;
+ const args = tool_calls[0]?.function?.arguments;
+ if (id) {
+ runTools.push({
+ id,
+ type: tool_calls[0]?.type,
+ function: {
+ name: tool_calls[0]?.function?.name as string,
+ arguments: args,
+ },
+ });
+ } else {
+ // @ts-ignore
+ runTools[index]["function"]["arguments"] += args;
+ }
+ }
+ const reasoning = choices[0]?.delta?.reasoning_content;
+ const content = choices[0]?.delta?.content;
+
+ if (
+ (!reasoning || reasoning.length === 0) &&
+ (!content || content.length === 0)
+ ) {
+ return {
+ isThinking: false,
+ content: "",
+ };
+ }
+
+ if (reasoning && reasoning.length > 0) {
+ return {
+ isThinking: true,
+ content: reasoning,
+ };
+ } else if (content && content.length > 0) {
+ return {
+ isThinking: false,
+ content: content,
+ };
+ }
+
+ return {
+ isThinking: false,
+ content: "",
+ };
+ },
+ // processToolMessage, include tool_calls message and tool call results
+ (
+ requestPayload: RequestPayload,
+ toolCallMessage: any,
+ toolCallResult: any[],
+ ) => {
+ // @ts-ignore
+ requestPayload?.messages?.splice(
+ // @ts-ignore
+ requestPayload?.messages?.length,
+ 0,
+ toolCallMessage,
+ ...toolCallResult,
+ );
+ },
+ options,
+ );
+ } else {
+ const res = await fetch(chatPath, chatPayload);
+ clearTimeout(requestTimeoutId);
+
+ const resJson = await res.json();
+ const message = this.extractMessage(resJson);
+ options.onFinish(message, res);
+ }
+ } catch (e) {
+ console.log("[Request] failed to make a chat request", e);
+ options.onError?.(e as Error);
+ }
+ }
+ async usage() {
+ return {
+ used: 0,
+ total: 0,
+ };
+ }
+
+  async models(): Promise<LLMModel[]> {
+ return [];
+ }
+}
diff --git a/app/components/settings.tsx b/app/components/settings.tsx
index 881c12caeb3..0bc273f9834 100644
--- a/app/components/settings.tsx
+++ b/app/components/settings.tsx
@@ -75,6 +75,7 @@ import {
ChatGLM,
DeepSeek,
SiliconFlow,
+ MiniMax,
AI302,
} from "../constant";
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
@@ -1459,6 +1460,47 @@ export function Settings() {
>
);
+  const minimaxConfigComponent = accessStore.provider ===
+    ServiceProvider.MiniMax && (
+    <>
+      <ListItem
+        title={Locale.Settings.Access.MiniMax.Endpoint.Title}
+        subTitle={
+          Locale.Settings.Access.MiniMax.Endpoint.SubTitle +
+          MiniMax.ExampleEndpoint
+        }
+      >
+        <input
+          aria-label={Locale.Settings.Access.MiniMax.Endpoint.Title}
+          type="text"
+          value={accessStore.minimaxUrl}
+          placeholder={MiniMax.ExampleEndpoint}
+          onChange={(e) =>
+            accessStore.update(
+              (access) => (access.minimaxUrl = e.currentTarget.value),
+            )
+          }
+        ></input>
+      </ListItem>
+      <ListItem
+        title={Locale.Settings.Access.MiniMax.ApiKey.Title}
+        subTitle={Locale.Settings.Access.MiniMax.ApiKey.SubTitle}
+      >
+        <PasswordInput
+          aria-label={Locale.Settings.Access.MiniMax.ApiKey.Title}
+          value={accessStore.minimaxApiKey}
+          type="text"
+          placeholder={Locale.Settings.Access.MiniMax.ApiKey.Placeholder}
+          onChange={(e) => {
+            accessStore.update(
+              (access) => (access.minimaxApiKey = e.currentTarget.value),
+            );
+          }}
+        />
+      </ListItem>
+    </>
+  );
+
const ai302ConfigComponent = accessStore.provider === ServiceProvider["302.AI"] && (
<>
)}
diff --git a/app/config/server.ts b/app/config/server.ts
index 14175eadc8c..f7a19f5b0da 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -88,6 +88,10 @@ declare global {
SILICONFLOW_URL?: string;
SILICONFLOW_API_KEY?: string;
+ // minimax only
+ MINIMAX_URL?: string;
+ MINIMAX_API_KEY?: string;
+
// 302.AI only
AI302_URL?: string;
AI302_API_KEY?: string;
@@ -167,6 +171,7 @@ export const getServerSideConfig = () => {
const isXAI = !!process.env.XAI_API_KEY;
const isChatGLM = !!process.env.CHATGLM_API_KEY;
const isSiliconFlow = !!process.env.SILICONFLOW_API_KEY;
+ const isMiniMax = !!process.env.MINIMAX_API_KEY;
const isAI302 = !!process.env.AI302_API_KEY;
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
@@ -251,6 +256,10 @@ export const getServerSideConfig = () => {
siliconFlowUrl: process.env.SILICONFLOW_URL,
siliconFlowApiKey: getApiKey(process.env.SILICONFLOW_API_KEY),
+ isMiniMax,
+ minimaxUrl: process.env.MINIMAX_URL,
+ minimaxApiKey: getApiKey(process.env.MINIMAX_API_KEY),
+
isAI302,
ai302Url: process.env.AI302_URL,
ai302ApiKey: getApiKey(process.env.AI302_API_KEY),
diff --git a/app/constant.ts b/app/constant.ts
index db9842d6027..9caf05e7a86 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -36,6 +36,8 @@ export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
export const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn";
+export const MINIMAX_BASE_URL = "https://api.minimax.io";
+
export const AI302_BASE_URL = "https://api.302.ai";
export const CACHE_URL_PREFIX = "/api/cache";
@@ -74,6 +76,7 @@ export enum ApiPath {
ChatGLM = "/api/chatglm",
DeepSeek = "/api/deepseek",
SiliconFlow = "/api/siliconflow",
+ MiniMax = "/api/minimax",
"302.AI" = "/api/302ai",
}
@@ -133,6 +136,7 @@ export enum ServiceProvider {
ChatGLM = "ChatGLM",
DeepSeek = "DeepSeek",
SiliconFlow = "SiliconFlow",
+ MiniMax = "MiniMax",
"302.AI" = "302.AI",
}
@@ -160,6 +164,7 @@ export enum ModelProvider {
ChatGLM = "ChatGLM",
DeepSeek = "DeepSeek",
SiliconFlow = "SiliconFlow",
+ MiniMax = "MiniMax",
"302.AI" = "302.AI",
}
@@ -265,6 +270,11 @@ export const ChatGLM = {
VideoPath: "api/paas/v4/videos/generations",
};
+export const MiniMax = {
+ ExampleEndpoint: MINIMAX_BASE_URL,
+ ChatPath: "v1/chat/completions",
+};
+
export const SiliconFlow = {
ExampleEndpoint: SILICONFLOW_BASE_URL,
ChatPath: "v1/chat/completions",
@@ -742,6 +752,15 @@ const ai302Models = [
"gemini-2.5-pro",
];
+const minimaxModels = [
+ "MiniMax-M1",
+ "MiniMax-M1-80k",
+ "MiniMax-M2",
+ "MiniMax-M2-80k",
+ "MiniMax-M2.5",
+ "MiniMax-M2.5-80k",
+];
+
let seq = 1000; // 内置的模型序号生成器从1000开始
export const DEFAULT_MODELS = [
...openaiModels.map((name) => ({
@@ -887,6 +906,17 @@ export const DEFAULT_MODELS = [
sorted: 13,
},
})),
+ ...minimaxModels.map((name) => ({
+ name,
+ available: true,
+ sorted: seq++,
+ provider: {
+ id: "minimax",
+ providerName: "MiniMax",
+ providerType: "minimax",
+ sorted: 14,
+ },
+ })),
...siliconflowModels.map((name) => ({
name,
available: true,
@@ -895,7 +925,7 @@ export const DEFAULT_MODELS = [
id: "siliconflow",
providerName: "SiliconFlow",
providerType: "siliconflow",
- sorted: 14,
+ sorted: 15,
},
})),
...ai302Models.map((name) => ({
@@ -906,7 +936,7 @@ export const DEFAULT_MODELS = [
id: "ai302",
providerName: "302.AI",
providerType: "ai302",
- sorted: 15,
+ sorted: 16,
},
})),
] as const;
diff --git a/app/locales/cn.ts b/app/locales/cn.ts
index 2cb7dd1e535..2ffc7fe53c2 100644
--- a/app/locales/cn.ts
+++ b/app/locales/cn.ts
@@ -507,6 +507,17 @@ const cn = {
SubTitle: "样例:",
},
},
+ MiniMax: {
+ ApiKey: {
+ Title: "接口密钥",
+ SubTitle: "使用自定义 MiniMax API Key",
+ Placeholder: "MiniMax API Key",
+ },
+ Endpoint: {
+ Title: "接口地址",
+ SubTitle: "样例:",
+ },
+ },
Stability: {
ApiKey: {
Title: "接口密钥",
diff --git a/app/locales/en.ts b/app/locales/en.ts
index a6d1919045c..197b6fdd8cb 100644
--- a/app/locales/en.ts
+++ b/app/locales/en.ts
@@ -491,6 +491,17 @@ const en: LocaleType = {
SubTitle: "Example: ",
},
},
+ MiniMax: {
+ ApiKey: {
+ Title: "MiniMax API Key",
+ SubTitle: "Use a custom MiniMax API Key",
+ Placeholder: "MiniMax API Key",
+ },
+ Endpoint: {
+ Title: "Endpoint Address",
+ SubTitle: "Example: ",
+ },
+ },
Stability: {
ApiKey: {
Title: "Stability API Key",
diff --git a/app/store/access.ts b/app/store/access.ts
index fd55fbdd3d1..ade20e7d6a3 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -18,6 +18,7 @@ import {
CHATGLM_BASE_URL,
SILICONFLOW_BASE_URL,
AI302_BASE_URL,
+ MINIMAX_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -60,6 +61,8 @@ const DEFAULT_SILICONFLOW_URL = isApp
? SILICONFLOW_BASE_URL
: ApiPath.SiliconFlow;
+const DEFAULT_MINIMAX_URL = isApp ? MINIMAX_BASE_URL : ApiPath.MiniMax;
+
const DEFAULT_AI302_URL = isApp ? AI302_BASE_URL : ApiPath["302.AI"];
const DEFAULT_ACCESS_STATE = {
@@ -135,6 +138,10 @@ const DEFAULT_ACCESS_STATE = {
siliconflowUrl: DEFAULT_SILICONFLOW_URL,
siliconflowApiKey: "",
+ // minimax
+ minimaxUrl: DEFAULT_MINIMAX_URL,
+ minimaxApiKey: "",
+
// 302.AI
ai302Url: DEFAULT_AI302_URL,
ai302ApiKey: "",
@@ -226,6 +233,10 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["siliconflowApiKey"]);
},
+ isValidMiniMax() {
+ return ensure(get(), ["minimaxApiKey"]);
+ },
+
isAuthorized() {
this.fetch();
@@ -245,6 +256,7 @@ export const useAccessStore = createPersistStore(
this.isValidXAI() ||
this.isValidChatGLM() ||
this.isValidSiliconFlow() ||
+ this.isValidMiniMax() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);