From bc56867046fbc60b15838c60b66dd36882f8fa32 Mon Sep 17 00:00:00 2001
From: Xinrui
Date: Thu, 25 Dec 2025 19:10:00 +0800
Subject: [PATCH] add glm-4.7 and minimax-m2.1

---
 .../aihubmix/models/coding-glm-4.7-free.toml  | 25 ++++++++++++++++++
 .../models/coding-minimax-m2.1-free.toml      | 21 +++++++++++++++
 providers/aihubmix/models/glm-4.7.toml        | 26 +++++++++++++++++++
 providers/aihubmix/models/minimax-m2.1.toml   | 21 +++++++++++++++
 4 files changed, 93 insertions(+)
 create mode 100644 providers/aihubmix/models/coding-glm-4.7-free.toml
 create mode 100644 providers/aihubmix/models/coding-minimax-m2.1-free.toml
 create mode 100644 providers/aihubmix/models/glm-4.7.toml
 create mode 100644 providers/aihubmix/models/minimax-m2.1.toml

diff --git a/providers/aihubmix/models/coding-glm-4.7-free.toml b/providers/aihubmix/models/coding-glm-4.7-free.toml
new file mode 100644
index 00000000..8c5ee9d2
--- /dev/null
+++ b/providers/aihubmix/models/coding-glm-4.7-free.toml
@@ -0,0 +1,25 @@
+name = "coding-glm-4.7-free"
+family = "coding-glm-4.7-free"
+release_date = "2025-12-22"
+last_updated = "2025-12-22"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+knowledge = "2025-04"
+open_weights = true
+
+[interleaved]
+field = "reasoning_content"
+
+[cost]
+input = 0
+output = 0
+
+[limit]
+context = 204800
+output = 131072
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/aihubmix/models/coding-minimax-m2.1-free.toml b/providers/aihubmix/models/coding-minimax-m2.1-free.toml
new file mode 100644
index 00000000..7e088a93
--- /dev/null
+++ b/providers/aihubmix/models/coding-minimax-m2.1-free.toml
@@ -0,0 +1,21 @@
+name = "coding-minimax-m2.1-free"
+family = "coding-minimax-m2.1-free"
+release_date = "2025-12-23"
+last_updated = "2025-12-23"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = true
+
+[cost]
+input = 0
+output = 0
+
+[limit]
+context = 204_800
+output = 131_072
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/aihubmix/models/glm-4.7.toml b/providers/aihubmix/models/glm-4.7.toml
new file mode 100644
index 00000000..48859a1f
--- /dev/null
+++ b/providers/aihubmix/models/glm-4.7.toml
@@ -0,0 +1,26 @@
+name = "glm-4.7"
+family = "glm-4.7"
+release_date = "2025-12-22"
+last_updated = "2025-12-22"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+knowledge = "2025-04"
+open_weights = true
+
+[interleaved]
+field = "reasoning_content"
+
+[cost]
+input = 0.27
+output = 1.10
+cache_read = 0.054
+
+[limit]
+context = 204800
+output = 131072
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/aihubmix/models/minimax-m2.1.toml b/providers/aihubmix/models/minimax-m2.1.toml
new file mode 100644
index 00000000..0328cad6
--- /dev/null
+++ b/providers/aihubmix/models/minimax-m2.1.toml
@@ -0,0 +1,21 @@
+name = "minimax-m2.1"
+family = "minimax-m2.1"
+release_date = "2025-12-23"
+last_updated = "2025-12-23"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+open_weights = true
+
+[cost]
+input = 0.29
+output = 1.15
+
+[limit]
+context = 204_800
+output = 131_072
+
+[modalities]
+input = ["text"]
+output = ["text"]
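
Note on consuming these definitions: each new file is plain TOML, so any standard TOML parser can read it. A minimal sketch follows, assuming Python 3.11+ (for the built-in tomllib module) and a checkout where the repository-relative path from the diff is valid; the field checks simply mirror values added in this patch and are illustrative, not a required validation step.

    # Sketch: load one of the model definition files added in this patch
    # and sanity-check a few fields. Assumes Python 3.11+ (tomllib is in
    # the standard library) and that the script runs from the repo root.
    import tomllib

    with open("providers/aihubmix/models/glm-4.7.toml", "rb") as f:
        model = tomllib.load(f)

    # Values below mirror what the patch adds for glm-4.7.
    assert model["name"] == "glm-4.7"
    assert model["limit"]["context"] == 204_800
    assert model["cost"]["input"] == 0.27
    print(model["name"], model["modalities"]["input"], model["modalities"]["output"])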