MiniMax Large Language Model API
- Every API field annotated with comments
- ChatCompletions text chat API
- Embeddings vectorization API (a hedged sketch follows this list)
- T2A text-to-speech API
- Follows the official docs: single-turn Q&A, Q&A with conversation history, streaming responses
- Supports Swagger and ApiFox documentation
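The examples below only cover ChatCompletions. As a rough illustration of the Embeddings endpoint listed above, here is a minimal sketch: the embeddingsv1 import path, the EmbeddingsRequest fields, and the client.Embeddings method are assumptions modeled on the text/v1 package used in the chat examples, not confirmed API; check the generated code under gen/go and the official MiniMax docs for the real names. A T2A text-to-speech call would follow the same client pattern.
package main
import (
	"context"
	"fmt"
	"os"
	// assumed import path, mirroring the text/v1 layout used in the chat examples
	embeddingsv1 "github.com/ConnectAI-E/go-minimax/gen/go/minimax/embeddings/v1"
	"github.com/ConnectAI-E/go-minimax/minimax"
)
func main() {
	ctx := context.Background()
	// init client
	client, _ := minimax.New(
		minimax.WithApiToken(os.Getenv("TEST_MINIMAX_API_TOKEN")),
		minimax.WithGroupId(os.Getenv("TEST_MINIMAX_GROUP_ID")),
	)
	// embed texts for storage; per the MiniMax embeddings API, Type is "db" for documents and "query" for search queries
	req := &embeddingsv1.EmbeddingsRequest{
		Model: "embo-01",
		Texts: []string{"hi~"},
		Type:  "db",
	}
	res, _ := client.Embeddings(ctx, req) // method and response field names are assumptions
	fmt.Println(res.Vectors)
}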
MiniMax completion
package main
import (
"context"
"fmt"
"os"
textv1 "github.com/ConnectAI-E/go-minimax/gen/go/minimax/text/v1"
"github.com/ConnectAI-E/go-minimax/minimax"
)
func main() {
ctx := context.Background()
	// initialize the client with credentials from the environment
client, _ := minimax.New(
minimax.WithApiToken(os.Getenv("TEST_MINIMAX_API_TOKEN")),
minimax.WithGroupId(os.Getenv("TEST_MINIMAX_GROUP_ID")),
)
	// single-turn chat: send one USER message
req := &textv1.ChatCompletionsRequest{
Messages: []*textv1.Message{
{
SenderType: "USER",
Text: "hi~",
},
},
Model: "abab5-chat",
Temperature: 0.7,
}
res, _ := client.ChatCompletions(ctx, req)
	fmt.Println(res.Choices) // output: 你好!有什么我可以帮助你的吗? ("Hello! Is there anything I can help you with?")
}
MiniMax stream completion
package main
import (
"context"
"errors"
"fmt"
textv1 "github.com/ConnectAI-E/go-minimax/gen/go/minimax/text/v1"
"github.com/ConnectAI-E/go-minimax/minimax"
"io"
"os"
)
func main() {
ctx := context.Background()
	// initialize the client with credentials from the environment
client, _ := minimax.New(
minimax.WithApiToken(os.Getenv("TEST_MINIMAX_API_TOKEN")),
minimax.WithGroupId(os.Getenv("TEST_MINIMAX_GROUP_ID")),
)
	// build the request; the reply is consumed as a stream below
req := &textv1.ChatCompletionsRequest{
Messages: []*textv1.Message{
{
SenderType: "USER",
Text: "hi~",
},
},
Model: "abab5-chat",
Temperature: 0.7,
}
stream, _ := client.ChatCompletionStream(ctx, req)
defer stream.CloseSend()
for {
response, err := stream.Recv()
if errors.Is(err, io.EOF) {
break
}
if err != nil {
fmt.Println(err)
break
}
		fmt.Println(response.Choices[0].Delta) // output: 嗨!有什么我可以帮助您的吗? ("Hi! Is there anything I can help you with?")
}
}
MiniMax history stream completion
package main
import (
"context"
"errors"
"fmt"
textv1 "github.com/ConnectAI-E/go-minimax/gen/go/minimax/text/v1"
"github.com/ConnectAI-E/go-minimax/minimax"
"io"
"os"
)
func main() {
ctx := context.Background()
	// initialize the client with credentials from the environment
client, _ := minimax.New(
minimax.WithApiToken(os.Getenv("TEST_MINIMAX_API_TOKEN")),
minimax.WithGroupId(os.Getenv("TEST_MINIMAX_GROUP_ID")),
)
	// multi-turn chat: alternating USER/BOT messages carry the conversation history
req := &textv1.ChatCompletionsRequest{
		Messages: []*textv1.Message{
			{
				SenderType: "USER",
				Text:       "路卡,今天在干什么呢?", // "Luka, what are you up to today?"
			},
			{
				SenderType: "BOT",
				Text:       "我今天在家里复习功课,准备期末考试呢!", // "I'm at home reviewing for my final exams!"
			},
			{
				SenderType: "USER",
				Text:       "期末考试怎么样,有把握吗?", // "How are the finals looking? Feeling confident?"
			},
		},
Model: "abab5-chat",
Temperature: 0.7,
}
stream, _ := client.ChatCompletionStream(ctx, req)
defer stream.CloseSend()
for {
response, err := stream.Recv()
if errors.Is(err, io.EOF) {
break
}
if err != nil {
fmt.Println(err)
break
}
		fmt.Println(response.Choices[0].Delta) // output: 放轻松,一切尽在掌握中 ("Relax, everything is under control")
}
}