// Uses Betalgo.OpenAI
// Uses kimi's web interface via https://github.com/LLM-Red-Team/kimi-free-api
// API calls are consolidated through simple-one-api: https://github.com/fruitbars/simple-one-api
#load "AppLib"
#load "seqlog"
async Task Main(string[] args)
{
Util.SetPassword("seqKey", "xOfpInQDsoJt5Irp2IlN"); // 日志记录
Util.SetPassword("ai_key", "123456"); // 整合入 simple-one-api 的 key
Util.SetPassword("Model", "kimi_silent_search"); // model 中带有 silent_search 可以过滤回复中的引用信息
var app = this.CreateApp(args, $@"./logs/coze_.log", it => EnableSeqLogSystem(it, Util.GetPassword("seqKey"))); // 创建日志
var logger = this.GetLogger(app); // 创建日志
try
{
var options = new OpenAiOptions()
{
ApiKey = Util.GetPassword("ai_key"),
BaseDomain = "http://192.168.0.70:1000"
};
using var openAiService = new OpenAIService(options);
var request = new ChatCompletionCreateRequest
{
Messages = new List<ChatMessage>
{
//ChatMessage.FromSystem("请使用文本格式进行回复"),
ChatMessage.FromUser("正常人类一般几点入睡?")
},
Model = Util.GetPassword("Model")
};
//request.ResponseFormat = new ResponseFormat() { Type = "json" };
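        // Assumption: OpenAI-compatible endpoints usually expect the response_format type "json_object" rather than "json"; whether this proxy accepts "json" is untested here.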
        // Streaming call
        // var streamResult = openAiService.ChatCompletion.CreateCompletionAsStream(request);
        //
        // await foreach (var completion in streamResult)
        // {
        //     this.OutputCompletion(logger, completion);
        // }
        // Direct (non-streaming) conversation
        var completionResult = await openAiService.ChatCompletion.CreateCompletion(request);
        this.OutputCompletion(logger, completionResult);
    }
    catch (Exception exception)
    {
        logger.LogError(exception, "An error occurred during execution");
        throw;
    }
}
void OutputCompletion(ILogger logger, ChatCompletionCreateResponse completionResult)
{
    if (!completionResult.Successful)
    {
        // Check for failure first: on an unsuccessful call, Choices may be empty or null
        var errorMessage = completionResult.Error != null
            ? $"{completionResult.Error.Code} [{completionResult.Error.Message}]"
            : "AI API call failed";
        throw new ApplicationException(errorMessage);
    }
    foreach (var it in completionResult.Choices)
    {
        logger.LogInformation("{Content}", it.Message.ContentCalculated);
    }
}