
Modify core-ai: change the generic response data structure of the chat interface and add a balance query interface

Signed-off-by: luoshi <luoshi@qmth.com.cn>
luoshi 1 year ago
parent
commit
5599391fc1

+ 12 - 7
core-ai/src/main/java/com/qmth/boot/core/ai/client/LlmApiClient.java

@@ -2,10 +2,7 @@ package com.qmth.boot.core.ai.client;
 
 import com.qmth.boot.core.ai.config.LlmApiConfiguration;
 import com.qmth.boot.core.ai.model.AiConstants;
-import com.qmth.boot.core.ai.model.llm.ChatChoice;
-import com.qmth.boot.core.ai.model.llm.ChatRequest;
-import com.qmth.boot.core.ai.model.llm.LlmAppType;
-import com.qmth.boot.core.ai.model.llm.PromptTemplate;
+import com.qmth.boot.core.ai.model.llm.*;
 import com.qmth.boot.core.retrofit.annotatioin.RetrofitClient;
 import com.qmth.boot.core.retrofit.utils.SignatureInfo;
 import retrofit2.http.Body;
@@ -13,14 +10,22 @@ import retrofit2.http.Header;
 import retrofit2.http.POST;
 import retrofit2.http.Tag;
 
-import java.util.List;
-
 /**
  * LLM application service interface
  */
 @RetrofitClient(configuration = LlmApiConfiguration.class)
 public interface LlmApiClient {
 
+    /**
+     * Query the remaining balance of the LLM API
+     *
+     * @param signature Secret-type signature built with the organization's access key (AK)
+     * @param type      LLM application type
+     * @return current balance for the given application type
+     */
+    @POST(AiConstants.LLM_BALANCE_PATH)
+    LlmAppBalance balance(@Tag SignatureInfo signature, @Header(AiConstants.LLM_APP_TYPE_HEADER) LlmAppType type);
+
     /**
      * LLM chat-type request
      *
@@ -30,7 +35,7 @@ public interface LlmApiClient {
      * @return
      */
     @POST(AiConstants.LLM_CHAT_PATH)
-    List<ChatChoice> chat(@Tag SignatureInfo signature, @Header(AiConstants.LLM_APP_TYPE_HEADER) LlmAppType type,
+    ChatResult chat(@Tag SignatureInfo signature, @Header(AiConstants.LLM_APP_TYPE_HEADER) LlmAppType type,
             @Body ChatRequest request);
 
     /**
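
The return type of chat changes from List<ChatChoice> to the new ChatResult wrapper, so existing callers need a small adjustment. A minimal sketch of an adapted caller follows; the method name and the surrounding wiring are illustrative only and not part of this commit:

    // Sketch: client, signature, type and request come from existing application code
    public List<ChatChoice> collectChoices(LlmApiClient client, SignatureInfo signature,
            LlmAppType type, ChatRequest request) {
        // chat(...) now returns the ChatResult wrapper instead of a bare List<ChatChoice>
        ChatResult result = client.chat(signature, type, request);
        return result.getChoices();
    }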

+ 6 - 2
core-ai/src/main/java/com/qmth/boot/core/ai/model/AiConstants.java

@@ -4,7 +4,11 @@ public interface AiConstants {
 
     String LLM_APP_TYPE_HEADER = "llm_app_type";
 
-    String LLM_CHAT_PATH = "/api/ai/llm/chat";
+    String API_PREFIX = "/api/ai";
 
-    String LLM_PROMPT_TEMPLATE_PATH = "/api/ai/llm/prompt_template";
+    String LLM_BALANCE_PATH = API_PREFIX + "/llm/balance";
+
+    String LLM_CHAT_PATH = API_PREFIX + "/llm/chat";
+
+    String LLM_PROMPT_TEMPLATE_PATH = API_PREFIX + "/llm/prompt_template";
 }

+ 27 - 0
core-ai/src/main/java/com/qmth/boot/core/ai/model/llm/ChatResult.java

@@ -0,0 +1,27 @@
+package com.qmth.boot.core.ai.model.llm;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Generic chat result returned by the LLM service
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class ChatResult {
+
+    private List<ChatChoice> choices;
+
+    public ChatResult() {
+        this.choices = new LinkedList<>();
+    }
+
+    public List<ChatChoice> getChoices() {
+        return choices;
+    }
+
+    public void setChoices(List<ChatChoice> choices) {
+        this.choices = choices;
+    }
+}
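
ChatResult is annotated with @JsonIgnoreProperties(ignoreUnknown = true), so responses carrying extra top-level fields still deserialize cleanly and only choices is kept. A small Jackson sketch of that behaviour; the "usage" field below is invented purely for illustration:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.qmth.boot.core.ai.model.llm.ChatResult;

    public class ChatResultDemo {
        public static void main(String[] args) throws Exception {
            // "usage" is not a field of ChatResult; ignoreUnknown = true drops it silently
            String json = "{\"choices\":[],\"usage\":{\"total_tokens\":42}}";
            ChatResult result = new ObjectMapper().readValue(json, ChatResult.class);
            System.out.println(result.getChoices().size()); // prints 0
        }
    }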

+ 36 - 0
core-ai/src/main/java/com/qmth/boot/core/ai/model/llm/LlmAppBalance.java

@@ -0,0 +1,36 @@
+package com.qmth.boot.core.ai.model.llm;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+/**
+ * LLM API balance
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class LlmAppBalance {
+
+    /**
+     * Total number of permitted calls
+     */
+    private int permitCount;
+
+    /**
+     * Number of remaining available calls
+     */
+    private int leftCount;
+
+    public int getPermitCount() {
+        return permitCount;
+    }
+
+    public void setPermitCount(int permitCount) {
+        this.permitCount = permitCount;
+    }
+
+    public int getLeftCount() {
+        return leftCount;
+    }
+
+    public void setLeftCount(int leftCount) {
+        this.leftCount = leftCount;
+    }
+}
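
Together with the new balance endpoint on LlmApiClient, these two counters let callers check the remaining quota before issuing chat requests. A minimal guard sketch, where the method name and the exception choice are illustrative only:

    public void assertQuotaAvailable(LlmApiClient client, SignatureInfo signature, LlmAppType type) {
        // New endpoint from this commit: POST /api/ai/llm/balance
        LlmAppBalance balance = client.balance(signature, type);
        if (balance.getLeftCount() <= 0) {
            throw new IllegalStateException(
                    "LLM quota exhausted: 0 of " + balance.getPermitCount() + " permitted calls left");
        }
    }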