
core-ai consumeBalance api

deason, 1 month ago
parent commit 69ca3c1729

+ 13 - 1
core-ai/src/main/java/com/qmth/boot/core/ai/client/LlmApiClient.java

@@ -62,7 +62,7 @@ public interface LlmApiClient {
                     @Body ChatRequest request);
 
     /**
-     * LLM API balance query
+     * Query the LLM API balance
      *
      * @param baseUrl   API base URL; defaults to the value in the configuration file (optional)
      * @param signature Secret-type signature constructed with the organization AK
@@ -73,6 +73,18 @@ public interface LlmApiClient {
     LlmAppBalance getBalance(@Header(AiConstants.BASE_URL) String baseUrl, @Tag SignatureInfo signature,
                              @Header(AiConstants.LLM_APP_TYPE) LlmAppType type);
 
+    /**
+     * Consume the LLM API balance
+     *
+     * @param baseUrl   API base URL; defaults to the value in the configuration file (optional)
+     * @param signature Secret-type signature constructed with the organization AK
+     * @param type      LLM application type
+     * @return the remaining balance
+     */
+    @POST(AiConstants.LLM_BALANCE_CONSUME_PATH)
+    LlmAppBalance consumeBalance(@Header(AiConstants.BASE_URL) String baseUrl, @Tag SignatureInfo signature,
+                                 @Header(AiConstants.LLM_APP_TYPE) LlmAppType type);
+
     /**
      * Get the LLM prompt template
      *

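For context, a minimal caller-side sketch of the new endpoint (not part of this commit). Only the getBalance/consumeBalance signatures come from the diff above; how the LlmApiClient instance, the SignatureInfo, and the LlmAppType value are obtained is assumed here for illustration.

import com.qmth.boot.core.ai.client.LlmApiClient;
// Imports for LlmAppBalance, LlmAppType and SignatureInfo are omitted because
// their packages are not shown in this diff.

public class BalanceConsumeExample {

    private final LlmApiClient llmApiClient; // assumed to be injected/configured elsewhere

    public BalanceConsumeExample(LlmApiClient llmApiClient) {
        this.llmApiClient = llmApiClient;
    }

    public LlmAppBalance consumeOnce(SignatureInfo signature, LlmAppType type) {
        // Optionally query the current balance first via the existing endpoint.
        LlmAppBalance before = llmApiClient.getBalance(null, signature, type);
        System.out.println("balance before consume: " + before);

        // Consume balance via the new endpoint; a null baseUrl falls back to the
        // value from the configuration file, per the Javadoc above.
        return llmApiClient.consumeBalance(null, signature, type);
    }
}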
+ 2 - 0
core-ai/src/main/java/com/qmth/boot/core/ai/model/AiConstants.java

@@ -12,6 +12,8 @@ public interface AiConstants {
 
     String LLM_BALANCE_PATH = API_PREFIX + "/llm/balance";
 
+    String LLM_BALANCE_CONSUME_PATH = API_PREFIX + "/llm/balance/consume";
+
     String LLM_CHAT_ENDPOINT_PATH = API_PREFIX + "/llm/chat_endpoint";
 
     String LLM_CHAT_PATH = API_PREFIX + "/llm/chat";
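
As a sanity check (not part of the commit), the new constant simply extends the existing balance path with "/consume"; both build on the API_PREFIX defined elsewhere in AiConstants.

import com.qmth.boot.core.ai.model.AiConstants;

public class PathCheck {
    public static void main(String[] args) {
        // LLM_BALANCE_PATH         = API_PREFIX + "/llm/balance"
        // LLM_BALANCE_CONSUME_PATH = API_PREFIX + "/llm/balance/consume"
        System.out.println(AiConstants.LLM_BALANCE_CONSUME_PATH
                .equals(AiConstants.LLM_BALANCE_PATH + "/consume")); // prints: true
    }
}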