|
@@ -3,6 +3,7 @@ package com.qmth.ops.api.controller.ai;
|
|
|
import com.qmth.boot.api.annotation.Aac;
|
|
|
import com.qmth.boot.core.ai.model.AiConstants;
|
|
|
import com.qmth.boot.core.ai.model.llm.*;
|
|
|
+import com.qmth.boot.core.ai.model.llm.endpoint.ChatEndpoint;
|
|
|
import com.qmth.boot.core.exception.ForbiddenException;
|
|
|
import com.qmth.boot.core.exception.NotFoundException;
|
|
|
import com.qmth.boot.tools.freemarker.FreemarkerUtil;
|
|
@@ -32,10 +33,39 @@ public class LlmController {
|
|
|
@Resource
|
|
|
private LlmPromptTemplateService llmPromptTemplateService;
|
|
|
|
|
|
+    /**
+     * Opens a server-side chat endpoint (streaming) for the calling organization.
+     *
+     * Flow: quota-check the org's config for this app type, load the org's prompt
+     * template, render system/user messages from the raw request body via
+     * Freemarker, then delegate to the LLM client with the configured model.
+     *
+     * @param accessOrg authenticated org injected by the auth filter
+     * @param type      LLM app type taken from the request header
+     * @param param     raw request body, used as the Freemarker data model
+     * @return the chat endpoint handle produced by the LLM client service
+     * @throws ForbiddenException if chat is disabled or the call quota is exhausted
+     * @throws NotFoundException  if the configured prompt template does not exist
+     */
+    @PostMapping(AiConstants.LLM_CHAT_ENDPOINT_PATH)
+    public ChatEndpoint chatEndpoint(@RequestAttribute AccessOrg accessOrg,
+            @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type,
+            @RequestBody Object param) throws Exception {
+        // Quota gate: the org must have a config for this app type with calls remaining.
+        LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
+        if (config == null || config.getLeftCount() <= 0) {
+            throw new ForbiddenException(
+                    "Chat api is disabled or exhausted for org=" + accessOrg.getOrg().getCode() + ", app_type=" + type);
+        }
+        LlmPromptTemplate llmPromptTemplate = llmPromptTemplateService.findById(config.getPromptId());
+        if (llmPromptTemplate == null) {
+            // Fix: report the promptId actually used for the lookup; the previous
+            // message printed modelId, which is unrelated to the failed findById call.
+            throw new NotFoundException(
+                    "Chat prompt template not found for app_type=" + type + ", promptId=" + config.getPromptId());
+        }
+        ChatRequest request = new ChatRequest();
+        // Render both template parts against the untyped request body; blank results are skipped below.
+        String systemMessage = FreemarkerUtil.getValue(llmPromptTemplate.getSystem(), param, null);
+        String userMessage = FreemarkerUtil.getValue(llmPromptTemplate.getUser(), param, null);
+        if (StringUtils.isNotBlank(systemMessage)) {
+            request.addMessage(ChatRole.system, systemMessage);
+        }
+        if (StringUtils.isNotBlank(userMessage)) {
+            request.addMessage(ChatRole.user, userMessage);
+        }
+        // Endpoint responses are always streamed here.
+        // TODO(review): make streaming configurable if a non-streaming endpoint is ever required.
+        request.setStream(true);
+
+        return llmClientService.chatEndpoint(request, config.getModelId(), type);
+    }
|
|
|
+
|
|
|
+
|
|
|
@PostMapping(AiConstants.LLM_CHAT_PATH)
|
|
|
public ChatResult chat(@RequestAttribute AccessOrg accessOrg,
|
|
|
- @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type,
|
|
|
- @RequestBody @Validated ChatRequest request) throws Exception {
|
|
|
+ @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type,
|
|
|
+ @RequestBody @Validated ChatRequest request) throws Exception {
|
|
|
LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
|
|
|
if (config == null || config.getLeftCount() <= 0) {
|
|
|
throw new ForbiddenException(
|
|
@@ -76,7 +106,7 @@ public class LlmController {
|
|
|
|
|
|
@PostMapping(AiConstants.LLM_BALANCE_PATH)
|
|
|
public LlmAppBalance balance(@RequestAttribute AccessOrg accessOrg,
|
|
|
- @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type) {
|
|
|
+ @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type) {
|
|
|
LlmAppBalance balance = new LlmAppBalance();
|
|
|
LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
|
|
|
if (config != null) {
|
|
@@ -88,7 +118,7 @@ public class LlmController {
|
|
|
|
|
|
@PostMapping(AiConstants.LLM_PROMPT_TEMPLATE_PATH)
|
|
|
public PromptTemplate getPromptTemplate(@RequestAttribute AccessOrg accessOrg,
|
|
|
- @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type) {
|
|
|
+ @RequestHeader(AiConstants.LLM_APP_TYPE) LlmAppType type) {
|
|
|
PromptTemplate template = new PromptTemplate();
|
|
|
LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
|
|
|
if (config != null) {
|
|
@@ -100,4 +130,5 @@ public class LlmController {
|
|
|
}
|
|
|
return template;
|
|
|
}
|
|
|
+
|
|
|
}
|