@@ -13,11 +13,11 @@ import com.qmth.ops.biz.domain.LlmPromptTemplate;
 import com.qmth.ops.biz.service.LlmClientService;
 import com.qmth.ops.biz.service.LlmOrgConfigService;
 import com.qmth.ops.biz.service.LlmPromptTemplateService;
+import org.apache.commons.lang3.StringUtils;
 import org.springframework.validation.annotation.Validated;
 import org.springframework.web.bind.annotation.*;
 
 import javax.annotation.Resource;
-import java.util.Map;
 
 @RestController
 @Aac(auth = true, signType = SignatureType.SECRET)
@@ -48,24 +48,26 @@ public class LlmController {
 
     @PostMapping(AiConstants.LLM_CHAT_TEMPLATE_PATH)
     public ChatResult chatTemplate(@RequestAttribute AccessOrg accessOrg,
-            @RequestHeader(AiConstants.LLM_APP_TYPE_HEADER) LlmAppType type, @RequestBody Map<String, Object> param)
+            @RequestHeader(AiConstants.LLM_APP_TYPE_HEADER) LlmAppType type, @RequestBody Object param)
             throws Exception {
         LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
         if (config == null || config.getLeftCount() <= 0) {
             throw new ForbiddenException(
                     "Chat api is disabled or exhausted for org=" + accessOrg.getOrg().getCode() + ", app_type=" + type);
         }
-        LlmPromptTemplate llmPromptTemplate = llmPromptTemplateService.findByModelAndAppType(config.getModelId(), type);
+        LlmPromptTemplate llmPromptTemplate = llmPromptTemplateService.findById(config.getPromptId());
         if (llmPromptTemplate == null) {
             throw new NotFoundException(
                     "Chat prompt template not found for app_type=" + type + ", modelId=" + config.getModelId());
         }
         ChatRequest request = new ChatRequest();
-        if (llmPromptTemplate.getSystem() != null) {
-            request.addMessage(ChatRole.system, FreemarkerUtil.getValue(llmPromptTemplate.getSystem(), param, ""));
+        String systemMessage = FreemarkerUtil.getValue(llmPromptTemplate.getSystem(), param, null);
+        String userMessage = FreemarkerUtil.getValue(llmPromptTemplate.getUser(), param, null);
+        if (StringUtils.isNotBlank(systemMessage)) {
+            request.addMessage(ChatRole.system, systemMessage);
         }
-        if (llmPromptTemplate.getUser() != null) {
-            request.addMessage(ChatRole.user, FreemarkerUtil.getValue(llmPromptTemplate.getUser(), param, ""));
+        if (StringUtils.isNotBlank(userMessage)) {
+            request.addMessage(ChatRole.user, userMessage);
         }
         ChatResult result = llmClientService.chat(request, config.getModelId());
         llmOrgConfigService.consume(config);
@@ -90,8 +92,7 @@ public class LlmController {
         PromptTemplate template = new PromptTemplate();
         LlmOrgConfig config = llmOrgConfigService.findByOrgAndAppType(accessOrg.getOrg().getId(), type);
         if (config != null) {
-            LlmPromptTemplate llmPromptTemplate = llmPromptTemplateService
-                    .findByModelAndAppType(config.getModelId(), type);
+            LlmPromptTemplate llmPromptTemplate = llmPromptTemplateService.findById(config.getPromptId());
             if (llmPromptTemplate != null) {
                 template.setSystem(llmPromptTemplate.getSystem());
                 template.setUser(llmPromptTemplate.getUser());
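
Note on the request-body type change: FreeMarker's Template.process(Object dataModel, Writer out) accepts any object its object wrapper can handle (a Map, a JavaBean, etc.), which is why the controller can relax the body from Map<String, Object> to Object and still render the prompt templates. The sketch below is a minimal, hypothetical stand-in for FreemarkerUtil.getValue(template, model, defaultValue); the real utility is not part of this diff and may differ.

    // Hypothetical stand-in for the project's FreemarkerUtil.getValue (not shown in this diff).
    import freemarker.template.Configuration;
    import freemarker.template.Template;

    import java.io.StringReader;
    import java.io.StringWriter;

    public final class FreemarkerRenderSketch {

        private static final Configuration CFG = new Configuration(Configuration.VERSION_2_3_31);

        // Renders an inline template string against any data model (Map or bean);
        // returns defaultValue if the template is null or rendering fails.
        public static String getValue(String templateText, Object model, String defaultValue) {
            if (templateText == null) {
                return defaultValue;
            }
            try {
                Template template = new Template("inline", new StringReader(templateText), CFG);
                StringWriter out = new StringWriter();
                // FreeMarker wraps Maps and JavaBeans alike, so an arbitrary @RequestBody Object works here.
                template.process(model, out);
                return out.toString();
            } catch (Exception e) {
                return defaultValue;
            }
        }
    }

With a null default value and the new StringUtils.isNotBlank guard, a prompt part that is missing or renders to whitespace only is skipped rather than sent to the model as an empty message (isNotBlank treats null, empty, and whitespace-only strings as blank).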