From 5bfc673070408e07b4657bc77145fa344a957068 Mon Sep 17 00:00:00 2001
From: moyangzhan
Date: Mon, 22 Apr 2024 19:04:25 +0800
Subject: [PATCH] fix: knowledgeBase ownerUuid

---
 README.md                                          |  7 +++---
 .../common/interfaces/AbstractLLMService.java      | 25 +++++++++++++------
 .../common/service/KnowledgeBaseService.java       |  1 +
 .../moyz/adi/common/service/RAGService.java        |  2 +-
 4 files changed, 23 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 73b7a3b..e147a2b 100644
--- a/README.md
+++ b/README.md
@@ -99,7 +99,6 @@ Google configuration
 
 update adi_sys_config set value = '{"url":"https://www.googleapis.com/customsearch/v1","key":"my key from cloud.google.com","cx":"my cx from programmablesearchengine.google.com"}' where name = 'google_setting';
 ```
-
 **b. Modify the configuration files**
 
 * postgresql: spring.datasource in application-[dev|prod].xml
@@ -125,8 +124,8 @@ mvn clean package -Dmaven.test.skip=true
 a. Start from the jar:
 
 ```plaintext
-cd adi-bootstrap/target
-nohup java -jar -Xms768m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError adi-chat-0.0.1-SNAPSHOT.jar --spring.profiles.active=[dev|prod] dev/null 2>&1 &
+cd adi-bootstrap/target
+nohup java -jar -Xms768m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError adi-bootstrap-0.0.1-SNAPSHOT.jar --spring.profiles.active=[dev|prod] dev/null 2>&1 &
 ```
 
 b. Start with Docker
@@ -143,7 +142,7 @@ docker run -d \
 
 ## TODO:
 
-Enhance RAG
+Advanced RAG
 
 Add search engines (Bing, Baidu)
 
diff --git a/adi-common/src/main/java/com/moyz/adi/common/interfaces/AbstractLLMService.java b/adi-common/src/main/java/com/moyz/adi/common/interfaces/AbstractLLMService.java
index be72e0c..8842491 100644
--- a/adi-common/src/main/java/com/moyz/adi/common/interfaces/AbstractLLMService.java
+++ b/adi-common/src/main/java/com/moyz/adi/common/interfaces/AbstractLLMService.java
@@ -117,14 +117,10 @@ public abstract class AbstractLLMService {
         }
 
         TokenStream tokenStream;
-        if (StringUtils.isNotBlank(params.getMessageId()) && StringUtils.isNotBlank(params.getSystemMessage())) {
-            tokenStream = chatAssistant.chat(params.getMessageId(), params.getSystemMessage(), params.getUserMessage());
-        } else if (StringUtils.isNotBlank(params.getMessageId()) && StringUtils.isBlank(params.getSystemMessage())) {
-            tokenStream = chatAssistant.chat(params.getMessageId(), params.getUserMessage());
-        } else if (StringUtils.isBlank(params.getMessageId()) && StringUtils.isNotBlank(params.getSystemMessage())) {
-            tokenStream = chatAssistantWithoutMemory.chat(params.getSystemMessage(), params.getUserMessage());
+        if (StringUtils.isNotBlank(params.getMessageId())) {
+            tokenStream = chatWithMemory(params.getMessageId(), params.getSystemMessage(), params.getUserMessage());
         } else {
-            tokenStream = chatAssistantWithoutMemory.chat(params.getUserMessage());
+            tokenStream = chatWithoutMemory(params.getSystemMessage(), params.getUserMessage());
         }
         tokenStream
                 .onNext((content) -> {
@@ -170,4 +166,19 @@ public abstract class AbstractLLMService {
                 .start();
     }
 
+    public TokenStream chatWithoutMemory(String systemMessage, String userMessage) {
+        if (StringUtils.isNotBlank(systemMessage)) {
+            return chatAssistantWithoutMemory.chat(systemMessage, userMessage);
+        } else {
+            return chatAssistantWithoutMemory.chat(userMessage);
+        }
+    }
+
+    public TokenStream chatWithMemory(String messageId, String systemMessage, String userMessage) {
+        if (StringUtils.isNotBlank(systemMessage)) {
+            return chatAssistant.chat(messageId, systemMessage, userMessage);
+        } else {
+            return chatAssistant.chat(messageId, userMessage);
+        }
+    }
 }
diff --git a/adi-common/src/main/java/com/moyz/adi/common/service/KnowledgeBaseService.java b/adi-common/src/main/java/com/moyz/adi/common/service/KnowledgeBaseService.java
index e137c57..0f9c44b 100644
--- a/adi-common/src/main/java/com/moyz/adi/common/service/KnowledgeBaseService.java
+++ b/adi-common/src/main/java/com/moyz/adi/common/service/KnowledgeBaseService.java
@@ -93,6 +93,7 @@ public class KnowledgeBaseService extends ServiceImpl<KnowledgeBaseMapper, KnowledgeBase> {
diff --git a/adi-common/src/main/java/com/moyz/adi/common/service/RAGService.java b/adi-common/src/main/java/com/moyz/adi/common/service/RAGService.java
--- a/adi-common/src/main/java/com/moyz/adi/common/service/RAGService.java
+++ b/adi-common/src/main/java/com/moyz/adi/common/service/RAGService.java
@@ ... @@
         List<EmbeddingMatch<TextSegment>> relevantEmbeddings = ((AdiPgVectorEmbeddingStore) embeddingStore).findRelevantByMetadata(metadataCond, questionEmbedding, maxResults, minScore);
-        // Create a prompt for the model that includes question and relevant embeddings
+        // Create a prompt that includes question and relevant embeddings
         String information = relevantEmbeddings.stream()
                 .map(match -> match.embedded().text())
                 .collect(joining("\n\n"));
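
Reviewer note: the sketch below restates the dispatch pattern this patch introduces in AbstractLLMService, where the old four-way branch over (messageId, systemMessage) collapses into a single check on the memory id, with the optional system message handled inside the two new helpers. It is a minimal, self-contained illustration only: the ChatAssistant and StatelessAssistant interfaces, the String return type (standing in for TokenStream), and the isNotBlank helper are hypothetical stand-ins, not the project's actual langchain4j assistant interfaces.

```java
// Hypothetical stand-ins for the project's assistant interfaces; only the
// dispatch logic mirrors AbstractLLMService after this patch.
interface ChatAssistant {                 // chat backed by conversation memory
    String chat(String memoryId, String systemMessage, String userMessage);
    String chat(String memoryId, String userMessage);
}

interface StatelessAssistant {            // chat without conversation memory
    String chat(String systemMessage, String userMessage);
    String chat(String userMessage);
}

public class ChatDispatchSketch {

    private final ChatAssistant chatAssistant;
    private final StatelessAssistant chatAssistantWithoutMemory;

    public ChatDispatchSketch(ChatAssistant withMemory, StatelessAssistant withoutMemory) {
        this.chatAssistant = withMemory;
        this.chatAssistantWithoutMemory = withoutMemory;
    }

    private static boolean isNotBlank(String s) {
        return s != null && !s.isBlank();
    }

    // Before the patch: four branches over (messageId, systemMessage).
    // After the patch: one branch over messageId; the helpers absorb the systemMessage check.
    public String chat(String messageId, String systemMessage, String userMessage) {
        if (isNotBlank(messageId)) {
            return chatWithMemory(messageId, systemMessage, userMessage);
        }
        return chatWithoutMemory(systemMessage, userMessage);
    }

    public String chatWithMemory(String messageId, String systemMessage, String userMessage) {
        return isNotBlank(systemMessage)
                ? chatAssistant.chat(messageId, systemMessage, userMessage)
                : chatAssistant.chat(messageId, userMessage);
    }

    public String chatWithoutMemory(String systemMessage, String userMessage) {
        return isNotBlank(systemMessage)
                ? chatAssistantWithoutMemory.chat(systemMessage, userMessage)
                : chatAssistantWithoutMemory.chat(userMessage);
    }
}
```

Behaviour is unchanged for every combination of blank and non-blank messageId and systemMessage; the refactoring only moves the system-message check out of the streaming path and into the reusable helpers.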