add ollama
parent 7b7e1b0ddc
commit 273dc9df8b
README.md

@@ -24,6 +24,7 @@
 * ChatGPT 3.5
 * 通义千问
 * 文心一言
+* ollama
 * DALL-E 2

 ## Tech stack
@@ -48,28 +49,37 @@ vue3+typescript+pnpm

 ### Initialization

-Initialize the database
+**a. Initialize the database**

 * Create the database aideepin
 * Run docs/create.sql
 * Fill in the configuration for each model

 OpenAI secretKey

 ```plaintext
-update adi_sys_config set value = '{"secret_key":"my_openai_secret_key"}' where name = 'openai_setting';
+update adi_sys_config set value = '{"secret_key":"my_openai_secret_key","models":["gpt-3.5-turbo"]}' where name = 'openai_setting';
 ```

 apiKey of the DashScope (灵积) platform

 ```plaintext
-update adi_sys_config set value = '{"api_key":"my_dashcope_api_key"}' where name = 'dashscope_setting';
+update adi_sys_config set value = '{"api_key":"my_dashcope_api_key","models":["my model name,eg:qwen-max"]}' where name = 'dashscope_setting';
 ```

 Configuration of the Qianfan (千帆) platform

 ```plaintext
-update adi_sys_config set value = '{"api_key":"my_qianfan_api_key","secret_key":"my_qianfan_secret_key"}' where name = 'qianfan_setting';
+update adi_sys_config set value = '{"api_key":"my_qianfan_api_key","secret_key":"my_qianfan_secret_key","models":["my model name,eg:ERNIE-Bot"]}' where name = 'qianfan_setting';
 ```

-* Modify the configuration files
+Ollama configuration
+
+```plaintext
+update adi_sys_config set value = '{"base_url":"my_ollama_base_url","models":["my model name,eg:tinydolphin"]}' where name = 'ollama_setting';
+```
+
+**b. Modify the configuration files**

 * postgresql: spring.datasource in application-[dev|prod].xml
 * redis: spring.data.redis in application-[dev|prod].xml
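The `models` array added to each `*_setting` row is what the backend reads at startup to decide which chat services to register; an empty array leaves that platform disabled. Below is a minimal sketch of how a value such as the `ollama_setting` JSON above is expected to map onto the new settings classes, using plain Jackson instead of the project's `JsonUtil`/`LocalCache`; `OllamaSettingSketch`, the base URL `http://localhost:11434` (Ollama's usual local default) and the model name `tinydolphin` are illustrative assumptions, not project code.

```java
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// Sketch only: mirrors the shape of OllamaSetting/CommonAiPlatformSetting from this
// commit, with plain public fields instead of Lombok so the example stands alone.
public class OllamaSettingSketch {

    public static class Setting {
        @JsonProperty("base_url")
        public String baseUrl;   // blank => OllamaLLMService.isEnabled() returns false
        public String[] models;  // one registered chat service per entry
    }

    public static void main(String[] args) throws Exception {
        String stored = "{\"base_url\":\"http://localhost:11434\",\"models\":[\"tinydolphin\"]}";
        Setting setting = new ObjectMapper().readValue(stored, Setting.class);
        System.out.println(setting.baseUrl + " -> " + String.join(", ", setting.models));
    }
}
```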
AdiConstant.java

@@ -68,6 +68,7 @@ public class AdiConstant {
     public static final String OPENAI_SETTING = "openai_setting";
     public static final String DASHSCOPE_SETTING = "dashscope_setting";
     public static final String QIANFAN_SETTING = "qianfan_setting";
+    public static final String OLLAMA_SETTING = "ollama_setting";
     public static final String REQUEST_TEXT_RATE_LIMIT = "request_text_rate_limit";
     public static final String REQUEST_IMAGE_RATE_LIMIT = "request_image_rate_limit";
     public static final String CONVERSATION_MAX_NUM = "conversation_max_num";
ErrorEnum.java

@@ -31,7 +31,9 @@ public enum ErrorEnum {
     B_MONTHLY_QUOTA_USED("B0007", "当月额度已经用完"),
     B_LLM_NOT_SUPPORT("B0008", "LLM不支持该功能"),
     B_LLM_SECRET_KEY_NOT_SET("B0009", "LLM的secret key没设置"),
-    B_MESSAGE_NOT_FOUND("B0008", "消息不存在");
+    B_MESSAGE_NOT_FOUND("B0008", "消息不存在"),
+    B_LLM_SERVICE_DISABLED("B0009", "LLM服务不可用"),
+    ;

     private String code;
     private String info;
ImageModelContext.java

@@ -35,12 +35,12 @@ public class ImageModelContext {
         }
     }

-    public static void addImageModelService(String modelName, AbstractImageModelService modelService) {
+    public static void addImageModelService(String modelServiceKey, AbstractImageModelService modelService) {
         ImageModelInfo imageModelInfo = new ImageModelInfo();
         imageModelInfo.setModelService(modelService);
-        imageModelInfo.setModelName(modelName);
+        imageModelInfo.setModelName(modelServiceKey);
         imageModelInfo.setEnable(modelService.isEnabled());
-        NAME_TO_MODEL.put(modelName, imageModelInfo);
+        NAME_TO_MODEL.put(modelServiceKey, imageModelInfo);
     }

     public AbstractImageModelService getModelService() {
LLMContext.java

@@ -1,10 +1,14 @@
 package com.moyz.adi.common.helper;

 import com.moyz.adi.common.interfaces.AbstractLLMService;
+import com.moyz.adi.common.util.JsonUtil;
+import com.moyz.adi.common.util.LocalCache;
+import com.moyz.adi.common.vo.CommonAiPlatformSetting;
 import com.moyz.adi.common.vo.LLMModelInfo;
 import lombok.extern.slf4j.Slf4j;

 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;

 import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;

@@ -14,7 +18,7 @@ import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
  */
 @Slf4j
 public class LLMContext {
-    public static final Map<String, LLMModelInfo> NAME_TO_MODEL = new HashMap<>();
+    public static final Map<String, LLMModelInfo> NAME_TO_MODEL = new LinkedHashMap<>();
     private AbstractLLMService llmService;

     public LLMContext() {

@@ -30,15 +34,21 @@ public class LLMContext {
         }
     }

-    public static void addLLMService(String modelName, AbstractLLMService llmService) {
+    public static void addLLMService(String llmServiceKey, AbstractLLMService llmService) {
         LLMModelInfo llmModelInfo = new LLMModelInfo();
-        llmModelInfo.setModelName(modelName);
+        llmModelInfo.setModelName(llmServiceKey);
         llmModelInfo.setEnable(llmService.isEnabled());
         llmModelInfo.setLlmService(llmService);
-        NAME_TO_MODEL.put(modelName, llmModelInfo);
+        NAME_TO_MODEL.put(llmServiceKey, llmModelInfo);
     }

     public AbstractLLMService getLLMService() {
         return llmService;
     }
+
+    public static String[] getSupportModels(String settingName) {
+        String st = LocalCache.CONFIGS.get(settingName);
+        CommonAiPlatformSetting setting = JsonUtil.fromJson(st, CommonAiPlatformSetting.class);
+        return setting.getModels();
+    }
 }
AbstractLLMService.java

@@ -1,6 +1,6 @@
 package com.moyz.adi.common.interfaces;

-import com.fasterxml.jackson.databind.util.JSONPObject;
+import com.moyz.adi.common.exception.BaseException;
 import com.moyz.adi.common.util.JsonUtil;
 import com.moyz.adi.common.util.LocalCache;
 import com.moyz.adi.common.vo.AnswerMeta;

@@ -20,10 +20,10 @@ import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;

 import java.io.IOException;
 import java.net.Proxy;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;

+import static com.moyz.adi.common.enums.ErrorEnum.B_LLM_SERVICE_DISABLED;
+
 @Slf4j
 public abstract class AbstractLLMService<T> {

@@ -73,11 +73,18 @@ public abstract class AbstractLLMService<T> {
     protected abstract String parseError(Object error);

     public Response<AiMessage> chat(ChatMessage chatMessage) {
+        if (!isEnabled()) {
+            log.error("llm service is disabled");
+            throw new BaseException(B_LLM_SERVICE_DISABLED);
+        }
         return getChatLLM().generate(chatMessage);
     }

     public void sseChat(SseAskParams params, TriConsumer<String, PromptMeta, AnswerMeta> consumer) {
+        if (!isEnabled()) {
+            log.error("llm service is disabled");
+            throw new BaseException(B_LLM_SERVICE_DISABLED);
+        }
         //create chat assistant
         AiServices<IChatAssistant> serviceBuilder = AiServices.builder(IChatAssistant.class)
                 .streamingChatLanguageModel(getStreamingChatLLM());
Initializer.java

@@ -1,17 +1,19 @@
 package com.moyz.adi.common.service;

+import com.moyz.adi.common.cosntant.AdiConstant;
 import com.moyz.adi.common.helper.ImageModelContext;
 import com.moyz.adi.common.helper.LLMContext;
 import dev.langchain4j.model.dashscope.QwenModelName;
 import dev.langchain4j.model.openai.OpenAiModelName;
 import jakarta.annotation.PostConstruct;
 import jakarta.annotation.Resource;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;

 import java.net.InetSocketAddress;
 import java.net.Proxy;

 @Slf4j
 @Service
 public class Initializer {

@@ -38,9 +40,43 @@ public class Initializer {
         if (proxyEnable) {
             proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyHttpPort));
         }
-        LLMContext.addLLMService(OpenAiModelName.GPT_3_5_TURBO, new OpenAiLLMService(OpenAiModelName.GPT_3_5_TURBO, proxy));
-        LLMContext.addLLMService(QwenModelName.QWEN_MAX, new DashScopeLLMService(QwenModelName.QWEN_MAX));
-        LLMContext.addLLMService("ERNIE-Bot", new QianFanLLMService("ERNIE-Bot"));
+
+        //openai
+        String[] openaiModels = LLMContext.getSupportModels(AdiConstant.SysConfigKey.OPENAI_SETTING);
+        if (openaiModels.length == 0) {
+            log.warn("openai service is disabled");
+        }
+        for (String model : openaiModels) {
+            LLMContext.addLLMService(model, new OpenAiLLMService(model, proxy));
+        }
+
+        //dashscope
+        String[] dashscopeModels = LLMContext.getSupportModels(AdiConstant.SysConfigKey.DASHSCOPE_SETTING);
+        if (dashscopeModels.length == 0) {
+            log.warn("dashscope service is disabled");
+        }
+        for (String model : dashscopeModels) {
+            LLMContext.addLLMService(model, new DashScopeLLMService(model));
+        }
+
+        //qianfan
+        String[] qianfanModels = LLMContext.getSupportModels(AdiConstant.SysConfigKey.QIANFAN_SETTING);
+        if (qianfanModels.length == 0) {
+            log.warn("qianfan service is disabled");
+        }
+        for (String model : qianfanModels) {
+            LLMContext.addLLMService(model, new QianFanLLMService(model));
+        }
+
+        //ollama
+        String[] ollamaModels = LLMContext.getSupportModels(AdiConstant.SysConfigKey.OLLAMA_SETTING);
+        if (ollamaModels.length == 0) {
+            log.warn("ollama service is disabled");
+        }
+        for (String model : ollamaModels) {
+            LLMContext.addLLMService("ollama:" + model, new OllamaLLMService(model));
+        }
+
         ImageModelContext.addImageModelService(OpenAiModelName.DALL_E_2, new OpenAiImageModelService(OpenAiModelName.DALL_E_2, proxy));
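Model registration is now driven entirely by the `models` arrays in `adi_sys_config` rather than by hard-coded names. Ollama entries are keyed with an `ollama:` prefix so a locally served model cannot collide with an OpenAI, DashScope or QianFan model name, and the switch to `LinkedHashMap` presumably keeps the keys in registration order when models are listed. A toy, self-contained sketch of the resulting key scheme; `RegistryKeySketch`, the hard-coded model arrays and the placeholder service-name strings are assumptions for illustration only.

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Toy illustration of the registry keys the new Initializer logic produces,
// assuming the example configs from the README (gpt-3.5-turbo, qwen-max,
// ERNIE-Bot, tinydolphin). Values are placeholder strings, not real services.
public class RegistryKeySketch {
    public static void main(String[] args) {
        Map<String, String> nameToModel = new LinkedHashMap<>(); // insertion order kept, like NAME_TO_MODEL
        for (String model : new String[]{"gpt-3.5-turbo"}) {
            nameToModel.put(model, "OpenAiLLMService");
        }
        for (String model : new String[]{"qwen-max"}) {
            nameToModel.put(model, "DashScopeLLMService");
        }
        for (String model : new String[]{"ERNIE-Bot"}) {
            nameToModel.put(model, "QianFanLLMService");
        }
        for (String model : new String[]{"tinydolphin"}) {
            nameToModel.put("ollama:" + model, "OllamaLLMService"); // Ollama keys are prefixed
        }
        nameToModel.forEach((key, service) -> System.out.println(key + " -> " + service));
    }
}
```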
OllamaLLMService.java (new file)

@@ -0,0 +1,46 @@
+package com.moyz.adi.common.service;
+
+import com.moyz.adi.common.interfaces.AbstractLLMService;
+import com.moyz.adi.common.vo.OllamaSetting;
+import dev.langchain4j.model.chat.ChatLanguageModel;
+import dev.langchain4j.model.chat.StreamingChatLanguageModel;
+import dev.langchain4j.model.ollama.OllamaChatModel;
+import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.stereotype.Service;
+
+import static com.moyz.adi.common.cosntant.AdiConstant.SysConfigKey.OLLAMA_SETTING;
+
+public class OllamaLLMService extends AbstractLLMService<OllamaSetting> {
+
+    public OllamaLLMService(String modelName) {
+        super(modelName, OLLAMA_SETTING, OllamaSetting.class, null);
+    }
+
+    @Override
+    public boolean isEnabled() {
+        return StringUtils.isNotBlank(setting.getBaseUrl());
+    }
+
+    @Override
+    protected ChatLanguageModel buildChatLLM() {
+        return OllamaChatModel.builder()
+                .baseUrl(setting.getBaseUrl())
+                .modelName(modelName)
+                .temperature(0.0)
+                .build();
+    }
+
+    @Override
+    protected StreamingChatLanguageModel buildStreamingChatLLM() {
+        return OllamaStreamingChatModel.builder()
+                .baseUrl(setting.getBaseUrl())
+                .modelName(modelName)
+                .build();
+    }
+
+    @Override
+    protected String parseError(Object error) {
+        return null;
+    }
+}
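`OllamaLLMService` delegates the actual chat call to langchain4j's Ollama client, so the builder chains above are essentially the whole integration. The sketch below exercises the same `OllamaChatModel` builder outside the service, assuming a local Ollama server on its default port with the `tinydolphin` model already pulled (`ollama pull tinydolphin`); `OllamaChatSketch`, the base URL and the prompt are illustrative, not part of the commit.

```java
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;

// Sketch: the same builder chain OllamaLLMService.buildChatLLM() uses, wired to a
// hypothetical local Ollama instance instead of the value stored in ollama_setting.
public class OllamaChatSketch {
    public static void main(String[] args) {
        ChatLanguageModel model = OllamaChatModel.builder()
                .baseUrl("http://localhost:11434") // corresponds to ollama_setting.base_url
                .modelName("tinydolphin")          // one entry of ollama_setting.models
                .temperature(0.0)
                .build();
        System.out.println(model.generate("Say hello in one short sentence."));
    }
}
```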
JsonUtil.java

@@ -5,6 +5,7 @@ import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;

@@ -26,6 +27,7 @@ public class JsonUtil {
     static {
         objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
         objectMapper.configure(SerializationFeature.INDENT_OUTPUT, Boolean.FALSE);
+        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
         objectMapper.registerModules(LocalDateTimeUtil.getSimpleModule(), new JavaTimeModule(), new Jdk8Module());
     }

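Turning off `FAIL_ON_UNKNOWN_PROPERTIES` is likely what lets `LLMContext.getSupportModels` parse a full platform setting (which also carries `secret_key`, `api_key`, or `base_url`) into `CommonAiPlatformSetting`, a class that only declares `models`. A small sketch of the difference; `LenientParseSketch` and `ModelsOnly` are illustrative stand-ins, not project classes.

```java
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

// Sketch: parsing an openai_setting-style value into a class that only declares
// "models", as LLMContext.getSupportModels() does via CommonAiPlatformSetting.
public class LenientParseSketch {

    public static class ModelsOnly {
        public String[] models;
    }

    public static void main(String[] args) throws Exception {
        String stored = "{\"secret_key\":\"sk-xxx\",\"models\":[\"gpt-3.5-turbo\"]}";

        ObjectMapper lenient = new ObjectMapper()
                .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        System.out.println(lenient.readValue(stored, ModelsOnly.class).models[0]); // gpt-3.5-turbo

        try {
            new ObjectMapper().readValue(stored, ModelsOnly.class); // Jackson's default is strict
        } catch (Exception e) {
            System.out.println("strict mapper rejects the unknown key: " + e.getClass().getSimpleName());
        }
    }
}
```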
CommonAiPlatformSetting.java (new file)

@@ -0,0 +1,8 @@
+package com.moyz.adi.common.vo;
+
+import lombok.Data;
+
+@Data
+public class CommonAiPlatformSetting {
+    private String[] models;
+}
DashScopeSetting.java

@@ -4,7 +4,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;

 @Data
-public class DashScopeSetting {
+public class DashScopeSetting extends CommonAiPlatformSetting {

     @JsonProperty("api_key")
     private String apiKey;
OllamaSetting.java (new file)

@@ -0,0 +1,11 @@
+package com.moyz.adi.common.vo;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Data;
+
+@Data
+public class OllamaSetting extends CommonAiPlatformSetting {
+
+    @JsonProperty("base_url")
+    private String baseUrl;
+}
OpenAiSetting.java

@@ -4,7 +4,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;

 @Data
-public class OpenAiSetting {
+public class OpenAiSetting extends CommonAiPlatformSetting {

     @JsonProperty("secret_key")
     private String secretKey;
QianFanSetting.java

@@ -4,7 +4,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;

 @Data
-public class QianFanSetting {
+public class QianFanSetting extends CommonAiPlatformSetting {

     @JsonProperty("api_key")
     private String apiKey;
docs/create.sql

@@ -382,11 +382,13 @@ CREATE TRIGGER trigger_user_day_cost_update_time
 EXECUTE PROCEDURE update_modified_column();

 INSERT INTO adi_sys_config (name, value)
-VALUES ('openai_setting', '{"secret_key":""}');
+VALUES ('openai_setting', '{"secret_key":"","models":[]}');
 INSERT INTO adi_sys_config (name, value)
-VALUES ('dashscope_setting', '{"api_key":""}');
+VALUES ('dashscope_setting', '{"api_key":"","models":[]}');
 INSERT INTO adi_sys_config (name, value)
-VALUES ('qianfan_setting', '{"api_key":"","secret_key":""}');
+VALUES ('qianfan_setting', '{"api_key":"","secret_key":"","models":[]}');
+INSERT INTO adi_sys_config (name, value)
+VALUES ('ollama_setting', '{"base_url":"","models":[]}');
 INSERT INTO adi_sys_config (name, value)
 VALUES ('request_text_rate_limit', '{"times":24,"minutes":3}');
 INSERT INTO adi_sys_config (name, value)
pom.xml

@@ -25,7 +25,7 @@
         <maven.compiler.source>17</maven.compiler.source>
         <maven.compiler.target>17</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <langchain4j.version>0.27.1</langchain4j.version>
+        <langchain4j.version>0.28.0</langchain4j.version>
     </properties>
     <dependencies>
         <dependency>

@@ -170,6 +170,11 @@
             <artifactId>langchain4j-qianfan</artifactId>
             <version>${langchain4j.version}</version>
         </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-ollama</artifactId>
+            <version>${langchain4j.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>