本文基于LangChain4j、Milvus、Redis实现AI智能体。LangChain4j作为会话大脑,Redis实现会话记忆(Memory),Milvus构建RAG知识库。
引入依赖
<!-- LangChain4j Spring Boot starter for the OpenAI-compatible API -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai-spring-boot-starter</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- Core starter providing @AiService auto-wiring -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-spring-boot-starter</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- LangChain4j Spring Boot starter for Ollama (local models) -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-ollama-spring-boot-starter</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- Reactor integration for streaming (Flux) responses -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-reactor</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- Milvus vector-store backend for the RAG knowledge base -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-milvus</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- In-process MiniLM embedding model (384-dimensional vectors) -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-embeddings-all-minilm-l6-v2</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- Document parsers: PDF (PDFBox) and Office formats (POI) -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-document-parser-apache-pdfbox</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-document-parser-apache-poi</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- Easy RAG: simplified document loading/splitting for the knowledge base -->
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-easy-rag</artifactId>
<version>1.10.0-beta18</version>
</dependency>
<!-- WebFlux for streaming HTTP responses -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
</dependency>
智能体配置
配置使用Milvus作为向量数据库实现RAG知识检索。此处仅为示例,实际使用过程中加载知识库的部分需要独立出来,按需更新即可。
/**
 * Agent wiring: Redis-backed chat memory, an in-process MiniLM embedding
 * model, a Milvus vector store, a startup document ingestor, and the RAG
 * content retriever used by the AI service.
 */
@Configuration
public class AgentsConfig {

    @Autowired
    private RedisChatMemoryStore redisChatMemoryStore;

    // Milvus connection settings, injected from application configuration.
    @Value("${milvus.host}")
    private String milvusHost;
    @Value("${milvus.port}")
    private Integer milvusPort;
    @Value("${milvus.database-name}")
    private String milvusDatabase;
    @Value("${milvus.collection-name:knowledge_base}")
    private String milvusCollectionName;
    @Value("${milvus.dimension:384}")
    private Integer milvusDimension;

    /**
     * Per-conversation chat memory: each memoryId gets a sliding window of
     * the latest 30 messages, persisted in Redis.
     *
     * @return provider that creates/loads the memory for a given memoryId
     */
    @Bean
    public ChatMemoryProvider chatMemoryProvider() {
        // ChatMemoryProvider is a functional interface, so a lambda replaces
        // the original anonymous inner class.
        return memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(30)
                .chatMemoryStore(redisChatMemoryStore)
                .build();
    }

    /**
     * Local MiniLM-L6-v2 embedding model (no remote calls; 384-dim vectors).
     */
    @Bean
    public EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Milvus-backed vector store for the knowledge base. The dimension must
     * match the embedding model's output size (384 for MiniLM-L6-v2).
     */
    @Bean
    public EmbeddingStore<TextSegment> milvusEmbeddingStore() {
        return MilvusEmbeddingStore.builder()
                .host(milvusHost)
                .port(milvusPort)
                .databaseName(milvusDatabase)
                .collectionName(milvusCollectionName)
                .dimension(milvusDimension)
                .build();
    }

    /**
     * Document ingestor that splits documents into 500-char segments with a
     * 50-char overlap, embeds them, and stores them in Milvus.
     *
     * NOTE(review): as the article states, this is demo-only — it re-ingests
     * the classpath "content" directory on every application startup. In
     * production, move the loading step into a separate, on-demand job.
     */
    @Bean
    public EmbeddingStoreIngestor embeddingStoreIngestor(EmbeddingModel embeddingModel,
                                                         EmbeddingStore<TextSegment> embeddingStore) {
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 50))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        // 1. Load the knowledge base from the classpath and ingest it.
        List<Document> documents = ClassPathDocumentLoader.loadDocuments("content");
        ingestor.ingest(documents);
        return ingestor;
    }

    /**
     * RAG retriever: returns at most 3 segments whose similarity score is
     * at least 0.5 for a given query.
     */
    @Bean
    public ContentRetriever contentRetriever(EmbeddingModel embeddingModel,
                                             EmbeddingStore<TextSegment> embeddingStore) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .minScore(0.5)
                .maxResults(3)
                .build();
    }
}
会话记忆
使用Redis做对话记忆。
/**
 * Redis-backed {@link ChatMemoryStore}: persists each conversation's message
 * history as a JSON string keyed by memoryId, with a 7-day TTL.
 */
@Component
public class RedisChatMemoryStore implements ChatMemoryStore {

    /** How long an idle conversation history is kept in Redis. */
    private static final Duration HISTORY_TTL = Duration.ofDays(7);

    /**
     * Loads the message history for a conversation.
     *
     * @param memoryId conversation identifier (used as the Redis key)
     * @return the stored messages, or an empty list on a cache miss
     */
    @Override
    public List<ChatMessage> getMessages(Object memoryId) {
        String json = RedisUtils.getCacheObject(memoryId.toString());
        // Guard the cache-miss case explicitly instead of handing a null
        // JSON string to the deserializer.
        if (json == null || json.isEmpty()) {
            return List.of();
        }
        return ChatMessageDeserializer.messagesFromJson(json);
    }

    /** Replaces the stored history with the given messages and refreshes the TTL. */
    @Override
    public void updateMessages(Object memoryId, List<ChatMessage> messages) {
        RedisUtils.setCacheObject(memoryId.toString(), ChatMessageSerializer.messagesToJson(messages), HISTORY_TTL);
    }

    /** Deletes the conversation history from Redis. */
    @Override
    public void deleteMessages(Object memoryId) {
        RedisUtils.deleteObject(memoryId.toString());
    }
}
工具调用
智能体本地工具调用,此处是举例,实际场景根据业务调整即可。这里演示了一个根据商品名称查询库存的案例。
/**
 * Local tool exposed to the agent. Demo: look up stock records by a fuzzy
 * product-name match. Adapt the lookup to real business logic as needed.
 */
@Component
public class RetailTool {

    // Demo-only chain (store) id. In production, resolve it from the
    // logged-in user's department instead of hard-coding it
    // (e.g. LoginHelper.getLoginUser().getDeptId()).
    private static final long DEMO_CHAIN_ID = 1863398543189741570L;

    @DubboReference
    private IChainStockApiService stockApiService;

    /**
     * Queries stock by product name: first page, up to 10 rows, fuzzy match
     * on the product name, executed under the default tenant.
     *
     * @param productName product name to match (LIKE semantics)
     * @return matching stock rows
     */
    @Tool("根据商品名称查询库存")
    public List<ErpChainStock> queryChainByPhone(@P("商品名称") String productName) {
        return TenantHelper.dynamic(TenantConstants.DEFAULT_TENANT_ID, () -> {
            Map<String, String> criteria = new HashMap<>();
            criteria.put("productNameLike", productName);
            PagerInfo pager = new PagerInfo(10, 1);
            ResultPage<ErpChainStock> resultPage = stockApiService.query(DEMO_CHAIN_ID, criteria, pager);
            return resultPage.getList();
        });
    }
}
AI Service
使用@AiService注解,注入会话模型(此处使用ollama本地模型),注入本地工具tools、会话记忆提供者chatMemoryProvider、RAG知识库contentRetriever。
// Explicitly wired AI service: Ollama streaming chat model, the local
// retail tool, the Redis-backed chat-memory provider, and the Milvus RAG
// content retriever. Commented lines show the alternative wirings
// (OpenAI-compatible model, single shared chat memory).
@AiService(
wiringMode = AiServiceWiringMode.EXPLICIT,
//chatModel = "ollamaChatModel",
streamingChatModel = "ollamaStreamingChatModel",
//streamingChatModel = "openAiStreamingChatModel",
//chatMemory = "chatMemory", // alternative: one shared chat-memory bean
tools = "retailTool" ,
chatMemoryProvider = "chatMemoryProvider",
contentRetriever = "contentRetriever"
)
public interface IChartAiService {
/**
 * Streams the AI's reply token-by-token.
 *
 * @param memoryId identifies the conversation whose history is loaded and updated
 * @param message the user's message
 * @return a reactive stream of response chunks
 */
//@SystemMessage("你是黔丰盈通门店管理助手爱钱钱。请以友好、乐于助人且愉快的方式解答用户关于门店零售的相关问题,回答问题请用中文。")
@SystemMessage(fromResource = "system-message.txt")
Flux<String> chat(@MemoryId Long memoryId, @UserMessage String message);
}
HTTP对外接口
此处只是举例,getMemoryId需要根据实际业务场景生成。使用流式对话返回Flux<String>。
/**
 * HTTP endpoints for the AI chat demo.
 *
 * Fix: {@code @RestController}'s value attribute is the Spring *bean name*,
 * not a URL prefix, so the original {@code @RestController("/ai")} left the
 * endpoints mapped at /getMemoryId and /chat. {@code @RequestMapping("/ai")}
 * actually mounts them under /ai.
 */
@RestController
@RequestMapping("/ai")
public class AiChatController {

    @Autowired
    private IChartAiService chartAiService;

    /**
     * Issues a conversation id. Demo only: a raw timestamp is not
     * collision-safe — derive the memory id from real business context.
     */
    @GetMapping(value = "/getMemoryId")
    public R<Long> getMemoryId() {
        return R.ok(System.currentTimeMillis());
    }

    /**
     * Streams the model's reply for the given conversation as chunked text.
     */
    @GetMapping(value = "/chat", produces = "text/html;charset=utf-8")
    public Flux<String> model(Long memoryId, String message) {
        return chartAiService.chat(memoryId, message);
    }
}
配置
配置文件如下
langchain4j:
  #open-ai:
  ollama:
    streaming-chat-model:
      # ollama: http://192.168.2.131:11434 ; Alibaba Bailian: https://dashscope.aliyuncs.com/compatible-mode/v1
      base-url: http://192.168.2.131:11434
      #api-key: sk-xxxxxxxxx697
      # ollama models: deepseek-r1-1.5b, qwen3:0.6b ; Bailian: qwen3-max
      model-name: qwen3:0.6b
      log-requests: true
      log-responses: true
# Milvus vector database
milvus:
  host: localhost
  port: 19530
  # If Milvus requires authentication, uncomment and fill in the password
  username: root
  # password: Milvus
  database-name: test
LoveCTO

