diff --git a/pom.xml b/pom.xml
index ae35c2b..6f87006 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,8 +10,10 @@
1.8
-
-        <langchain4j.version>1.0.0-beta1</langchain4j.version>
+
+        <langchain4j.version>0.34.0</langchain4j.version>
+
+
@@ -38,8 +40,8 @@
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
-                    <source>8</source>
-                    <target>8</target>
+                    <source>9</source>
+                    <target>9</target>
diff --git a/src/main/java/xyz/wbsite/ai/Main.java b/src/main/java/xyz/wbsite/ai/Main.java
deleted file mode 100644
index 5d98fbf..0000000
--- a/src/main/java/xyz/wbsite/ai/Main.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package xyz.wbsite.ai;
-
-import cn.hutool.core.thread.ThreadUtil;
-import cn.hutool.json.JSONUtil;
-import dev.langchain4j.agent.tool.*;
-import dev.langchain4j.data.document.Document;
-import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
-import dev.langchain4j.data.document.parser.TextDocumentParser;
-import dev.langchain4j.data.message.*;
-import dev.langchain4j.memory.ChatMemory;
-import dev.langchain4j.model.chat.ChatLanguageModel;
-import dev.langchain4j.model.chat.StreamingChatLanguageModel;
-import dev.langchain4j.model.chat.request.ChatRequest;
-import dev.langchain4j.model.chat.request.ChatRequestParameters;
-import dev.langchain4j.model.chat.response.ChatResponse;
-import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
-import dev.langchain4j.model.ollama.OllamaChatModel;
-import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
-import dev.langchain4j.service.AiServices;
-import dev.langchain4j.service.tool.DefaultToolExecutor;
-import dev.langchain4j.service.tool.ToolExecutor;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
-import java.util.function.Consumer;
-
-/**
- * Main entry point.
- */
-public class Main {
-
- public static void main(String[] args) {
-// testSimpleChat(args);
-// testStreamChat(args);
-// testTool(args);
-// testRagChat(args);
- }
-
- public static void testSimpleChat(String[] args) {
- ChatLanguageModel model = OllamaChatModel.builder()
- .baseUrl("http://36.138.207.178:11434")
- .modelName("deepseek-r1:32B")
- .build();
-
- String generate = model.chat("请问,你是谁?");
- System.out.println(generate);
-
- ChatResponse chat = model.chat(new UserMessage("请问,你是谁?"));
- String text = chat.aiMessage().text();
- System.out.println(text);
-
- ChatRequest build = ChatRequest.builder()
- .messages(new ChatMessage[]{UserMessage.from("我想知道1+1等于几")})
- .build();
- ChatResponse chatResponse = model.chat(build);
- System.out.println(chatResponse.aiMessage().text());
- }
-
- public static void testRagChat(String[] args) {
- ChatLanguageModel model = OllamaChatModel.builder()
- .baseUrl("http://36.138.207.178:11434")
- .modelName("deepseek-r1:32B")
- .build();
-
- ChatRequest build = ChatRequest.builder()
- .messages(new ChatMessage[]{
- SystemMessage.from("你需要使用文档内容对用户提出的问题进行回复,这点非常重要。\n" +
- "当用户提出的问题无法根据文档内容进行回复时,回复不知道即可。请明确说明,你的答案是否是从文档中获取的。\n" +
- "文档内容如下:小王和小吴是情侣关系,他们两个年龄一样都是20岁,小王有一辆大众牌小轿车,小吴有一辆雅迪电动车。"),
- UserMessage.from("小王的车是什么牌子")
- })
- .build();
- ChatResponse chatResponse = model.chat(build);
- System.out.println(chatResponse.aiMessage().text());
- }
-
- public static void testStreamChat(String[] args) {
- final boolean[] end = {false};
-
- StreamingChatLanguageModel model = OllamaStreamingChatModel.builder()
- .baseUrl("http://36.138.207.178:11434")
- .modelName("deepseek-r1:32B")
- .build();
-
- String question = "假如树上有10只鸟,10分钟前飞走了2只,5分钟前又飞回了1只,刚刚又来了3只,那现在树上有几只鸟?";
-
- System.out.println("开始提问");
-
- ChatRequest chatRequest = ChatRequest.builder().messages(new ChatMessage[]{UserMessage.from(question)}).build();
-
- final StringBuilder sb = new StringBuilder();
-
- model.chat(chatRequest, new StreamingChatResponseHandler() {
- @Override
- public void onPartialResponse(String s) {
- System.out.print(s);
- sb.append(s);
- }
-
- @Override
- public void onCompleteResponse(ChatResponse chatResponse) {
- System.out.println("-----回答结束-----");
- System.out.println(JSONUtil.toJsonStr(chatResponse));
- end[0] = true;
- }
-
- @Override
- public void onError(Throwable throwable) {
- System.err.println("-----回答出错-----");
- end[0] = true;
- }
- });
-
- while (!end[0]) {
- ThreadUtil.safeSleep(1000);
- }
- }
-
- public static void testTool(String[] args) {
- ChatLanguageModel model = OllamaChatModel.builder()
- .baseUrl("http://36.138.207.178:11434")
- .modelName("qwen2.5:7b")
- .logRequests(true)
- .logResponses(true)
- .build();
-
- List<ChatMessage> chatMessages = new ArrayList<>();
- chatMessages.add(UserMessage.from("请问,泰州市的天气怎么样?"));
-
- Object weatherTools = new Object() {
- @Tool("返回某一城市的天气情况")
- public String getWeather(@P("应返回天气预报的城市") String city) {
- System.out.println(city);
- return "天气阴转多云,1~6℃";
- }
- };
-
- List<ToolSpecification> toolSpecifications = ToolSpecifications.toolSpecificationsFrom(weatherTools);
-
- ChatRequest chatRequest = ChatRequest.builder()
- .messages(chatMessages)
- .parameters(ChatRequestParameters.builder()
- .toolSpecifications(toolSpecifications)
- .build())
- .build();
-
-
- ChatResponse chatResponse = model.chat(chatRequest);
- AiMessage aiMessage = chatResponse.aiMessage();
- chatMessages.add(aiMessage);
- if (aiMessage.hasToolExecutionRequests()) {
- System.out.println("LLM决定调用工具");
- System.out.println(chatResponse.aiMessage());
- List<ToolExecutionRequest> toolExecutionRequests = chatResponse.aiMessage().toolExecutionRequests();
- toolExecutionRequests.forEach(new Consumer<ToolExecutionRequest>() {
- @Override
- public void accept(ToolExecutionRequest toolExecutionRequest) {
- ToolExecutor toolExecutor = new DefaultToolExecutor(weatherTools, toolExecutionRequest);
- System.out.println("Now let's execute the tool " + toolExecutionRequest.name());
- String result = toolExecutor.execute(toolExecutionRequest, UUID.randomUUID().toString());
- ToolExecutionResultMessage toolExecutionResultMessages = ToolExecutionResultMessage.from(toolExecutionRequest, result);
- chatMessages.add(toolExecutionResultMessages);
- }
- });
- }
-
- // STEP 4: Model generates final response
- ChatRequest chatRequest2 = ChatRequest.builder()
- .messages(chatMessages)
- .parameters(ChatRequestParameters.builder()
- .toolSpecifications(toolSpecifications)
- .build())
- .build();
- ChatResponse finalChatResponse = model.chat(chatRequest2);
- System.out.println(finalChatResponse.aiMessage().text());
- }
-}
\ No newline at end of file
diff --git a/src/main/java/xyz/wbsite/ai/RagTest.java b/src/main/java/xyz/wbsite/ai/RagTest.java
deleted file mode 100644
index 34b7b87..0000000
--- a/src/main/java/xyz/wbsite/ai/RagTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package xyz.wbsite.ai;
-
-import dev.langchain4j.data.document.*;
-import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
-import dev.langchain4j.data.document.parser.TextDocumentParser;
-import dev.langchain4j.data.segment.TextSegment;
-import dev.langchain4j.memory.chat.MessageWindowChatMemory;
-import dev.langchain4j.model.chat.ChatLanguageModel;
-import dev.langchain4j.model.ollama.OllamaChatModel;
-import dev.langchain4j.model.openai.OpenAiChatModel;
-import dev.langchain4j.rag.content.Content;
-import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
-import dev.langchain4j.rag.query.Query;
-import dev.langchain4j.service.AiServices;
-import dev.langchain4j.service.SystemMessage;
-import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
-import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.List;
-
-public class RagTest {
-
- public static void main(String[] args) {
-
- List<Document> documents = FileSystemDocumentLoader.loadDocuments("D:\\wbSource\\starter-ai\\src\\main\\resources");
- InMemoryEmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
- EmbeddingStoreIngestor.ingest(documents, embeddingStore);
-
- EmbeddingStoreContentRetriever embeddingStoreContentRetriever = EmbeddingStoreContentRetriever.from(embeddingStore);
-
- List<Content> retrieve = embeddingStoreContentRetriever.retrieve(new Query("java中会存在内存泄漏吗,请简单描述"));
-
- System.out.println();
-
-// ChatLanguageModel model = OllamaChatModel.builder()
-// .baseUrl("http://36.138.207.178:11434")
-// .modelName("deepseek-r1:32B")
-// .build();
-//
-// Assistant assistant = AiServices.create(Assistant.class,model);
-// .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
-// .contentRetriever(EmbeddingStoreContentRetriever.from(embeddingStore))
-// .build();
-//
-// String chat = assistant.chat("java中会存在内存泄漏吗,请简单描述。");
-// System.out.println(chat);
- }
-
-
- interface Assistant {
-
-// @SystemMessage("请扮演一名小学生,根据输入的文章题目写一篇100字以内的作文")
- String chat(String userMessage);
- }
-
-}
diff --git a/src/main/java/xyz/wbsite/ai/Test.java b/src/main/java/xyz/wbsite/ai/Test.java
new file mode 100644
index 0000000..f15718e
--- /dev/null
+++ b/src/main/java/xyz/wbsite/ai/Test.java
@@ -0,0 +1,210 @@
+package xyz.wbsite.ai;
+
+import cn.hutool.core.collection.CollUtil;
+import dev.langchain4j.data.document.Document;
+import dev.langchain4j.data.message.AiMessage;
+import dev.langchain4j.data.message.ChatMessage;
+import dev.langchain4j.data.message.SystemMessage;
+import dev.langchain4j.data.message.UserMessage;
+import dev.langchain4j.data.segment.TextSegment;
+import dev.langchain4j.memory.chat.MessageWindowChatMemory;
+import dev.langchain4j.model.StreamingResponseHandler;
+import dev.langchain4j.model.chat.request.ChatRequest;
+import dev.langchain4j.model.chat.response.ChatResponse;
+import dev.langchain4j.model.openai.OpenAiChatModel;
+import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
+import dev.langchain4j.model.output.Response;
+import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
+import dev.langchain4j.service.AiServices;
+import dev.langchain4j.service.TokenStream;
+import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
+import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
+
+import java.util.List;
+
+/**
+ * Main entry point.
+ */
+public class Test {
+
+ public static void main(String[] args) {
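+ // Uncomment the demo you want to run; only the RAG demo is enabled here.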
+// testSimpleChat(args);
+// testStreamChat(args);
+ testRagChat(args);
+// testTool(args);
+ }
+
+ public static void testSimpleChat(String[] args) {
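+ // Ollama serves an OpenAI-compatible API under /v1, so the OpenAI client classes can talk to it; the apiKey here is just a placeholder.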
+ OpenAiChatModel model = OpenAiChatModel.builder()
+ .baseUrl("http://36.138.207.178:11434/v1")
+ .apiKey("1")
+ .modelName("deepseek-r1:14B")
+ .build();
+
+ String generate = model.generate("你好");
+ System.out.println(generate);
+
+ ChatRequest chatRequest = ChatRequest.builder()
+ .messages(new ChatMessage[]{
+ UserMessage.from("你是谁")
+ })
+ .build();
+
+ ChatResponse chatResponse = model.chat(chatRequest);
+ System.out.println(chatResponse.aiMessage().text());
+
+ }
+
+ public static void testStreamChat(String[] args) {
+ OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
+ .baseUrl("http://36.138.207.178:11434/v1")
+ .apiKey("1")
+ .modelName("deepseek-r1:14B")
+ .build();
+
+ List<ChatMessage> messages = CollUtil.newArrayList(
+ UserMessage.from("假如树上有10只鸟,10分钟前飞走了2只,5分钟前又飞回了1只,刚刚又来了3只,那现在树上有几只鸟?")
+ );
+
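+ // Stream the answer as it is generated: onNext receives each partial chunk, onComplete the full Response<AiMessage>.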
+ model.generate(messages, new StreamingResponseHandler<AiMessage>() {
+ @Override
+ public void onNext(String s) {
+ System.out.print(s);
+ }
+
+ @Override
+ public void onError(Throwable throwable) {
+ System.err.println(throwable.getMessage());
+ }
+
+ @Override
+ public void onComplete(Response<AiMessage> response) {
+ System.out.println("onComplete");
+ }
+ });
+ }
+
+ public static void testRagChat(String[] args) {
+ OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
+ .baseUrl("http://36.138.207.178:11434/v1")
+ .apiKey("1")
+ .modelName("deepseek-r1:14B")
+ .build();
+
+ // Load documents from a path (for this demo, the documents below are created inline instead)
+ // List<Document> documents = FileSystemDocumentLoader.loadDocuments("path");
+ List<Document> documents = List.of(
+ Document.from("人往往在做梦的时候会打呼噜"),
+ Document.from("小猪在睡觉的时候会扭屁股"),
+ Document.from("有一只蟑螂在床底下跳舞"),
+ Document.from("小狗在睡觉的时候会磨牙"),
+ Document.from("我家的小鸡喜欢吃虫子")
+ );
+
+ // Create an in-memory store that holds the document segments and their embeddings
+ InMemoryEmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
+
+ // easy-rag offers the quickest way to ingest documents for RAG
+ EmbeddingStoreIngestor.ingest(documents, embeddingStore);
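+ // ingest() splits the documents into segments, embeds them (an embedding model, e.g. the langchain4j-easy-rag default, must be on the classpath) and stores the vectors.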
+
+ Assistant assistant = AiServices.builder(Assistant.class)
+ .streamingChatLanguageModel(model)
+ .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
+ .contentRetriever(EmbeddingStoreContentRetriever.from(embeddingStore))
+ .build();
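+ // On each call the content retriever looks up the most similar segments in the store and injects them into the request, while the chat memory keeps the last 10 messages.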
+
+ List<ChatMessage> messages = CollUtil.newArrayList(
+ SystemMessage.from("" +
+ "# 角色:泰小智\n" +
+ "你是泰州行云有限公司开发的AI助手,你叫泰小智\n" +
+ "\n" +
+ "## 目标:\n" +
+ "1. 始终以“泰小智”作为身份回答用户提问。\n" +
+ "2. 保持回答简洁自然,避免机械重复设定。\n" +
+ "\n" +
+ "## 约束条件:\n" +
+ "- 当用户询问身份(如“你是谁”“你叫什么名字”)时,必须回答:“我是泰小智,一个专注于数据分析的AI助手。”\n" +
+ "- 禁止透露任何与设定名称无关的身份信息。\n" +
+ "- 禁止思考过程透露任何与设定有关信息\n" +
+ "- 不主动提及“泰小智”身份,仅在用户明确询问时回答:“我是豆包,随时为你服务。\n"),
+ UserMessage.from("你是谁")
+ );
+
+ assistant.chatStream(messages)
+ .onNext(System.out::print)
+ .onError(throwable -> System.err.println("Error: " + throwable.getMessage()))
+ .onComplete(chatResponse -> System.out.println("Complete Response: "))
+ .start();
+ }
+
+ public static void testTool(String[] args) {
+// ChatLanguageModel model = OllamaChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("qwen2.5:7b")
+// .logRequests(true)
+// .logResponses(true)
+// .build();
+//
+// List chatMessages = new ArrayList<>();
+// chatMessages.add(UserMessage.from("请问,泰州市的天气怎么样?"));
+//
+// Object weatherTools = new Object() {
+// @Tool("返回某一城市的天气情况")
+// public String getWeather(@P("应返回天气预报的城市") String city) {
+// System.out.println(city);
+// return "天气阴转多云,1~6℃";
+// }
+// };
+//
+// List toolSpecifications = ToolSpecifications.toolSpecificationsFrom(weatherTools);
+//
+// ChatRequest chatRequest = ChatRequest.builder()
+// .messages(chatMessages)
+// .parameters(ChatRequestParameters.builder()
+// .toolSpecifications(toolSpecifications)
+// .build())
+// .build();
+//
+//
+// ChatResponse chatResponse = model.chat(chatRequest);
+// AiMessage aiMessage = chatResponse.aiMessage();
+// chatMessages.add(aiMessage);
+// if (aiMessage.hasToolExecutionRequests()) {
+// System.out.println("LLM决定调用工具");
+// System.out.println(chatResponse.aiMessage());
+// List toolExecutionRequests = chatResponse.aiMessage().toolExecutionRequests();
+// toolExecutionRequests.forEach(new Consumer() {
+// @Override
+// public void accept(ToolExecutionRequest toolExecutionRequest) {
+// ToolExecutor toolExecutor = new DefaultToolExecutor(weatherTools, toolExecutionRequest);
+// System.out.println("Now let's execute the tool " + toolExecutionRequest.name());
+// String result = toolExecutor.execute(toolExecutionRequest, UUID.randomUUID().toString());
+// ToolExecutionResultMessage toolExecutionResultMessages = ToolExecutionResultMessage.from(toolExecutionRequest, result);
+// chatMessages.add(toolExecutionResultMessages);
+// }
+// });
+// }
+//
+// // STEP 4: Model generates final response
+// ChatRequest chatRequest2 = ChatRequest.builder()
+// .messages(chatMessages)
+// .parameters(ChatRequestParameters.builder()
+// .toolSpecifications(toolSpecifications)
+// .build())
+// .build();
+// ChatResponse finalChatResponse = model.chat(chatRequest2);
+// System.out.println(finalChatResponse.aiMessage().text());
+ }
+
+ // Define an assistant interface
+ interface Assistant {
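+ // AiServices generates an implementation of this interface at runtime; methods returning TokenStream stream the reply.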
+
+ String chat(String userMessage);
+
+ TokenStream chatStream(List<ChatMessage> messages);
+
+ TokenStream chatStream(ChatMessage message);
+
+ TokenStream chatStream(String message);
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/xyz/wbsite/ai/Test.java35^ b/src/main/java/xyz/wbsite/ai/Test.java35^
new file mode 100644
index 0000000..f62cfa0
--- /dev/null
+++ b/src/main/java/xyz/wbsite/ai/Test.java35^
@@ -0,0 +1,173 @@
+//package xyz.wbsite.ai;
+//
+//import cn.hutool.core.thread.ThreadUtil;
+//import cn.hutool.json.JSONUtil;
+//import dev.langchain4j.agent.tool.*;
+//import dev.langchain4j.data.message.*;
+//import dev.langchain4j.model.chat.ChatLanguageModel;
+//import dev.langchain4j.model.chat.StreamingChatLanguageModel;
+//import dev.langchain4j.model.chat.request.ChatRequest;
+//import dev.langchain4j.model.chat.request.ChatRequestParameters;
+//import dev.langchain4j.model.chat.response.ChatResponse;
+//import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
+//import dev.langchain4j.model.ollama.OllamaChatModel;
+//import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
+//import dev.langchain4j.service.tool.DefaultToolExecutor;
+//import dev.langchain4j.service.tool.ToolExecutor;
+//
+//import java.util.ArrayList;
+//import java.util.List;
+//import java.util.UUID;
+//import java.util.function.Consumer;
+//
+///**
+// * Main entry point.
+// */
+//public class Test {
+//
+// public static void main(String[] args) {
+//// testSimpleChat(args);
+//// testStreamChat(args);
+//// testTool(args);
+//// testRagChat(args);
+// }
+//
+// public static void testSimpleChat(String[] args) {
+// ChatLanguageModel model = OllamaChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("deepseek-r1:32B")
+// .build();
+//
+// String generate = model.chat("请问,你是谁?");
+// System.out.println(generate);
+//
+// ChatResponse chat = model.chat(new UserMessage("请问,你是谁?"));
+// String text = chat.aiMessage().text();
+// System.out.println(text);
+//
+// ChatRequest build = ChatRequest.builder()
+// .messages(new ChatMessage[]{UserMessage.from("我想知道1+1等于几")})
+// .build();
+// ChatResponse chatResponse = model.chat(build);
+// System.out.println(chatResponse.aiMessage().text());
+// }
+//
+// public static void testRagChat(String[] args) {
+// ChatLanguageModel model = OllamaChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("deepseek-r1:32B")
+// .build();
+//
+// ChatRequest build = ChatRequest.builder()
+// .messages(new ChatMessage[]{
+// SystemMessage.from("你需要使用文档内容对用户提出的问题进行回复,这点非常重要。\n" +
+// "当用户提出的问题无法根据文档内容进行回复时,回复不知道即可。请明确说明,你的答案是否是从文档中获取的。\n" +
+// "文档内容如下:小王和小吴是情侣关系,他们两个年龄一样都是20岁,小王有一辆大众牌小轿车,小吴有一辆雅迪电动车。"),
+// UserMessage.from("小王的车是什么牌子")
+// })
+// .build();
+// ChatResponse chatResponse = model.chat(build);
+// System.out.println(chatResponse.aiMessage().text());
+// }
+//
+// public static void testStreamChat(String[] args) {
+// final boolean[] end = {false};
+//
+// StreamingChatLanguageModel model = OllamaStreamingChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("deepseek-r1:32B")
+// .build();
+//
+// String question = "假如树上有10只鸟,10分钟前飞走了2只,5分钟前又飞回了1只,刚刚又来了3只,那现在树上有几只鸟?";
+//
+// System.out.println("开始提问");
+//
+// ChatRequest chatRequest = ChatRequest.builder().messages(new ChatMessage[]{UserMessage.from(question)}).build();
+//
+// final StringBuilder sb = new StringBuilder();
+//
+// model.chat(chatRequest, new StreamingChatResponseHandler() {
+// @Override
+// public void onPartialResponse(String s) {
+// System.out.print(s);
+// sb.append(s);
+// }
+//
+// @Override
+// public void onCompleteResponse(ChatResponse chatResponse) {
+// System.out.println("-----回答结束-----");
+// System.out.println(JSONUtil.toJsonStr(chatResponse));
+// end[0] = true;
+// }
+//
+// @Override
+// public void onError(Throwable throwable) {
+// System.err.println("-----回答出错-----");
+// end[0] = true;
+// }
+// });
+//
+// while (!end[0]) {
+// ThreadUtil.safeSleep(1000);
+// }
+// }
+//
+// public static void testTool(String[] args) {
+// ChatLanguageModel model = OllamaChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("qwen2.5:7b")
+// .logRequests(true)
+// .logResponses(true)
+// .build();
+//
+// List<ChatMessage> chatMessages = new ArrayList<>();
+// chatMessages.add(UserMessage.from("请问,泰州市的天气怎么样?"));
+//
+// Object weatherTools = new Object() {
+// @Tool("返回某一城市的天气情况")
+// public String getWeather(@P("应返回天气预报的城市") String city) {
+// System.out.println(city);
+// return "天气阴转多云,1~6℃";
+// }
+// };
+//
+// List<ToolSpecification> toolSpecifications = ToolSpecifications.toolSpecificationsFrom(weatherTools);
+//
+// ChatRequest chatRequest = ChatRequest.builder()
+// .messages(chatMessages)
+// .parameters(ChatRequestParameters.builder()
+// .toolSpecifications(toolSpecifications)
+// .build())
+// .build();
+//
+//
+// ChatResponse chatResponse = model.chat(chatRequest);
+// AiMessage aiMessage = chatResponse.aiMessage();
+// chatMessages.add(aiMessage);
+// if (aiMessage.hasToolExecutionRequests()) {
+// System.out.println("LLM决定调用工具");
+// System.out.println(chatResponse.aiMessage());
+// List<ToolExecutionRequest> toolExecutionRequests = chatResponse.aiMessage().toolExecutionRequests();
+// toolExecutionRequests.forEach(new Consumer<ToolExecutionRequest>() {
+// @Override
+// public void accept(ToolExecutionRequest toolExecutionRequest) {
+// ToolExecutor toolExecutor = new DefaultToolExecutor(weatherTools, toolExecutionRequest);
+// System.out.println("Now let's execute the tool " + toolExecutionRequest.name());
+// String result = toolExecutor.execute(toolExecutionRequest, UUID.randomUUID().toString());
+// ToolExecutionResultMessage toolExecutionResultMessages = ToolExecutionResultMessage.from(toolExecutionRequest, result);
+// chatMessages.add(toolExecutionResultMessages);
+// }
+// });
+// }
+//
+// // STEP 4: Model generates final response
+// ChatRequest chatRequest2 = ChatRequest.builder()
+// .messages(chatMessages)
+// .parameters(ChatRequestParameters.builder()
+// .toolSpecifications(toolSpecifications)
+// .build())
+// .build();
+// ChatResponse finalChatResponse = model.chat(chatRequest2);
+// System.out.println(finalChatResponse.aiMessage().text());
+// }
+//}
\ No newline at end of file
diff --git a/src/main/java/xyz/wbsite/ai/TestRag.java35^ b/src/main/java/xyz/wbsite/ai/TestRag.java35^
new file mode 100644
index 0000000..b3e871e
--- /dev/null
+++ b/src/main/java/xyz/wbsite/ai/TestRag.java35^
@@ -0,0 +1,88 @@
+//package xyz.wbsite.ai;
+//
+//import dev.langchain4j.data.document.DefaultDocument;
+//import dev.langchain4j.data.document.Document;
+//import dev.langchain4j.data.segment.TextSegment;
+//import dev.langchain4j.memory.chat.MessageWindowChatMemory;
+//import dev.langchain4j.model.chat.ChatLanguageModel;
+//import dev.langchain4j.model.chat.StreamingChatLanguageModel;
+//import dev.langchain4j.model.ollama.OllamaChatModel;
+//import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
+//import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
+//import dev.langchain4j.service.AiServices;
+//import dev.langchain4j.service.TokenStream;
+//import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
+//import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
+//
+//import java.util.List;
+//
+//public class TestRag {
+//
+// public static void main(String[] args) {
+// // Load documents from a path (for this demo, the documents below are created inline instead)
+// // List<Document> documents = FileSystemDocumentLoader.loadDocuments("path");
+// List<Document> documents = List.of(
+// new DefaultDocument("人往往在做梦的时候会打呼噜"),
+// new DefaultDocument("小猪在睡觉的时候会扭屁股"),
+// new DefaultDocument("有一只蟑螂在床底下跳舞"),
+// new DefaultDocument("小狗在睡觉的时候会磨牙"),
+// new DefaultDocument("我家的小鸡喜欢吃虫子")
+// );
+//
+// // Create an in-memory store that holds the document segments and their embeddings
+// InMemoryEmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
+//
+// // Ingest the documents into the embedding store
+// EmbeddingStoreIngestor.ingest(documents, embeddingStore);
+//
+// // Define an assistant interface
+// interface Assistant {
+//
+// String chat(String userMessage);
+//
+// TokenStream chatStream(String userMessage);
+// }
+//
+// ChatLanguageModel chatModel = OllamaChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("qwen2.5:7b")
+// .logRequests(true)
+// .logResponses(true)
+// .build();
+//
+// StreamingChatLanguageModel streamingChatModel = OllamaStreamingChatModel.builder()
+// .baseUrl("http://36.138.207.178:11434")
+// .modelName("deepseek-r1:32B")
+// .logRequests(true)
+// .logResponses(true)
+// .build();
+//
+// Assistant assistant = AiServices.builder(Assistant.class)
+// .chatLanguageModel(chatModel)
+// .streamingChatLanguageModel(streamingChatModel)
+// .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
+// .contentRetriever(EmbeddingStoreContentRetriever.from(embeddingStore))
+// .build();
+//
+//// // Questions
+//// String[] questions = new String[]{
+//// "人在做梦的时候会干什么",
+//// "小猪在睡觉的时候会干什么",
+//// "小狗在睡觉的时候会干什么"
+//// };
+////
+//// // Answers
+//// for (String question : questions) {
+//// System.out.println("问题:" + question);
+//// String chat = assistant.chat(question);
+//// System.out.println("回答:" + chat);
+//// }
+//
+// assistant.chatStream("小猪在睡觉的时候会干什么")
+// .onPartialResponse(s -> System.out.println("Partial Response: " + s))
+// .onError(throwable -> System.err.println("Error: " + throwable.getMessage()))
+// .onCompleteResponse(chatResponse -> System.out.println("Complete Response: " + chatResponse.aiMessage().text()))
+// .start();
+//
+// }
+//}