Upload backup

master
王兵 6 months ago
parent 396ff5b4ce
commit 49201663f3

@@ -32,6 +32,18 @@
<layout>default</layout>
</pluginRepository>
</pluginRepositories>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<!-- Hutool utility library: provides common APIs to avoid reinventing the wheel -->

@@ -2,25 +2,29 @@ package xyz.wbsite.ai;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.json.JSONUtil;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.agent.tool.*;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.message.*;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.request.ChatRequestParameters;
import dev.langchain4j.model.chat.request.ChatRequestParametersBuilder;
import dev.langchain4j.model.chat.request.DefaultChatRequestParameters;
import dev.langchain4j.model.chat.request.json.JsonObjectSchema;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.tool.DefaultToolExecutor;
import dev.langchain4j.service.tool.ToolExecutor;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.function.Consumer;
/**
*
*/
@@ -29,8 +33,8 @@ public class Main {
public static void main(String[] args) {
// testSimpleChat(args);
// testStreamChat(args);
// testTool(args);
testRagChat(args);
testTool(args);
// testRagChat(args);
}
public static void testSimpleChat(String[] args) {
@@ -116,34 +120,59 @@ public class Main {
public static void testTool(String[] args) {
ChatLanguageModel model = OllamaChatModel.builder()
.baseUrl("http://36.138.207.178:11434")
.modelName("deepseek-r1:32B")
.modelName("qwen2.5:7b")
.logRequests(true)
.logResponses(true)
.build();
ToolSpecification build1 = ToolSpecification.builder()
.name("获取天气")
.description("返回指定城市的天气")
.parameters(JsonObjectSchema.builder().addStringProperty("城市").build())
.build();
ToolExecutor toolExecutor = new ToolExecutor() {
public String execute(ToolExecutionRequest toolExecutionRequest, Object o) {
System.out.println();
return "=============";
List<ChatMessage> chatMessages = new ArrayList<>();
chatMessages.add(UserMessage.from("请问,泰州市的天气怎么样?"));
Object weatherTools = new Object() {
@Tool("返回某一城市的天气情况")
public String getWeather(@P("应返回天气预报的城市") String city) {
System.out.println(city);
return "天气阴转多云1~6℃";
}
};
ChatRequestParameters build2 = DefaultChatRequestParameters.builder().toolSpecifications(build1).build();
List<ToolSpecification> toolSpecifications = ToolSpecifications.toolSpecificationsFrom(weatherTools);
ChatRequest build = ChatRequest.builder()
.messages(new ChatMessage[]{
UserMessage.from("请问,泰州市明天的天气怎么样?"),
SystemMessage.from("请使用天气工具"),
})
.parameters(build2)
ChatRequest chatRequest = ChatRequest.builder()
.messages(chatMessages)
.parameters(ChatRequestParameters.builder()
.toolSpecifications(toolSpecifications)
.build())
.build();
ChatResponse chat = model.chat(build);
System.out.println(chat.aiMessage().text());
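// STEP 2: Model generates a tool execution request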
ChatResponse chatResponse = model.chat(chatRequest);
AiMessage aiMessage = chatResponse.aiMessage();
chatMessages.add(aiMessage);
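// STEP 3: User executes the tool and sends the result back to the model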
if (aiMessage.hasToolExecutionRequests()) {
System.out.println("LLM决定调用工具");
System.out.println(chatResponse.aiMessage());
List<ToolExecutionRequest> toolExecutionRequests = chatResponse.aiMessage().toolExecutionRequests();
toolExecutionRequests.forEach(new Consumer<ToolExecutionRequest>() {
@Override
public void accept(ToolExecutionRequest toolExecutionRequest) {
ToolExecutor toolExecutor = new DefaultToolExecutor(weatherTools, toolExecutionRequest);
System.out.println("Now let's execute the tool " + toolExecutionRequest.name());
String result = toolExecutor.execute(toolExecutionRequest, UUID.randomUUID().toString());
ToolExecutionResultMessage toolExecutionResultMessages = ToolExecutionResultMessage.from(toolExecutionRequest, result);
chatMessages.add(toolExecutionResultMessages);
}
});
}
// STEP 4: Model generates final response
ChatRequest chatRequest2 = ChatRequest.builder()
.messages(chatMessages)
.parameters(ChatRequestParameters.builder()
.toolSpecifications(toolSpecifications)
.build())
.build();
ChatResponse finalChatResponse = model.chat(chatRequest2);
System.out.println(finalChatResponse.aiMessage().text());
}
}
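The manual loop above follows langchain4j's low-level tool-calling flow: send the query with tool specifications, let the model emit tool execution requests, run the tools, append the results, and ask the model again for the final answer. For comparison, the AiServices API that is already imported in this file can drive the same loop automatically. The sketch below is illustrative only and not part of the commit; it assumes the langchain4j version used in this diff, and the Assistant interface and WeatherTools class names are hypothetical.

import dev.langchain4j.agent.tool.P;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.service.AiServices;

public class WeatherAssistantSketch {

    // Hypothetical tool holder; mirrors the anonymous weatherTools object in the diff.
    static class WeatherTools {
        @Tool("返回某一城市的天气情况")
        public String getWeather(@P("应返回天气预报的城市") String city) {
            return "天气阴转多云1~6℃";
        }
    }

    // Hypothetical service interface; AiServices generates the implementation.
    interface Assistant {
        String chat(String userMessage);
    }

    public static void main(String[] args) {
        ChatLanguageModel model = OllamaChatModel.builder()
                .baseUrl("http://36.138.207.178:11434")
                .modelName("qwen2.5:7b")
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .tools(new WeatherTools()) // tool execution requests are handled internally
                .build();

        // Same question as in testTool; the tool call and final answer happen in one call.
        System.out.println(assistant.chat("请问,泰州市的天气怎么样?"));
    }
}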