上传备份

master
王兵 6 months ago
parent f3634b630a
commit 84a0a51740

@@ -4,12 +4,14 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>xyz.wbsite</groupId>
<artifactId>start-ai</artifactId>
<artifactId>starter-ai</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<properties>
<java.version>1.8</java.version>
<langchain4j.version>1.0.0-beta1</langchain4j.version>
</properties>
<repositories>
@@ -40,9 +42,29 @@
</dependency>
<dependency>
<groupId>org.dromara.easyai</groupId>
<artifactId>easyAi</artifactId>
<version>1.3.3</version>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-core</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-ollama</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-embeddings</artifactId>
<version>${langchain4j.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-chroma</artifactId>
<version>${langchain4j.version}</version>
</dependency>
</dependencies>
</project>

@@ -0,0 +1,91 @@
package xyz.wbsite.ai;

import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.json.JSONUtil;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import java.util.concurrent.CountDownLatch;
/**
*
*/
public class Main {
public static void main(String[] args) {
// testSimpleChat(args);
testStreamChat(args);
}
public static void testSimpleChat(String[] args) {
ChatLanguageModel model = OllamaChatModel.builder()
.baseUrl("http://36.138.207.178:11434")
.modelName("deepseek-r1:32B")
.build();
String generate = model.chat("请问,你是谁?");
System.out.println(generate);
ChatResponse chat = model.chat(new UserMessage("请问,你是谁?"));
String text = chat.aiMessage().text();
System.out.println(text);
ChatRequest build = ChatRequest.builder()
.messages(new ChatMessage[]{UserMessage.from("我想知道1+1等于几")})
.build();
ChatResponse chatResponse = model.chat(build);
System.out.println(chatResponse.aiMessage().text());
}
public static void testStreamChat(String[] args) {
final boolean[] end = {false};
StreamingChatLanguageModel model = OllamaStreamingChatModel.builder()
.baseUrl("http://36.138.207.178:11434")
.modelName("deepseek-r1:32B")
.build();
String question = "假如树上有10只鸟10分钟前飞走了2只5分钟前又飞回了1只刚刚又来了3只那现在树上有几只鸟?";
System.out.println("开始提问");
ChatRequest chatRequest = ChatRequest.builder().messages(new ChatMessage[]{UserMessage.from(question)}).build();
final StringBuilder sb = new StringBuilder();
model.chat(chatRequest, new StreamingChatResponseHandler() {
@Override
public void onPartialResponse(String s) {
System.out.print(s);
sb.append(s);
}
@Override
public void onCompleteResponse(ChatResponse chatResponse) {
System.out.println("-----回答结束-----");
System.out.println(JSONUtil.toJsonStr(chatResponse));
end[0] = true;
}
@Override
public void onError(Throwable throwable) {
System.err.println("-----回答出错-----");
end[0] = true;
}
});
while (!end[0]) {
ThreadUtil.safeSleep(1000);
}
}
}
Loading…
Cancel
Save

Powered by TurnKey Linux.