集成 Spring Boot
xml
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai-spring-boot-starter</artifactId>
<version>1.14.1-beta24</version>
</dependency>
java
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class AiConfig {

    // DeepSeek's OpenAI-compatible endpoint, shared by both beans so the two
    // models cannot silently drift apart.
    private static final String BASE_URL = "https://api.deepseek.com";
    private static final String MODEL_NAME = "deepseek-v4-flash";

    /**
     * Reads the API key from the environment and fails fast at startup when it
     * is missing, instead of failing later on the first request with an opaque
     * authentication error.
     */
    private static String apiKey() {
        String key = System.getenv("DS_API_KEY");
        if (key == null || key.isBlank()) {
            throw new IllegalStateException("Environment variable DS_API_KEY is not set");
        }
        return key;
    }

    /** Blocking chat model used for simple request/response calls. */
    @Bean
    public ChatModel chatModel() {
        return OpenAiChatModel.builder()
                .baseUrl(BASE_URL)
                .apiKey(apiKey())
                .modelName(MODEL_NAME)
                .strictJsonSchema(true)
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /** Streaming chat model used for token-by-token (SSE) responses. */
    @Bean
    public StreamingChatModel streamingChatModel() {
        return OpenAiStreamingChatModel.builder()
                .baseUrl(BASE_URL)
                .apiKey(apiKey())
                .modelName(MODEL_NAME)
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}
low-level API
java
import dev.langchain4j.model.LambdaStreamingResponseHandler;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@Slf4j
@RestController
@RequestMapping("/")
public class MyController {

    private final ChatModel chatModel;
    private final StreamingChatModel streamingChatModel;

    // Constructor injection keeps both dependencies final and testable.
    public MyController(ChatModel chatModel, StreamingChatModel streamingChatModel) {
        this.chatModel = chatModel;
        this.streamingChatModel = streamingChatModel;
    }

    /** Blocking chat: returns the complete answer in a single response. */
    @GetMapping("/chat")
    public String model(@RequestParam(value = "message", defaultValue = "Hello") String message) {
        return chatModel.chat(message);
    }

    /**
     * Streaming chat via {@link Flux#create}: every partial token is pushed to
     * the client as a Server-Sent Event as soon as it arrives.
     */
    @GetMapping(path = "/chat2", produces = MediaType.TEXT_EVENT_STREAM_VALUE + "; charset=UTF-8")
    public Flux<String> chat2(@RequestParam(value = "message") String message) {
        return Flux.create(sink -> streamingChatModel.chat(message, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                // 每收到一个片段,就推送到前端
                sink.next(partialResponse);
            }

            @Override
            public void onCompleteResponse(ChatResponse completeResponse) {
                // 流式响应完成,关闭连接
                sink.complete();
            }

            @Override
            public void onError(Throwable error) {
                // 发生错误,传递异常
                sink.error(error);
            }
        }));
    }

    /**
     * Streaming chat via {@link Sinks.Many} (finer-grained manual control).
     * UTF-8 charset is declared like {@code /chat2} so Chinese output is not
     * mangled by the default SSE encoding.
     */
    @GetMapping(value = "/chat3", produces = MediaType.TEXT_EVENT_STREAM_VALUE + "; charset=UTF-8")
    public Flux<String> chat3(@RequestParam(value = "message") String message) {
        // 创建一个可手动推送数据的 Sink
        Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
        // BUG FIX: LambdaStreamingResponseHandler.onPartialResponseAndError has no
        // completion callback, so the sink was never completed and the SSE
        // connection stayed open forever. A full handler lets us emit completion.
        streamingChatModel.chat(message, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                sink.tryEmitNext(partialResponse);
            }

            @Override
            public void onCompleteResponse(ChatResponse completeResponse) {
                // 流式响应完成,关闭连接
                sink.tryEmitComplete();
            }

            @Override
            public void onError(Throwable error) {
                sink.tryEmitError(error);
            }
        });
        // 返回 Flux 视图
        return sink.asFlux();
    }
}
high-level API
引入
xml
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-spring-boot-starter</artifactId>
<version>1.14.1-beta24</version>
</dependency>
java
// AI service interface: LangChain4j generates the implementation at runtime
// (via AiServices.create), prepending the @SystemMessage to every request.
public interface Assistant {
    @SystemMessage("You are a polite assistant")
    String chat(String userMessage);
}
java
@Slf4j
@RestController
@RequestMapping("/")
public class MyController {

    // Built once at startup: AiServices.create builds a reflective dynamic
    // proxy, which is wasteful (and previously happened on every request).
    private final Assistant assistant;

    // Single-constructor injection; @Autowired is implicit in Spring.
    public MyController(ChatModel chatModel) {
        this.assistant = AiServices.create(Assistant.class, chatModel);
    }

    /** Delegates the user message to the AI service and returns the reply. */
    @GetMapping("/chat")
    public String model(@RequestParam(value = "message") String message) {
        return assistant.chat(message);
    }
}
java
// @AiService lets the LangChain4j Spring Boot starter auto-detect this
// interface, generate its implementation, and register it as a bean —
// no manual AiServices.create call needed.
@AiService
public interface Assistant {
    @SystemMessage("You are a polite assistant")
    String chat(String userMessage);
}
java
// BUG FIX: the class had @GetMapping but no stereotype annotation, so Spring
// would never register it as a controller and the mapping would be ignored.
@RestController
public class MyController {

    private final Assistant assistant;

    // Constructor injection of the @AiService-generated bean (the previous
    // field @Autowired injection left the dependency non-final).
    public MyController(Assistant assistant) {
        this.assistant = assistant;
    }

    /** Delegates the user message to the auto-wired AI service. */
    @GetMapping("/chat")
    public String model(@RequestParam(value = "message") String message) {
        return assistant.chat(message);
    }
}
java
// Explicit wiring: binds this service to the bean named "openAiChatModel"
// instead of relying on automatic discovery — required when the context
// contains more than one ChatModel bean.
@AiService(wiringMode = AiServiceWiringMode.EXPLICIT, chatModel = "openAiChatModel")
public interface Assistant {
    @SystemMessage("你是一个建模专家,只能回答建模的问题")
    String chat(String userMessage);
}
// Streaming variant: returning Flux<String> makes LangChain4j use the
// streaming model, emitting each partial token as a Flux element.
@AiService(wiringMode = AiServiceWiringMode.EXPLICIT, streamingChatModel = "openAiStreamingChatModel")
public interface StreamingAssistant {
    @SystemMessage("You are a polite assistant")
    Flux<String> chat(String userMessage);
}
在注解上使用 chatMemoryProvider, 方法上使用 memoryId
java
@Configuration
public class MyChatMemoryProvider {

    /**
     * Per-conversation memory: each memoryId gets a sliding window of the
     * last 30 messages backed by the shared ChatMemoryStore bean.
     *
     * The store is injected as a method parameter instead of calling
     * chatMemoryStore() directly: the direct call only returns the singleton
     * through the CGLIB configuration proxy and silently creates a second
     * instance under proxyBeanMethods = false.
     */
    @Bean
    public ChatMemoryProvider chatMemoryProvider(ChatMemoryStore chatMemoryStore) {
        return memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(30)
                .chatMemoryStore(chatMemoryStore)
                .build();
    }

    /** In-memory store; swap in RedisChatMemoryStore to survive restarts. */
    @Bean
    public ChatMemoryStore chatMemoryStore() {
        //return new RedisChatMemoryStore();
        return new InMemoryChatMemoryStore();
    }
}
java
@AiService(wiringMode = AiServiceWiringMode.EXPLICIT,
chatModel = "openAiChatModel",
chatMemoryProvider = "chatMemoryProvider")
public interface Assistant {
@SystemMessage("你是一个智能助手")
String chat(@MemoryId String memoryId, @UserMessage String userMessage);
}
http
GET http://localhost:8080/chat?memoryId=1&message=北京大学是211吗?
###
GET http://localhost:8080/chat?memoryId=1&message=是985么?
###
GET http://localhost:8080/chat?memoryId=1&message=是双一流么?
Spring Boot 打印日志
properties
langchain4j.open-ai.chat-model.log-requests = true
langchain4j.open-ai.chat-model.log-responses = true
logging.level.dev.langchain4j = DEBUG