Integrating Spring AI with Ollama
Adding the dependency
xml
<dependency>
    <groupId>io.springboot.ai</groupId>
    <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
    <version>1.0.0</version>
</dependency>
Because this artifact is a pre-release build rather than a GA release, you also need to configure the Spring milestone repository in your pom.xml:
xml
<repositories>
    <repository>
        <name>spring-milestones</name>
        <id>spring-milestones</id>
        <url>https://repo.spring.io/milestone</url>
        <snapshots>
            <enabled>false</enabled>
        </snapshots>
    </repository>
</repositories>
Configuration file
yaml
spring:
  application:
    name: study-spring-ai
  ai:
    ollama:
      # set this to the base URL of your Ollama instance
      base-url: http://192.168.4.11:11434
      chat:
        options:
          model: qwen2:0.5b
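
With this configuration in place, the starter auto-configures an OllamaChatClient bean that can be injected anywhere in the application. The following is a minimal sketch (not part of the original project; the class and bean names are made up for illustration) that sends a single prompt at startup via the convenience call(String) method:
java
package org.study.ai;

import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Hypothetical smoke check: verifies that the auto-configured OllamaChatClient
 * can reach the Ollama server defined in application.yml.
 */
@Configuration
public class OllamaSmokeCheck {

    @Bean
    CommandLineRunner ollamaSmokeRunner(OllamaChatClient chatClient) {
        return args -> {
            // call(String) wraps the text in a Prompt and returns the reply as a String
            String reply = chatClient.call("Introduce yourself in one sentence.");
            System.out.println("Ollama replied: " + reply);
        };
    }
}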
Code implementation
The code itself is fairly simple; here is an example:
java
package org.study.ai.controller;

import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.Generation;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * @author xl-9527
 * @since 2024/10/25
 **/
@RestController
@RequestMapping("ai/chat-models")
public class AiRequestController {

    private final OllamaChatClient ollamaChatClient;

    public AiRequestController(final OllamaChatClient ollamaChatClient) {
        this.ollamaChatClient = ollamaChatClient;
    }

    @GetMapping("ollama/dialogue")
    public Object dialogue(@RequestParam(name = "msg") String msg) {
        // wrap the user's input as a UserMessage and override the model per request
        UserMessage userMessage = new UserMessage(msg);
        ChatResponse callResponse = ollamaChatClient.call(
                new Prompt(
                        userMessage,
                        OllamaOptions.create()
                                .withModel("qwen2:0.5b")
                )
        );
        // return only the generated text of the first result
        Generation result = callResponse.getResult();
        return result.getOutput().getContent();
    }
}
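
With the application running, a GET request to /ai/chat-models/ollama/dialogue?msg=... returns the model's reply as plain text. If you would rather stream the reply token by token, the same client generation also exposes a reactive stream(Prompt) method. Below is a hedged sketch of how such a handler could look inside the controller above; the extra endpoint name and the assumption that the version used here implements StreamingChatClient (with Reactor on the classpath) are mine, not from the original article:
java
import org.springframework.http.MediaType;
import reactor.core.publisher.Flux;

// Hypothetical streaming endpoint added to AiRequestController.
// Assumes the OllamaChatClient version above implements StreamingChatClient
// and that Reactor (pulled in by the starter) is available.
@GetMapping(value = "ollama/dialogue-stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> dialogueStream(@RequestParam(name = "msg") String msg) {
    Prompt prompt = new Prompt(
            new UserMessage(msg),
            OllamaOptions.create().withModel("qwen2:0.5b")
    );
    // map each partial ChatResponse to just its text content
    return ollamaChatClient.stream(prompt)
            .map(response -> response.getResult().getOutput().getContent());
}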