Preface
Hand-writing the Ollama service interaction in C# to chat with a local model
I recently used C# with the OllamaSharp library to chat with a local model through Ollama, and then wondered whether I could implement the same functionality myself. After some digging, including a look at the OllamaSharp source code, it turned out to be quite doable: once the Ollama service is running, the client simply sends HTTP requests, reads the responses, and does a bit of data processing.
Basic flow
1. Start the Ollama service process.
2. Create an HttpClient.
3. Build the request body (parameters: model name, prompt, whether to stream).
4. Serialize the request body to JSON.
5. Create the HTTP request content.
6. Send the POST request and make sure it succeeded.
7. Read the response content and parse the response string.
8. Return the result.
csharp
// Build the request body: model name, prompt, whether to stream
var request = new RequestModel { Model = model, Prompt = prompt, Stream = false };
// Serialize the request body to JSON
var json = JsonSerializer.Serialize(request);
// Create the HTTP request content
var content = new StringContent(json, Encoding.UTF8, "application/json");
// Send the POST request
var response = await _httpClient.PostAsync("/api/generate", content);
// Make sure the request succeeded
response.EnsureSuccessStatusCode();
// Read the response content
string responseString = await response.Content.ReadAsStringAsync();
// Parse the response string
ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString);
// Return the result
return results.Response;
Project structure
OllamaClient: implements the basic chat request and the local model-list query.
Model: metadata for a model returned by the service.
RequestModel: request parameter model.
ResponseModel: response parameter model, used to parse the returned result.
MainWindow: the user interface.
MainWindowViewModel: UI interaction logic.

Demo
Model loading

Sending a chat

Code
Ollama client: OllamaClient
csharp
public class OllamaClient
{
    public IEnumerable<Model> ModelList { get; set; }
    private readonly HttpClient _httpClient;

    public OllamaClient(string baseAddress = "http://localhost:11434")
    {
        _httpClient = new HttpClient
        {
            BaseAddress = new Uri(baseAddress)
        };
        ExecuteCommand("ollama list"); // Start the Ollama service
    }

    /// <summary>
    /// Generate text asynchronously
    /// </summary>
    public async Task<string> GenerateTextAsync(string model, string prompt)
    {
        try
        {
            // Build the request body: model name, prompt, whether to stream
            var request = new RequestModel { Model = model, Prompt = prompt, Stream = false };
            // Serialize the request body to JSON
            var json = JsonSerializer.Serialize(request);
            // Create the HTTP request content
            var content = new StringContent(json, Encoding.UTF8, "application/json");
            // Send the POST request
            var response = await _httpClient.PostAsync("/api/generate", content);
            // Make sure the request succeeded
            response.EnsureSuccessStatusCode();
            // Read the response content
            string responseString = await response.Content.ReadAsStringAsync();
            // Parse the response string
            ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString);
            // Return the result
            return results.Response;
        }
        catch (HttpRequestException e)
        {
            throw new Exception($"Request failed: {e.Message}");
        }
    }

    /// <summary>
    /// Stream generated text asynchronously
    /// </summary>
    public async IAsyncEnumerable<string> StreamGenerateTextAsync(string model, string prompt)
    {
        // Build the request body: model name, prompt, whether to stream
        var request = new RequestModel { Model = model, Prompt = prompt, Stream = true };
        // Serialize the request body to JSON
        var json = JsonSerializer.Serialize(request);
        // Create the HTTP request content
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        // Send the POST request
        using var response = await _httpClient.PostAsync("/api/generate", content);
        // Make sure the request succeeded
        response.EnsureSuccessStatusCode();
        // Read the response stream
        using var stream = await response.Content.ReadAsStreamAsync();
        // Create a stream reader
        using var reader = new StreamReader(stream);
        // Read the stream line by line
        while (!reader.EndOfStream)
        {
            // Read one line
            var line = await reader.ReadLineAsync();
            // If the line is not empty, parse it as a ResponseModel and yield the text
            if (!string.IsNullOrEmpty(line))
            {
                var partial = JsonSerializer.Deserialize<ResponseModel>(line);
                yield return partial.Response;
            }
        }
    }

    /// <summary>
    /// List the locally installed models asynchronously
    /// </summary>
    public async Task<IEnumerable<Model>> ListLocalModelsAsync()
    {
        // Send the request
        HttpResponseMessage responseMessage = await _httpClient.GetAsync("/api/tags").ConfigureAwait(false);
        // Make sure the request succeeded
        responseMessage.EnsureSuccessStatusCode();
        // Read the response
        string response = await responseMessage.Content.ReadAsStringAsync();
        // Parse the response as LocalModels
        LocalModels localModel = JsonSerializer.Deserialize<LocalModels>(response);
        await Task.Delay(3000);
        // Return the result
        ModelList = localModel.Models;
        return localModel.Models;
    }

    /// <summary>
    /// Execute a CMD command: used to start the Ollama service
    /// </summary>
    public static bool ExecuteCommand(string command)
    {
        // Create the process start info
        ProcessStartInfo processStartInfo = new ProcessStartInfo
        {
            FileName = "cmd.exe",        // Start cmd.exe
            Arguments = $"/C {command}", // The command to execute
            UseShellExecute = true,      // Start the process through the OS shell
            CreateNoWindow = false,      // A console window will be shown
        };
        try
        {
            Process process = Process.Start(processStartInfo); // Start the process
            process.WaitForExit();                             // Wait for it to exit
            bool success = process.ExitCode == 0;              // Read the exit code before releasing the process
            process.Close();
            return success;
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"Error: {ex.Message}"); // Handle any other exception
            return false;
        }
    }
}
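For context, here is a minimal usage sketch of the client above. It is not part of the original project; it assumes Ollama is reachable at http://localhost:11434 and that a model such as deepseek-r1:1.5b has already been pulled.
csharp
// Minimal usage sketch (not part of the original project).
public static async Task Demo()
{
    var client = new OllamaClient();

    // List the locally installed models
    foreach (var m in await client.ListLocalModelsAsync())
        Console.WriteLine($"{m.ModelName} ({m.Size} bytes)");

    // One-shot generation
    string answer = await client.GenerateTextAsync("deepseek-r1:1.5b", "Hello, who are you?");
    Console.WriteLine(answer);

    // Streaming generation: tokens arrive one chunk at a time
    await foreach (var token in client.StreamGenerateTextAsync("deepseek-r1:1.5b", "Tell me a joke."))
        Console.Write(token);
}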
Request model: RequestModel
csharp
/// <summary>
/// Request model
/// </summary>
public class RequestModel
{
    public string Model { get; set; }
    public string Prompt { get; set; }
    public bool Stream { get; set; }
}
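Note that RequestModel carries no [JsonPropertyName] attributes, so System.Text.Json serializes the fields as "Model", "Prompt" and "Stream". The Ollama server still accepts this because Go's JSON decoding matches field names case-insensitively, but if you prefer to send the lowercase names shown in the Ollama API docs, a possible variant (not in the original code) is:
csharp
// Possible variant (not in the original sample): emit the lowercase field names explicitly.
public class RequestModel
{
    [JsonPropertyName("model")]
    public string Model { get; set; }

    [JsonPropertyName("prompt")]
    public string Prompt { get; set; }

    [JsonPropertyName("stream")]
    public bool Stream { get; set; }
}

// Or keep the class unchanged and serialize with a camelCase naming policy:
// var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
// var json = JsonSerializer.Serialize(request, options);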
Response model: ResponseModel
csharp
/// <summary>
/// Response model
/// </summary>
public class ResponseModel
{
    /// <summary>
    /// Model name
    /// </summary>
    [JsonPropertyName("model")]
    public string Model { get; set; }

    /// <summary>
    /// Creation time
    /// </summary>
    [JsonPropertyName("created_at")]
    public string CreatedTime { get; set; }

    /// <summary>
    /// Response: the generated text
    /// </summary>
    [JsonPropertyName("response")]
    public string Response { get; set; }

    /// <summary>
    /// Whether generation has finished
    /// </summary>
    [JsonPropertyName("done")]
    public bool Done { get; set; }

    /// <summary>
    /// Reason generation finished
    /// </summary>
    [JsonPropertyName("done_reason")]
    public string Done_Reason { get; set; }

    /// <summary>
    /// Context
    /// </summary>
    [JsonPropertyName("context")]
    public List<int> Context { get; set; }

    /// <summary>
    /// Total duration
    /// </summary>
    [JsonPropertyName("total_duration")]
    public long TotalDuration { get; set; }

    /// <summary>
    /// Model load duration
    /// </summary>
    [JsonPropertyName("load_duration")]
    public long LoadDuration { get; set; }

    /// <summary>
    /// Number of prompt tokens evaluated
    /// </summary>
    [JsonPropertyName("prompt_eval_count")]
    public long PromptEvalCount { get; set; }

    /// <summary>
    /// Prompt evaluation duration
    /// </summary>
    [JsonPropertyName("prompt_eval_duration")]
    public long PromptEvalDuration { get; set; }

    /// <summary>
    /// Number of tokens generated
    /// </summary>
    [JsonPropertyName("eval_count")]
    public long EvalCount { get; set; }

    /// <summary>
    /// Generation duration
    /// </summary>
    [JsonPropertyName("eval_duration")]
    public long EvalDuration { get; set; }
}
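The *_duration fields are reported by Ollama in nanoseconds, so they need a small conversion before display. A short helper (my addition, not in the original code) illustrates one way to do it:
csharp
// Sketch (not in the original code): Ollama reports durations in nanoseconds; one .NET tick is 100 ns.
static void PrintStats(ResponseModel r)
{
    TimeSpan total = TimeSpan.FromTicks(r.TotalDuration / 100);
    double tokensPerSecond = r.EvalCount / (r.EvalDuration / 1_000_000_000.0);
    Console.WriteLine($"Total: {total.TotalSeconds:F2} s, speed: {tokensPerSecond:F1} tokens/s");
}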
Result models: LocalModels | Model
csharp
/// <summary>
/// Local model list
/// </summary>
public class LocalModels
{
    [JsonPropertyName("models")]
    public IEnumerable<Model> Models { get; set; }
}

/// <summary>
/// Model
/// </summary>
public class Model
{
    /// <summary>
    /// Model name
    /// </summary>
    [JsonPropertyName("name")]
    public string Name { get; set; }

    /// <summary>
    /// Model identifier
    /// </summary>
    [JsonPropertyName("model")]
    public string ModelName { get; set; }

    /// <summary>
    /// Last modified time
    /// </summary>
    [JsonPropertyName("modified_at")]
    public DateTime ModifiedAt { get; set; }

    /// <summary>
    /// Size in bytes
    /// </summary>
    [JsonPropertyName("size")]
    public long Size { get; set; }

    /// <summary>
    /// Digest
    /// </summary>
    [JsonPropertyName("digest")]
    public string Digest { get; set; }

    /// <summary>
    /// Model details
    /// </summary>
    [JsonPropertyName("details")]
    public ModelDetails Details { get; set; }
}

/// <summary>
/// Model details
/// </summary>
public class ModelDetails
{
    /// <summary>
    /// Parent model
    /// </summary>
    [JsonPropertyName("parent_model")]
    public string ParentModel { get; set; }

    /// <summary>
    /// Format
    /// </summary>
    [JsonPropertyName("format")]
    public string Format { get; set; }

    /// <summary>
    /// Model family
    /// </summary>
    [JsonPropertyName("family")]
    public string Family { get; set; }

    /// <summary>
    /// Model families
    /// </summary>
    [JsonPropertyName("families")]
    public List<string> Families { get; set; }

    /// <summary>
    /// Parameter size
    /// </summary>
    [JsonPropertyName("parameter_size")]
    public string ParameterSize { get; set; }

    /// <summary>
    /// Quantization level
    /// </summary>
    [JsonPropertyName("quantization_level")]
    public string QuantizationLevel { get; set; }
}
A simple UI
MainWindow
xml
<Window.DataContext>
    <local:MainWindowViewModel x:Name="ViewModel"/>
</Window.DataContext>
<Grid>
    <Grid.RowDefinitions>
        <RowDefinition Height="50"/>
        <RowDefinition Height="*"/>
        <RowDefinition Height="300"/>
    </Grid.RowDefinitions>
    <Grid Grid.Row="0">
        <WrapPanel VerticalAlignment="Center" Margin="5">
            <Label Content="模型列表" Margin="5"/>
            <ComboBox Width="200" Margin="5" Name="ModelListBox"
                      ItemsSource="{Binding ModelCollection}"
                      SelectedItem="{Binding SelectedModel}"/>
        </WrapPanel>
    </Grid>
    <Grid Grid.Row="1">
        <TextBox x:Name="OutputBox" Text="{Binding OutputText}"
                 ScrollViewer.HorizontalScrollBarVisibility="Visible"
                 ScrollViewer.VerticalScrollBarVisibility="Visible"/>
    </Grid>
    <Grid Grid.Row="2">
        <Grid.RowDefinitions>
            <RowDefinition Height="*"/>
            <RowDefinition Height="50"/>
        </Grid.RowDefinitions>
        <TextBox Grid.Row="0" x:Name="InputBox" Background="#AAAAAA"
                 Text="{Binding InputText}"
                 TextWrapping="WrapWithOverflow"
                 ScrollViewer.VerticalScrollBarVisibility="Auto"
                 ScrollViewer.HorizontalScrollBarVisibility="Auto"/>
        <WrapPanel Grid.Row="1" HorizontalAlignment="Right" VerticalAlignment="Center" Margin="5">
            <Button Width="100" Height="30" x:Name="Btn_Submit"
                    Command="{Binding SendQuestionCommand}">发送</Button>
        </WrapPanel>
    </Grid>
</Grid>
MainWindowViewModel
csharp
public class MainWindowViewModel : PropertyChangedBase
{
    #region Fields and properties
    private string _inputText = "";                        // Input text
    private string _outputText = "";                       // Output text
    private OllamaClient _ollama;                          // Ollama client
    private string _selectedModel = "deepseek-r1:1.5b";    // Selected model
    private ObservableCollection<string> _modelCollection; // Model list

    #region Properties
    public ObservableCollection<string> ModelCollection
    {
        get => _modelCollection;
        set
        {
            if (_modelCollection != value)
            {
                _modelCollection = value;
                OnPropertyChanged();
            }
        }
    }

    public string SelectedModel
    {
        get => _selectedModel;
        set
        {
            if (_selectedModel != value)
            {
                _selectedModel = value;
                OnPropertyChanged();
            }
        }
    }

    private OllamaClient Ollama { get => _ollama; }

    public string OutputText
    {
        get => _outputText;
        set
        {
            if (_outputText != value)
            {
                _outputText = value;
                OnPropertyChanged();
            }
        }
    }

    public string InputText
    {
        get => _inputText;
        set
        {
            if (_inputText != value)
            {
                _inputText = value;
                OnPropertyChanged();
            }
        }
    }

    public ICommand SendQuestionCommand { get; set; }
    #endregion
    #endregion

    public MainWindowViewModel()
    {
        Initialize();
    }

    /// <summary>
    /// Initialization
    /// </summary>
    private void Initialize()
    {
        _ollama = new OllamaClient();
        _modelCollection = new ObservableCollection<string>();
        SelectedModel = "deepseek-r1:1.5b";
        // Blocks until the model list has been fetched
        var models = Ollama.ListLocalModelsAsync();
        AppendLine($"模型列表;{Environment.NewLine}");
        foreach (var model in models.Result)
        {
            ModelCollection.Add(model.ModelName);
            AppendLine($"{model.ModelName},{FormatFileSize(model.Size)}\r\n");
        }
        SendQuestionCommand = new ParameterlessCommand(OnSendQuestion);
    }

    /// <summary>
    /// Format a file size
    /// </summary>
    private string FormatFileSize(long bytes)
    {
        string[] sizes = { "B", "KB", "MB", "GB", "TB" };
        double size = bytes;
        int order = 0;
        while (size >= 1024 && order < sizes.Length - 1)
        {
            order++;
            size /= 1024;
        }
        return $"{size:0.##} {sizes[order]}";
    }

    /// <summary>
    /// Send the question
    /// </summary>
    public async void OnSendQuestion()
    {
        try
        {
            AppendLine($"【用户】{InputText}\r\n\r\n");
            AppendLine($"【AI】\r\n\r\n");
            await foreach (var answerToken in Ollama.StreamGenerateTextAsync(SelectedModel, InputText))
            {
                AppendText(answerToken);
            }
            AppendLine($"\r\n");
        }
        catch (Exception ex)
        {
            AppendText($"Error: {ex.Message}");
        }
    }

    /// <summary>
    /// Append text
    /// </summary>
    private void AppendText(string text)
    {
        Debug.Print($"{text}");
        OutputText += text;
    }

    /// <summary>
    /// Append a line of text
    /// </summary>
    private void AppendLine(string text)
    {
        Debug.Print($"{text}");
        OutputText += $"{text}\r\n";
    }
}
PropertyChangedBase
csharp
/// <summary>
/// Property change notification base class
/// </summary>
public class PropertyChangedBase : INotifyPropertyChanged
{
    public event PropertyChangedEventHandler PropertyChanged;

    protected void OnPropertyChanged([CallerMemberName] string propertyName = null)
    {
        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
    }
}
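The ViewModel also binds SendQuestionCommand to a ParameterlessCommand class that is not listed in the article. A minimal sketch of such an ICommand wrapper (an assumption about its shape, not the original implementation) might look like this:
csharp
// Sketch of the missing class (assumed shape; the article does not list the original).
public class ParameterlessCommand : ICommand
{
    private readonly Action _execute;

    public ParameterlessCommand(Action execute) => _execute = execute;

    public event EventHandler CanExecuteChanged;

    public bool CanExecute(object parameter) => true;   // Always executable

    public void Execute(object parameter) => _execute?.Invoke();
}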
Summary
The sample code implements HTTP interaction with Ollama: using HttpClient, JSON serialization, and basic error handling, it provides a simple asynchronous text-generation interface. It suits scenarios where you call a local Ollama service directly, and more features can be added on top of it later.