Calling DeepSeek with langchain-go

1. Check the official site

It turns out only Ollama, OpenAI, and Mistral are listed as providers, so I went digging through the source code instead.

2. Reading the code

Let's start from `llm, err := openai.New(url, model, token)`.

`New` turns out to accept functional option parameters, so let's take a look at what those options are:
```go
const (
	tokenEnvVarName        = "OPENAI_API_KEY"      //nolint:gosec
	modelEnvVarName        = "OPENAI_MODEL"        //nolint:gosec
	baseURLEnvVarName      = "OPENAI_BASE_URL"     //nolint:gosec
	baseAPIBaseEnvVarName  = "OPENAI_API_BASE"     //nolint:gosec
	organizationEnvVarName = "OPENAI_ORGANIZATION" //nolint:gosec
)

type APIType openaiclient.APIType

const (
	APITypeOpenAI  APIType = APIType(openaiclient.APITypeOpenAI)
	APITypeAzure           = APIType(openaiclient.APITypeAzure)
	APITypeAzureAD         = APIType(openaiclient.APITypeAzureAD)
)

const (
	DefaultAPIVersion = "2023-05-15"
)

type options struct {
	token        string
	model        string
	baseURL      string
	organization string
	apiType      APIType
	httpClient   openaiclient.Doer

	responseFormat *ResponseFormat

	// required when APIType is APITypeAzure or APITypeAzureAD
	apiVersion     string
	embeddingModel string

	callbackHandler callbacks.Handler
}

// Option is a functional option for the OpenAI client.
type Option func(*options)

// ResponseFormat is the response format for the OpenAI client.
type ResponseFormat = openaiclient.ResponseFormat

// ResponseFormatJSON is the JSON response format.
var ResponseFormatJSON = &ResponseFormat{Type: "json_object"} //nolint:gochecknoglobals

// WithToken passes the OpenAI API token to the client. If not set, the token
// is read from the OPENAI_API_KEY environment variable.
func WithToken(token string) Option {
	return func(opts *options) {
		opts.token = token
	}
}

// WithModel passes the OpenAI model to the client. If not set, the model
// is read from the OPENAI_MODEL environment variable.
// Required when ApiType is Azure.
func WithModel(model string) Option {
	return func(opts *options) {
		opts.model = model
	}
}

// WithEmbeddingModel passes the OpenAI model to the client. Required when ApiType is Azure.
func WithEmbeddingModel(embeddingModel string) Option {
	return func(opts *options) {
		opts.embeddingModel = embeddingModel
	}
}

// WithBaseURL passes the OpenAI base url to the client. If not set, the base url
// is read from the OPENAI_BASE_URL environment variable. If still not set in ENV
// VAR OPENAI_BASE_URL, then the default value is https://api.openai.com/v1 is used.
func WithBaseURL(baseURL string) Option {
	return func(opts *options) {
		opts.baseURL = baseURL
	}
}

// WithOrganization passes the OpenAI organization to the client. If not set, the
// organization is read from the OPENAI_ORGANIZATION.
func WithOrganization(organization string) Option {
	return func(opts *options) {
		opts.organization = organization
	}
}

// WithAPIType passes the api type to the client. If not set, the default value
// is APITypeOpenAI.
func WithAPIType(apiType APIType) Option {
	return func(opts *options) {
		opts.apiType = apiType
	}
}

// WithAPIVersion passes the api version to the client. If not set, the default value
// is DefaultAPIVersion.
func WithAPIVersion(apiVersion string) Option {
	return func(opts *options) {
		opts.apiVersion = apiVersion
	}
}

// WithHTTPClient allows setting a custom HTTP client. If not set, the default value
// is http.DefaultClient.
func WithHTTPClient(client openaiclient.Doer) Option {
	return func(opts *options) {
		opts.httpClient = client
	}
}

// WithCallback allows setting a custom Callback Handler.
func WithCallback(callbackHandler callbacks.Handler) Option {
	return func(opts *options) {
		opts.callbackHandler = callbackHandler
	}
}

// WithResponseFormat allows setting a custom response format.
func WithResponseFormat(responseFormat *ResponseFormat) Option {
	return func(opts *options) {
		opts.responseFormat = responseFormat
	}
}
```

Here we can see where everything gets configured, including the environment variables the client falls back on. So we can use the `With...` options from the `openai` package to point the client at a DeepSeek endpoint (an env-var-only variant is sketched after the example below):
```go
package main

import (
	"context"
	"fmt"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func run() error {
	// I'm using Tencent's DeepSeek endpoint here. Note that the client appends
	// "/chat/completions" itself, so only the base path goes into WithBaseURL.
	url := openai.WithBaseURL("https://api.lkeap.cloud.tencent.com/v1")
	model := openai.WithModel("deepseek-v3")
	token := openai.WithToken("API-KEY")
	// Construct the LLM client with the custom base URL, model and token.
	llm, err := openai.New(url, model, token)
	if err != nil {
		return err
	}
	ctx := context.Background()
	completion, err := llm.Call(ctx, "The first man to walk on the moon",
		llms.WithTemperature(0.8),
		llms.WithStopWords([]string{"Armstrong"}))
	if err != nil {
		return err
	}
	fmt.Println(completion)
	return nil
}

func main() {
	if err := run(); err != nil {
		fmt.Println(err)
	}
}
```
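
Since every option falls back on an environment variable (the constants at the top of the options listing), the same setup also works with no explicit `With...` calls at all. Below is a minimal sketch of that variant, assuming the standard `OPENAI_*` variables point at the DeepSeek endpoint; the values simply mirror the illustrative ones used above, they are not mandated by langchaingo.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	// Normally these would be exported in the shell; they are set here only to
	// keep the sketch self-contained. The values mirror the example above.
	os.Setenv("OPENAI_BASE_URL", "https://api.lkeap.cloud.tencent.com/v1")
	os.Setenv("OPENAI_MODEL", "deepseek-v3")
	os.Setenv("OPENAI_API_KEY", "API-KEY")

	// With no explicit options, openai.New falls back to the env vars above.
	llm, err := openai.New()
	if err != nil {
		log.Fatal(err)
	}
	completion, err := llm.Call(context.Background(), "The first man to walk on the moon")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion)
}
```

When both are present, the explicit options win, since the environment variables only supply the defaults that the `With...` functions overwrite.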