Documentation ¶
Index ¶
- Constants
- type AnthropicFactory
- type AnthropicProvider
- func (ap *AnthropicProvider) Capabilities() ProviderCapabilities
- func (ap *AnthropicProvider) Close() error
- func (ap *AnthropicProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (ap *AnthropicProvider) Config() *types.ModelConfig
- func (ap *AnthropicProvider) GetSystemPrompt() string
- func (ap *AnthropicProvider) SetSystemPrompt(prompt string) error
- func (ap *AnthropicProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type CompleteResponse
- type CustomClaudeFactory
- type CustomClaudeProvider
- func (cp *CustomClaudeProvider) Capabilities() ProviderCapabilities
- func (cp *CustomClaudeProvider) Close() error
- func (cp *CustomClaudeProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (cp *CustomClaudeProvider) Config() *types.ModelConfig
- func (cp *CustomClaudeProvider) GetSystemPrompt() string
- func (cp *CustomClaudeProvider) SetSystemPrompt(prompt string) error
- func (cp *CustomClaudeProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type DeepseekProvider
- func (dp *DeepseekProvider) Capabilities() ProviderCapabilities
- func (dp *DeepseekProvider) Close() error
- func (dp *DeepseekProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (dp *DeepseekProvider) Config() *types.ModelConfig
- func (dp *DeepseekProvider) GetSystemPrompt() string
- func (dp *DeepseekProvider) SetSystemPrompt(prompt string) error
- func (dp *DeepseekProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type DoubaoConfig
- type DoubaoFactory
- type DoubaoProvider
- type Factory
- type GLMFactory
- type GLMProvider
- func (gp *GLMProvider) Capabilities() ProviderCapabilities
- func (gp *GLMProvider) Close() error
- func (gp *GLMProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (gp *GLMProvider) Config() *types.ModelConfig
- func (gp *GLMProvider) GetSystemPrompt() string
- func (gp *GLMProvider) SetSystemPrompt(prompt string) error
- func (gp *GLMProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type GatewayProvider
- func (g *GatewayProvider) Capabilities() ProviderCapabilities
- func (g *GatewayProvider) Close() error
- func (g *GatewayProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (g *GatewayProvider) Config() *types.ModelConfig
- func (g *GatewayProvider) GetSystemPrompt() string
- func (g *GatewayProvider) Protocol() string
- func (g *GatewayProvider) SetSystemPrompt(prompt string) error
- func (g *GatewayProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type GeminiBlob
- type GeminiContent
- type GeminiFactory
- type GeminiFunctionCall
- type GeminiFunctionDeclaration
- type GeminiFunctionResponse
- type GeminiPart
- type GeminiProvider
- func (p *GeminiProvider) Capabilities() ProviderCapabilities
- func (p *GeminiProvider) Close() error
- func (p *GeminiProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (p *GeminiProvider) Config() *types.ModelConfig
- func (p *GeminiProvider) GetSystemPrompt() string
- func (p *GeminiProvider) SetSystemPrompt(prompt string) error
- func (p *GeminiProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type GeminiTool
- type GroqFactory
- type GroqProvider
- type MistralFactory
- type MistralProvider
- type MoonshotFactory
- type MoonshotProvider
- type MultiProviderFactory
- type OllamaFactory
- type OllamaProvider
- type OpenAICompatibleOptions
- type OpenAICompatibleProvider
- func (p *OpenAICompatibleProvider) Capabilities() ProviderCapabilities
- func (p *OpenAICompatibleProvider) Close() error
- func (p *OpenAICompatibleProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
- func (p *OpenAICompatibleProvider) Config() *types.ModelConfig
- func (p *OpenAICompatibleProvider) GetSystemPrompt() string
- func (p *OpenAICompatibleProvider) SetSystemPrompt(prompt string) error
- func (p *OpenAICompatibleProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
- type OpenAIFactory
- type OpenAIProvider
- type OpenRouterConfig
- type OpenRouterFactory
- type OpenRouterProvider
- type Provider
- func NewDoubaoProvider(config *types.ModelConfig, dbConfig *DoubaoConfig) (Provider, error)
- func NewDoubaoProviderSimple(config *types.ModelConfig) (Provider, error)
- func NewGatewayProvider(config *types.ModelConfig) (Provider, error)
- func NewGeminiProvider(config *types.ModelConfig) (Provider, error)
- func NewGroqProvider(config *types.ModelConfig) (Provider, error)
- func NewMistralProvider(config *types.ModelConfig) (Provider, error)
- func NewMoonshotProvider(config *types.ModelConfig) (Provider, error)
- func NewOllamaProvider(config *types.ModelConfig) (Provider, error)
- func NewOpenAIProvider(config *types.ModelConfig) (Provider, error)
- func NewOpenAIProviderWithBaseURL(config *types.ModelConfig) (Provider, error)
- func NewOpenRouterProvider(config *types.ModelConfig, orConfig *OpenRouterConfig) (Provider, error)
- func NewOpenRouterProviderSimple(config *types.ModelConfig) (Provider, error)
- type ProviderCapabilities
- type ProviderFactory
- type ReasoningTrace
- type ResponseFormat
- type ResponseFormatType
- type StreamChunk
- type StreamChunkType
- type StreamError
- type StreamOptions
- type ThinkingConfig
- type TokenUsage
- type ToolCallDelta
- type ToolChoiceOption
- type ToolExample
- type ToolSchema
Constants ¶
const (
// DoubaoAPIBaseURL is the base URL of the Doubao API (ByteDance Volcengine)
DoubaoAPIBaseURL = "https://ark.cn-beijing.volces.com/api/v3"
)
const (
// GeminiAPIBaseURL is the base URL of the Gemini API
GeminiAPIBaseURL = "https://generativelanguage.googleapis.com/v1beta"
)
const (
// GroqAPIBaseURL is the base URL of the Groq API
GroqAPIBaseURL = "https://api.groq.com/openai/v1"
)
const (
// MistralAPIBaseURL is the base URL of the Mistral API
MistralAPIBaseURL = "https://api.mistral.ai/v1"
)
const (
// MoonshotAPIBaseURL is the base URL of the Moonshot API
MoonshotAPIBaseURL = "https://api.moonshot.cn/v1"
)
const (
// OllamaDefaultBaseURL is the default base URL for Ollama
OllamaDefaultBaseURL = "http://localhost:11434/v1"
)
const (
// OpenAIAPIBaseURL is the base URL of the OpenAI API
OpenAIAPIBaseURL = "https://api.openai.com/v1"
)
const (
// OpenRouterAPIBaseURL is the base URL of the OpenRouter API
OpenRouterAPIBaseURL = "https://openrouter.ai/api/v1"
)
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AnthropicFactory ¶
type AnthropicFactory struct{}
AnthropicFactory is the factory for Anthropic providers
func (*AnthropicFactory) Create ¶
func (f *AnthropicFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates an Anthropic provider
type AnthropicProvider ¶
type AnthropicProvider struct {
// contains filtered or unexported fields
}
AnthropicProvider is the Anthropic model provider
func NewAnthropicProvider ¶
func NewAnthropicProvider(config *types.ModelConfig) (*AnthropicProvider, error)
NewAnthropicProvider creates an Anthropic provider
func (*AnthropicProvider) Capabilities ¶
func (ap *AnthropicProvider) Capabilities() ProviderCapabilities
Capabilities returns the model capabilities
func (*AnthropicProvider) Complete ¶
func (ap *AnthropicProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
Complete performs a non-streaming conversation (blocking; returns the full response)
func (*AnthropicProvider) Config ¶
func (ap *AnthropicProvider) Config() *types.ModelConfig
Config returns the configuration
func (*AnthropicProvider) GetSystemPrompt ¶
func (ap *AnthropicProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*AnthropicProvider) SetSystemPrompt ¶
func (ap *AnthropicProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*AnthropicProvider) Stream ¶
func (ap *AnthropicProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
Stream performs a streaming conversation
type CompleteResponse ¶
type CompleteResponse struct {
Message types.Message
Usage *TokenUsage
}
CompleteResponse is the complete, non-streaming response
type CustomClaudeFactory ¶ added in v0.31.0
type CustomClaudeFactory struct{}
CustomClaudeFactory is the factory for custom Claude providers
func (*CustomClaudeFactory) Create ¶ added in v0.31.0
func (f *CustomClaudeFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a custom Claude provider
type CustomClaudeProvider ¶ added in v0.31.0
type CustomClaudeProvider struct {
// contains filtered or unexported fields
}
CustomClaudeProvider is a provider for custom Claude API relay/proxy services, adapting to the special response formats used by such relays
func NewCustomClaudeProvider ¶ added in v0.31.0
func NewCustomClaudeProvider(config *types.ModelConfig) (*CustomClaudeProvider, error)
NewCustomClaudeProvider creates a custom Claude provider
func (*CustomClaudeProvider) Capabilities ¶ added in v0.31.0
func (cp *CustomClaudeProvider) Capabilities() ProviderCapabilities
Capabilities returns the model capabilities
func (*CustomClaudeProvider) Close ¶ added in v0.31.0
func (cp *CustomClaudeProvider) Close() error
Close closes the connection
func (*CustomClaudeProvider) Complete ¶ added in v0.31.0
func (cp *CustomClaudeProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
Complete performs a non-streaming conversation
func (*CustomClaudeProvider) Config ¶ added in v0.31.0
func (cp *CustomClaudeProvider) Config() *types.ModelConfig
Config returns the configuration
func (*CustomClaudeProvider) GetSystemPrompt ¶ added in v0.31.0
func (cp *CustomClaudeProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*CustomClaudeProvider) SetSystemPrompt ¶ added in v0.31.0
func (cp *CustomClaudeProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*CustomClaudeProvider) Stream ¶ added in v0.31.0
func (cp *CustomClaudeProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
Stream performs a streaming conversation
type DeepseekProvider ¶
type DeepseekProvider struct {
// contains filtered or unexported fields
}
DeepseekProvider is the Deepseek v3.2 model provider. The Deepseek API is fully OpenAI-compatible
func NewDeepseekProvider ¶
func NewDeepseekProvider(config *types.ModelConfig) (*DeepseekProvider, error)
NewDeepseekProvider creates a Deepseek provider
func (*DeepseekProvider) Capabilities ¶
func (dp *DeepseekProvider) Capabilities() ProviderCapabilities
Capabilities returns the model capabilities
func (*DeepseekProvider) Complete ¶
func (dp *DeepseekProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
Complete performs a non-streaming conversation (blocking; returns the full response)
func (*DeepseekProvider) Config ¶
func (dp *DeepseekProvider) Config() *types.ModelConfig
Config returns the configuration
func (*DeepseekProvider) GetSystemPrompt ¶
func (dp *DeepseekProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*DeepseekProvider) SetSystemPrompt ¶
func (dp *DeepseekProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*DeepseekProvider) Stream ¶
func (dp *DeepseekProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
Stream performs a streaming conversation
type DoubaoConfig ¶
type DoubaoConfig struct {
// EndpointID is the model endpoint ID (required)
EndpointID string
}
DoubaoConfig holds Doubao-specific configuration
type DoubaoFactory ¶
type DoubaoFactory struct {
Config *DoubaoConfig
}
DoubaoFactory is the factory for Doubao providers
func (*DoubaoFactory) Create ¶
func (f *DoubaoFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a Doubao provider
type DoubaoProvider ¶
type DoubaoProvider struct {
*OpenAICompatibleProvider
// contains filtered or unexported fields
}
DoubaoProvider is the Doubao provider, ByteDance's enterprise AI service built on Volcengine
func (*DoubaoProvider) Capabilities ¶
func (p *DoubaoProvider) Capabilities() ProviderCapabilities
Capabilities returns Doubao's capabilities
type Factory ¶
type Factory interface {
Create(config *types.ModelConfig) (Provider, error)
}
Factory is the model provider factory
type GLMFactory ¶
type GLMFactory struct{}
GLMFactory is the factory for GLM providers
func (*GLMFactory) Create ¶
func (f *GLMFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a GLM provider
type GLMProvider ¶
type GLMProvider struct {
// contains filtered or unexported fields
}
GLMProvider is the GLM 4.6 model provider
func NewGLMProvider ¶
func NewGLMProvider(config *types.ModelConfig) (*GLMProvider, error)
NewGLMProvider creates a GLM provider
func (*GLMProvider) Capabilities ¶
func (gp *GLMProvider) Capabilities() ProviderCapabilities
Capabilities returns the model capabilities
func (*GLMProvider) Complete ¶
func (gp *GLMProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
Complete performs a non-streaming conversation (blocking; returns the full response)
func (*GLMProvider) GetSystemPrompt ¶
func (gp *GLMProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*GLMProvider) SetSystemPrompt ¶
func (gp *GLMProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*GLMProvider) Stream ¶
func (gp *GLMProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
Stream performs a streaming conversation
type GatewayProvider ¶ added in v0.34.0
type GatewayProvider struct {
// contains filtered or unexported fields
}
GatewayProvider is a generic API gateway provider. It forwards requests to a custom base_url and automatically selects the protocol based on the model name
func (*GatewayProvider) Capabilities ¶ added in v0.34.0
func (g *GatewayProvider) Capabilities() ProviderCapabilities
Capabilities returns the model capabilities
func (*GatewayProvider) Complete ¶ added in v0.34.0
func (g *GatewayProvider) Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
Complete implements the Provider interface: non-streaming conversation
func (*GatewayProvider) Config ¶ added in v0.34.0
func (g *GatewayProvider) Config() *types.ModelConfig
Config returns the configuration
func (*GatewayProvider) GetSystemPrompt ¶ added in v0.34.0
func (g *GatewayProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*GatewayProvider) Protocol ¶ added in v0.34.0
func (g *GatewayProvider) Protocol() string
Protocol returns the detected protocol type
func (*GatewayProvider) SetSystemPrompt ¶ added in v0.34.0
func (g *GatewayProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*GatewayProvider) Stream ¶ added in v0.34.0
func (g *GatewayProvider) Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
Stream implements the Provider interface: streaming conversation
type GeminiBlob ¶
GeminiBlob holds binary data
type GeminiContent ¶
type GeminiContent struct {
Role string `json:"role,omitempty"`
Parts []GeminiPart `json:"parts"`
}
GeminiContent is the Gemini message content format
type GeminiFactory ¶
type GeminiFactory struct{}
GeminiFactory is the factory for Gemini providers
func (*GeminiFactory) Create ¶
func (f *GeminiFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a Gemini provider
type GeminiFunctionCall ¶
GeminiFunctionCall represents a function call
type GeminiFunctionDeclaration ¶
type GeminiFunctionDeclaration struct {
Name string `json:"name"`
Description string `json:"description"`
Parameters map[string]any `json:"parameters"`
}
GeminiFunctionDeclaration is a function declaration
type GeminiFunctionResponse ¶
type GeminiFunctionResponse struct {
Name string `json:"name"`
Response map[string]any `json:"response"`
}
GeminiFunctionResponse is a function response
type GeminiPart ¶
type GeminiPart struct {
// Text content
Text string `json:"text,omitempty"`
// Inline data (images, audio, etc.)
InlineData *GeminiBlob `json:"inlineData,omitempty"`
// Function call
FunctionCall *GeminiFunctionCall `json:"functionCall,omitempty"`
// Function response
FunctionResponse *GeminiFunctionResponse `json:"functionResponse,omitempty"`
}
GeminiPart is a Gemini content part
type GeminiProvider ¶
type GeminiProvider struct {
// contains filtered or unexported fields
}
GeminiProvider is the Google Gemini provider. Gemini uses its own Content/Parts format and is not OpenAI-compatible
func (*GeminiProvider) Capabilities ¶
func (p *GeminiProvider) Capabilities() ProviderCapabilities
Capabilities returns the capabilities
func (*GeminiProvider) Complete ¶
func (p *GeminiProvider) Complete( ctx context.Context, messages []types.Message, opts *StreamOptions, ) (*CompleteResponse, error)
Complete implements non-streaming conversation
func (*GeminiProvider) GetSystemPrompt ¶
func (p *GeminiProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*GeminiProvider) SetSystemPrompt ¶
func (p *GeminiProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*GeminiProvider) Stream ¶
func (p *GeminiProvider) Stream( ctx context.Context, messages []types.Message, opts *StreamOptions, ) (<-chan StreamChunk, error)
Stream implements streaming conversation
type GeminiTool ¶
type GeminiTool struct {
FunctionDeclarations []GeminiFunctionDeclaration `json:"functionDeclarations"`
}
GeminiTool is a Gemini tool definition
type GroqFactory ¶
type GroqFactory struct{}
GroqFactory is the factory for Groq providers
func (*GroqFactory) Create ¶
func (f *GroqFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a Groq provider
type GroqProvider ¶
type GroqProvider struct {
*OpenAICompatibleProvider
}
GroqProvider is the Groq provider. Groq offers very fast LLM inference and is fully OpenAI API compatible
func (*GroqProvider) Capabilities ¶
func (p *GroqProvider) Capabilities() ProviderCapabilities
Capabilities returns Groq's capabilities
type MistralFactory ¶
type MistralFactory struct{}
MistralFactory is the factory for Mistral providers
func (*MistralFactory) Create ¶
func (f *MistralFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a Mistral provider
type MistralProvider ¶
type MistralProvider struct {
*OpenAICompatibleProvider
}
MistralProvider is the Mistral provider. Mistral AI is a leading European AI company offering high-quality open-source and commercial models
func (*MistralProvider) Capabilities ¶
func (p *MistralProvider) Capabilities() ProviderCapabilities
Capabilities returns Mistral's capabilities
type MoonshotFactory ¶
type MoonshotFactory struct{}
MoonshotFactory is the factory for Moonshot providers
func (*MoonshotFactory) Create ¶
func (f *MoonshotFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates a Moonshot provider
type MoonshotProvider ¶
type MoonshotProvider struct {
*OpenAICompatibleProvider
}
MoonshotProvider is the Moonshot (Moonshot AI Kimi) provider. Moonshot's Kimi models are known for very long context windows (128K-200K)
func (*MoonshotProvider) Capabilities ¶
func (p *MoonshotProvider) Capabilities() ProviderCapabilities
Capabilities returns Moonshot's capabilities
type MultiProviderFactory ¶
type MultiProviderFactory struct{}
MultiProviderFactory is the multi-provider factory
func NewMultiProviderFactory ¶
func NewMultiProviderFactory() *MultiProviderFactory
NewMultiProviderFactory creates a multi-provider factory
func (*MultiProviderFactory) Create ¶
func (f *MultiProviderFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates the appropriate provider based on the configuration, as sketched below
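A minimal usage sketch. The import paths and the types.ModelConfig field names used here (Provider, Model, APIKey) are assumptions; they are not documented on this page.

package main

import (
	"fmt"
	"log"

	"example.com/app/providers" // hypothetical import path
	"example.com/app/types"     // hypothetical import path
)

func main() {
	factory := providers.NewMultiProviderFactory()
	// ModelConfig field names below are assumptions for illustration only.
	p, err := factory.Create(&types.ModelConfig{
		Provider: "openai",
		Model:    "gpt-4o",
		APIKey:   "sk-...",
	})
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()
	fmt.Println("streaming supported:", p.Capabilities().SupportStreaming)
}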
type OllamaFactory ¶
type OllamaFactory struct{}
OllamaFactory is the factory for Ollama providers
func (*OllamaFactory) Create ¶
func (f *OllamaFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates an Ollama provider
type OllamaProvider ¶
type OllamaProvider struct {
*OpenAICompatibleProvider
}
OllamaProvider is the Ollama provider. Ollama is the go-to option for local LLM deployment and supports many open-source models
func (*OllamaProvider) Capabilities ¶
func (p *OllamaProvider) Capabilities() ProviderCapabilities
Capabilities returns Ollama's capabilities
type OpenAICompatibleOptions ¶
type OpenAICompatibleOptions struct {
// Whether an API key is required
RequireAPIKey bool
// Default model name
DefaultModel string
// Whether reasoning models are supported
SupportReasoning bool
// Whether Prompt Caching is supported
SupportPromptCache bool
// Whether multimodal input is supported
SupportVision bool
SupportAudio bool
// Timeout configuration
Timeout time.Duration
// Retry configuration
MaxRetries int
RetryDelay time.Duration
RetryOn429 bool // retry on HTTP 429
RetryOn500 bool // retry on HTTP 5xx
// Custom request headers
CustomHeaders map[string]string
}
OpenAICompatibleOptions holds optional settings for OpenAI-compatible providers
type OpenAICompatibleProvider ¶
type OpenAICompatibleProvider struct {
// contains filtered or unexported fields
}
OpenAICompatibleProvider is a generic provider for the OpenAI-compatible API format, suitable for OpenAI, Groq, Ollama, Fireworks, Cerebras, DeepInfra, xAI, and others
func NewCustomProvider ¶ added in v0.25.0
func NewCustomProvider(config *types.ModelConfig) (*OpenAICompatibleProvider, error)
NewCustomProvider creates a custom OpenAI-compatible provider (simplified). Intended for bring-your-own-API-key setups such as new-api, API2D, etc.
func NewOpenAICompatibleProvider ¶
func NewOpenAICompatibleProvider( config *types.ModelConfig, baseURL string, providerName string, options *OpenAICompatibleOptions, ) (*OpenAICompatibleProvider, error)
NewOpenAICompatibleProvider creates an OpenAI-compatible provider
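A sketch of wrapping a self-hosted OpenAI-compatible endpoint. The import paths, the base URL, and the ModelConfig field names are assumptions; the option fields come from OpenAICompatibleOptions above.

package main

import (
	"log"
	"time"

	"example.com/app/providers" // hypothetical import path
	"example.com/app/types"     // hypothetical import path
)

func main() {
	opts := &providers.OpenAICompatibleOptions{
		RequireAPIKey: true,
		DefaultModel:  "llama-3.1-70b", // illustrative model name
		Timeout:       60 * time.Second,
		MaxRetries:    3,
		RetryDelay:    2 * time.Second,
		RetryOn429:    true,
		RetryOn500:    true,
		CustomHeaders: map[string]string{"X-Client": "my-agent"},
	}
	// ModelConfig field names below are assumptions for illustration only.
	p, err := providers.NewOpenAICompatibleProvider(
		&types.ModelConfig{Model: "llama-3.1-70b", APIKey: "sk-..."},
		"https://llm.internal.example/v1", // custom base URL
		"self-hosted",                     // provider name
		opts,
	)
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()
}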
func (*OpenAICompatibleProvider) Capabilities ¶
func (p *OpenAICompatibleProvider) Capabilities() ProviderCapabilities
Capabilities returns the capabilities
func (*OpenAICompatibleProvider) Close ¶
func (p *OpenAICompatibleProvider) Close() error
Close closes the connection
func (*OpenAICompatibleProvider) Complete ¶
func (p *OpenAICompatibleProvider) Complete( ctx context.Context, messages []types.Message, opts *StreamOptions, ) (*CompleteResponse, error)
Complete implements non-streaming conversation
func (*OpenAICompatibleProvider) Config ¶
func (p *OpenAICompatibleProvider) Config() *types.ModelConfig
Config returns the configuration
func (*OpenAICompatibleProvider) GetSystemPrompt ¶
func (p *OpenAICompatibleProvider) GetSystemPrompt() string
GetSystemPrompt returns the system prompt
func (*OpenAICompatibleProvider) SetSystemPrompt ¶
func (p *OpenAICompatibleProvider) SetSystemPrompt(prompt string) error
SetSystemPrompt sets the system prompt
func (*OpenAICompatibleProvider) Stream ¶
func (p *OpenAICompatibleProvider) Stream( ctx context.Context, messages []types.Message, opts *StreamOptions, ) (<-chan StreamChunk, error)
Stream implements streaming conversation
type OpenAIFactory ¶
type OpenAIFactory struct{}
OpenAIFactory is the factory for OpenAI providers
func (*OpenAIFactory) Create ¶
func (f *OpenAIFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates an OpenAI provider
type OpenAIProvider ¶
type OpenAIProvider struct {
*OpenAICompatibleProvider
}
OpenAIProvider is the OpenAI provider. Supports state-of-the-art models such as GPT-4/4.5/5, GPT-4o, and o1/o3
func (*OpenAIProvider) Capabilities ¶
func (p *OpenAIProvider) Capabilities() ProviderCapabilities
Capabilities returns OpenAI's capabilities
type OpenRouterConfig ¶
type OpenRouterConfig struct {
// Preferred provider (e.g. "OpenAI", "Anthropic")
ProviderPreference string
// Provider priority order
ProviderOrder []string
// Whether fallback to other providers is allowed
AllowFallbacks bool
// Application name (for analytics)
AppName string
// Site URL (for analytics)
SiteURL string
}
OpenRouterConfig holds OpenRouter-specific configuration
type OpenRouterFactory ¶
type OpenRouterFactory struct {
Config *OpenRouterConfig
}
OpenRouterFactory is the factory for OpenRouter providers
func (*OpenRouterFactory) Create ¶
func (f *OpenRouterFactory) Create(config *types.ModelConfig) (Provider, error)
Create creates an OpenRouter provider
type OpenRouterProvider ¶
type OpenRouterProvider struct {
*OpenAICompatibleProvider
// contains filtered or unexported fields
}
OpenRouterProvider is the OpenRouter provider. OpenRouter is an aggregation platform with access to hundreds of models (OpenAI, Anthropic, Google, Meta, and more)
func (*OpenRouterProvider) Capabilities ¶
func (p *OpenRouterProvider) Capabilities() ProviderCapabilities
Capabilities returns OpenRouter's capabilities. Note: OpenRouter aggregates multiple providers, so the actual capabilities depend on the selected model
type Provider ¶
type Provider interface {
// Stream performs a streaming conversation
Stream(ctx context.Context, messages []types.Message, opts *StreamOptions) (<-chan StreamChunk, error)
// Complete performs a non-streaming conversation (blocking; returns the full response)
Complete(ctx context.Context, messages []types.Message, opts *StreamOptions) (*CompleteResponse, error)
// Config returns the configuration
Config() *types.ModelConfig
// Capabilities returns the model capabilities
Capabilities() ProviderCapabilities
// SetSystemPrompt sets the system prompt
SetSystemPrompt(prompt string) error
// GetSystemPrompt returns the system prompt
GetSystemPrompt() string
// Close closes the connection
Close() error
}
Provider is the model provider interface
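A sketch of driving any Provider through Complete. The import paths and the types.Message fields (Role, Content) are assumptions that this page does not document.

package main

import (
	"context"
	"fmt"
	"log"

	"example.com/app/providers" // hypothetical import path
	"example.com/app/types"     // hypothetical import path
)

func main() {
	p, err := providers.NewOpenAIProvider(&types.ModelConfig{ /* model, API key, ... (fields assumed) */ })
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()

	// Message field names below are assumptions for illustration only.
	msgs := []types.Message{{Role: "user", Content: "Say hello."}}
	resp, err := p.Complete(context.Background(), msgs, &providers.StreamOptions{MaxTokens: 256})
	if err != nil {
		log.Fatal(err)
	}
	if resp.Usage != nil {
		fmt.Printf("in=%d out=%d tokens\n", resp.Usage.InputTokens, resp.Usage.OutputTokens)
	}
}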
func NewDoubaoProvider ¶
func NewDoubaoProvider(config *types.ModelConfig, dbConfig *DoubaoConfig) (Provider, error)
NewDoubaoProvider creates a Doubao provider
func NewDoubaoProviderSimple ¶
func NewDoubaoProviderSimple(config *types.ModelConfig) (Provider, error)
NewDoubaoProviderSimple creates a Doubao provider (simplified)
func NewGatewayProvider ¶ added in v0.34.0
func NewGatewayProvider(config *types.ModelConfig) (Provider, error)
NewGatewayProvider creates a new Gateway provider
The Gateway provider infers which protocol to use from the model name (see the sketch after this list):
- claude-* -> Anthropic protocol
- gpt-*, o1-*, o3-* -> OpenAI protocol
- gemini-* -> Gemini protocol
- anything else -> OpenAI-compatible protocol
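A minimal sketch of that selection. The import paths and the ModelConfig field names (Model, BaseURL, APIKey) are assumptions, and the type assertion assumes the returned Provider is the concrete *GatewayProvider.

package main

import (
	"fmt"
	"log"

	"example.com/app/providers" // hypothetical import path
	"example.com/app/types"     // hypothetical import path
)

func main() {
	// ModelConfig field names below are assumptions for illustration only.
	p, err := providers.NewGatewayProvider(&types.ModelConfig{
		Model:   "claude-3-5-sonnet", // claude-* should select the Anthropic protocol
		BaseURL: "https://gateway.internal.example",
		APIKey:  "sk-...",
	})
	if err != nil {
		log.Fatal(err)
	}
	defer p.Close()

	// NewGatewayProvider returns a Provider; assert to reach Protocol().
	if g, ok := p.(*providers.GatewayProvider); ok {
		fmt.Println("protocol:", g.Protocol())
	}
}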
func NewGeminiProvider ¶
func NewGeminiProvider(config *types.ModelConfig) (Provider, error)
NewGeminiProvider creates a Gemini provider
func NewGroqProvider ¶
func NewGroqProvider(config *types.ModelConfig) (Provider, error)
NewGroqProvider creates a Groq provider
func NewMistralProvider ¶
func NewMistralProvider(config *types.ModelConfig) (Provider, error)
NewMistralProvider creates a Mistral provider
func NewMoonshotProvider ¶
func NewMoonshotProvider(config *types.ModelConfig) (Provider, error)
NewMoonshotProvider creates a Moonshot provider
func NewOllamaProvider ¶
func NewOllamaProvider(config *types.ModelConfig) (Provider, error)
NewOllamaProvider creates an Ollama provider
func NewOpenAIProvider ¶
func NewOpenAIProvider(config *types.ModelConfig) (Provider, error)
NewOpenAIProvider creates an OpenAI provider
func NewOpenAIProviderWithBaseURL ¶ added in v0.34.0
func NewOpenAIProviderWithBaseURL(config *types.ModelConfig) (Provider, error)
NewOpenAIProviderWithBaseURL creates an OpenAI provider that supports a custom BaseURL
func NewOpenRouterProvider ¶
func NewOpenRouterProvider(config *types.ModelConfig, orConfig *OpenRouterConfig) (Provider, error)
NewOpenRouterProvider creates an OpenRouter provider
func NewOpenRouterProviderSimple ¶
func NewOpenRouterProviderSimple(config *types.ModelConfig) (Provider, error)
NewOpenRouterProviderSimple creates an OpenRouter provider (simplified)
type ProviderCapabilities ¶
type ProviderCapabilities struct {
// Basic capabilities
SupportToolCalling bool // supports tool calling
SupportSystemPrompt bool // supports a standalone system prompt
SupportStreaming bool // supports streaming output
// Multimodal capabilities
SupportVision bool // supports vision (images)
SupportAudio bool // supports audio
SupportVideo bool // supports video
// Advanced capabilities
SupportReasoning bool // supports reasoning models (o1/o3/R1)
SupportPromptCache bool // supports Prompt Caching
SupportJSONMode bool // supports JSON mode
SupportFunctionCall bool // supports Function Calling
SupportStructuredOutput bool // supports structured output (JSON Schema)
// Limits
MaxTokens int // maximum number of tokens
MaxToolsPerCall int // maximum number of tool calls per request
MaxImageSize int // maximum image size in bytes
// Tool Calling format
ToolCallingFormat string // "anthropic" | "openai" | "qwen" | "custom"
// Reasoning model behavior
ReasoningTokensIncluded bool // whether reasoning tokens are counted in the total
// Prompt Caching behavior
CacheMinTokens int // minimum number of tokens required for caching
}
ProviderCapabilities describes model capabilities (extended version)
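A sketch of gating features on the reported capabilities. The import path is hypothetical, and treating a zero MaxToolsPerCall as "no documented limit" is an assumption.

package examples // illustrative

import "example.com/app/providers" // hypothetical import path

// canUseTools reports whether attaching toolCount tools looks safe for p.
func canUseTools(p providers.Provider, toolCount int) bool {
	caps := p.Capabilities()
	if !caps.SupportToolCalling {
		return false
	}
	// Treat MaxToolsPerCall == 0 as "no documented limit" (an assumption).
	return caps.MaxToolsPerCall == 0 || toolCount <= caps.MaxToolsPerCall
}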
type ProviderFactory ¶
type ProviderFactory interface {
Create(config *types.ModelConfig) (Provider, error)
}
ProviderFactory is the provider factory interface
type ReasoningTrace ¶
type ReasoningTrace struct {
Step int `json:"step"`
Thought string `json:"thought"`
ThoughtDelta string `json:"thought_delta,omitempty"`
Type string `json:"type,omitempty"` // "thinking", "reflection", "conclusion"
Confidence float64 `json:"confidence,omitempty"`
}
ReasoningTrace records a trace of the reasoning process
type ResponseFormat ¶ added in v0.31.0
type ResponseFormat struct {
Type ResponseFormatType `json:"type"`
Name string `json:"name,omitempty"` // JSON Schema name (json_schema type only)
Schema map[string]any `json:"schema,omitempty"` // JSON Schema definition (json_schema type only)
Strict bool `json:"strict,omitempty"` // strict mode (OpenAI)
}
ResponseFormat configures the response format (used for structured output)
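A sketch of requesting structured output via StreamOptions.ResponseFormat. The import path and the schema content are illustrative, and whether Strict is honored depends on the provider.

package examples // illustrative

import "example.com/app/providers" // hypothetical import path

// weatherReportFormat asks for a JSON object matching a small schema.
func weatherReportFormat() *providers.ResponseFormat {
	return &providers.ResponseFormat{
		Type: providers.ResponseFormatJSONSchema,
		Name: "weather_report",
		Schema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"city":    map[string]any{"type": "string"},
				"temp_c":  map[string]any{"type": "number"},
				"summary": map[string]any{"type": "string"},
			},
			"required": []string{"city", "temp_c", "summary"},
		},
		Strict: true,
	}
}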
type ResponseFormatType ¶ added in v0.31.0
type ResponseFormatType string
ResponseFormatType is the response format type
const (
ResponseFormatText ResponseFormatType = "text"
ResponseFormatJSON ResponseFormatType = "json_object"
ResponseFormatJSONSchema ResponseFormatType = "json_schema"
)
type StreamChunk ¶
type StreamChunk struct {
// Type is the chunk type
Type string `json:"type"`
// Index is the content block index (for Anthropic-format compatibility)
Index int `json:"index,omitempty"`
// Delta is generic incremental data (kept for backward compatibility)
Delta any `json:"delta,omitempty"`
// TextDelta is an incremental text delta (new, explicitly typed)
TextDelta string `json:"text_delta,omitempty"`
// ToolCall is an incremental tool call (new)
ToolCall *ToolCallDelta `json:"tool_call,omitempty"`
// Reasoning is the reasoning trace (new)
Reasoning *ReasoningTrace `json:"reasoning,omitempty"`
// Usage reports token usage
Usage *TokenUsage `json:"usage,omitempty"`
// Error carries error information (new)
Error *StreamError `json:"error,omitempty"`
// FinishReason is the finish reason (new)
FinishReason string `json:"finish_reason,omitempty"`
}
StreamChunk is a streaming response chunk (extended version)
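A sketch of consuming the Stream channel and switching on the generic chunk types. The import paths are hypothetical, and whether a given provider emits these generic types or the Anthropic-style block events is provider-specific.

package examples // illustrative

import (
	"context"
	"fmt"

	"example.com/app/providers" // hypothetical import path
	"example.com/app/types"     // hypothetical import path
)

// printStream drains a streaming conversation and prints text as it arrives.
func printStream(ctx context.Context, p providers.Provider, msgs []types.Message) error {
	ch, err := p.Stream(ctx, msgs, &providers.StreamOptions{MaxTokens: 1024})
	if err != nil {
		return err
	}
	for chunk := range ch {
		switch chunk.Type {
		case string(providers.ChunkTypeText):
			fmt.Print(chunk.TextDelta)
		case string(providers.ChunkTypeUsage):
			if chunk.Usage != nil {
				fmt.Printf("\n[in=%d out=%d tokens]\n", chunk.Usage.InputTokens, chunk.Usage.OutputTokens)
			}
		case string(providers.ChunkTypeError):
			if chunk.Error != nil {
				return fmt.Errorf("stream error %s: %s", chunk.Error.Code, chunk.Error.Message)
			}
		case string(providers.ChunkTypeDone):
			return nil
		}
	}
	return nil
}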
type StreamChunkType ¶
type StreamChunkType string
StreamChunkType is the streaming chunk type
const (
// Original types (Anthropic-compatible)
ChunkTypeContentBlockStart StreamChunkType = "content_block_start"
ChunkTypeContentBlockDelta StreamChunkType = "content_block_delta"
ChunkTypeContentBlockStop StreamChunkType = "content_block_stop"
ChunkTypeMessageDelta StreamChunkType = "message_delta"
// New generic types
ChunkTypeText StreamChunkType = "text"
ChunkTypeReasoning StreamChunkType = "reasoning"
ChunkTypeUsage StreamChunkType = "usage"
ChunkTypeToolCall StreamChunkType = "tool_call"
ChunkTypeError StreamChunkType = "error"
ChunkTypeDone StreamChunkType = "done"
)
type StreamError ¶
type StreamError struct {
Code string `json:"code"`
Message string `json:"message"`
Type string `json:"type,omitempty"`
Param string `json:"param,omitempty"`
}
StreamError is a streaming error
type StreamOptions ¶
type StreamOptions struct {
Tools []ToolSchema
MaxTokens int
Temperature float64
System string
// ToolChoice is the tool selection strategy (supported by the Anthropic API)
// Allowed values: nil (default), "auto", "any", or a specific tool name
ToolChoice *ToolChoiceOption `json:"tool_choice,omitempty"`
// ResponseFormat is the response format (used for structured output)
// Supports JSON Schema to force responses into a specific shape
ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
// Thinking is the Extended Thinking configuration (Claude only)
// When enabled, the model thinks before responding and the thinking process is returned through streaming events
Thinking *ThinkingConfig `json:"thinking,omitempty"`
}
StreamOptions holds streaming request options
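A sketch of a typical options value that offers a single tool and forces the model to call it. The import path is hypothetical, and whether the "tool" choice is honored depends on the provider.

package examples // illustrative

import "example.com/app/providers" // hypothetical import path

// forcedToolOptions offers a single tool and forces the model to call it.
func forcedToolOptions(tool providers.ToolSchema) *providers.StreamOptions {
	return &providers.StreamOptions{
		System:      "You are a weather assistant.",
		MaxTokens:   1024,
		Temperature: 0.2,
		Tools:       []providers.ToolSchema{tool},
		ToolChoice:  &providers.ToolChoiceOption{Type: "tool", Name: tool.Name},
	}
}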
type ThinkingConfig ¶ added in v0.31.0
type ThinkingConfig struct {
// Enabled controls whether extended thinking is enabled
Enabled bool `json:"enabled"`
// BudgetTokens is the token budget for the thinking process
// Claude's suggested range: 1024 - 32000
BudgetTokens int `json:"budget_tokens,omitempty"`
}
ThinkingConfig configures Extended Thinking
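A sketch of enabling Extended Thinking on existing options. The import path is hypothetical and the 8192-token budget is simply an arbitrary value inside the documented 1024-32000 range.

package examples // illustrative

import "example.com/app/providers" // hypothetical import path

// withThinking turns on extended thinking with a mid-range token budget.
func withThinking(opts *providers.StreamOptions) *providers.StreamOptions {
	opts.Thinking = &providers.ThinkingConfig{
		Enabled:      true,
		BudgetTokens: 8192,
	}
	return opts
}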
type TokenUsage ¶
type TokenUsage struct {
// Basic counts
InputTokens int64 `json:"input_tokens"`
OutputTokens int64 `json:"output_tokens"`
TotalTokens int64 `json:"total_tokens,omitempty"`
// Reasoning-model specific counts
ReasoningTokens int64 `json:"reasoning_tokens,omitempty"`
// Prompt Caching counts
CachedTokens int64 `json:"cached_tokens,omitempty"`
CacheCreationTokens int64 `json:"cache_creation_tokens,omitempty"`
CacheReadTokens int64 `json:"cache_read_tokens,omitempty"`
// Cost estimate (new)
EstimatedCost float64 `json:"estimated_cost,omitempty"` // estimated cost (USD)
// Request metadata (new)
RequestID string `json:"request_id,omitempty"` // API request ID
Model string `json:"model,omitempty"` // model used
Provider string `json:"provider,omitempty"` // provider type
// Timing (new)
LatencyMs int64 `json:"latency_ms,omitempty"` // total request latency (ms)
TimeToFirstToken int64 `json:"time_to_first_token,omitempty"` // time to first token (ms)
}
TokenUsage reports token usage statistics (extended version)
type ToolCallDelta ¶
type ToolCallDelta struct {
Index int `json:"index"`
ID string `json:"id,omitempty"`
Type string `json:"type,omitempty"`
Name string `json:"name,omitempty"`
ArgumentsDelta string `json:"arguments_delta,omitempty"`
}
ToolCallDelta is an incremental tool call delta
type ToolChoiceOption ¶ added in v0.17.0
type ToolChoiceOption struct {
// Type is the choice type: "auto", "any", "tool"
Type string `json:"type"`
// Name is the tool name to use when Type == "tool"
Name string `json:"name,omitempty"`
// DisableParallelToolUse disables parallel tool calls
DisableParallelToolUse bool `json:"disable_parallel_tool_use,omitempty"`
}
ToolChoiceOption selects how tools may be called
type ToolExample ¶ added in v0.17.0
type ToolExample struct {
Description string `json:"description"`
Input map[string]any `json:"input"`
Output any `json:"output,omitempty"`
}
ToolExample is a tool usage example (kept consistent with tools.ToolExample)
type ToolSchema ¶
type ToolSchema struct {
Name string `json:"name"`
Description string `json:"description"`
InputSchema map[string]any `json:"input_schema"`
// InputExamples are tool usage examples that help the LLM call the tool more accurately
// Modeled on Anthropic's Tool Use Examples feature
InputExamples []ToolExample `json:"input_examples,omitempty"`
// AllowedCallers specifies which contexts may call this tool (PTC support)
// Allowed values: ["direct"], ["code_execution_20250825"], or both
// Default: nil or ["direct"] - only direct LLM calls
AllowedCallers []string `json:"allowed_callers,omitempty"`
}
ToolSchema is the tool schema
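A sketch of a ToolSchema with one usage example. The import path, the tool itself, and its schema are illustrative; only the field names come from this package.

package examples // illustrative

import "example.com/app/providers" // hypothetical import path

// weatherTool declares a hypothetical get_weather tool with an input example.
func weatherTool() providers.ToolSchema {
	return providers.ToolSchema{
		Name:        "get_weather",
		Description: "Look up the current weather for a city.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"city": map[string]any{"type": "string", "description": "City name"},
			},
			"required": []string{"city"},
		},
		InputExamples: []providers.ToolExample{{
			Description: "Weather in Tokyo",
			Input:       map[string]any{"city": "Tokyo"},
			Output:      map[string]any{"temp_c": 21.5, "summary": "clear"},
		}},
	}
}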