Documentation
¶
Index ¶
- Constants
- type AgentCompressionConfig
- type AgentConfig
- type AgentContextConfig
- type Anthropic
- type AzureOpenAI
- type BaseModel
- type Bedrock
- type EmbeddingConfig
- type Gemini
- type GeminiAnthropic
- type GeminiVertexAI
- type HttpMcpServerConfig
- type MemoryConfig
- type Model
- type Ollama
- type OpenAI
- type RemoteAgentConfig
- type SseConnectionParams
- type SseMcpServerConfig
- type StreamableHTTPConnectionParams
Constants ¶
const ( ModelTypeOpenAI = "openai" ModelTypeAzureOpenAI = "azure_openai" ModelTypeAnthropic = "anthropic" ModelTypeGeminiVertexAI = "gemini_vertex_ai" ModelTypeGeminiAnthropic = "gemini_anthropic" ModelTypeOllama = "ollama" ModelTypeGemini = "gemini" ModelTypeBedrock = "bedrock" )
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AgentCompressionConfig ¶
type AgentCompressionConfig struct {
CompactionInterval *int `json:"compaction_interval,omitempty"`
OverlapSize *int `json:"overlap_size,omitempty"`
SummarizerModel Model `json:"summarizer_model,omitempty"`
PromptTemplate string `json:"prompt_template,omitempty"`
TokenThreshold *int `json:"token_threshold,omitempty"`
EventRetentionSize *int `json:"event_retention_size,omitempty"`
}
AgentCompressionConfig maps to Python's ContextCompressionSettings.
func (*AgentCompressionConfig) UnmarshalJSON ¶
func (c *AgentCompressionConfig) UnmarshalJSON(data []byte) error
type AgentConfig ¶
type AgentConfig struct {
Model Model `json:"model"`
Description string `json:"description"`
Instruction string `json:"instruction"`
HttpTools []HttpMcpServerConfig `json:"http_tools"`
SseTools []SseMcpServerConfig `json:"sse_tools"`
RemoteAgents []RemoteAgentConfig `json:"remote_agents"`
ExecuteCode bool `json:"execute_code,omitempty"`
Stream bool `json:"stream"`
Memory *MemoryConfig `json:"memory,omitempty"`
ContextConfig *AgentContextConfig `json:"context_config,omitempty"`
}
See `python/packages/kagent-adk/src/kagent/adk/types.py` for the Python version of this type.
func (*AgentConfig) Scan ¶
func (a *AgentConfig) Scan(value any) error
func (*AgentConfig) UnmarshalJSON ¶
func (a *AgentConfig) UnmarshalJSON(data []byte) error
type AgentContextConfig ¶
type AgentContextConfig struct {
Compaction *AgentCompressionConfig `json:"compaction,omitempty"`
}
AgentContextConfig is the context management configuration that flows through config.json to the Python runtime.
type Anthropic ¶
func (*Anthropic) MarshalJSON ¶
type AzureOpenAI ¶
type AzureOpenAI struct {
BaseModel
}
func (*AzureOpenAI) GetType ¶
func (a *AzureOpenAI) GetType() string
func (*AzureOpenAI) MarshalJSON ¶
func (a *AzureOpenAI) MarshalJSON() ([]byte, error)
type BaseModel ¶
type BaseModel struct {
Type string `json:"type"`
Model string `json:"model"`
Headers map[string]string `json:"headers,omitempty"`
// TLS/SSL configuration (applies to all model types)
TLSDisableVerify *bool `json:"tls_disable_verify,omitempty"`
TLSCACertPath *string `json:"tls_ca_cert_path,omitempty"`
TLSDisableSystemCAs *bool `json:"tls_disable_system_cas,omitempty"`
// APIKeyPassthrough enables forwarding the Bearer token from incoming requests
// as the LLM API key instead of using a static secret.
APIKeyPassthrough bool `json:"api_key_passthrough,omitempty"`
}
type Bedrock ¶
type Bedrock struct {
BaseModel
// Region is the AWS region where the model is available
Region string `json:"region,omitempty"`
}
func (*Bedrock) MarshalJSON ¶
type EmbeddingConfig ¶
type EmbeddingConfig struct {
Provider string `json:"provider"`
Model string `json:"model"`
BaseUrl string `json:"base_url,omitempty"`
}
EmbeddingConfig is the embedding model config for memory tools. JSON uses "provider" to match the Python EmbeddingConfig; unmarshaling also accepts "type" for backward compatibility.
func ModelToEmbeddingConfig ¶
func ModelToEmbeddingConfig(m Model) *EmbeddingConfig
ModelToEmbeddingConfig converts a Model (e.g. one produced by translateModel) into an EmbeddingConfig, so that the serialized AgentConfig carries an embedding.provider field that satisfies Python's EmbeddingConfig validation.
func (*EmbeddingConfig) UnmarshalJSON ¶
func (e *EmbeddingConfig) UnmarshalJSON(data []byte) error
type GeminiAnthropic ¶
type GeminiAnthropic struct {
BaseModel
}
func (*GeminiAnthropic) GetType ¶
func (g *GeminiAnthropic) GetType() string
func (*GeminiAnthropic) MarshalJSON ¶
func (g *GeminiAnthropic) MarshalJSON() ([]byte, error)
type GeminiVertexAI ¶
type GeminiVertexAI struct {
BaseModel
}
func (*GeminiVertexAI) GetType ¶
func (g *GeminiVertexAI) GetType() string
func (*GeminiVertexAI) MarshalJSON ¶
func (g *GeminiVertexAI) MarshalJSON() ([]byte, error)
type HttpMcpServerConfig ¶
type HttpMcpServerConfig struct {
Params StreamableHTTPConnectionParams `json:"params"`
Tools []string `json:"tools"`
AllowedHeaders []string `json:"allowed_headers,omitempty"`
}
type MemoryConfig ¶
type MemoryConfig struct {
TTLDays int `json:"ttl_days,omitempty"`
Embedding *EmbeddingConfig `json:"embedding,omitempty"`
}
MemoryConfig groups all memory-related configuration.
type Ollama ¶
func (*Ollama) MarshalJSON ¶
type OpenAI ¶
type OpenAI struct {
BaseModel
BaseUrl string `json:"base_url"`
FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
MaxTokens *int `json:"max_tokens,omitempty"`
N *int `json:"n,omitempty"`
PresencePenalty *float64 `json:"presence_penalty,omitempty"`
ReasoningEffort *string `json:"reasoning_effort,omitempty"`
Seed *int `json:"seed,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
Timeout *int `json:"timeout,omitempty"`
TopP *float64 `json:"top_p,omitempty"`
}
func (*OpenAI) MarshalJSON ¶
type RemoteAgentConfig ¶
type SseConnectionParams ¶
type SseMcpServerConfig ¶
type SseMcpServerConfig struct {
Params SseConnectionParams `json:"params"`
Tools []string `json:"tools"`
AllowedHeaders []string `json:"allowed_headers,omitempty"`
}