Skip to content
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
17 commits
Select commit Hold shift + click to select a range
8740c90
updated model version consistently to "gpt-4.1-nano" in various files
giterinhub Jul 1, 2025
cff0983
feat: centralize model management with environment variable configura…
giterinhub Aug 29, 2025
7adfd70
feat: centralize model management with environment variable configura…
giterinhub Aug 29, 2025
6931a29
feat: centralize model management with environment variable configura…
giterinhub Aug 29, 2025
e7a68a8
feat: update model examples to use environment variable placeholders
giterinhub Aug 29, 2025
03e24a7
Merge branch 'main' into main
giterinhub Sep 1, 2025
2d151c1
feat: enhance model retrieval with metadata support in getters. Updat…
giterinhub Sep 3, 2025
fa9ace4
Merge branch 'main' of https://github.com/giterinhub/components-contrib
giterinhub Sep 3, 2025
2c99617
feat: centralize conversation model management with fallback hierarchy
giterinhub Sep 4, 2025
059b3a4
Fix GPT-5 temperature issue in conversation conformance tests
giterinhub Sep 4, 2025
571ebe1
feat: update environment variable names for conversation models and i…
giterinhub Sep 4, 2025
4b860f4
Standardize conversation component metadata with env vars and defaults
giterinhub Sep 4, 2025
d4a7ae5
refactor: replace getModelValue with getModel for consistency in mode…
giterinhub Sep 4, 2025
09e0376
refactor: update conversation test configs to use centralized model d…
giterinhub Sep 9, 2025
0332efb
Merge branch 'main' into main
giterinhub Sep 9, 2025
3f8b574
fix: correct model resolution precedence and update metadata examples
giterinhub Sep 10, 2025
8c81a6e
fix: reorder logic in getModel function to prioritize Environment Var…
giterinhub Sep 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 2 additions & 6 deletions conversation/anthropic/anthropic.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,15 @@ func NewAnthropic(logger logger.Logger) conversation.Conversation {
return a
}

const defaultModel = "claude-3-5-sonnet-20240620"

func (a *Anthropic) Init(ctx context.Context, meta conversation.Metadata) error {
m := conversation.LangchainMetadata{}
err := kmeta.DecodeMetadata(meta.Properties, &m)
if err != nil {
return err
}

model := defaultModel
if m.Model != "" {
model = m.Model
}
// Resolve model via central helper (uses metadata, then env var, then default)
model := conversation.GetAnthropicModel(m.Model)

llm, err := anthropic.New(
anthropic.WithModel(model),
Expand Down
8 changes: 2 additions & 6 deletions conversation/googleai/googleai.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,15 @@ func NewGoogleAI(logger logger.Logger) conversation.Conversation {
return g
}

const defaultModel = "gemini-1.5-flash"

func (g *GoogleAI) Init(ctx context.Context, meta conversation.Metadata) error {
md := conversation.LangchainMetadata{}
err := kmeta.DecodeMetadata(meta.Properties, &md)
if err != nil {
return err
}

model := defaultModel
if md.Model != "" {
model = md.Model
}
// Resolve model via central helper (uses metadata, then env var, then default)
model := conversation.GetGoogleAIModel(md.Model)

opts := []googleai.Option{
googleai.WithAPIKey(md.Key),
Expand Down
3 changes: 1 addition & 2 deletions conversation/googleai/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ metadata:
description: |
The GoogleAI LLM to use.
type: string
example: 'gemini-2.0-flash'
default: 'gemini-2.0-flash'
example: '${{DAPR_CONVERSATION_GOOGLEAI_MODEL}}'
- name: cacheTTL
required: false
description: |
Expand Down
9 changes: 2 additions & 7 deletions conversation/huggingface/huggingface.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,6 @@ func NewHuggingface(logger logger.Logger) conversation.Conversation {
return h
}

// Default model - using a popular and reliable model
const defaultModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"

// Default HuggingFace OpenAI-compatible endpoint
const defaultEndpoint = "https://router.huggingface.co/hf-inference/models/{{model}}/v1"

Expand All @@ -55,10 +52,8 @@ func (h *Huggingface) Init(ctx context.Context, meta conversation.Metadata) erro
return err
}

model := defaultModel
if m.Model != "" {
model = m.Model
}
// Resolve model via central helper (uses metadata, then env var, then default)
model := conversation.GetHuggingFaceModel(m.Model)

endpoint := strings.Replace(defaultEndpoint, "{{model}}", model, 1)
if m.Endpoint != "" {
Expand Down
3 changes: 1 addition & 2 deletions conversation/huggingface/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ metadata:
description: |
The Huggingface model to use. Uses OpenAI-compatible API.
type: string
example: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'
default: 'deepseek-ai/DeepSeek-R1-Distill-Qwen-32B'
example: '${{DAPR_CONVERSATION_HUGGINGFACE_MODEL}}'
- name: endpoint
required: false
description: |
Expand Down
2 changes: 1 addition & 1 deletion conversation/metadata_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ func TestLangchainMetadata(t *testing.T) {
t.Run("json marshaling with endpoint", func(t *testing.T) {
metadata := LangchainMetadata{
Key: "test-key",
Model: "gpt-4",
Model: DefaultOpenAIModel,
CacheTTL: "10m",
Endpoint: "https://custom-endpoint.example.com",
}
Expand Down
2 changes: 1 addition & 1 deletion conversation/mistral/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ metadata:
description: |
The Mistral LLM to use.
type: string
example: 'open-mistral-7b'
example: '${{DAPR_CONVERSATION_MISTRAL_MODEL}}'
default: 'open-mistral-7b'
- name: cacheTTL
required: false
Expand Down
8 changes: 2 additions & 6 deletions conversation/mistral/mistral.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,19 +43,15 @@ func NewMistral(logger logger.Logger) conversation.Conversation {
return m
}

const defaultModel = "open-mistral-7b"

func (m *Mistral) Init(ctx context.Context, meta conversation.Metadata) error {
md := conversation.LangchainMetadata{}
err := kmeta.DecodeMetadata(meta.Properties, &md)
if err != nil {
return err
}

model := defaultModel
if md.Model != "" {
model = md.Model
}
// Resolve model via central helper (uses metadata, then env var, then default)
model := conversation.GetMistralModel(md.Model)

llm, err := mistral.New(
mistral.WithModel(model),
Expand Down
85 changes: 85 additions & 0 deletions conversation/models.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
/*
Copyright 2024 The Dapr Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package conversation

import (
"os"
)

// Environment variable names that can override the default model for each
// conversation component at runtime. These are consulted only when the
// component metadata does not specify a model (see getModelValue).
const (
	// Environment variable names
	envOpenAIModel = "DAPR_CONVERSATION_OPENAI_MODEL"
	// NOTE(review): unlike the others, this name lacks the
	// DAPR_CONVERSATION_ prefix — confirm that is intentional.
	envAzureOpenAIModel = "AZURE_OPENAI_MODEL"
	envAnthropicModel   = "DAPR_CONVERSATION_ANTHROPIC_MODEL"
	envGoogleAIModel    = "DAPR_CONVERSATION_GOOGLEAI_MODEL"
	envMistralModel     = "DAPR_CONVERSATION_MISTRAL_MODEL"
	envHuggingFaceModel = "DAPR_CONVERSATION_HUGGINGFACE_MODEL"
	envOllamaModel      = "DAPR_CONVERSATION_OLLAMA_MODEL"
)

// Exported default model constants for consumers of the conversation package.
// These are the last-resort fallbacks, used only when neither the component
// metadata nor the corresponding environment variable supplies a model.
const (
	DefaultOpenAIModel      = "gpt-5-nano"    // Enable GPT-5 (Preview) for all clients
	DefaultAzureOpenAIModel = "gpt-4.1-nano"  // Default Azure OpenAI model
	DefaultAnthropicModel   = "claude-sonnet-4-20250514"
	DefaultGoogleAIModel    = "gemini-2.5-flash-lite"
	DefaultMistralModel     = "open-mistral-7b"
	DefaultHuggingFaceModel = "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"
	DefaultOllamaModel      = "llama3.2:latest"
)

// getModelValue resolves the model name to use with the precedence:
// explicit component metadata first, then the environment variable envVar,
// and finally defaultValue as the compiled-in fallback.
func getModelValue(envVar, defaultValue, metadataValue string) string {
	// Explicit per-component metadata always wins.
	if metadataValue != "" {
		return metadataValue
	}
	// Next, allow a runtime override through the environment.
	if value := os.Getenv(envVar); value != "" {
		return value
	}
	// Fall back to the package default.
	return defaultValue
}

// Model getters for each conversation component. Each resolves the model
// with the precedence: caller-supplied metadata value, then the component's
// environment variable, then the exported package default. Pass the value
// from your metadata file/struct, or "" if not set.

// GetOpenAIModel returns the OpenAI model, preferring metadataValue, then
// DAPR_CONVERSATION_OPENAI_MODEL, then DefaultOpenAIModel.
func GetOpenAIModel(metadataValue string) string {
	return getModelValue(envOpenAIModel, DefaultOpenAIModel, metadataValue)
}

// GetAzureOpenAIModel returns the Azure OpenAI model, preferring
// metadataValue, then AZURE_OPENAI_MODEL, then DefaultAzureOpenAIModel.
func GetAzureOpenAIModel(metadataValue string) string {
	return getModelValue(envAzureOpenAIModel, DefaultAzureOpenAIModel, metadataValue)
}

// GetAnthropicModel returns the Anthropic model, preferring metadataValue,
// then DAPR_CONVERSATION_ANTHROPIC_MODEL, then DefaultAnthropicModel.
func GetAnthropicModel(metadataValue string) string {
	return getModelValue(envAnthropicModel, DefaultAnthropicModel, metadataValue)
}

// GetGoogleAIModel returns the GoogleAI model, preferring metadataValue,
// then DAPR_CONVERSATION_GOOGLEAI_MODEL, then DefaultGoogleAIModel.
func GetGoogleAIModel(metadataValue string) string {
	return getModelValue(envGoogleAIModel, DefaultGoogleAIModel, metadataValue)
}

// GetMistralModel returns the Mistral model, preferring metadataValue,
// then DAPR_CONVERSATION_MISTRAL_MODEL, then DefaultMistralModel.
func GetMistralModel(metadataValue string) string {
	return getModelValue(envMistralModel, DefaultMistralModel, metadataValue)
}

// GetHuggingFaceModel returns the Huggingface model, preferring
// metadataValue, then DAPR_CONVERSATION_HUGGINGFACE_MODEL, then
// DefaultHuggingFaceModel.
func GetHuggingFaceModel(metadataValue string) string {
	return getModelValue(envHuggingFaceModel, DefaultHuggingFaceModel, metadataValue)
}

// GetOllamaModel returns the Ollama model, preferring metadataValue,
// then DAPR_CONVERSATION_OLLAMA_MODEL, then DefaultOllamaModel.
func GetOllamaModel(metadataValue string) string {
	return getModelValue(envOllamaModel, DefaultOllamaModel, metadataValue)
}
3 changes: 1 addition & 2 deletions conversation/ollama/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,7 @@ metadata:
description: |
The Ollama LLM to use.
type: string
example: 'llama3.2:latest'
default: 'llama3.2:latest'
example: '${{DAPR_CONVERSATION_OLLAMA_MODEL}}'
- name: cacheTTL
required: false
description: |
Expand Down
8 changes: 2 additions & 6 deletions conversation/ollama/ollama.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,15 @@ func NewOllama(logger logger.Logger) conversation.Conversation {
return o
}

const defaultModel = "llama3.2:latest"

func (o *Ollama) Init(ctx context.Context, meta conversation.Metadata) error {
md := conversation.LangchainMetadata{}
err := kmeta.DecodeMetadata(meta.Properties, &md)
if err != nil {
return err
}

model := defaultModel
if md.Model != "" {
model = md.Model
}
// Resolve model via central helper (uses metadata, then env var, then default)
model := conversation.GetOllamaModel(md.Model)

llm, err := ollama.New(
ollama.WithModel(model),
Expand Down
5 changes: 2 additions & 3 deletions conversation/openai/metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,9 @@ metadata:
- name: model
required: false
description: |
The OpenAI LLM to use.
The OpenAI LLM to use. Defaults to gpt-5-nano (configurable via DAPR_CONVERSATION_OPENAI_MODEL environment variable)
type: string
example: 'gpt-4-turbo'
default: 'gpt-4o'
example: '${{DAPR_CONVERSATION_OPENAI_MODEL}}'
- name: endpoint
required: false
description: |
Expand Down
11 changes: 6 additions & 5 deletions conversation/openai/openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,18 +42,19 @@ func NewOpenAI(logger logger.Logger) conversation.Conversation {
return o
}

const defaultModel = "gpt-4o"

func (o *OpenAI) Init(ctx context.Context, meta conversation.Metadata) error {
md := OpenAILangchainMetadata{}
err := kmeta.DecodeMetadata(meta.Properties, &md)
if err != nil {
return err
}

model := defaultModel
if md.Model != "" {
model = md.Model
// Resolve model via central helper (uses metadata, then env var, then default)
var model string
if md.APIType == "azure" {
model = conversation.GetAzureOpenAIModel(md.Model)
} else {
model = conversation.GetOpenAIModel(md.Model)
}
// Create options for OpenAI client
options := []openai.Option{
Expand Down
10 changes: 5 additions & 5 deletions conversation/openai/openai_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ func TestInit(t *testing.T) {
name: "with default endpoint",
metadata: map[string]string{
"key": "test-key",
"model": "gpt-4",
"model": conversation.DefaultOpenAIModel,
},
testFn: func(t *testing.T, o *OpenAI, err error) {
require.NoError(t, err)
Expand All @@ -45,7 +45,7 @@ func TestInit(t *testing.T) {
name: "with custom endpoint",
metadata: map[string]string{
"key": "test-key",
"model": "gpt-4",
"model": conversation.DefaultOpenAIModel,
"endpoint": "https://api.openai.com/v1",
},
testFn: func(t *testing.T, o *OpenAI, err error) {
Expand All @@ -59,7 +59,7 @@ func TestInit(t *testing.T) {
name: "with apiType azure and missing apiVersion",
metadata: map[string]string{
"key": "test-key",
"model": "gpt-4",
"model": conversation.DefaultOpenAIModel,
"apiType": "azure",
"endpoint": "https://custom-endpoint.openai.azure.com/",
},
Expand All @@ -72,7 +72,7 @@ func TestInit(t *testing.T) {
name: "with apiType azure and custom apiVersion",
metadata: map[string]string{
"key": "test-key",
"model": "gpt-4",
"model": conversation.DefaultOpenAIModel,
"apiType": "azure",
"endpoint": "https://custom-endpoint.openai.azure.com/",
"apiVersion": "2025-01-01-preview",
Expand All @@ -86,7 +86,7 @@ func TestInit(t *testing.T) {
name: "with apiType azure but missing endpoint",
metadata: map[string]string{
"key": "test-key",
"model": "gpt-4",
"model": conversation.DefaultOpenAIModel,
"apiType": "azure",
"apiVersion": "2025-01-01-preview",
},
Expand Down
Loading