diff --git a/src/features/common/ai/factory.js b/src/features/common/ai/factory.js
index 419cfdfa..f6d84d92 100644
--- a/src/features/common/ai/factory.js
+++ b/src/features/common/ai/factory.js
@@ -53,7 +53,7 @@ const PROVIDERS = {
         name: 'Anthropic',
         handler: () => require("./providers/anthropic"),
         llmModels: [
-            { id: 'claude-3-5-sonnet-20241022', name: 'Claude 3.5 Sonnet' },
+            { id: 'claude-sonnet-4-5', name: 'Claude Sonnet 4.5' },
         ],
         sttModels: [],
     },
diff --git a/src/features/common/ai/providers/anthropic.js b/src/features/common/ai/providers/anthropic.js
index 315e7cba..60e53ea0 100644
--- a/src/features/common/ai/providers/anthropic.js
+++ b/src/features/common/ai/providers/anthropic.js
@@ -63,12 +63,12 @@ async function createSTT({ apiKey, language = "en", callbacks = {}, ...config })
  * Creates an Anthropic LLM instance
  * @param {object} opts - Configuration options
  * @param {string} opts.apiKey - Anthropic API key
- * @param {string} [opts.model='claude-3-5-sonnet-20241022'] - Model name
+ * @param {string} [opts.model='claude-sonnet-4-5'] - Model name
  * @param {number} [opts.temperature=0.7] - Temperature
  * @param {number} [opts.maxTokens=4096] - Max tokens
  * @returns {object} LLM instance
  */
-function createLLM({ apiKey, model = "claude-3-5-sonnet-20241022", temperature = 0.7, maxTokens = 4096, ...config }) {
+function createLLM({ apiKey, model = "claude-sonnet-4-5", temperature = 0.7, maxTokens = 4096, ...config }) {
     const client = new Anthropic({ apiKey })
 
     return {
@@ -188,14 +188,14 @@ function createLLM({ apiKey, model = "claude-3-5-sonnet-20241022", temperature =
  * Creates an Anthropic streaming LLM instance
  * @param {object} opts - Configuration options
  * @param {string} opts.apiKey - Anthropic API key
- * @param {string} [opts.model='claude-3-5-sonnet-20241022'] - Model name
+ * @param {string} [opts.model='claude-sonnet-4-5'] - Model name
  * @param {number} [opts.temperature=0.7] - Temperature
  * @param {number} [opts.maxTokens=4096] - Max tokens
  * @returns {object} Streaming LLM instance
  */
 function createStreamingLLM({
     apiKey,
-    model = "claude-3-5-sonnet-20241022",
+    model = "claude-sonnet-4-5",
     temperature = 0.7,
     maxTokens = 4096,
     ...config