
Commit a2e74ef

Merge pull request #546 from kikumoto/feature/add_support_opus-4.1
Support Anthropic Claude Opus 4.1; avoid sending temperature and top_p simultaneously
2 parents 49843bf + ce82635 commit a2e74ef

2 files changed: +37 −13 lines


examples/pipelines/providers/anthropic_manifold_pipeline.py

Lines changed: 12 additions & 2 deletions
@@ -66,6 +66,7 @@ def get_anthropic_models(self):
             {"id": "claude-3-7-sonnet-20250219", "name": "claude-3.7-sonnet"},
             {"id": "claude-opus-4-20250514", "name": "claude-4-opus"},
             {"id": "claude-sonnet-4-20250514", "name": "claude-4-sonnet"},
+            {"id": "claude-opus-4-1-20250805", "name": "claude-4.1-opus"},
         ]
 
     def get_thinking_supported_models(self):
@@ -168,12 +169,21 @@ def pipe(
             "messages": processed_messages,
             "max_tokens": body.get("max_tokens", 4096),
             "temperature": body.get("temperature", 0.8),
-            "top_k": body.get("top_k", 40),
-            "top_p": body.get("top_p", 0.9),
             "stop_sequences": body.get("stop", []),
             **({"system": str(system_message)} if system_message else {}),
             "stream": body.get("stream", False),
         }
+
+        # Add optional parameters only if explicitly provided
+        if "top_k" in body:
+            payload["top_k"] = body["top_k"]
+
+        # Only include top_p if explicitly set (not both temperature and top_p)
+        if "top_p" in body:
+            payload["top_p"] = body["top_p"]
+            # Remove temperature if top_p is explicitly set
+            if "temperature" in payload:
+                del payload["temperature"]
 
         if body.get("stream", False):
             supports_thinking = any(model in model_id for model in self.get_thinking_supported_models())
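
To illustrate the new behavior in isolation: top_k and top_p no longer get hard-coded defaults; each is forwarded only when the caller supplies it, and an explicit top_p displaces the temperature default so the two are never sent together. Anthropic's API docs recommend tuning either temperature or top_p, not both. A minimal runnable sketch of that logic (build_sampling_params is a hypothetical helper for illustration, not part of the pipeline):

# Sketch of the new sampling-parameter selection above.
# build_sampling_params is a hypothetical helper name.
def build_sampling_params(body: dict) -> dict:
    payload = {"temperature": body.get("temperature", 0.8)}
    if "top_k" in body:
        payload["top_k"] = body["top_k"]
    if "top_p" in body:
        payload["top_p"] = body["top_p"]
        # An explicit top_p wins: drop temperature so both are never sent.
        payload.pop("temperature", None)
    return payload

print(build_sampling_params({}))              # {'temperature': 0.8}
print(build_sampling_params({"top_p": 0.9}))  # {'top_p': 0.9}
print(build_sampling_params({"top_k": 40}))   # {'temperature': 0.8, 'top_k': 40}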

examples/pipelines/providers/aws_bedrock_claude_pipeline.py

Lines changed: 25 additions & 11 deletions
@@ -169,17 +169,31 @@ def pipe(
 
             processed_messages.append({"role": message["role"], "content": processed_content})
 
-        payload = {"modelId": model_id,
-                   "messages": processed_messages,
-                   "system": [{'text': system_message["content"] if system_message else 'you are an intelligent ai assistant'}],
-                   "inferenceConfig": {
-                       "temperature": body.get("temperature", 0.5),
-                       "topP": body.get("top_p", 0.9),
-                       "maxTokens": body.get("max_tokens", 4096),
-                       "stopSequences": body.get("stop", []),
-                   },
-                   "additionalModelRequestFields": {"top_k": body.get("top_k", 200)}
-                   }
+        payload = {
+            "modelId": model_id,
+            "messages": processed_messages,
+            "system": [{'text': system_message["content"] if system_message else 'you are an intelligent ai assistant'}],
+            "inferenceConfig": {
+                "temperature": body.get("temperature", 0.5),
+                "maxTokens": body.get("max_tokens", 4096),
+                "stopSequences": body.get("stop", []),
+            },
+            "additionalModelRequestFields": {}
+        }
+
+        # Handle top_p and temperature conflict
+        if "top_p" in body:
+            payload["inferenceConfig"]["topP"] = body["top_p"]
+            # Remove temperature if top_p is explicitly set
+            if "temperature" in payload["inferenceConfig"]:
+                del payload["inferenceConfig"]["temperature"]
+
+        # Add top_k if explicitly provided
+        if "top_k" in body:
+            payload["additionalModelRequestFields"]["top_k"] = body["top_k"]
+        else:
+            # Use default top_k value
+            payload["additionalModelRequestFields"]["top_k"] = 200
 
         if body.get("stream", False):
            supports_thinking = any(model in model_id for model in self.get_thinking_supported_models())
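
The Bedrock pipeline applies the same temperature/top_p exclusion, but the parameters live in different places: sampling settings sit under inferenceConfig (camelCase topP, maxTokens), while top_k travels in additionalModelRequestFields and, unlike the Anthropic pipeline, keeps a 200 default when the caller omits it. A sketch under the same assumptions (build_inference_config is a hypothetical helper):

# Sketch of the Bedrock variant; build_inference_config is a hypothetical helper.
def build_inference_config(body: dict) -> dict:
    config = {
        "temperature": body.get("temperature", 0.5),
        "maxTokens": body.get("max_tokens", 4096),
        "stopSequences": body.get("stop", []),
    }
    if "top_p" in body:
        config["topP"] = body["top_p"]
        config.pop("temperature", None)  # drop temperature when topP is explicit
    # top_k always goes in additionalModelRequestFields, defaulting to 200
    extra = {"top_k": body.get("top_k", 200)}
    return {"inferenceConfig": config, "additionalModelRequestFields": extra}

print(build_inference_config({"top_p": 0.9}))
# {'inferenceConfig': {'maxTokens': 4096, 'stopSequences': [], 'topP': 0.9},
#  'additionalModelRequestFields': {'top_k': 200}}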
