
Commit f0bd239

Merge pull request #4477 from janhq/fix/remote-engine-request-templates

chore: fix remote engines request templates

2 parents 56df737 + 96a14c2, commit f0bd239

7 files changed: +9 -10 lines changed

extensions/engine-management-extension/models/martian.json

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 "name": "Martian Model Router",
 "version": "1.0",
 "description": "Martian Model Router dynamically routes requests to the best LLM in real-time",
-"parameters": {
+"inference_params": {
   "max_tokens": 4096,
   "temperature": 0.7,
   "top_p": 0.95,

extensions/engine-management-extension/models/mistral.json

Lines changed: 3 additions & 3 deletions

@@ -5,7 +5,7 @@
 "name": "Mistral Small",
 "version": "1.1",
 "description": "Mistral Small is the ideal choice for simple tasks (Classification, Customer Support, or Text Generation) at an affordable price.",
-"parameters": {
+"inference_params": {
   "max_tokens": 32000,
   "temperature": 0.7,
   "top_p": 0.95,
@@ -19,7 +19,7 @@
 "name": "Mistral Large",
 "version": "1.1",
 "description": "Mistral Large is ideal for complex tasks (Synthetic Text Generation, Code Generation, RAG, or Agents).",
-"parameters": {
+"inference_params": {
   "max_tokens": 32000,
   "temperature": 0.7,
   "top_p": 0.95,
@@ -33,7 +33,7 @@
 "name": "Mixtral 8x22B",
 "version": "1.1",
 "description": "Mixtral 8x22B is a high-performance, cost-effective model designed for complex tasks.",
-"parameters": {
+"inference_params": {
   "max_tokens": 32000,
   "temperature": 0.7,
   "top_p": 0.95,

extensions/engine-management-extension/models/nvidia.json

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 "name": "Mistral 7B",
 "version": "1.1",
 "description": "Mistral 7B with NVIDIA",
-"parameters": {
+"inference_params": {
   "max_tokens": 1024,
   "temperature": 0.3,
   "top_p": 1,

extensions/engine-management-extension/models/openrouter.json

Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 "name": "OpenRouter",
 "version": "1.0",
 "description": " OpenRouter scouts for the lowest prices and best latencies/throughputs across dozens of providers, and lets you choose how to prioritize them.",
-"parameters": {
+"inference_params": {
   "max_tokens": 128000,
   "temperature": 0.7,
   "top_p": 0.95,

extensions/engine-management-extension/resources/anthropic.json

Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@
   },
   "transform_resp": {
     "chat_completions": {
-      "template": "{% if input_request.stream %} {\"object\": \"chat.completion.chunk\", \"model\": \"{{ input_request.model }}\", \"choices\": [{\"index\": 0, \"delta\": { {% if input_request.type == \"message_start\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"ping\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"content_block_delta\" %} \"role\": \"assistant\", \"content\": \"{{ input_request.delta.text }}\" {% else if input_request.type == \"content_block_stop\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"content_block_stop\" %} \"role\": \"assistant\", \"content\": null {% endif %} }, {% if input_request.type == \"content_block_stop\" %} \"finish_reason\": \"stop\" {% else %} \"finish_reason\": null {% endif %} }]} {% else %} {\"id\": \"{{ input_request.id }}\", \"created\": null, \"object\": \"chat.completion\", \"model\": \"{{ input_request.model }}\", \"choices\": [{ \"index\": 0, \"message\": { \"role\": \"{{ input_request.role }}\", \"content\": \"{% if input_request.content and input_request.content.0.type == \"text\" %} \"{{input_request.content.0.text}}\" {% endif %}\", \"refusal\": null }, \"logprobs\": null, \"finish_reason\": \"{{ input_request.stop_reason }}\" } ], \"usage\": { \"prompt_tokens\": {{ input_request.usage.input_tokens }}, \"completion_tokens\": {{ input_request.usage.output_tokens }}, \"total_tokens\": {{ input_request.usage.input_tokens + input_request.usage.output_tokens }}, \"prompt_tokens_details\": { \"cached_tokens\": 0 }, \"completion_tokens_details\": { \"reasoning_tokens\": 0, \"accepted_prediction_tokens\": 0, \"rejected_prediction_tokens\": 0 } }, \"system_fingerprint\": \"fp_6b68a8204b\"} {% endif %}"
+      "template": "{% if input_request.stream %} {\"object\": \"chat.completion.chunk\", \"model\": \"{{ input_request.model }}\", \"choices\": [{\"index\": 0, \"delta\": { {% if input_request.type == \"message_start\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"ping\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"content_block_delta\" %} \"role\": \"assistant\", \"content\": \"{{ input_request.delta.text }}\" {% else if input_request.type == \"content_block_stop\" %} \"role\": \"assistant\", \"content\": null {% else if input_request.type == \"content_block_stop\" %} \"role\": \"assistant\", \"content\": null {% endif %} }, {% if input_request.type == \"content_block_stop\" %} \"finish_reason\": \"stop\" {% else %} \"finish_reason\": null {% endif %} }]} {% else %} {\"id\": \"{{ input_request.id }}\", \"created\": null, \"object\": \"chat.completion\", \"model\": \"{{ input_request.model }}\", \"choices\": [{ \"index\": 0, \"message\": { \"role\": \"{{ input_request.role }}\", \"content\": {% if input_request.content and input_request.content.0.type == \"text\" %} \"{{input_request.content.0.text}}\" {% else %} null {% endif %}, \"refusal\": null }, \"logprobs\": null, \"finish_reason\": \"{{ input_request.stop_reason }}\" } ], \"usage\": { \"prompt_tokens\": {{ input_request.usage.input_tokens }}, \"completion_tokens\": {{ input_request.usage.output_tokens }}, \"total_tokens\": {{ input_request.usage.input_tokens + input_request.usage.output_tokens }}, \"prompt_tokens_details\": { \"cached_tokens\": 0 }, \"completion_tokens_details\": { \"reasoning_tokens\": 0, \"accepted_prediction_tokens\": 0, \"rejected_prediction_tokens\": 0 } }, \"system_fingerprint\": \"fp_6b68a8204b\"} {% endif %}"
     }
   }
 }

extensions/engine-management-extension/resources/mistral.json

Lines changed: 2 additions & 2 deletions

@@ -10,12 +10,12 @@
   "transform_req": {
     "chat_completions": {
       "url": "https://api.mistral.ai/v1/chat/completions",
-      "template": "{ {% set first = true %} {% for key, value in input_request %} {% if key == \"messages\" or key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"max_tokens\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stop\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endfor %} }"
+      "template": "{{tojson(input_request)}}"
     }
   },
   "transform_resp": {
     "chat_completions": {
-      "template": "{ {% set first = true %} {% for key, value in input_request %} {% if key == \"choices\" or key == \"created\" or key == \"model\" or key == \"service_tier\" or key == \"system_fingerprint\" or key == \"stream\" or key == \"object\" or key == \"usage\" %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endfor %} }"
+      "template": "{{tojson(input_request)}}"
     }
   }
 }

web/utils/messageRequestBuilder.ts

Lines changed: 0 additions & 1 deletion

@@ -167,7 +167,6 @@ export class MessageRequestBuilder {
       messages: this.normalizeMessages(this.messages),
       model: this.model,
       thread: this.thread,
-      engine: this.model.engine,
     }
   }
 }
