
Commit 58ea3de

Author: kmrrohit (committed)

fix(cli): uptake response_mime_type setting for gemini to avoid json parse issue

1 parent: 060680c

6 files changed, +120 -25 lines


.changeset/healthy-boxes-press.md (5 additions, 0 deletions)

@@ -0,0 +1,5 @@
+---
+"lingo.dev": patch
+---
+
+Uptake response_mime_type for google provider to support json response.

packages/cli/src/cli/localizer/explicit.ts (64 additions, 19 deletions)

@@ -7,15 +7,26 @@ import { I18nConfig } from "@lingo.dev/_spec";
 import chalk from "chalk";
 import dedent from "dedent";
 import { ILocalizer, LocalizerData } from "./_types";
-import { LanguageModel, Message, generateText } from "ai";
+import {
+  GenerateObjectResult,
+  GenerateTextResult,
+  LanguageModel,
+  Message,
+  generateObject,
+  generateText,
+} from "ai";
 import { colors } from "../constants";
 import { jsonrepair } from "jsonrepair";
 import { createOllama } from "ollama-ai-provider";
+import {
+  NormalizedModelSettings,
+  normalizeProviderSettings,
+} from "../utils/normalize-provider-settings";
 
 export default function createExplicitLocalizer(
   provider: NonNullable<I18nConfig["provider"]>,
 ): ILocalizer {
-  const settings = provider.settings || {};
+  const settings = normalizeProviderSettings(provider.id, provider.settings);
 
   switch (provider.id) {
     default:
@@ -38,6 +49,7 @@ export default function createExplicitLocalizer(
       return createAiSdkLocalizer({
         factory: (params) => createOpenAI(params).languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         apiKeyName: "OPENAI_API_KEY",
         baseUrl: provider.baseUrl,
@@ -48,6 +60,7 @@ export default function createExplicitLocalizer(
         factory: (params) =>
           createAnthropic(params).languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         apiKeyName: "ANTHROPIC_API_KEY",
         baseUrl: provider.baseUrl,
@@ -58,6 +71,7 @@ export default function createExplicitLocalizer(
         factory: (params) =>
           createGoogleGenerativeAI(params).languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         apiKeyName: "GOOGLE_API_KEY",
         baseUrl: provider.baseUrl,
@@ -68,6 +82,7 @@ export default function createExplicitLocalizer(
         factory: (params) =>
           createOpenRouter(params).languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         apiKeyName: "OPENROUTER_API_KEY",
         baseUrl: provider.baseUrl,
@@ -77,6 +92,7 @@ export default function createExplicitLocalizer(
       return createAiSdkLocalizer({
         factory: (_params) => createOllama().languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         skipAuth: true,
         settings,
@@ -86,6 +102,7 @@ export default function createExplicitLocalizer(
         factory: (params) =>
           createMistral(params).languageModel(provider.model),
         id: provider.id,
+        model: provider.model,
         prompt: provider.prompt,
         apiKeyName: "MISTRAL_API_KEY",
         baseUrl: provider.baseUrl,
@@ -97,11 +114,12 @@ export default function createExplicitLocalizer(
 function createAiSdkLocalizer(params: {
   factory: (params: { apiKey?: string; baseUrl?: string }) => LanguageModel;
   id: NonNullable<I18nConfig["provider"]>["id"];
+  model: string;
   prompt: string;
   apiKeyName?: string;
   baseUrl?: string;
   skipAuth?: boolean;
-  settings?: { temperature?: number };
+  settings?: NormalizedModelSettings;
 }): ILocalizer {
   const skipAuth = params.skipAuth === true;
 
@@ -196,22 +214,49 @@ function createAiSdkLocalizer(params: {
       data: input.processableData,
     };
 
-    const response = await generateText({
-      model,
-      ...params.settings,
-      messages: [
-        { role: "system", content: systemPrompt },
-        { role: "user", content: "OK" },
-        ...shots.flatMap(
-          ([userShot, assistantShot]) =>
-            [
-              { role: "user", content: JSON.stringify(userShot) },
-              { role: "assistant", content: JSON.stringify(assistantShot) },
-            ] as Message[],
-        ),
-        { role: "user", content: JSON.stringify(payload) },
-      ],
-    });
+    let response: GenerateTextResult<any, any> | GenerateObjectResult<any>;
+    if (params.id === "google" && params.settings?.responseMimeType) {
+      response = await generateObject({
+        model,
+        output: "no-schema",
+        temperature: params.settings?.temperature,
+        messages: [
+          { role: "system", content: systemPrompt },
+          { role: "user", content: "OK" },
+          ...shots.flatMap(
+            ([userShot, assistantShot]) =>
+              [
+                { role: "user", content: JSON.stringify(userShot) },
+                { role: "assistant", content: JSON.stringify(assistantShot) },
+              ] as Message[],
+          ),
+          { role: "user", content: JSON.stringify(payload) },
+        ],
+      });
+    } else {
+      response = await generateText({
+        model,
+        temperature: params.settings?.temperature,
+        messages: [
+          { role: "system", content: systemPrompt },
+          { role: "user", content: "OK" },
+          ...shots.flatMap(
+            ([userShot, assistantShot]) =>
+              [
+                { role: "user", content: JSON.stringify(userShot) },
+                { role: "assistant", content: JSON.stringify(assistantShot) },
+              ] as Message[],
+          ),
+          { role: "user", content: JSON.stringify(payload) },
+        ],
+      });
+    }
+
+    // Handle GenerateObjectResult - response is already a json object
+    if ("object" in response) {
+      const result = response.object as any;
+      return result.data;
+    }
 
     const result = JSON.parse(response.text);
 
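For reference, a standalone sketch of the AI SDK pattern the new google branch relies on. The model id and prompt below are illustrative, not taken from this commit: with output: "no-schema", generateObject still puts the provider into JSON output mode and parses the reply itself, so the caller reads result.object instead of running JSON.parse (or jsonrepair) on raw text.

    import { generateObject } from "ai";
    import { createGoogleGenerativeAI } from "@ai-sdk/google";

    // Illustrative model id; any Gemini model that supports JSON mode would do.
    const model = createGoogleGenerativeAI({
      apiKey: process.env.GOOGLE_API_KEY,
    }).languageModel("gemini-1.5-flash");

    const result = await generateObject({
      model,
      output: "no-schema", // no Zod schema, but the provider is still asked for JSON
      prompt: 'Reply with a JSON object {"data": {"greeting": "..."}} translating "Hello" to French.',
    });

    // result.object is already-parsed JSON; no manual JSON.parse step is needed.
    console.log(result.object);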

packages/cli/src/cli/processor/basic.ts (2 additions, 5 deletions)

@@ -1,15 +1,12 @@
 import { generateText, LanguageModelV1 } from "ai";
 import { LocalizerInput, LocalizerProgressFn } from "./_base";
+import { NormalizedModelSettings } from "../utils/normalize-provider-settings";
 import _ from "lodash";
 
-type ModelSettings = {
-  temperature?: number;
-};
-
 export function createBasicTranslator(
   model: LanguageModelV1,
   systemPrompt: string,
-  settings: ModelSettings = {},
+  settings: NormalizedModelSettings = {},
 ) {
   return async (input: LocalizerInput, onProgress: LocalizerProgressFn) => {
     const chunks = extractPayloadChunks(input.processableData);

packages/cli/src/cli/processor/index.ts (2 additions, 1 deletion)

@@ -5,6 +5,7 @@ import { LocalizerFn } from "./_base";
 import { createLingoLocalizer } from "./lingo";
 import { createBasicTranslator } from "./basic";
 import { createOpenAI } from "@ai-sdk/openai";
+import { normalizeProviderSettings } from "../utils/normalize-provider-settings";
 import { colors } from "../constants";
 import { createAnthropic } from "@ai-sdk/anthropic";
 import { createGoogleGenerativeAI } from "@ai-sdk/google";
@@ -21,7 +22,7 @@ export default function createProcessor(
     return result;
   } else {
     const model = getPureModelProvider(provider);
-    const settings = provider.settings || {};
+    const settings = normalizeProviderSettings(provider.id, provider.settings);
     const result = createBasicTranslator(model, provider.prompt, settings);
     return result;
   }
packages/cli/src/cli/utils/normalize-provider-settings.ts (new file, 37 additions, 0 deletions)

@@ -0,0 +1,37 @@
+export type ProviderModelSettings = {
+  temperature?: number;
+  response_mime_type?: "application/json" | "text/x.enum";
+  response_schema?: Record<string, unknown>;
+};
+
+export type NormalizedModelSettings = {
+  temperature?: number;
+  responseMimeType?: "application/json" | "text/x.enum";
+  responseSchema?: Record<string, unknown>;
+};
+
+export function normalizeProviderSettings(
+  providerId: string | undefined,
+  settings?: ProviderModelSettings | null,
+): NormalizedModelSettings {
+  if (!settings) {
+    return {};
+  }
+
+  const normalized: NormalizedModelSettings = {};
+
+  if (typeof settings.temperature === "number") {
+    normalized.temperature = settings.temperature;
+  }
+
+  if (providerId === "google") {
+    if (settings.response_mime_type) {
+      normalized.responseMimeType = settings.response_mime_type;
+    }
+    if (settings.response_schema) {
+      normalized.responseSchema = settings.response_schema;
+    }
+  }
+
+  return normalized;
+}
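A quick illustration of the mapping this helper performs, using made-up values; the behaviour follows directly from the function above, and the relative import path is illustrative:

    import { normalizeProviderSettings } from "./normalize-provider-settings";

    // Google provider: snake_case keys from the config become camelCase.
    normalizeProviderSettings("google", {
      temperature: 0.2,
      response_mime_type: "application/json",
    });
    // => { temperature: 0.2, responseMimeType: "application/json" }

    // Any other provider id: the Gemini-specific keys are dropped.
    normalizeProviderSettings("openai", {
      temperature: 0.2,
      response_mime_type: "application/json",
    });
    // => { temperature: 0.2 }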

packages/spec/src/config.ts (10 additions, 0 deletions)

@@ -444,6 +444,16 @@ const modelSettingsSchema = Z.object({
     .describe(
       "Controls randomness in model outputs (0=deterministic, 2=very random). Some models like GPT-5 require temperature=1.",
     ),
+  response_mime_type: Z.enum(["application/json", "text/x.enum"])
+    .optional()
+    .describe(
+      "Gemini-specific setting to force raw JSON output without Markdown wrappers.",
+    ),
+  response_schema: Z.record(Z.string(), Z.any())
+    .optional()
+    .describe(
+      "Gemini-specific JSON schema that tunes structured output enforcement.",
+    ),
 })
   .optional()
   .describe("Model-specific settings for translation requests.");
