Skip to content

Commit b91860c

Browse files
committed
chore: separate out OllamaOptions into Chat & Embedding Options
Signed-off-by: Gareth Evans <[email protected]>
1 parent e0ccc13 commit b91860c

File tree

31 files changed

+1507
-165
lines changed

31 files changed

+1507
-165
lines changed

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/main/java/org/springframework/ai/model/ollama/autoconfigure/OllamaChatProperties.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616

1717
package org.springframework.ai.model.ollama.autoconfigure;
1818

19+
import org.springframework.ai.ollama.api.OllamaChatOptions;
1920
import org.springframework.ai.ollama.api.OllamaModel;
20-
import org.springframework.ai.ollama.api.OllamaOptions;
2121
import org.springframework.boot.context.properties.ConfigurationProperties;
2222
import org.springframework.boot.context.properties.NestedConfigurationProperty;
2323

@@ -38,7 +38,7 @@ public class OllamaChatProperties {
3838
* generative's defaults.
3939
*/
4040
@NestedConfigurationProperty
41-
private OllamaOptions options = OllamaOptions.builder().model(OllamaModel.MISTRAL.id()).build();
41+
private OllamaChatOptions options = OllamaChatOptions.builder().model(OllamaModel.MISTRAL.id()).build();
4242

4343
public String getModel() {
4444
return this.options.getModel();
@@ -48,7 +48,7 @@ public void setModel(String model) {
4848
this.options.setModel(model);
4949
}
5050

51-
public OllamaOptions getOptions() {
51+
public OllamaChatOptions getOptions() {
5252
return this.options;
5353
}
5454

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/main/java/org/springframework/ai/model/ollama/autoconfigure/OllamaEmbeddingProperties.java

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616

1717
package org.springframework.ai.model.ollama.autoconfigure;
1818

19+
import org.springframework.ai.ollama.api.OllamaEmbeddingOptions;
1920
import org.springframework.ai.ollama.api.OllamaModel;
20-
import org.springframework.ai.ollama.api.OllamaOptions;
2121
import org.springframework.boot.context.properties.ConfigurationProperties;
2222
import org.springframework.boot.context.properties.NestedConfigurationProperty;
2323

@@ -38,7 +38,9 @@ public class OllamaEmbeddingProperties {
3838
* generative's defaults.
3939
*/
4040
@NestedConfigurationProperty
41-
private OllamaOptions options = OllamaOptions.builder().model(OllamaModel.MXBAI_EMBED_LARGE.id()).build();
41+
private OllamaEmbeddingOptions options = OllamaEmbeddingOptions.builder()
42+
.model(OllamaModel.MXBAI_EMBED_LARGE.id())
43+
.build();
4244

4345
public String getModel() {
4446
return this.options.getModel();
@@ -48,7 +50,7 @@ public void setModel(String model) {
4850
this.options.setModel(model);
4951
}
5052

51-
public OllamaOptions getOptions() {
53+
public OllamaEmbeddingOptions getOptions() {
5254
return this.options;
5355
}
5456

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/test/java/org/springframework/ai/model/ollama/autoconfigure/OllamaEmbeddingAutoConfigurationTests.java

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -38,9 +38,7 @@ public void propertiesTest() {
3838
new ApplicationContextRunner().withPropertyValues(
3939
// @formatter:off
4040
"spring.ai.ollama.base-url=TEST_BASE_URL",
41-
"spring.ai.ollama.embedding.options.model=MODEL_XYZ",
42-
"spring.ai.ollama.embedding.options.temperature=0.13",
43-
"spring.ai.ollama.embedding.options.topK=13"
41+
"spring.ai.ollama.embedding.options.model=MODEL_XYZ"
4442
// @formatter:on
4543
)
4644

@@ -52,9 +50,6 @@ public void propertiesTest() {
5250

5351
assertThat(embeddingProperties.getModel()).isEqualTo("MODEL_XYZ");
5452
assertThat(connectionProperties.getBaseUrl()).isEqualTo("TEST_BASE_URL");
55-
assertThat(embeddingProperties.getOptions().toMap()).containsKeys("temperature");
56-
assertThat(embeddingProperties.getOptions().toMap().get("temperature")).isEqualTo(0.13);
57-
assertThat(embeddingProperties.getOptions().getTopK()).isEqualTo(13);
5853
});
5954
}
6055

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/test/java/org/springframework/ai/model/ollama/autoconfigure/tool/FunctionCallbackInPromptIT.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
import org.slf4j.Logger;
2525
import org.slf4j.LoggerFactory;
2626
import org.springframework.ai.ollama.api.OllamaModel;
27+
import org.springframework.ai.ollama.api.OllamaChatOptions;
2728
import reactor.core.publisher.Flux;
2829

2930
import org.springframework.ai.chat.messages.AssistantMessage;
@@ -34,7 +35,6 @@
3435
import org.springframework.ai.model.ollama.autoconfigure.BaseOllamaIT;
3536
import org.springframework.ai.model.ollama.autoconfigure.OllamaChatAutoConfiguration;
3637
import org.springframework.ai.ollama.OllamaChatModel;
37-
import org.springframework.ai.ollama.api.OllamaOptions;
3838
import org.springframework.ai.tool.function.FunctionToolCallback;
3939
import org.springframework.boot.autoconfigure.AutoConfigurations;
4040
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
@@ -70,7 +70,7 @@ void functionCallTest() {
7070
UserMessage userMessage = new UserMessage(
7171
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
7272

73-
var promptOptions = OllamaOptions.builder()
73+
var promptOptions = OllamaChatOptions.builder()
7474
.toolCallbacks(List.of(FunctionToolCallback.builder("CurrentWeatherService", new MockWeatherService())
7575
.description(
7676
"Find the weather conditions, forecasts, and temperatures for a location, like a city or state.")
@@ -95,7 +95,7 @@ void streamingFunctionCallTest() {
9595
UserMessage userMessage = new UserMessage(
9696
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
9797

98-
var promptOptions = OllamaOptions.builder()
98+
var promptOptions = OllamaChatOptions.builder()
9999
.toolCallbacks(List.of(FunctionToolCallback.builder("CurrentWeatherService", new MockWeatherService())
100100
.description(
101101
"Find the weather conditions, forecasts, and temperatures for a location, like a city or state.")

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/test/java/org/springframework/ai/model/ollama/autoconfigure/tool/OllamaFunctionCallbackIT.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
import org.junit.jupiter.api.Test;
2424
import org.slf4j.Logger;
2525
import org.slf4j.LoggerFactory;
26+
import org.springframework.ai.ollama.api.OllamaChatOptions;
2627
import reactor.core.publisher.Flux;
2728

2829
import org.springframework.ai.chat.client.ChatClient;
@@ -35,7 +36,6 @@
3536
import org.springframework.ai.model.ollama.autoconfigure.OllamaChatAutoConfiguration;
3637
import org.springframework.ai.model.tool.ToolCallingChatOptions;
3738
import org.springframework.ai.ollama.OllamaChatModel;
38-
import org.springframework.ai.ollama.api.OllamaOptions;
3939
import org.springframework.ai.tool.ToolCallback;
4040
import org.springframework.ai.tool.function.FunctionToolCallback;
4141
import org.springframework.boot.autoconfigure.AutoConfigurations;
@@ -94,7 +94,7 @@ void functionCallTest() {
9494
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
9595

9696
ChatResponse response = chatModel
97-
.call(new Prompt(List.of(userMessage), OllamaOptions.builder().toolNames("WeatherInfo").build()));
97+
.call(new Prompt(List.of(userMessage), OllamaChatOptions.builder().toolNames("WeatherInfo").build()));
9898

9999
logger.info("Response: " + response);
100100

@@ -112,7 +112,7 @@ void streamFunctionCallTest() {
112112
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
113113

114114
Flux<ChatResponse> response = chatModel
115-
.stream(new Prompt(List.of(userMessage), OllamaOptions.builder().toolNames("WeatherInfo").build()));
115+
.stream(new Prompt(List.of(userMessage), OllamaChatOptions.builder().toolNames("WeatherInfo").build()));
116116

117117
String content = response.collectList()
118118
.block()

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/test/java/org/springframework/ai/model/ollama/autoconfigure/tool/OllamaFunctionToolBeanIT.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
import org.slf4j.Logger;
2626
import org.slf4j.LoggerFactory;
2727
import org.springframework.ai.ollama.api.OllamaModel;
28+
import org.springframework.ai.ollama.api.OllamaChatOptions;
2829
import reactor.core.publisher.Flux;
2930

3031
import org.springframework.ai.chat.messages.AssistantMessage;
@@ -36,7 +37,6 @@
3637
import org.springframework.ai.model.ollama.autoconfigure.OllamaChatAutoConfiguration;
3738
import org.springframework.ai.model.tool.ToolCallingChatOptions;
3839
import org.springframework.ai.ollama.OllamaChatModel;
39-
import org.springframework.ai.ollama.api.OllamaOptions;
4040
import org.springframework.ai.support.ToolCallbacks;
4141
import org.springframework.ai.tool.annotation.Tool;
4242
import org.springframework.boot.autoconfigure.AutoConfigurations;
@@ -85,7 +85,7 @@ void toolCallTest() {
8585
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
8686

8787
ChatResponse response = chatModel.call(new Prompt(List.of(userMessage),
88-
OllamaOptions.builder().toolCallbacks(ToolCallbacks.from(myTools)).build()));
88+
OllamaChatOptions.builder().toolCallbacks(ToolCallbacks.from(myTools)).build()));
8989

9090
logger.info("Response: {}", response);
9191

@@ -104,7 +104,7 @@ void functionCallTest() {
104104
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
105105

106106
ChatResponse response = chatModel
107-
.call(new Prompt(List.of(userMessage), OllamaOptions.builder().toolNames("weatherInfo").build()));
107+
.call(new Prompt(List.of(userMessage), OllamaChatOptions.builder().toolNames("weatherInfo").build()));
108108

109109
logger.info("Response: {}", response);
110110

@@ -122,7 +122,7 @@ void streamFunctionCallTest() {
122122
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.");
123123

124124
Flux<ChatResponse> response = chatModel
125-
.stream(new Prompt(List.of(userMessage), OllamaOptions.builder().toolNames("weatherInfo").build()));
125+
.stream(new Prompt(List.of(userMessage), OllamaChatOptions.builder().toolNames("weatherInfo").build()));
126126

127127
String content = response.collectList()
128128
.block()

auto-configurations/models/spring-ai-autoconfigure-model-ollama/src/test/kotlin/org/springframework/ai/model/ollama/autoconfigure/tool/FunctionCallbackContextKotlinIT.kt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ import org.springframework.ai.model.ollama.autoconfigure.BaseOllamaIT
2626
import org.springframework.ai.model.ollama.autoconfigure.OllamaChatAutoConfiguration
2727
import org.springframework.ai.model.tool.ToolCallingChatOptions
2828
import org.springframework.ai.ollama.OllamaChatModel
29-
import org.springframework.ai.ollama.api.OllamaOptions
29+
import org.springframework.ai.ollama.api.OllamaChatOptions
3030
import org.springframework.boot.autoconfigure.AutoConfigurations
3131
import org.springframework.boot.test.context.runner.ApplicationContextRunner
3232
import org.springframework.context.annotation.Bean
@@ -68,7 +68,7 @@ class FunctionCallbackResolverKotlinIT : BaseOllamaIT() {
6868
"What are the weather conditions in San Francisco, Tokyo, and Paris? Find the temperature in Celsius for each of the three locations.")
6969

7070
val response = chatModel
71-
.call(Prompt(listOf(userMessage), OllamaOptions.builder().toolNames("weatherInfo").build()))
71+
.call(Prompt(listOf(userMessage), OllamaChatOptions.builder().toolNames("weatherInfo").build()))
7272

7373
logger.info("Response: $response")
7474

models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/OllamaChatModel.java

Lines changed: 29 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import io.micrometer.observation.contextpropagation.ObservationThreadLocalAccessor;
2929
import org.slf4j.Logger;
3030
import org.slf4j.LoggerFactory;
31+
import org.springframework.ai.ollama.api.OllamaChatOptions;
3132
import reactor.core.publisher.Flux;
3233
import reactor.core.scheduler.Schedulers;
3334

@@ -119,7 +120,7 @@ public class OllamaChatModel implements ChatModel {
119120

120121
private final OllamaApi chatApi;
121122

122-
private final OllamaOptions defaultOptions;
123+
private final OllamaChatOptions defaultOptions;
123124

124125
private final ObservationRegistry observationRegistry;
125126

@@ -137,13 +138,13 @@ public class OllamaChatModel implements ChatModel {
137138

138139
private final RetryTemplate retryTemplate;
139140

140-
public OllamaChatModel(OllamaApi ollamaApi, OllamaOptions defaultOptions, ToolCallingManager toolCallingManager,
141+
public OllamaChatModel(OllamaApi ollamaApi, OllamaChatOptions defaultOptions, ToolCallingManager toolCallingManager,
141142
ObservationRegistry observationRegistry, ModelManagementOptions modelManagementOptions) {
142143
this(ollamaApi, defaultOptions, toolCallingManager, observationRegistry, modelManagementOptions,
143144
new DefaultToolExecutionEligibilityPredicate(), RetryUtils.DEFAULT_RETRY_TEMPLATE);
144145
}
145146

146-
public OllamaChatModel(OllamaApi ollamaApi, OllamaOptions defaultOptions, ToolCallingManager toolCallingManager,
147+
public OllamaChatModel(OllamaApi ollamaApi, OllamaChatOptions defaultOptions, ToolCallingManager toolCallingManager,
147148
ObservationRegistry observationRegistry, ModelManagementOptions modelManagementOptions,
148149
ToolExecutionEligibilityPredicate toolExecutionEligibilityPredicate, RetryTemplate retryTemplate) {
149150

@@ -390,21 +391,25 @@ private Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse previousCh
390391

391392
Prompt buildRequestPrompt(Prompt prompt) {
392393
// Process runtime options
393-
OllamaOptions runtimeOptions = null;
394+
OllamaChatOptions runtimeOptions = null;
394395
if (prompt.getOptions() != null) {
395-
if (prompt.getOptions() instanceof ToolCallingChatOptions toolCallingChatOptions) {
396+
if (prompt.getOptions() instanceof OllamaOptions ollamaOptions) {
397+
runtimeOptions = ModelOptionsUtils.copyToTarget(OllamaChatOptions.fromOptions(ollamaOptions),
398+
OllamaChatOptions.class, OllamaChatOptions.class);
399+
}
400+
else if (prompt.getOptions() instanceof ToolCallingChatOptions toolCallingChatOptions) {
396401
runtimeOptions = ModelOptionsUtils.copyToTarget(toolCallingChatOptions, ToolCallingChatOptions.class,
397-
OllamaOptions.class);
402+
OllamaChatOptions.class);
398403
}
399404
else {
400405
runtimeOptions = ModelOptionsUtils.copyToTarget(prompt.getOptions(), ChatOptions.class,
401-
OllamaOptions.class);
406+
OllamaChatOptions.class);
402407
}
403408
}
404409

405410
// Define request options by merging runtime options and default options
406-
OllamaOptions requestOptions = ModelOptionsUtils.merge(runtimeOptions, this.defaultOptions,
407-
OllamaOptions.class);
411+
OllamaChatOptions requestOptions = ModelOptionsUtils.merge(runtimeOptions, this.defaultOptions,
412+
OllamaChatOptions.class);
408413
// Merge @JsonIgnore-annotated options explicitly since they are ignored by
409414
// Jackson, used by ModelOptionsUtils.
410415
if (runtimeOptions != null) {
@@ -476,7 +481,13 @@ else if (message instanceof ToolResponseMessage toolMessage) {
476481
throw new IllegalArgumentException("Unsupported message type: " + message.getMessageType());
477482
}).flatMap(List::stream).toList();
478483

479-
OllamaOptions requestOptions = (OllamaOptions) prompt.getOptions();
484+
OllamaChatOptions requestOptions = null;
485+
if (prompt.getOptions() instanceof OllamaChatOptions) {
486+
requestOptions = (OllamaChatOptions) prompt.getOptions();
487+
}
488+
else {
489+
requestOptions = OllamaChatOptions.fromOptions((OllamaOptions) prompt.getOptions());
490+
}
480491

481492
OllamaApi.ChatRequest.Builder requestBuilder = OllamaApi.ChatRequest.builder(requestOptions.getModel())
482493
.stream(stream)
@@ -522,7 +533,7 @@ private List<ChatRequest.Tool> getTools(List<ToolDefinition> toolDefinitions) {
522533

523534
@Override
524535
public ChatOptions getDefaultOptions() {
525-
return OllamaOptions.fromOptions(this.defaultOptions);
536+
return OllamaChatOptions.fromOptions(this.defaultOptions);
526537
}
527538

528539
/**
@@ -547,7 +558,7 @@ public static final class Builder {
547558

548559
private OllamaApi ollamaApi;
549560

550-
private OllamaOptions defaultOptions = OllamaOptions.builder().model(OllamaModel.MISTRAL.id()).build();
561+
private OllamaChatOptions defaultOptions = OllamaChatOptions.builder().model(OllamaModel.MISTRAL.id()).build();
551562

552563
private ToolCallingManager toolCallingManager;
553564

@@ -567,7 +578,13 @@ public Builder ollamaApi(OllamaApi ollamaApi) {
567578
return this;
568579
}
569580

581+
@Deprecated
570582
public Builder defaultOptions(OllamaOptions defaultOptions) {
583+
this.defaultOptions = OllamaChatOptions.fromOptions(defaultOptions);
584+
return this;
585+
}
586+
587+
public Builder defaultOptions(OllamaChatOptions defaultOptions) {
571588
this.defaultOptions = defaultOptions;
572589
return this;
573590
}

0 commit comments

Comments (0)