Skip to content

Commit

Permalink
Ollama Spring Boot Starter - Supported capabilities in application.properties (#96)
Browse files Browse the repository at this point in the history

## Issue
langchain4j/langchain4j#2250

## Change
Add support for passing supported capabilities through
application.properties

## General checklist
- [x] There are no breaking changes
- [x] I have added unit and/or integration tests for my change
- [ ] The tests cover both positive and negative cases
- [x] I have manually run all the unit and integration tests in the
module I have added/changed, and they are all green
- [ ] I have added/updated the
[documentation](https://github.com/langchain4j/langchain4j/tree/main/docs/docs)
- [ ] I have added an example in the [examples
repo](https://github.com/langchain4j/langchain4j-examples) (only for
"big" features)
  • Loading branch information
bidek authored Dec 18, 2024
1 parent 457bca4 commit 3041558
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ OllamaChatModel ollamaChatModel(Properties properties) {
.numPredict(chatModelProperties.getNumPredict())
.stop(chatModelProperties.getStop())
.format(chatModelProperties.getFormat())
.supportedCapabilities(chatModelProperties.getSupportedCapabilities())
.timeout(chatModelProperties.getTimeout())
.maxRetries(chatModelProperties.getMaxRetries())
.customHeaders(chatModelProperties.getCustomHeaders())
Expand All @@ -50,6 +51,7 @@ OllamaStreamingChatModel ollamaStreamingChatModel(Properties properties) {
.numPredict(chatModelProperties.getNumPredict())
.stop(chatModelProperties.getStop())
.format(chatModelProperties.getFormat())
.supportedCapabilities(chatModelProperties.getSupportedCapabilities())
.timeout(chatModelProperties.getTimeout())
.customHeaders(chatModelProperties.getCustomHeaders())
.logRequests(chatModelProperties.getLogRequests())
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
package dev.langchain4j.ollama.spring;

import dev.langchain4j.model.chat.Capability;
import lombok.Getter;
import lombok.Setter;

import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Set;

@Getter
@Setter
Expand All @@ -21,6 +23,7 @@ class ChatModelProperties {
Integer numPredict;
List<String> stop;
String format;
Set<Capability> supportedCapabilities;
Duration timeout;
Integer maxRetries;
Map<String, String> customHeaders;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

import java.util.concurrent.CompletableFuture;

import static dev.langchain4j.model.chat.Capability.RESPONSE_FORMAT_JSON_SCHEMA;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;
Expand Down Expand Up @@ -57,6 +58,25 @@ void should_provide_chat_model() {
});
}

@Test
void should_provide_chat_model_with_supported_capabilities() {
    // Verifies that capabilities set via application properties are propagated
    // to the auto-configured OllamaChatModel (issue langchain4j/langchain4j#2250).
    contextRunner
            .withPropertyValues(
                    "langchain4j.ollama.chat-model.base-url=" + baseUrl(),
                    "langchain4j.ollama.chat-model.model-name=" + MODEL_NAME,
                    // kebab-case to match the other keys above; Spring relaxed
                    // binding also accepts camelCase, but kebab-case is the
                    // canonical form used throughout this test class
                    "langchain4j.ollama.chat-model.supported-capabilities=RESPONSE_FORMAT_JSON_SCHEMA"
            )
            .run(context -> {
                // The bean must be the Ollama implementation and expose the
                // capability configured through properties.
                ChatLanguageModel chatLanguageModel = context.getBean(ChatLanguageModel.class);
                assertThat(chatLanguageModel).isInstanceOf(OllamaChatModel.class);
                assertThat(chatLanguageModel.supportedCapabilities()).contains(RESPONSE_FORMAT_JSON_SCHEMA);

                // The concrete bean and the interface bean are the same singleton.
                assertThat(context.getBean(OllamaChatModel.class)).isSameAs(chatLanguageModel);
            });
}


@Test
void should_provide_streaming_chat_model() {
contextRunner
Expand Down

0 comments on commit 3041558

Please sign in to comment.