diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
index 06b6aa0..adc3afc 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/maven.yml
@@ -31,5 +31,5 @@ jobs:
run: mvn -B package --file pom.xml
# Optional: Uploads the full dependency graph to GitHub to improve the quality of Dependabot alerts this repository can receive
- - name: Update dependency graph
- uses: advanced-security/maven-dependency-submission-action@571e99aab1055c2e71a1e2309b9691de18d6b7d6
+ #- name: Update dependency graph
+ # uses: advanced-security/maven-dependency-submission-action@571e99aab1055c2e71a1e2309b9691de18d6b7d6
diff --git a/springboot-modules/spring-ai/pom.xml b/springboot-modules/spring-ai/pom.xml
index 7b68a7b..68252bd 100644
--- a/springboot-modules/spring-ai/pom.xml
+++ b/springboot-modules/spring-ai/pom.xml
@@ -12,6 +12,31 @@
spring-ai
+
+ org.springframework.ai
+ spring-ai-starter-model-ollama
+
+
+ org.springframework.boot
+ spring-boot-testcontainers
+ ${spring-boot.version}
+ test
+
+
+ org.springframework.ai
+ spring-ai-spring-boot-testcontainers
+ test
+
+
+ org.testcontainers
+ junit-jupiter
+ test
+
+
+ org.testcontainers
+ ollama
+ test
+
org.springframework.ai
spring-ai-starter-model-openai
@@ -48,6 +73,14 @@
pom
import
+
+
+ org.testcontainers
+ testcontainers-bom
+ 1.21.1
+ pom
+ import
+
diff --git a/springboot-modules/spring-ai/src/main/java/com/kodesastra/ai/ollama/SpringAiOllamaApplication.java b/springboot-modules/spring-ai/src/main/java/com/kodesastra/ai/ollama/SpringAiOllamaApplication.java
new file mode 100644
index 0000000..5e7c048
--- /dev/null
+++ b/springboot-modules/spring-ai/src/main/java/com/kodesastra/ai/ollama/SpringAiOllamaApplication.java
@@ -0,0 +1,12 @@
+package com.kodesastra.ai.ollama;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+/**
+ * Entry point for the Spring AI Ollama demo application.
+ *
+ * <p>Standard Spring Boot bootstrap class: component scanning and
+ * auto-configuration start from this package ({@code com.kodesastra.ai.ollama}).
+ */
+@SpringBootApplication
+public class SpringAiOllamaApplication {
+
+ /** Boots the Spring application context. */
+ public static void main(String[] args) {
+ SpringApplication.run(SpringAiOllamaApplication.class, args);
+ }
+}
diff --git a/springboot-modules/spring-ai/src/main/resources/application-ollama.properties b/springboot-modules/spring-ai/src/main/resources/application-ollama.properties
new file mode 100644
index 0000000..89e1a23
--- /dev/null
+++ b/springboot-modules/spring-ai/src/main/resources/application-ollama.properties
@@ -0,0 +1,2 @@
+spring.application.name=spring-ai-ollama
+spring.autoconfigure.exclude=org.springframework.ai.model.openai.autoconfigure.OpenAiModerationAutoConfiguration,org.springframework.ai.model.openai.autoconfigure.OpenAiEmbeddingAutoConfiguration,org.springframework.ai.model.openai.autoconfigure.OpenAiChatAutoConfiguration,org.springframework.ai.model.openai.autoconfigure.OpenAiAudioSpeechAutoConfiguration,org.springframework.ai.model.openai.autoconfigure.OpenAiAudioTranscriptionAutoConfiguration,org.springframework.ai.model.openai.autoconfigure.OpenAiImageAutoConfiguration
\ No newline at end of file
diff --git a/springboot-modules/spring-ai/src/main/resources/puml/springai-ollama-cld.puml b/springboot-modules/spring-ai/src/main/resources/puml/springai-ollama-cld.puml
new file mode 100644
index 0000000..91b5311
--- /dev/null
+++ b/springboot-modules/spring-ai/src/main/resources/puml/springai-ollama-cld.puml
@@ -0,0 +1,57 @@
+
+@startuml
+'https://plantuml.com/class-diagram
+set namespaceSeparator none
+scale 1
+skinparam padding 0
+skinparam ranksep 50
+hide empty attributes
+skinparam Handwritten false
+skinparam ClassBorderColor black
+skinparam BackgroundColor #F0EDDE
+skinparam ClassAttributeFontColor #222222
+skinparam ClassFontStyle bold
+
+skinparam class {
+ArrowColor #3C88A3
+ArrowFontColor #3C88A3
+hide empty attributes
+skinparam Handwritten false
+skinparam ClassBorderColor black
+BackgroundColor #FFFFFF
+}
+'important Spring AI Ollama classes
+class "OllamaChatModel" as ocm {
+ +call(Prompt prompt): ChatResponse
+ }
+
+class "ChatResponse" as cr {
+ +getResult(): Generation
+}
+
+class "OllamaOptions" as oo {
+ +builder(): OllamaOptions.Builder
+ +getTemperature(): Double
+ +getModel(): OllamaModel
+ ..
+ Other methods to set and get Ollama options
+ }
+
+class "Prompt" as p {
+ +Prompt(String prompt, OllamaOptions options)
+ +builder(): Prompt.Builder
+}
+
+class "OllamaOptions.Builder" as ob {
+ +temperature(Double temperature): OllamaOptions.Builder
+ +model(OllamaModel model)
+ +build(): OllamaOptions
+ ..
+ Other methods to build Ollama options
+}
+
+ocm .down.> p : uses
+ocm -up-> cr : call(Prompt):ChatResponse
+oo +-down- ob : static nested
+p .up.> ob : uses
+@enduml
diff --git a/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/OllamaTestContainersDefaultConfig.java b/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/OllamaTestContainersDefaultConfig.java
new file mode 100644
index 0000000..dc78219
--- /dev/null
+++ b/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/OllamaTestContainersDefaultConfig.java
@@ -0,0 +1,22 @@
+package com.kodesastra.ai.ollama;
+
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Profile;
+import org.testcontainers.ollama.OllamaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+/**
+ * Test configuration that provisions an Ollama Docker container for tests
+ * running under the "ollama" profile. The {@code @ServiceConnection} bean
+ * exposes the container's connection details to Spring Boot's
+ * service-connection auto-configuration, so no base-url property is needed.
+ */
+@TestConfiguration(proxyBeanMethods = false)
+@Profile("ollama")
+class OllamaTestContainersDefaultConfig {
+ // NOTE(review): "latest" is not reproducible — consider pinning a specific
+ // ollama/ollama tag so test runs don't change underneath the build.
+ @Bean
+ @ServiceConnection
+ OllamaContainer ollamaContainer() {
+ return new OllamaContainer(
+ DockerImageName.parse("ollama/ollama:latest")
+ ).withCreateContainerCmdModifier(cmd ->
+ // Clears the host config's device requests (how GPUs are attached to a
+ // container) — presumably so the container starts on hosts without GPU
+ // support. TODO confirm intent.
+ cmd.getHostConfig().withDeviceRequests(null)
+ );
+ }
+}
diff --git a/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/SpringAiOllamaDefaultConfigLiveTest.java b/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/SpringAiOllamaDefaultConfigLiveTest.java
new file mode 100644
index 0000000..07e6417
--- /dev/null
+++ b/springboot-modules/spring-ai/src/test/java/com/kodesastra/ai/ollama/SpringAiOllamaDefaultConfigLiveTest.java
@@ -0,0 +1,79 @@
+package com.kodesastra.ai.ollama;
+
+import java.io.IOException;
+
+
+import static org.assertj.core.api.Assertions.*;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.ai.chat.model.ChatResponse;
+import org.springframework.ai.chat.prompt.Prompt;
+import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.context.annotation.Import;
+import org.springframework.test.context.ActiveProfiles;
+import org.testcontainers.containers.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.ollama.OllamaContainer;
+
+/**
+ * Live integration test: boots the full Spring context with an Ollama
+ * container provided by {@link OllamaTestContainersDefaultConfig} (imported
+ * below, activated via the "ollama" profile) and exercises
+ * {@link OllamaChatModel} against a real model. Requires a running Docker
+ * daemon and network access to pull the image/model.
+ */
+@SpringBootTest
+@Import(OllamaTestContainersDefaultConfig.class)
+@Testcontainers
+// PER_CLASS lifecycle lets @BeforeAll be a non-static method so it can use
+// the autowired container field.
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+@ActiveProfiles("ollama")
+class SpringAiOllamaDefaultConfigLiveTest {
+ private final Logger logger = LoggerFactory.getLogger(SpringAiOllamaDefaultConfigLiveTest.class);
+
+ @Autowired
+ private OllamaContainer ollamaContainer;
+
+ @Autowired
+ private OllamaChatModel ollamaChatModel;
+
+ /**
+ * Pulls the llama3.2 model inside the running container before any test
+ * executes, failing fast if the pull does not succeed.
+ *
+ * @throws IOException if the model pull exits with a non-zero status
+ * @throws InterruptedException if the in-container exec is interrupted
+ */
+ @BeforeAll
+ public void setup() throws IOException, InterruptedException {
+ //print the Ollama URL and port
+ // NOTE(review): confirm OllamaContainer exposes getPort() in this
+ // Testcontainers version; getEndpoint() alone may be sufficient here.
+ logger.info("Ollama URL: {}, port: {}", ollamaContainer.getEndpoint(), ollamaContainer.getPort());
+ Container.ExecResult execResult = ollamaContainer.execInContainer("ollama", "pull",
+ OllamaModel.LLAMA3_2.getName());
+ if (execResult.getExitCode() != 0) {
+ logger.error("Failed to pull model: {}", execResult.getStderr());
+ throw new IOException("Failed to pull model: " + execResult.getStderr());
+ }
+ }
+
+ /**
+ * Sends a context-grounded prompt and asserts the model's answer mentions
+ * the key phrase from the supplied context.
+ */
+ @Test
+ void givenDefaultOllamaConnection_whenInvokedWithPrompt_thenResponds() {
+ logger.info("SpringAIOllamaLiveTest context loaded successfully.");
+ String prompt = """
+ Context:
+ The Amazon rainforest is the largest tropical rainforest in the world, spanning several countries in South America.
+ It is home to a vast diversity of plant and animal species, many of which are not found anywhere else on Earth.
+ The rainforest plays a crucial role in regulating the global climate by absorbing large amounts of carbon dioxide.
+ Question: Why is the Amazon rainforest important for the Earth's climate?
+ Instructions:
+ Please answer strictly from the context provided in the prompt and do not include any additional information.
+ Keep the answer short and concise.
+ """;
+
+ // Low temperature (0.4) — presumably to reduce output variance so the
+ // substring assertion below stays stable; LLM output is still
+ // non-deterministic in principle.
+ ChatResponse response = ollamaChatModel.call(new Prompt(prompt, OllamaOptions.builder()
+ .model(OllamaModel.LLAMA3_2)
+ .temperature(0.4)
+ .build()));
+ assertThat(response.getResult()
+ .getOutput()).isNotNull()
+ .extracting(output -> output.getText().toLowerCase())
+ .asString()
+ .contains("carbon dioxide");
+
+ logger.info("Response: {}", response.getResult()
+ .getOutput()
+ .getText());
+ }
+}