diff --git a/app/src/main/java/gemini/workshop/CallGemma.java b/app/src/main/java/gemini/workshop/CallGemma.java
index df30ad2..6d64e6b 100644
--- a/app/src/main/java/gemini/workshop/CallGemma.java
+++ b/app/src/main/java/gemini/workshop/CallGemma.java
@@ -1,46 +1,33 @@
 package gemini.workshop;
 
-import com.github.dockerjava.api.model.Image;
+import com.github.dockerjava.api.command.InspectContainerResponse;
 import dev.langchain4j.model.chat.ChatLanguageModel;
 import dev.langchain4j.model.ollama.OllamaChatModel;
-import org.testcontainers.DockerClientFactory;
 import org.testcontainers.ollama.OllamaContainer;
-import org.testcontainers.utility.DockerImageName;
 
 import java.io.IOException;
-import java.util.List;
 
 public class CallGemma {
-    private static final String TC_OLLAMA_GEMMA_2_B = "tc-ollama-gemma-2b";
-
-    /** Creating an Ollama container with Gemma 2B if it doesn't exist. */
-    private static OllamaContainer createGemmaOllamaContainer() throws IOException, InterruptedException {
-
-        // check if the custom Gemma Ollama image exists already
-        List listImagesCmd = DockerClientFactory.lazyClient()
-                .listImagesCmd()
-                .withImageNameFilter(TC_OLLAMA_GEMMA_2_B)
-                .exec();
-
-        if (listImagesCmd.isEmpty()) {
-            System.out.println("Creating a new Ollama container with Gemma 2B image...");
-            OllamaContainer ollama = new OllamaContainer("ollama/ollama:0.1.26");
-            ollama.start();
-            ollama.execInContainer("ollama", "pull", "gemma:2b");
-            ollama.commitToImage(TC_OLLAMA_GEMMA_2_B);
-            return ollama;
-        } else {
-            System.out.println("Using existing Ollama container with Gemma 2B image...");
-            // Substitute the default Ollama image with our Gemma variant
-            return new OllamaContainer(
-                    DockerImageName.parse(TC_OLLAMA_GEMMA_2_B)
-                            .asCompatibleSubstituteFor("ollama/ollama"));
-        }
-    }
-
     public static void main(String[] args) throws IOException, InterruptedException {
-        OllamaContainer ollama = createGemmaOllamaContainer();
+        OllamaContainer ollama = new OllamaContainer("ollama/ollama:0.1.26") {
+            {
+                withReuse(true);
+            }
+
+            @Override
+            protected void containerIsStarted(InspectContainerResponse containerInfo, boolean reused) {
+                if (reused) {
+                    return;
+                }
+
+                try {
+                    execInContainer("ollama", "pull", "gemma:2b");
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        };
         ollama.start();
 
         ChatLanguageModel model = OllamaChatModel.builder()