├── .github └── workflows │ └── commit-stage.yml ├── .gitignore ├── .sdkmanrc ├── 01-chat-models ├── chat-models-ollama │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ └── ChatModelsOllamaApplication.java │ │ └── resources │ │ │ └── application.yml │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── ChatModelsOllamaApplicationTests.java │ │ └── TestChatModelsOllamaApplication.java └── chat-models-openai │ ├── README.md │ ├── build.gradle │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── thomasvitale │ │ │ └── ai │ │ │ └── langchain4j │ │ │ ├── ChatController.java │ │ │ └── ChatModelsOpenaiApplication.java │ └── resources │ │ └── application.yml │ └── test │ └── java │ └── com │ └── thomasvitale │ └── ai │ └── langchain4j │ └── ChatModelsOpenaiApplicationTests.java ├── 02-prompts ├── prompts-basics-ollama │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ └── PromptsBasicsOllamaApplication.java │ │ └── resources │ │ │ └── application.yml │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── PromptsBasicsOllamaApplicationTests.java │ │ └── TestPromptsBasicsOllamaApplication.java ├── prompts-basics-openai │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ └── PromptsBasicsApplication.java │ │ └── resources │ │ │ └── application.yml │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ └── PromptsBasicsApplicationTests.java ├── prompts-messages-ollama │ ├── README.md │ 
├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ └── PromptsMessagesOllamaApplication.java │ │ └── resources │ │ │ ├── application.yml │ │ │ └── prompts │ │ │ └── system-message.st │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── PromptsMessagesOllamaApplicationTests.java │ │ └── TestPromptsMessagesOllamaApplication.java ├── prompts-messages-openai │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ └── PromptsMessagesApplication.java │ │ └── resources │ │ │ ├── application.yml │ │ │ └── prompts │ │ │ └── system-message.st │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ └── PromptsMessagesApplicationTests.java ├── prompts-templates-ollama │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ ├── MusicQuestion.java │ │ │ │ └── PromptsTemplatesOllamaApplication.java │ │ └── resources │ │ │ ├── application.yml │ │ │ └── prompts │ │ │ └── system-message.st │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── PromptsTemplatesOllamaApplicationTests.java │ │ └── TestPromptsTemplatesOllamaApplication.java └── prompts-templates-openai │ ├── README.md │ ├── build.gradle │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── thomasvitale │ │ │ └── ai │ │ │ └── langchain4j │ │ │ ├── ChatController.java │ │ │ ├── ChatService.java │ │ │ ├── MusicQuestion.java │ │ │ └── PromptsTemplatesApplication.java │ └── resources │ │ ├── application.yml │ │ └── prompts │ │ └── 
system-message.st │ └── test │ └── java │ └── com │ └── thomasvitale │ └── ai │ └── langchain4j │ └── PromptsTemplatesApplicationTests.java ├── 03-output-parsers ├── output-parsers-bean-ollama │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── ArtistInfo.java │ │ │ │ ├── ChatController.java │ │ │ │ ├── ChatService.java │ │ │ │ ├── MusicQuestion.java │ │ │ │ └── OutputParsersBeanOllamaApplication.java │ │ └── resources │ │ │ └── application.yml │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── OutputParsersBeanOllamaApplicationTests.java │ │ └── TestOutputParsersBeanOpenAiApplication.java └── output-parsers-bean-openai │ ├── README.md │ ├── build.gradle │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── thomasvitale │ │ │ └── ai │ │ │ └── langchain4j │ │ │ ├── ArtistInfo.java │ │ │ ├── ChatController.java │ │ │ ├── ChatService.java │ │ │ ├── MusicQuestion.java │ │ │ └── OutputParsersBeanOpenAiApplication.java │ └── resources │ │ └── application.yml │ └── test │ └── java │ └── com │ └── thomasvitale │ └── ai │ └── langchain4j │ └── OutputParsersBeanOpenAiApplicationTests.java ├── 04-embedding-models ├── embedding-models-ollama │ ├── README.md │ ├── build.gradle │ └── src │ │ ├── main │ │ ├── java │ │ │ └── com │ │ │ │ └── thomasvitale │ │ │ │ └── ai │ │ │ │ └── langchain4j │ │ │ │ ├── EmbeddingController.java │ │ │ │ └── EmbeddingModelsOllamaApplication.java │ │ └── resources │ │ │ └── application.yml │ │ └── test │ │ └── java │ │ └── com │ │ └── thomasvitale │ │ └── ai │ │ └── langchain4j │ │ ├── EmbeddingModelsOllamaApplicationTests.java │ │ └── TestEmbeddingModelsOllamaApplication.java └── embedding-models-openai │ ├── README.md │ ├── build.gradle │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── thomasvitale │ │ │ └── ai │ │ │ └── langchain4j │ │ │ ├── EmbeddingController.java │ │ │ └── 
EmbeddingModelsOpenAiApplication.java │ └── resources │ │ └── application.yml │ └── test │ └── java │ └── com │ └── thomasvitale │ └── ai │ └── langchain4j │ └── EmbeddingModelsOpenAiApplicationTests.java ├── LICENSE ├── README.md ├── buildSrc └── build.gradle ├── compose.yml ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── settings.gradle /.github/workflows/commit-stage.yml: -------------------------------------------------------------------------------- 1 | name: Commit Stage 2 | on: 3 | push: 4 | branches: 5 | - main 6 | pull_request: 7 | branches: 8 | - main 9 | 10 | jobs: 11 | build: 12 | name: Build 13 | runs-on: ubuntu-22.04 14 | permissions: 15 | contents: read 16 | steps: 17 | - name: Check out source code 18 | uses: actions/checkout@v4 19 | 20 | - name: Set up Java 21 | uses: actions/setup-java@v4 22 | with: 23 | java-version: 21 24 | distribution: temurin 25 | 26 | - name: Compile and test 27 | run: ./gradlew build 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.nar 17 | *.ear 18 | *.zip 19 | *.tar.gz 20 | *.rar 21 | 22 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 23 | hs_err_pid* 24 | 25 | HELP.md 26 | .gradle 27 | build/ 28 | !gradle/wrapper/gradle-wrapper.jar 29 | !**/src/main/**/build/ 30 | !**/src/test/**/build/ 31 | 32 | ### STS ### 33 | .apt_generated 34 | .classpath 35 | .factorypath 36 | .project 37 | .settings 38 | .springBeans 39 | .sts4-cache 40 | bin/ 41 | !**/src/main/**/bin/ 42 | !**/src/test/**/bin/ 43 | 44 | ### IntelliJ IDEA ### 45 | .idea 46 | *.iws 47 | *.iml 48 | 
*.ipr 49 | out/ 50 | !**/src/main/**/out/ 51 | !**/src/test/**/out/ 52 | 53 | ### NetBeans ### 54 | /nbproject/private/ 55 | /nbbuild/ 56 | /dist/ 57 | /nbdist/ 58 | /.nb-gradle/ 59 | 60 | ### VS Code ### 61 | .vscode/ 62 | 63 | ################################ 64 | ############ MAC ############### 65 | ################################ 66 | 67 | # General 68 | .DS_Store 69 | *.DS_Store 70 | **/.DS_Store 71 | .AppleDouble 72 | .LSOverride 73 | 74 | # Icon must end with two \r 75 | Icon 76 | 77 | # Thumbnails 78 | ._* 79 | 80 | # Files that might appear in the root of a volume 81 | .DocumentRevisions-V100 82 | .fseventsd 83 | .Spotlight-V100 84 | .TemporaryItems 85 | .Trashes 86 | .VolumeIcon.icns 87 | .com.apple.timemachine.donotpresent 88 | 89 | # Directories potentially created on remote AFP share 90 | .AppleDB 91 | .AppleDesktop 92 | Network Trash Folder 93 | Temporary Items 94 | .apdisk 95 | -------------------------------------------------------------------------------- /.sdkmanrc: -------------------------------------------------------------------------------- 1 | # Use sdkman to run "sdk env" to initialize with correct JDK version 2 | # Enable auto-env through the sdkman_auto_env config 3 | # See https://sdkman.io/usage#config 4 | # A summary is to add the following to ~/.sdkman/etc/config 5 | # sdkman_auto_env=true 6 | java=21.0.2-tem 7 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Chat Models: Ollama 2 | 3 | Text generation with LLMs via Ollama. 4 | 5 | ## Description 6 | 7 | LangChain4j provides a `ChatLanguageModel` abstraction for integrating with LLMs via several providers, including Ollama. 8 | 9 | When using the _LangChain4j Ollama Spring Boot Starter_, a `ChatLanguageModel` object is autoconfigured for you to use Ollama. 10 | By default, the _mistral_ model is used. 
11 | 12 | ```java 13 | @RestController 14 | class ChatController { 15 | private final ChatLanguageModel chatLanguageModel; 16 | 17 | ChatController(ChatLanguageModel chatLanguageModel) { 18 | this.chatLanguageModel = chatLanguageModel; 19 | } 20 | 21 | @GetMapping("/ai/chat") 22 | String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) { 23 | return chatLanguageModel.generate(message); 24 | } 25 | } 26 | ``` 27 | 28 | ## Running the application 29 | 30 | The application relies on Ollama for providing LLMs. You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 31 | 32 | ### When using Ollama as a native application 33 | 34 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 35 | Then, use Ollama to run the _mistral_ large language model. 36 | 37 | ```shell 38 | ollama run mistral 39 | ``` 40 | 41 | Finally, run the Spring Boot application. 42 | 43 | ```shell 44 | ./gradlew bootRun 45 | ``` 46 | 47 | ### When using Ollama as a dev service with Docker Compose 48 | 49 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 50 | To enable that, uncomment this dependency in the `build.gradle` file. 51 | 52 | ```groovy 53 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 54 | ``` 55 | 56 | Then, run the Spring Boot application. 57 | 58 | ```shell 59 | ./gradlew bootRun 60 | ``` 61 | 62 | ### When using Ollama as a dev service with Testcontainers 63 | 64 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 
65 | 66 | ```shell 67 | ./gradlew bootTestRun 68 | ``` 69 | 70 | ## Calling the application 71 | 72 | You can now call the application that will use Ollama and mistral to generate text based on a default prompt. 73 | This example uses [httpie](https://httpie.io) to send HTTP requests. 74 | 75 | ```shell 76 | http :8080/ai/chat 77 | ``` 78 | 79 | Try passing your custom prompt and check the result. 80 | 81 | ```shell 82 | http :8080/ai/chat message=="What is the capital of Italy?" 83 | ``` 84 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: 
-------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.model.chat.ChatLanguageModel; 4 | import org.springframework.web.bind.annotation.GetMapping; 5 | import org.springframework.web.bind.annotation.RequestParam; 6 | import org.springframework.web.bind.annotation.RestController; 7 | 8 | @RestController 9 | class ChatController { 10 | 11 | private final ChatLanguageModel chatLanguageModel; 12 | 13 | ChatController(ChatLanguageModel chatLanguageModel) { 14 | this.chatLanguageModel = chatLanguageModel; 15 | } 16 | 17 | @GetMapping("/ai/chat") 18 | String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) { 19 | return chatLanguageModel.generate(message); 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatModelsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class ChatModelsOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(ChatModelsOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | chat: 9 | model: mistral 10 | options: 11 | temperature: 0.7 12 | client: 13 | log-requests: true 14 | 
-------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/src/test/java/com/thomasvitale/ai/langchain4j/ChatModelsOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class ChatModelsOllamaApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestChatModelsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestChatModelsOllamaApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(ChatModelsOllamaApplication::main).with(TestChatModelsOllamaApplication.class).run(args); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- 
/01-chat-models/chat-models-openai/README.md: -------------------------------------------------------------------------------- 1 | # Chat Models: OpenAI 2 | 3 | Text generation with LLMs via OpenAI. 4 | 5 | ## Description 6 | 7 | LangChain4j provides a `ChatLanguageModel` abstraction for integrating with LLMs via several providers, including OpenAI. 8 | 9 | When using the _LangChain4j OpenAI Spring Boot Starter_, a `ChatLanguageModel` object is autoconfigured for you to use OpenAI. 10 | By default, the _gpt-3.5-turbo_ model is used. 11 | 12 | ```java 13 | @RestController 14 | class ChatController { 15 | private final ChatLanguageModel chatLanguageModel; 16 | 17 | ChatController(ChatLanguageModel chatLanguageModel) { 18 | this.chatLanguageModel = chatLanguageModel; 19 | } 20 | 21 | @GetMapping("/ai/chat") 22 | String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) { 23 | return chatLanguageModel.generate(message); 24 | } 25 | } 26 | ``` 27 | 28 | ## Running the application 29 | 30 | The application relies on an OpenAI API for providing LLMs. 31 | 32 | ### When using OpenAI 33 | 34 | First, make sure you have an OpenAI account. 35 | Then, define an environment variable with the OpenAI API Key associated to your OpenAI account as the value. 36 | 37 | ```shell 38 | export LANGCHAIN4J_OPENAI_API_KEY= 39 | ``` 40 | 41 | Finally, run the Spring Boot application. 42 | 43 | ```shell 44 | ./gradlew bootRun 45 | ``` 46 | 47 | ## Calling the application 48 | 49 | You can now call the application that will use OpenAI and _gpt-3.5-turbo_ to generate text based on a default prompt. 50 | This example uses [httpie](https://httpie.io) to send HTTP requests. 51 | 52 | ```shell 53 | http :8080/ai/chat 54 | ``` 55 | 56 | Try passing your custom prompt and check the result. 57 | 58 | ```shell 59 | http :8080/ai/chat message=="What is the capital of Italy?" 
60 | ``` 61 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 | 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.model.chat.ChatLanguageModel; 4 | import org.springframework.web.bind.annotation.GetMapping; 5 | import org.springframework.web.bind.annotation.RequestParam; 6 | import org.springframework.web.bind.annotation.RestController; 7 | 8 | @RestController 9 | class ChatController { 10 | 11 | private final ChatLanguageModel chatLanguageModel; 12 | 13 | ChatController(ChatLanguageModel chatLanguageModel) { 14 | this.chatLanguageModel = chatLanguageModel; 15 | } 16 | 17 | @GetMapping("/ai/chat") 18 | String chat(@RequestParam(defaultValue = "What did Gandalf say to the Balrog?") String message) { 19 | return chatLanguageModel.generate(message); 
20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatModelsOpenaiApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class ChatModelsOpenaiApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(ChatModelsOpenaiApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | chat: 12 | options: 13 | model: gpt-3.5-turbo 14 | temperature: 0.7 15 | -------------------------------------------------------------------------------- /01-chat-models/chat-models-openai/src/test/java/com/thomasvitale/ai/langchain4j/ChatModelsOpenaiApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class ChatModelsOpenaiApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Basic: Ollama 2 | 3 | Prompting using 
simple text with LLMs via Ollama. 4 | 5 | ## Running the application 6 | 7 | The application relies on Ollama for providing LLMs. You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 8 | 9 | ### When using Ollama as a native application 10 | 11 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 12 | Then, use Ollama to run the _mistral_ large language model. 13 | 14 | ```shell 15 | ollama run mistral 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ### When using Ollama as a dev service with Docker Compose 25 | 26 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 27 | To enable that, uncomment this dependency in the `build.gradle` file. 28 | 29 | ```groovy 30 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 31 | ``` 32 | 33 | Then, run the Spring Boot application. 34 | 35 | ```shell 36 | ./gradlew bootRun 37 | ``` 38 | 39 | ### When using Ollama as a dev service with Testcontainers 40 | 41 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 42 | 43 | ```shell 44 | ./gradlew bootTestRun 45 | ``` 46 | 47 | ## Calling the application 48 | 49 | You can now call the application that will use Ollama and mistral to generate an answer to your questions. 50 | This example uses [httpie](https://httpie.io) to send HTTP requests. 51 | 52 | ```shell 53 | http --raw "What is the capital of Italy?" :8080/ai/chat/simple 54 | ``` 55 | 56 | ```shell 57 | http --raw "What is the capital of Italy?" 
:8080/ai/chat/prompt 58 | ``` 59 | 60 | ```shell 61 | http --raw "What is the capital of Italy?" :8080/ai/chat/full 62 | ``` 63 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.util.Map; 8 | import java.util.Objects; 9 
| 10 | @RestController 11 | class ChatController { 12 | 13 | private final ChatService chatService; 14 | 15 | ChatController(ChatService chatService) { 16 | this.chatService = chatService; 17 | } 18 | 19 | @PostMapping("/ai/chat/simple") 20 | String chatWithText(@RequestBody String input) { 21 | return chatService.chatWithText(input); 22 | } 23 | 24 | @PostMapping("/ai/chat/prompt") 25 | String chatWithPrompt(@RequestBody String input) { 26 | return chatService.chatWithPrompt(input).content().text(); 27 | } 28 | 29 | @PostMapping("/ai/chat/full") 30 | Map chatWithPromptAndFullResponse(@RequestBody String message) { 31 | var chatResponse = chatService.chatWithPrompt(message); 32 | return Map.of( 33 | "content", Map.of( 34 | "message", chatResponse.content().text(), 35 | "type", chatResponse.content().type() 36 | ), 37 | "tokenUsage", Map.of( 38 | "inputTokenCount", Objects.requireNonNullElse(chatResponse.tokenUsage().inputTokenCount(), 0), // Ollama might not populate this field 39 | "outputTokenCount", chatResponse.tokenUsage().outputTokenCount(), 40 | "totalTokenCount", chatResponse.tokenUsage().totalTokenCount() 41 | ), 42 | "finishReason", Objects.requireNonNullElse(chatResponse.finishReason(), "unknown") // Ollama might not populate this field 43 | ); 44 | } 45 | 46 | } 47 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.UserMessage; 5 | import dev.langchain4j.model.chat.ChatLanguageModel; 6 | import dev.langchain4j.model.output.Response; 7 | import org.springframework.stereotype.Service; 8 | 9 | @Service 10 | class ChatService { 11 | 12 | private final ChatLanguageModel chatLanguageModel; 13 | 14 | 
ChatService(ChatLanguageModel chatLanguageModel) { 15 | this.chatLanguageModel = chatLanguageModel; 16 | } 17 | 18 | String chatWithText(String message) { 19 | return chatLanguageModel.generate(message); 20 | } 21 | 22 | Response chatWithPrompt(String message) { 23 | return chatLanguageModel.generate(new UserMessage(message)); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/main/java/com/thomasvitale/ai/langchain4j/PromptsBasicsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class PromptsBasicsOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsBasicsOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | chat: 9 | model: mistral 10 | options: 11 | temperature: 0.7 12 | client: 13 | log-requests: true 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/test/java/com/thomasvitale/ai/langchain4j/PromptsBasicsOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsBasicsOllamaApplicationTests { 8 | 9 | @Test 10 | void 
contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestPromptsBasicsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestPromptsBasicsOllamaApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(PromptsBasicsOllamaApplication::main).with(TestPromptsBasicsOllamaApplication.class).run(args); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Basic: OpenAI 2 | 3 | Prompting using simple text with LLMs via OpenAI. 4 | 5 | ## Running the application 6 | 7 | The application relies on an OpenAI API for providing LLMs. 8 | 9 | ### When using OpenAI 10 | 11 | First, make sure you have an OpenAI account. 12 | Then, define an environment variable with the OpenAI API Key associated to your OpenAI account as the value. 
13 | 14 | ```shell 15 | export OPENAI_API_KEY= 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ## Calling the application 25 | 26 | You can now call the application that will use OpenAI and _gpt-3.5-turbo_ to generate an answer to your questions. 27 | This example uses [httpie](https://httpie.io) to send HTTP requests. 28 | 29 | ```shell 30 | http --raw "What is the capital of Italy?" :8080/ai/chat/simple 31 | ``` 32 | 33 | ```shell 34 | http --raw "What is the capital of Italy?" :8080/ai/chat/prompt 35 | ``` 36 | 37 | ```shell 38 | http --raw "What is the capital of Italy?" :8080/ai/chat/full 39 | ``` 40 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 | 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package
com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.util.Map; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/simple") 19 | String chatWithText(@RequestBody String input) { 20 | return chatService.chatWithText(input); 21 | } 22 | 23 | @PostMapping("/ai/chat/prompt") 24 | String chatWithPrompt(@RequestBody String input) { 25 | return chatService.chatWithPrompt(input).content().text(); 26 | } 27 | 28 | @PostMapping("/ai/chat/full") 29 | Map chatWithPromptAndFullResponse(@RequestBody String message) { 30 | var chatResponse = chatService.chatWithPrompt(message); 31 | return Map.of( 32 | "content", Map.of( 33 | "message", chatResponse.content().text(), 34 | "type", chatResponse.content().type() 35 | ), 36 | "tokenUsage", Map.of( 37 | "inputTokenCount", chatResponse.tokenUsage().inputTokenCount(), 38 | "outputTokenCount", chatResponse.tokenUsage().outputTokenCount(), 39 | "totalTokenCount", chatResponse.tokenUsage().totalTokenCount() 40 | ), 41 | "finishReason", chatResponse.finishReason() 42 | ); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.UserMessage; 5 | import dev.langchain4j.model.chat.ChatLanguageModel; 6 | import dev.langchain4j.model.output.Response; 7 | import org.springframework.stereotype.Service; 8 | 9 | 
@Service 10 | class ChatService { 11 | 12 | private final ChatLanguageModel chatLanguageModel; 13 | 14 | ChatService(ChatLanguageModel chatLanguageModel) { 15 | this.chatLanguageModel = chatLanguageModel; 16 | } 17 | 18 | String chatWithText(String message) { 19 | return chatLanguageModel.generate(message); 20 | } 21 | 22 | Response chatWithPrompt(String message) { 23 | return chatLanguageModel.generate(new UserMessage(message)); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/src/main/java/com/thomasvitale/ai/langchain4j/PromptsBasicsApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class PromptsBasicsApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsBasicsApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | chat: 12 | options: 13 | model: gpt-3.5-turbo 14 | temperature: 0.7 15 | -------------------------------------------------------------------------------- /02-prompts/prompts-basics-openai/src/test/java/com/thomasvitale/ai/langchain4j/PromptsBasicsApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import 
org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsBasicsApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Messages: Ollama 2 | 3 | Prompting using structured messages and roles with LLMs via Ollama. 4 | 5 | ## Running the application 6 | 7 | The application relies on Ollama for providing LLMs. You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 8 | 9 | ### When using Ollama as a native application 10 | 11 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 12 | Then, use Ollama to run the _mistral_ large language model. 13 | 14 | ```shell 15 | ollama run mistral 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ### When using Ollama as a dev service with Docker Compose 25 | 26 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 27 | To enable that, uncomment this dependency in the `build.gradle` file. 28 | 29 | ```groovy 30 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 31 | ``` 32 | 33 | Then, run the Spring Boot application. 34 | 35 | ```shell 36 | ./gradlew bootRun 37 | ``` 38 | 39 | ### When using Ollama as a dev service with Testcontainers 40 | 41 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 
42 | 43 | ```shell 44 | ./gradlew bootTestRun 45 | ``` 46 | 47 | ## Calling the application 48 | 49 | You can now call the application that will use Ollama and mistral to generate an answer to your questions. 50 | This example uses [httpie](https://httpie.io) to send HTTP requests. 51 | 52 | ```shell 53 | http --raw "What is the capital of Italy?" :8080/ai/chat/single 54 | ``` 55 | 56 | ```shell 57 | http --raw "What is the capital of Italy?" :8080/ai/chat/multiple 58 | ``` 59 | 60 | ```shell 61 | http --raw "What is the capital of Italy?" :8080/ai/chat/external 62 | ``` 63 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- 
/02-prompts/prompts-messages-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.io.IOException; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/single") 19 | String chatWithSingleMessage(@RequestBody String input) { 20 | return chatService.chatWithSingleMessage(input).text(); 21 | } 22 | 23 | @PostMapping("/ai/chat/multiple") 24 | String chatWithMultipleMessages(@RequestBody String input) { 25 | return chatService.chatWithMultipleMessages(input).text(); 26 | } 27 | 28 | @PostMapping("/ai/chat/external") 29 | String chatWithExternalMessage(@RequestBody String input) throws IOException { 30 | return chatService.chatWithExternalMessage(input).text(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.SystemMessage; 5 | import dev.langchain4j.data.message.UserMessage; 6 | import dev.langchain4j.model.chat.ChatLanguageModel; 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.core.io.Resource; 9 | import org.springframework.stereotype.Service; 10 | 11 | import java.io.IOException; 12 | import java.nio.charset.Charset; 13 
| import java.util.List; 14 | 15 | @Service 16 | class ChatService { 17 | 18 | private final ChatLanguageModel chatLanguageModel; 19 | 20 | private final Resource systemMessageResource; 21 | 22 | ChatService(ChatLanguageModel chatLanguageModel, @Value("classpath:/prompts/system-message.st") Resource systemMessageResource) { 23 | this.chatLanguageModel = chatLanguageModel; 24 | this.systemMessageResource = systemMessageResource; 25 | } 26 | 27 | AiMessage chatWithSingleMessage(String message) { 28 | var userMessage = new UserMessage(message); 29 | var chatResponse = chatLanguageModel.generate(userMessage); 30 | return chatResponse.content(); 31 | } 32 | 33 | AiMessage chatWithMultipleMessages(String message) { 34 | var systemMessage = new SystemMessage(""" 35 | You are a helpful and polite assistant. 36 | Answer in one sentence. 37 | """); 38 | var userMessage = new UserMessage(message); 39 | var prompt = List.of(systemMessage, userMessage); 40 | var chatResponse = chatLanguageModel.generate(prompt); 41 | return chatResponse.content(); 42 | } 43 | 44 | AiMessage chatWithExternalMessage(String message) throws IOException { 45 | var systemMessage = new SystemMessage(systemMessageResource.getContentAsString(Charset.defaultCharset())); 46 | var userMessage = new UserMessage(message); 47 | var prompt = List.of(systemMessage, userMessage); 48 | var chatResponse = chatLanguageModel.generate(prompt); 49 | return chatResponse.content(); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/main/java/com/thomasvitale/ai/langchain4j/PromptsMessagesOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class
PromptsMessagesOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsMessagesOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | chat: 9 | model: mistral 10 | options: 11 | temperature: 0.7 12 | client: 13 | log-requests: true 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/main/resources/prompts/system-message.st: -------------------------------------------------------------------------------- 1 | You are a funny and polite assistant. 2 | Answer in one sentence. -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/test/java/com/thomasvitale/ai/langchain4j/PromptsMessagesOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsMessagesOllamaApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestPromptsMessagesOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import 
org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestPromptsMessagesOllamaApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(PromptsMessagesOllamaApplication::main).with(TestPromptsMessagesOllamaApplication.class).run(args); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Messages: OpenAI 2 | 3 | Prompting using structured messages and roles with LLMs via OpenAI. 4 | 5 | ## Running the application 6 | 7 | The application relies on an OpenAI API for providing LLMs. 8 | 9 | ### When using OpenAI 10 | 11 | First, make sure you have an OpenAI account. 12 | Then, define an environment variable with the OpenAI API Key associated to your OpenAI account as the value. 13 | 14 | ```shell 15 | export OPENAI_API_KEY= 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ## Calling the application 25 | 26 | You can now call the application that will use OpenAI and _gpt-3.5-turbo_ to generate an answer to your questions. 27 | This example uses [httpie](https://httpie.io) to send HTTP requests. 28 | 29 | ```shell 30 | http --raw "What is the capital of Italy?"
:8080/ai/chat/single 31 | ``` 32 | 33 | ```shell 34 | http --raw "What is the capital of Italy?" :8080/ai/chat/multiple 35 | ``` 36 | 37 | ```shell 38 | http --raw "What is the capital of Italy?" :8080/ai/chat/external 39 | ``` 40 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 | 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.io.IOException; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | 
@PostMapping("/ai/chat/single") 19 | String chatWithSingleMessage(@RequestBody String input) { 20 | return chatService.chatWithSingleMessage(input).text(); 21 | } 22 | 23 | @PostMapping("/ai/chat/multiple") 24 | String chatWithMultipleMessages(@RequestBody String input) { 25 | return chatService.chatWithMultipleMessages(input).text(); 26 | } 27 | 28 | @PostMapping("/ai/chat/external") 29 | String chatWithExternalMessage(@RequestBody String input) throws IOException { 30 | return chatService.chatWithExternalMessage(input).text(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.SystemMessage; 5 | import dev.langchain4j.data.message.UserMessage; 6 | import dev.langchain4j.model.chat.ChatLanguageModel; 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.core.io.Resource; 9 | import org.springframework.stereotype.Service; 10 | 11 | import java.io.IOException; 12 | import java.nio.charset.Charset; 13 | import java.util.List; 14 | 15 | @Service 16 | class ChatService { 17 | 18 | private final ChatLanguageModel chatLanguageModel; 19 | 20 | private final Resource systemMessageResource; 21 | 22 | ChatService(ChatLanguageModel chatLanguageModel, @Value("classpath:/prompts/system-message.st") Resource systemMessageResource) { 23 | this.chatLanguageModel = chatLanguageModel; 24 | this.systemMessageResource = systemMessageResource; 25 | } 26 | 27 | AiMessage chatWithSingleMessage(String message) { 28 | var userMessage = new UserMessage(message); 29 | var chatResponse = chatLanguageModel.generate(userMessage); 30 | return chatResponse.content(); 31 | } 32 | 33 | 
AiMessage chatWithMultipleMessages(String message) { 34 | var systemMessage = new SystemMessage(""" 35 | You are a helpful and polite assistant. 36 | Answer in one sentence. 37 | """); 38 | var userMessage = new UserMessage(message); 39 | var prompt = List.of(systemMessage, userMessage); 40 | var chatResponse = chatLanguageModel.generate(prompt); 41 | return chatResponse.content(); 42 | } 43 | 44 | AiMessage chatWithExternalMessage(String message) throws IOException { 45 | var systemMessage = new SystemMessage(systemMessageResource.getContentAsString(Charset.defaultCharset())); 46 | var userMessage = new UserMessage(message); 47 | var prompt = List.of(systemMessage, userMessage); 48 | var chatResponse = chatLanguageModel.generate(prompt); 49 | return chatResponse.content(); 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/main/java/com/thomasvitale/ai/langchain4j/PromptsMessagesApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class PromptsMessagesApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsMessagesApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | chat: 12 | options: 13 | model: gpt-3.5-turbo 14 | temperature: 0.7 15 |
-------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/main/resources/prompts/system-message.st: -------------------------------------------------------------------------------- 1 | You are a funny and polite assistant. 2 | Answer in one sentence. -------------------------------------------------------------------------------- /02-prompts/prompts-messages-openai/src/test/java/com/thomasvitale/ai/langchain4j/PromptsMessagesApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsMessagesApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Templates: Ollama 2 | 3 | Prompting using templates with LLMs via Ollama. 4 | 5 | ## Running the application 6 | 7 | The application relies on Ollama for providing LLMs. You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 8 | 9 | ### When using Ollama as a native application 10 | 11 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 12 | Then, use Ollama to run the _mistral_ large language model. 13 | 14 | ```shell 15 | ollama run mistral 16 | ``` 17 | 18 | Finally, run the Spring Boot application.
19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ### When using Ollama as a dev service with Docker Compose 25 | 26 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 27 | To enable that, uncomment this dependency in the `build.gradle` file. 28 | 29 | ```groovy 30 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 31 | ``` 32 | 33 | Then, run the Spring Boot application. 34 | 35 | ```shell 36 | ./gradlew bootRun 37 | ``` 38 | 39 | ### When using Ollama as a dev service with Testcontainers 40 | 41 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 42 | 43 | ```shell 44 | ./gradlew bootTestRun 45 | ``` 46 | 47 | ## Calling the application 48 | 49 | You can now call the application that will use Ollama and mistral to generate an answer to your questions. 50 | This example uses [httpie](https://httpie.io) to send HTTP requests. 51 | 52 | ```shell 53 | http :8080/ai/chat/user genre="rock" instrument="piano" 54 | ``` 55 | 56 | ```shell 57 | http --raw "What is the capital of Italy?" :8080/ai/chat/system 58 | ``` 59 | 60 | ```shell 61 | http --raw "What is the capital of Italy?" 
:8080/ai/chat/external 62 | ``` 63 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.io.IOException; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | 
ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/user") 19 | String chatWithUserMessageTemplate(@RequestBody MusicQuestion question) { 20 | return chatService.chatWithUserMessageTemplate(question).text(); 21 | } 22 | 23 | @PostMapping("/ai/chat/system") 24 | String chatWithSystemMessageTemplate(@RequestBody String question) { 25 | return chatService.chatWithSystemMessageTemplate(question).text(); 26 | } 27 | 28 | @PostMapping("/ai/chat/external") 29 | String chatWithSystemMessageTemplateExternal(@RequestBody String question) throws IOException { 30 | return chatService.chatWithSystemMessageTemplateExternal(question).text(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.UserMessage; 5 | import dev.langchain4j.model.chat.ChatLanguageModel; 6 | import dev.langchain4j.model.input.PromptTemplate; 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.core.io.Resource; 9 | import org.springframework.stereotype.Service; 10 | 11 | import java.io.IOException; 12 | import java.nio.charset.Charset; 13 | import java.util.List; 14 | import java.util.Map; 15 | import java.util.Random; 16 | 17 | @Service 18 | class ChatService { 19 | 20 | private final ChatLanguageModel chatLanguageModel; 21 | 22 | private final Resource systemMessageResource; 23 | 24 | ChatService(ChatLanguageModel chatLanguageModel, @Value("classpath:/prompts/system-message.st") Resource systemMessageResource) { 25 | this.chatLanguageModel = chatLanguageModel; 26 | this.systemMessageResource = systemMessageResource; 27 | } 28 | 29 
| AiMessage chatWithUserMessageTemplate(MusicQuestion question) { 30 | var userPromptTemplate = PromptTemplate.from(""" 31 | Tell me name and band of three musicians famous for playing in a {{genre}} band. 32 | Consider only the musicians that play the {{instrument}} in that band. 33 | """); 34 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre()); 35 | var userMessage = userPromptTemplate.apply(model).toUserMessage(); 36 | 37 | var chatResponse = chatLanguageModel.generate(userMessage); 38 | return chatResponse.content(); 39 | } 40 | 41 | AiMessage chatWithSystemMessageTemplate(String message) { 42 | var systemPromptTemplate = PromptTemplate.from(""" 43 | You are a helpful assistant that always replies starting with {{greeting}}. 44 | """); 45 | Map model = Map.of("greeting", randomGreeting()); 46 | var systemMessage = systemPromptTemplate.apply(model).toSystemMessage(); 47 | 48 | var userMessage = new UserMessage(message); 49 | 50 | var prompt = List.of(systemMessage, userMessage); 51 | 52 | var chatResponse = chatLanguageModel.generate(prompt); 53 | return chatResponse.content(); 54 | } 55 | 56 | AiMessage chatWithSystemMessageTemplateExternal(String message) throws IOException { 57 | var systemPromptTemplate = PromptTemplate.from(systemMessageResource.getContentAsString(Charset.defaultCharset())); 58 | Map model = Map.of("greeting", randomGreeting()); 59 | var systemMessage = systemPromptTemplate.apply(model).toSystemMessage(); 60 | 61 | var userMessage = new UserMessage(message); 62 | 63 | var prompt = List.of(systemMessage, userMessage); 64 | 65 | var chatResponse = chatLanguageModel.generate(prompt); 66 | return chatResponse.content(); 67 | } 68 | 69 | private String randomGreeting() { 70 | var names = List.of("Howdy", "Ahoy", "Well, well, well"); 71 | return names.get(new Random().nextInt(names.size())); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- 
/02-prompts/prompts-templates-ollama/src/main/java/com/thomasvitale/ai/langchain4j/MusicQuestion.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | record MusicQuestion(String genre, String instrument){} 4 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/main/java/com/thomasvitale/ai/langchain4j/PromptsTemplatesOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class PromptsTemplatesOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsTemplatesOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | chat: 9 | model: mistral 10 | options: 11 | temperature: 0.7 12 | client: 13 | log-requests: true 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/main/resources/prompts/system-message.st: -------------------------------------------------------------------------------- 1 | You are a helpful assistant that always replies starting with {{greeting}}. 
-------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/test/java/com/thomasvitale/ai/langchain4j/PromptsTemplatesOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsTemplatesOllamaApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestPromptsTemplatesOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestPromptsTemplatesOllamaApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(PromptsTemplatesOllamaApplication::main).with(TestPromptsTemplatesOllamaApplication.class).run(args); 24 | } 25 | 26 | } 27 | 
-------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/README.md: -------------------------------------------------------------------------------- 1 | # Prompts Templates: OpenAI 2 | 3 | Prompting using templates with LLMs via OpenAI. 4 | 5 | ## Running the application 6 | 7 | The application relies on an OpenAI API for providing LLMs. 8 | 9 | ### When using OpenAI 10 | 11 | First, make sure you have an OpenAI account. 12 | Then, define an environment variable with the OpenAI API Key associated to your OpenAI account as the value. 13 | 14 | ```shell 15 | export OPENAI_API_KEY= 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ## Calling the application 25 | 26 | You can now call the application that will use OpenAI and _gpt-3.5-turbo_ to generate an answer to your questions. 27 | This example uses [httpie](https://httpie.io) to send HTTP requests. 28 | 29 | ```shell 30 | http :8080/ai/chat/user genre="rock" instrument="piano" 31 | ``` 32 | 33 | ```shell 34 | http --raw "What is the capital of Italy?" :8080/ai/chat/system 35 | ``` 36 | 37 | ```shell 38 | http --raw "What is the capital of Italy?" 
:8080/ai/chat/external 39 | ``` 40 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 | 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.io.IOException; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/user") 19 | String chatWithUserMessageTemplate(@RequestBody MusicQuestion question) { 20 | return chatService.chatWithUserMessageTemplate(question).text(); 21 | } 22 | 23 | 
@PostMapping("/ai/chat/system") 24 | String chatWithSystemMessageTemplate(@RequestBody String question) { 25 | return chatService.chatWithSystemMessageTemplate(question).text(); 26 | } 27 | 28 | @PostMapping("/ai/chat/external") 29 | String chatWithSystemMessageTemplateExternal(@RequestBody String question) throws IOException { 30 | return chatService.chatWithSystemMessageTemplateExternal(question).text(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.data.message.AiMessage; 4 | import dev.langchain4j.data.message.UserMessage; 5 | import dev.langchain4j.model.chat.ChatLanguageModel; 6 | import dev.langchain4j.model.input.PromptTemplate; 7 | import org.springframework.beans.factory.annotation.Value; 8 | import org.springframework.core.io.Resource; 9 | import org.springframework.stereotype.Service; 10 | 11 | import java.io.IOException; 12 | import java.nio.charset.Charset; 13 | import java.util.List; 14 | import java.util.Map; 15 | import java.util.Random; 16 | 17 | @Service 18 | class ChatService { 19 | 20 | private final ChatLanguageModel chatLanguageModel; 21 | 22 | private final Resource systemMessageResource; 23 | 24 | ChatService(ChatLanguageModel chatLanguageModel, @Value("classpath:/prompts/system-message.st") Resource systemMessageResource) { 25 | this.chatLanguageModel = chatLanguageModel; 26 | this.systemMessageResource = systemMessageResource; 27 | } 28 | 29 | AiMessage chatWithUserMessageTemplate(MusicQuestion question) { 30 | var userPromptTemplate = PromptTemplate.from(""" 31 | Tell me name and band of three musicians famous for playing in a {{genre}} band. 32 | Consider only the musicians that play the {{instrument}} in that band. 
33 | """); 34 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre()); 35 | var userMessage = userPromptTemplate.apply(model).toUserMessage(); 36 | 37 | var chatResponse = chatLanguageModel.generate(userMessage); 38 | return chatResponse.content(); 39 | } 40 | 41 | AiMessage chatWithSystemMessageTemplate(String message) { 42 | var systemPromptTemplate = PromptTemplate.from(""" 43 | You are a helpful assistant that always replies starting with {{greeting}}. 44 | """); 45 | Map model = Map.of("greeting", randomGreeting()); 46 | var systemMessage = systemPromptTemplate.apply(model).toSystemMessage(); 47 | 48 | var userMessage = new UserMessage(message); 49 | 50 | var prompt = List.of(systemMessage, userMessage); 51 | 52 | var chatResponse = chatLanguageModel.generate(prompt); 53 | return chatResponse.content(); 54 | } 55 | 56 | AiMessage chatWithSystemMessageTemplateExternal(String message) throws IOException { 57 | var systemPromptTemplate = PromptTemplate.from(systemMessageResource.getContentAsString(Charset.defaultCharset())); 58 | Map model = Map.of("greeting", randomGreeting()); 59 | var systemMessage = systemPromptTemplate.apply(model).toSystemMessage(); 60 | 61 | var userMessage = new UserMessage(message); 62 | 63 | var prompt = List.of(systemMessage, userMessage); 64 | 65 | var chatResponse = chatLanguageModel.generate(prompt); 66 | return chatResponse.content(); 67 | } 68 | 69 | private String randomGreeting() { 70 | var names = List.of("Howdy", "Ahoy", "Well, well, well"); 71 | return names.get(new Random().nextInt(names.size())); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/java/com/thomasvitale/ai/langchain4j/MusicQuestion.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | record MusicQuestion(String genre, String instrument){} 4 | 
-------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/java/com/thomasvitale/ai/langchain4j/PromptsTemplatesApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class PromptsTemplatesApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(PromptsTemplatesApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | chat: 12 | options: 13 | model: gpt-3.5-turbo 14 | temperature: 0.7 15 | -------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/main/resources/prompts/system-message.st: -------------------------------------------------------------------------------- 1 | You are a helpful assistant that always replies starting with {{greeting}}. 
-------------------------------------------------------------------------------- /02-prompts/prompts-templates-openai/src/test/java/com/thomasvitale/ai/langchain4j/PromptsTemplatesApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class PromptsTemplatesApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Output Parsers: Ollama 2 | 3 | Parsing the LLM output as structured objects (Beans, Lists) via Ollama. 4 | 5 | ## Running the application 6 | 7 | The application relies on Ollama for providing LLMs. You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 8 | 9 | ### When using Ollama as a native application 10 | 11 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 12 | Then, use Ollama to run the _mistral_ large language model. 13 | 14 | ```shell 15 | ollama run mistral 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ### When using Ollama as a dev service with Docker Compose 25 | 26 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 27 | To enable that, uncomment this dependency in the `build.gradle` file. 
28 | 29 | ```groovy 30 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 31 | ``` 32 | 33 | Then, run the Spring Boot application. 34 | 35 | ```shell 36 | ./gradlew bootRun 37 | ``` 38 | 39 | ### When using Ollama as a dev service with Testcontainers 40 | 41 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 42 | 43 | ```shell 44 | ./gradlew bootTestRun 45 | ``` 46 | 47 | ## Calling the application 48 | 49 | You can now call the application that will use Ollama and mistral to generate an answer to your questions. 50 | This example uses [httpie](https://httpie.io) to send HTTP requests. 51 | 52 | ```shell 53 | http :8080/ai/chat/bean genre="rock" instrument="piano" 54 | ``` 55 | 56 | ```shell 57 | http :8080/ai/chat/list genre="rock" instrument="piano" 58 | ``` 59 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 
30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ArtistInfo.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | public record ArtistInfo(String name, String band) {} 4 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.util.List; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/bean") 19 | ArtistInfo chatWithBeanOutput(@RequestBody MusicQuestion question) { 20 | return chatService.chatWithBeanOutput(question); 21 | } 22 | 23 | @PostMapping("/ai/chat/list") 24 | List chatWithListOutput(@RequestBody MusicQuestion question) { 25 | return chatService.chatWithListOutput(question); 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: 
-------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.model.chat.ChatLanguageModel; 4 | import dev.langchain4j.model.input.PromptTemplate; 5 | import dev.langchain4j.service.ServiceOutputParser; 6 | import org.springframework.stereotype.Service; 7 | 8 | import java.util.List; 9 | import java.util.Map; 10 | 11 | @Service 12 | class ChatService { 13 | 14 | private final ChatLanguageModel chatLanguageModel; 15 | 16 | ChatService(ChatLanguageModel chatLanguageModel) { 17 | this.chatLanguageModel = chatLanguageModel; 18 | } 19 | 20 | ArtistInfo chatWithBeanOutput(MusicQuestion question) { 21 | var formatInstructions = ServiceOutputParser.outputFormatInstructions(ArtistInfo.class); 22 | 23 | var userPromptTemplate = new PromptTemplate(""" 24 | Tell me name and band of one musician famous for playing in a {{genre}} band. 25 | Consider only the musicians that play the {{instrument}} in that band. 26 | {{format}} 27 | """); 28 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre(), "format", formatInstructions); 29 | var prompt = userPromptTemplate.apply(model).toUserMessage(); 30 | 31 | var chatResponse = chatLanguageModel.generate(prompt); 32 | return (ArtistInfo) ServiceOutputParser.parse(chatResponse, ArtistInfo.class); 33 | } 34 | 35 | List chatWithListOutput(MusicQuestion question) { 36 | var formatInstructions = ServiceOutputParser.outputFormatInstructions(List.class); 37 | 38 | var userPromptTemplate = new PromptTemplate(""" 39 | Tell me names of three musicians famous for playing in a {{genre}} band. 40 | Consider only the musicians that play the {{instrument}} in that band. 
41 | {{format}} 42 | """); 43 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre(), "format", formatInstructions); 44 | var prompt = userPromptTemplate.apply(model).toUserMessage(); 45 | 46 | var chatResponse = chatLanguageModel.generate(prompt); 47 | return (List) ServiceOutputParser.parse(chatResponse, List.class); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/java/com/thomasvitale/ai/langchain4j/MusicQuestion.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | record MusicQuestion(String genre, String instrument){} 4 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/java/com/thomasvitale/ai/langchain4j/OutputParsersBeanOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class OutputParsersBeanOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(OutputParsersBeanOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | chat: 9 | model: mistral 10 | options: 11 | temperature: 0.7 12 | client: 13 | log-requests: true 14 | -------------------------------------------------------------------------------- 
/03-output-parsers/output-parsers-bean-ollama/src/test/java/com/thomasvitale/ai/langchain4j/OutputParsersBeanOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class OutputParsersBeanOllamaApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestOutputParsersBeanOpenAiApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestOutputParsersBeanOpenAiApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(OutputParsersBeanOllamaApplication::main).with(TestOutputParsersBeanOpenAiApplication.class).run(args); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- 
/03-output-parsers/output-parsers-bean-openai/README.md: -------------------------------------------------------------------------------- 1 | # Output Parsers: OpenAI 2 | 3 | Parsing the LLM output as structured objects (Beans, Lists) via OpenAI. 4 | 5 | ## Running the application 6 | 7 | The application relies on an OpenAI API for providing LLMs. 8 | 9 | ### When using OpenAI 10 | 11 | First, make sure you have an OpenAI account. 12 | Then, define an environment variable with the OpenAI API Key associated to your OpenAI account as the value. 13 | 14 | ```shell 15 | export OPENAI_API_KEY= 16 | ``` 17 | 18 | Finally, run the Spring Boot application. 19 | 20 | ```shell 21 | ./gradlew bootRun 22 | ``` 23 | 24 | ## Calling the application 25 | 26 | You can now call the application that will use OpenAI and _gpt-3.5-turbo_ to generate an answer to your questions. 27 | This example uses [httpie](https://httpie.io) to send HTTP requests. 28 | 29 | ```shell 30 | http :8080/ai/chat/bean genre="rock" instrument="piano" 31 | ``` 32 | 33 | ```shell 34 | http :8080/ai/chat/list genre="rock" instrument="piano" 35 | ``` 36 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 
| 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/java/com/thomasvitale/ai/langchain4j/ArtistInfo.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | public record ArtistInfo(String name, String band) {} 4 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.web.bind.annotation.PostMapping; 4 | import org.springframework.web.bind.annotation.RequestBody; 5 | import org.springframework.web.bind.annotation.RestController; 6 | 7 | import java.util.List; 8 | 9 | @RestController 10 | class ChatController { 11 | 12 | private final ChatService chatService; 13 | 14 | ChatController(ChatService chatService) { 15 | this.chatService = chatService; 16 | } 17 | 18 | @PostMapping("/ai/chat/bean") 19 | ArtistInfo chatWithBeanOutput(@RequestBody MusicQuestion question) { 20 | return chatService.chatWithBeanOutput(question); 21 | } 22 | 23 | @PostMapping("/ai/chat/list") 24 | List chatWithListOutput(@RequestBody MusicQuestion question) { 25 | return chatService.chatWithListOutput(question); 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/java/com/thomasvitale/ai/langchain4j/ChatService.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import 
dev.langchain4j.model.chat.ChatLanguageModel; 4 | import dev.langchain4j.model.input.PromptTemplate; 5 | import dev.langchain4j.service.ServiceOutputParser; 6 | import org.springframework.stereotype.Service; 7 | 8 | import java.util.List; 9 | import java.util.Map; 10 | 11 | @Service 12 | class ChatService { 13 | 14 | private final ChatLanguageModel chatLanguageModel; 15 | 16 | ChatService(ChatLanguageModel chatLanguageModel) { 17 | this.chatLanguageModel = chatLanguageModel; 18 | } 19 | 20 | ArtistInfo chatWithBeanOutput(MusicQuestion question) { 21 | var formatInstructions = ServiceOutputParser.outputFormatInstructions(ArtistInfo.class); 22 | 23 | var userPromptTemplate = new PromptTemplate(""" 24 | Tell me name and band of one musician famous for playing in a {{genre}} band. 25 | Consider only the musicians that play the {{instrument}} in that band. 26 | {{format}} 27 | """); 28 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre(), "format", formatInstructions); 29 | var prompt = userPromptTemplate.apply(model).toUserMessage(); 30 | 31 | var chatResponse = chatLanguageModel.generate(prompt); 32 | return (ArtistInfo) ServiceOutputParser.parse(chatResponse, ArtistInfo.class); 33 | } 34 | 35 | List chatWithListOutput(MusicQuestion question) { 36 | var formatInstructions = ServiceOutputParser.outputFormatInstructions(List.class); 37 | 38 | var userPromptTemplate = new PromptTemplate(""" 39 | Tell me names of three musicians famous for playing in a {{genre}} band. 40 | Consider only the musicians that play the {{instrument}} in that band. 
41 | {{format}} 42 | """); 43 | Map model = Map.of("instrument", question.instrument(), "genre", question.genre(), "format", formatInstructions); 44 | var prompt = userPromptTemplate.apply(model).toUserMessage(); 45 | 46 | var chatResponse = chatLanguageModel.generate(prompt); 47 | return (List) ServiceOutputParser.parse(chatResponse, List.class); 48 | } 49 | 50 | } 51 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/java/com/thomasvitale/ai/langchain4j/MusicQuestion.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | record MusicQuestion(String genre, String instrument){} 4 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/java/com/thomasvitale/ai/langchain4j/OutputParsersBeanOpenAiApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class OutputParsersBeanOpenAiApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(OutputParsersBeanOpenAiApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | chat: 12 | options: 13 | model: gpt-3.5-turbo 14 | temperature: 0.7 15 | 
-------------------------------------------------------------------------------- /03-output-parsers/output-parsers-bean-openai/src/test/java/com/thomasvitale/ai/langchain4j/OutputParsersBeanOpenAiApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class OutputParsersBeanOpenAiApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/README.md: -------------------------------------------------------------------------------- 1 | # Embedding Models: Ollama 2 | 3 | Vector transformation (embeddings) with LLMs via Ollama. 4 | 5 | ## Description 6 | 7 | LangChain4j provides an `EmbeddingModel` abstraction for integrating with LLMs via several providers, including Ollama. 8 | 9 | When using the _LangChain4j Ollama Spring Boot Starter_, an `EmbeddingModel` object is autoconfigured for you to use Ollama. 10 | By default, the _mistral_ model is used. 11 | 12 | ```java 13 | @RestController 14 | class EmbeddingController { 15 | private final EmbeddingModel embeddingModel; 16 | 17 | EmbeddingController(EmbeddingModel embeddingModel) { 18 | this.embeddingModel = embeddingModel; 19 | } 20 | 21 | @GetMapping("/ai/embed") 22 | String embed(@RequestParam(defaultValue = "And Gandalf yelled: 'You shall not pass!'") String message) { 23 | var embeddings = embeddingModel.embed(message); 24 | return "Size of the embedding vector: " + embeddings.content().vectorAsList().size(); 25 | } 26 | } 27 | ``` 28 | 29 | ## Running the application 30 | 31 | The application relies on Ollama for providing LLMs. 
You can run the native Ollama app locally on your laptop (macOS or Linux), or rely on the Docker Compose/Testcontainers support in Spring Boot to spin up an Ollama service automatically at startup time. 32 | 33 | ### When using Ollama as a native application 34 | 35 | First, make sure you have [Ollama](https://ollama.ai) installed on your laptop (macOS or Linux). 36 | Then, use Ollama to run the _mistral_ large language model. 37 | 38 | ```shell 39 | ollama run mistral 40 | ``` 41 | 42 | Finally, run the Spring Boot application. 43 | 44 | ```shell 45 | ./gradlew bootRun 46 | ``` 47 | 48 | ### When using Ollama as a dev service with Docker Compose 49 | 50 | The application can optionally rely on the native Docker Compose support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 51 | To enable that, uncomment this dependency in the `build.gradle` file. 52 | 53 | ```groovy 54 | developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 55 | ``` 56 | 57 | Then, run the Spring Boot application. 58 | 59 | ```shell 60 | ./gradlew bootRun 61 | ``` 62 | 63 | ### When using Ollama as a dev service with Testcontainers 64 | 65 | The application relies on the native Testcontainers support in Spring Boot to spin up an Ollama service with a _mistral_ model at startup time. 66 | 67 | ```shell 68 | ./gradlew bootTestRun 69 | ``` 70 | 71 | ## Calling the application 72 | 73 | You can now call the application that will use Ollama and _mistral_ to generate a vector representation (embeddings) of a default text. 74 | This example uses [httpie](https://httpie.io) to send HTTP requests. 75 | 76 | ```shell 77 | http :8080/ai/embed 78 | ``` 79 | 80 | Try passing your custom prompt and check the result. 
81 | 82 | ```shell 83 | http :8080/ai/embed message=="The capital of Italy is Rome" 84 | ``` 85 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-ollama-spring-boot-starter:${springLangchain4jVersion}" 25 | //developmentOnly "io.thomasvitale.langchain4j:langchain4j-spring-boot-docker-compose:${springLangchain4jVersion}" 26 | 27 | testAndDevelopmentOnly 'org.springframework.boot:spring-boot-devtools' 28 | 29 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 30 | testImplementation "io.thomasvitale.langchain4j:langchain4j-spring-boot-testcontainers:${springLangchain4jVersion}" 31 | testImplementation 'org.testcontainers:ollama' 32 | } 33 | 34 | tasks.named('test') { 35 | useJUnitPlatform() 36 | } 37 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/src/main/java/com/thomasvitale/ai/langchain4j/EmbeddingController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.model.embedding.EmbeddingModel; 4 | import org.springframework.web.bind.annotation.GetMapping; 5 | import org.springframework.web.bind.annotation.RequestParam; 6 | import org.springframework.web.bind.annotation.RestController; 7 | 8 | 
@RestController 9 | class EmbeddingController { 10 | 11 | private final EmbeddingModel embeddingModel; 12 | 13 | EmbeddingController(EmbeddingModel embeddingModel) { 14 | this.embeddingModel = embeddingModel; 15 | } 16 | 17 | @GetMapping("/ai/embed") 18 | String embed(@RequestParam(defaultValue = "And Gandalf yelled: 'You shall not pass!'") String message) { 19 | var embeddings = embeddingModel.embed(message); 20 | return "Size of the embedding vector: " + embeddings.content().vectorAsList().size(); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/src/main/java/com/thomasvitale/ai/langchain4j/EmbeddingModelsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class EmbeddingModelsOllamaApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(EmbeddingModelsOllamaApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | ollama: 8 | embedding: 9 | model: mistral 10 | client: 11 | log-requests: true 12 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/src/test/java/com/thomasvitale/ai/langchain4j/EmbeddingModelsOllamaApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import 
org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class EmbeddingModelsOllamaApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-ollama/src/test/java/com/thomasvitale/ai/langchain4j/TestEmbeddingModelsOllamaApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.devtools.restart.RestartScope; 5 | import org.springframework.boot.test.context.TestConfiguration; 6 | import org.springframework.boot.testcontainers.service.connection.ServiceConnection; 7 | import org.springframework.context.annotation.Bean; 8 | import org.testcontainers.ollama.OllamaContainer; 9 | import org.testcontainers.utility.DockerImageName; 10 | 11 | @TestConfiguration(proxyBeanMethods = false) 12 | public class TestEmbeddingModelsOllamaApplication { 13 | 14 | @Bean 15 | @RestartScope 16 | @ServiceConnection 17 | OllamaContainer ollama() { 18 | return new OllamaContainer(DockerImageName.parse("ghcr.io/thomasvitale/ollama-mistral") 19 | .asCompatibleSubstituteFor("ollama/ollama")); 20 | } 21 | 22 | public static void main(String[] args) { 23 | SpringApplication.from(EmbeddingModelsOllamaApplication::main).with(TestEmbeddingModelsOllamaApplication.class).run(args); 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/README.md: -------------------------------------------------------------------------------- 1 | # Embedding Models: OpenAI 2 | 3 | Vector transformation (embeddings) with LLMs via OpenAI. 
4 | 5 | ## Description 6 | 7 | LangChain4j provides an `EmbeddingModel` abstraction for integrating with LLMs via several providers, including OpenAI. 8 | 9 | When using the _LangChain4j OpenAI Spring Boot Starter_, an `EmbeddingModel` object is autoconfigured for you to use OpenAI. 10 | By default, the _text-embedding-ada-002_ model is used. 11 | 12 | ```java 13 | @RestController 14 | class EmbeddingController { 15 | private final EmbeddingModel embeddingModel; 16 | 17 | EmbeddingController(EmbeddingModel embeddingModel) { 18 | this.embeddingModel = embeddingModel; 19 | } 20 | 21 | @GetMapping("/ai/embed") 22 | String embed(@RequestParam(defaultValue = "And Gandalf yelled: 'You shall not pass!'") String message) { 23 | var embeddings = embeddingModel.embed(message); 24 | return "Size of the embedding vector: " + embeddings.content().vectorAsList().size(); 25 | } 26 | } 27 | ``` 28 | 29 | ## Running the application 30 | 31 | The application relies on an OpenAI API for providing LLMs. 32 | 33 | ### When using OpenAI 34 | 35 | First, make sure you have an OpenAI account. 36 | Then, define an environment variable with the OpenAI API Key associated with your OpenAI account as the value. 37 | 38 | ```shell 39 | export OPENAI_API_KEY=<your-openai-api-key> 40 | ``` 41 | 42 | Finally, run the Spring Boot application. 43 | 44 | ```shell 45 | ./gradlew bootRun 46 | ``` 47 | 48 | ## Calling the application 49 | 50 | You can now call the application that will use OpenAI and _text-embedding-ada-002_ to generate a vector representation (embeddings) of a default text. 51 | This example uses [httpie](https://httpie.io) to send HTTP requests. 52 | 53 | ```shell 54 | http :8080/ai/embed 55 | ``` 56 | 57 | Try passing your custom prompt and check the result. 
58 | 59 | ```shell 60 | http :8080/ai/embed message=="The capital of Italy is Rome" 61 | ``` 62 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'org.springframework.boot' 4 | id 'io.spring.dependency-management' 5 | } 6 | 7 | group = 'com.thomasvitale' 8 | version = '0.0.1-SNAPSHOT' 9 | 10 | java { 11 | toolchain { 12 | languageVersion = JavaLanguageVersion.of(21) 13 | } 14 | } 15 | 16 | repositories { 17 | mavenCentral() 18 | mavenLocal() 19 | } 20 | 21 | dependencies { 22 | implementation 'org.springframework.boot:spring-boot-starter-web' 23 | 24 | implementation "io.thomasvitale.langchain4j:langchain4j-openai-spring-boot-starter:${springLangchain4jVersion}" 25 | 26 | developmentOnly 'org.springframework.boot:spring-boot-devtools' 27 | 28 | testImplementation 'org.springframework.boot:spring-boot-starter-test' 29 | } 30 | 31 | tasks.named('test') { 32 | useJUnitPlatform() 33 | } 34 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/src/main/java/com/thomasvitale/ai/langchain4j/EmbeddingController.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import dev.langchain4j.model.embedding.EmbeddingModel; 4 | import org.springframework.web.bind.annotation.GetMapping; 5 | import org.springframework.web.bind.annotation.RequestParam; 6 | import org.springframework.web.bind.annotation.RestController; 7 | 8 | @RestController 9 | class EmbeddingController { 10 | 11 | private final EmbeddingModel embeddingModel; 12 | 13 | EmbeddingController(EmbeddingModel embeddingModel) { 14 | this.embeddingModel = embeddingModel; 15 | } 16 | 17 | @GetMapping("/ai/embed") 18 | String embed(@RequestParam(defaultValue = 
"And Gandalf yelled: 'You shall not pass!'") String message) { 19 | var embeddings = embeddingModel.embed(message); 20 | return "Size of the embedding vector: " + embeddings.content().vectorAsList().size(); 21 | } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/src/main/java/com/thomasvitale/ai/langchain4j/EmbeddingModelsOpenAiApplication.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class EmbeddingModelsOpenAiApplication { 8 | 9 | public static void main(String[] args) { 10 | SpringApplication.run(EmbeddingModelsOpenAiApplication.class, args); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/src/main/resources/application.yml: -------------------------------------------------------------------------------- 1 | spring: 2 | threads: 3 | virtual: 4 | enabled: true 5 | 6 | langchain4j: 7 | open-ai: 8 | client: 9 | api-key: ${OPENAI_API_KEY} 10 | log-requests: true 11 | embedding: 12 | options: 13 | model: text-embedding-ada-002 14 | -------------------------------------------------------------------------------- /04-embedding-models/embedding-models-openai/src/test/java/com/thomasvitale/ai/langchain4j/EmbeddingModelsOpenAiApplicationTests.java: -------------------------------------------------------------------------------- 1 | package com.thomasvitale.ai.langchain4j; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.springframework.boot.test.context.SpringBootTest; 5 | 6 | @SpringBootTest 7 | class EmbeddingModelsOpenAiApplicationTests { 8 | 9 | @Test 10 | void contextLoads() { 11 | } 12 | 13 | } 14 | 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LLM Applications with Java, Spring Boot, and LangChain4j 2 | 3 | Samples showing how to build Java applications powered by Generative AI and LLMs 4 | using the [LangChain4j Spring Boot](https://github.com/ThomasVitale/langchain4j-spring-boot) extension. 5 | 6 | ## Pre-Requisites 7 | 8 | * Java 17+ 9 | * Docker/Podman 10 | * [OpenAI](http://platform.openai.com) API Key (optional) 11 | * [Ollama](https://ollama.ai) (optional) 12 | 13 | ## Content 14 | 15 | ### 1. Chat Models 16 | 17 | | Project | Description | 18 | |---------------------------------------------------------------------------------------------------------------------------|---------------------------------------| 19 | | [chat-models-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/01-chat-models/chat-models-ollama) | Text generation with LLMs via Ollama. 
| 20 | | [chat-models-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/01-chat-models/chat-models-openai) | Text generation with LLMs via OpenAI. | 21 | 22 | ### 2. Prompts 23 | 24 | | Project | Description | 25 | |------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------| 26 | | [prompts-basics-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-basics-ollama) | Prompting using simple text with LLMs via Ollama. | 27 | | [prompts-basics-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-basics-openai) | Prompting using simple text with LLMs via OpenAI. | 28 | | [prompts-messages-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-messages-ollama) | Prompting using structured messages and roles with LLMs via Ollama. | 29 | | [prompts-messages-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-messages-openai) | Prompting using structured messages and roles with LLMs via OpenAI. | 30 | | [prompts-templates-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-templates-ollama) | Prompting using templates with LLMs via Ollama. | 31 | | [prompts-templates-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/02-prompts/prompts-templates-openai) | Prompting using templates with LLMs via OpenAI. | 32 | 33 | ### 3. 
Output Parsers 34 | 35 | | Project | Description | 36 | |------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------| 37 | | [output-parsers-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/03-output-parsers/output-parsers-ollama) | Parsing the LLM output as structured objects (Beans, Map, List) via Ollama. | 38 | | [output-parsers-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/03-output-parsers/output-parsers-openai) | Parsing the LLM output as structured objects (Beans, Map, List) via Open AI. | 39 | 40 | ### 4. Embedding Models 41 | 42 | | Project | Description | 43 | |--------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------| 44 | | [embedding-models-ollama](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/04-embedding-models/embedding-models-ollama) | Vector transformation (embeddings) with LLMs via Ollama. | 45 | | [embedding-models-openai](https://github.com/ThomasVitale/llm-apps-java-langchain4j/tree/main/04-embedding-models/embedding-models-openai) | Vector transformation (embeddings) with LLMs via OpenAI. | 46 | 47 | ### 5. Document Readers 48 | 49 | _Coming soon_ 50 | 51 | ### 6. Document Transformers 52 | 53 | _Coming soon_ 54 | 55 | ### 7. Document Writers 56 | 57 | _Coming soon_ 58 | 59 | ### 8. Vector Stores 60 | 61 | _Coming soon_ 62 | 63 | ### 9. Tools 64 | 65 | _Coming soon_ 66 | 67 | ### 10. 
Image Models 68 | 69 | _Coming soon_ 70 | 71 | ## References and Additional Resources 72 | 73 | * [LangChain4j](https://github.com/langchain4j) 74 | -------------------------------------------------------------------------------- /buildSrc/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'groovy-gradle-plugin' 3 | } 4 | 5 | repositories { 6 | mavenCentral() 7 | } 8 | 9 | ext { 10 | set("springBootVersion", '3.3.0') 11 | set("dependencyManagementVersion", '1.1.5') 12 | } 13 | 14 | dependencies { 15 | implementation "io.spring.gradle:dependency-management-plugin:${dependencyManagementVersion}" 16 | implementation "org.springframework.boot:spring-boot-gradle-plugin:${springBootVersion}" 17 | } 18 | -------------------------------------------------------------------------------- /compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | ollama: 3 | image: 'ghcr.io/thomasvitale/ollama-mistral' 4 | ports: 5 | - '11434' 6 | labels: 7 | org.springframework.boot.service-connection: ollama 8 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | springLangchain4jVersion=0.12.0 -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ThomasVitale/llm-apps-java-langchain4j/5044142d40167775e9e737c383100e8bf16c4f78/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | 
distributionUrl=https\://services.gradle.org/distributions/gradle-8.8-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | # 21 | # Gradle start up script for POSIX generated by Gradle. 22 | # 23 | # Important for running: 24 | # 25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 26 | # noncompliant, but you have some other compliant shell such as ksh or 27 | # bash, then to run this script, type that shell name before the whole 28 | # command line, like: 29 | # 30 | # ksh Gradle 31 | # 32 | # Busybox and similar reduced shells will NOT work, because this script 33 | # requires all of these POSIX shell features: 34 | # * functions; 35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 37 | # * compound commands having a testable exit status, especially «case»; 38 | # * various built-in commands including «command», «set», and «ulimit». 
39 | # 40 | # Important for patching: 41 | # 42 | # (2) This script targets any POSIX shell, so it avoids extensions provided 43 | # by Bash, Ksh, etc; in particular arrays are avoided. 44 | # 45 | # The "traditional" practice of packing multiple parameters into a 46 | # space-separated string is a well documented source of bugs and security 47 | # problems, so this is (mostly) avoided, by progressively accumulating 48 | # options in "$@", and eventually passing that to Java. 49 | # 50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 52 | # see the in-line comments for details. 53 | # 54 | # There are tweaks for specific operating systems such as AIX, CygWin, 55 | # Darwin, MinGW, and NonStop. 56 | # 57 | # (3) This script is generated from the Groovy template 58 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 59 | # within the Gradle project. 60 | # 61 | # You can find Gradle at https://github.com/gradle/gradle/. 62 | # 63 | ############################################################################## 64 | 65 | # Attempt to set APP_HOME 66 | 67 | # Resolve links: $0 may be a link 68 | app_path=$0 69 | 70 | # Need this for daisy-chained symlinks. 
71 | while 72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 73 | [ -h "$app_path" ] 74 | do 75 | ls=$( ls -ld "$app_path" ) 76 | link=${ls#*' -> '} 77 | case $link in #( 78 | /*) app_path=$link ;; #( 79 | *) app_path=$APP_HOME$link ;; 80 | esac 81 | done 82 | 83 | # This is normally unused 84 | # shellcheck disable=SC2034 85 | APP_BASE_NAME=${0##*/} 86 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 87 | APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit 88 | 89 | # Use the maximum available, or set MAX_FD != -1 to use that value. 90 | MAX_FD=maximum 91 | 92 | warn () { 93 | echo "$*" 94 | } >&2 95 | 96 | die () { 97 | echo 98 | echo "$*" 99 | echo 100 | exit 1 101 | } >&2 102 | 103 | # OS specific support (must be 'true' or 'false'). 104 | cygwin=false 105 | msys=false 106 | darwin=false 107 | nonstop=false 108 | case "$( uname )" in #( 109 | CYGWIN* ) cygwin=true ;; #( 110 | Darwin* ) darwin=true ;; #( 111 | MSYS* | MINGW* ) msys=true ;; #( 112 | NONSTOP* ) nonstop=true ;; 113 | esac 114 | 115 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 116 | 117 | 118 | # Determine the Java command to use to start the JVM. 119 | if [ -n "$JAVA_HOME" ] ; then 120 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 121 | # IBM's JDK on AIX uses strange locations for the executables 122 | JAVACMD=$JAVA_HOME/jre/sh/java 123 | else 124 | JAVACMD=$JAVA_HOME/bin/java 125 | fi 126 | if [ ! -x "$JAVACMD" ] ; then 127 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 128 | 129 | Please set the JAVA_HOME variable in your environment to match the 130 | location of your Java installation." 131 | fi 132 | else 133 | JAVACMD=java 134 | if ! command -v java >/dev/null 2>&1 135 | then 136 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
137 | 138 | Please set the JAVA_HOME variable in your environment to match the 139 | location of your Java installation." 140 | fi 141 | fi 142 | 143 | # Increase the maximum file descriptors if we can. 144 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 145 | case $MAX_FD in #( 146 | max*) 147 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 148 | # shellcheck disable=SC2039,SC3045 149 | MAX_FD=$( ulimit -H -n ) || 150 | warn "Could not query maximum file descriptor limit" 151 | esac 152 | case $MAX_FD in #( 153 | '' | soft) :;; #( 154 | *) 155 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 156 | # shellcheck disable=SC2039,SC3045 157 | ulimit -n "$MAX_FD" || 158 | warn "Could not set maximum file descriptor limit to $MAX_FD" 159 | esac 160 | fi 161 | 162 | # Collect all arguments for the java command, stacking in reverse order: 163 | # * args from the command line 164 | # * the main class name 165 | # * -classpath 166 | # * -D...appname settings 167 | # * --module-path (only if needed) 168 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
169 | 170 | # For Cygwin or MSYS, switch paths to Windows format before running java 171 | if "$cygwin" || "$msys" ; then 172 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 173 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 174 | 175 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 176 | 177 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 178 | for arg do 179 | if 180 | case $arg in #( 181 | -*) false ;; # don't mess with options #( 182 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 183 | [ -e "$t" ] ;; #( 184 | *) false ;; 185 | esac 186 | then 187 | arg=$( cygpath --path --ignore --mixed "$arg" ) 188 | fi 189 | # Roll the args list around exactly as many times as the number of 190 | # args, so each arg winds up back in the position where it started, but 191 | # possibly modified. 192 | # 193 | # NB: a `for` loop captures its iteration list before it begins, so 194 | # changing the positional parameters here affects neither the number of 195 | # iterations, nor the values presented in `arg`. 196 | shift # remove old arg 197 | set -- "$@" "$arg" # push replacement arg 198 | done 199 | fi 200 | 201 | 202 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 203 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 204 | 205 | # Collect all arguments for the java command: 206 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 207 | # and any embedded shellness will be escaped. 208 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 209 | # treated as '${Hostname}' itself on the command line. 210 | 211 | set -- \ 212 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 213 | -classpath "$CLASSPATH" \ 214 | org.gradle.wrapper.GradleWrapperMain \ 215 | "$@" 216 | 217 | # Stop when "xargs" is not available. 218 | if ! 
command -v xargs >/dev/null 2>&1 219 | then 220 | die "xargs is not available" 221 | fi 222 | 223 | # Use "xargs" to parse quoted args. 224 | # 225 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 226 | # 227 | # In Bash we could simply go: 228 | # 229 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 230 | # set -- "${ARGS[@]}" "$@" 231 | # 232 | # but POSIX shell has neither arrays nor command substitution, so instead we 233 | # post-process each arg (as a line of input to sed) to backslash-escape any 234 | # character that might be a shell metacharacter, then use eval to reverse 235 | # that process (while maintaining the separation between arguments), and wrap 236 | # the whole thing up as a single "set" statement. 237 | # 238 | # This will of course break if any of these variables contains a newline or 239 | # an unmatched quote. 240 | # 241 | 242 | eval "set -- $( 243 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 244 | xargs -n1 | 245 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 246 | tr '\n' ' ' 247 | )" '"$@"' 248 | 249 | exec "$JAVACMD" "$@" 250 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 
15 | @rem 16 | 17 | @if "%DEBUG%"=="" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%"=="" set DIRNAME=. 29 | @rem This is normally unused 30 | set APP_BASE_NAME=%~n0 31 | set APP_HOME=%DIRNAME% 32 | 33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 35 | 36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 38 | 39 | @rem Find java.exe 40 | if defined JAVA_HOME goto findJavaFromJavaHome 41 | 42 | set JAVA_EXE=java.exe 43 | %JAVA_EXE% -version >NUL 2>&1 44 | if %ERRORLEVEL% equ 0 goto execute 45 | 46 | echo. 1>&2 47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 48 | echo. 1>&2 49 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 50 | echo location of your Java installation. 1>&2 51 | 52 | goto fail 53 | 54 | :findJavaFromJavaHome 55 | set JAVA_HOME=%JAVA_HOME:"=% 56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 57 | 58 | if exist "%JAVA_EXE%" goto execute 59 | 60 | echo. 1>&2 61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 62 | echo. 1>&2 63 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 64 | echo location of your Java installation. 
1>&2 65 | 66 | goto fail 67 | 68 | :execute 69 | @rem Setup the command line 70 | 71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 72 | 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if %ERRORLEVEL% equ 0 goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | set EXIT_CODE=%ERRORLEVEL% 85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 87 | exit /b %EXIT_CODE% 88 | 89 | :mainEnd 90 | if "%OS%"=="Windows_NT" endlocal 91 | 92 | :omega 93 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "org.gradle.toolchains.foojay-resolver-convention" version '0.8.0' 3 | } 4 | 5 | rootProject.name = 'llm-apps-java-langchain4j' 6 | 7 | include '01-chat-models:chat-models-ollama' 8 | include '01-chat-models:chat-models-openai' 9 | 10 | include '02-prompts:prompts-basics-ollama' 11 | include '02-prompts:prompts-basics-openai' 12 | include '02-prompts:prompts-messages-ollama' 13 | include '02-prompts:prompts-messages-openai' 14 | include '02-prompts:prompts-templates-ollama' 15 | include '02-prompts:prompts-templates-openai' 16 | 17 | include '03-output-parsers:output-parsers-bean-ollama' 18 | include '03-output-parsers:output-parsers-bean-openai' 19 | 20 | include '04-embedding-models:embedding-models-ollama' 21 | include '04-embedding-models:embedding-models-openai' 22 | --------------------------------------------------------------------------------