From 839e1621a1c57fc959463411d1496d5269c2704d Mon Sep 17 00:00:00 2001
From: mariofusco
Date: Thu, 2 Jan 2025 16:16:10 +0100
Subject: [PATCH] Add profile to use ollama + bump quarkus and langchain4j versions

---
 docs/docs/step-07.md                          |  4 +-
 step-01/pom.xml                               | 44 +++++++++++++----
 .../src/main/resources/application.properties | 11 +++--
 step-02/pom.xml                               | 44 +++++++++++++----
 .../src/main/resources/application.properties | 12 +++--
 step-03/pom.xml                               | 45 ++++++++++++++----
 .../src/main/resources/application.properties | 12 +++--
 step-04/pom.xml                               | 44 +++++++++++++----
 .../src/main/resources/application.properties | 12 +++--
 step-05/pom.xml                               | 44 +++++++++++++----
 .../src/main/resources/application.properties | 12 +++--
 step-06/pom.xml                               | 47 +++++++++++++++----
 .../quarkus/workshop/RagRetriever.java        |  2 +-
 .../src/main/resources/application.properties | 12 +++--
 step-07/pom.xml                               | 47 +++++++++++++++----
 .../quarkus/workshop/BookingRepository.java   |  6 +--
 .../CustomerSupportAgentWebSocket.java        |  2 +
 .../quarkus/workshop/RagRetriever.java        |  2 +-
 .../src/main/resources/application.properties | 13 ++++-
 step-08/pom.xml                               | 47 +++++++++++++++----
 .../quarkus/workshop/BookingRepository.java   |  6 +--
 .../CustomerSupportAgentWebSocket.java        |  2 +
 .../quarkus/workshop/RagRetriever.java        |  2 +-
 .../src/main/resources/application.properties | 13 ++++-
 24 files changed, 383 insertions(+), 102 deletions(-)

diff --git a/docs/docs/step-07.md b/docs/docs/step-07.md
index bd2411b..9b961fb 100644
--- a/docs/docs/step-07.md
+++ b/docs/docs/step-07.md
@@ -1,4 +1,4 @@
-# Step 06 - Function calling and Tools
+# Step 07 - Function calling and Tools
 
 The RAG pattern allows passing knowledge to the LLM based on your own data.
 It's a very popular pattern, but not the only one that can be used.
@@ -22,7 +22,7 @@ The result is sent back to the LLM, which can use it to continue the conversatio
 In this step, we are going to see how to implement function calling in our application.
 We will set up a database and create a function that allows the LLM to retrieve data (bookings, customers...) from the database.
 
-The final code is available in the `step-06` folder.
+The final code is available in the `step-07` folder.
 However, we recommend you follow the step-by-step guide to understand how it works, and the different steps to implement this pattern.
 
 ## A couple of new dependencies
diff --git a/step-01/pom.xml b/step-01/pom.xml
index f15180f..8a84a05 100644
--- a/step-01/pom.xml
+++ b/step-01/pom.xml
@@ -16,8 +16,8 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
@@ -34,12 +34,6 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-rest</artifactId>
@@ -106,4 +100,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-01/src/main/resources/application.properties b/step-01/src/main/resources/application.properties
index 3ebdcd9..7e8a007 100644
--- a/step-01/src/main/resources/application.properties
+++ b/step-01/src/main/resources/application.properties
@@ -1,5 +1,10 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
+
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
diff --git a/step-02/pom.xml b/step-02/pom.xml
index c00b241..1ede7a5 100644
--- a/step-02/pom.xml
+++ b/step-02/pom.xml
@@ -17,8 +17,8 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
@@ -35,12 +35,6 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-rest</artifactId>
@@ -109,4 +103,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-02/src/main/resources/application.properties b/step-02/src/main/resources/application.properties
index 2bbf112..7f389a6 100644
--- a/step-02/src/main/resources/application.properties
+++ b/step-02/src/main/resources/application.properties
@@ -1,9 +1,15 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
+
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
diff --git a/step-03/pom.xml b/step-03/pom.xml
index e8b6e9b..c3296a9 100644
--- a/step-03/pom.xml
+++ b/step-03/pom.xml
@@ -16,8 +16,8 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
@@ -34,12 +34,6 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-rest</artifactId>
@@ -104,7 +98,40 @@
-
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-03/src/main/resources/application.properties b/step-03/src/main/resources/application.properties
index e0d04f2..7f389a6 100644
--- a/step-03/src/main/resources/application.properties
+++ b/step-03/src/main/resources/application.properties
@@ -1,11 +1,15 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
-
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
diff --git a/step-04/pom.xml b/step-04/pom.xml
index f1cd338..4f1ef39 100644
--- a/step-04/pom.xml
+++ b/step-04/pom.xml
@@ -16,8 +16,8 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
@@ -34,12 +34,6 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-rest</artifactId>
@@ -107,4 +101,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-04/src/main/resources/application.properties b/step-04/src/main/resources/application.properties
index 2bbf112..7f389a6 100644
--- a/step-04/src/main/resources/application.properties
+++ b/step-04/src/main/resources/application.properties
@@ -1,9 +1,15 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
+
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
diff --git a/step-05/pom.xml b/step-05/pom.xml
index 464eac0..43aa847 100644
--- a/step-05/pom.xml
+++ b/step-05/pom.xml
@@ -16,8 +16,8 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
@@ -34,12 +34,6 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>io.quarkiverse.langchain4j</groupId>
@@ -115,4 +109,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-05/src/main/resources/application.properties b/step-05/src/main/resources/application.properties
index bf74cde..4059766 100644
--- a/step-05/src/main/resources/application.properties
+++ b/step-05/src/main/resources/application.properties
@@ -1,13 +1,19 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
 
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
+
 #--8<-- [start:easy-rag]
 quarkus.langchain4j.easy-rag.path=src/main/resources/rag
 quarkus.langchain4j.easy-rag.max-segment-size=100
diff --git a/step-06/pom.xml b/step-06/pom.xml
index b37a018..d9ad57f 100644
--- a/step-06/pom.xml
+++ b/step-06/pom.xml
@@ -16,8 +16,9 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
+        <langchain4j.version>0.36.2</langchain4j.version>
@@ -34,17 +35,11 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-embeddings-bge-small-en-q</artifactId>
-            <version>0.35.0</version>
+            <version>${langchain4j.version}</version>
         </dependency>
@@ -122,4 +117,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-06/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java b/step-06/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
index f14e56e..f11d118 100644
--- a/step-06/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
+++ b/step-06/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
@@ -33,7 +33,7 @@ public RetrievalAugmentor create(EmbeddingStore<TextSegment> store, EmbeddingModel model) {
                 .contentInjector(new ContentInjector() {
                     @Override
                     public UserMessage inject(List<Content> list, UserMessage userMessage) {
-                        StringBuffer prompt = new StringBuffer(userMessage.singleText());
+                        StringBuilder prompt = new StringBuilder(userMessage.singleText());
                         prompt.append("\nPlease, only use the following information:\n");
                         list.forEach(content -> prompt.append("- ").append(content.textSegment().text()).append("\n"));
                         return new UserMessage(prompt.toString());
diff --git a/step-06/src/main/resources/application.properties b/step-06/src/main/resources/application.properties
index 16231ad..fc54dae 100644
--- a/step-06/src/main/resources/application.properties
+++ b/step-06/src/main/resources/application.properties
@@ -1,13 +1,19 @@
-quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
 
+# OpenAI
+quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
 
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
+
 #--8<-- [start:pgvector]
 quarkus.langchain4j.pgvector.dimension=384
 #--8<-- [end:pgvector]
diff --git a/step-07/pom.xml b/step-07/pom.xml
index d51d1c5..b903229 100644
--- a/step-07/pom.xml
+++ b/step-07/pom.xml
@@ -16,8 +16,9 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
+        <langchain4j.version>0.36.2</langchain4j.version>
@@ -34,16 +35,10 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-embeddings-bge-small-en-q</artifactId>
-            <version>0.34.0</version>
+            <version>${langchain4j.version}</version>
         </dependency>
         <dependency>
             <groupId>io.quarkiverse.langchain4j</groupId>
@@ -129,4 +124,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
index 300c6cd..037893c 100644
--- a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
+++ b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
@@ -32,12 +32,12 @@ public void cancelBooking(long bookingId, String customerFirstName, String custo
     }
 
     @Tool("List booking for a customer")
-    public List<Booking> listBookingsForCustomer(String customerName, String customerSurname) {
-        var found = Customer.findByFirstAndLastName(customerName, customerSurname);
+    public List<Booking> listBookingsForCustomer(String customerFirstName, String customerLastName) {
+        var found = Customer.findByFirstAndLastName(customerFirstName, customerLastName);
         return found
                 .map(customer -> list("customer", customer))
-                .orElseThrow(() -> new CustomerNotFoundException(customerName, customerSurname));
+                .orElseThrow(() -> new CustomerNotFoundException(customerFirstName, customerLastName));
     }
 
diff --git a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
index d1f2448..b655e22 100644
--- a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
+++ b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
@@ -3,6 +3,7 @@
 import io.quarkus.websockets.next.OnOpen;
 import io.quarkus.websockets.next.OnTextMessage;
 import io.quarkus.websockets.next.WebSocket;
+import jakarta.enterprise.context.control.ActivateRequestContext;
 
 @WebSocket(path = "/customer-support-agent")
 public class CustomerSupportAgentWebSocket {
@@ -19,6 +20,7 @@ public String onOpen() {
     }
 
     // --8<-- [start:tools]
     @OnTextMessage
+    @ActivateRequestContext
     public String onTextMessage(String message) {
         return customerSupportAgent.chat(message);
     }
diff --git a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
index 0e08a87..1e540eb 100644
--- a/step-07/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
+++ b/step-07/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
@@ -30,7 +30,7 @@ public RetrievalAugmentor create(EmbeddingStore<TextSegment> store, EmbeddingModel model) {
                 .contentInjector(new ContentInjector() {
                     @Override
                     public UserMessage inject(List<Content> list, UserMessage userMessage) {
-                        StringBuffer prompt = new StringBuffer(userMessage.singleText());
+                        StringBuilder prompt = new StringBuilder(userMessage.singleText());
                         prompt.append("\nPlease, only use the following information:\n");
                         list.forEach(content -> prompt.append("- ").append(content.textSegment().text()).append("\n"));
                         return new UserMessage(prompt.toString());
diff --git a/step-07/src/main/resources/application.properties b/step-07/src/main/resources/application.properties
index bf16565..8b37bdd 100644
--- a/step-07/src/main/resources/application.properties
+++ b/step-07/src/main/resources/application.properties
@@ -1,10 +1,19 @@
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
+
+# OpenAI
 quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
+
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
+
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
+
 quarkus.langchain4j.pgvector.dimension=384
 rag.location=src/main/resources/rag
 quarkus.langchain4j.embedding-model.provider=dev.langchain4j.model.embedding.onnx.bgesmallenq.BgeSmallEnQuantizedEmbeddingModel
diff --git a/step-08/pom.xml b/step-08/pom.xml
index 5018c73..155664c 100644
--- a/step-08/pom.xml
+++ b/step-08/pom.xml
@@ -16,8 +16,9 @@
         <compiler-plugin.version>3.13.0</compiler-plugin.version>
-        <quarkus.platform.version>3.15.1</quarkus.platform.version>
-        <quarkus-langchain4j.version>0.18.0</quarkus-langchain4j.version>
+        <quarkus.platform.version>3.17.5</quarkus.platform.version>
+        <quarkus-langchain4j.version>0.22.0</quarkus-langchain4j.version>
+        <langchain4j.version>0.36.2</langchain4j.version>
@@ -34,16 +35,10 @@
-        <dependency>
-            <groupId>io.quarkiverse.langchain4j</groupId>
-            <artifactId>quarkus-langchain4j-openai</artifactId>
-            <version>${quarkus-langchain4j.version}</version>
-        </dependency>
-
         <dependency>
             <groupId>dev.langchain4j</groupId>
             <artifactId>langchain4j-embeddings-bge-small-en-q</artifactId>
-            <version>0.34.0</version>
+            <version>${langchain4j.version}</version>
         </dependency>
         <dependency>
             <groupId>io.quarkiverse.langchain4j</groupId>
@@ -127,4 +122,38 @@
+
+    <profiles>
+        <profile>
+            <id>openai</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+                <property>
+                    <name>openai</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-openai</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>ollama</id>
+            <activation>
+                <property>
+                    <name>ollama</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.quarkiverse.langchain4j</groupId>
+                    <artifactId>quarkus-langchain4j-ollama</artifactId>
+                    <version>${quarkus-langchain4j.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
 </project>
diff --git a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
index 02d723f..0ba2767 100644
--- a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
+++ b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/BookingRepository.java
@@ -32,10 +32,10 @@ public void cancelBooking(long bookingId, String customerFirstName, String custo
     }
 
     @Tool("List booking for a customer")
-    public List<Booking> listBookingsForCustomer(String customerName, String customerSurname) {
-        var found = Customer.find("firstName = ?1 and lastName = ?2", customerName, customerSurname).singleResultOptional();
+    public List<Booking> listBookingsForCustomer(String customerFirstName, String customerLastName) {
+        var found = Customer.find("firstName = ?1 and lastName = ?2", customerFirstName, customerLastName).singleResultOptional();
         if (found.isEmpty()) {
-            throw new CustomerNotFoundException(customerName, customerSurname);
+            throw new CustomerNotFoundException(customerFirstName, customerLastName);
         }
         return list("customer", found.get());
     }
diff --git a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
index f36b78f..17d3152 100644
--- a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
+++ b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/CustomerSupportAgentWebSocket.java
@@ -6,6 +6,7 @@
 import io.quarkus.websockets.next.WebSocket;
 import io.quarkiverse.langchain4j.runtime.aiservice.GuardrailException;
+import jakarta.enterprise.context.control.ActivateRequestContext;
 
 @WebSocket(path = "/customer-support-agent")
 public class CustomerSupportAgentWebSocket {
@@ -22,6 +23,7 @@ public String onOpen() {
     }
 
     @OnTextMessage
+    @ActivateRequestContext
     public String onTextMessage(String message) {
         try {
             return customerSupportAgent.chat(message);
diff --git a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
index 0e08a87..1e540eb 100644
--- a/step-08/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
+++ b/step-08/src/main/java/dev/langchain4j/quarkus/workshop/RagRetriever.java
@@ -30,7 +30,7 @@ public RetrievalAugmentor create(EmbeddingStore<TextSegment> store, EmbeddingModel model) {
                 .contentInjector(new ContentInjector() {
                     @Override
                     public UserMessage inject(List<Content> list, UserMessage userMessage) {
-                        StringBuffer prompt = new StringBuffer(userMessage.singleText());
+                        StringBuilder prompt = new StringBuilder(userMessage.singleText());
                         prompt.append("\nPlease, only use the following information:\n");
                         list.forEach(content -> prompt.append("- ").append(content.textSegment().text()).append("\n"));
                         return new UserMessage(prompt.toString());
diff --git a/step-08/src/main/resources/application.properties b/step-08/src/main/resources/application.properties
index bf16565..8b37bdd 100644
--- a/step-08/src/main/resources/application.properties
+++ b/step-08/src/main/resources/application.properties
@@ -1,10 +1,19 @@
+quarkus.langchain4j.log-requests=true
+quarkus.langchain4j.log-responses=true
+
+# OpenAI
 quarkus.langchain4j.openai.api-key=${OPENAI_API_KEY}
 quarkus.langchain4j.openai.chat-model.model-name=gpt-4o
-quarkus.langchain4j.openai.chat-model.log-requests=true
-quarkus.langchain4j.openai.chat-model.log-responses=true
+
 quarkus.langchain4j.openai.chat-model.temperature=1.0
 quarkus.langchain4j.openai.chat-model.max-tokens=1000
 quarkus.langchain4j.openai.chat-model.frequency-penalty=0
+
+# Ollama
+quarkus.langchain4j.ollama.chat-model.model-id=llama3.2
+quarkus.langchain4j.ollama.timeout=180s
+quarkus.langchain4j.ollama.chat-model.temperature=1.0
+
 quarkus.langchain4j.pgvector.dimension=384
 rag.location=src/main/resources/rag
 quarkus.langchain4j.embedding-model.provider=dev.langchain4j.model.embedding.onnx.bgesmallenq.BgeSmallEnQuantizedEmbeddingModel
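---

Note for readers following the docs change above: the step-07 text describes function calling, where the model asks the application to invoke a method annotated with @Tool and the result is fed back into the conversation, and the BookingRepository hunks in this patch touch exactly such a tool method. Below is a minimal, self-contained sketch of that pattern; the class name BookingTools and the canned return value are illustrative only and are not part of this patch.

```java
package dev.langchain4j.quarkus.workshop;

import java.util.List;

import dev.langchain4j.agent.tool.Tool;
import jakarta.enterprise.context.ApplicationScoped;

// Illustrative sketch: a CDI bean whose @Tool method the LLM can ask the
// application to invoke during a chat. The workshop's real tools live in
// BookingRepository and query the database instead of returning a fixed list.
@ApplicationScoped
public class BookingTools {

    @Tool("List booking for a customer")
    public List<String> listBookingsForCustomer(String customerFirstName, String customerLastName) {
        // Descriptive parameter names matter: the model sees them when deciding what to pass.
        return List.of("Booking #1 for " + customerFirstName + " " + customerLastName);
    }
}
```

In Quarkus LangChain4j such a bean is attached to the AI service through @RegisterAiService(tools = ...); that wiring already exists in the workshop code and is not changed by this patch.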