diff --git a/rag/rag-springai-ollama-llm/docker/docker-compose.yml b/rag/rag-springai-ollama-llm/docker/docker-compose.yml
index d7dca73..e9d1046 100644
--- a/rag/rag-springai-ollama-llm/docker/docker-compose.yml
+++ b/rag/rag-springai-ollama-llm/docker/docker-compose.yml
@@ -1,38 +1,62 @@
 services:
-  ollama:
-    image: langchain4j/ollama-mistral:latest
-    ports:
-      - '11434:11434'
-  postgresqldb:
-    container_name: postgresqldb
-    image: pgvector/pgvector:pg17
-    extra_hosts: [ 'host.docker.internal:host-gateway' ]
-    restart: always
-    environment:
-      - POSTGRES_USER=appuser
-      - POSTGRES_PASSWORD=secret
-      - POSTGRES_DB=appdb
-      - PGPASSWORD=secret
-    logging:
-      options:
-        max-size: 10m
-        max-file: "3"
-    ports:
-      - '5432:5432'
-    healthcheck:
-      test: "pg_isready -U appuser -d appdb"
-      interval: 2s
-      timeout: 20s
-      retries: 10
+  ollama:
+    container_name: ollama
+    image: ollama/ollama:latest
+    ports:
+      - '11434:11434'
-
-  lgtm-stack:
-    image: grafana/otel-lgtm:0.8.1
-    extra_hosts: ['host.docker.internal:host-gateway']
-    container_name: lgtm-stack
-    environment:
-      - OTEL_METRIC_EXPORT_INTERVAL=500
-    ports:
-      - "3000:3000"
-      - "4317:4317"
-      - "4318:4318"
\ No newline at end of file
+  postgresqldb:
+    container_name: postgresqldb
+    image: pgvector/pgvector:pg17
+    extra_hosts: [ 'host.docker.internal:host-gateway' ]
+    restart: always
+    environment:
+      - POSTGRES_USER=appuser
+      - POSTGRES_PASSWORD=secret
+      - POSTGRES_DB=appdb
+      - PGPASSWORD=secret
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    ports:
+      - '5432:5432'
+    healthcheck:
+      test: "pg_isready -U appuser -d appdb"
+      interval: 2s
+      timeout: 20s
+      retries: 10
+
+  pgadmin:
+    container_name: pgadmin_container
+    image: dpage/pgadmin4
+    extra_hosts: [ 'host.docker.internal:host-gateway' ]
+    environment:
+      PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL:-pgadmin4@pgadmin.org}
+      PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_DEFAULT_PASSWORD:-admin}
+      PGADMIN_CONFIG_SERVER_MODE: "False"
+      PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: "False"
+    ports:
+      - "${PGADMIN_PORT:-5050}:80"
+    depends_on:
+      postgresqldb:
+        condition: service_healthy
+    volumes:
+      - ./docker_pgadmin_servers.json:/pgadmin4/servers.json
+    entrypoint:
+      - "/bin/sh"
+      - "-c"
+      - "/bin/echo 'postgresqldb:5432:*:appuser:secret' > /tmp/pgpassfile && chmod 600 /tmp/pgpassfile && /entrypoint.sh"
+
+  lgtm-stack:
+    image: grafana/otel-lgtm:0.8.1
+    extra_hosts: [ 'host.docker.internal:host-gateway' ]
+    container_name: lgtm-stack
+    environment:
+      - OTEL_METRIC_EXPORT_INTERVAL=500
+    ports:
+      - "3000:3000"
+      - "4317:4317"
+      - "4318:4318"
+      - "9090:9090"
diff --git a/rag/rag-springai-ollama-llm/docker/docker_pgadmin_servers.json b/rag/rag-springai-ollama-llm/docker/docker_pgadmin_servers.json
new file mode 100644
index 0000000..7e97769
--- /dev/null
+++ b/rag/rag-springai-ollama-llm/docker/docker_pgadmin_servers.json
@@ -0,0 +1,14 @@
+{
+  "Servers": {
+    "1": {
+      "Name": "Docker Compose DB",
+      "Group": "Servers",
+      "Port": 5432,
+      "Username": "appuser",
+      "Host": "postgresqldb",
+      "SSLMode": "prefer",
+      "MaintenanceDB": "appdb",
+      "PassFile": "/tmp/pgpassfile"
+    }
+  }
+}
\ No newline at end of file
diff --git a/rag/rag-springai-ollama-llm/src/main/java/com/learning/ai/llmragwithspringai/service/DataIndexerService.java b/rag/rag-springai-ollama-llm/src/main/java/com/learning/ai/llmragwithspringai/service/DataIndexerService.java
index aed87d0..70e2369 100644
--- a/rag/rag-springai-ollama-llm/src/main/java/com/learning/ai/llmragwithspringai/service/DataIndexerService.java
+++ b/rag/rag-springai-ollama-llm/src/main/java/com/learning/ai/llmragwithspringai/service/DataIndexerService.java
@@ -1,6 +1,7 @@
 package com.learning.ai.llmragwithspringai.service;
 
 import java.util.Map;
+import java.util.Objects;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.ai.document.DocumentReader;
@@ -63,6 +64,6 @@ public void loadData(Resource documentResource) {
     }
 
     public long count() {
-        return this.vectorStore.similaritySearch("*").size();
+        return Objects.requireNonNull(this.vectorStore.similaritySearch("*")).size();
     }
 }
diff --git a/rag/rag-springai-ollama-llm/src/main/resources/application-local.properties b/rag/rag-springai-ollama-llm/src/main/resources/application-local.properties
new file mode 100644
index 0000000..c9465fb
--- /dev/null
+++ b/rag/rag-springai-ollama-llm/src/main/resources/application-local.properties
@@ -0,0 +1,10 @@
+## only for development
+spring.ai.vectorstore.pgvector.removeExistingVectorStoreTable=true
+spring.ai.ollama.baseUrl=http://localhost:11434
+
+
+logging.level.org.springframework.ai.rag=debug
+
+spring.datasource.url=jdbc:postgresql://localhost/appdb
+spring.datasource.username=appuser
+spring.datasource.password=secret
diff --git a/rag/rag-springai-ollama-llm/src/main/resources/application.properties b/rag/rag-springai-ollama-llm/src/main/resources/application.properties
index 75eda33..4d7301f 100644
--- a/rag/rag-springai-ollama-llm/src/main/resources/application.properties
+++ b/rag/rag-springai-ollama-llm/src/main/resources/application.properties
@@ -3,7 +3,7 @@ spring.application.name=rag-springai-ollama-llm
 spring.threads.virtual.enabled=true
 spring.mvc.problemdetails.enabled=true
 
-spring.ai.ollama.init.pull-model-strategy=when_missing
+spring.ai.ollama.init.pull-model-strategy=WHEN_MISSING
 spring.ai.ollama.chat.options.model=mistral
 spring.ai.ollama.chat.options.temperature=0.3
 spring.ai.ollama.chat.options.top-k=2
@@ -31,8 +31,4 @@ management.tracing.sampling.probability=1.0
 management.otlp.tracing.endpoint=http://localhost:4318/v1/traces
 management.otlp.logging.endpoint=http://localhost:4318/v1/logs
 
-logging.level.org.springframework.ai.rag=debug
-
-## only for development
-spring.ai.vectorstore.pgvector.removeExistingVectorStoreTable=true
-spring.ai.ollama.baseUrl=http://localhost:11434
+logging.level.org.springframework.ai.rag=info
diff --git a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java
index b718825..d802615 100644
--- a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java
+++ b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java
@@ -1,6 +1,5 @@
 package com.learning.ai.llmragwithspringai.config;
 
-import java.io.IOException;
 import java.time.Duration;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
@@ -15,14 +14,8 @@ public class TestcontainersConfiguration {
 
     @Bean
     @ServiceConnection
-    OllamaContainer ollama() throws IOException, InterruptedException {
-        // The model name to use (e.g., "orca-mini", "mistral", "llama2", "codellama", "phi", or
-        // "tinyllama")
-        OllamaContainer ollamaContainer = new OllamaContainer(
-                DockerImageName.parse("langchain4j/ollama-mistral:latest").asCompatibleSubstituteFor("ollama/ollama"));
-        ollamaContainer.start();
-        ollamaContainer.execInContainer("ollama", "pull", "nomic-embed-text");
-        return ollamaContainer;
+    OllamaContainer ollama() {
+        return new OllamaContainer(DockerImageName.parse("ollama/ollama"));
     }
 
     @Bean