diff --git a/build.gradle.kts b/build.gradle.kts index dd29830..03dc411 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -87,20 +87,23 @@ configurations { repositories { mavenCentral() + maven { url = uri("https://repo.spring.io/milestone") } } dependencies { - developmentOnly(libs.bundles.spring.boot.dev) + developmentOnly(libs.spring.boot.docker.compose) + developmentOnly(libs.spring.ai.spring.boot.docker.compose) { + exclude(group = "org.springframework.boot", module = "spring-boot-starter-mongodb") + } - implementation(libs.spring.boot.starter.web) + implementation(libs.spring.boot.starter.webmvc) + implementation(libs.spring.boot.starter.json) implementation(libs.spring.boot.starter.actuator) implementation(libs.spring.boot.starter.aop) implementation(libs.spring.ai.starter.mcp.server.webmvc) implementation(libs.solr.solrj) implementation(libs.commons.csv) - // JSpecify for nullability annotations - implementation(libs.jspecify) implementation(platform("io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom:2.11.0")) implementation("io.opentelemetry.instrumentation:opentelemetry-spring-boot-starter") @@ -113,6 +116,15 @@ dependencies { implementation(libs.spring.boot.starter.security) implementation(libs.spring.boot.starter.oauth2.resource.server) + // OpenTelemetry (HTTP mode only - for metrics, tracing, and log export) + implementation(libs.spring.boot.starter.opentelemetry) + implementation(libs.opentelemetry.logback.appender) + implementation("io.micrometer:micrometer-tracing-bridge-otel") + runtimeOnly(libs.micrometer.registry.otlp) + + // AspectJ (required for @Observed annotation support in Spring Boot 4) + implementation(libs.spring.boot.starter.aspectj) + // Error Prone and NullAway for null safety analysis errorprone(libs.errorprone.core) errorprone(libs.nullaway) @@ -127,6 +139,18 @@ dependencyManagement { } } +// Force opentelemetry-proto to a version compiled with protobuf 3.x +// This resolves NoSuchMethodError with protobuf 
4.x +// See: https://github.com/micrometer-metrics/micrometer/issues/5658 +configurations.all { + resolutionStrategy.eachDependency { + if (requested.group == "io.opentelemetry.proto" && requested.name == "opentelemetry-proto") { + useVersion("1.3.2-alpha") + because("Version 1.8.0-alpha has protobuf 4.x incompatibility causing NoSuchMethodError") + } + } +} + // Configures Spring Boot plugin to generate build metadata at build time // This creates META-INF/build-info.properties containing: // - build.artifact: The artifact name (e.g., "solr-mcp") @@ -417,12 +441,6 @@ jib { environment = mapOf( // Disable Spring Boot Docker Compose support when running in container - // Docker Compose integration is disabled in the container image. - // It is only useful for local development (HTTP profile) where - // the host has Docker and a compose.yaml. Inside a container, - // Docker Compose cannot start sibling containers without a - // Docker socket mount, so it must be turned off. - // The application-stdio.properties also disables it for STDIO mode. "SPRING_DOCKER_COMPOSE_ENABLED" to "false", ) diff --git a/compose.yaml b/compose.yaml index 75c5920..dd8d577 100644 --- a/compose.yaml +++ b/compose.yaml @@ -35,27 +35,33 @@ services: environment: ZOO_4LW_COMMANDS_WHITELIST: "mntr,conf,ruok" - # ============================================================================= - # LGTM Stack - Grafana observability backend (Loki, Grafana, Tempo, Mimir) - # ============================================================================= - # This all-in-one container provides: - # - Loki: Log aggregation (LogQL queries) - # - Grafana: Visualization at http://localhost:3000 (no auth required) - # - Tempo: Distributed tracing (TraceQL queries) - # - Mimir: Prometheus-compatible metrics storage - # - OpenTelemetry Collector: Receives OTLP data on ports 4317 (gRPC) and 4318 (HTTP) - # - # Spring Boot auto-configures OTLP endpoints when this container is running. 
+ # ============================================================================= + # OpenTelemetry LGTM Stack (HTTP mode only) + # ============================================================================= + # Provides a complete observability stack for local development: + # - Grafana: Visualization dashboards (http://localhost:3000) + # - Loki: Log aggregation + # - Tempo: Distributed tracing + # - Mimir: Metrics storage (Prometheus-compatible) + # - OpenTelemetry Collector: Receives OTLP data on ports 4317 (gRPC) and 4318 (HTTP) + # + # Usage: + # docker compose up -d lgtm # Start only the observability stack + # docker compose up -d # Start everything including Solr + # + # Access Grafana at http://localhost:3000 (no authentication required) + # Pre-configured datasources for Loki, Tempo, and Mimir are available. lgtm: - image: grafana/otel-lgtm:latest - ports: - - "3000:3000" # Grafana UI - - "4317:4317" # OTLP gRPC receiver - - "4318:4318" # OTLP HTTP receiver - networks: [ search ] - labels: - # Prevent Spring Boot auto-configuration from trying to manage this service - org.springframework.boot.ignore: "true" + image: grafana/otel-lgtm:latest + ports: + - "3000:3000" # Grafana UI + - "4317:4317" # OTLP gRPC receiver + - "4318:4318" # OTLP HTTP receiver + networks: [ search ] + environment: + # Disable authentication for local development + GF_AUTH_ANONYMOUS_ENABLED: "true" + GF_AUTH_ANONYMOUS_ORG_ROLE: "Admin" volumes: data: diff --git a/dev-docs/Observability.md b/dev-docs/Observability.md new file mode 100644 index 0000000..6f778fa --- /dev/null +++ b/dev-docs/Observability.md @@ -0,0 +1,314 @@ +# Observability Guide for Solr MCP Server + +This guide covers setting up observability (metrics, traces, and logs) for the Solr MCP Server running in HTTP mode using OpenTelemetry. 
+ +## Table of Contents + +- [Overview](#overview) +- [The LGTM Stack](#the-lgtm-stack) +- [Quick Start](#quick-start) +- [Architecture](#architecture) +- [Accessing Telemetry Data](#accessing-telemetry-data) + - [Grafana Dashboard](#grafana-dashboard) + - [Viewing Traces](#viewing-traces) + - [Viewing Logs](#viewing-logs) + - [Viewing Metrics](#viewing-metrics) +- [Configuration](#configuration) + - [Environment Variables](#environment-variables) + - [Sampling Configuration](#sampling-configuration) + - [Custom OTLP Endpoints](#custom-otlp-endpoints) +- [Production Considerations](#production-considerations) +- [Troubleshooting](#troubleshooting) + +## Overview + +The Solr MCP Server integrates with OpenTelemetry to provide comprehensive observability in HTTP mode: + +| Signal | Description | Backend | +|--------|-------------|---------| +| **Traces** | Distributed tracing for request flows | Tempo | +| **Metrics** | Application and JVM metrics | Mimir (Prometheus-compatible) | +| **Logs** | Structured log export with trace correlation | Loki | + +**Note:** Observability is only available in HTTP mode. STDIO mode disables telemetry to prevent stdout pollution that would interfere with MCP protocol communication. + +## The LGTM Stack + +The project uses the **Grafana LGTM stack** (`grafana/otel-lgtm`) - an all-in-one Docker image that provides a complete observability backend for local development. 
LGTM stands for: + +| Component | Purpose | Port | +|-----------|---------|------| +| **L**oki | Log aggregation and querying | Internal | +| **G**rafana | Visualization, dashboards, and exploration | 3000 | +| **T**empo | Distributed tracing backend | Internal | +| **M**imir | Prometheus-compatible metrics storage | Internal | + +The image also includes an **OpenTelemetry Collector** that receives telemetry data via OTLP protocol: +- **Port 4317**: OTLP gRPC receiver +- **Port 4318**: OTLP HTTP receiver (used by Spring Boot) + +This single container replaces what would otherwise require deploying and configuring multiple services separately, making it ideal for local development and testing. + +## Quick Start + +Thanks to the `spring-boot-docker-compose` dependency, **Docker containers are automatically started** when you run the application locally. Simply run: + +```bash +# Run the MCP server in HTTP mode - Docker containers start automatically! +PROFILES=http ./gradlew bootRun +``` + +Spring Boot detects the `compose.yaml` file and automatically: +1. Starts the `lgtm` container (Grafana, Loki, Tempo, Mimir) +2. Starts the `solr` and `zoo` containers +3. Configures OTLP endpoints to point to the running containers +4. Waits for containers to be healthy before accepting requests + +Once running, open Grafana at **http://localhost:3000** to explore your telemetry data. + +**Note:** To start containers manually (e.g., for debugging), use: +```bash +docker compose up -d lgtm solr +``` + +## Architecture + +``` +┌─────────────────────┐ OTLP/HTTP ┌─────────────────────────────────┐ +│ Solr MCP Server │─────────────────────│ OpenTelemetry Collector │ +│ (HTTP mode) │ :4318 │ (grafana/otel-lgtm) │ +│ │ │ │ +│ ┌───────────────┐ │ │ ┌─────────┐ ┌─────────────┐ │ +│ │ Traces │──┼─────────────────────┼─▶│ Tempo │ │ Grafana │ │ +│ │ (auto-instr.) 
│ │ │ └─────────┘ │ :3000 │ │ +│ └───────────────┘ │ │ │ │ │ +│ ┌───────────────┐ │ │ ┌─────────┐ │ - Dashboards│ │ +│ │ Metrics │──┼─────────────────────┼─▶│ Mimir │ │ - Explore │ │ +│ │ (actuator) │ │ │ └─────────┘ │ - Alerts │ │ +│ └───────────────┘ │ │ └─────────────┘ │ +│ ┌───────────────┐ │ │ ┌─────────┐ │ +│ │ Logs │──┼─────────────────────┼─▶│ Loki │ │ +│ │ (logback) │ │ │ └─────────┘ │ +│ └───────────────┘ │ │ │ +└─────────────────────┘ └─────────────────────────────────┘ +``` + +## Accessing Telemetry Data + +### Grafana Dashboard + +Access Grafana at **http://localhost:3000** (no login required in development mode). + +The LGTM stack comes with pre-configured datasources: +- **Tempo** - For distributed traces +- **Loki** - For logs +- **Mimir** - For metrics (Prometheus-compatible) + +### Viewing Traces + +Grafana's **Drilldown** feature provides an integrated view for exploring traces, metrics, and logs all in one place. + +1. Open Grafana: http://localhost:3000 +2. Go to **Drilldown** > **Traces** in the sidebar +3. Select **Tempo** as the datasource +4. 
Filter traces by: + - Service name: `solr-mcp-server` + - Span name (e.g., `http post /mcp`) + - Duration + - URL path + +The trace view shows the complete request flow with timing breakdown for each span: + +![Distributed Tracing in Grafana](images/grafana-traces.png) + +In this example, you can see: +- The root span `http post /mcp` taking 223.98ms total +- Security filter chain spans for authentication/authorization +- The `SearchService#search` span (177.01ms) created by the `@Observed` annotation on the service method +- Nested security filter spans for the secured request + +**Navigating Between Signals:** + +The Drilldown sidebar provides quick access to related telemetry: +- **Metrics** - View application and JVM metrics (request rates, latencies, memory usage) +- **Logs** - View correlated logs with the same trace ID +- **Traces** - The current distributed trace view +- **Profiles** - CPU and memory profiling data (if configured) + +This unified view makes it easy to investigate issues by correlating traces with their associated logs and metrics. + +**Example TraceQL query:** +``` +{resource.service.name="solr-mcp-server"} +``` + +### Viewing Logs + +1. Open Grafana: http://localhost:3000 +2. Go to **Explore** +3. Select **Loki** as the datasource +4. Query logs using LogQL: + +**Example queries:** +```logql +# All logs from the MCP server +{service_name="solr-mcp-server"} + +# Error logs only +{service_name="solr-mcp-server"} |= "ERROR" + +# Logs with specific trace ID +{service_name="solr-mcp-server"} | json | trace_id="" +``` + +### Viewing Metrics + +1. Open Grafana: http://localhost:3000 +2. Go to **Explore** +3. Select **Mimir** as the datasource +4. 
Query metrics using PromQL: + +**Example queries:** +```promql +# HTTP request rate +rate(http_server_requests_seconds_count{application="solr-mcp-server"}[5m]) + +# Request latency (p99) +histogram_quantile(0.99, rate(http_server_requests_seconds_bucket{application="solr-mcp-server"}[5m])) + +# JVM memory usage +jvm_memory_used_bytes{application="solr-mcp-server"} + +# Active threads +jvm_threads_live_threads{application="solr-mcp-server"} +``` + +## Configuration + +### Environment Variables + +For production deployments without Docker Compose, set these environment variables: + +| Variable | Default | Description | +|----------|---------|-------------| +| `OTEL_SAMPLING_PROBABILITY` | `1.0` | Trace sampling rate (0.0-1.0) | +| `OTEL_METRICS_URL` | (auto-configured) | OTLP metrics endpoint | +| `OTEL_TRACES_URL` | (auto-configured) | OTLP traces endpoint | +| `OTEL_LOGS_URL` | (auto-configured) | OTLP logs endpoint | + +Example production configuration: +```bash +export OTEL_SAMPLING_PROBABILITY=0.1 +export OTEL_METRICS_URL=https://otel-collector.prod.example.com/v1/metrics +export OTEL_TRACES_URL=https://otel-collector.prod.example.com/v1/traces +export OTEL_LOGS_URL=https://otel-collector.prod.example.com/v1/logs +``` + +### Sampling Configuration + +For production, reduce sampling to manage costs and storage: + +```bash +# Sample 10% of traces +export OTEL_SAMPLING_PROBABILITY=0.1 +``` + +Or in `application-http.properties`: +```properties +management.tracing.sampling.probability=0.1 +``` + +### Custom OTLP Endpoints + +To send telemetry to a different backend (e.g., Jaeger, Datadog, New Relic): + +```bash +# Example: Send traces to Jaeger +export OTEL_TRACES_URL=http://jaeger:4318/v1/traces + +# Example: Send metrics to Prometheus remote write endpoint +export OTEL_METRICS_URL=http://prometheus:9090/api/v1/otlp/v1/metrics +``` + +## Production Considerations + +### 1. 
Use Secure Endpoints + +```properties +# Use HTTPS for production OTLP endpoints +management.otlp.metrics.export.url=https://otel-collector.prod.example.com/v1/metrics +management.opentelemetry.tracing.export.otlp.endpoint=https://otel-collector.prod.example.com/v1/traces +management.opentelemetry.logging.export.otlp.endpoint=https://otel-collector.prod.example.com/v1/logs +``` + +### 2. Add Authentication Headers + +If your OTLP collector requires authentication, configure headers in your OpenTelemetry configuration. + +### 3. Resource Attributes + +Add deployment-specific attributes for better filtering: + +```properties +spring.application.name=solr-mcp-server-prod +``` + +## Troubleshooting + +### No Data in Grafana + +1. **Check the LGTM container is running:** + ```bash + docker compose ps lgtm + ``` + +2. **Verify OTLP endpoints are reachable:** + ```bash + curl -v http://localhost:4318/v1/traces + ``` + +3. **Check application logs for OTLP errors:** + ```bash + ./gradlew bootRun 2>&1 | grep -i otel + ``` + +### Traces Not Appearing + +1. Ensure you're running in HTTP mode (`PROFILES=http`) +2. Check sampling probability is > 0 +3. Verify the trace endpoint URL is correct + +### Logs Not Appearing + +1. Check that logback-spring.xml is being loaded +2. Verify the OTEL appender is installed (check startup logs) +3. Ensure log level is INFO or lower + +### Metrics Not Appearing + +1. Verify actuator endpoints are exposed: + ```bash + curl http://localhost:8080/actuator/metrics + ``` +2. 
Check the metrics endpoint URL is correct + +### High Memory Usage + +If the LGTM container uses too much memory: +```yaml +# compose.yaml +lgtm: + image: grafana/otel-lgtm:latest + deploy: + resources: + limits: + memory: 2G +``` + +## References + +- [Spring Boot OpenTelemetry](https://docs.spring.io/spring-boot/reference/actuator/tracing.html) +- [OpenTelemetry Documentation](https://opentelemetry.io/docs/) +- [Grafana LGTM Stack](https://grafana.com/blog/2024/03/13/an-opentelemetry-backend-in-a-docker-image-introducing-grafana-otel-lgtm/) +- [LogQL Query Language](https://grafana.com/docs/loki/latest/logql/) +- [TraceQL Query Language](https://grafana.com/docs/tempo/latest/traceql/) diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 67ddad0..68a30cf 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,37 +1,42 @@ [versions] # Build plugins -spring-boot = "3.5.11" +spring-boot = "4.0.5" spring-dependency-management = "1.1.7" errorprone-plugin = "4.2.0" jib = "3.4.5" spotless = "7.0.2" # Main dependencies -spring-ai = "1.1.3" +spring-ai = "2.0.0-M4" solr = "10.0.0" commons-csv = "1.10.0" -jspecify = "1.0.0" -mcp-server-security = "0.0.4" +mcp-server-security = "0.0.6" + +# OpenTelemetry +opentelemetry-logback-appender = "2.21.0-alpha" # Error Prone and analysis tools errorprone-core = "2.38.0" nullaway = "0.12.7" - # Test dependencies -testcontainers = "1.21.3" +testcontainers = "2.0.2" awaitility = "4.2.2" opentelemetry-instrumentation-bom = "2.11.0" [libraries] # Spring -spring-boot-starter-web = { module = "org.springframework.boot:spring-boot-starter-web" } +spring-boot-starter-webmvc = { module = "org.springframework.boot:spring-boot-starter-webmvc" } +spring-boot-starter-json = { module = "org.springframework.boot:spring-boot-starter-json" } spring-boot-starter-actuator = { module = "org.springframework.boot:spring-boot-starter-actuator" } spring-boot-starter-aop = { module = 
"org.springframework.boot:spring-boot-starter-aop" } spring-boot-starter-security = { module = "org.springframework.boot:spring-boot-starter-security" } spring-boot-starter-oauth2-resource-server = { module = "org.springframework.boot:spring-boot-starter-oauth2-resource-server" } spring-boot-docker-compose = { module = "org.springframework.boot:spring-boot-docker-compose" } spring-boot-starter-test = { module = "org.springframework.boot:spring-boot-starter-test" } +spring-boot-starter-actuator-test = { module = "org.springframework.boot:spring-boot-starter-actuator-test" } +spring-boot-starter-opentelemetry-test = { module = "org.springframework.boot:spring-boot-starter-opentelemetry-test" } +spring-boot-starter-webmvc-test = { module = "org.springframework.boot:spring-boot-starter-webmvc-test" } spring-boot-testcontainers = { module = "org.springframework.boot:spring-boot-testcontainers" } # Spring AI spring-ai-starter-mcp-server-webmvc = { module = "org.springframework.ai:spring-ai-starter-mcp-server-webmvc" } @@ -48,8 +53,13 @@ solr-solrj = { module = "org.apache.solr:solr-solrj", version.ref = "solr" } # Apache Commons commons-csv = { module = "org.apache.commons:commons-csv", version.ref = "commons-csv" } -# Null safety -jspecify = { module = "org.jspecify:jspecify", version.ref = "jspecify" } +# OpenTelemetry (HTTP mode only) +spring-boot-starter-opentelemetry = { module = "org.springframework.boot:spring-boot-starter-opentelemetry" } +opentelemetry-logback-appender = { module = "io.opentelemetry.instrumentation:opentelemetry-logback-appender-1.0", version.ref = "opentelemetry-logback-appender" } +micrometer-registry-otlp = { module = "io.micrometer:micrometer-registry-otlp" } + +# AspectJ (required for @Observed annotation support) +spring-boot-starter-aspectj = { module = "org.springframework.boot:spring-boot-starter-aspectj" } # Error Prone errorprone-core = { module = "com.google.errorprone:error_prone_core", version.ref = "errorprone-core" } @@ -59,9 
+69,9 @@ nullaway = { module = "com.uber.nullaway:nullaway", version.ref = "nullaway" } micrometer-tracing-bridge-otel = { module = "io.micrometer:micrometer-tracing-bridge-otel" } # Test dependencies -testcontainers-junit-jupiter = { module = "org.testcontainers:junit-jupiter" } -testcontainers-solr = { module = "org.testcontainers:solr", version.ref = "testcontainers" } -testcontainers-grafana = { module = "org.testcontainers:grafana", version.ref = "testcontainers" } +testcontainers-junit-jupiter = { module = "org.testcontainers:testcontainers-junit-jupiter", version.ref = "testcontainers" } +testcontainers-solr = { module = "org.testcontainers:testcontainers-solr", version.ref = "testcontainers" } +testcontainers-grafana = { module = "org.testcontainers:testcontainers-grafana", version.ref = "testcontainers" } junit-platform-launcher = { module = "org.junit.platform:junit-platform-launcher" } awaitility = { module = "org.awaitility:awaitility", version.ref = "awaitility" } opentelemetry-sdk-testing = { module = "io.opentelemetry:opentelemetry-sdk-testing" } @@ -73,7 +83,7 @@ jetty-util = { module = "org.eclipse.jetty:jetty-util" } [bundles] spring-ai-mcp = [ - "spring-boot-starter-web", + "spring-boot-starter-webmvc", "spring-ai-starter-mcp-server-webmvc" ] @@ -84,6 +94,9 @@ spring-boot-dev = [ test = [ "spring-boot-starter-test", + "spring-boot-starter-actuator-test", + "spring-boot-starter-opentelemetry-test", + "spring-boot-starter-webmvc-test", "spring-boot-testcontainers", "spring-ai-spring-boot-testcontainers", "testcontainers-junit-jupiter", diff --git a/images/grafana-traces.png b/images/grafana-traces.png new file mode 100644 index 0000000..f542792 Binary files /dev/null and b/images/grafana-traces.png differ diff --git a/settings.gradle.kts b/settings.gradle.kts index 8373d94..317049b 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -16,3 +16,18 @@ */ rootProject.name = "solr-mcp" + +pluginManagement { + repositories { + 
gradlePluginPortal() + mavenCentral() + maven { url = uri("https://repo.spring.io/milestone") } + } +} + +dependencyResolutionManagement { + repositories { + mavenCentral() + maven { url = uri("https://repo.spring.io/milestone") } + } +} diff --git a/src/main/java/org/apache/solr/mcp/server/collection/CollectionService.java b/src/main/java/org/apache/solr/mcp/server/collection/CollectionService.java index d5dd1df..435716b 100644 --- a/src/main/java/org/apache/solr/mcp/server/collection/CollectionService.java +++ b/src/main/java/org/apache/solr/mcp/server/collection/CollectionService.java @@ -21,7 +21,6 @@ import static org.apache.solr.mcp.server.collection.CollectionUtils.getLong; import static org.apache.solr.mcp.server.util.JsonUtils.toJson; -import com.fasterxml.jackson.databind.ObjectMapper; import io.micrometer.observation.annotation.Observed; import java.io.IOException; import java.util.ArrayList; @@ -41,12 +40,15 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.mcp.server.config.SolrConfigurationProperties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springaicommunity.mcp.annotation.McpComplete; import org.springaicommunity.mcp.annotation.McpResource; import org.springaicommunity.mcp.annotation.McpTool; import org.springaicommunity.mcp.annotation.McpToolParam; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Service; +import tools.jackson.databind.json.JsonMapper; /** * Spring Service providing comprehensive Solr collection management and @@ -129,34 +131,18 @@ @Observed public class CollectionService { + private static final Logger log = LoggerFactory.getLogger(CollectionService.class); + // ======================================== // Constants for API Parameters and Paths // ======================================== - /** Category parameter value for cache-related MBeans requests */ - private static 
final String CACHE_CATEGORY = "CACHE"; - - /** Category parameter value for query handler MBeans requests */ - private static final String QUERY_HANDLER_CATEGORY = "QUERYHANDLER"; - - /** - * Combined category parameter value for both query and update handler MBeans - * requests - */ - private static final String HANDLER_CATEGORIES = "QUERYHANDLER,UPDATEHANDLER"; - /** Universal Solr query pattern to match all documents in a collection */ private static final String ALL_DOCUMENTS_QUERY = "*:*"; /** Suffix pattern used to identify shard names in SolrCloud deployments */ private static final String SHARD_SUFFIX = "_shard"; - /** Request parameter name for enabling statistics in MBeans requests */ - private static final String STATS_PARAM = "stats"; - - /** Request parameter name for specifying category filters in MBeans requests */ - private static final String CAT_PARAM = "cat"; - /** Request parameter name for specifying response writer type */ private static final String WT_PARAM = "wt"; @@ -173,30 +159,42 @@ public class CollectionService { /** Key name for segment count information in Luke response */ private static final String SEGMENT_COUNT_KEY = "segmentCount"; - /** Key name for query result cache in MBeans cache responses */ - private static final String QUERY_RESULT_CACHE_KEY = "queryResultCache"; + /** Key name for the metrics map in Metrics API responses */ + private static final String METRICS_KEY = "metrics"; - /** Key name for document cache in MBeans cache responses */ - private static final String DOCUMENT_CACHE_KEY = "documentCache"; + // ======================================== + // Metrics API Constants + // ======================================== - /** Key name for filter cache in MBeans cache responses */ - private static final String FILTER_CACHE_KEY = "filterCache"; + /** URL path for Solr Metrics API endpoint (available in Solr 7.1+) */ + private static final String ADMIN_METRICS_PATH = "/admin/metrics"; - /** Key name for statistics 
section in MBeans responses */ - private static final String STATS_KEY = "stats"; + /** Metrics API parameter for metric group */ + private static final String GROUP_PARAM = "group"; - // ======================================== - // Constants for Handler Paths - // ======================================== + /** Metrics API parameter for metric prefix filter */ + private static final String PREFIX_PARAM = "prefix"; + + /** Metrics API group value for core-level metrics */ + private static final String CORE_GROUP = "core"; + + /** Metrics API prefix for cache metrics */ + private static final String CACHE_METRICS_PREFIX = "CACHE.searcher"; + + /** Cache name for query result cache */ + private static final String QUERY_RESULT_CACHE_KEY = "queryResultCache"; + + /** Cache name for document cache */ + private static final String DOCUMENT_CACHE_KEY = "documentCache"; - /** URL path for Solr select (query) handler */ - private static final String SELECT_HANDLER_PATH = "/select"; + /** Cache name for filter cache */ + private static final String FILTER_CACHE_KEY = "filterCache"; - /** URL path for Solr update handler */ - private static final String UPDATE_HANDLER_PATH = "/update"; + /** Metrics API prefix for query handler metrics */ + private static final String QUERY_HANDLER_METRICS_PREFIX = "QUERY./select"; - /** URL path for Solr MBeans admin endpoint */ - private static final String ADMIN_MBEANS_PATH = "/admin/mbeans"; + /** Metrics API prefix for update handler metrics */ + private static final String UPDATE_HANDLER_METRICS_PREFIX = "UPDATE./update"; // ======================================== // Constants for Statistics Field Names @@ -260,7 +258,7 @@ public class CollectionService { /** SolrJ client for communicating with Solr server */ private final SolrClient solrClient; - private final ObjectMapper objectMapper; + private final JsonMapper objectMapper; /** * Constructs a new CollectionService with the required dependencies. 
@@ -272,11 +270,11 @@ public class CollectionService { * @param solrClient * the SolrJ client instance for communicating with Solr * @param objectMapper - * the Jackson ObjectMapper for JSON serialization + * the Jackson JsonMapper for JSON serialization * @see SolrClient * @see SolrConfigurationProperties */ - public CollectionService(SolrClient solrClient, ObjectMapper objectMapper) { + public CollectionService(SolrClient solrClient, JsonMapper objectMapper) { this.solrClient = solrClient; this.objectMapper = objectMapper; } @@ -352,7 +350,8 @@ public List listCollections() { @SuppressWarnings("unchecked") List collections = (List) response.getResponse().get(COLLECTIONS_KEY); return collections != null ? collections : new ArrayList<>(); - } catch (SolrServerException | IOException _) { + } catch (SolrServerException | IOException e) { + log.warn("Failed to list collections: {}", e.getMessage()); return new ArrayList<>(); } } @@ -563,138 +562,97 @@ public QueryStats buildQueryStats(QueryResponse response) { * unavailable * @see CacheStats * @see CacheInfo - * @see #extractCacheStats(NamedList) - * @see #isCacheStatsEmpty(CacheStats) */ public CacheStats getCacheMetrics(String collection) { + String actualCollection = extractCollectionName(collection); + + if (!validateCollectionExists(actualCollection)) { + log.debug("Collection '{}' not found, skipping cache metrics", actualCollection); + return null; + } + try { - // Get MBeans for cache information ModifiableSolrParams params = new ModifiableSolrParams(); - params.set(STATS_PARAM, "true"); - params.set(CAT_PARAM, CACHE_CATEGORY); + params.set(GROUP_PARAM, CORE_GROUP); + params.set(PREFIX_PARAM, CACHE_METRICS_PREFIX); params.set(WT_PARAM, JSON_FORMAT); - // Extract actual collection name from shard name if needed - String actualCollection = extractCollectionName(collection); - - // Validate collection exists first - if (!validateCollectionExists(actualCollection)) { - return null; // Return null instead of empty 
object - } - - String path = "/" + actualCollection + ADMIN_MBEANS_PATH; - - GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, path, params); - + GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, ADMIN_METRICS_PATH, params); NamedList response = solrClient.request(request); - CacheStats stats = extractCacheStats(response); - // Return null if all cache stats are empty/null + CacheStats stats = extractCacheStats(response, actualCollection); if (isCacheStatsEmpty(stats)) { + log.debug("No cache metrics available for collection '{}'", actualCollection); return null; } + log.debug("Retrieved cache metrics for collection '{}'", actualCollection); return stats; - } catch (SolrServerException | IOException | RuntimeException _) { - // RuntimeException covers SolrException subclasses (e.g. RemoteSolrException) - // thrown when the /admin/mbeans endpoint is unavailable (removed in Solr 10). - return null; // Return null instead of empty object + } catch (SolrServerException | IOException | RuntimeException e) { + log.warn("Failed to retrieve cache metrics for collection '{}': {}", actualCollection, e.getMessage()); + return null; } } - /** - * Checks if cache statistics are empty or contain no meaningful data. - * - *

- * Used to determine whether cache metrics are worth returning to clients. Empty - * cache stats typically indicate that caches are not configured or not yet - * populated with data. - * - * @param stats - * the cache statistics to evaluate - * @return true if the stats are null or all cache types are null - */ private boolean isCacheStatsEmpty(CacheStats stats) { return stats == null || (stats.queryResultCache() == null && stats.documentCache() == null && stats.filterCache() == null); } - /** - * Extracts cache performance statistics from Solr MBeans response data. - * - *

- * Parses the raw MBeans response to extract structured cache performance - * metrics for all available cache types. Each cache type provides detailed - * statistics including hit ratios, eviction rates, and current utilization. - * - *

- * Parsed Cache Types: - * - *

    - *
  • queryResultCache - Complete query result caching - *
  • documentCache - Retrieved document data caching - *
  • filterCache - Filter query result caching - *
- * - *

- * For each cache type, the following metrics are extracted: - * - *

    - *
  • lookups, hits, hitratio - Performance effectiveness - *
  • inserts, evictions - Memory management patterns - *
  • size - Current utilization - *
- * - * @param mbeans - * the raw MBeans response from Solr admin endpoint - * @return CacheStats object containing parsed metrics for all cache types - * @see CacheStats - * @see CacheInfo - */ - private CacheStats extractCacheStats(NamedList mbeans) { - CacheInfo queryResultCacheInfo = null; - CacheInfo documentCacheInfo = null; - CacheInfo filterCacheInfo = null; + @SuppressWarnings("unchecked") + private CacheStats extractCacheStats(NamedList metricsResponse, String collection) { + NamedList metrics = (NamedList) metricsResponse.get(METRICS_KEY); + if (metrics == null) { + return null; + } - @SuppressWarnings("unchecked") - NamedList caches = (NamedList) mbeans.get(CACHE_CATEGORY); + NamedList coreMetrics = findCoreRegistry(metrics, collection); + if (coreMetrics == null) { + return null; + } - if (caches != null) { - // Query result cache - @SuppressWarnings("unchecked") - NamedList queryResultCache = (NamedList) caches.get(QUERY_RESULT_CACHE_KEY); - if (queryResultCache != null) { - @SuppressWarnings("unchecked") - NamedList stats = (NamedList) queryResultCache.get(STATS_KEY); - queryResultCacheInfo = new CacheInfo(getLong(stats, LOOKUPS_FIELD), getLong(stats, HITS_FIELD), - getFloat(stats, HITRATIO_FIELD), getLong(stats, INSERTS_FIELD), getLong(stats, EVICTIONS_FIELD), - getLong(stats, SIZE_FIELD)); - } + return new CacheStats(extractCacheInfo(coreMetrics, QUERY_RESULT_CACHE_KEY), + extractCacheInfo(coreMetrics, DOCUMENT_CACHE_KEY), extractCacheInfo(coreMetrics, FILTER_CACHE_KEY)); + } - // Document cache - @SuppressWarnings("unchecked") - NamedList documentCache = (NamedList) caches.get(DOCUMENT_CACHE_KEY); - if (documentCache != null) { - @SuppressWarnings("unchecked") - NamedList stats = (NamedList) documentCache.get(STATS_KEY); - documentCacheInfo = new CacheInfo(getLong(stats, LOOKUPS_FIELD), getLong(stats, HITS_FIELD), - getFloat(stats, HITRATIO_FIELD), getLong(stats, INSERTS_FIELD), getLong(stats, EVICTIONS_FIELD), - getLong(stats, SIZE_FIELD)); + 
@SuppressWarnings("unchecked") + private NamedList findCoreRegistry(NamedList metrics, String collection) { + for (int i = 0; i < metrics.size(); i++) { + String registryName = metrics.getName(i); + if (registryName != null && registryName.startsWith("solr.core.") && registryName.contains(collection)) { + Object value = metrics.getVal(i); + if (value instanceof NamedList) { + return (NamedList) value; + } } + } + return null; + } - // Filter cache - @SuppressWarnings("unchecked") - NamedList filterCache = (NamedList) caches.get(FILTER_CACHE_KEY); - if (filterCache != null) { - @SuppressWarnings("unchecked") - NamedList stats = (NamedList) filterCache.get(STATS_KEY); - filterCacheInfo = new CacheInfo(getLong(stats, LOOKUPS_FIELD), getLong(stats, HITS_FIELD), - getFloat(stats, HITRATIO_FIELD), getLong(stats, INSERTS_FIELD), getLong(stats, EVICTIONS_FIELD), - getLong(stats, SIZE_FIELD)); - } + @SuppressWarnings("unchecked") + private CacheInfo extractCacheInfo(NamedList coreMetrics, String cacheName) { + Object cacheData = coreMetrics.get(CACHE_METRICS_PREFIX + "." + cacheName); + if (cacheData instanceof NamedList) { + NamedList stats = (NamedList) cacheData; + return new CacheInfo(getLong(stats, LOOKUPS_FIELD), getLong(stats, HITS_FIELD), + getFloat(stats, HITRATIO_FIELD), getLong(stats, INSERTS_FIELD), getLong(stats, EVICTIONS_FIELD), + getLong(stats, SIZE_FIELD)); } + if (cacheData instanceof java.util.Map) { + java.util.Map stats = (java.util.Map) cacheData; + return new CacheInfo(numberToLong(stats.get(LOOKUPS_FIELD)), numberToLong(stats.get(HITS_FIELD)), + numberToFloat(stats.get(HITRATIO_FIELD)), numberToLong(stats.get(INSERTS_FIELD)), + numberToLong(stats.get(EVICTIONS_FIELD)), numberToLong(stats.get(SIZE_FIELD))); + } + return null; + } - return new CacheStats(queryResultCacheInfo, documentCacheInfo, filterCacheInfo); + private Long numberToLong(Object value) { + return value instanceof Number number ? 
number.longValue() : null; + } + + private Float numberToFloat(Object value) { + return value instanceof Number number ? number.floatValue() : 0.0f; } /** @@ -709,10 +667,10 @@ private CacheStats extractCacheStats(NamedList mbeans) { * Monitored Handlers: * *
    - *
  • Select Handler ({@value #SELECT_HANDLER_PATH}): - * Processes search and query requests - *
  • Update Handler ({@value #UPDATE_HANDLER_PATH}): - * Processes document indexing operations + *
  • Select Handler (/select): Processes search and query + * requests + *
  • Update Handler (/update): Processes document indexing + * operations *
* *

@@ -738,124 +696,71 @@ private CacheStats extractCacheStats(NamedList mbeans) { * null if unavailable * @see HandlerStats * @see HandlerInfo - * @see #extractHandlerStats(NamedList) - * @see #isHandlerStatsEmpty(HandlerStats) */ public HandlerStats getHandlerMetrics(String collection) { + String actualCollection = extractCollectionName(collection); + + if (!validateCollectionExists(actualCollection)) { + log.debug("Collection '{}' not found, skipping handler metrics", actualCollection); + return null; + } + try { ModifiableSolrParams params = new ModifiableSolrParams(); - params.set(STATS_PARAM, "true"); - params.set(CAT_PARAM, HANDLER_CATEGORIES); + params.set(GROUP_PARAM, CORE_GROUP); + params.set(PREFIX_PARAM, QUERY_HANDLER_METRICS_PREFIX + "," + UPDATE_HANDLER_METRICS_PREFIX); params.set(WT_PARAM, JSON_FORMAT); - // Extract actual collection name from shard name if needed - String actualCollection = extractCollectionName(collection); - - // Validate collection exists first - if (!validateCollectionExists(actualCollection)) { - return null; // Return null instead of empty object - } - - String path = "/" + actualCollection + ADMIN_MBEANS_PATH; - - GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, path, params); - + GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, ADMIN_METRICS_PATH, params); NamedList response = solrClient.request(request); - HandlerStats stats = extractHandlerStats(response); - // Return null if all handler stats are empty/null + HandlerStats stats = extractHandlerStats(response, actualCollection); if (isHandlerStatsEmpty(stats)) { + log.debug("No handler metrics available for collection '{}'", actualCollection); return null; } + log.debug("Retrieved handler metrics for collection '{}'", actualCollection); return stats; - } catch (SolrServerException | IOException | RuntimeException _) { - // RuntimeException covers SolrException subclasses (e.g. 
RemoteSolrException) - // thrown when the /admin/mbeans endpoint is unavailable (removed in Solr 10). - return null; // Return null instead of empty object + } catch (SolrServerException | IOException | RuntimeException e) { + log.warn("Failed to retrieve handler metrics for collection '{}': {}", actualCollection, e.getMessage()); + return null; } } - /** - * Checks if handler statistics are empty or contain no meaningful data. - * - *

- * Used to determine whether handler metrics are worth returning to clients. - * Empty handler stats typically indicate that handlers haven't processed any - * requests yet or statistics collection is not enabled. - * - * @param stats - * the handler statistics to evaluate - * @return true if the stats are null or all handler types are null - */ private boolean isHandlerStatsEmpty(HandlerStats stats) { return stats == null || (stats.selectHandler() == null && stats.updateHandler() == null); } - /** - * Extracts request handler performance statistics from Solr MBeans response - * data. - * - *

- * Parses the raw MBeans response to extract structured handler performance - * metrics for query and update operations. Each handler provides detailed - * statistics about request processing including volume, errors, and timing. - * - *

- * Parsed Handler Types: - * - *

    - *
  • /select - Search and query request handler - *
  • /update - Document indexing request handler - *
- * - *

- * For each handler type, the following metrics are extracted: - * - *

    - *
  • requests, errors, timeouts - Volume and reliability - *
  • totalTime, avgTimePerRequest - Performance characteristics - *
  • avgRequestsPerSecond - Throughput capacity - *
- * - * @param mbeans - * the raw MBeans response from Solr admin endpoint - * @return HandlerStats object containing parsed metrics for all handler types - * @see HandlerStats - * @see HandlerInfo - */ - private HandlerStats extractHandlerStats(NamedList mbeans) { - HandlerInfo selectHandlerInfo = null; - HandlerInfo updateHandlerInfo = null; + @SuppressWarnings("unchecked") + private HandlerStats extractHandlerStats(NamedList metricsResponse, String collection) { + NamedList metrics = (NamedList) metricsResponse.get(METRICS_KEY); + if (metrics == null) { + return null; + } - @SuppressWarnings("unchecked") - NamedList queryHandlers = (NamedList) mbeans.get(QUERY_HANDLER_CATEGORY); + NamedList coreMetrics = findCoreRegistry(metrics, collection); + if (coreMetrics == null) { + return null; + } - if (queryHandlers != null) { - // Select handler - @SuppressWarnings("unchecked") - NamedList selectHandler = (NamedList) queryHandlers.get(SELECT_HANDLER_PATH); - if (selectHandler != null) { - @SuppressWarnings("unchecked") - NamedList stats = (NamedList) selectHandler.get(STATS_KEY); - selectHandlerInfo = new HandlerInfo(getLong(stats, REQUESTS_FIELD), getLong(stats, ERRORS_FIELD), - getLong(stats, TIMEOUTS_FIELD), getLong(stats, TOTAL_TIME_FIELD), - getFloat(stats, AVG_TIME_PER_REQUEST_FIELD), getFloat(stats, AVG_REQUESTS_PER_SECOND_FIELD)); - } + return new HandlerStats(extractHandlerInfo(coreMetrics, QUERY_HANDLER_METRICS_PREFIX), + extractHandlerInfo(coreMetrics, UPDATE_HANDLER_METRICS_PREFIX)); + } - // Update handler - @SuppressWarnings("unchecked") - NamedList updateHandler = (NamedList) queryHandlers.get(UPDATE_HANDLER_PATH); - if (updateHandler != null) { - @SuppressWarnings("unchecked") - NamedList stats = (NamedList) updateHandler.get(STATS_KEY); - updateHandlerInfo = new HandlerInfo(getLong(stats, REQUESTS_FIELD), getLong(stats, ERRORS_FIELD), - getLong(stats, TIMEOUTS_FIELD), getLong(stats, TOTAL_TIME_FIELD), - getFloat(stats, AVG_TIME_PER_REQUEST_FIELD), 
getFloat(stats, AVG_REQUESTS_PER_SECOND_FIELD)); - } + private HandlerInfo extractHandlerInfo(NamedList coreMetrics, String handlerPrefix) { + Long requests = getLong(coreMetrics, handlerPrefix + "." + REQUESTS_FIELD); + Long errors = getLong(coreMetrics, handlerPrefix + "." + ERRORS_FIELD); + Long timeouts = getLong(coreMetrics, handlerPrefix + "." + TIMEOUTS_FIELD); + Long totalTime = getLong(coreMetrics, handlerPrefix + "." + TOTAL_TIME_FIELD); + Float avgTimePerRequest = getFloat(coreMetrics, handlerPrefix + "." + AVG_TIME_PER_REQUEST_FIELD); + Float avgRequestsPerSecond = getFloat(coreMetrics, handlerPrefix + "." + AVG_REQUESTS_PER_SECOND_FIELD); + + if (requests == null && errors == null && timeouts == null) { + return null; } - return new HandlerStats(selectHandlerInfo, updateHandlerInfo); + return new HandlerInfo(requests, errors, timeouts, totalTime, avgTimePerRequest, avgRequestsPerSecond); } /** @@ -956,7 +861,8 @@ private boolean validateCollectionExists(String collection) { // shard // names) return collections.stream().anyMatch(c -> c.startsWith(collection + SHARD_SUFFIX)); - } catch (Exception _) { + } catch (Exception e) { + log.warn("Failed to validate collection '{}': {}", collection, e.getMessage()); return false; } } @@ -1023,12 +929,13 @@ public SolrHealthStatus checkHealth(@McpToolParam(description = "Solr collection statsResponse.getResults().getNumFound(), new Date(), null, null, null); } catch (Exception e) { + log.warn("Health check failed for collection '{}': {}", collection, e.getMessage()); return new SolrHealthStatus(false, e.getMessage(), null, null, new Date(), null, null, null); } } /** - * Creates a new Solr collection (SolrCloud) or core (standalone Solr). + * Creates a new Solr collection in a SolrCloud cluster. * *

* Automatically detects the deployment type and uses the appropriate API: @@ -1068,8 +975,7 @@ public CollectionCreationResult createCollection( @McpToolParam(description = "Number of shards (SolrCloud only). Defaults to 1.", required = false) Integer numShards, @McpToolParam(description = "Replication factor (SolrCloud only). Defaults to 1.", required = false) Integer replicationFactor) throws SolrServerException, IOException { - - if (name == null || name.isBlank()) { + if (name.isBlank()) { throw new IllegalArgumentException(BLANK_COLLECTION_NAME_ERROR); } diff --git a/src/main/java/org/apache/solr/mcp/server/collection/CollectionUtils.java b/src/main/java/org/apache/solr/mcp/server/collection/CollectionUtils.java index 5c02fed..ab1d0da 100644 --- a/src/main/java/org/apache/solr/mcp/server/collection/CollectionUtils.java +++ b/src/main/java/org/apache/solr/mcp/server/collection/CollectionUtils.java @@ -64,6 +64,7 @@ public class CollectionUtils { private CollectionUtils() { + // Utility class - prevent instantiation } /** diff --git a/src/main/java/org/apache/solr/mcp/server/config/InstallOpenTelemetryAppender.java b/src/main/java/org/apache/solr/mcp/server/config/InstallOpenTelemetryAppender.java new file mode 100644 index 0000000..07d1cc4 --- /dev/null +++ b/src/main/java/org/apache/solr/mcp/server/config/InstallOpenTelemetryAppender.java @@ -0,0 +1,23 @@ +package org.apache.solr.mcp.server.config; + +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.logback.appender.v1_0.OpenTelemetryAppender; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.context.annotation.Profile; +import org.springframework.stereotype.Component; + +@Component +@Profile("http") +class InstallOpenTelemetryAppender implements InitializingBean { + + private final OpenTelemetry openTelemetry; + + InstallOpenTelemetryAppender(OpenTelemetry openTelemetry) { + this.openTelemetry = openTelemetry; + } + + @Override + public 
void afterPropertiesSet() { + OpenTelemetryAppender.install(this.openTelemetry); + } +} diff --git a/src/main/java/org/apache/solr/mcp/server/config/JsonResponseParser.java b/src/main/java/org/apache/solr/mcp/server/config/JsonResponseParser.java index a78e3ab..788fbf5 100644 --- a/src/main/java/org/apache/solr/mcp/server/config/JsonResponseParser.java +++ b/src/main/java/org/apache/solr/mcp/server/config/JsonResponseParser.java @@ -16,9 +16,6 @@ */ package org.apache.solr.mcp.server.config; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; @@ -30,6 +27,9 @@ import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.springframework.http.MediaType; +import tools.jackson.core.JacksonException; +import tools.jackson.databind.JsonNode; +import tools.jackson.databind.json.JsonMapper; /** * SolrJ {@link ResponseParser} that requests JSON wire format ({@code wt=json}) @@ -68,9 +68,9 @@ */ class JsonResponseParser extends ResponseParser { - private final ObjectMapper mapper; + private final JsonMapper mapper; - JsonResponseParser(ObjectMapper mapper) { + JsonResponseParser(JsonMapper mapper) { this.mapper = mapper; } @@ -88,14 +88,14 @@ public Collection getContentTypes() { public NamedList processResponse(InputStream body, String encoding) { try { return toNamedList(mapper.readTree(body)); - } catch (IOException e) { + } catch (JacksonException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Failed to parse Solr JSON response", e); } } private SimpleOrderedMap toNamedList(JsonNode objectNode) { SimpleOrderedMap result = new SimpleOrderedMap<>(); - objectNode.fields().forEachRemaining(entry -> result.add(entry.getKey(), convertValue(entry.getValue()))); + objectNode.properties().forEach(entry -> result.add(entry.getKey(), 
convertValue(entry.getValue()))); return result; } @@ -187,7 +187,7 @@ private SolrDocumentList toSolrDocumentList(JsonNode node) { private SolrDocument toSolrDocument(JsonNode node) { SolrDocument doc = new SolrDocument(); - node.fields().forEachRemaining(entry -> { + node.properties().forEach(entry -> { JsonNode val = entry.getValue(); if (val.isArray()) { // Multi-valued field — always a plain list, never a flat NamedList diff --git a/src/main/java/org/apache/solr/mcp/server/config/SolrConfig.java b/src/main/java/org/apache/solr/mcp/server/config/SolrConfig.java index 0fc720b..a280656 100644 --- a/src/main/java/org/apache/solr/mcp/server/config/SolrConfig.java +++ b/src/main/java/org/apache/solr/mcp/server/config/SolrConfig.java @@ -16,7 +16,6 @@ */ package org.apache.solr.mcp.server.config; -import com.fasterxml.jackson.databind.ObjectMapper; import java.util.concurrent.TimeUnit; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpJdkSolrClient; @@ -137,9 +136,11 @@ public class SolrConfig { * Client Type: * *

- * Creates an {@code HttpSolrClient} configured for standard HTTP-based - * communication with SolrCloud servers. This client type is suitable for - * SolrCloud deployments when used with load balancers. + * Creates an {@code HttpJdkSolrClient} configured for standard HTTP-based + * communication with SolrCloud servers using the JDK's built-in HTTP client. + * This avoids Jetty version conflicts between SolrJ and Spring Boot. This + * client type is suitable for SolrCloud deployments when used with load + * balancers. * *

* Error Handling: @@ -154,7 +155,7 @@ public class SolrConfig { * *

    *
  • Timeout values are optimized for production workloads - *
  • Connection pooling is handled by the HttpSolrClient internally + *
  • Connection pooling is handled by the HttpJdkSolrClient internally *
  • Client is thread-safe and suitable for concurrent operations *
* @@ -166,12 +167,7 @@ public class SolrConfig { * @see SolrConfigurationProperties#url() */ @Bean - JsonResponseParser jsonResponseParser(ObjectMapper objectMapper) { - return new JsonResponseParser(objectMapper); - } - - @Bean - SolrClient solrClient(SolrConfigurationProperties properties, JsonResponseParser jsonResponseParser) { + SolrClient solrClient(SolrConfigurationProperties properties) { String url = properties.url(); // Ensure URL is properly formatted for Solr @@ -189,9 +185,9 @@ SolrClient solrClient(SolrConfigurationProperties properties, JsonResponseParser } } - // Use with explicit base URL; JSON wire format replaces the JavaBin default + // Use HttpJdkSolrClient which uses the JDK's built-in HTTP client + // This avoids Jetty version conflicts between SolrJ and Spring Boot return new HttpJdkSolrClient.Builder(url).withConnectionTimeout(CONNECTION_TIMEOUT_MS, TimeUnit.MILLISECONDS) - .withIdleTimeout(SOCKET_TIMEOUT_MS, TimeUnit.MILLISECONDS).withResponseParser(jsonResponseParser) - .build(); + .withIdleTimeout(SOCKET_TIMEOUT_MS, TimeUnit.MILLISECONDS).build(); } } diff --git a/src/main/java/org/apache/solr/mcp/server/indexing/IndexingService.java b/src/main/java/org/apache/solr/mcp/server/indexing/IndexingService.java index e52ca7b..39054a1 100644 --- a/src/main/java/org/apache/solr/mcp/server/indexing/IndexingService.java +++ b/src/main/java/org/apache/solr/mcp/server/indexing/IndexingService.java @@ -24,6 +24,8 @@ import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.mcp.server.indexing.documentcreator.IndexingDocumentCreator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springaicommunity.mcp.annotation.McpTool; import org.springaicommunity.mcp.annotation.McpToolParam; import org.springframework.security.access.prepost.PreAuthorize; @@ -107,6 +109,8 @@ @Observed public class IndexingService { + private static final Logger log = 
LoggerFactory.getLogger(IndexingService.class); + private static final int DEFAULT_BATCH_SIZE = 1000; /** SolrJ client for communicating with Solr server */ @@ -433,15 +437,15 @@ public int indexDocuments(String collection, List documents) solrClient.add(collection, batch); successCount += batch.size(); } catch (SolrServerException | IOException | RuntimeException e) { + log.warn("Batch indexing failed for collection '{}', falling back to individual documents: {}", + collection, e.getMessage()); // Try indexing documents individually to identify problematic ones for (SolrInputDocument doc : batch) { try { solrClient.add(collection, doc); successCount++; - } catch (SolrServerException | IOException | RuntimeException _) { - // Document failed to index - this is expected behavior for problematic - // documents - // We continue processing the rest of the batch + } catch (SolrServerException | IOException | RuntimeException docError) { + log.warn("Failed to index document in collection '{}': {}", collection, docError.getMessage()); } } } diff --git a/src/main/java/org/apache/solr/mcp/server/indexing/documentcreator/JsonDocumentCreator.java b/src/main/java/org/apache/solr/mcp/server/indexing/documentcreator/JsonDocumentCreator.java index 1a8a149..a387d92 100644 --- a/src/main/java/org/apache/solr/mcp/server/indexing/documentcreator/JsonDocumentCreator.java +++ b/src/main/java/org/apache/solr/mcp/server/indexing/documentcreator/JsonDocumentCreator.java @@ -16,9 +16,6 @@ */ package org.apache.solr.mcp.server.indexing.documentcreator; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; @@ -26,6 +23,9 @@ import java.util.Set; import org.apache.solr.common.SolrInputDocument; import org.springframework.stereotype.Component; +import tools.jackson.core.JacksonException; +import 
tools.jackson.databind.JsonNode; +import tools.jackson.databind.json.JsonMapper; /** * Utility class for processing JSON documents and converting them to @@ -111,7 +111,7 @@ public List create(String json) throws DocumentProcessingExce List documents = new ArrayList<>(); try { - ObjectMapper mapper = new ObjectMapper(); + JsonMapper mapper = JsonMapper.builder().build(); JsonNode rootNode = mapper.readTree(json); if (rootNode.isArray()) { @@ -123,7 +123,7 @@ public List create(String json) throws DocumentProcessingExce documents.add(doc); } } - } catch (IOException e) { + } catch (JacksonException e) { throw new DocumentProcessingException("Failed to parse JSON document", e); } @@ -250,6 +250,6 @@ private Object convertJsonValue(JsonNode value) { return value.asDouble(); if (value.isInt()) return value.asInt(); - return value.asText(); + return value.asString(); } } diff --git a/src/main/java/org/apache/solr/mcp/server/metadata/SchemaService.java b/src/main/java/org/apache/solr/mcp/server/metadata/SchemaService.java index f795587..e4ef678 100644 --- a/src/main/java/org/apache/solr/mcp/server/metadata/SchemaService.java +++ b/src/main/java/org/apache/solr/mcp/server/metadata/SchemaService.java @@ -18,14 +18,17 @@ import static org.apache.solr.mcp.server.util.JsonUtils.toJson; -import com.fasterxml.jackson.databind.ObjectMapper; + import io.micrometer.observation.annotation.Observed; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.schema.SchemaRequest; import org.apache.solr.client.solrj.response.schema.SchemaRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springaicommunity.mcp.annotation.McpResource; import org.springaicommunity.mcp.annotation.McpTool; import org.springframework.stereotype.Service; +import tools.jackson.databind.ObjectMapper; /** * Spring Service providing schema introspection and management capabilities for @@ -123,6 +126,8 @@ @Observed public class SchemaService { + 
private static final Logger log = LoggerFactory.getLogger(SchemaService.class); + /** SolrJ client for communicating with Solr server */ private final SolrClient solrClient; @@ -166,6 +171,7 @@ public String getSchemaResource(String collection) { try { return toJson(objectMapper, getSchema(collection)); } catch (Exception e) { + log.warn("Failed to retrieve schema for collection '{}': {}", collection, e.getMessage()); return "{\"error\": \"" + e.getMessage() + "\"}"; } } diff --git a/src/main/java/org/apache/solr/mcp/server/security/HttpSecurityConfiguration.java b/src/main/java/org/apache/solr/mcp/server/security/HttpSecurityConfiguration.java index ac0c3ae..ab8cde3 100644 --- a/src/main/java/org/apache/solr/mcp/server/security/HttpSecurityConfiguration.java +++ b/src/main/java/org/apache/solr/mcp/server/security/HttpSecurityConfiguration.java @@ -51,8 +51,10 @@ SecurityFilterChain securityFilterChain(HttpSecurity http) throws Exception { auth.anyRequest().authenticated(); }) // Configure OAuth2 on the MCP server - .with(McpServerOAuth2Configurer.mcpServerOAuth2(), - mcpAuthorization -> mcpAuthorization.authorizationServer(issuerUrl)) + .with(McpServerOAuth2Configurer.mcpServerOAuth2(), (mcpAuthorization) -> { + // REQUIRED: the issuerURI + mcpAuthorization.authorizationServer(issuerUrl); + }) // MCP inspector .cors(cors -> cors.configurationSource(corsConfigurationSource())).csrf(CsrfConfigurer::disable) .build(); diff --git a/src/main/java/org/apache/solr/mcp/server/util/JsonUtils.java b/src/main/java/org/apache/solr/mcp/server/util/JsonUtils.java index 6ecc3bc..e8c35d9 100644 --- a/src/main/java/org/apache/solr/mcp/server/util/JsonUtils.java +++ b/src/main/java/org/apache/solr/mcp/server/util/JsonUtils.java @@ -16,8 +16,10 @@ */ package org.apache.solr.mcp.server.util; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import 
tools.jackson.core.JacksonException; +import tools.jackson.databind.ObjectMapper; /** * Utility class for JSON serialization operations. @@ -31,6 +33,8 @@ */ public final class JsonUtils { + private static final Logger log = LoggerFactory.getLogger(JsonUtils.class); + private JsonUtils() { // Utility class - prevent instantiation } @@ -51,7 +55,8 @@ private JsonUtils() { public static String toJson(ObjectMapper objectMapper, Object obj) { try { return objectMapper.writeValueAsString(obj); - } catch (JsonProcessingException e) { + } catch (JacksonException e) { + log.warn("Failed to serialize response: {}", e.getMessage()); return "{\"error\": \"Failed to serialize response\"}"; } } diff --git a/src/main/resources/application-http.properties b/src/main/resources/application-http.properties index 95daa2e..356d28c 100644 --- a/src/main/resources/application-http.properties +++ b/src/main/resources/application-http.properties @@ -2,9 +2,7 @@ spring.main.web-application-type=servlet spring.ai.mcp.server.type=sync spring.ai.mcp.server.protocol=stateless spring.ai.mcp.server.stdio=false -# Docker Compose integration: automatically start Solr and other services on bootRun. -# Spring Boot detects compose.yaml and starts declared services before the application context. 
-spring.docker.compose.enabled=true + # OAuth2 Security Configuration # Configure the issuer URI for your OAuth2 authorization server # For Auth0: https:///.well-known/openid-configuration @@ -13,12 +11,42 @@ spring.docker.compose.enabled=true spring.security.oauth2.resourceserver.jwt.issuer-uri=${OAUTH2_ISSUER_URI:https://your-auth0-domain.auth0.com/} # Security toggle - set to true to enable OAuth2 authentication, false to bypass http.security.enabled=${HTTP_SECURITY_ENABLED:false} -# observability + +# ============================================================================= +# OpenTelemetry Configuration (HTTP mode only) +# ============================================================================= +# Provides distributed tracing, metrics, and log export via OTLP protocol. +# See Observability.md for detailed setup instructions. +# +# LOCAL DEVELOPMENT: +# Run `docker compose up -d lgtm` - Spring Boot Docker Compose will +# auto-detect the grafana/otel-lgtm container and configure OTLP endpoints. 
+# +# PRODUCTION: +# Set environment variables to override endpoints: +# - OTEL_METRICS_URL=https://collector.example.com/v1/metrics +# - OTEL_TRACES_URL=https://collector.example.com/v1/traces +# - OTEL_LOGS_URL=https://collector.example.com/v1/logs + +# Application name for telemetry identification +spring.application.name=solr-mcp-server + management.endpoints.web.exposure.include=health,sbom,metrics,info,loggers,prometheus -# Enable @Observed annotation support for custom spans management.observations.annotations.enabled=true + # Tracing Configuration # Set to 1.0 for 100% sampling in development, lower in production (e.g., 0.1) management.tracing.sampling.probability=${OTEL_SAMPLING_PROBABILITY:1.0} -otel.exporter.otlp.endpoint=${OTEL_TRACES_URL:http://localhost:4317} -otel.exporter.otlp.protocol=grpc +# OTLP endpoints - auto-configured by Spring Boot Docker Compose when lgtm is running +# Spring Boot will detect the LGTM container and automatically configure these URLs +# Override with environment variables for production deployments (e.g., OTEL_TRACES_URL) +# OTLP Metrics Export +# Endpoint for metrics export (Prometheus-compatible via OTLP) +management.otlp.metrics.export.url=${OTEL_METRICS_URL:http://localhost:4318/v1/metrics} +# OTLP Tracing Export +# Endpoint for distributed trace export +management.opentelemetry.tracing.export.otlp.endpoint=${OTEL_TRACES_URL:http://localhost:4318/v1/traces} +# OTLP Logging Export +# Endpoint for log export (requires logback-spring.xml configuration) +management.opentelemetry.logging.export.otlp.endpoint=${OTEL_LOGS_URL:http://localhost:4318/v1/logs} + diff --git a/src/main/resources/application-stdio.properties b/src/main/resources/application-stdio.properties index b8864df..f72e9e8 100644 --- a/src/main/resources/application-stdio.properties +++ b/src/main/resources/application-stdio.properties @@ -7,6 +7,3 @@ spring.ai.mcp.server.stdio=true spring.autoconfigure.exclude=\ 
org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration,\ org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration -# Docker Compose integration is only useful in HTTP mode for local development. -# Disable it in STDIO mode to avoid startup delays and unexpected container management. -spring.docker.compose.enabled=false diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 1592a77..671b4ec 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -5,5 +5,5 @@ spring.ai.mcp.server.name=${spring.application.name} spring.ai.mcp.server.version=1.0.0 # Solr configuration solr.url=${SOLR_URL:http://localhost:8983/solr/} -# Enable virtual threads for improved concurrency +# Virtual threads spring.threads.virtual.enabled=true diff --git a/src/test/java/org/apache/solr/mcp/server/MainTest.java b/src/test/java/org/apache/solr/mcp/server/MainTest.java index 1b9ae42..f3a23c4 100644 --- a/src/test/java/org/apache/solr/mcp/server/MainTest.java +++ b/src/test/java/org/apache/solr/mcp/server/MainTest.java @@ -22,7 +22,6 @@ import org.apache.solr.mcp.server.search.SearchService; import org.junit.jupiter.api.Test; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.bean.override.mockito.MockitoBean; /** @@ -32,7 +31,6 @@ * dependencies. 
*/ @SpringBootTest -@ActiveProfiles("test") class MainTest { @MockitoBean diff --git a/src/test/java/org/apache/solr/mcp/server/TestcontainersConfiguration.java b/src/test/java/org/apache/solr/mcp/server/TestcontainersConfiguration.java index 0a0f91d..d4b7b58 100644 --- a/src/test/java/org/apache/solr/mcp/server/TestcontainersConfiguration.java +++ b/src/test/java/org/apache/solr/mcp/server/TestcontainersConfiguration.java @@ -27,9 +27,11 @@ public class TestcontainersConfiguration { private static final int SOLR_PORT = 8983; + static final String DEFAULT_SOLR_IMAGE = "solr:9.9-slim"; + @Bean SolrContainer solr() { - String solrImage = System.getProperty("solr.test.image"); + String solrImage = System.getProperty("solr.test.image", DEFAULT_SOLR_IMAGE); return new SolrContainer(DockerImageName.parse(solrImage)); } diff --git a/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceIntegrationTest.java b/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceIntegrationTest.java index c520bd0..cfa3127 100644 --- a/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceIntegrationTest.java +++ b/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceIntegrationTest.java @@ -125,23 +125,28 @@ void testGetCollectionStats() throws Exception { long timestampTime = metrics.timestamp().getTime(); assertTrue(currentTime - timestampTime < 10000, "Timestamp should be recent (within 10 seconds)"); - // Verify optional stats (cache and handler stats may be null, which is - // acceptable) - if (metrics.cacheStats() != null) { - CacheStats cacheStats = metrics.cacheStats(); - // Verify at least one cache type exists if cache stats are present - assertTrue( - cacheStats.queryResultCache() != null || cacheStats.documentCache() != null - || cacheStats.filterCache() != null, - "At least one cache type should be present if cache stats exist"); - } - - if (metrics.handlerStats() != null) { - HandlerStats handlerStats = 
metrics.handlerStats(); - // Verify at least one handler type exists if handler stats are present - assertTrue(handlerStats.selectHandler() != null || handlerStats.updateHandler() != null, - "At least one handler type should be present if handler stats exist"); - } + // Cache stats via Metrics API + assertNotNull(metrics.cacheStats(), "Cache stats should not be null"); + CacheStats cacheStats = metrics.cacheStats(); + assertNotNull(cacheStats.queryResultCache(), "Query result cache should be present"); + assertNotNull(cacheStats.documentCache(), "Document cache should be present"); + assertNotNull(cacheStats.filterCache(), "Filter cache should be present"); + + CacheInfo qrc = cacheStats.queryResultCache(); + assertNotNull(qrc.lookups(), "Query result cache lookups should not be null"); + assertTrue(qrc.lookups() >= 0, "Query result cache lookups should be non-negative"); + assertNotNull(qrc.size(), "Query result cache size should not be null"); + assertTrue(qrc.size() >= 0, "Query result cache size should be non-negative"); + + // Handler stats via Metrics API + assertNotNull(metrics.handlerStats(), "Handler stats should not be null"); + HandlerStats handlerStats = metrics.handlerStats(); + assertNotNull(handlerStats.selectHandler(), "Select handler should be present"); + assertNotNull(handlerStats.updateHandler(), "Update handler should be present"); + + HandlerInfo selectHandler = handlerStats.selectHandler(); + assertNotNull(selectHandler.requests(), "Select handler requests should not be null"); + assertTrue(selectHandler.requests() >= 0, "Select handler requests should be non-negative"); } @Test diff --git a/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceTest.java b/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceTest.java index 4f72424..f250978 100644 --- a/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceTest.java +++ 
b/src/test/java/org/apache/solr/mcp/server/collection/CollectionServiceTest.java @@ -19,9 +19,9 @@ import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.*; -import com.fasterxml.jackson.databind.ObjectMapper; import java.io.IOException; import java.lang.reflect.Method; import java.util.Arrays; @@ -40,6 +40,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import tools.jackson.databind.json.JsonMapper; @ExtendWith(MockitoExtension.class) class CollectionServiceTest { @@ -58,7 +59,7 @@ class CollectionServiceTest { private CollectionService collectionService; - private final ObjectMapper objectMapper = new ObjectMapper(); + private final JsonMapper objectMapper = JsonMapper.builder().build(); @BeforeEach void setUp() { @@ -71,84 +72,41 @@ void constructor_ShouldInitializeWithSolrClient() { assertNotNull(collectionService); } - @Test - void listCollections_WhenExceptionOccurs_ShouldReturnEmptyList() throws Exception { - // Given - mock throws exception - when(solrClient.request(any(), any())).thenThrow(new SolrServerException("Connection error")); - - // When - List result = collectionService.listCollections(); - - // Then - assertNotNull(result); - assertTrue(result.isEmpty()); - } - // Collection name extraction tests @Test void extractCollectionName_WithShardName_ShouldExtractCollectionName() { - // Given - String shardName = "films_shard1_replica_n1"; - - // When - String result = collectionService.extractCollectionName(shardName); - - // Then - assertEquals("films", result); + assertEquals("films", collectionService.extractCollectionName("films_shard1_replica_n1")); } @Test void extractCollectionName_WithMultipleShards_ShouldExtractCorrectly() { - // Given & When & Then assertEquals("products", 
collectionService.extractCollectionName("products_shard2_replica_n3")); assertEquals("users", collectionService.extractCollectionName("users_shard5_replica_n10")); } @Test void extractCollectionName_WithSimpleCollectionName_ShouldReturnUnchanged() { - // Given - String simpleName = "simple_collection"; - - // When - String result = collectionService.extractCollectionName(simpleName); - - // Then - assertEquals("simple_collection", result); + assertEquals("simple_collection", collectionService.extractCollectionName("simple_collection")); } @Test void extractCollectionName_WithNullInput_ShouldReturnNull() { - // When - String result = collectionService.extractCollectionName(null); - - // Then - assertNull(result); + assertNull(collectionService.extractCollectionName(null)); } @Test void extractCollectionName_WithEmptyString_ShouldReturnEmptyString() { - // When - String result = collectionService.extractCollectionName(""); - - // Then - assertEquals("", result); + assertEquals("", collectionService.extractCollectionName("")); } @Test void extractCollectionName_WithCollectionNameContainingUnderscore_ShouldOnlyExtractBeforeShard() { - // Given - collection name itself contains underscore - String complexName = "my_complex_collection_shard1_replica_n1"; - - // When - String result = collectionService.extractCollectionName(complexName); - - // Then - assertEquals("my_complex_collection", result); + assertEquals("my_complex_collection", + collectionService.extractCollectionName("my_complex_collection_shard1_replica_n1")); } @Test void extractCollectionName_EdgeCases_ShouldHandleCorrectly() { - // Test various edge cases assertEquals("a", collectionService.extractCollectionName("a_shard1")); assertEquals("collection", collectionService.extractCollectionName("collection_shard")); assertEquals("test_name", collectionService.extractCollectionName("test_name")); @@ -157,32 +115,19 @@ void extractCollectionName_EdgeCases_ShouldHandleCorrectly() { @Test void 
extractCollectionName_WithShardInMiddleOfName_ShouldExtractCorrectly() { - // Given - "shard" appears in collection name but not as suffix pattern - String name = "resharding_tasks"; - - // When - String result = collectionService.extractCollectionName(name); - - // Then - assertEquals("resharding_tasks", result, "Should not extract when '_shard' is not followed by number"); + assertEquals("resharding_tasks", collectionService.extractCollectionName("resharding_tasks"), + "Should not extract when '_shard' is not followed by number"); } @Test void extractCollectionName_WithMultipleOccurrencesOfShard_ShouldUseFirst() { - // Given - String name = "data_shard1_shard2_replica_n1"; - - // When - String result = collectionService.extractCollectionName(name); - - // Then - assertEquals("data", result, "Should use first occurrence of '_shard'"); + assertEquals("data", collectionService.extractCollectionName("data_shard1_shard2_replica_n1"), + "Should use first occurrence of '_shard'"); } // Health check tests @Test void checkHealth_WithHealthyCollection_ShouldReturnHealthyStatus() throws Exception { - // Given SolrDocumentList docList = new SolrDocumentList(); docList.setNumFound(100); @@ -191,10 +136,8 @@ void checkHealth_WithHealthyCollection_ShouldReturnHealthyStatus() throws Except when(solrClient.query(eq("test_collection"), any())).thenReturn(queryResponse); when(queryResponse.getResults()).thenReturn(docList); - // When SolrHealthStatus result = collectionService.checkHealth("test_collection"); - // Then assertNotNull(result); assertTrue(result.isHealthy()); assertNull(result.errorMessage()); @@ -204,13 +147,10 @@ void checkHealth_WithHealthyCollection_ShouldReturnHealthyStatus() throws Except @Test void checkHealth_WithUnhealthyCollection_ShouldReturnUnhealthyStatus() throws Exception { - // Given when(solrClient.ping("unhealthy_collection")).thenThrow(new SolrServerException("Connection failed")); - // When SolrHealthStatus result = 
collectionService.checkHealth("unhealthy_collection"); - // Then assertNotNull(result); assertFalse(result.isHealthy()); assertNotNull(result.errorMessage()); @@ -221,23 +161,18 @@ void checkHealth_WithUnhealthyCollection_ShouldReturnUnhealthyStatus() throws Ex @Test void checkHealth_WhenPingSucceedsButQueryFails_ShouldReturnUnhealthyStatus() throws Exception { - // Given when(solrClient.ping("test_collection")).thenReturn(pingResponse); when(solrClient.query(eq("test_collection"), any())).thenThrow(new IOException("Query failed")); - // When SolrHealthStatus result = collectionService.checkHealth("test_collection"); - // Then assertNotNull(result); assertFalse(result.isHealthy()); - assertNotNull(result.errorMessage()); assertTrue(result.errorMessage().contains("Query failed")); } @Test void checkHealth_WithEmptyCollection_ShouldReturnHealthyWithZeroDocuments() throws Exception { - // Given SolrDocumentList emptyDocList = new SolrDocumentList(); emptyDocList.setNumFound(0); @@ -246,37 +181,14 @@ void checkHealth_WithEmptyCollection_ShouldReturnHealthyWithZeroDocuments() thro when(solrClient.query(eq("empty_collection"), any())).thenReturn(queryResponse); when(queryResponse.getResults()).thenReturn(emptyDocList); - // When SolrHealthStatus result = collectionService.checkHealth("empty_collection"); - // Then assertNotNull(result); assertTrue(result.isHealthy()); assertEquals(0L, result.totalDocuments()); assertEquals(5, result.responseTime()); } - @Test - void checkHealth_WithSlowResponse_ShouldCaptureResponseTime() throws Exception { - // Given - SolrDocumentList docList = new SolrDocumentList(); - docList.setNumFound(1000); - - when(solrClient.ping("slow_collection")).thenReturn(pingResponse); - when(pingResponse.getElapsedTime()).thenReturn(5000L); // 5 seconds - when(solrClient.query(eq("slow_collection"), any())).thenReturn(queryResponse); - when(queryResponse.getResults()).thenReturn(docList); - - // When - SolrHealthStatus result = 
collectionService.checkHealth("slow_collection"); - - // Then - assertNotNull(result); - assertTrue(result.isHealthy()); - assertEquals(5000, result.responseTime()); - assertTrue(result.responseTime() > 1000, "Should capture slow response time"); - } - @Test void checkHealth_IOException() throws Exception { when(solrClient.ping("error_collection")).thenThrow(new IOException("Network error")); @@ -291,7 +203,6 @@ void checkHealth_IOException() throws Exception { // Query stats tests @Test void buildQueryStats_WithValidResponse_ShouldExtractStats() { - // Given SolrDocumentList docList = new SolrDocumentList(); docList.setNumFound(250); docList.setStart(0); @@ -300,10 +211,8 @@ void buildQueryStats_WithValidResponse_ShouldExtractStats() { when(queryResponse.getQTime()).thenReturn(25); when(queryResponse.getResults()).thenReturn(docList); - // When QueryStats result = collectionService.buildQueryStats(queryResponse); - // Then assertNotNull(result); assertEquals(25, result.queryTime()); assertEquals(250, result.totalResults()); @@ -313,7 +222,6 @@ void buildQueryStats_WithValidResponse_ShouldExtractStats() { @Test void buildQueryStats_WithNullMaxScore_ShouldHandleGracefully() { - // Given SolrDocumentList docList = new SolrDocumentList(); docList.setNumFound(100); docList.setStart(10); @@ -322,14 +230,10 @@ void buildQueryStats_WithNullMaxScore_ShouldHandleGracefully() { when(queryResponse.getQTime()).thenReturn(15); when(queryResponse.getResults()).thenReturn(docList); - // When QueryStats result = collectionService.buildQueryStats(queryResponse); - // Then assertNotNull(result); assertEquals(15, result.queryTime()); - assertEquals(100, result.totalResults()); - assertEquals(10, result.start()); assertNull(result.maxScore()); } @@ -385,24 +289,10 @@ void validateCollectionExists() throws Exception { assertFalse((boolean) method.invoke(spyService, "non_existent")); } - @Test - void validateCollectionExists_WithException() throws Exception { - CollectionService 
spyService = spy(collectionService); - doReturn(Collections.emptyList()).when(spyService).listCollections(); - - Method method = CollectionService.class.getDeclaredMethod("validateCollectionExists", String.class); - method.setAccessible(true); - - assertFalse((boolean) method.invoke(spyService, "any_collection")); - } - - // Cache metrics tests + // Cache metrics tests (Metrics API) @Test void getCacheMetrics_WithNonExistentCollection_ShouldReturnNull() { - // When - Mock will not have collection configured CacheStats result = collectionService.getCacheMetrics("nonexistent"); - - // Then assertNull(result); } @@ -411,14 +301,31 @@ void getCacheMetrics_Success() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - NamedList mbeans = createMockCacheData(); - when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); + NamedList metricsResponse = createMockMetricsCacheData("test_collection"); + when(solrClient.request(any(SolrRequest.class))).thenReturn(metricsResponse); CacheStats result = spyService.getCacheMetrics("test_collection"); assertNotNull(result); assertNotNull(result.queryResultCache()); assertEquals(100L, result.queryResultCache().lookups()); + assertEquals(80L, result.queryResultCache().hits()); + } + + @Test + void getCacheMetrics_AllCacheTypes() throws Exception { + CollectionService spyService = spy(collectionService); + doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); + + NamedList metricsResponse = createMockMetricsCacheData("test_collection"); + when(solrClient.request(any(SolrRequest.class))).thenReturn(metricsResponse); + + CacheStats result = spyService.getCacheMetrics("test_collection"); + + assertNotNull(result); + assertNotNull(result.queryResultCache()); + assertNotNull(result.documentCache()); + assertNotNull(result.filterCache()); } @Test @@ -426,47 +333,37 @@ void 
getCacheMetrics_CollectionNotFound() { CollectionService spyService = spy(collectionService); doReturn(Collections.emptyList()).when(spyService).listCollections(); - CacheStats result = spyService.getCacheMetrics("non_existent"); - - assertNull(result); + assertNull(spyService.getCacheMetrics("non_existent")); } @Test void getCacheMetrics_SolrServerException() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - when(solrClient.request(any(SolrRequest.class))).thenThrow(new SolrServerException("Error")); - CacheStats result = spyService.getCacheMetrics("test_collection"); - - assertNull(result); + assertNull(spyService.getCacheMetrics("test_collection")); } @Test void getCacheMetrics_IOException() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - when(solrClient.request(any(SolrRequest.class))).thenThrow(new IOException("IO Error")); - CacheStats result = spyService.getCacheMetrics("test_collection"); - - assertNull(result); + assertNull(spyService.getCacheMetrics("test_collection")); } @Test - void getCacheMetrics_EmptyStats() throws Exception { + void getCacheMetrics_EmptyMetrics() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - NamedList mbeans = new NamedList<>(); - mbeans.add("CACHE", new NamedList<>()); - when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); - - CacheStats result = spyService.getCacheMetrics("test_collection"); + NamedList response = new NamedList<>(); + response.add("metrics", new NamedList<>()); + when(solrClient.request(any(SolrRequest.class))).thenReturn(response); - assertNull(result); + assertNull(spyService.getCacheMetrics("test_collection")); } @Test @@ -474,77 +371,17 @@ void getCacheMetrics_WithShardName() 
throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("films_shard1_replica_n1")).when(spyService).listCollections(); - NamedList mbeans = createMockCacheData(); - when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); + NamedList metricsResponse = createMockMetricsCacheData("films"); + when(solrClient.request(any(SolrRequest.class))).thenReturn(metricsResponse); CacheStats result = spyService.getCacheMetrics("films_shard1_replica_n1"); - - assertNotNull(result); - } - - @Test - void extractCacheStats() throws Exception { - NamedList mbeans = createMockCacheData(); - Method method = CollectionService.class.getDeclaredMethod("extractCacheStats", NamedList.class); - method.setAccessible(true); - - CacheStats result = (CacheStats) method.invoke(collectionService, mbeans); - - assertNotNull(result.queryResultCache()); - assertEquals(100L, result.queryResultCache().lookups()); - assertEquals(80L, result.queryResultCache().hits()); - } - - @Test - void extractCacheStats_AllCacheTypes() throws Exception { - NamedList mbeans = createCompleteMockCacheData(); - Method method = CollectionService.class.getDeclaredMethod("extractCacheStats", NamedList.class); - method.setAccessible(true); - - CacheStats result = (CacheStats) method.invoke(collectionService, mbeans); - - assertNotNull(result.queryResultCache()); - assertNotNull(result.documentCache()); - assertNotNull(result.filterCache()); - } - - @Test - void extractCacheStats_NullCacheCategory() throws Exception { - NamedList mbeans = new NamedList<>(); - mbeans.add("CACHE", null); - - Method method = CollectionService.class.getDeclaredMethod("extractCacheStats", NamedList.class); - method.setAccessible(true); - - CacheStats result = (CacheStats) method.invoke(collectionService, mbeans); - assertNotNull(result); - assertNull(result.queryResultCache()); - assertNull(result.documentCache()); - assertNull(result.filterCache()); - } - - @Test - void isCacheStatsEmpty() 
throws Exception { - Method method = CollectionService.class.getDeclaredMethod("isCacheStatsEmpty", CacheStats.class); - method.setAccessible(true); - - CacheStats emptyStats = new CacheStats(null, null, null); - assertTrue((boolean) method.invoke(collectionService, emptyStats)); - assertTrue((boolean) method.invoke(collectionService, (CacheStats) null)); - - CacheStats nonEmptyStats = new CacheStats(new CacheInfo(100L, null, null, null, null, null), null, null); - assertFalse((boolean) method.invoke(collectionService, nonEmptyStats)); } - // Handler metrics tests + // Handler metrics tests (Metrics API) @Test void getHandlerMetrics_WithNonExistentCollection_ShouldReturnNull() { - // When - Mock will not have collection configured - HandlerStats result = collectionService.getHandlerMetrics("nonexistent"); - - // Then - assertNull(result); + assertNull(collectionService.getHandlerMetrics("nonexistent")); } @Test @@ -552,13 +389,16 @@ void getHandlerMetrics_Success() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - NamedList mbeans = createMockHandlerData(); - when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); + NamedList metricsResponse = createMockMetricsHandlerData("test_collection"); + when(solrClient.request(any(SolrRequest.class))).thenReturn(metricsResponse); HandlerStats result = spyService.getHandlerMetrics("test_collection"); assertNotNull(result); assertNotNull(result.selectHandler()); + assertEquals(500L, result.selectHandler().requests()); + assertNotNull(result.updateHandler()); + assertEquals(250L, result.updateHandler().requests()); } @Test @@ -566,47 +406,37 @@ void getHandlerMetrics_CollectionNotFound() { CollectionService spyService = spy(collectionService); doReturn(Collections.emptyList()).when(spyService).listCollections(); - HandlerStats result = spyService.getHandlerMetrics("non_existent"); - - assertNull(result); + 
assertNull(spyService.getHandlerMetrics("non_existent")); } @Test void getHandlerMetrics_SolrServerException() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - when(solrClient.request(any(SolrRequest.class))).thenThrow(new SolrServerException("Error")); - HandlerStats result = spyService.getHandlerMetrics("test_collection"); - - assertNull(result); + assertNull(spyService.getHandlerMetrics("test_collection")); } @Test void getHandlerMetrics_IOException() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - when(solrClient.request(any(SolrRequest.class))).thenThrow(new IOException("IO Error")); - HandlerStats result = spyService.getHandlerMetrics("test_collection"); - - assertNull(result); + assertNull(spyService.getHandlerMetrics("test_collection")); } @Test - void getHandlerMetrics_EmptyStats() throws Exception { + void getHandlerMetrics_EmptyMetrics() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("test_collection")).when(spyService).listCollections(); - NamedList mbeans = new NamedList<>(); - mbeans.add("QUERYHANDLER", new NamedList<>()); - when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); - - HandlerStats result = spyService.getHandlerMetrics("test_collection"); + NamedList response = new NamedList<>(); + response.add("metrics", new NamedList<>()); + when(solrClient.request(any(SolrRequest.class))).thenReturn(response); - assertNull(result); + assertNull(spyService.getHandlerMetrics("test_collection")); } @Test @@ -614,66 +444,11 @@ void getHandlerMetrics_WithShardName() throws Exception { CollectionService spyService = spy(collectionService); doReturn(Arrays.asList("films_shard1_replica_n1")).when(spyService).listCollections(); - NamedList mbeans = createMockHandlerData(); - 
when(solrClient.request(any(SolrRequest.class))).thenReturn(mbeans); + NamedList metricsResponse = createMockMetricsHandlerData("films"); + when(solrClient.request(any(SolrRequest.class))).thenReturn(metricsResponse); HandlerStats result = spyService.getHandlerMetrics("films_shard1_replica_n1"); - - assertNotNull(result); - } - - @Test - void extractHandlerStats() throws Exception { - NamedList mbeans = createMockHandlerData(); - Method method = CollectionService.class.getDeclaredMethod("extractHandlerStats", NamedList.class); - method.setAccessible(true); - - HandlerStats result = (HandlerStats) method.invoke(collectionService, mbeans); - - assertNotNull(result.selectHandler()); - assertEquals(500L, result.selectHandler().requests()); - } - - @Test - void extractHandlerStats_BothHandlers() throws Exception { - NamedList mbeans = createCompleteHandlerData(); - Method method = CollectionService.class.getDeclaredMethod("extractHandlerStats", NamedList.class); - method.setAccessible(true); - - HandlerStats result = (HandlerStats) method.invoke(collectionService, mbeans); - - assertNotNull(result.selectHandler()); - assertNotNull(result.updateHandler()); - assertEquals(500L, result.selectHandler().requests()); - assertEquals(250L, result.updateHandler().requests()); - } - - @Test - void extractHandlerStats_NullHandlerCategory() throws Exception { - NamedList mbeans = new NamedList<>(); - mbeans.add("QUERYHANDLER", null); - - Method method = CollectionService.class.getDeclaredMethod("extractHandlerStats", NamedList.class); - method.setAccessible(true); - - HandlerStats result = (HandlerStats) method.invoke(collectionService, mbeans); - assertNotNull(result); - assertNull(result.selectHandler()); - assertNull(result.updateHandler()); - } - - @Test - void isHandlerStatsEmpty() throws Exception { - Method method = CollectionService.class.getDeclaredMethod("isHandlerStatsEmpty", HandlerStats.class); - method.setAccessible(true); - - HandlerStats emptyStats = new 
HandlerStats(null, null); - assertTrue((boolean) method.invoke(collectionService, emptyStats)); - assertTrue((boolean) method.invoke(collectionService, (HandlerStats) null)); - - HandlerStats nonEmptyStats = new HandlerStats(new HandlerInfo(100L, null, null, null, null, null), null); - assertFalse((boolean) method.invoke(collectionService, nonEmptyStats)); } // List collections tests @@ -725,123 +500,6 @@ void listCollections_IOError() throws Exception { assertTrue(result.isEmpty()); } - // Helper methods - private NamedList createMockCacheData() { - NamedList mbeans = new NamedList<>(); - NamedList cacheCategory = new NamedList<>(); - NamedList queryResultCache = new NamedList<>(); - NamedList queryStats = new NamedList<>(); - - queryStats.add("lookups", 100L); - queryStats.add("hits", 80L); - queryStats.add("hitratio", 0.8f); - queryStats.add("inserts", 20L); - queryStats.add("evictions", 5L); - queryStats.add("size", 100L); - queryResultCache.add("stats", queryStats); - cacheCategory.add("queryResultCache", queryResultCache); - mbeans.add("CACHE", cacheCategory); - - return mbeans; - } - - private NamedList createCompleteMockCacheData() { - NamedList mbeans = new NamedList<>(); - NamedList cacheCategory = new NamedList<>(); - - // Query Result Cache - NamedList queryResultCache = new NamedList<>(); - NamedList queryStats = new NamedList<>(); - queryStats.add("lookups", 100L); - queryStats.add("hits", 80L); - queryStats.add("hitratio", 0.8f); - queryStats.add("inserts", 20L); - queryStats.add("evictions", 5L); - queryStats.add("size", 100L); - queryResultCache.add("stats", queryStats); - - // Document Cache - NamedList documentCache = new NamedList<>(); - NamedList docStats = new NamedList<>(); - docStats.add("lookups", 200L); - docStats.add("hits", 150L); - docStats.add("hitratio", 0.75f); - docStats.add("inserts", 50L); - docStats.add("evictions", 10L); - docStats.add("size", 180L); - documentCache.add("stats", docStats); - - // Filter Cache - NamedList 
filterCache = new NamedList<>(); - NamedList filterStats = new NamedList<>(); - filterStats.add("lookups", 150L); - filterStats.add("hits", 120L); - filterStats.add("hitratio", 0.8f); - filterStats.add("inserts", 30L); - filterStats.add("evictions", 8L); - filterStats.add("size", 140L); - filterCache.add("stats", filterStats); - - cacheCategory.add("queryResultCache", queryResultCache); - cacheCategory.add("documentCache", documentCache); - cacheCategory.add("filterCache", filterCache); - mbeans.add("CACHE", cacheCategory); - - return mbeans; - } - - private NamedList createMockHandlerData() { - NamedList mbeans = new NamedList<>(); - NamedList queryHandlerCategory = new NamedList<>(); - NamedList selectHandler = new NamedList<>(); - NamedList selectStats = new NamedList<>(); - - selectStats.add("requests", 500L); - selectStats.add("errors", 5L); - selectStats.add("timeouts", 2L); - selectStats.add("totalTime", 10000L); - selectStats.add("avgTimePerRequest", 20.0f); - selectStats.add("avgRequestsPerSecond", 25.0f); - selectHandler.add("stats", selectStats); - queryHandlerCategory.add("/select", selectHandler); - mbeans.add("QUERYHANDLER", queryHandlerCategory); - - return mbeans; - } - - private NamedList createCompleteHandlerData() { - NamedList mbeans = new NamedList<>(); - NamedList queryHandlerCategory = new NamedList<>(); - - // Select Handler - NamedList selectHandler = new NamedList<>(); - NamedList selectStats = new NamedList<>(); - selectStats.add("requests", 500L); - selectStats.add("errors", 5L); - selectStats.add("timeouts", 2L); - selectStats.add("totalTime", 10000L); - selectStats.add("avgTimePerRequest", 20.0f); - selectStats.add("avgRequestsPerSecond", 25.0f); - selectHandler.add("stats", selectStats); - - // Update Handler - NamedList updateHandler = new NamedList<>(); - NamedList updateStats = new NamedList<>(); - updateStats.add("requests", 250L); - updateStats.add("errors", 2L); - updateStats.add("timeouts", 1L); - updateStats.add("totalTime", 
5000L); - updateStats.add("avgTimePerRequest", 20.0f); - updateStats.add("avgRequestsPerSecond", 50.0f); - updateHandler.add("stats", updateStats); - - queryHandlerCategory.add("/select", selectHandler); - queryHandlerCategory.add("/update", updateHandler); - mbeans.add("QUERYHANDLER", queryHandlerCategory); - - return mbeans; - } - // createCollection tests @Test void createCollection_success() throws Exception { @@ -849,6 +507,108 @@ void createCollection_success() throws Exception { CollectionCreationResult result = collectionService.createCollection("new_collection", "_default", 1, 1); + assertNotNull(result); + assertTrue(result.success()); + assertEquals("new_collection", result.name()); + } + + @Test + void createCollection_withDefaults() throws Exception { + when(solrClient.request(any(), isNull())).thenReturn(new NamedList<>()); + + CollectionCreationResult result = collectionService.createCollection("new_collection", null, null, null); + + assertTrue(result.success()); + assertEquals("new_collection", result.name()); + } + + @Test + void createCollection_blankName_throwsIllegalArgument() { + assertThrows(IllegalArgumentException.class, () -> collectionService.createCollection(" ", null, null, null)); + } + + @Test + void createCollection_emptyName_throwsIllegalArgument() { + assertThrows(IllegalArgumentException.class, () -> collectionService.createCollection("", null, null, null)); + } + + @Test + void createCollection_solrException_propagates() throws Exception { + when(solrClient.request(any(), isNull())).thenThrow(new SolrServerException("Solr error")); + + assertThrows(SolrServerException.class, + () -> collectionService.createCollection("fail_collection", null, null, null)); + } + + // Helper methods to create Metrics API response format + private NamedList createMockMetricsCacheData(String collection) { + NamedList response = new NamedList<>(); + NamedList metrics = new NamedList<>(); + NamedList coreMetrics = new NamedList<>(); + + NamedList 
qrcStats = new NamedList<>(); + qrcStats.add("lookups", 100L); + qrcStats.add("hits", 80L); + qrcStats.add("hitratio", 0.8f); + qrcStats.add("inserts", 20L); + qrcStats.add("evictions", 5L); + qrcStats.add("size", 100L); + coreMetrics.add("CACHE.searcher.queryResultCache", qrcStats); + + NamedList dcStats = new NamedList<>(); + dcStats.add("lookups", 200L); + dcStats.add("hits", 150L); + dcStats.add("hitratio", 0.75f); + dcStats.add("inserts", 50L); + dcStats.add("evictions", 10L); + dcStats.add("size", 180L); + coreMetrics.add("CACHE.searcher.documentCache", dcStats); + + NamedList fcStats = new NamedList<>(); + fcStats.add("lookups", 150L); + fcStats.add("hits", 120L); + fcStats.add("hitratio", 0.8f); + fcStats.add("inserts", 30L); + fcStats.add("evictions", 8L); + fcStats.add("size", 140L); + coreMetrics.add("CACHE.searcher.filterCache", fcStats); + + metrics.add("solr.core." + collection, coreMetrics); + response.add("metrics", metrics); + return response; + } + + private NamedList createMockMetricsHandlerData(String collection) { + NamedList response = new NamedList<>(); + NamedList metrics = new NamedList<>(); + NamedList coreMetrics = new NamedList<>(); + + coreMetrics.add("QUERY./select.requests", 500L); + coreMetrics.add("QUERY./select.errors", 5L); + coreMetrics.add("QUERY./select.timeouts", 2L); + coreMetrics.add("QUERY./select.totalTime", 10000L); + coreMetrics.add("QUERY./select.avgTimePerRequest", 20.0f); + coreMetrics.add("QUERY./select.avgRequestsPerSecond", 25.0f); + + coreMetrics.add("UPDATE./update.requests", 250L); + coreMetrics.add("UPDATE./update.errors", 2L); + coreMetrics.add("UPDATE./update.timeouts", 1L); + coreMetrics.add("UPDATE./update.totalTime", 5000L); + coreMetrics.add("UPDATE./update.avgTimePerRequest", 20.0f); + coreMetrics.add("UPDATE./update.avgRequestsPerSecond", 50.0f); + + metrics.add("solr.core." 
+ collection, coreMetrics); + response.add("metrics", metrics); + return response; + } + + // createCollection tests (additional) + @Test + void createCollection_success_withExplicitParams() throws Exception { + when(solrClient.request(any(), isNull())).thenReturn(new NamedList<>()); + + CollectionCreationResult result = collectionService.createCollection("new_collection", "_default", 1, 1); + assertNotNull(result); assertTrue(result.success()); assertEquals("new_collection", result.name()); diff --git a/src/test/java/org/apache/solr/mcp/server/config/SolrConfigTest.java b/src/test/java/org/apache/solr/mcp/server/config/SolrConfigTest.java index 44247fb..731c4f0 100644 --- a/src/test/java/org/apache/solr/mcp/server/config/SolrConfigTest.java +++ b/src/test/java/org/apache/solr/mcp/server/config/SolrConfigTest.java @@ -49,8 +49,7 @@ void testSolrClientConfiguration() { // Verify that the SolrClient is using the correct URL // Note: SolrConfig normalizes the URL to have trailing slash, but - // HttpJdkSolrClient removes - // it + // HttpJdkSolrClient removes it var httpSolrClient = assertInstanceOf(HttpJdkSolrClient.class, solrClient); String expectedUrl = "http://" + solrContainer.getHost() + ":" + solrContainer.getMappedPort(8983) + "/solr"; assertEquals(expectedUrl, httpSolrClient.getBaseURL()); diff --git a/src/test/java/org/apache/solr/mcp/server/config/SolrConfigUrlNormalizationTest.java b/src/test/java/org/apache/solr/mcp/server/config/SolrConfigUrlNormalizationTest.java index 2ca312f..7de35b6 100644 --- a/src/test/java/org/apache/solr/mcp/server/config/SolrConfigUrlNormalizationTest.java +++ b/src/test/java/org/apache/solr/mcp/server/config/SolrConfigUrlNormalizationTest.java @@ -18,19 +18,20 @@ import static org.junit.jupiter.api.Assertions.*; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpJdkSolrClient; +import org.junit.jupiter.api.Test; import 
org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.json.JsonTest; +import tools.jackson.databind.json.JsonMapper; @JsonTest class SolrConfigUrlNormalizationTest { @Autowired - private ObjectMapper objectMapper; + private JsonMapper jsonMapper; @ParameterizedTest @CsvSource({"http://localhost:8983, http://localhost:8983/solr", @@ -38,15 +39,88 @@ class SolrConfigUrlNormalizationTest { "http://localhost:8983/solr, http://localhost:8983/solr", "http://localhost:8983/solr/, http://localhost:8983/solr", "http://localhost:8983/custom/solr/, http://localhost:8983/custom/solr"}) - void testUrlNormalization(String inputUrl, String expectedUrl) throws Exception { + void testUrlNormalization(String inputUrl, String expectedUrl) { SolrConfigurationProperties testProperties = new SolrConfigurationProperties(inputUrl); SolrConfig solrConfig = new SolrConfig(); - try (SolrClient client = solrConfig.solrClient(testProperties, new JsonResponseParser(objectMapper))) { - assertNotNull(client); + SolrClient client = solrConfig.solrClient(testProperties); + assertNotNull(client); - var httpClient = assertInstanceOf(HttpJdkSolrClient.class, client); - assertEquals(expectedUrl, httpClient.getBaseURL()); + var httpClient = assertInstanceOf(HttpJdkSolrClient.class, client); + assertEquals(expectedUrl, httpClient.getBaseURL()); + + try { + client.close(); + } catch (Exception _) { + // Ignore close errors in test + } + } + + @Test + void testUrlWithoutTrailingSlash() { + SolrConfigurationProperties testProperties = new SolrConfigurationProperties("http://localhost:8983"); + SolrConfig solrConfig = new SolrConfig(); + + SolrClient client = solrConfig.solrClient(testProperties); + HttpJdkSolrClient httpClient = (HttpJdkSolrClient) client; + + assertEquals("http://localhost:8983/solr", httpClient.getBaseURL()); + + try { + 
client.close(); + } catch (Exception _) { + // Ignore close errors in test + } + } + + @Test + void testUrlWithTrailingSlashButNoSolrPath() { + SolrConfigurationProperties testProperties = new SolrConfigurationProperties("http://localhost:8983/"); + SolrConfig solrConfig = new SolrConfig(); + + SolrClient client = solrConfig.solrClient(testProperties); + HttpJdkSolrClient httpClient = (HttpJdkSolrClient) client; + + assertEquals("http://localhost:8983/solr", httpClient.getBaseURL()); + + try { + client.close(); + } catch (Exception _) { + // Ignore close errors in test + } + } + + @Test + void testUrlWithSolrPathButNoTrailingSlash() { + SolrConfigurationProperties testProperties = new SolrConfigurationProperties("http://localhost:8983/solr"); + SolrConfig solrConfig = new SolrConfig(); + + SolrClient client = solrConfig.solrClient(testProperties); + HttpJdkSolrClient httpClient = (HttpJdkSolrClient) client; + + assertEquals("http://localhost:8983/solr", httpClient.getBaseURL()); + + try { + client.close(); + } catch (Exception _) { + // Ignore close errors in test + } + } + + @Test + void testUrlAlreadyProperlyFormatted() { + SolrConfigurationProperties testProperties = new SolrConfigurationProperties("http://localhost:8983/solr/"); + SolrConfig solrConfig = new SolrConfig(); + + SolrClient client = solrConfig.solrClient(testProperties); + HttpJdkSolrClient httpClient = (HttpJdkSolrClient) client; + + assertEquals("http://localhost:8983/solr", httpClient.getBaseURL()); + + try { + client.close(); + } catch (Exception _) { + // Ignore close errors in test } } } diff --git a/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageHttpIntegrationTest.java b/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageHttpIntegrationTest.java index 224f53d..c73602f 100644 --- a/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageHttpIntegrationTest.java +++ 
b/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageHttpIntegrationTest.java @@ -95,7 +95,7 @@ class DockerImageHttpIntegrationTest { // Docker image name and tag from build-info.properties private static final String DOCKER_IMAGE = BuildInfoReader.getDockerImageName(); - private static final String SOLR_IMAGE = System.getProperty("solr.test.image"); + private static final String SOLR_IMAGE = System.getProperty("solr.test.image", "solr:9.9-slim"); private static final int HTTP_PORT = 8080; // Network for container communication diff --git a/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageStdioIntegrationTest.java b/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageStdioIntegrationTest.java index 26e6a27..38c9875 100644 --- a/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageStdioIntegrationTest.java +++ b/src/test/java/org/apache/solr/mcp/server/containerization/DockerImageStdioIntegrationTest.java @@ -85,7 +85,7 @@ class DockerImageStdioIntegrationTest { // Docker image name and tag from build-info.properties private static final String DOCKER_IMAGE = BuildInfoReader.getDockerImageName(); - private static final String SOLR_IMAGE = System.getProperty("solr.test.image"); + private static final String SOLR_IMAGE = System.getProperty("solr.test.image", "solr:9.9-slim"); // Network for container communication private static final Network network = Network.newNetwork(); diff --git a/src/test/java/org/apache/solr/mcp/server/metadata/SchemaServiceTest.java b/src/test/java/org/apache/solr/mcp/server/metadata/SchemaServiceTest.java index 964c3a5..bb70cb2 100644 --- a/src/test/java/org/apache/solr/mcp/server/metadata/SchemaServiceTest.java +++ b/src/test/java/org/apache/solr/mcp/server/metadata/SchemaServiceTest.java @@ -21,7 +21,6 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; -import com.fasterxml.jackson.databind.ObjectMapper; import 
java.io.IOException; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -33,6 +32,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import tools.jackson.databind.ObjectMapper; /** * Comprehensive test suite for the SchemaService class. Tests schema retrieval diff --git a/src/test/java/org/apache/solr/mcp/server/observability/InMemoryTracingTestConfiguration.java b/src/test/java/org/apache/solr/mcp/server/observability/InMemoryTracingTestConfiguration.java new file mode 100644 index 0000000..56af46f --- /dev/null +++ b/src/test/java/org/apache/solr/mcp/server/observability/InMemoryTracingTestConfiguration.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.mcp.server.observability; + +import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; + +/** + * Minimal test configuration that provides InMemorySpanExporter bean. + *

+ * Spring Boot's opentelemetry-test starter requires this to be explicitly + * configured. + */ +@TestConfiguration +public class InMemoryTracingTestConfiguration { + + @Bean + public InMemorySpanExporter inMemorySpanExporter() { + return InMemorySpanExporter.create(); + } + +}