loggingEventEnhancerClassNames = new HashSet<>();
+ private LogbackBatchingSettings logbackBatchingSettings = null;
+
+ /**
+ * Sets a threshold for log severity level to flush all log entries that were batched so far.
+ *
+ * <p>Defaults to {@code Level.OFF}, i.e. no severity-based flushing (see getFlushLevel()).
+ *
+ * @param flushLevel Logback log level
+ */
+ public void setFlushLevel(Level flushLevel) {
+ this.flushLevel = flushLevel;
+ }
+
+ /**
+ * Sets the LOG_ID part of the log name for which the logs are ingested.
+ *
+ * <p>Defaults to "java.log" when unset (see getLogName()).
+ *
+ * @param log LOG_ID part of the name
+ */
+ public void setLog(String log) {
+ this.log = log;
+ }
+
+ /**
+ * Sets the name of the monitored resource (Optional). If not defined the appender will try to
+ * identify the resource type automatically. Currently supported resource types include "gae_app",
+ * "gce_instance", "k8s_container", "cloud_run_revision" and "cloud_function". If the appender
+ * fails to identify the resource type, it will be set to "global".
+ *
+ * <p>Must be one of the supported resource types.
+ *
+ * @param resourceType the name of the monitored resource.
+ */
+ public void setResourceType(String resourceType) {
+ this.resourceType = resourceType;
+ }
+
+ /**
+ * This method is obsolete because of a potential security risk. Use the {@link
+ * #setCredentials(GoogleCredentials)} method instead.
+ *
+ * <p>If you know that you will be loading credential configurations of a specific type, it is
+ * recommended to use a credential-type-specific `fromStream()` method. This will ensure that an
+ * unexpected credential type with potential for malicious intent is not loaded unintentionally.
+ * You might still have to do validation for certain credential types. Please follow the
+ * recommendation for that method.
+ *
+ * <p>If you are loading your credential configuration from an untrusted source and have not
+ * mitigated the risks (e.g. by validating the configuration yourself), make these changes as soon
+ * as possible to prevent security risks to your environment.
+ *
+ * <p>Regardless of the method used, it is always your responsibility to validate configurations
+ * received from external sources.
+ *
+ * <p>Sets the path to the credential file. If not set the appender will use {@link
+ * GoogleCredentials#getApplicationDefault()} to authenticate.
+ *
+ * @param credentialsFile the path to the credentials file.
+ */
+ @ObsoleteApi(
+ "This method is obsolete because of a potential security risk. Use the setCredentials() method instead")
+ public void setCredentialsFile(String credentialsFile) {
+ this.credentialsFile = credentialsFile;
+ }
+
+ /**
+ * Sets the credential to use. If not set the appender will use {@link
+ * GoogleCredentials#getApplicationDefault()} to authenticate.
+ *
+ * @param credentials the GoogleCredentials to set
+ * @throws NullPointerException if {@code credentials} is null
+ */
+ public void setCredentials(GoogleCredentials credentials) {
+ Preconditions.checkNotNull(credentials, "Credentials cannot be null");
+ this.credentials = credentials;
+ }
+
+ /**
+ * Sets project ID to be used to customize log destination name for written log entries.
+ *
+ * <p>When unset, the project ID is taken from the {@link LoggingOptions} defaults.
+ *
+ * @param projectId The project ID to be used to construct the resource destination name for log
+ * entries.
+ */
+ public void setLogDestinationProjectId(String projectId) {
+ this.logDestinationProjectId = projectId;
+ }
+
+ /**
+ * Sets the log ingestion mode. It can be one of the {@link Synchronicity} values.
+ *
+ * <p>Defaults to {@code Synchronicity.ASYNC}
+ *
+ * @param flag the new ingestion mode.
+ */
+ public void setWriteSynchronicity(Synchronicity flag) {
+ this.writeSyncFlag = flag;
+ }
+
+ /**
+ * Sets the automatic population of metadata fields for ingested logs.
+ *
+ * <p>Defaults to {@code true}.
+ *
+ * @param flag the metadata auto-population flag.
+ */
+ public void setAutoPopulateMetadata(boolean flag) {
+ autoPopulateMetadata = flag;
+ }
+
+ /**
+ * Sets the redirect of the appender's output to STDOUT instead of ingesting logs to Cloud Logging
+ * using Logging API.
+ *
+ * <p>Defaults to {@code false}.
+ *
+ * @param flag the redirect flag.
+ */
+ public void setRedirectToStdout(boolean flag) {
+ redirectToStdout = flag;
+ }
+
+ /**
+ * Sets the {@link LogbackBatchingSettings} to be used for the asynchronous mode call(s) to
+ * Logging API
+ *
+ * <p>Defaults to {@code null}.
+ *
+ * @param batchingSettings the {@link LogbackBatchingSettings} to be used for asynchronous mode
+ * call(s) to Logging API
+ */
+ public void setLogbackBatchingSettings(LogbackBatchingSettings batchingSettings) {
+ logbackBatchingSettings = batchingSettings;
+ }
+
+ /**
+ * Sets the flag indicating if a batch's valid entries should be written even if some other entry
+ * failed due to an error.
+ *
+ * <p>Defaults to {@code true}.
+ *
+ * @param flag the partialSuccess flag.
+ */
+ public void setPartialSuccess(boolean flag) {
+ partialSuccess = flag;
+ }
+
+ /** Add extra labels using classes that implement {@link LoggingEnhancer}. */
+ public void addEnhancer(String enhancerClassName) {
+ this.enhancerClassNames.add(enhancerClassName);
+ }
+
+ /** Add extra labels using classes that implement {@link LoggingEventEnhancer}. */
+ public void addLoggingEventEnhancer(String enhancerClassName) {
+ this.loggingEventEnhancerClassNames.add(enhancerClassName);
+ }
+
+ /**
+ * Returns the current value of the ingestion mode.
+ *
+ * <p>The method is deprecated. Use appender configuration to set up the ingestion
+ *
+ * @return a {@link Synchronicity} value of the ingestion module.
+ * @deprecated use the appender configuration instead of querying this value.
+ */
+ @Deprecated
+ public Synchronicity getWriteSynchronicity() {
+ return (this.writeSyncFlag != null) ? this.writeSyncFlag : Synchronicity.ASYNC;
+ }
+
+ /** Lazily resolves the monitored resource; only runs when metadata auto-population is enabled. */
+ private void setupMonitoredResource() {
+ if (monitoredResource == null && autoPopulateMetadata) {
+ monitoredResource = MonitoredResourceUtil.getResource(getProjectId(), resourceType);
+ }
+ }
+
+ /** Overrides the resolved monitored resource; intended for tests only. */
+ @InternalApi("Visible for testing")
+ void setupMonitoredResource(MonitoredResource monitoredResource) {
+ this.monitoredResource = monitoredResource;
+ }
+
+ /** Returns the configured flush level, defaulting to {@code Level.OFF} when unset. */
+ private Level getFlushLevel() {
+ return (flushLevel != null) ? flushLevel : Level.OFF;
+ }
+
+ /** Returns the configured LOG_ID, defaulting to "java.log" when unset. */
+ private String getLogName() {
+ return (log != null) ? log : "java.log";
+ }
+
+ /** Instantiates the configured {@link LoggingEnhancer} classes. */
+ private List<LoggingEnhancer> getLoggingEnhancers() {
+ return getEnhancers(enhancerClassNames, LoggingEnhancer.class);
+ }
+
+ /**
+ * Instantiates the configured {@link LoggingEventEnhancer} classes, falling back to the default
+ * enhancers when none were configured.
+ */
+ private List<LoggingEventEnhancer> getLoggingEventEnhancers() {
+ if (loggingEventEnhancerClassNames.isEmpty()) {
+ return DEFAULT_LOGGING_EVENT_ENHANCERS;
+ } else {
+ return getEnhancers(loggingEventEnhancerClassNames, LoggingEventEnhancer.class);
+ }
+ }
+
+ /**
+ * Loads and instantiates each named class as a subtype of {@code classOfT} via its no-arg
+ * constructor. Class names that are null, unloadable, or not instantiable are silently skipped.
+ *
+ * @param classNames the configured enhancer class names; may be null
+ * @param classOfT the enhancer interface the classes must implement
+ * @return the successfully instantiated enhancers (possibly empty, never null)
+ */
+ private <T> List<T> getEnhancers(Set<String> classNames, Class<T> classOfT) {
+ List<T> enhancers = new ArrayList<>();
+ if (classNames != null) {
+ for (String className : classNames) {
+ if (className != null) {
+ try {
+ T enhancer =
+ Loader.loadClass(className.trim())
+ .asSubclass(classOfT)
+ .getDeclaredConstructor()
+ .newInstance();
+ enhancers.add(enhancer);
+ } catch (Exception ex) {
+ // invalid className: ignore
+ }
+ }
+ }
+ }
+ return enhancers;
+ }
+
+ /** Initialize and configure the cloud logging service. */
+ @Override
+ public synchronized void start() {
+ if (isStarted()) {
+ return;
+ }
+
+ setupMonitoredResource();
+
+ // Write options are fixed for the appender's lifetime once started.
+ defaultWriteOptions =
+ new WriteOption[] {
+ WriteOption.logName(getLogName()),
+ WriteOption.resource(monitoredResource),
+ WriteOption.partialSuccess(partialSuccess)
+ };
+ Level flushLevel = getFlushLevel();
+ if (flushLevel != Level.OFF) {
+ getLogging().setFlushSeverity(severityFor(flushLevel));
+ }
+ loggingEnhancers = new ArrayList<>();
+ List<LoggingEnhancer> resourceEnhancers = MonitoredResourceUtil.getResourceEnhancers();
+ loggingEnhancers.addAll(resourceEnhancers);
+ loggingEnhancers.addAll(getLoggingEnhancers());
+ loggingEventEnhancers = new ArrayList<>();
+ loggingEventEnhancers.addAll(getLoggingEventEnhancers());
+
+ super.start();
+ }
+
+ /** Returns the project ID resolved from the {@link LoggingOptions}. */
+ String getProjectId() {
+ return getLoggingOptions().getProjectId();
+ }
+
+ /**
+ * Converts the Logback event to a {@link LogEntry} and either writes it via the Logging API or
+ * prints it to STDOUT as structured JSON, depending on {@code redirectToStdout}. The first call
+ * also emits a one-time instrumentation diagnostic entry.
+ */
+ @Override
+ protected void append(ILoggingEvent e) {
+ List<LogEntry> entriesList = new ArrayList<>();
+ entriesList.add(logEntryFor(e));
+ // Check if instrumentation was already added - if not, create a log entry with instrumentation
+ // data
+ if (!setInstrumentationStatus(true)) {
+ entriesList.add(
+ Instrumentation.createDiagnosticEntry(
+ JAVA_LOGBACK_LIBRARY_NAME, DEFAULT_INSTRUMENTATION_VERSION));
+ }
+ Iterable<LogEntry> entries = entriesList;
+ if (autoPopulateMetadata) {
+ // Exclusion prefixes keep logging/JDK frames out of the populated source location.
+ entries =
+ getLogging()
+ .populateMetadata(
+ entries,
+ monitoredResource,
+ "com.google.cloud.logging",
+ "jdk",
+ "sun",
+ "java",
+ "ch.qos.logback");
+ }
+ if (redirectToStdout) {
+ for (LogEntry entry : entries) {
+ System.out.println(entry.toStructuredJsonString());
+ }
+ } else {
+ getLogging().write(entries, defaultWriteOptions);
+ }
+ }
+
+ /** Closes the Logging client (best-effort) and stops the appender. */
+ @Override
+ public synchronized void stop() {
+ if (logging != null) {
+ try {
+ logging.close();
+ } catch (Exception ex) {
+ // best-effort close: failures during shutdown are deliberately ignored
+ }
+ }
+ logging = null;
+ super.stop();
+ }
+
+ /**
+ * Lazily creates the {@link Logging} client on first use and applies the configured write
+ * synchronicity.
+ *
+ * <p>NOTE(review): this is double-checked locking; it is only safe if the {@code logging} field
+ * is declared {@code volatile} — confirm at the field declaration (not visible here).
+ */
+ Logging getLogging() {
+ if (logging == null) {
+ synchronized (this) {
+ if (logging == null) {
+ logging = getLoggingOptions().getService();
+ logging.setWriteSynchronicity(writeSyncFlag);
+ }
+ }
+ }
+ return logging;
+ }
+
+ /**
+ * Flushes any pending asynchronous logging writes. No-op when the appender is not started.
+ *
+ * @deprecated retained for compatibility; flushing is managed via the flush severity.
+ */
+ @Deprecated
+ public void flush() {
+ if (!isStarted()) {
+ return;
+ }
+ synchronized (this) {
+ getLogging().flush();
+ }
+ }
+
+ /**
+ * Gets the {@link LoggingOptions} to use for this {@link LoggingAppender}.
+ *
+ * <p>Built lazily on first call: explicit credentials win over a credentials file; batching
+ * settings come from {@link LogbackBatchingSettings} when configured.
+ *
+ * @throws RuntimeException if the configured credentials file cannot be read
+ */
+ protected LoggingOptions getLoggingOptions() {
+ if (loggingOptions == null) {
+ LoggingOptions.Builder builder = LoggingOptions.newBuilder();
+ builder.setProjectId(logDestinationProjectId);
+ if (credentials != null) {
+ builder.setCredentials(credentials);
+ } else if (!Strings.isNullOrEmpty(credentialsFile)) {
+ try {
+ builder.setCredentials(
+ GoogleCredentials.fromStream(Files.newInputStream(Paths.get(credentialsFile))));
+ } catch (IOException e) {
+ throw new RuntimeException(
+ String.format(
+ "Could not read credentials file %s. Please verify that the file exists and is a valid Google credentials file.",
+ credentialsFile),
+ e);
+ }
+ }
+ // opt-out metadata auto-population to control it in the appender code
+ builder.setAutoPopulateMetadata(false);
+ builder.setBatchingSettings(
+ this.logbackBatchingSettings != null ? this.logbackBatchingSettings.build() : null);
+ loggingOptions = builder.build();
+ }
+ return loggingOptions;
+ }
+
+ /**
+ * Builds a {@link LogEntry} from the Logback event: formatted message plus stack trace as the
+ * JSON payload, severity mapped from the Logback level, level/logger labels, and any configured
+ * enhancers applied last.
+ */
+ private LogEntry logEntryFor(ILoggingEvent e) {
+ StringBuilder payload = new StringBuilder().append(e.getFormattedMessage()).append('\n');
+ writeStack(e.getThrowableProxy(), "", payload);
+
+ Level level = e.getLevel();
+ Severity severity = severityFor(level);
+
+ Map<String, Object> jsonContent = new HashMap<>();
+ jsonContent.put("message", payload.toString().trim());
+ if (severity == Severity.ERROR) {
+ // Mark ERROR entries for Error Reporting ingestion.
+ jsonContent.put("@type", TYPE);
+ }
+ LogEntry.Builder builder =
+ LogEntry.newBuilder(Payload.JsonPayload.of(jsonContent))
+ .setTimestamp(Instant.ofEpochMilli(e.getTimeStamp()))
+ .setSeverity(severity);
+ builder
+ .addLabel(LEVEL_NAME_KEY, level.toString())
+ .addLabel(LEVEL_VALUE_KEY, String.valueOf(level.toInt()))
+ .addLabel(LOGGER_NAME_KEY, e.getLoggerName());
+
+ if (loggingEnhancers != null) {
+ for (LoggingEnhancer enhancer : loggingEnhancers) {
+ enhancer.enhanceLogEntry(builder);
+ }
+ }
+
+ if (loggingEventEnhancers != null) {
+ for (LoggingEventEnhancer enhancer : loggingEventEnhancers) {
+ enhancer.enhanceLogEntry(builder, e);
+ }
+ }
+
+ return builder.build();
+ }
+
+ /**
+ * Recursively appends the throwable's class name, message and stack frames to {@code payload},
+ * eliding frames shared with the enclosing throwable, then descends into the cause chain with a
+ * "caused by: " prefix.
+ */
+ @InternalApi("Visible for testing")
+ static void writeStack(IThrowableProxy throwProxy, String prefix, StringBuilder payload) {
+ if (throwProxy == null) {
+ return;
+ }
+ payload
+ .append(prefix)
+ .append(throwProxy.getClassName())
+ .append(": ")
+ .append(throwProxy.getMessage())
+ .append('\n');
+ StackTraceElementProxy[] trace = throwProxy.getStackTraceElementProxyArray();
+ if (trace == null) {
+ // !Defensive: getStackTraceElementProxyArray is not expected to return null.
+ trace = new StackTraceElementProxy[0];
+ }
+
+ int commonFrames = throwProxy.getCommonFrames();
+ int printFrames = trace.length - commonFrames;
+ for (int i = 0; i < printFrames; i++) {
+ payload.append("    ").append(trace[i]).append('\n');
+ }
+ if (commonFrames != 0) {
+ payload.append("    ... ").append(commonFrames).append(" common frames elided\n");
+ }
+
+ writeStack(throwProxy.getCause(), "caused by: ", payload);
+ }
+
+ /**
+ * Transforms Logback logging levels to Cloud severity.
+ *
+ * <p>TRACE and DEBUG both map to {@code Severity.DEBUG}; unknown levels map to
+ * {@code Severity.DEFAULT}.
+ *
+ * @param level Logback logging level
+ * @return Cloud severity level
+ */
+ private static Severity severityFor(Level level) {
+ switch (level.toInt()) {
+ case Level.TRACE_INT:
+ case Level.DEBUG_INT:
+ return Severity.DEBUG;
+ case Level.INFO_INT:
+ return Severity.INFO;
+ case Level.WARN_INT:
+ return Severity.WARNING;
+ case Level.ERROR_INT:
+ return Severity.ERROR;
+ default:
+ return Severity.DEFAULT;
+ }
+ }
+
+ /**
+ * The package-private helper method used to set the flag which indicates if instrumentation info
+ * already written or not.
+ *
+ * <p>NOTE(review): the unsynchronized fast-path read before the lock is only safe if
+ * {@code instrumentationAdded} is declared {@code volatile} — confirm at the field declaration
+ * (not visible here).
+ *
+ * @return The value of the flag before it was set.
+ */
+ static boolean setInstrumentationStatus(boolean value) {
+ if (instrumentationAdded == value) return instrumentationAdded;
+ synchronized (instrumentationLock) {
+ boolean current = instrumentationAdded;
+ instrumentationAdded = value;
+ return current;
+ }
+ }
+}
diff --git a/java-logging-logback/src/main/java/com/google/cloud/logging/logback/LoggingEventEnhancer.java b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/LoggingEventEnhancer.java
new file mode 100644
index 000000000000..bf47a77b1c29
--- /dev/null
+++ b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/LoggingEventEnhancer.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import com.google.cloud.logging.LogEntry;
+
+/**
+ * An enhancer for {@linkplain ILoggingEvent} log entries. Used to add custom labels to the {@link
+ * LogEntry.Builder}.
+ */
+public interface LoggingEventEnhancer {
+ void enhanceLogEntry(LogEntry.Builder builder, ILoggingEvent e);
+}
diff --git a/java-logging-logback/src/main/java/com/google/cloud/logging/logback/MDCEventEnhancer.java b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/MDCEventEnhancer.java
new file mode 100644
index 000000000000..c9e45168ce7a
--- /dev/null
+++ b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/MDCEventEnhancer.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import com.google.cloud.logging.LogEntry;
+import java.util.Map;
+
+/**
+ * MDCEventEnhancer takes values found in the MDC property map and adds them as labels to the {@link
+ * LogEntry}. This {@link LoggingEventEnhancer} is turned on by default. If you wish to filter which
+ * MDC values get added as labels to your {@link LogEntry}, implement a {@link LoggingEventEnhancer}
+ * and add its classpath to your {@code logback.xml}. If any {@link LoggingEventEnhancer} is added
+ * this class is no longer registered.
+ */
+final class MDCEventEnhancer implements LoggingEventEnhancer {
+
+ @Override
+ public void enhanceLogEntry(LogEntry.Builder builder, ILoggingEvent e) {
+ // SLF4J's MDC property map is Map<String, String>; skip null keys/values defensively.
+ for (Map.Entry<String, String> entry : e.getMDCPropertyMap().entrySet()) {
+ if (null != entry.getKey() && null != entry.getValue()) {
+ builder.addLabel(entry.getKey(), entry.getValue());
+ }
+ }
+ }
+}
diff --git a/java-logging-logback/src/main/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancer.java b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancer.java
new file mode 100644
index 000000000000..8fedcf9f3bb3
--- /dev/null
+++ b/java-logging-logback/src/main/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancer.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import com.google.cloud.logging.LogEntry;
+import org.slf4j.MDC;
+
+/** Adds support for grouping logs by incoming http request */
+public class TraceLoggingEventEnhancer implements LoggingEventEnhancer {
+
+ // A key used by Cloud Logging for trace Id
+ private static final String TRACE_ID = "logging.googleapis.trace";
+
+ /**
+ * Set the Trace ID associated with any logging done by the current thread.
+ *
+ * @param id The traceID, in the form projects/[PROJECT_ID]/traces/[TRACE_ID]
+ */
+ public static void setCurrentTraceId(String id) {
+ MDC.put(TRACE_ID, id);
+ }
+
+ /** Clearing a trace Id from the MDC */
+ public static void clearTraceId() {
+ MDC.remove(TRACE_ID);
+ }
+
+ /**
+ * Get the Trace ID associated with any logging done by the current thread.
+ *
+ * @return id The traceID
+ */
+ public static String getCurrentTraceId() {
+ return MDC.get(TRACE_ID);
+ }
+
+ @Override
+ public void enhanceLogEntry(LogEntry.Builder builder, ILoggingEvent e) {
+ // getMDCPropertyMap() yields String values, so no Object/toString round-trip is needed.
+ String traceId = e.getMDCPropertyMap().get(TRACE_ID);
+ if (traceId != null) {
+ builder.setTrace(traceId);
+ }
+ }
+}
diff --git a/java-logging-logback/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-logging-logback/reflect-config.json b/java-logging-logback/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-logging-logback/reflect-config.json
new file mode 100644
index 000000000000..9d249db03f61
--- /dev/null
+++ b/java-logging-logback/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-logging-logback/reflect-config.json
@@ -0,0 +1,53 @@
+[
+ {
+ "name":"ch.qos.logback.classic.Level",
+ "methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
+ },
+ {
+ "name":"ch.qos.logback.classic.filter.ThresholdFilter",
+ "queryAllPublicMethods":true,
+ "methods":[
+ {"name":"<init>","parameterTypes":[] },
+ {"name":"setLevel","parameterTypes":["java.lang.String"] }
+ ]
+ },
+ {
+ "name":"ch.qos.logback.core.UnsynchronizedAppenderBase",
+ "methods":[{"name":"addFilter","parameterTypes":["ch.qos.logback.core.filter.Filter"] }]
+ },
+ {
+ "name":"com.google.cloud.logging.logback.LogbackBatchingSettings",
+ "queryAllPublicMethods":true,
+ "methods":[
+ {"name":"<init>","parameterTypes":[] },
+ {"name":"setDelayThreshold","parameterTypes":["java.lang.Long"] },
+ {"name":"setElementCountThreshold","parameterTypes":["java.lang.Long"] },
+ {"name":"setLimitExceededBehavior","parameterTypes":["com.google.api.gax.batching.FlowController$LimitExceededBehavior"] },
+ {"name":"setMaxOutstandingElementCount","parameterTypes":["java.lang.Long"] },
+ {"name":"setMaxOutstandingRequestBytes","parameterTypes":["java.lang.Long"] },
+ {"name":"setRequestByteThreshold","parameterTypes":["java.lang.Long"] }
+ ]
+ },
+ {
+ "name":"com.google.cloud.logging.logback.LoggingAppender",
+ "queryAllPublicMethods":true,
+ "methods":[
+ {"name":"<init>","parameterTypes":[] },
+ {"name":"setAutoPopulateMetadata","parameterTypes":["boolean"] },
+ {"name":"setCredentialsFile","parameterTypes":["java.lang.String"] },
+ {"name":"setCredentials","parameterTypes":["com.google.auth.oauth2.GoogleCredentials"] },
+ {"name":"setFlushLevel","parameterTypes":["ch.qos.logback.classic.Level"] },
+ {"name":"setLog","parameterTypes":["java.lang.String"] },
+ {"name":"setLogDestinationProjectId","parameterTypes":["java.lang.String"] },
+ {"name":"setLogbackBatchingSettings","parameterTypes":["com.google.cloud.logging.logback.LogbackBatchingSettings"] },
+ {"name":"setPartialSuccess","parameterTypes":["boolean"] },
+ {"name":"setRedirectToStdout","parameterTypes":["boolean"] },
+ {"name":"setResourceType","parameterTypes":["java.lang.String"] },
+ {"name":"setWriteSynchronicity","parameterTypes":["com.google.cloud.logging.Synchronicity"] }
+ ]
+ },
+ {
+ "name":"java.lang.Long",
+ "methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
+ }
+]
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderLogbackTest.java b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderLogbackTest.java
new file mode 100644
index 000000000000..9ebbcf1b0409
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderLogbackTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import ch.qos.logback.classic.LoggerContext;
+import ch.qos.logback.classic.joran.JoranConfigurator;
+import ch.qos.logback.core.joran.spi.JoranException;
+import com.google.api.gax.batching.FlowController.LimitExceededBehavior;
+import com.google.cloud.logging.LoggingOptions;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LoggingAppenderLogbackTest {
+ // Verifies that LoggingOptions (batching, flow control, metadata opt-out) are wired correctly
+ // when the appender is configured from a logback.xml file rather than programmatically.
+ @Test
+ public void testLoggingOptionsFromLogbackXMLFileConfig() throws JoranException {
+ LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
+ JoranConfigurator jc = new JoranConfigurator();
+ jc.setContext(context);
+ context.reset();
+ // Expected values below must match the test fixture logback.xml.
+ jc.doConfigure("src/test/java/com/google/cloud/logging/logback/logback.xml");
+ Logger logger = LoggerFactory.getLogger(LoggingAppenderLogbackTest.class);
+ assertThat(logger.getName())
+ .isEqualTo("com.google.cloud.logging.logback.LoggingAppenderLogbackTest");
+ LoggingAppender appender = (LoggingAppender) context.getLogger("ROOT").getAppender("CLOUD");
+ LoggingOptions options = appender.getLoggingOptions();
+ assertThat(options.getAutoPopulateMetadata()).isEqualTo(false);
+ assertThat(options.getBatchingSettings().getDelayThreshold().toMillis()).isEqualTo(500);
+ assertThat(options.getBatchingSettings().getElementCountThreshold()).isEqualTo(100);
+ assertThat(options.getBatchingSettings().getIsEnabled()).isEqualTo(true);
+ assertThat(options.getBatchingSettings().getRequestByteThreshold()).isEqualTo(1000);
+ assertThat(options.getBatchingSettings().getFlowControlSettings().getLimitExceededBehavior())
+ .isEqualTo(LimitExceededBehavior.Ignore);
+ assertThat(
+ options.getBatchingSettings().getFlowControlSettings().getMaxOutstandingElementCount())
+ .isEqualTo(10000);
+ assertThat(
+ options.getBatchingSettings().getFlowControlSettings().getMaxOutstandingRequestBytes())
+ .isEqualTo(100000);
+ }
+}
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderTest.java b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderTest.java
new file mode 100644
index 000000000000..5b97542ce2a5
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/LoggingAppenderTest.java
@@ -0,0 +1,586 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.filter.ThresholdFilter;
+import ch.qos.logback.classic.spi.ILoggingEvent;
+import ch.qos.logback.classic.spi.LoggingEvent;
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.MonitoredResource;
+import com.google.cloud.Timestamp;
+import com.google.cloud.logging.Instrumentation;
+import com.google.cloud.logging.LogEntry;
+import com.google.cloud.logging.Logging;
+import com.google.cloud.logging.Logging.WriteOption;
+import com.google.cloud.logging.LoggingEnhancer;
+import com.google.cloud.logging.Payload;
+import com.google.cloud.logging.Payload.JsonPayload;
+import com.google.cloud.logging.Payload.Type;
+import com.google.cloud.logging.Severity;
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.protobuf.ListValue;
+import com.google.protobuf.Value;
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.time.Instant;
+import java.util.Map;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockRunner;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.slf4j.MDC;
+
+@RunWith(EasyMockRunner.class)
+public class LoggingAppenderTest {
+ private static final String PROJECT_ID = "test-project";
+ private static final String CRED_FILE_PROJECT_ID = "project-12345";
+ private static final String OVERRIDDEN_PROJECT_ID = "some-project-id";
+ private static final String DUMMY_CRED_FILE_PATH =
+ "src/test/java/com/google/cloud/logging/logback/dummy-credentials.json";
+ private static final Payload.JsonPayload JSON_PAYLOAD =
+ Payload.JsonPayload.of(ImmutableMap.of("message", "this is a test"));
+ private static final Payload.JsonPayload JSON_ERROR_PAYLOAD =
+ Payload.JsonPayload.of(
+ ImmutableMap.of(
+ "message",
+ "this is a test",
+ "@type",
+ "type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent"));
+ private static final MonitoredResource DEFAULT_RESOURCE =
+ MonitoredResource.of("global", ImmutableMap.of("project_id", PROJECT_ID));
+ // Expected entries mirror what LoggingAppender.logEntryFor() produces for each level.
+ private static final LogEntry WARN_ENTRY =
+ LogEntry.newBuilder(JSON_PAYLOAD)
+ .setTimestamp(Instant.ofEpochMilli(100000L))
+ .setSeverity(Severity.WARNING)
+ .setLabels(
+ new ImmutableMap.Builder<String, String>()
+ .put("levelName", "WARN")
+ .put("levelValue", String.valueOf(30000L))
+ .put("loggerName", LoggingAppenderTest.class.getName())
+ // .put("test-label-1", "test-value-1")
+ // .put("test-label-2", "test-value-2")
+ .build())
+ .build();
+ private static final LogEntry ERROR_ENTRY =
+ LogEntry.newBuilder(JSON_ERROR_PAYLOAD)
+ .setTimestamp(Instant.ofEpochMilli(100000L))
+ .setSeverity(Severity.ERROR)
+ .setLabels(
+ new ImmutableMap.Builder<String, String>()
+ .put("levelName", "ERROR")
+ .put("levelValue", String.valueOf(40000L))
+ .put("loggerName", LoggingAppenderTest.class.getName())
+ .build())
+ .build();
+ private static final LogEntry INFO_ENTRY =
+ LogEntry.newBuilder(JSON_PAYLOAD)
+ .setTimestamp(Instant.ofEpochMilli(100000L))
+ .setSeverity(Severity.INFO)
+ .setLabels(
+ new ImmutableMap.Builder<String, String>()
+ .put("levelName", "INFO")
+ .put("levelValue", String.valueOf(20000L))
+ .put("loggerName", LoggingAppenderTest.class.getName())
+ .put("mdc1", "value1")
+ .put("mdc2", "value2")
+ .build())
+ .build();
+
+ private Logging logging;
+ private LoggingAppender loggingAppender;
+
+ /** Test enhancer that stamps a fixed label via the event-aware interface. */
+ static class CustomLoggingEventEnhancer implements LoggingEventEnhancer {
+
+ @Override
+ public void enhanceLogEntry(LogEntry.Builder builder, ILoggingEvent e) {
+ builder.addLabel("foo", "bar");
+ }
+ }
+
+ /** Test enhancer that stamps a fixed label via the event-free interface. */
+ static class CustomLoggingEnhancer implements LoggingEnhancer {
+
+ @Override
+ public void enhanceLogEntry(LogEntry.Builder builder) {
+ builder.addLabel("foo", "bar");
+ }
+ }
+
+ /** Appender subclass that pins the project ID and injects the mocked Logging client. */
+ class TestLoggingAppender extends LoggingAppender {
+ @Override
+ String getProjectId() {
+ return PROJECT_ID;
+ }
+
+ @Override
+ Logging getLogging() {
+ return logging;
+ }
+ }
+
+ @Before
+ public void setUp() {
+ // Mark instrumentation as already written so tests don't receive the extra diagnostic entry.
+ LoggingAppender.setInstrumentationStatus(true);
+ logging = EasyMock.createStrictMock(Logging.class);
+ loggingAppender = new TestLoggingAppender();
+ loggingAppender.setAutoPopulateMetadata(false);
+ }
+
+ // The write options LoggingAppender.start() builds by default (log name, resource, partialSuccess).
+ private final WriteOption[] defaultWriteOptions =
+ new WriteOption[] {
+ WriteOption.logName("java.log"),
+ WriteOption.resource(
+ MonitoredResource.newBuilder("global")
+ .setLabels(
+ new ImmutableMap.Builder<String, String>()
+ .put("project_id", PROJECT_ID)
+ .build())
+ .build()),
+ WriteOption.partialSuccess(true),
+ };
+
+ // Verifies that configuring a flush level propagates the mapped severity to the client
+ // and that the written entry matches the expected WARN entry.
+ @Test
+ public void testFlushLevelConfigUpdatesLoggingFlushSeverity() {
+ logging.setFlushSeverity(Severity.WARNING);
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ replay(logging);
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.WARN, timestamp.getSeconds());
+ // error is the default, updating to warn for test
+ loggingAppender.setFlushLevel(Level.WARN);
+ loggingAppender.start();
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ assertThat(capturedArgument.getValue().iterator().hasNext()).isTrue();
+ assertThat(capturedArgument.getValue().iterator().next()).isEqualTo(WARN_ENTRY);
+ }
+
+ @Test
+ public void testFlushLevelConfigSupportsFlushLevelOff() {
+ loggingAppender.setFlushLevel(Level.OFF);
+ loggingAppender.start();
+ // NOTE(review): logging is a strict mock that was never replayed here; calling
+ // getFlushSeverity() in record mode returns null, which doubles as "no flush severity set".
+ Severity foundSeverity = logging.getFlushSeverity();
+ assertThat(foundSeverity).isEqualTo(null);
+ }
+
+ @Test
+ public void testDefaultFlushLevelOff() {
+ // Without an explicit flush level, start() must not call setFlushSeverity on the client.
+ loggingAppender.start();
+ Severity foundSeverity = logging.getFlushSeverity();
+ assertThat(foundSeverity).isEqualTo(null);
+ }
+
+ @Test
+ public void testFilterLogsOnlyLogsAtOrAboveLogLevel() {
+ Capture> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent1 = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ ThresholdFilter thresholdFilter = new ThresholdFilter();
+ thresholdFilter.setLevel("ERROR");
+ thresholdFilter.start();
+ loggingAppender.addFilter(thresholdFilter);
+ loggingAppender.start();
+ // info event does not get logged
+ loggingAppender.doAppend(loggingEvent1);
+ LoggingEvent loggingEvent2 = createLoggingEvent(Level.ERROR, timestamp.getSeconds());
+ // error event gets logged
+ loggingAppender.doAppend(loggingEvent2);
+ verify(logging);
+ assertThat(capturedArgument.getValue().iterator().hasNext()).isTrue();
+ assertThat(capturedArgument.getValue().iterator().next()).isEqualTo(ERROR_ENTRY);
+ }
+
+ @Test
+ public void testPartialSuccessOverrideHasExpectedValue() {
+ Capture<WriteOption> logNameArg = Capture.newInstance();
+ Capture<WriteOption> resourceArg = Capture.newInstance();
+ Capture<WriteOption> partialSuccessArg = Capture.newInstance();
+ logging.write(
+ EasyMock.<Iterable<LogEntry>>anyObject(),
+ capture(logNameArg),
+ capture(resourceArg),
+ capture(partialSuccessArg));
+ expectLastCall().once();
+ replay(logging);
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.ERROR, timestamp.getSeconds());
+ loggingAppender.doAppend(loggingEvent);
+
+ assertThat(logNameArg.getValue()).isEqualTo(defaultWriteOptions[0]);
+ // TODO(chingor): Fix this test to work on GCE and locally
+ // assertThat(resourceArg.getValue()).isEqualTo(defaultWriteOptions[1]);
+ assertThat(partialSuccessArg.getValue()).isEqualTo(defaultWriteOptions[2]);
+ }
+
+ @Test
+ public void testDefaultWriteOptionsHasExpectedDefaults() {
+ Capture<WriteOption> partialSuccessArg = Capture.newInstance();
+ logging.write(
+ EasyMock.<Iterable<LogEntry>>anyObject(),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ capture(partialSuccessArg));
+ expectLastCall().once();
+ replay(logging);
+ loggingAppender.setPartialSuccess(false);
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.ERROR, timestamp.getSeconds());
+ loggingAppender.doAppend(loggingEvent);
+ assertThat(partialSuccessArg.getValue()).isEqualTo(WriteOption.partialSuccess(false));
+ }
+
+ @Test
+ public void testMdcValuesAreConvertedToLabels() {
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingEvent.setMDCPropertyMap(ImmutableMap.of("mdc1", "value1", "mdc2", "value2"));
+ loggingAppender.start();
+ // info event does not get logged
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ assertThat(capturedArgument.getValue().iterator().hasNext()).isTrue();
+ assertThat(capturedArgument.getValue().iterator().next()).isEqualTo(INFO_ENTRY);
+ }
+
+ @Test
+ public void testCreateLoggingOptionsWithValidCredentials() {
+ LoggingAppender appender = new LoggingAppender();
+ appender.setCredentials(GoogleCredentials.newBuilder().build());
+ // ServiceOptions requires a projectId to be set. Normally this is determined by the
+ // GoogleCredentials (Credential set above is a dummy value with no ProjectId).
+ appender.setLogDestinationProjectId(PROJECT_ID);
+ appender.getLoggingOptions();
+ }
+
+ @Test
+ public void testCreateLoggingOptionsWithNullCredentials() {
+ LoggingAppender appender = new LoggingAppender();
+ assertThrows(NullPointerException.class, () -> appender.setCredentials(null));
+ }
+
+ @Test(expected = RuntimeException.class)
+ public void testCreateLoggingOptionsWithInvalidCredentials() {
+ final String nonExistentFile = "/path/to/non/existent/file";
+ LoggingAppender appender = new LoggingAppender();
+ appender.setCredentialsFile(nonExistentFile);
+ appender.getLoggingOptions();
+ }
+
+ @Test
+ public void testCreateLoggingOptionsWithCredentials() {
+ // Try to build LoggingOptions with file based credentials.
+ LoggingAppender appender = new LoggingAppender();
+ appender.setCredentialsFile(DUMMY_CRED_FILE_PATH);
+ assertThat(appender.getLoggingOptions().getProjectId()).isEqualTo(CRED_FILE_PROJECT_ID);
+ }
+
+ @Test
+ public void testCreateLoggingOptionsWithDestination() {
+ // Try to build LoggingOptions with file based credentials.
+ LoggingAppender appender = new LoggingAppender();
+ appender.setCredentialsFile(DUMMY_CRED_FILE_PATH);
+ appender.setLogDestinationProjectId(OVERRIDDEN_PROJECT_ID);
+ assertThat(appender.getLoggingOptions().getProjectId()).isEqualTo(OVERRIDDEN_PROJECT_ID);
+ }
+
+ private LoggingEvent createLoggingEvent(Level level, long timestamp) {
+ LoggingEvent loggingEvent = new LoggingEvent();
+ loggingEvent.setMessage("this is a test");
+ loggingEvent.setLevel(level);
+ loggingEvent.setTimeStamp(timestamp);
+ loggingEvent.setLoggerName(this.getClass().getName());
+ return loggingEvent;
+ }
+
+ @Test
+ public void testMdcValuesAreConvertedToLabelsWithPassingNullValues() {
+ MDC.put("mdc1", "value1");
+ MDC.put("mdc2", null);
+ MDC.put("mdc3", "value3");
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingAppender.start();
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ MDC.remove("mdc1");
+ MDC.remove("mdc3");
+ Map<String, String> capturedArgumentMap =
+ capturedArgument.getValue().iterator().next().getLabels();
+ assertThat(capturedArgumentMap.get("mdc1")).isEqualTo("value1");
+ assertThat(capturedArgumentMap.get("mdc2")).isNull();
+ assertThat(capturedArgumentMap.get("mdc3")).isEqualTo("value3");
+ }
+
+ @Test
+ public void testAddCustomLoggingEventEnhancers() {
+ MDC.put("mdc1", "value1");
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingAppender.addLoggingEventEnhancer(CustomLoggingEventEnhancer.class.getName());
+ loggingAppender.start();
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ MDC.remove("mdc1");
+ Map<String, String> capturedArgumentMap =
+ capturedArgument.getValue().iterator().next().getLabels();
+ assertThat(capturedArgumentMap.get("mdc1")).isNull();
+ assertThat(capturedArgumentMap.get("foo")).isEqualTo("bar");
+ }
+
+ @Test
+ public void testAddCustomLoggingEnhancer() {
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+ loggingAppender.addEnhancer(CustomLoggingEnhancer.class.getName());
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.WARN, timestamp.getSeconds());
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ Map<String, String> capturedArgumentMap =
+ capturedArgument.getValue().iterator().next().getLabels();
+ assertThat(capturedArgumentMap.get("foo")).isEqualTo("bar");
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void testFlush() {
+ logging.write(
+ EasyMock.<Iterable<LogEntry>>anyObject(),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().times(2);
+ logging.flush();
+ replay(logging);
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent firstLoggingEvent = createLoggingEvent(Level.WARN, timestamp.getSeconds());
+ LoggingEvent secondLoggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingAppender.doAppend(firstLoggingEvent);
+ loggingAppender.doAppend(secondLoggingEvent);
+ loggingAppender.flush();
+ verify(logging);
+ }
+
+ @Test
+ public void testAutoPopulationEnabled() {
+ Capture<Iterable<LogEntry>> capturedLogEntries = Capture.newInstance();
+ EasyMock.expect(
+ logging.populateMetadata(
+ capture(capturedLogEntries),
+ EasyMock.eq(DEFAULT_RESOURCE),
+ EasyMock.eq("com.google.cloud.logging"),
+ EasyMock.eq("jdk"),
+ EasyMock.eq("sun"),
+ EasyMock.eq("java"),
+ EasyMock.eq("ch.qos.logback")))
+ .andReturn(ImmutableList.of(INFO_ENTRY))
+ .once();
+ // it is impossible to define expectation for varargs using a single anyObject() matcher
+ // see the EasyMock bug https://github.com/easymock/easymock/issues/130.
+ // the following mock uses the known fact that the method pass two WriteOption arguments
+ // the arguments should be replaced with a single anyObject() matchers when the bug is fixed
+ logging.write(
+ EasyMock.<Iterable<LogEntry>>anyObject(),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ expectLastCall().once();
+ replay(logging);
+
+ loggingAppender.setupMonitoredResource(DEFAULT_RESOURCE);
+ loggingAppender.setAutoPopulateMetadata(true);
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingEvent.setMDCPropertyMap(ImmutableMap.of("mdc1", "value1", "mdc2", "value2"));
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ LogEntry testLogEntry = capturedLogEntries.getValue().iterator().next();
+ assertThat(testLogEntry).isEqualTo(INFO_ENTRY);
+ }
+
+ @Test
+ public void testRedirectToStdoutEnabled() {
+ EasyMock.expect(
+ logging.populateMetadata(
+ EasyMock.<Iterable<LogEntry>>anyObject(),
+ EasyMock.anyObject(MonitoredResource.class),
+ EasyMock.anyString(),
+ EasyMock.anyString(),
+ EasyMock.anyString(),
+ EasyMock.anyString(),
+ EasyMock.anyString()))
+ .andReturn(ImmutableList.of(INFO_ENTRY))
+ .once();
+ replay(logging);
+
+ ByteArrayOutputStream bout = new ByteArrayOutputStream();
+ PrintStream out = new PrintStream(bout);
+ System.setOut(out);
+ loggingAppender.setupMonitoredResource(DEFAULT_RESOURCE);
+ loggingAppender.setAutoPopulateMetadata(true);
+ loggingAppender.setRedirectToStdout(true);
+ loggingAppender.start();
+ Timestamp timestamp = Timestamp.ofTimeSecondsAndNanos(100000, 0);
+ LoggingEvent loggingEvent = createLoggingEvent(Level.INFO, timestamp.getSeconds());
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ assertThat(Strings.isNullOrEmpty(bout.toString())).isFalse();
+ System.setOut(null);
+ }
+
+ @Test
+ public void testRedirectToStdoutDisabled() {
+ ByteArrayOutputStream bout = new ByteArrayOutputStream();
+ PrintStream out = new PrintStream(bout);
+ System.setOut(out);
+
+ testAutoPopulationEnabled();
+
+ assertThat(Strings.isNullOrEmpty(bout.toString())).isTrue();
+ System.setOut(null);
+ }
+
+ @Test
+ public void testFDiagnosticInfoAdded() {
+ LoggingAppender.setInstrumentationStatus(false);
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ replay(logging);
+ LoggingEvent loggingEvent =
+ createLoggingEvent(Level.ERROR, Timestamp.ofTimeSecondsAndNanos(100000, 0).getSeconds());
+ loggingAppender.start();
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ int count = 0;
+ int diagnosticRecordCount = 0;
+ for (LogEntry entry : capturedArgument.getValue()) {
+ count++;
+ if (entry.getPayload().getType() == Type.JSON) {
+ JsonPayload payload = entry.getPayload();
+ if (!payload.getData().containsFields(Instrumentation.DIAGNOSTIC_INFO_KEY)) continue;
+ ListValue infoList =
+ payload
+ .getData()
+ .getFieldsOrThrow(Instrumentation.DIAGNOSTIC_INFO_KEY)
+ .getStructValue()
+ .getFieldsOrThrow(Instrumentation.INSTRUMENTATION_SOURCE_KEY)
+ .getListValue();
+ for (Value val : infoList.getValuesList()) {
+ String name =
+ val.getStructValue()
+ .getFieldsOrThrow(Instrumentation.INSTRUMENTATION_NAME_KEY)
+ .getStringValue();
+ assertThat(name.startsWith(Instrumentation.JAVA_LIBRARY_NAME_PREFIX)).isTrue();
+ if (name.equals(LoggingAppender.JAVA_LOGBACK_LIBRARY_NAME)) {
+ diagnosticRecordCount++;
+ }
+ }
+ }
+ }
+ assertEquals(count, 2);
+ assertEquals(diagnosticRecordCount, 1);
+ }
+
+ @Test
+ public void testFDiagnosticInfoNotAdded() {
+ Capture<Iterable<LogEntry>> capturedArgument = Capture.newInstance();
+ logging.write(
+ capture(capturedArgument),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class),
+ anyObject(WriteOption.class));
+ replay(logging);
+ LoggingEvent loggingEvent =
+ createLoggingEvent(Level.WARN, Timestamp.ofTimeSecondsAndNanos(100000, 0).getSeconds());
+ loggingAppender.start();
+ loggingAppender.doAppend(loggingEvent);
+ verify(logging);
+ int count = 0;
+ for (LogEntry entry : capturedArgument.getValue()) {
+ count++;
+ if (entry.getPayload().getType() == Type.JSON) {
+ JsonPayload payload = entry.getPayload();
+ assertThat(payload.getData().containsFields(Instrumentation.DIAGNOSTIC_INFO_KEY)).isFalse();
+ }
+ }
+ assertEquals(count, 1);
+ }
+}
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/MDCEventEnhancerTest.java b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/MDCEventEnhancerTest.java
new file mode 100644
index 000000000000..25d645196c49
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/MDCEventEnhancerTest.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import ch.qos.logback.classic.spi.LoggingEvent;
+import com.google.cloud.logging.LogEntry;
+import com.google.cloud.logging.Payload.StringPayload;
+import java.util.Collections;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MDCEventEnhancerTest {
+ private MDCEventEnhancer classUnderTest;
+
+ @Before
+ public void setUp() {
+ classUnderTest = new MDCEventEnhancer();
+ }
+
+ @Test
+ public void testEnhanceLogEntry() {
+ LoggingEvent loggingEvent = new LoggingEvent();
+ loggingEvent.setMessage("this is a test");
+ loggingEvent.setMDCPropertyMap(Collections.singletonMap("foo", "bar"));
+ LogEntry.Builder builder = LogEntry.newBuilder(StringPayload.of("this is a test"));
+
+ classUnderTest.enhanceLogEntry(builder, loggingEvent);
+ LogEntry logEntry = builder.build();
+
+ assertThat(logEntry.getLabels().get("foo")).isEqualTo("bar");
+ }
+}
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/StackTraceTest.java b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/StackTraceTest.java
new file mode 100644
index 000000000000..c3b2d78eac48
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/StackTraceTest.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import ch.qos.logback.classic.spi.ThrowableProxy;
+import org.junit.Test;
+
+public class StackTraceTest {
+ @Test
+ public void testStack() {
+ Exception ex = new UnsupportedOperationException("foo");
+ ex = new IllegalStateException("bar", ex);
+
+ StringBuilder stackBuilder = new StringBuilder();
+ LoggingAppender.writeStack(new ThrowableProxy(ex), "", stackBuilder);
+ String stack = stackBuilder.toString();
+
+ assertThat(stack).contains("java.lang.IllegalStateException: bar");
+ assertThat(stack).contains("caused by: java.lang.UnsupportedOperationException: foo");
+ assertThat(stack).contains("common frames elided");
+ }
+}
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancerTest.java b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancerTest.java
new file mode 100644
index 000000000000..aa0b7fe2b09b
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/TraceLoggingEventEnhancerTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2026 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.logging.logback;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import ch.qos.logback.classic.spi.LoggingEvent;
+import com.google.cloud.logging.LogEntry;
+import com.google.cloud.logging.Payload.StringPayload;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TraceLoggingEventEnhancerTest {
+ private TraceLoggingEventEnhancer classUnderTest;
+
+ @Before
+ public void setUp() {
+ classUnderTest = new TraceLoggingEventEnhancer();
+ }
+
+ @After
+ public void tearDown() {
+ TraceLoggingEventEnhancer.clearTraceId();
+ }
+
+ @Test
+ public void testEnhanceLogEntry() {
+ // setup
+ String traceId = "abc";
+ TraceLoggingEventEnhancer.setCurrentTraceId(traceId);
+ LoggingEvent loggingEvent = new LoggingEvent();
+ loggingEvent.setMessage("this is a test");
+ LogEntry.Builder builder = LogEntry.newBuilder(StringPayload.of("this is a test"));
+
+ // act
+ classUnderTest.enhanceLogEntry(builder, loggingEvent);
+ LogEntry logEntry = builder.build();
+
+ // assert - Trace Id should be recorded as explicit Trace field, not as a label
+ assertThat(traceId.equalsIgnoreCase(logEntry.getTrace()));
+ }
+
+ @Test
+ public void testGetCurrentTraceId() {
+ // setup
+ String traceId = "abc";
+ TraceLoggingEventEnhancer.setCurrentTraceId(traceId);
+
+ // act
+ String currentTraceId = TraceLoggingEventEnhancer.getCurrentTraceId();
+
+ // assert
+ assertThat(traceId.equalsIgnoreCase(currentTraceId));
+ }
+}
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/dummy-credentials.json b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/dummy-credentials.json
new file mode 100644
index 000000000000..c99e8764e24d
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/dummy-credentials.json
@@ -0,0 +1,12 @@
+{
+ "type": "service_account",
+ "project_id": "project-12345",
+ "private_key_id": "12345",
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKhPSTDs4cpKfnMc\np86fCkpnuER7bGc+mGkhkw6bE+BnROfrDCFBSjrENLS5JcsenANQ1kYGt9iVW2fd\nZAWUdDoj+t7g6+fDpzY1BzPSUls421Dmu7joDPY8jSdMzFCeg7Lyj0I36bJJ7ooD\nVPW6Q0XQcb8FfBiFPAKuY4elj/YDAgMBAAECgYBo2GMWmCmbM0aL/KjH/KiTawMN\nnfkMY6DbtK9/5LjADHSPKAt5V8ueygSvI7rYSiwToLKqEptJztiO3gnls/GmFzj1\nV/QEvFs6Ux3b0hD2SGpGy1m6NWWoAFlMISRkNiAxo+AMdCi4I1hpk4+bHr9VO2Bv\nV0zKFxmgn1R8qAR+4QJBANqKxJ/qJ5+lyPuDYf5s+gkZWjCLTC7hPxIJQByDLICw\niEnqcn0n9Gslk5ngJIGQcKBXIp5i0jWSdKN/hLxwgHECQQDFKGmo8niLzEJ5sa1r\nspww8Hc2aJM0pBwceshT8ZgVPnpgmITU1ENsKpJ+y1RTjZD6N0aj9gS9UB/UXdTr\nHBezAkEAqkDRTYOtusH9AXQpM3zSjaQijw72Gs9/wx1RxOSsFtVwV6U97CLkV1S+\n2HG1/vn3w/IeFiYGfZXLKFR/pA5BAQJAbFeu6IaGM9yFUzaOZDZ8mnAqMp349t6Q\nDB5045xJxLLWsSpfJE2Y12H1qvO1XUzYNIgXq5ZQOHBFbYA6txBy/QJBAKDRQN47\n6YClq9652X+1lYIY/h8MxKiXpVZVncXRgY6pbj4pmWEAM88jra9Wq6R77ocyECzi\nXCqi18A/sl6ymWc=\n-----END PRIVATE KEY-----\n",
+ "client_email": "project-12345@appspot.gserviceaccount.com",
+ "client_id": "123456789012345678901",
+ "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+ "token_uri": "https://oauth2.googleapis.com/token",
+ "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+ "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/project-12345%40appspot.gserviceaccount.com"
+ }
diff --git a/java-logging-logback/src/test/java/com/google/cloud/logging/logback/logback.xml b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/logback.xml
new file mode 100644
index 000000000000..66e86e4e7d4d
--- /dev/null
+++ b/java-logging-logback/src/test/java/com/google/cloud/logging/logback/logback.xml
@@ -0,0 +1,57 @@
+
+
+
+
+ INFO
+
+
+
+ application.log
+
+
+ WARN
+
+
+ SYNC
+
+
+ false
+
+
+ true
+
+
+ global
+
+
+ src/test/java/com/google/cloud/logging/logback/dummy-credentials.json
+
+
+ String
+
+
+
+
+
+ true
+
+
+
+ 100
+ 1000
+ 500
+ 10000
+ 100000
+ Ignore
+
+
+
+
+
+
+
diff --git a/monorepo-migration/fix_copyright_headers.py b/monorepo-migration/fix_copyright_headers.py
new file mode 100644
index 000000000000..cd813851d383
--- /dev/null
+++ b/monorepo-migration/fix_copyright_headers.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+import sys
+
+def fix_copyright(path):
+ if os.path.isfile(path):
+ if path.endswith(".java"):
+ _fix_file(path)
+ elif os.path.isdir(path):
+ for root, _, files in os.walk(path):
+ for file in files:
+ if file.endswith(".java"):
+ _fix_file(os.path.join(root, file))
+
+def _fix_file(file_path):
+ with open(file_path, 'r') as f:
+ content = f.read()
+
+ # Replace "Copyright [Year] Google LLC" or "Copyright [Year] Google Inc."
+ # with "Copyright 2026 Google LLC"
+ new_content = re.sub(
+ r'Copyright \d{4} Google (Inc\.|LLC)',
+ 'Copyright 2026 Google LLC',
+ content
+ )
+
+ if new_content != content:
+ with open(file_path, 'w') as f:
+ f.write(new_content)
+ print(f"Updated copyright in {file_path}")
+
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ print("Usage: fix_copyright_headers.py ...")
+ sys.exit(1)
+
+ for arg in sys.argv[1:]:
+ fix_copyright(arg)
diff --git a/monorepo-migration/migrate.sh b/monorepo-migration/migrate.sh
new file mode 100755
index 000000000000..4a2c8bedf5f5
--- /dev/null
+++ b/monorepo-migration/migrate.sh
@@ -0,0 +1,381 @@
+#!/bin/bash
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Exit on error
+set -e
+
+# Function to check if a command exists
+check_command() {
+ if ! command -v "$1" >/dev/null 2>&1; then
+ echo "Error: $1 is not installed or not in PATH." >&2
+ exit 1
+ fi
+}
+
+# Check for necessary CLI binaries
+check_command git
+check_command python3
+check_command mvn
+
+# Configuration
+MONOREPO_URL="https://github.com/googleapis/google-cloud-java"
+if [ -z "$SOURCE_REPO_URL" ]; then
+ read -p "Enter SOURCE_REPO_URL [https://github.com/googleapis/java-logging]: " input_url
+ SOURCE_REPO_URL="${input_url:-https://github.com/googleapis/java-logging}"
+fi
+CODEOWNER="${CODEOWNER:-}"
+
+# Derive names from URLs to avoid duplication
+SOURCE_REPO_NAME="${SOURCE_REPO_URL##*/}"
+MONOREPO_NAME="${MONOREPO_URL##*/}"
+
+# Use a temporary working directory sibling to the current monorepo
+WORKING_DIR="../../migration-work"
+SOURCE_DIR="$WORKING_DIR/$SOURCE_REPO_NAME-source"
+TARGET_DIR="$WORKING_DIR/$MONOREPO_NAME-target"
+
+# Get absolute path to the transformation script before any cd
+TRANSFORM_SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
+TRANSFORM_SCRIPT="$TRANSFORM_SCRIPT_DIR/transform_workflow.py"
+MODERNIZE_POM_SCRIPT="$TRANSFORM_SCRIPT_DIR/modernize_pom.py"
+UPDATE_ROOT_POM_SCRIPT="$TRANSFORM_SCRIPT_DIR/update_root_pom.py"
+FIX_COPYRIGHT_SCRIPT="$TRANSFORM_SCRIPT_DIR/fix_copyright_headers.py"
+UPDATE_GENERATION_CONFIG_SCRIPT="$TRANSFORM_SCRIPT_DIR/update_generation_config.py"
+UPDATE_OWLBOT_HERMETIC_SCRIPT="$TRANSFORM_SCRIPT_DIR/update_owlbot_hermetic.py"
+TRANSFORM_OWLBOT_SCRIPT="$TRANSFORM_SCRIPT_DIR/update_owlbot.py"
+
+# Track number of commits made by this script
+COMMIT_COUNT=0
+
+echo "Starting migration using git read-tree with isolated clones..."
+
+# 0. Create working directory
+mkdir -p "$WORKING_DIR"
+
+MIGRATION_HEAD_BRANCH="add-migration-script"
+
+# 1. Clone the source repository
+if [ ! -d "$SOURCE_DIR" ]; then
+ echo "Cloning source repo: $SOURCE_REPO_URL into $SOURCE_DIR"
+ git clone "$SOURCE_REPO_URL" "$SOURCE_DIR"
+else
+ echo "Source directory $SOURCE_DIR already exists. Ensuring it is clean and up-to-date..."
+ cd "$SOURCE_DIR"
+ git fetch origin
+ git checkout -f "main"
+ git reset --hard origin/main
+ git clean -fd
+ cd - > /dev/null
+fi
+
+# 1.5 Extract CODEOWNERS from source repository as default
+if [ -z "$CODEOWNER" ]; then
+ echo "Attempting to find default CODEOWNER from source repository..."
+ CODEOWNERS_FILE=""
+ if [ -f "$SOURCE_DIR/.github/CODEOWNERS" ]; then
+ CODEOWNERS_FILE="$SOURCE_DIR/.github/CODEOWNERS"
+ elif [ -f "$SOURCE_DIR/CODEOWNERS" ]; then
+ CODEOWNERS_FILE="$SOURCE_DIR/CODEOWNERS"
+ fi
+
+ DEFAULT_CODEOWNER=""
+ if [ -n "$CODEOWNERS_FILE" ]; then
+ # Extract the line(s) starting with * (global owners)
+ # Use grep to find the line, then sed to remove the '*' and standard team handle
+ EXTRACTED_OWNERS=$(grep "^\*" "$CODEOWNERS_FILE" | sed 's/^\*[[:space:]]*//' | sed 's/@googleapis\/cloud-java-team-teamsync//g' | xargs)
+ if [ -n "$EXTRACTED_OWNERS" ]; then
+ DEFAULT_CODEOWNER="$EXTRACTED_OWNERS"
+ echo "Found default CODEOWNER: $DEFAULT_CODEOWNER"
+ fi
+ fi
+
+ if [ -n "$DEFAULT_CODEOWNER" ]; then
+ read -p "Enter CODEOWNER [$DEFAULT_CODEOWNER]: " input_owner
+ CODEOWNER="${input_owner:-$DEFAULT_CODEOWNER}"
+ else
+ read -p "Enter CODEOWNER (e.g., @chingor13): " CODEOWNER
+ fi
+fi
+
+# 2. Clone the target monorepo (the "isolated clone")
+if [ ! -d "$TARGET_DIR" ]; then
+ echo "Cloning target monorepo: $MONOREPO_URL into $TARGET_DIR"
+ git clone "$MONOREPO_URL" "$TARGET_DIR"
+ git checkout -f "${MIGRATION_HEAD_BRANCH}"
+ git reset --hard origin/${MIGRATION_HEAD_BRANCH}
+else
+ echo "Target directory $TARGET_DIR already exists. Ensuring it is clean and up-to-date..."
+ cd "$TARGET_DIR"
+ git fetch origin
+ git checkout -f "${MIGRATION_HEAD_BRANCH}"
+ git reset --hard origin/${MIGRATION_HEAD_BRANCH}
+ git clean -fd
+ cd - > /dev/null
+fi
+
+cd "$TARGET_DIR"
+
+# Ensure we are on a clean main branch in the target clone
+echo "Ensuring clean state in target monorepo..."
+git fetch origin
+git reset --hard HEAD
+git clean -fd
+git checkout -f "${MIGRATION_HEAD_BRANCH}"
+git reset --hard origin/${MIGRATION_HEAD_BRANCH}
+git clean -fdx
+
+# Check if the repository is already migrated
+if [ -d "$SOURCE_REPO_NAME" ]; then
+ echo "Error: Directory $SOURCE_REPO_NAME already exists in the monorepo." >&2
+ echo "This repository seems to have already been migrated." >&2
+ exit 1
+fi
+
+
+# 2.5 Create a new feature branch for the migration
+BRANCH_NAME="migrate-$SOURCE_REPO_NAME"
+echo "Creating feature branch: $BRANCH_NAME"
+if git rev-parse --verify "$BRANCH_NAME" >/dev/null 2>&1; then
+ git branch -D "$BRANCH_NAME"
+fi
+git checkout -b "$BRANCH_NAME"
+
+# 3. Add the source repo as a remote
+echo "Adding remote for $SOURCE_REPO_NAME: $SOURCE_DIR"
+if git remote | grep -q "^$SOURCE_REPO_NAME$"; then
+ git remote remove "$SOURCE_REPO_NAME"
+fi
+git remote add "$SOURCE_REPO_NAME" "../$SOURCE_REPO_NAME-source"
+
+# 4. Fetch the source repo
+echo "Fetching $SOURCE_REPO_NAME..."
+git fetch "$SOURCE_REPO_NAME"
+
+# 5. Merge the histories using 'ours' strategy to keep monorepo content
+echo "Merging histories (strategy: ours)..."
+git merge --allow-unrelated-histories --no-ff "$SOURCE_REPO_NAME/main" -s ours --no-commit -m "chore($SOURCE_REPO_NAME): migrate $SOURCE_REPO_NAME into monorepo"
+
+# 6. Read the tree from the source repo into the desired subdirectory
+echo "Reading tree into prefix $SOURCE_REPO_NAME/..."
+git read-tree --prefix="$SOURCE_REPO_NAME/" -u "$SOURCE_REPO_NAME/main"
+
+# 6.5 Remove common files from the root of the migrated library
+echo "Removing common files from the root of $SOURCE_REPO_NAME/..."
+rm -f "$SOURCE_REPO_NAME/.gitignore"
+rm -f "$SOURCE_REPO_NAME/renovate.json"
+rm -f "$SOURCE_REPO_NAME/LICENSE"
+rm -f "$SOURCE_REPO_NAME/java.header"
+rm -rf "$SOURCE_REPO_NAME/.kokoro"
+# rm -rf "$SOURCE_REPO_NAME/.kokoro/continuous" "$SOURCE_REPO_NAME/.kokoro/nightly" "$SOURCE_REPO_NAME/.kokoro/presubmit"
+rm -f "$SOURCE_REPO_NAME/codecov.yaml"
+rm -f "$SOURCE_REPO_NAME/synth.metadata"
+rm -f "$SOURCE_REPO_NAME/license-checks.xml"
+find "$SOURCE_REPO_NAME" -maxdepth 1 -name "*.md" ! -name "CHANGELOG.md" ! -name "README.md" -delete
+
+# 7. Commit the migration
+echo "Committing migration..."
+git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): migrate $SOURCE_REPO_NAME into monorepo"
+((COMMIT_COUNT++))
+
# 7.1 Update CODEOWNERS
if [ -n "$CODEOWNER" ]; then
  echo "Updating .github/CODEOWNERS..."
  mkdir -p .github
  # Append (not overwrite): the monorepo CODEOWNERS already lists other libraries.
  echo "/$SOURCE_REPO_NAME/ $CODEOWNER @googleapis/cloud-java-team-teamsync" >> .github/CODEOWNERS

  echo "Committing CODEOWNERS update..."
  git add .github/CODEOWNERS
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): add code owners for $SOURCE_REPO_NAME"
  ((COMMIT_COUNT++))
fi

# 7.2 Update root pom.xml modules
# Inserts the new library into the root <modules> list (see update_root_pom.py).
echo "Updating root pom.xml modules..."
python3 "$UPDATE_ROOT_POM_SCRIPT" "pom.xml" "$SOURCE_REPO_NAME"

echo "Committing root pom.xml modules update..."
git add pom.xml
git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): add module to root pom.xml"
((COMMIT_COUNT++))
+
# 7.5 Migrate GitHub Actions workflows
echo "Checking for GitHub Actions workflows..."
if [ -d "$SOURCE_REPO_NAME/.github/workflows" ]; then
  echo "Migrating workflows to root .github/workflows/..."
  mkdir -p .github/workflows

  for workflow in "$SOURCE_REPO_NAME/.github/workflows/"*; do
    if [ -f "$workflow" ]; then
      filename=$(basename "$workflow")

      # Skip redundant workflows as requested by user
      case "$filename" in
        "hermetic_library_generation.yaml" | "update_generation_config.yaml" | \
        "approve-readme.yaml" | "auto-release.yaml" | "renovate_config_check.yaml" | \
        "samples.yaml" | "unmanaged_dependency_check.yaml")
          echo "Skipping redundant workflow: $filename"
          continue
          ;;
      esac

      # Prefix with the library name so workflows from different libraries
      # cannot collide in the shared .github/workflows directory.
      # (Fix: previously this used a garbled "$(unknown)" instead of the
      # basename captured above.)
      new_filename="${SOURCE_REPO_NAME}-${filename}"
      target_path=".github/workflows/$new_filename"

      echo "Migrating and adapting $filename to $target_path"
      python3 "$TRANSFORM_SCRIPT" "$SOURCE_REPO_NAME" < "$workflow" > "$target_path"
    fi
  done

  # Cleanup empty .github directory if it exists
  rm -rf "$SOURCE_REPO_NAME/.github"

  echo "Committing workflow migration..."
  git add .github/workflows
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): migrate and adapt GitHub Actions workflows"
  ((COMMIT_COUNT++))
fi
+
# 7.6 Update generation_config.yaml
echo "Updating generation_config.yaml..."
SOURCE_CONFIG="$SOURCE_REPO_NAME/generation_config.yaml"
if [ -f "$SOURCE_CONFIG" ]; then
  python3 "$UPDATE_GENERATION_CONFIG_SCRIPT" "generation_config.yaml" "$SOURCE_CONFIG"

  # Remove the source generation_config.yaml as it is now merged
  rm "$SOURCE_CONFIG"

  echo "Committing generation_config.yaml update..."
  # git add on the removed path stages its deletion.
  git add generation_config.yaml "$SOURCE_CONFIG"
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): add library to generation_config.yaml"
  ((COMMIT_COUNT++))
fi

# 7.7 Consolidate versions.txt
echo "Consolidating versions.txt..."
SOURCE_VERSIONS="$SOURCE_REPO_NAME/versions.txt"
if [ -f "$SOURCE_VERSIONS" ]; then
  # Append data lines only to root versions.txt (exclude comments/headers)
  grep "^[a-zA-Z0-9]" "$SOURCE_VERSIONS" >> versions.txt

  # Remove the migrated subdirectory's versions.txt
  rm "$SOURCE_VERSIONS"

  echo "Committing versions.txt update..."
  # git add on the removed path stages its deletion.
  git add versions.txt "$SOURCE_VERSIONS"
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): consolidate versions.txt into root"
  ((COMMIT_COUNT++))
fi
+
# 7.8 Migrate .OwlBot-hermetic.yaml
echo "Migrating .OwlBot-hermetic.yaml..."
# In standalone repos the hermetic config lives under .github/.
if [ -f "$SOURCE_DIR/.github/.OwlBot-hermetic.yaml" ]; then
  SOURCE_OWLBOT="$SOURCE_DIR/.github/.OwlBot-hermetic.yaml"
else
  SOURCE_OWLBOT=""
fi

if [ -n "$SOURCE_OWLBOT" ]; then
  # In the monorepo the file lives at the library root, with its paths
  # re-prefixed by update_owlbot_hermetic.py.
  TARGET_OWLBOT="$SOURCE_REPO_NAME/.OwlBot-hermetic.yaml"
  python3 "$UPDATE_OWLBOT_HERMETIC_SCRIPT" "$TARGET_OWLBOT" "$SOURCE_OWLBOT" "$SOURCE_REPO_NAME"

  echo "Committing .OwlBot-hermetic.yaml migration..."
  git add "$TARGET_OWLBOT"
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): migrate .OwlBot-hermetic.yaml"
  ((COMMIT_COUNT++))
fi


# 7.8b Migrate owlbot.py
echo "Migrating owlbot.py..."
if [ -f "$SOURCE_DIR/owlbot.py" ]; then
  TARGET_OWLBOT="$SOURCE_REPO_NAME/owlbot.py"

  # Regenerates the file in monorepo style, preserving custom replacements.
  python3 "$TRANSFORM_OWLBOT_SCRIPT" "$TARGET_OWLBOT" "$SOURCE_DIR/owlbot.py"

  echo "Committing owlbot.py migration..."
  git add "$TARGET_OWLBOT"
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): migrate owlbot.py"
  ((COMMIT_COUNT++))
fi
+
# 7.9 Fix copyright headers in Java files
echo "Fixing copyright headers in Java files..."
python3 "$FIX_COPYRIGHT_SCRIPT" "$SOURCE_REPO_NAME"

echo "Committing copyright header fixes..."
git add "$SOURCE_REPO_NAME"
git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): update copyright headers to 2026 Google LLC"
((COMMIT_COUNT++))

# 7.11 Modernize root pom.xml
echo "Modernizing root pom.xml..."
# Current parent version = contents of the <version> element carrying the
# x-version-update marker in the monorepo parent POM.
# (Fix: the <version> tag literals had been stripped from the grep/sed
# patterns, so the capture never matched.)
PARENT_VERSION=$(grep -m 1 "<version>.*{x-version-update:google-cloud-java:current}" google-cloud-jar-parent/pom.xml | sed -E 's/.*<version>(.*)<\/version>.*/\1/')
python3 "$MODERNIZE_POM_SCRIPT" "$SOURCE_REPO_NAME/pom.xml" "$PARENT_VERSION" "$SOURCE_REPO_NAME"

echo "Committing root pom.xml modernization..."
git add "$SOURCE_REPO_NAME/pom.xml"
git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): modernize root pom.xml"
((COMMIT_COUNT++))

# 7.12 Modernize BOM pom.xml
echo "Modernizing BOM pom.xml..."
# Find potential BOM POMs (usually in a subdirectory ending with -bom).
# Process substitution keeps the loop in the current shell so the
# COMMIT_COUNT increments are not lost in a subshell.
while read -r bom_pom; do
  echo "Modernizing BOM: $bom_pom"
  # BOMs should inherit from google-cloud-pom-parent
  python3 "$MODERNIZE_POM_SCRIPT" "$bom_pom" "$PARENT_VERSION" "$SOURCE_REPO_NAME" "google-cloud-pom-parent" "../../google-cloud-pom-parent/pom.xml"

  echo "Committing BOM pom.xml modernization for $bom_pom..."
  git add "$bom_pom"
  git commit -n --no-gpg-sign -m "chore($SOURCE_REPO_NAME): modernize BOM pom.xml"
  ((COMMIT_COUNT++))
done < <(find "$SOURCE_REPO_NAME" -name "pom.xml" | grep "\-bom/pom.xml" | grep -v "samples")

# 7.11 Verify compilation
echo "Verifying compilation..."
(cd "$SOURCE_REPO_NAME" && mvn compile -DskipTests -T 1C)
+
# 7.13 Squash commits
if [ "${SQUASH_COMMITS:-false}" = "true" ]; then
  echo "Squashing $COMMIT_COUNT commits..."
  if [ "$COMMIT_COUNT" -gt 1 ]; then
    # Squash all migration commits into the first one: with commits C1..Cn,
    # `reset --soft HEAD~(n-1)` moves HEAD back to C1 while keeping the
    # changes from C2..Cn staged; `commit --amend --no-edit` then folds the
    # staged changes into C1, preserving C1's message
    # ("chore(...): migrate ... into monorepo").
    git reset --soft "HEAD~$((COMMIT_COUNT - 1))"
    git commit --amend --no-edit --no-gpg-sign
    echo "Squashed everything into one commit."
  fi
fi

# 8. Cleanup
echo "Cleaning up temporary source clone..."
rm -rf "$SOURCE_DIR"

echo "Migration complete!"
echo "The migrated codebase is available in: $TARGET_DIR"
echo "You are on the $BRANCH_NAME branch in that clone."
diff --git a/monorepo-migration/modernize_pom.py b/monorepo-migration/modernize_pom.py
new file mode 100644
index 000000000000..ea0c023a9b57
--- /dev/null
+++ b/monorepo-migration/modernize_pom.py
@@ -0,0 +1,171 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import re
+
+
def modernize_pom(file_path, parent_version, source_repo_name=None, parent_artifactId='google-cloud-jar-parent', relative_path='../google-cloud-jar-parent/pom.xml'):
    """Rewrite a migrated library's pom.xml in place for the monorepo.

    - Repoints <url>/<connection>/<developerConnection> entries at
      googleapis/google-cloud-java.
    - Replaces the whole <parent> section with the monorepo parent POM.
    - Inside <dependencyManagement>, keeps only dependencies that carry an
      x-version-update marker, external (non-com.google) versioned
      dependencies, or versioned com.google.cloud google-cloud-* libraries;
      everything else is pruned.
    - Removes the <reporting> section entirely.

    (Fix: the XML tag literals below had been stripped to empty strings,
    which made every membership test like `'' in line` match every line.)

    Args:
        file_path: pom.xml to rewrite in place.
        parent_version: version string for the new parent POM.
        source_repo_name: original repo name used to match URLs; when None,
            any googleapis/<repo> URL is matched.
        parent_artifactId: artifactId of the new parent POM.
        relative_path: relativePath to the new parent POM.
    """
    with open(file_path, 'r') as f:
        lines = f.readlines()

    new_lines = []
    in_parent = False
    in_dep_mgmt = False
    in_dependencies = False
    in_dependency = False
    in_reporting = False
    current_dependency_lines = []
    should_preserve = False
    current_group_id = None
    current_artifact_id = None  # initialized here to avoid NameError on malformed input
    has_version = False

    for line in lines:
        # URL Modernization
        if any(tag in line for tag in ['<url>', '<connection>', '<developerConnection>']):
            if 'github.com' in line and 'googleapis/' in line:
                if source_repo_name:
                    repo_pattern = re.escape(source_repo_name)
                else:
                    repo_pattern = r'[a-zA-Z0-9-]+'

                # Replace HTTPS URLs
                line = re.sub(
                    r'https://github\.com/googleapis/' + repo_pattern,
                    'https://github.com/googleapis/google-cloud-java',
                    line
                )
                # Replace Git SSH URLs
                line = re.sub(
                    r'git@github\.com:googleapis/' + repo_pattern + r'(\.git)?',
                    'git@github.com:googleapis/google-cloud-java.git',
                    line
                )
                # Handle scm:git: prefix if it has https
                line = re.sub(
                    r'scm:git:https://github\.com/googleapis/' + repo_pattern,
                    'scm:git:https://github.com/googleapis/google-cloud-java.git',
                    line
                )

        # Parent section modernization: replace wholesale with the monorepo parent.
        if '<parent>' in line and not in_parent:
            in_parent = True
            indent = line[:line.find('<')]
            new_lines.append(f"{indent}<parent>\n")
            new_lines.append(f"{indent}  <groupId>com.google.cloud</groupId>\n")
            new_lines.append(f"{indent}  <artifactId>{parent_artifactId}</artifactId>\n")
            new_lines.append(f"{indent}  <version>{parent_version}</version>\n")
            new_lines.append(f"{indent}  <relativePath>{relative_path}</relativePath>\n")
            continue
        if '</parent>' in line and in_parent:
            in_parent = False
            new_lines.append(line)
            continue
        if in_parent:
            continue  # skip original parent content

        # Dependency Management pruning
        if '<dependencyManagement>' in line:
            in_dep_mgmt = True
            new_lines.append(line)
            continue
        if '</dependencyManagement>' in line:
            in_dep_mgmt = False
            new_lines.append(line)
            continue

        if in_dep_mgmt:
            if '<dependencies>' in line:
                in_dependencies = True
                new_lines.append(line)
                continue
            if '</dependencies>' in line:
                in_dependencies = False
                new_lines.append(line)
                continue

            if in_dependencies:
                if '<dependency>' in line:
                    in_dependency = True
                    current_dependency_lines = [line]
                    should_preserve = False
                    current_group_id = None
                    current_artifact_id = None
                    has_version = False
                    continue
                if '</dependency>' in line:
                    in_dependency = False
                    current_dependency_lines.append(line)

                    # Preservation logic:
                    # 1. Has x-version-update comment
                    # 2. Is NOT com.google group AND has a version tag
                    # 3. Is com.google.cloud group AND artifactId starts with google-cloud- AND has a version tag
                    is_external = current_group_id and not current_group_id.startswith('com.google')
                    is_google_cloud_lib = current_group_id == 'com.google.cloud' and current_artifact_id and current_artifact_id.startswith('google-cloud-')

                    if should_preserve or (is_external and has_version) or (is_google_cloud_lib and has_version):
                        new_lines.extend(current_dependency_lines)
                    continue

                if in_dependency:
                    current_dependency_lines.append(line)
                    if '{x-version-update:' in line:
                        should_preserve = True
                    if '<groupId>' in line:
                        match = re.search(r'<groupId>(.*?)</groupId>', line)
                        if match:
                            current_group_id = match.group(1).strip()
                    if '<artifactId>' in line:
                        match = re.search(r'<artifactId>(.*?)</artifactId>', line)
                        if match:
                            current_artifact_id = match.group(1).strip()
                    if '<version>' in line:
                        has_version = True
                    continue

            # Keep blank lines in depMgmt; other stray lines fall through below.
            if not line.strip():
                new_lines.append(line)
                continue

        # Reporting section removal
        if '<reporting>' in line:
            in_reporting = True
            continue
        if '</reporting>' in line:
            in_reporting = False
            continue
        if in_reporting:
            continue

        new_lines.append(line)

    with open(file_path, 'w') as f:
        # Clean up double empty lines potentially introduced by pruning
        content = "".join(new_lines)
        content = re.sub(r'\n\s*\n\s*\n', '\n\n', content)
        f.write(content)
+
if __name__ == "__main__":
    # CLI: modernize_pom.py <pom_path> <parent_version> [source_repo_name]
    #      [parent_artifactId] [relative_path]
    if len(sys.argv) > 2:
        source_repo = sys.argv[3] if len(sys.argv) > 3 else None
        parent_artifactId = sys.argv[4] if len(sys.argv) > 4 else 'google-cloud-jar-parent'
        relative_path = sys.argv[5] if len(sys.argv) > 5 else '../google-cloud-jar-parent/pom.xml'
        modernize_pom(sys.argv[1], sys.argv[2], source_repo, parent_artifactId, relative_path)
    else:
        # Fix: the <pom_path>/<parent_version> placeholders had been stripped
        # out of this usage string.
        print("Usage: python3 modernize_pom.py <pom_path> <parent_version> [source_repo_name] [parent_artifactId] [relative_path]")
        sys.exit(1)
+
diff --git a/monorepo-migration/transform_workflow.py b/monorepo-migration/transform_workflow.py
new file mode 100644
index 000000000000..fba791634ebe
--- /dev/null
+++ b/monorepo-migration/transform_workflow.py
@@ -0,0 +1,100 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import re
+
def transform(content, lib_name):
    """Adapt a standalone-repo GitHub Actions workflow for the monorepo.

    - Prefixes the workflow name with the library name.
    - Adds an env block with BUILD_SUBDIR and a `filter` job (dorny/paths-filter)
      so jobs only run when files under <lib_name>/ change.
    - Gives every other job `needs: filter` and a matching `if:` guard.
    - Drops the `clirr` job, adds a longpaths step on Windows jobs, appends
      `-P !java17` to SUREFIRE_JVM_OPT exports, and maps build.bat -> build.sh.

    (Fix: several indentation-significant literals had been whitespace-
    collapsed — e.g. `line.startswith(' ') and not line.startswith(' ')` was
    always false, so no job ever received the filter guard. Restored the
    2-space job-key / 4-space property YAML indents.)
    """
    lines = content.splitlines()
    new_lines = []
    inserted_defaults = False

    filter_job = f"""  filter:
    runs-on: ubuntu-latest
    outputs:
      library: ${{{{ steps.filter.outputs.library }}}}
    steps:
      - uses: actions/checkout@v4
      - uses: dorny/paths-filter@v3
        id: filter
        with:
          filters: |
            library:
              - '{lib_name}/**'"""

    in_jobs = False
    skip_current_job = False
    current_job_is_windows = False

    for line in lines:
        if line.startswith('name:') and not in_jobs:
            name_match = re.match(r'^name:\s*(.*)', line)
            if name_match:
                orig_name = name_match.group(1).strip()
                # Remove quotes if they exist
                orig_name = orig_name.strip("\"'")
                new_lines.append(f"name: {lib_name} {orig_name}")
                continue

        if line.startswith('jobs:'):
            if not inserted_defaults:
                new_lines.append("env:")
                new_lines.append(f"  BUILD_SUBDIR: {lib_name}")
                inserted_defaults = True
            new_lines.append(line)
            new_lines.append(filter_job)
            in_jobs = True
            continue

        # A job header is exactly 2-space indented (deeper lines are job content).
        if in_jobs and line.startswith('  ') and not line.startswith('   ') and line.strip() and not line.strip().startswith('#'):
            job_match = re.match(r'^  ([\w-]+):', line)
            if job_match:
                job_name = job_match.group(1)
                current_job_is_windows = False  # Reset for new job
                if job_name == 'clirr':
                    skip_current_job = True
                    continue
                else:
                    skip_current_job = False

                # Skip an existing 'filter' job header to avoid duplicating ours.
                if job_name != 'filter':
                    new_lines.append(line)
                    new_lines.append("    needs: filter")
                    new_lines.append("    if: ${{ needs.filter.outputs.library == 'true' }}")
                continue

        if not skip_current_job:
            if 'runs-on:' in line and 'windows' in line:
                current_job_is_windows = True

            if line.strip() == 'steps:' and current_job_is_windows:
                new_lines.append(line)
                new_lines.append("      - name: Support longpaths")
                new_lines.append("        run: git config --system core.longpaths true")
                continue

            if 'run: echo "SUREFIRE_JVM_OPT=' in line and '!java17' not in line:
                line = line.replace('" >> $GITHUB_ENV', ' -P !java17" >> $GITHUB_ENV')
            if 'build.bat' in line:
                line = line.replace('build.bat', 'build.sh')

            new_lines.append(line)
    return "\n".join(new_lines)
+
if __name__ == "__main__":
    # Reads the workflow from stdin, writes the adapted workflow to stdout.
    if len(sys.argv) < 2:
        # Fix: the <library_name> placeholder had been stripped from this string.
        print("Usage: python3 transform_workflow.py <library_name>")
        sys.exit(1)
    lib = sys.argv[1]
    print(transform(sys.stdin.read(), lib))
diff --git a/monorepo-migration/update_generation_config.py b/monorepo-migration/update_generation_config.py
new file mode 100644
index 000000000000..1ba7e6fc8c3e
--- /dev/null
+++ b/monorepo-migration/update_generation_config.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import yaml
+import re
+
def get_library_id(lib):
    """Derive a stable "java-<name>" identifier for a library entry.

    'library_name' takes precedence over 'api_shortname'; entries with
    neither key map to the sentinel "unknown".
    """
    for key in ('library_name', 'api_shortname'):
        if key in lib:
            return f"java-{lib[key]}"
    return "unknown"
+
def merge_libraries(target_libs, source_libs):
    """Merge source library entries into the target list.

    Entries are keyed by get_library_id(). For a matched entry, GAPICs are
    deduplicated by proto_path (source wins) and kept sorted, and all other
    incoming fields overwrite the target's. Repo-specific fields ('repo',
    'repo_short') are stripped from incoming entries. Returns the merged
    list sorted by library id.
    """
    merged = {get_library_id(lib): lib for lib in target_libs}

    for incoming in source_libs:
        key = get_library_id(incoming)
        cleaned = {k: v for k, v in incoming.items() if k not in ('repo', 'repo_short')}

        existing = merged.get(key)
        if existing is None:
            merged[key] = cleaned
            continue

        # Union of GAPICs keyed by proto_path; incoming entries override.
        by_proto = {g['proto_path']: g for g in existing.get('GAPICs', [])}
        by_proto.update((g['proto_path'], g) for g in cleaned.get('GAPICs', []))
        existing['GAPICs'] = [by_proto[p] for p in sorted(by_proto)]

        # Every other incoming field overwrites the target's value.
        for field, value in cleaned.items():
            if field != 'GAPICs':
                existing[field] = value

    return [merged[key] for key in sorted(merged)]
+
def update_config(target_path, source_path):
    """Merge the source repo's generation_config.yaml into the monorepo's.

    Loads both YAML files, merges their `libraries` lists via
    merge_libraries(), and rewrites target_path in place, re-emitting any
    leading '#' comment header from the original target file first.
    """
    with open(target_path, 'r') as f:
        target_content = f.read()

    with open(source_path, 'r') as f:
        source_data = yaml.safe_load(f) or {}

    # Load target data
    target_data = yaml.safe_load(target_content) or {}

    target_libs = target_data.get('libraries', [])
    source_libs = source_data.get('libraries', [])

    merged_libs = merge_libraries(target_libs, source_libs)
    target_data['libraries'] = merged_libs

    # Write back
    with open(target_path, 'w') as f:
        # Check if there was a license header in the original file
        # (everything from the leading '#' up to the first blank line).
        header_match = re.search(r'^(#.*?\n\n)', target_content, re.DOTALL)
        if header_match:
            f.write(header_match.group(1))

        # Use yaml.dump to write the data.
        # sort_keys=False to preserve order of fields within libraries if possible (YAML 1.2+ usually does, but pyyaml depends)
        yaml.dump(target_data, f, sort_keys=False, default_flow_style=False, indent=2)
+
if __name__ == "__main__":
    if len(sys.argv) != 3:
        # Fix: the argument placeholders had been stripped from this string.
        print("Usage: python3 update_generation_config.py <target_config> <source_config>")
        sys.exit(1)

    target_path = sys.argv[1]
    source_path = sys.argv[2]
    update_config(target_path, source_path)
diff --git a/monorepo-migration/update_owlbot.py b/monorepo-migration/update_owlbot.py
new file mode 100644
index 000000000000..2b3d866d40f4
--- /dev/null
+++ b/monorepo-migration/update_owlbot.py
@@ -0,0 +1,225 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import sys
+import os
+
def is_call_to(node, name_parts):
    """Return True if `node` is a Call matching the dotted name in name_parts.

    name_parts holds one or two segments: ['s', 'move'] matches s.move(...),
    while ['move'] matches either obj.move(...) or a bare move(...).
    """
    if not isinstance(node, ast.Call):
        return False

    target = node.func
    if isinstance(target, ast.Attribute):
        # Attribute access, e.g. s.move
        if len(name_parts) == 2:
            base = target.value
            if (isinstance(base, ast.Name)
                    and base.id == name_parts[0]
                    and target.attr == name_parts[1]):
                return True
        elif len(name_parts) == 1:
            if target.attr == name_parts[0]:
                return True
    elif isinstance(target, ast.Name):
        # Direct name, e.g. move(...) when imported directly
        if len(name_parts) == 1 and target.id == name_parts[0]:
            return True

    return False
+
def extract_excludes_from_call(call_node):
    """Collect the string elements of a call's excludes=[...] keyword.

    Only the first 'excludes' keyword is examined; non-list values and
    non-string elements are ignored. Returns [] when absent.
    """
    for keyword in call_node.keywords:
        if keyword.arg != 'excludes':
            continue
        collected = []
        if isinstance(keyword.value, ast.List):
            for element in keyword.value.elts:
                if isinstance(element, ast.Constant):  # Python 3.8+
                    collected.append(element.value)
                elif isinstance(element, ast.Str):  # Python < 3.8
                    collected.append(element.s)
        return collected
    return []
+
def extract_info(source_code):
    """Parse a standalone-repo owlbot.py and extract the pieces to migrate.

    Returns a tuple (excludes, top_level_lines, loop_body_lines):
      - excludes: patterns gathered from java.common_templates(excludes=[...])
        or common.java_library(excludes=[...]) calls (top-level or inside the
        staging loop).
      - top_level_lines: unparsed source of preserved top-level statements
        (everything except synthtool imports, the staging loop, the
        common_templates call and s.remove_staging_dirs()).
      - loop_body_lines: unparsed source of the body of the
        `for library in s.get_staging_dirs():` loop.

    NOTE(review): on Python < 3.9 ast.unparse is unavailable, so preserved
    statements are silently dropped instead of re-emitted — confirm the
    tooling always runs on 3.9+.
    """
    excludes = []
    loop_body_lines = []
    top_level_lines = []

    try:
        tree = ast.parse(source_code)
    except SyntaxError:
        # Unparseable source: return empty results rather than raising.
        return excludes, top_level_lines, loop_body_lines

    for node in tree.body:
        # Some nodes are wrapped in Expr, e.g. s.remove_staging_dirs()
        inner_node = node
        if isinstance(node, ast.Expr):
            inner_node = node.value

        # Ignore standard imports (we will inject them)
        if isinstance(node, (ast.Import, ast.ImportFrom)):
            # We assume we only care about synthtool/java imports which we regenerate.
            # If there are other imports, we should probably preserve them.
            # Heuristic: if it mentions 'synthtool', ignore it.
            if isinstance(node, ast.Import):
                if any('synthtool' in alias.name for alias in node.names):
                    continue
            if isinstance(node, ast.ImportFrom):
                if node.module and 'synthtool' in node.module:
                    continue
            # Preserve other imports
            if sys.version_info >= (3, 9):
                top_level_lines.append(ast.unparse(node))
            continue

        # Check for java.common_templates (top level)
        if is_call_to(inner_node, ['java', 'common_templates']) or is_call_to(inner_node, ['common', 'java_library']):
            excludes.extend(extract_excludes_from_call(inner_node))
            continue

        # Check for s.remove_staging_dirs()
        if is_call_to(inner_node, ['s', 'remove_staging_dirs']):
            continue

        # Check for the main loop: for library in s.get_staging_dirs():
        if isinstance(node, ast.For):
            is_staging_loop = False
            if isinstance(node.iter, ast.Call):
                # Check for s.get_staging_dirs()
                if is_call_to(node.iter, ['s', 'get_staging_dirs']):
                    is_staging_loop = True

            if is_staging_loop:
                # Extract body
                for child in node.body:
                    child_inner = child
                    if isinstance(child, ast.Expr):
                        child_inner = child.value

                    # Check for nested common_templates (rare but possible)
                    if is_call_to(child_inner, ['java', 'common_templates']) or is_call_to(child_inner, ['common', 'java_library']):
                        excludes.extend(extract_excludes_from_call(child_inner))
                        continue

                    if sys.version_info >= (3, 9):
                        loop_body_lines.append(ast.unparse(child))
                continue
            # else fall through to preserve other loops

        # Preserve everything else (constants, functions, other logic)
        if sys.version_info >= (3, 9):
            top_level_lines.append(ast.unparse(node))

    return excludes, top_level_lines, loop_body_lines
+
def generate_target_content(excludes, top_level_lines, loop_body_lines, standard_excludes=None):
    """Render a monorepo-style owlbot.py as a string.

    Args:
        excludes: exclude patterns recovered from the source owlbot.py.
            (Fix: previously this parameter was accepted but silently
            ignored; it is now merged with the standard set so repo-specific
            excludes survive the migration.)
        top_level_lines: preserved top-level statements (constants, helpers).
        loop_body_lines: preserved body of the staging-dirs loop.
        standard_excludes: baseline exclude set (e.g. derived from a template
            owlbot.py); defaults to the common monorepo set.

    Returns:
        The generated file content, trailing newline included.
    """
    if standard_excludes is None:
        standard_excludes = {
            ".github/*",
            "samples/*",
            "CODE_OF_CONDUCT.md",
            "CONTRIBUTING.md",
            "LICENSE",
            "SECURITY.md",
            "java.header",
            "license-checks.xml",
            "renovate.json",
            ".gitignore"
        }

    # Merge recovered excludes with the baseline, deduplicated and sorted.
    final_excludes = sorted(set(standard_excludes) | set(excludes))
    excludes_str = ",\n    ".join([f'"{e}"' for e in final_excludes])

    # Reconstruct content
    lines = []
    lines.append("# Copyright 2026 Google LLC")
    lines.append("#")
    lines.append("# Licensed under the Apache License, Version 2.0 (the \"License\");")
    lines.append("# you may not use this file except in compliance with the License.")
    lines.append("# You may obtain a copy of the License at")
    lines.append("#")
    lines.append("# https://www.apache.org/licenses/LICENSE-2.0")
    lines.append("#")
    lines.append("# Unless required by applicable law or agreed to in writing, software")
    lines.append("# distributed under the License is distributed on an \"AS IS\" BASIS,")
    lines.append("# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.")
    lines.append("# See the License for the specific language governing permissions and")
    lines.append("# limitations under the License.")
    lines.append("")
    lines.append("import synthtool as s")
    lines.append("from synthtool.languages import java")
    lines.append("")

    if top_level_lines:
        lines.extend(top_level_lines)
        lines.append("")

    lines.append("for library in s.get_staging_dirs():")
    lines.append("    # put any special-case replacements here")
    for stmt in loop_body_lines:
        # Re-indent each preserved statement under the loop (4 spaces).
        for stmt_line in stmt.split('\n'):
            lines.append("    " + stmt_line)

    lines.append("s.remove_staging_dirs()")
    lines.append("java.common_templates(monorepo=True, excludes=[")
    lines.append(f"    {excludes_str}")
    lines.append("])")

    return "\n".join(lines) + "\n"
+
def main():
    """CLI entry point: regenerate a monorepo owlbot.py from a source one.

    Usage: update_owlbot.py <target_file> <source_file> [template_file]
    """
    if len(sys.argv) < 3:
        # Fix: the <target_file>/<source_file> placeholders had been stripped.
        print("Usage: update_owlbot.py <target_file> <source_file> [template_file]")
        sys.exit(1)

    target_file = sys.argv[1]
    source_file = sys.argv[2]
    template_file = sys.argv[3] if len(sys.argv) > 3 else None

    if not os.path.exists(source_file):
        print(f"Source file {source_file} not found.")
        sys.exit(1)

    with open(source_file, 'r') as f:
        source_code = f.read()

    excludes, top_level_lines, loop_body_lines = extract_info(source_code)

    # A template file (typically an existing monorepo owlbot.py) supplies the
    # baseline excludes; otherwise the built-in defaults are used.
    standard_excludes = None
    if template_file:
        if os.path.exists(template_file):
            with open(template_file, 'r') as f:
                template_code = f.read()
            template_excludes, _, _ = extract_info(template_code)
            standard_excludes = template_excludes
        else:
            print(f"Template file {template_file} not found, using default excludes.")

    target_content = generate_target_content(excludes, top_level_lines, loop_body_lines, standard_excludes)

    if os.path.dirname(target_file):
        os.makedirs(os.path.dirname(target_file), exist_ok=True)
    with open(target_file, 'w') as f:
        f.write(target_content)

    print(f"Generated {target_file}")

if __name__ == "__main__":
    main()
diff --git a/monorepo-migration/update_owlbot_hermetic.py b/monorepo-migration/update_owlbot_hermetic.py
new file mode 100644
index 000000000000..7f9f803822f5
--- /dev/null
+++ b/monorepo-migration/update_owlbot_hermetic.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import yaml
+import re
+
def update_config(target_path, source_path, prefix):
    """
    Reads source_path, prepends the new library directory (prefix) to the
    path regexes in deep-remove-regex / deep-preserve-regex and to the
    owl-bot staging destinations in deep-copy-regex, then writes the result
    to target_path (preserving the source's leading '#' comment header).
    """
    with open(source_path, 'r') as f:
        source_content = f.read()

    # Load source data
    source_data = yaml.safe_load(source_content) or {}

    # Define fields to update
    fields_to_update = ['deep-remove-regex', 'deep-preserve-regex']

    for field in fields_to_update:
        if field in source_data:
            updated_list = []
            for item in source_data[field]:
                # Entries are path regexes that normally begin with '/'
                # (optionally anchored with '^'). The library now lives one
                # directory deeper, so insert "/<prefix>" ahead of the path.
                if item.startswith('^'):
                    # Fix: previously produced "^<prefix>//path" (anchor glued
                    # to the prefix, doubled slash). Keep the anchor and reuse
                    # the item's own leading '/': "^/.kokoro" -> "^/<prefix>/.kokoro".
                    updated_list.append(f"^/{prefix}{item[1:]}")
                else:
                    updated_list.append(f"/{prefix}{item}")
            source_data[field] = updated_list

    if 'deep-copy-regex' in source_data:
        for item in source_data['deep-copy-regex']:
            if 'dest' in item and item['dest'].startswith('/owl-bot-staging/'):
                item['dest'] = item['dest'].replace('/owl-bot-staging/', f'/owl-bot-staging/{prefix}/', 1)

    # Write to target_path
    with open(target_path, 'w') as f:
        # Preserve the leading block of '#' comment lines from the source file.
        header_match = re.search(r'^((?:#[^\n]*\n)+)', source_content)
        if header_match:
            f.write(header_match.group(1))
            f.write("\n")  # Add a newline after the header

        # Use yaml.dump to write the data.
        yaml.dump(source_data, f, sort_keys=False, default_flow_style=False, indent=2)
+
if __name__ == "__main__":
    if len(sys.argv) != 4:
        # Fix: the argument placeholders had been stripped from this string.
        print("Usage: python3 update_owlbot_hermetic.py <target_path> <source_path> <prefix>")
        sys.exit(1)

    target_path = sys.argv[1]
    source_path = sys.argv[2]
    prefix = sys.argv[3]

    update_config(target_path, source_path, prefix)
diff --git a/monorepo-migration/update_root_pom.py b/monorepo-migration/update_root_pom.py
new file mode 100644
index 000000000000..fec12930dee3
--- /dev/null
+++ b/monorepo-migration/update_root_pom.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
def update_root_pom(pom_path, module_name):
    """Insert `module_name` into the <modules> section of the root pom.xml.

    The new <module> entry is placed among the existing java-* entries in
    sorted order (or appended at the end if no java-* entries exist), and
    the file is rewritten in place. Exits with an error if the file has no
    <modules> section. Inserting an already-present module is a no-op.

    (Fix: the <module>/<modules> tag literals had been stripped to empty
    strings, so `content.find('')` always returned 0 and the generated
    entries contained no tags at all.)
    """
    new_module = f'    <module>{module_name}</module>\n'
    with open(pom_path, 'r') as f:
        content = f.read()

    start_tag = '<modules>'
    end_tag = '</modules>'
    start_idx = content.find(start_tag)
    end_idx = content.find(end_tag)

    if start_idx == -1 or end_idx == -1:
        print(f"Error: {start_tag} or {end_tag} not found in {pom_path}")
        sys.exit(1)

    modules_section = content[start_idx + len(start_tag):end_idx]
    lines = [l for l in modules_section.splitlines(keepends=True) if l.strip()]

    # Keep the java-* block sorted; insert the new module inside it.
    java_indices = [i for i, l in enumerate(lines) if 'java-' in l]
    if java_indices:
        start_java = java_indices[0]
        end_java = java_indices[-1] + 1
        java_lines = lines[start_java:end_java]
        if not any(f'<module>{module_name}</module>' in l for l in java_lines):
            java_lines.append(new_module)
            java_lines.sort()
        lines = lines[:start_java] + java_lines + lines[end_java:]
    else:
        if not any(f'<module>{module_name}</module>' in l for l in lines):
            lines.append(new_module)

    new_content = content[:start_idx + len(start_tag)] + '\n' + ''.join(lines) + '  ' + content[end_idx:]
    with open(pom_path, 'w') as f:
        f.write(new_content)
+
if __name__ == "__main__":
    if len(sys.argv) != 3:
        # Fix: the argument placeholders had been stripped from this string.
        print("Usage: update_root_pom.py <pom_path> <module_name>")
        sys.exit(1)
    update_root_pom(sys.argv[1], sys.argv[2])
diff --git a/pom.xml b/pom.xml
index 96bd03560242..1e0016a01ffd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -132,6 +132,7 @@
java-licensemanager
java-life-sciences
java-locationfinder
+ java-logging-logback
java-lustre
java-maintenance
java-managed-identities
diff --git a/versions.txt b/versions.txt
index b402889bf022..9af177bf3894 100644
--- a/versions.txt
+++ b/versions.txt
@@ -937,3 +937,4 @@ grpc-google-cloud-gkerecommender-v1:0.2.0:0.3.0-SNAPSHOT
google-cloud-cloudapiregistry:0.1.0:0.2.0-SNAPSHOT
proto-google-cloud-cloudapiregistry-v1beta:0.1.0:0.2.0-SNAPSHOT
grpc-google-cloud-cloudapiregistry-v1beta:0.1.0:0.2.0-SNAPSHOT
+google-cloud-logging-logback:0.132.21-alpha:0.132.21-alpha