getExecutionTimeMonitoringData(ShortIdMap shortId
for (SimpleExecutionState state : executionStates) {
if (state.getTotalMillis() != 0) {
String shortId = state.getTotalMillisShortId(shortIds);
- if (result.containsKey(shortId)) {
- // This can happen due to flatten unzipping.
- result.put(shortId, state.mergeTotalMillisPayload(result.get(shortId)));
- } else {
- result.put(shortId, state.getTotalMillisPayload());
- }
+ result.compute(
+ shortId,
+ (String k, @Nullable ByteString existing) -> {
+ if (existing != null) {
+ // This can happen due to flatten unzipping.
+ return state.mergeTotalMillisPayload(existing);
+ } else {
+ return state.getTotalMillisPayload();
+ }
+ });
}
}
return result;
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index 2b6392f46b9bc..bc0cc188f0632 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -290,7 +290,7 @@ tasks.register('validatesRunner') {
// Generates :runners:flink:1.13:runQuickstartJavaFlinkLocal
createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'FlinkLocal')
-task examplesIntegrationTest(type: Test) {
+tasks.register("examplesIntegrationTest", Test) {
group = "Verification"
// Disable gradle cache
outputs.upToDateWhen { false }
diff --git a/runners/flink/job-server/flink_job_server.gradle b/runners/flink/job-server/flink_job_server.gradle
index 981a2cd870b16..2b3bbdfceb337 100644
--- a/runners/flink/job-server/flink_job_server.gradle
+++ b/runners/flink/job-server/flink_job_server.gradle
@@ -212,7 +212,7 @@ project.ext.validatesPortableRunnerBatch = portableValidatesRunnerTask("Batch",
project.ext.validatesPortableRunnerStreaming = portableValidatesRunnerTask("Streaming", true, false, false)
project.ext.validatesPortableRunnerStreamingCheckpoint = portableValidatesRunnerTask("StreamingCheckpointing", true, true, false)
-task validatesPortableRunner() {
+tasks.register("validatesPortableRunner") {
dependsOn validatesPortableRunnerDocker
dependsOn validatesPortableRunnerBatch
dependsOn validatesPortableRunnerStreaming
@@ -265,7 +265,8 @@ createCrossLanguageValidatesRunnerTask(
)
// miniCluster jar starts an embedded Flink cluster intended for use in testing.
-task miniCluster(type: Jar, dependsOn: shadowJar) {
+tasks.register("miniCluster", Jar) {
+ dependsOn shadowJar
archiveBaseName = "${project.archivesBaseName}-mini-cluster"
dependencies {
runtimeOnly project(path: flinkRunnerProject, configuration: "miniCluster")
diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle
index 89d185bf53512..355fc4547dfdc 100644
--- a/runners/google-cloud-dataflow-java/build.gradle
+++ b/runners/google-cloud-dataflow-java/build.gradle
@@ -367,7 +367,13 @@ task printRunnerV2PipelineOptions {
task validatesRunner {
group = "Verification"
description "Validates Dataflow runner"
- dependsOn(createLegacyWorkerValidatesRunnerTest(name: 'validatesRunnerLegacyWorkerTest'))
+ dependsOn(createLegacyWorkerValidatesRunnerTest(
+ name: 'validatesRunnerLegacyWorkerTest',
+ excludedTests: [
+ // TODO(BEAM-13952)
+ 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState',
+ ]
+ ))
}
task validatesRunnerStreaming {
@@ -382,6 +388,10 @@ task validatesRunnerStreaming {
'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
'org.apache.beam.sdk.testing.UsesSetState',
],
+ excludedTests: [
+ // TODO(BEAM-13952)
+ 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState'
+ ]
))
}
@@ -473,6 +483,9 @@ task validatesRunnerV2 {
'org.apache.beam.sdk.transforms.GroupByKeyTest$WindowTests.testRewindowWithTimestampCombiner',
'org.apache.beam.sdk.transforms.FlattenTest.testFlattenWithDifferentInputAndOutputCoders2',
+
+ // TODO(BEAM-13952)
+ 'org.apache.beam.sdk.transforms.GroupByKeyTest$BasicTests.testAfterProcessingTimeContinuationTriggerUsingState',
]
))
}
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineDebugOptions.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineDebugOptions.java
index 264827f33aa69..2995a485527cd 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineDebugOptions.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/options/DataflowPipelineDebugOptions.java
@@ -177,6 +177,30 @@ public Dataflow create(PipelineOptions options) {
void setNumberOfWorkerHarnessThreads(int value);
+ /**
+ * Maximum number of bundles outstanding from windmill before the worker stops requesting.
+ *
+ * If <= 0, use the default value of 100 + getNumberOfWorkerHarnessThreads()
+ */
+ @Description(
+ "Maximum number of bundles outstanding from windmill before the worker stops requesting.")
+ @Default.Integer(0)
+ int getMaxBundlesFromWindmillOutstanding();
+
+ void setMaxBundlesFromWindmillOutstanding(int value);
+
+ /**
+ * Maximum number of bytes outstanding from windmill before the worker stops requesting.
+ *
+ *
If <= 0, use the default value of 50% of jvm memory.
+ */
+ @Description(
+ "Maximum number of bytes outstanding from windmill before the worker stops requesting. If <= 0, use the default value of 50% of jvm memory.")
+ @Default.Long(0)
+ long getMaxBytesFromWindmillOutstanding();
+
+ void setMaxBytesFromWindmillOutstanding(long value);
+
/**
* If {@literal true}, save a heap dump before killing a thread or process which is GC thrashing
* or out of memory. The location of the heap file will either be echoed back to the user, or the
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
index 236e47715a367..c49865aa7511c 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
@@ -191,10 +191,6 @@ public class StreamingDataflowWorker {
// Maximum number of threads for processing. Currently each thread processes one key at a time.
static final int MAX_PROCESSING_THREADS = 300;
static final long THREAD_EXPIRATION_TIME_SEC = 60;
- // Maximum work units retrieved from Windmill and queued before processing. Limiting this delays
- // retrieving extra work from Windmill without working on it, leading to better
- // prioritization / utilization.
- static final int MAX_WORK_UNITS_QUEUED = 100;
static final long TARGET_COMMIT_BUNDLE_BYTES = 32 << 20;
static final int MAX_COMMIT_QUEUE_BYTES = 500 << 20; // 500MB
static final int NUM_COMMIT_STREAMS = 1;
@@ -210,9 +206,6 @@ public class StreamingDataflowWorker {
// Matches kWindmillCounterUpdate in workflow_worker_service_multi_hubs.cc.
private static final String WINDMILL_COUNTER_UPDATE_WORK_ID = "3";
- /** Maximum number of items to return in a GetWork request. */
- private static final long MAX_GET_WORK_ITEMS = MAX_WORK_UNITS_QUEUED + MAX_PROCESSING_THREADS;
-
/** Maximum number of failure stacktraces to report in each update sent to backend. */
private static final int MAX_FAILURES_TO_REPORT_IN_UPDATE = 1000;
@@ -666,7 +659,8 @@ public static StreamingDataflowWorker fromDataflowWorkerHarnessOptions(
chooseMaximumNumberOfThreads(),
THREAD_EXPIRATION_TIME_SEC,
TimeUnit.SECONDS,
- MAX_WORK_UNITS_QUEUED,
+ chooseMaximumBundlesOutstanding(),
+ chooseMaximumBytesOutstanding(),
threadFactory);
maxSinkBytes =
@@ -785,6 +779,22 @@ private int chooseMaximumNumberOfThreads() {
return MAX_PROCESSING_THREADS;
}
+ private int chooseMaximumBundlesOutstanding() {
+ int maxBundles = options.getMaxBundlesFromWindmillOutstanding();
+ if (maxBundles > 0) {
+ return maxBundles;
+ }
+ return chooseMaximumNumberOfThreads() + 100;
+ }
+
+ private long chooseMaximumBytesOutstanding() {
+ long maxMem = options.getMaxBytesFromWindmillOutstanding();
+ if (maxMem > 0) {
+ return maxMem;
+ }
+ return Runtime.getRuntime().maxMemory() / 2;
+ }
+
void addStateNameMappings(Map nameMap) {
stateNameMap.putAll(nameMap);
}
@@ -804,7 +814,7 @@ public void setMaxWorkItemCommitBytes(int maxWorkItemCommitBytes) {
@VisibleForTesting
public boolean workExecutorIsEmpty() {
- return workUnitExecutor.getQueue().isEmpty();
+ return workUnitExecutor.executorQueueIsEmpty();
}
public void start() {
@@ -949,9 +959,6 @@ public void stop() {
memoryMonitor.stop();
memoryMonitorThread.join();
workUnitExecutor.shutdown();
- if (!workUnitExecutor.awaitTermination(5, TimeUnit.MINUTES)) {
- throw new RuntimeException("Work executor did not terminate within 5 minutes");
- }
for (ComputationState state : computationMap.values()) {
state.close();
}
@@ -1065,7 +1072,7 @@ void streamingDispatchLoop() {
windmillServer.getWorkStream(
Windmill.GetWorkRequest.newBuilder()
.setClientId(clientId)
- .setMaxItems(MAX_GET_WORK_ITEMS)
+ .setMaxItems(chooseMaximumBundlesOutstanding())
.setMaxBytes(MAX_GET_WORK_FETCH_BYTES)
.build(),
(String computation,
@@ -1236,7 +1243,8 @@ private void callFinalizeCallbacks(Windmill.WorkItem work) {
} catch (Throwable t) {
LOG.error("Source checkpoint finalization failed:", t);
}
- });
+ },
+ 0);
}
}
}
@@ -1548,7 +1556,7 @@ private void process(
if (retryLocally) {
// Try again after some delay and at the end of the queue to avoid a tight loop.
sleep(retryLocallyDelayMs);
- workUnitExecutor.forceExecute(work);
+ workUnitExecutor.forceExecute(work, work.getWorkItem().getSerializedSize());
} else {
// Consider the item invalid. It will eventually be retried by Windmill if it still needs to
// be processed.
@@ -1726,7 +1734,7 @@ private Windmill.GetWorkResponse getWork() {
return windmillServer.getWork(
Windmill.GetWorkRequest.newBuilder()
.setClientId(clientId)
- .setMaxItems(MAX_GET_WORK_ITEMS)
+ .setMaxItems(chooseMaximumBundlesOutstanding())
.setMaxBytes(MAX_GET_WORK_FETCH_BYTES)
.build());
}
@@ -2285,7 +2293,7 @@ public boolean activateWork(ShardedKey shardedKey, Work work) {
// Fall through to execute without the lock held.
}
}
- executor.execute(work);
+ executor.execute(work, work.getWorkItem().getSerializedSize());
return true;
}
@@ -2327,7 +2335,7 @@ public void completeWork(ShardedKey shardedKey, long workToken) {
}
}
if (nextWork != null) {
- executor.forceExecute(nextWork);
+ executor.forceExecute(nextWork, nextWork.getWorkItem().getSerializedSize());
}
}
@@ -2511,27 +2519,9 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) thro
}
private class MetricsDataProvider implements StatusDataProvider {
-
@Override
public void appendSummaryHtml(PrintWriter writer) {
- writer.println(
- "Worker Threads: "
- + workUnitExecutor.getPoolSize()
- + "/"
- + workUnitExecutor.getMaximumPoolSize()
- + " ");
- writer.println("Active Threads: " + workUnitExecutor.getActiveCount() + " ");
- writer.println(
- "Work Queue Size: "
- + workUnitExecutor.getQueue().size()
- + "/"
- + MAX_WORK_UNITS_QUEUED
- + " ");
- writer.print("Commit Queue: ");
- appendHumanizedBytes(commitQueue.weight(), writer);
- writer.print(", ");
- writer.print(commitQueue.size());
- writer.println(" elements ");
+ writer.println(workUnitExecutor.summaryHtml());
writer.print("Active commit: ");
appendHumanizedBytes(activeCommitBytes.get(), writer);
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/BoundedQueueExecutor.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/BoundedQueueExecutor.java
index 9a51e8c18febe..29a4ea7c5355c 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/BoundedQueueExecutor.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/BoundedQueueExecutor.java
@@ -18,62 +18,138 @@
package org.apache.beam.runners.dataflow.worker.util;
import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.Semaphore;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Monitor;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Monitor.Guard;
-/** Executor that blocks on execute() if its queue is full. */
+/** An executor for executing work on windmill items. */
@SuppressWarnings({
"nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
-public class BoundedQueueExecutor extends ThreadPoolExecutor {
- private static class ReducableSemaphore extends Semaphore {
- ReducableSemaphore(int permits) {
- super(permits);
- }
-
- @Override
- public void reducePermits(int permits) {
- super.reducePermits(permits);
- }
- }
+public class BoundedQueueExecutor {
+ private final ThreadPoolExecutor executor;
+ private final int maximumElementsOutstanding;
+ private final long maximumBytesOutstanding;
- private ReducableSemaphore semaphore;
+ private final Monitor monitor = new Monitor();
+ private int elementsOutstanding = 0;
+ private long bytesOutstanding = 0;
public BoundedQueueExecutor(
int maximumPoolSize,
long keepAliveTime,
TimeUnit unit,
- int maximumQueueSize,
+ int maximumElementsOutstanding,
+ long maximumBytesOutstanding,
ThreadFactory threadFactory) {
- super(
- maximumPoolSize,
- maximumPoolSize,
- keepAliveTime,
- unit,
- new LinkedBlockingQueue(),
- threadFactory);
- this.semaphore = new ReducableSemaphore(maximumQueueSize);
- allowCoreThreadTimeOut(true);
+ executor =
+ new ThreadPoolExecutor(
+ maximumPoolSize,
+ maximumPoolSize,
+ keepAliveTime,
+ unit,
+ new LinkedBlockingQueue<>(),
+ threadFactory);
+ executor.allowCoreThreadTimeOut(true);
+ this.maximumElementsOutstanding = maximumElementsOutstanding;
+ this.maximumBytesOutstanding = maximumBytesOutstanding;
}
- // Before adding a Runnable to the queue, acquire the semaphore.
- @Override
- public void execute(Runnable r) {
- semaphore.acquireUninterruptibly();
- super.execute(r);
+ // Before adding a Work to the queue, check that there are enough bytes of space or no other
+ // outstanding elements of work.
+ public void execute(Runnable work, long workBytes) {
+ monitor.enterWhenUninterruptibly(
+ new Guard(monitor) {
+ @Override
+ public boolean isSatisfied() {
+ return elementsOutstanding == 0
+ || (bytesAvailable() >= workBytes
+ && elementsOutstanding < maximumElementsOutstanding);
+ }
+ });
+ executeLockHeld(work, workBytes);
}
// Forcibly add something to the queue, ignoring the length limit.
- public void forceExecute(Runnable r) {
- semaphore.reducePermits(1);
- super.execute(r);
+ public void forceExecute(Runnable work, long workBytes) {
+ monitor.enter();
+ executeLockHeld(work, workBytes);
+ }
+
+ public void shutdown() throws InterruptedException {
+ executor.shutdown();
+ if (!executor.awaitTermination(5, TimeUnit.MINUTES)) {
+ throw new RuntimeException("Work executor did not terminate within 5 minutes");
+ }
+ }
+
+ public boolean executorQueueIsEmpty() {
+ return executor.getQueue().isEmpty();
+ }
+
+ public String summaryHtml() {
+ monitor.enter();
+ try {
+ StringBuilder builder = new StringBuilder();
+ builder.append("Worker Threads: ");
+ builder.append(executor.getPoolSize());
+ builder.append("/");
+ builder.append(executor.getMaximumPoolSize());
+ builder.append(" /n");
+
+ builder.append("Active Threads: ");
+ builder.append(executor.getActiveCount());
+ builder.append(" /n");
+
+ builder.append("Work Queue Size: ");
+ builder.append(elementsOutstanding);
+ builder.append("/");
+ builder.append(maximumElementsOutstanding);
+ builder.append(" /n");
+
+ builder.append("Work Queue Bytes: ");
+ builder.append(bytesOutstanding);
+ builder.append("/");
+ builder.append(maximumBytesOutstanding);
+ builder.append(" /n");
+
+ return builder.toString();
+ } finally {
+ monitor.leave();
+ }
+ }
+
+ private void executeLockHeld(Runnable work, long workBytes) {
+ bytesOutstanding += workBytes;
+ ++elementsOutstanding;
+ monitor.leave();
+
+ try {
+ executor.execute(
+ () -> {
+ try {
+ work.run();
+ } finally {
+ decrementCounters(workBytes);
+ }
+ });
+ } catch (RuntimeException e) {
+ // If the execute() call threw an exception, decrement counters here.
+ decrementCounters(workBytes);
+ throw e;
+ }
+ }
+
+ private void decrementCounters(long workBytes) {
+ monitor.enter();
+ --elementsOutstanding;
+ bytesOutstanding -= workBytes;
+ monitor.leave();
}
- // Release the semaphore after taking a Runnable off the queue.
- @Override
- public void beforeExecute(Thread t, Runnable r) {
- semaphore.release();
+ private long bytesAvailable() {
+ return maximumBytesOutstanding - bytesOutstanding;
}
}
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
index c175d634508e6..c383e5d16e599 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
@@ -2693,14 +2693,14 @@ public void testActiveWork() throws Exception {
MockWork m1 = new MockWork(1);
assertTrue(computationState.activateWork(key1, m1));
- Mockito.verify(mockExecutor).execute(m1);
+ Mockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());
computationState.completeWork(key1, 1);
Mockito.verifyNoMoreInteractions(mockExecutor);
// Verify work queues.
MockWork m2 = new MockWork(2);
assertTrue(computationState.activateWork(key1, m2));
- Mockito.verify(mockExecutor).execute(m2);
+ Mockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());
MockWork m3 = new MockWork(3);
assertTrue(computationState.activateWork(key1, m3));
Mockito.verifyNoMoreInteractions(mockExecutor);
@@ -2708,19 +2708,19 @@ public void testActiveWork() throws Exception {
// Verify another key is a separate queue.
MockWork m4 = new MockWork(4);
assertTrue(computationState.activateWork(key2, m4));
- Mockito.verify(mockExecutor).execute(m4);
+ Mockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());
computationState.completeWork(key2, 4);
Mockito.verifyNoMoreInteractions(mockExecutor);
computationState.completeWork(key1, 2);
- Mockito.verify(mockExecutor).forceExecute(m3);
+ Mockito.verify(mockExecutor).forceExecute(m3, m3.getWorkItem().getSerializedSize());
computationState.completeWork(key1, 3);
Mockito.verifyNoMoreInteractions(mockExecutor);
// Verify duplicate work dropped.
MockWork m5 = new MockWork(5);
computationState.activateWork(key1, m5);
- Mockito.verify(mockExecutor).execute(m5);
+ Mockito.verify(mockExecutor).execute(m5, m5.getWorkItem().getSerializedSize());
assertFalse(computationState.activateWork(key1, m5));
Mockito.verifyNoMoreInteractions(mockExecutor);
computationState.completeWork(key1, 5);
@@ -2743,14 +2743,14 @@ public void testActiveWorkForShardedKeys() throws Exception {
MockWork m1 = new MockWork(1);
assertTrue(computationState.activateWork(key1Shard1, m1));
- Mockito.verify(mockExecutor).execute(m1);
+ Mockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());
computationState.completeWork(key1Shard1, 1);
Mockito.verifyNoMoreInteractions(mockExecutor);
// Verify work queues.
MockWork m2 = new MockWork(2);
assertTrue(computationState.activateWork(key1Shard1, m2));
- Mockito.verify(mockExecutor).execute(m2);
+ Mockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());
MockWork m3 = new MockWork(3);
assertTrue(computationState.activateWork(key1Shard1, m3));
Mockito.verifyNoMoreInteractions(mockExecutor);
@@ -2760,7 +2760,7 @@ public void testActiveWorkForShardedKeys() throws Exception {
assertFalse(computationState.activateWork(key1Shard1, m4));
Mockito.verifyNoMoreInteractions(mockExecutor);
assertTrue(computationState.activateWork(key1Shard2, m4));
- Mockito.verify(mockExecutor).execute(m4);
+ Mockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());
// Verify duplicate work dropped
assertFalse(computationState.activateWork(key1Shard2, m4));
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/EmbeddedEnvironmentFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/EmbeddedEnvironmentFactory.java
index 1e09d99674a26..bd164fe26b61b 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/EmbeddedEnvironmentFactory.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/environment/EmbeddedEnvironmentFactory.java
@@ -37,11 +37,11 @@
import org.apache.beam.runners.fnexecution.logging.GrpcLoggingService;
import org.apache.beam.runners.fnexecution.provisioning.StaticGrpcProvisionService;
import org.apache.beam.sdk.fn.IdGenerator;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.fn.server.GrpcFnServer;
import org.apache.beam.sdk.fn.server.InProcessServerFactory;
import org.apache.beam.sdk.fn.server.ServerFactory;
import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
import org.apache.beam.sdk.options.PipelineOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -106,7 +106,7 @@ public RemoteEnvironment createEnvironment(Environment environment, String worke
loggingServer.getApiServiceDescriptor(),
controlServer.getApiServiceDescriptor(),
null,
- InProcessManagedChannelFactory.create(),
+ ManagedChannelFactory.createInProcess(),
OutboundObserverFactory.clientDirect(),
Caches.fromOptions(options));
} catch (NoClassDefFoundError e) {
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
index 3a38e31f07f2b..59e61c1109a1e 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
@@ -98,12 +98,12 @@
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.fn.data.FnDataReceiver;
import org.apache.beam.sdk.fn.server.GrpcContextHeaderAccessorProvider;
import org.apache.beam.sdk.fn.server.GrpcFnServer;
import org.apache.beam.sdk.fn.server.InProcessServerFactory;
import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.options.ExperimentalOptions;
import org.apache.beam.sdk.options.PipelineOptions;
@@ -219,7 +219,7 @@ public void launchSdkHarness(PipelineOptions options) throws Exception {
loggingServer.getApiServiceDescriptor(),
controlServer.getApiServiceDescriptor(),
null,
- InProcessManagedChannelFactory.create(),
+ ManagedChannelFactory.createInProcess(),
OutboundObserverFactory.clientDirect(),
Caches.eternal());
} catch (Exception e) {
diff --git a/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java b/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
index 98f098bc4bd56..ef1aea4aa749d 100644
--- a/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
+++ b/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
@@ -38,7 +38,7 @@
import org.apache.beam.runners.portability.testing.TestJobService;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.PipelineResult.State;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.metrics.MetricQueryResults;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
@@ -94,7 +94,7 @@ public class PortableRunnerTest implements Serializable {
@Test
public void stagesAndRunsJob() throws Exception {
createJobServer(JobState.Enum.DONE, JobApi.MetricResults.getDefaultInstance());
- PortableRunner runner = PortableRunner.create(options, InProcessManagedChannelFactory.create());
+ PortableRunner runner = PortableRunner.create(options, ManagedChannelFactory.createInProcess());
State state = runner.run(p).waitUntilFinish();
assertThat(state, is(State.DONE));
}
@@ -103,7 +103,7 @@ public void stagesAndRunsJob() throws Exception {
public void extractsMetrics() throws Exception {
JobApi.MetricResults metricResults = generateMetricResults();
createJobServer(JobState.Enum.DONE, metricResults);
- PortableRunner runner = PortableRunner.create(options, InProcessManagedChannelFactory.create());
+ PortableRunner runner = PortableRunner.create(options, ManagedChannelFactory.createInProcess());
PipelineResult result = runner.run(p);
result.waitUntilFinish();
MetricQueryResults metricQueryResults = result.metrics().allMetrics();
diff --git a/runners/samza/build.gradle b/runners/samza/build.gradle
index acacf9f8cd607..68ea1780bd465 100644
--- a/runners/samza/build.gradle
+++ b/runners/samza/build.gradle
@@ -86,7 +86,7 @@ configurations.all {
exclude group: "org.slf4j", module: "slf4j-jdk14"
}
-task validatesRunner(type: Test) {
+tasks.register("validatesRunner", Test) {
group = "Verification"
description "Validates Samza runner"
systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
diff --git a/runners/samza/job-server/build.gradle b/runners/samza/job-server/build.gradle
index 8302f2d0b7173..a8adca036a674 100644
--- a/runners/samza/job-server/build.gradle
+++ b/runners/samza/job-server/build.gradle
@@ -185,7 +185,7 @@ def portableValidatesRunnerTask(String name, boolean docker) {
project.ext.validatesPortableRunnerDocker = portableValidatesRunnerTask("Docker", true)
project.ext.validatesPortableRunnerEmbedded = portableValidatesRunnerTask("Embedded", false)
-task validatesPortableRunner() {
+tasks.register("validatesPortableRunner") {
dependsOn validatesPortableRunnerDocker
dependsOn validatesPortableRunnerEmbedded
}
diff --git a/runners/spark/job-server/spark_job_server.gradle b/runners/spark/job-server/spark_job_server.gradle
index a483284f5074b..18ab88f7b0d07 100644
--- a/runners/spark/job-server/spark_job_server.gradle
+++ b/runners/spark/job-server/spark_job_server.gradle
@@ -213,7 +213,7 @@ project.ext.validatesPortableRunnerDocker= portableValidatesRunnerTask("Docker",
project.ext.validatesPortableRunnerBatch = portableValidatesRunnerTask("Batch", false, false)
project.ext.validatesPortableRunnerStreaming = portableValidatesRunnerTask("Streaming", true, false)
-task validatesPortableRunner() {
+tasks.register("validatesPortableRunner") {
dependsOn validatesPortableRunnerDocker
dependsOn validatesPortableRunnerBatch
dependsOn validatesPortableRunnerStreaming
diff --git a/runners/spark/spark_runner.gradle b/runners/spark/spark_runner.gradle
index 08124142560ad..7d08b582b0780 100644
--- a/runners/spark/spark_runner.gradle
+++ b/runners/spark/spark_runner.gradle
@@ -237,7 +237,7 @@ hadoopVersions.each {kv ->
}
}
-task validatesRunnerBatch(type: Test) {
+def validatesRunnerBatch = tasks.register("validatesRunnerBatch", Test) {
group = "Verification"
// Disable gradle cache
outputs.upToDateWhen { false }
@@ -290,7 +290,7 @@ task validatesRunnerBatch(type: Test) {
jvmArgs '-Xmx3g'
}
-task validatesRunnerStreaming(type: Test) {
+def validatesRunnerStreaming = tasks.register("validatesRunnerStreaming", Test) {
group = "Verification"
// Disable gradle cache
outputs.upToDateWhen { false }
@@ -316,7 +316,7 @@ task validatesRunnerStreaming(type: Test) {
}
}
-task validatesStructuredStreamingRunnerBatch(type: Test) {
+tasks.register("validatesStructuredStreamingRunnerBatch", Test) {
group = "Verification"
// Disable gradle cache
outputs.upToDateWhen { false }
@@ -385,7 +385,7 @@ task validatesStructuredStreamingRunnerBatch(type: Test) {
}
}
-task validatesRunner {
+tasks.register("validatesRunner") {
group = "Verification"
description "Validates Spark runner"
dependsOn validatesRunnerBatch
@@ -398,14 +398,15 @@ task validatesRunner {
// Generates :runners:spark:*:runQuickstartJavaSpark task
createJavaExamplesArchetypeValidationTask(type: 'Quickstart', runner: 'Spark')
-task hadoopVersionsTest(group: "Verification") {
+tasks.register("hadoopVersionsTest") {
+ group = "Verification"
def taskNames = hadoopVersions.keySet().stream()
.map{num -> "hadoopVersion${num}Test"}
.collect(Collectors.toList())
dependsOn taskNames
}
-task examplesIntegrationTest(type: Test) {
+tasks.register("examplesIntegrationTest", Test) {
group = "Verification"
// Disable gradle cache
outputs.upToDateWhen { false }
@@ -434,7 +435,8 @@ task examplesIntegrationTest(type: Test) {
}
hadoopVersions.each {kv ->
- task "hadoopVersion${kv.key}Test"(type: Test, group: "Verification") {
+ tasks.register("hadoopVersion${kv.key}Test", Test) {
+ group = "Verification"
description = "Runs Spark tests with Hadoop version $kv.value"
classpath = configurations."hadoopVersion$kv.key" + sourceSets.test.runtimeClasspath
systemProperty "beam.spark.test.reuseSparkContext", "true"
diff --git a/runners/twister2/build.gradle b/runners/twister2/build.gradle
index 36671b318d0c7..a363bacb4a718 100644
--- a/runners/twister2/build.gradle
+++ b/runners/twister2/build.gradle
@@ -68,7 +68,7 @@ dependencies {
}
}
-task validatesRunnerBatch(type: Test) {
+def validatesRunnerBatch = tasks.register("validatesRunnerBatch", Test) {
group = "Verification"
def pipelineOptions = JsonOutput.toJson([
"--runner=Twister2TestRunner",
@@ -101,7 +101,7 @@ task validatesRunnerBatch(type: Test) {
maxHeapSize = '6g'
}
-task validatesRunner {
+tasks.register("validatesRunner") {
group = "Verification"
description "Validates Twister2 Runner"
dependsOn validatesRunnerBatch
diff --git a/scripts/ci/pr-bot/.gitignore b/scripts/ci/pr-bot/.gitignore
new file mode 100644
index 0000000000000..7b665129e6948
--- /dev/null
+++ b/scripts/ci/pr-bot/.gitignore
@@ -0,0 +1,38 @@
+lib-cov
+*.seed
+*.log
+*.csv
+*.dat
+*.out
+*.pid
+*.gz
+*.swp
+
+pids
+logs
+results
+tmp
+
+# Coverage reports
+coverage
+
+# API keys and secrets
+.env
+
+# Dependency directory
+node_modules
+bower_components
+
+# Editors
+.idea
+*.iml
+
+# OS metadata
+.DS_Store
+Thumbs.db
+
+# Ignore built js files
+lib/**/*
+
+# ignore yarn.lock
+yarn.lock
\ No newline at end of file
diff --git a/scripts/ci/pr-bot/Commands.md b/scripts/ci/pr-bot/Commands.md
new file mode 100644
index 0000000000000..83d21cb7836de
--- /dev/null
+++ b/scripts/ci/pr-bot/Commands.md
@@ -0,0 +1,32 @@
+
+
+# PR Bot Commands
+
+The following commands are available for interaction with the PR Bot
+All commands are case insensitive.
+
+| Command | Description |
+| ----------- | ----------- |
+| `r: @username` | Ask someone for a review. This will disable the bot for the PR since it assumes you are able to find a reviewer. |
+| `assign to next reviewer` | If someone has been assigned to a PR by the bot, this unassigns them and picks a new reviewer. Useful if you don't have the bandwitdth or context to review. |
+| `stop reviewer notifications` | This will disable the bot for the PR. |
+| `remind me after tests pass` | This will comment after all checks complete and tag the person who commented the command. |
+| `waiting on author` | This shifts the attention set to the author. The author can shift the attention set back to the reviewer by commenting anywhere or pushing. |
+| `assign set of reviewers` | If the bot has not yet assigned a set of reviewers to the PR, this command will trigger that happening. |
\ No newline at end of file
diff --git a/scripts/ci/pr-bot/README.md b/scripts/ci/pr-bot/README.md
new file mode 100644
index 0000000000000..d6a55d45e9f13
--- /dev/null
+++ b/scripts/ci/pr-bot/README.md
@@ -0,0 +1,45 @@
+
+
+# PR Bot
+
+This directory holds all the code (except for Actions Workflows) for our PR bot designed to improve the PR experience.
+For a list of commands to use when interacting with the bot, see [Commands.md](./Commands.md).
+For a design doc explaining the design and implementation, see [Automate Reviewer Assignment](https://docs.google.com/document/d/1FhRPRD6VXkYlLAPhNfZB7y2Yese2FCWBzjx67d3TjBo/edit#)
+
+## Build/Test
+
+To build, run:
+
+```
+npm install
+npm run build
+```
+
+To run the tests:
+
+```
+npm test
+```
+
+Before checking in code, run prettier on it:
+
+```
+npm run format
+```
\ No newline at end of file
diff --git a/scripts/ci/pr-bot/package-lock.json b/scripts/ci/pr-bot/package-lock.json
new file mode 100644
index 0000000000000..cf881686c1405
--- /dev/null
+++ b/scripts/ci/pr-bot/package-lock.json
@@ -0,0 +1,2062 @@
+{
+ "name": "pr-bot",
+ "version": "1.0.0",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "pr-bot",
+ "version": "1.0.0",
+ "dependencies": {
+ "@actions/exec": "^1.1.0",
+ "@actions/github": "^5.0.0",
+ "@octokit/rest": "^18.12.0",
+ "js-yaml": "^4.1.0",
+ "prettier": "^2.5.1"
+ },
+ "devDependencies": {
+ "@types/mocha": "^9.1.0",
+ "@types/node": "^16.11.7",
+ "mocha": "^9.1.3",
+ "typescript": "4.2.4"
+ }
+ },
+ "node_modules/@actions/exec": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz",
+ "integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==",
+ "dependencies": {
+ "@actions/io": "^1.0.1"
+ }
+ },
+ "node_modules/@actions/github": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.0.tgz",
+ "integrity": "sha512-QvE9eAAfEsS+yOOk0cylLBIO/d6WyWIOvsxxzdrPFaud39G6BOkUwScXZn1iBzQzHyu9SBkkLSWlohDWdsasAQ==",
+ "dependencies": {
+ "@actions/http-client": "^1.0.11",
+ "@octokit/core": "^3.4.0",
+ "@octokit/plugin-paginate-rest": "^2.13.3",
+ "@octokit/plugin-rest-endpoint-methods": "^5.1.1"
+ }
+ },
+ "node_modules/@actions/http-client": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
+ "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
+ "dependencies": {
+ "tunnel": "0.0.6"
+ }
+ },
+ "node_modules/@actions/io": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz",
+ "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA=="
+ },
+ "node_modules/@octokit/auth-token": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
+ "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==",
+ "dependencies": {
+ "@octokit/types": "^6.0.3"
+ }
+ },
+ "node_modules/@octokit/core": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.5.1.tgz",
+ "integrity": "sha512-omncwpLVxMP+GLpLPgeGJBF6IWJFjXDS5flY5VbppePYX9XehevbDykRH9PdCdvqt9TS5AOTiDide7h0qrkHjw==",
+ "dependencies": {
+ "@octokit/auth-token": "^2.4.4",
+ "@octokit/graphql": "^4.5.8",
+ "@octokit/request": "^5.6.0",
+ "@octokit/request-error": "^2.0.5",
+ "@octokit/types": "^6.0.3",
+ "before-after-hook": "^2.2.0",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "node_modules/@octokit/endpoint": {
+ "version": "6.0.12",
+ "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz",
+ "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==",
+ "dependencies": {
+ "@octokit/types": "^6.0.3",
+ "is-plain-object": "^5.0.0",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "node_modules/@octokit/graphql": {
+ "version": "4.8.0",
+ "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz",
+ "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==",
+ "dependencies": {
+ "@octokit/request": "^5.6.0",
+ "@octokit/types": "^6.0.3",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "node_modules/@octokit/openapi-types": {
+ "version": "11.2.0",
+ "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-11.2.0.tgz",
+ "integrity": "sha512-PBsVO+15KSlGmiI8QAzaqvsNlZlrDlyAJYcrXBCvVUxCp7VnXjkwPoFHgjEJXx3WF9BAwkA6nfCUA7i9sODzKA=="
+ },
+ "node_modules/@octokit/plugin-paginate-rest": {
+ "version": "2.17.0",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.17.0.tgz",
+ "integrity": "sha512-tzMbrbnam2Mt4AhuyCHvpRkS0oZ5MvwwcQPYGtMv4tUa5kkzG58SVB0fcsLulOZQeRnOgdkZWkRUiyBlh0Bkyw==",
+ "dependencies": {
+ "@octokit/types": "^6.34.0"
+ },
+ "peerDependencies": {
+ "@octokit/core": ">=2"
+ }
+ },
+ "node_modules/@octokit/plugin-request-log": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
+ "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==",
+ "peerDependencies": {
+ "@octokit/core": ">=3"
+ }
+ },
+ "node_modules/@octokit/plugin-rest-endpoint-methods": {
+ "version": "5.13.0",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.13.0.tgz",
+ "integrity": "sha512-uJjMTkN1KaOIgNtUPMtIXDOjx6dGYysdIFhgA52x4xSadQCz3b/zJexvITDVpANnfKPW/+E0xkOvLntqMYpviA==",
+ "dependencies": {
+ "@octokit/types": "^6.34.0",
+ "deprecation": "^2.3.1"
+ },
+ "peerDependencies": {
+ "@octokit/core": ">=3"
+ }
+ },
+ "node_modules/@octokit/request": {
+ "version": "5.6.3",
+ "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz",
+ "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==",
+ "dependencies": {
+ "@octokit/endpoint": "^6.0.1",
+ "@octokit/request-error": "^2.1.0",
+ "@octokit/types": "^6.16.1",
+ "is-plain-object": "^5.0.0",
+ "node-fetch": "^2.6.7",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "node_modules/@octokit/request-error": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz",
+ "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==",
+ "dependencies": {
+ "@octokit/types": "^6.0.3",
+ "deprecation": "^2.0.0",
+ "once": "^1.4.0"
+ }
+ },
+ "node_modules/@octokit/rest": {
+ "version": "18.12.0",
+ "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.12.0.tgz",
+ "integrity": "sha512-gDPiOHlyGavxr72y0guQEhLsemgVjwRePayJ+FcKc2SJqKUbxbkvf5kAZEWA/MKvsfYlQAMVzNJE3ezQcxMJ2Q==",
+ "dependencies": {
+ "@octokit/core": "^3.5.1",
+ "@octokit/plugin-paginate-rest": "^2.16.8",
+ "@octokit/plugin-request-log": "^1.0.4",
+ "@octokit/plugin-rest-endpoint-methods": "^5.12.0"
+ }
+ },
+ "node_modules/@octokit/types": {
+ "version": "6.34.0",
+ "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.34.0.tgz",
+ "integrity": "sha512-s1zLBjWhdEI2zwaoSgyOFoKSl109CUcVBCc7biPJ3aAf6LGLU6szDvi31JPU7bxfla2lqfhjbbg/5DdFNxOwHw==",
+ "dependencies": {
+ "@octokit/openapi-types": "^11.2.0"
+ }
+ },
+ "node_modules/@types/mocha": {
+ "version": "9.1.0",
+ "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz",
+ "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==",
+ "dev": true
+ },
+ "node_modules/@types/node": {
+ "version": "16.11.25",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.25.tgz",
+ "integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ==",
+ "dev": true
+ },
+ "node_modules/@ungap/promise-all-settled": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz",
+ "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==",
+ "dev": true
+ },
+ "node_modules/ansi-colors": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
+ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/anymatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz",
+ "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==",
+ "dev": true,
+ "dependencies": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true
+ },
+ "node_modules/before-after-hook": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz",
+ "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ=="
+ },
+ "node_modules/binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dev": true,
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "dependencies": {
+ "fill-range": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/browser-stdout": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
+ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
+ "dev": true
+ },
+ "node_modules/camelcase": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
+ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/chalk": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+ "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/chalk?sponsor=1"
+ }
+ },
+ "node_modules/chalk/node_modules/supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/chokidar": {
+ "version": "3.5.3",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
+ "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://paulmillr.com/funding/"
+ }
+ ],
+ "dependencies": {
+ "anymatch": "~3.1.2",
+ "braces": "~3.0.2",
+ "glob-parent": "~5.1.2",
+ "is-binary-path": "~2.1.0",
+ "is-glob": "~4.0.1",
+ "normalize-path": "~3.0.0",
+ "readdirp": "~3.6.0"
+ },
+ "engines": {
+ "node": ">= 8.10.0"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.2"
+ }
+ },
+ "node_modules/cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "dependencies": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "node_modules/concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
+ "dev": true
+ },
+ "node_modules/debug": {
+ "version": "4.3.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz",
+ "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==",
+ "dev": true,
+ "dependencies": {
+ "ms": "2.1.2"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/debug/node_modules/ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ },
+ "node_modules/decamelize": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
+ "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/deprecation": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
+ "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
+ },
+ "node_modules/diff": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
+ "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.3.1"
+ }
+ },
+ "node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "node_modules/escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "dependencies": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/flat": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
+ "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
+ "dev": true,
+ "bin": {
+ "flat": "cli.js"
+ }
+ },
+ "node_modules/fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
+ "dev": true
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
+ "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true,
+ "engines": {
+ "node": "6.* || 8.* || >= 10.*"
+ }
+ },
+ "node_modules/glob": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
+ "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
+ "dev": true,
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "dependencies": {
+ "is-glob": "^4.0.1"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/growl": {
+ "version": "1.10.5",
+ "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
+ "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==",
+ "dev": true,
+ "engines": {
+ "node": ">=4.x"
+ }
+ },
+ "node_modules/has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/he": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
+ "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
+ "dev": true,
+ "bin": {
+ "he": "bin/he"
+ }
+ },
+ "node_modules/inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "dev": true,
+ "dependencies": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true
+ },
+ "node_modules/is-binary-path": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+ "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+ "dev": true,
+ "dependencies": {
+ "binary-extensions": "^2.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dev": true,
+ "dependencies": {
+ "is-extglob": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/is-plain-obj": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
+ "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-plain-object": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
+ "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-unicode-supported": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz",
+ "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
+ "node_modules/js-yaml": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+ "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "dependencies": {
+ "argparse": "^2.0.1"
+ },
+ "bin": {
+ "js-yaml": "bin/js-yaml.js"
+ }
+ },
+ "node_modules/locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "dependencies": {
+ "p-locate": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/log-symbols": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz",
+ "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==",
+ "dev": true,
+ "dependencies": {
+ "chalk": "^4.1.0",
+ "is-unicode-supported": "^0.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "dev": true,
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/mocha": {
+ "version": "9.2.0",
+ "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.2.0.tgz",
+ "integrity": "sha512-kNn7E8g2SzVcq0a77dkphPsDSN7P+iYkqE0ZsGCYWRsoiKjOt+NvXfaagik8vuDa6W5Zw3qxe8Jfpt5qKf+6/Q==",
+ "dev": true,
+ "dependencies": {
+ "@ungap/promise-all-settled": "1.1.2",
+ "ansi-colors": "4.1.1",
+ "browser-stdout": "1.3.1",
+ "chokidar": "3.5.3",
+ "debug": "4.3.3",
+ "diff": "5.0.0",
+ "escape-string-regexp": "4.0.0",
+ "find-up": "5.0.0",
+ "glob": "7.2.0",
+ "growl": "1.10.5",
+ "he": "1.2.0",
+ "js-yaml": "4.1.0",
+ "log-symbols": "4.1.0",
+ "minimatch": "3.0.4",
+ "ms": "2.1.3",
+ "nanoid": "3.2.0",
+ "serialize-javascript": "6.0.0",
+ "strip-json-comments": "3.1.1",
+ "supports-color": "8.1.1",
+ "which": "2.0.2",
+ "workerpool": "6.2.0",
+ "yargs": "16.2.0",
+ "yargs-parser": "20.2.4",
+ "yargs-unparser": "2.0.0"
+ },
+ "bin": {
+ "_mocha": "bin/_mocha",
+ "mocha": "bin/mocha"
+ },
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/mochajs"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ },
+ "node_modules/nanoid": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz",
+ "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==",
+ "dev": true,
+ "bin": {
+ "nanoid": "bin/nanoid.cjs"
+ },
+ "engines": {
+ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
+ }
+ },
+ "node_modules/node-fetch": {
+ "version": "2.6.7",
+ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
+ "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
+ "dependencies": {
+ "whatwg-url": "^5.0.0"
+ },
+ "engines": {
+ "node": "4.x || >=6.0.0"
+ },
+ "peerDependencies": {
+ "encoding": "^0.1.0"
+ },
+ "peerDependenciesMeta": {
+ "encoding": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "dependencies": {
+ "wrappy": "1"
+ }
+ },
+ "node_modules/p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "dev": true,
+ "dependencies": {
+ "yocto-queue": "^0.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "dependencies": {
+ "p-limit": "^3.0.2"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true,
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/prettier": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz",
+ "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==",
+ "bin": {
+ "prettier": "bin-prettier.js"
+ },
+ "engines": {
+ "node": ">=10.13.0"
+ }
+ },
+ "node_modules/randombytes": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
+ "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
+ "dev": true,
+ "dependencies": {
+ "safe-buffer": "^5.1.0"
+ }
+ },
+ "node_modules/readdirp": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+ "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
+ "dev": true,
+ "dependencies": {
+ "picomatch": "^2.2.1"
+ },
+ "engines": {
+ "node": ">=8.10.0"
+ }
+ },
+ "node_modules/require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "dev": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "dev": true,
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/feross"
+ },
+ {
+ "type": "patreon",
+ "url": "https://www.patreon.com/feross"
+ },
+ {
+ "type": "consulting",
+ "url": "https://feross.org/support"
+ }
+ ]
+ },
+ "node_modules/serialize-javascript": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
+ "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
+ "dev": true,
+ "dependencies": {
+ "randombytes": "^2.1.0"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true,
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "dependencies": {
+ "has-flag": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/supports-color?sponsor=1"
+ }
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/tr46": {
+ "version": "0.0.3",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+ "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
+ },
+ "node_modules/tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
+ "engines": {
+ "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+ }
+ },
+ "node_modules/typescript": {
+ "version": "4.2.4",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.2.4.tgz",
+ "integrity": "sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg==",
+ "dev": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=4.2.0"
+ }
+ },
+ "node_modules/universal-user-agent": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
+ "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
+ },
+ "node_modules/webidl-conversions": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+ "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE="
+ },
+ "node_modules/whatwg-url": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+ "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=",
+ "dependencies": {
+ "tr46": "~0.0.3",
+ "webidl-conversions": "^3.0.0"
+ }
+ },
+ "node_modules/which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "dependencies": {
+ "isexe": "^2.0.0"
+ },
+ "bin": {
+ "node-which": "bin/node-which"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/workerpool": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.0.tgz",
+ "integrity": "sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A==",
+ "dev": true
+ },
+ "node_modules/wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
+ },
+ "node_modules/y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yargs": {
+ "version": "16.2.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
+ "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
+ "dev": true,
+ "dependencies": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^20.2.2"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yargs-parser": {
+ "version": "20.2.4",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
+ "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yargs-unparser": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
+ "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
+ "dev": true,
+ "dependencies": {
+ "camelcase": "^6.0.0",
+ "decamelize": "^4.0.0",
+ "flat": "^5.0.2",
+ "is-plain-obj": "^2.1.0"
+ },
+ "engines": {
+ "node": ">=10"
+ }
+ },
+ "node_modules/yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+ "dev": true,
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ }
+ },
+ "dependencies": {
+ "@actions/exec": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz",
+ "integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==",
+ "requires": {
+ "@actions/io": "^1.0.1"
+ }
+ },
+ "@actions/github": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/@actions/github/-/github-5.0.0.tgz",
+ "integrity": "sha512-QvE9eAAfEsS+yOOk0cylLBIO/d6WyWIOvsxxzdrPFaud39G6BOkUwScXZn1iBzQzHyu9SBkkLSWlohDWdsasAQ==",
+ "requires": {
+ "@actions/http-client": "^1.0.11",
+ "@octokit/core": "^3.4.0",
+ "@octokit/plugin-paginate-rest": "^2.13.3",
+ "@octokit/plugin-rest-endpoint-methods": "^5.1.1"
+ }
+ },
+ "@actions/http-client": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
+ "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
+ "requires": {
+ "tunnel": "0.0.6"
+ }
+ },
+ "@actions/io": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz",
+ "integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA=="
+ },
+ "@octokit/auth-token": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
+ "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==",
+ "requires": {
+ "@octokit/types": "^6.0.3"
+ }
+ },
+ "@octokit/core": {
+ "version": "3.5.1",
+ "resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.5.1.tgz",
+ "integrity": "sha512-omncwpLVxMP+GLpLPgeGJBF6IWJFjXDS5flY5VbppePYX9XehevbDykRH9PdCdvqt9TS5AOTiDide7h0qrkHjw==",
+ "requires": {
+ "@octokit/auth-token": "^2.4.4",
+ "@octokit/graphql": "^4.5.8",
+ "@octokit/request": "^5.6.0",
+ "@octokit/request-error": "^2.0.5",
+ "@octokit/types": "^6.0.3",
+ "before-after-hook": "^2.2.0",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "@octokit/endpoint": {
+ "version": "6.0.12",
+ "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz",
+ "integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==",
+ "requires": {
+ "@octokit/types": "^6.0.3",
+ "is-plain-object": "^5.0.0",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "@octokit/graphql": {
+ "version": "4.8.0",
+ "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz",
+ "integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==",
+ "requires": {
+ "@octokit/request": "^5.6.0",
+ "@octokit/types": "^6.0.3",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "@octokit/openapi-types": {
+ "version": "11.2.0",
+ "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-11.2.0.tgz",
+ "integrity": "sha512-PBsVO+15KSlGmiI8QAzaqvsNlZlrDlyAJYcrXBCvVUxCp7VnXjkwPoFHgjEJXx3WF9BAwkA6nfCUA7i9sODzKA=="
+ },
+ "@octokit/plugin-paginate-rest": {
+ "version": "2.17.0",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.17.0.tgz",
+ "integrity": "sha512-tzMbrbnam2Mt4AhuyCHvpRkS0oZ5MvwwcQPYGtMv4tUa5kkzG58SVB0fcsLulOZQeRnOgdkZWkRUiyBlh0Bkyw==",
+ "requires": {
+ "@octokit/types": "^6.34.0"
+ }
+ },
+ "@octokit/plugin-request-log": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
+ "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==",
+ "requires": {}
+ },
+ "@octokit/plugin-rest-endpoint-methods": {
+ "version": "5.13.0",
+ "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.13.0.tgz",
+ "integrity": "sha512-uJjMTkN1KaOIgNtUPMtIXDOjx6dGYysdIFhgA52x4xSadQCz3b/zJexvITDVpANnfKPW/+E0xkOvLntqMYpviA==",
+ "requires": {
+ "@octokit/types": "^6.34.0",
+ "deprecation": "^2.3.1"
+ }
+ },
+ "@octokit/request": {
+ "version": "5.6.3",
+ "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz",
+ "integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==",
+ "requires": {
+ "@octokit/endpoint": "^6.0.1",
+ "@octokit/request-error": "^2.1.0",
+ "@octokit/types": "^6.16.1",
+ "is-plain-object": "^5.0.0",
+ "node-fetch": "^2.6.7",
+ "universal-user-agent": "^6.0.0"
+ }
+ },
+ "@octokit/request-error": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz",
+ "integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==",
+ "requires": {
+ "@octokit/types": "^6.0.3",
+ "deprecation": "^2.0.0",
+ "once": "^1.4.0"
+ }
+ },
+ "@octokit/rest": {
+ "version": "18.12.0",
+ "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.12.0.tgz",
+ "integrity": "sha512-gDPiOHlyGavxr72y0guQEhLsemgVjwRePayJ+FcKc2SJqKUbxbkvf5kAZEWA/MKvsfYlQAMVzNJE3ezQcxMJ2Q==",
+ "requires": {
+ "@octokit/core": "^3.5.1",
+ "@octokit/plugin-paginate-rest": "^2.16.8",
+ "@octokit/plugin-request-log": "^1.0.4",
+ "@octokit/plugin-rest-endpoint-methods": "^5.12.0"
+ }
+ },
+ "@octokit/types": {
+ "version": "6.34.0",
+ "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.34.0.tgz",
+ "integrity": "sha512-s1zLBjWhdEI2zwaoSgyOFoKSl109CUcVBCc7biPJ3aAf6LGLU6szDvi31JPU7bxfla2lqfhjbbg/5DdFNxOwHw==",
+ "requires": {
+ "@octokit/openapi-types": "^11.2.0"
+ }
+ },
+ "@types/mocha": {
+ "version": "9.1.0",
+ "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.0.tgz",
+ "integrity": "sha512-QCWHkbMv4Y5U9oW10Uxbr45qMMSzl4OzijsozynUAgx3kEHUdXB00udx2dWDQ7f2TU2a2uuiFaRZjCe3unPpeg==",
+ "dev": true
+ },
+ "@types/node": {
+ "version": "16.11.25",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.25.tgz",
+ "integrity": "sha512-NrTwfD7L1RTc2qrHQD4RTTy4p0CO2LatKBEKEds3CaVuhoM/+DJzmWZl5f+ikR8cm8F5mfJxK+9rQq07gRiSjQ==",
+ "dev": true
+ },
+ "@ungap/promise-all-settled": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz",
+ "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==",
+ "dev": true
+ },
+ "ansi-colors": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz",
+ "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==",
+ "dev": true
+ },
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "anymatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz",
+ "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==",
+ "dev": true,
+ "requires": {
+ "normalize-path": "^3.0.0",
+ "picomatch": "^2.0.4"
+ }
+ },
+ "argparse": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
+ },
+ "balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true
+ },
+ "before-after-hook": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz",
+ "integrity": "sha512-3pZEU3NT5BFUo/AD5ERPWOgQOCZITni6iavr5AUw5AUwQjMlI0kzu5btnyD39AF0gUEsDPwJT+oY1ORBJijPjQ=="
+ },
+ "binary-extensions": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz",
+ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==",
+ "dev": true
+ },
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dev": true,
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "braces": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
+ "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+ "dev": true,
+ "requires": {
+ "fill-range": "^7.0.1"
+ }
+ },
+ "browser-stdout": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz",
+ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==",
+ "dev": true
+ },
+ "camelcase": {
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz",
+ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==",
+ "dev": true
+ },
+ "chalk": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+ "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.1.0",
+ "supports-color": "^7.1.0"
+ },
+ "dependencies": {
+ "supports-color": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+ "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ }
+ }
+ },
+ "chokidar": {
+ "version": "3.5.3",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
+ "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==",
+ "dev": true,
+ "requires": {
+ "anymatch": "~3.1.2",
+ "braces": "~3.0.2",
+ "fsevents": "~2.3.2",
+ "glob-parent": "~5.1.2",
+ "is-binary-path": "~2.1.0",
+ "is-glob": "~4.0.1",
+ "normalize-path": "~3.0.0",
+ "readdirp": "~3.6.0"
+ }
+ },
+ "cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
+ "dev": true
+ },
+ "debug": {
+ "version": "4.3.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz",
+ "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ },
+ "dependencies": {
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "decamelize": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz",
+ "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==",
+ "dev": true
+ },
+ "deprecation": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
+ "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
+ },
+ "diff": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz",
+ "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==",
+ "dev": true
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true
+ },
+ "escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "dev": true
+ },
+ "fill-range": {
+ "version": "7.0.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
+ "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+ "dev": true,
+ "requires": {
+ "to-regex-range": "^5.0.1"
+ }
+ },
+ "find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "requires": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
+ "flat": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
+ "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
+ "dev": true
+ },
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
+ "dev": true
+ },
+ "fsevents": {
+ "version": "2.3.2",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
+ "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
+ "dev": true,
+ "optional": true
+ },
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true
+ },
+ "glob": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
+ "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
+ "dev": true,
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "glob-parent": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+ "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+ "dev": true,
+ "requires": {
+ "is-glob": "^4.0.1"
+ }
+ },
+ "growl": {
+ "version": "1.10.5",
+ "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz",
+ "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==",
+ "dev": true
+ },
+ "has-flag": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
+ "dev": true
+ },
+ "he": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
+ "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
+ "dev": true
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "dev": true,
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true
+ },
+ "is-binary-path": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+ "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+ "dev": true,
+ "requires": {
+ "binary-extensions": "^2.0.0"
+ }
+ },
+ "is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=",
+ "dev": true
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true
+ },
+ "is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dev": true,
+ "requires": {
+ "is-extglob": "^2.1.1"
+ }
+ },
+ "is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true
+ },
+ "is-plain-obj": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz",
+ "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==",
+ "dev": true
+ },
+ "is-plain-object": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
+ "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="
+ },
+ "is-unicode-supported": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz",
+ "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==",
+ "dev": true
+ },
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
+ "js-yaml": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+ "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "requires": {
+ "argparse": "^2.0.1"
+ }
+ },
+ "locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "requires": {
+ "p-locate": "^5.0.0"
+ }
+ },
+ "log-symbols": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz",
+ "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==",
+ "dev": true,
+ "requires": {
+ "chalk": "^4.1.0",
+ "is-unicode-supported": "^0.1.0"
+ }
+ },
+ "minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "dev": true,
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ }
+ },
+ "mocha": {
+ "version": "9.2.0",
+ "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.2.0.tgz",
+ "integrity": "sha512-kNn7E8g2SzVcq0a77dkphPsDSN7P+iYkqE0ZsGCYWRsoiKjOt+NvXfaagik8vuDa6W5Zw3qxe8Jfpt5qKf+6/Q==",
+ "dev": true,
+ "requires": {
+ "@ungap/promise-all-settled": "1.1.2",
+ "ansi-colors": "4.1.1",
+ "browser-stdout": "1.3.1",
+ "chokidar": "3.5.3",
+ "debug": "4.3.3",
+ "diff": "5.0.0",
+ "escape-string-regexp": "4.0.0",
+ "find-up": "5.0.0",
+ "glob": "7.2.0",
+ "growl": "1.10.5",
+ "he": "1.2.0",
+ "js-yaml": "4.1.0",
+ "log-symbols": "4.1.0",
+ "minimatch": "3.0.4",
+ "ms": "2.1.3",
+ "nanoid": "3.2.0",
+ "serialize-javascript": "6.0.0",
+ "strip-json-comments": "3.1.1",
+ "supports-color": "8.1.1",
+ "which": "2.0.2",
+ "workerpool": "6.2.0",
+ "yargs": "16.2.0",
+ "yargs-parser": "20.2.4",
+ "yargs-unparser": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ },
+ "nanoid": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.2.0.tgz",
+ "integrity": "sha512-fmsZYa9lpn69Ad5eDn7FMcnnSR+8R34W9qJEijxYhTbfOWzr22n1QxCMzXLK+ODyW2973V3Fux959iQoUxzUIA==",
+ "dev": true
+ },
+ "node-fetch": {
+ "version": "2.6.7",
+ "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
+ "integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
+ "requires": {
+ "whatwg-url": "^5.0.0"
+ }
+ },
+ "normalize-path": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+ "dev": true
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "requires": {
+ "wrappy": "1"
+ }
+ },
+ "p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "dev": true,
+ "requires": {
+ "yocto-queue": "^0.1.0"
+ }
+ },
+ "p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "requires": {
+ "p-limit": "^3.0.2"
+ }
+ },
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
+ "dev": true
+ },
+ "picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true
+ },
+ "prettier": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz",
+ "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg=="
+ },
+ "randombytes": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz",
+ "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "^5.1.0"
+ }
+ },
+ "readdirp": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+ "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
+ "dev": true,
+ "requires": {
+ "picomatch": "^2.2.1"
+ }
+ },
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "dev": true
+ },
+ "safe-buffer": {
+ "version": "5.2.1",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
+ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
+ "dev": true
+ },
+ "serialize-javascript": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz",
+ "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==",
+ "dev": true,
+ "requires": {
+ "randombytes": "^2.1.0"
+ }
+ },
+ "string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ },
+ "strip-json-comments": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
+ "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
+ "dev": true,
+ "requires": {
+ "has-flag": "^4.0.0"
+ }
+ },
+ "to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "requires": {
+ "is-number": "^7.0.0"
+ }
+ },
+ "tr46": {
+ "version": "0.0.3",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+ "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
+ },
+ "tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
+ },
+ "typescript": {
+ "version": "4.2.4",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.2.4.tgz",
+ "integrity": "sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg==",
+ "dev": true
+ },
+ "universal-user-agent": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
+ "integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
+ },
+ "webidl-conversions": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+ "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE="
+ },
+ "whatwg-url": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+ "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=",
+ "requires": {
+ "tr46": "~0.0.3",
+ "webidl-conversions": "^3.0.0"
+ }
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "workerpool": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.0.tgz",
+ "integrity": "sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A==",
+ "dev": true
+ },
+ "wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ }
+ },
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
+ },
+ "y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true
+ },
+ "yargs": {
+ "version": "16.2.0",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
+ "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
+ "dev": true,
+ "requires": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^20.2.2"
+ }
+ },
+ "yargs-parser": {
+ "version": "20.2.4",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz",
+ "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==",
+ "dev": true
+ },
+ "yargs-unparser": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz",
+ "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==",
+ "dev": true,
+ "requires": {
+ "camelcase": "^6.0.0",
+ "decamelize": "^4.0.0",
+ "flat": "^5.0.2",
+ "is-plain-obj": "^2.1.0"
+ }
+ },
+ "yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+ "dev": true
+ }
+ }
+}
diff --git a/scripts/ci/pr-bot/package.json b/scripts/ci/pr-bot/package.json
new file mode 100644
index 0000000000000..5f6ae37a9f024
--- /dev/null
+++ b/scripts/ci/pr-bot/package.json
@@ -0,0 +1,27 @@
+{
+ "name": "pr-bot",
+ "version": "1.0.0",
+ "description": "Scripts for the Beam PR bot",
+ "main": "processNewPrs.js",
+ "scripts": {
+ "build": "tsc",
+ "format": "prettier --write *.ts shared/**/*.ts test/**/*.ts",
+ "test": "mocha lib/test",
+ "processNewPrs": "npm run build && node lib/processNewPrs.js",
+ "processPrUpdate": "npm run build && node lib/processPrUpdate.js",
+ "gatherMetrics": "npm run build && node lib/gatherMetrics.js"
+ },
+ "dependencies": {
+ "@actions/exec": "^1.1.0",
+ "@actions/github": "^5.0.0",
+ "@octokit/rest": "^18.12.0",
+ "js-yaml": "^4.1.0",
+ "prettier": "^2.5.1"
+ },
+ "devDependencies": {
+ "@types/mocha": "^9.1.0",
+ "@types/node": "^16.11.7",
+ "mocha": "^9.1.3",
+ "typescript": "4.2.4"
+ }
+}
diff --git a/scripts/ci/pr-bot/processNewPrs.ts b/scripts/ci/pr-bot/processNewPrs.ts
new file mode 100644
index 0000000000000..411e3b5428ece
--- /dev/null
+++ b/scripts/ci/pr-bot/processNewPrs.ts
@@ -0,0 +1,226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const github = require("./shared/githubUtils");
+const { getChecksStatus } = require("./shared/checks");
+const commentStrings = require("./shared/commentStrings");
+const { ReviewerConfig } = require("./shared/reviewerConfig");
+const { PersistentState } = require("./shared/persistentState");
+const { Pr } = require("./shared/pr");
+const { REPO_OWNER, REPO, PATH_TO_CONFIG_FILE } = require("./shared/constants");
+import { CheckStatus } from "./shared/checks";
+
+/*
+ * Returns true if the pr needs to be processed or false otherwise.
+ * We don't need to process PRs that:
+ * 1) Have WIP in their name
+ * 2) Are less than 20 minutes old
+ * 3) Are draft prs
+ * 4) Are closed
+ * 5) Have already been processed
+ * 6) Have notifications stopped
+ * 7) The pr doesn't contain the go label (temporary). TODO(damccorm) - remove this when we're ready to roll this out to everyone.
+ * unless we're supposed to remind the user after tests pass
+ * (in which case that's all we need to do).
+ */
+function needsProcessed(pull: any, prState: typeof Pr): boolean {
+ if (!pull.labels.find((label) => label.name.toLowerCase() === "go")) {
+ console.log(
+ `Skipping PR ${pull.number} because it doesn't contain the go label`
+ );
+ return false;
+ }
+ if (prState.remindAfterTestsPass && prState.remindAfterTestsPass.length > 0) {
+ return true;
+ }
+ if (pull.title.toLowerCase().indexOf("wip") >= 0) {
+ console.log(`Skipping PR ${pull.number} because it is a WIP`);
+ return false;
+ }
+ let timeCutoff = new Date(new Date().getTime() - 20 * 60000);
+ if (new Date(pull.created_at) > timeCutoff) {
+ console.log(
+ `Skipping PR ${pull.number} because it was created less than 20 minutes ago`
+ );
+ return false;
+ }
+ if (pull.state.toLowerCase() !== "open") {
+ console.log(`Skipping PR ${pull.number} because it is closed`);
+ return false;
+ }
+ if (pull.draft) {
+ console.log(`Skipping PR ${pull.number} because it is a draft`);
+ return false;
+ }
+ if (Object.keys(prState.reviewersAssignedForLabels).length > 0) {
+ console.log(
+ `Skipping PR ${pull.number} because it already has been assigned`
+ );
+ return false;
+ }
+ if (prState.stopReviewerNotifications) {
+ console.log(
+ `Skipping PR ${pull.number} because reviewer notifications have been stopped`
+ );
+ return false;
+ }
+
+ return true;
+}
+
+/*
+ * If the checks passed in via checkstate have completed, notifies the users who have configured notifications.
+ */
+async function remindIfChecksCompleted(
+ pull: any,
+ stateClient: typeof PersistentState,
+ checkState: CheckStatus,
+ prState: typeof Pr
+) {
+ console.log(
+ `Notifying reviewers if checks for PR ${pull.number} have completed, then returning`
+ );
+ if (!checkState.completed) {
+ return;
+ }
+ if (checkState.succeeded) {
+ await github.addPrComment(
+ pull.number,
+ commentStrings.allChecksPassed(prState.remindAfterTestsPass)
+ );
+ } else {
+ await github.addPrComment(
+ pull.number,
+ commentStrings.someChecksFailing(prState.remindAfterTestsPass)
+ );
+ }
+ prState.remindAfterTestsPass = [];
+ await stateClient.writePrState(pull.number, prState);
+}
+
+/*
+ * If we haven't already, let the author know checks are failing.
+ */
+async function notifyChecksFailed(
+ pull: any,
+ stateClient: typeof PersistentState,
+ prState: typeof Pr
+) {
+ console.log(
+ `Checks are failing for PR ${pull.number}. Commenting if we haven't already and skipping.`
+ );
+ if (!prState.commentedAboutFailingChecks) {
+ await github.addPrComment(
+ pull.number,
+ commentStrings.failingChecksCantAssign()
+ );
+ }
+ prState.commentedAboutFailingChecks = true;
+ await stateClient.writePrState(pull.number, prState);
+}
+
+/*
+ * Performs all the business logic of processing a new pull request, including:
+ * 1) Checking if it needs processed
+ * 2) Reminding reviewers if checks have completed (if they've subscribed to that)
+ * 3) Picking/assigning reviewers
+ * 4) Adding "Next Action: Reviewers label"
+ * 5) Storing the state of the pull request/reviewers in a dedicated branch.
+ */
+async function processPull(
+ pull: any,
+ reviewerConfig: typeof ReviewerConfig,
+ stateClient: typeof PersistentState
+) {
+ let prState = await stateClient.getPrState(pull.number);
+ if (!needsProcessed(pull, prState)) {
+ return;
+ }
+
+ let checkState = await getChecksStatus(REPO_OWNER, REPO, pull.head.sha);
+
+ if (prState.remindAfterTestsPass && prState.remindAfterTestsPass.length > 0) {
+ return await remindIfChecksCompleted(
+ pull,
+ stateClient,
+ checkState,
+ prState
+ );
+ }
+
+ if (!checkState.succeeded) {
+ return await notifyChecksFailed(pull, stateClient, prState);
+ }
+ prState.commentedAboutFailingChecks = false;
+
+ // Pick reviewers to assign. Store them in reviewerStateToUpdate and update the prState object with those reviewers (and their associated labels)
+ let reviewerStateToUpdate: { [key: string]: typeof ReviewersForLabel } = {};
+ const reviewersForLabels: { [key: string]: string[] } =
+ reviewerConfig.getReviewersForLabels(pull.labels, [pull.user.login]);
+ var labels = Object.keys(reviewersForLabels);
+ if (!labels || labels.length === 0) {
+ return;
+ }
+ for (const label of labels) {
+ let availableReviewers = reviewersForLabels[label];
+ let reviewersState = await stateClient.getReviewersForLabelState(label);
+ let chosenReviewer = reviewersState.assignNextReviewer(availableReviewers);
+ reviewerStateToUpdate[label] = reviewersState;
+ prState.reviewersAssignedForLabels[label] = chosenReviewer;
+ }
+
+ console.log(`Assigning reviewers for PR ${pull.number}`);
+ await github.addPrComment(
+ pull.number,
+ commentStrings.assignReviewer(prState.reviewersAssignedForLabels)
+ );
+
+ github.nextActionReviewers(pull.number, pull.labels);
+ prState.nextAction = "Reviewers";
+
+ await stateClient.writePrState(pull.number, prState);
+ let labelsToUpdate = Object.keys(reviewerStateToUpdate);
+ for (const label of labelsToUpdate) {
+ await stateClient.writeReviewersForLabelState(
+ label,
+ reviewerStateToUpdate[label]
+ );
+ }
+}
+
+async function processNewPrs() {
+ const githubClient = github.getGitHubClient();
+ let reviewerConfig = new ReviewerConfig(PATH_TO_CONFIG_FILE);
+ let stateClient = new PersistentState();
+
+ let openPulls = await githubClient.paginate(
+ "GET /repos/{owner}/{repo}/pulls",
+ {
+ owner: REPO_OWNER,
+ repo: REPO,
+ }
+ );
+
+ for (const pull of openPulls) {
+ await processPull(pull, reviewerConfig, stateClient);
+ }
+}
+
+processNewPrs();
+
+export {};
diff --git a/scripts/ci/pr-bot/processPrUpdate.ts b/scripts/ci/pr-bot/processPrUpdate.ts
new file mode 100644
index 0000000000000..154444ee076b3
--- /dev/null
+++ b/scripts/ci/pr-bot/processPrUpdate.ts
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const github = require("@actions/github");
+const commentStrings = require("./shared/commentStrings");
+const { processCommand } = require("./shared/userCommand");
+const {
+ addPrComment,
+ nextActionReviewers,
+ getPullAuthorFromPayload,
+ getPullNumberFromPayload,
+} = require("./shared/githubUtils");
+const { PersistentState } = require("./shared/persistentState");
+const { ReviewerConfig } = require("./shared/reviewerConfig");
+const { PATH_TO_CONFIG_FILE } = require("./shared/constants");
+
+const reviewerAction = "Reviewers";
+
+async function areReviewersAssigned(
+ pullNumber: number,
+ stateClient: typeof PersistentState
+): Promise {
+ const prState = await stateClient.getPrState(pullNumber);
+ return Object.values(prState.reviewersAssignedForLabels).length > 0;
+}
+
+async function processPrComment(
+ payload: any,
+ stateClient: typeof PersistentState,
+ reviewerConfig: typeof ReviewerConfig
+) {
+ const commentContents = payload.comment.body;
+ const commentAuthor = payload.sender.login;
+ console.log(commentContents);
+ if (
+ await processCommand(
+ payload,
+ commentAuthor,
+ commentContents,
+ stateClient,
+ reviewerConfig
+ )
+ ) {
+ // If we've processed a command, don't worry about trying to change the attention set.
+ // This is not a meaningful push or comment from the author.
+ console.log("Processed command");
+ return;
+ }
+
+ // If comment was from the author, we should shift attention back to the reviewers.
+ console.log(
+ "No command to be processed, checking if we should shift attention to reviewers"
+ );
+ const pullAuthor = getPullAuthorFromPayload(payload);
+ if (pullAuthor === commentAuthor) {
+ await setNextActionReviewers(payload, stateClient);
+ } else {
+ console.log(
+ `Comment was from ${commentAuthor}, not author: ${pullAuthor}. No action to take.`
+ );
+ }
+}
+
+/*
+ * On approval from a reviewer we have assigned, assign committer if one not already assigned
+ */
+async function processPrReview(
+ payload: any,
+ stateClient: typeof PersistentState,
+ reviewerConfig: typeof ReviewerConfig
+) {
+ if (payload.review.state !== "approved") {
+ return;
+ }
+
+ const pullNumber = getPullNumberFromPayload(payload);
+ if (!(await areReviewersAssigned(pullNumber, stateClient))) {
+ return;
+ }
+
+ let prState = await stateClient.getPrState(pullNumber);
+ // TODO(damccorm) - also check if the author is a committer, if they are don't auto-assign a committer
+ if (await prState.isAnyAssignedReviewerCommitter()) {
+ return;
+ }
+
+ const labelOfReviewer = prState.getLabelForReviewer(payload.sender.login);
+ if (labelOfReviewer) {
+ let reviewersState = await stateClient.getReviewersForLabelState(
+ labelOfReviewer
+ );
+ const availableReviewers =
+ reviewerConfig.getReviewersForLabel(labelOfReviewer);
+ const chosenCommitter = await reviewersState.assignNextCommitter(
+ availableReviewers
+ );
+ prState.reviewersAssignedForLabels[labelOfReviewer] = chosenCommitter;
+
+ // Set next action to committer
+ await addPrComment(
+ pullNumber,
+ commentStrings.assignCommitter(chosenCommitter)
+ );
+ const existingLabels =
+ payload.issue?.labels || payload.pull_request?.labels;
+ await nextActionReviewers(pullNumber, existingLabels);
+ prState.nextAction = reviewerAction;
+
+ // Persist state
+ await stateClient.writePrState(pullNumber, prState);
+ await stateClient.writeReviewersForLabelState(
+ labelOfReviewer,
+ reviewersState
+ );
+ }
+}
+
+/*
+ * On pr push or author comment, we should put the attention set back on the reviewers
+ */
+async function setNextActionReviewers(
+ payload: any,
+ stateClient: typeof PersistentState
+) {
+ const pullNumber = getPullNumberFromPayload(payload);
+ if (!(await areReviewersAssigned(pullNumber, stateClient))) {
+ console.log("No reviewers assigned, dont need to manipulate attention set");
+ return;
+ }
+ const existingLabels = payload.issue?.labels || payload.pull_request?.labels;
+ await nextActionReviewers(pullNumber, existingLabels);
+ let prState = await stateClient.getPrState(pullNumber);
+ prState.nextAction = reviewerAction;
+ await stateClient.writePrState(pullNumber, prState);
+}
+
+async function processPrUpdate() {
+ const reviewerConfig = new ReviewerConfig(PATH_TO_CONFIG_FILE);
+ const context = github.context;
+ console.log("Event context:");
+ console.log(context);
+ const payload = context.payload;
+
+ // TODO(damccorm) - remove this when we roll out to more than go
+ const existingLabels = payload.issue?.labels || payload.pull_request?.labels;
+ if (!existingLabels.find((label) => label.name.toLowerCase() === "go")) {
+ console.log("Does not contain the go label - skipping");
+ return;
+ }
+
+ if (!payload.issue?.pull_request && !payload.pull_request) {
+ console.log("Issue, not pull request - returning");
+ return;
+ }
+ const pullNumber = getPullNumberFromPayload(payload);
+
+ const stateClient = new PersistentState();
+ const prState = await stateClient.getPrState(pullNumber);
+ if (prState.stopReviewerNotifications) {
+ console.log("Notifications have been paused for this pull - skipping");
+ return;
+ }
+
+ switch (github.context.eventName) {
+ case "pull_request_review_comment":
+ case "issue_comment":
+ console.log("Processing comment event");
+ if (payload.action !== "created") {
+ console.log("Comment wasnt just created, skipping");
+ return;
+ }
+ await processPrComment(payload, stateClient, reviewerConfig);
+ break;
+ case "pull_request_review":
+ console.log("Processing PR review event");
+ await processPrReview(payload, stateClient, reviewerConfig);
+ break;
+ case "pull_request_target":
+ if (payload.action === "synchronize") {
+ console.log("Processing synchronize action");
+ await setNextActionReviewers(payload, stateClient);
+ }
+ // TODO(damccorm) - it would be good to eventually handle the following events here, even though they're not part of the normal workflow
+ // review requested, assigned, label added, label removed
+ break;
+ default:
+ console.log("Not a PR comment, push, or review, doing nothing");
+ }
+}
+
+processPrUpdate();
+
+export {};
diff --git a/scripts/ci/pr-bot/shared/checks.ts b/scripts/ci/pr-bot/shared/checks.ts
new file mode 100644
index 0000000000000..51bcf18025928
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/checks.ts
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const { getGitHubClient } = require("./githubUtils");
+
+export interface CheckStatus {
+ completed: boolean;
+ succeeded: boolean;
+}
+
+// Returns the status of the most recent checks runs -
+export async function getChecksStatus(
+ owner: string,
+ repo: string,
+ checkSha: string
+): Promise {
+ let checkStatus: CheckStatus = {
+ completed: true,
+ succeeded: true,
+ };
+ const mostRecentChecks = await getMostRecentChecks(owner, repo, checkSha);
+ for (let i = 0; i < mostRecentChecks.length; i++) {
+ if (mostRecentChecks[i].status != "completed") {
+ checkStatus.completed = false;
+ }
+ if (mostRecentChecks[i].conclusion != "success") {
+ checkStatus.succeeded = false;
+ }
+ }
+
+ return checkStatus;
+}
+
+async function getMostRecentChecks(
+ owner: string,
+ repo: string,
+ checkSha: string
+): Promise {
+ let mostRecentChecks: any[] = [];
+ const checksByName = await getChecksByName(owner, repo, checkSha);
+
+ const checkNames = Object.keys(checksByName);
+ for (let i = 0; i < checkNames.length; i++) {
+ let checks = checksByName[checkNames[i]];
+ let mostRecent = checks.sort((a, b) =>
+ a.completionTime > b.completionTime ? 1 : -1
+ )[0];
+ mostRecentChecks.push(mostRecent);
+ }
+
+ return mostRecentChecks;
+}
+
+async function getChecksByName(
+ owner: string,
+ repo: string,
+ checkSha: string
+): Promise {
+ const githubClient = getGitHubClient();
+ const allChecks = (
+ await githubClient.rest.checks.listForRef({
+ owner: owner,
+ repo: repo,
+ ref: checkSha,
+ })
+ ).data.check_runs;
+ let checksByName = {};
+ allChecks.forEach((checkRun) => {
+ if (!shouldExcludeCheck(checkRun)) {
+ let name = checkRun.name;
+ let check = {
+ status: checkRun.status,
+ conclusion: checkRun.conclusion,
+ completionTime: checkRun.completed_at,
+ };
+ if (!checksByName[name]) {
+ checksByName[name] = [check];
+ } else {
+ checksByName[name].push(check);
+ }
+ }
+ });
+
+ return checksByName;
+}
+
+// Returns checks we should exclude because they are flaky or not always predictive of pr mergability.
+// Currently just excludes codecov.
+function shouldExcludeCheck(check): boolean {
+ if (check.name.toLowerCase().indexOf("codecov") != -1) {
+ return true;
+ }
+ return false;
+}
diff --git a/scripts/ci/pr-bot/shared/commentStrings.ts b/scripts/ci/pr-bot/shared/commentStrings.ts
new file mode 100644
index 0000000000000..e70a721c95fa7
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/commentStrings.ts
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export function allChecksPassed(reviewersToNotify: string[]): string {
+ return `All checks have passed: @${reviewersToNotify.join(" ")}`;
+}
+
+export function assignCommitter(committer: string): string {
+ return `R: @${committer} for final approval`;
+}
+
+export function assignReviewer(labelToReviewerMapping: any): string {
+ let commentString =
+ "Assigning reviewers. If you would like to opt out of this review, comment `assign to next reviewer`:\n\n";
+
+ for (let label in labelToReviewerMapping) {
+ let reviewer = labelToReviewerMapping[label];
+ if (label === "no-matching-label") {
+ commentString += `R: @${reviewer} added as fallback since no labels match configuration\n`;
+ } else {
+ commentString += `R: @${reviewer} for label ${label}.\n`;
+ }
+ }
+
+ commentString += `
+Available commands:
+- \`stop reviewer notifications\` - opt out of the automated review tooling
+- \`remind me after tests pass\` - tag the comment author after tests pass
+- \`waiting on author\` - shift the attention set back to the author (any comment or push by the author will return the attention set to the reviewers)`;
+ return commentString;
+}
+
+export function failingChecksCantAssign(): string {
+ return "Checks are failing. Will not request review until checks are succeeding. If you'd like to override that behavior, comment `assign set of reviewers`";
+}
+
+export function someChecksFailing(reviewersToNotify: string[]): string {
+ return `Some checks have failed: @${reviewersToNotify.join(" ")}`;
+}
+
+export function stopNotifications(reason: string): string {
+ return `Stopping reviewer notifications for this pull request: ${reason}`;
+}
+
+export function remindReviewerAfterTestsPass(requester: string): string {
+ return `Ok - I'll remind @${requester} after tests pass`;
+}
+
+export function reviewersAlreadyAssigned(reviewers: string[]): string {
+ return `Reviewers are already assigned to this PR: ${reviewers
+ .map((reviewer) => "@" + reviewer)
+ .join(" ")}`;
+}
+
+export function noLegalReviewers(): string {
+ return "No reviewers could be found from any of the labels on the PR or in the fallback reviewers list. Check the config file to make sure reviewers are configured";
+}
diff --git a/scripts/ci/pr-bot/shared/constants.ts b/scripts/ci/pr-bot/shared/constants.ts
new file mode 100644
index 0000000000000..859bf8589e623
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/constants.ts
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const path = require("path");
+
+export const REPO_OWNER = "apache";
+export const REPO = "beam";
+export const PATH_TO_CONFIG_FILE = path.join(
+ __dirname,
+ "../../../../../.github/REVIEWERS.yml"
+);
+export const PATH_TO_METRICS_CSV = path.resolve(
+ path.join(__dirname, "../../metrics.csv")
+);
+export const BOT_NAME = "github-actions";
diff --git a/scripts/ci/pr-bot/shared/githubUtils.ts b/scripts/ci/pr-bot/shared/githubUtils.ts
new file mode 100644
index 0000000000000..667255b169247
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/githubUtils.ts
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const { Octokit } = require("@octokit/rest");
+const { REPO_OWNER, REPO } = require("./constants");
+
+export interface Label {
+ name: string;
+}
+
+export function getGitHubClient() {
+ let auth = process.env["GITHUB_TOKEN"];
+ if (!auth) {
+ throw new Error(
+ "No github token provided - process.env['GITHUB_TOKEN'] must be set."
+ );
+ }
+ return new Octokit({ auth });
+}
+
+export async function addPrComment(pullNumber: number, body: string) {
+ await getGitHubClient().rest.issues.createComment({
+ owner: REPO_OWNER,
+ repo: REPO,
+ issue_number: pullNumber,
+ body,
+ });
+}
+
+export async function nextActionReviewers(
+ pullNumber: number,
+ existingLabels: Label[]
+) {
+ let newLabels = removeNextActionLabel(existingLabels);
+ newLabels.push("Next Action: Reviewers");
+ await getGitHubClient().rest.issues.setLabels({
+ owner: REPO_OWNER,
+ repo: REPO,
+ issue_number: pullNumber,
+ labels: newLabels,
+ });
+}
+
+export async function nextActionAuthor(
+ pullNumber: number,
+ existingLabels: Label[]
+) {
+ let newLabels = removeNextActionLabel(existingLabels);
+ newLabels.push("Next Action: Author");
+ await getGitHubClient().rest.issues.setLabels({
+ owner: REPO_OWNER,
+ repo: REPO,
+ issue_number: pullNumber,
+ labels: newLabels,
+ });
+}
+
+export async function checkIfCommitter(username: string): Promise {
+ const permissionLevel = (
+ await getGitHubClient().rest.repos.getCollaboratorPermissionLevel({
+ owner: REPO_OWNER,
+ repo: REPO,
+ username,
+ })
+ ).data;
+
+ return (
+ permissionLevel.permission === "write" ||
+ permissionLevel.permission === "admin"
+ );
+}
+
+export function getPullAuthorFromPayload(payload: any) {
+ return payload.issue?.user?.login || payload.pull_request?.user?.login;
+}
+
+export function getPullNumberFromPayload(payload: any) {
+ return payload.issue?.number || payload.pull_request?.number;
+}
+
+function removeNextActionLabel(existingLabels: Label[]): string[] {
+ return existingLabels
+ .filter(
+ (label) =>
+ label.name != "Next Action: Reviewers" &&
+ label.name != "Next Action: Author"
+ )
+ .map((label) => label.name);
+}
diff --git a/scripts/ci/pr-bot/shared/persistentState.ts b/scripts/ci/pr-bot/shared/persistentState.ts
new file mode 100644
index 0000000000000..f7fade16f9b93
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/persistentState.ts
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const exec = require("@actions/exec");
+const fs = require("fs");
+const path = require("path");
+const { Pr } = require("./pr");
+const { ReviewersForLabel } = require("./reviewersForLabel");
+const { BOT_NAME } = require("./constants");
+
+function getPrFileName(prNumber) {
+ return `pr-${prNumber}.json`.toLowerCase();
+}
+
+function getReviewersForLabelFileName(label) {
+ return `reviewers-for-label-${label}.json`.toLowerCase();
+}
+
+async function commitStateToRepo() {
+ await exec.exec("git pull origin pr-bot-state");
+ await exec.exec("git add state/*");
+ await exec.exec(`git commit -m "Updating config from bot" --allow-empty`);
+ await exec.exec("git push origin pr-bot-state");
+}
+
+export class PersistentState {
+ private switchedBranch = false;
+
+ // Returns a Pr object representing the current saved state of the pr.
+ async getPrState(prNumber: number): Promise {
+ var fileName = getPrFileName(prNumber);
+ return new Pr(await this.getState(fileName, "state/pr-state"));
+ }
+
+ // Writes a Pr object representing the current saved state of the pr to persistent storage.
+ async writePrState(prNumber: number, newState: any) {
+ var fileName = getPrFileName(prNumber);
+ await this.writeState(fileName, "state/pr-state", new Pr(newState));
+ }
+
+ // Returns a ReviewersForLabel object representing the current saved state of which reviewers have reviewed recently.
+ async getReviewersForLabelState(
+ label: string
+ ): Promise {
+ var fileName = getReviewersForLabelFileName(label);
+ return new ReviewersForLabel(label, await this.getState(fileName, "state"));
+ }
+
+ // Writes a ReviewersForLabel object representing the current saved state of which reviewers have reviewed recently.
+ async writeReviewersForLabelState(label: string, newState: any) {
+ var fileName = getReviewersForLabelFileName(label);
+ await this.writeState(
+ fileName,
+ "state",
+ new ReviewersForLabel(label, newState)
+ );
+ }
+
+ private async getState(fileName, baseDirectory) {
+ await this.ensureCorrectBranch();
+ fileName = path.join(baseDirectory, fileName);
+ if (!fs.existsSync(fileName)) {
+ return null;
+ }
+ return JSON.parse(fs.readFileSync(fileName, { encoding: "utf-8" }));
+ }
+
+ private async writeState(fileName, baseDirectory, state) {
+ await this.ensureCorrectBranch();
+ fileName = path.join(baseDirectory, fileName);
+ if (!fs.existsSync(baseDirectory)) {
+ fs.mkdirSync(baseDirectory, { recursive: true });
+ }
+ fs.writeFileSync(fileName, JSON.stringify(state, null, 2), {
+ encoding: "utf-8",
+ });
+ await commitStateToRepo();
+ }
+
+ private async ensureCorrectBranch() {
+ if (this.switchedBranch) {
+ return;
+ }
+ console.log(
+ "Switching to branch pr-bot-state for reading/storing persistent state between runs"
+ );
+ try {
+ await exec.exec(`git config user.name ${BOT_NAME}`);
+ await exec.exec(`git config user.email ${BOT_NAME}@github.com`);
+ await exec.exec("git config pull.rebase false");
+ await exec.exec("git fetch origin pr-bot-state");
+ await exec.exec("git checkout pr-bot-state");
+ } catch {
+ console.log(
+ "Couldnt find branch pr-bot-state in origin, trying to create it"
+ );
+ try {
+ await exec.exec("git checkout -b pr-bot-state");
+ } catch {
+ console.log("Creating branch failed, trying a simple checkout.");
+ await exec.exec("git checkout pr-bot-state");
+ }
+ }
+ this.switchedBranch = true;
+ }
+}
diff --git a/scripts/ci/pr-bot/shared/pr.ts b/scripts/ci/pr-bot/shared/pr.ts
new file mode 100644
index 0000000000000..c1419b8419cee
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/pr.ts
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const github = require("./githubUtils");
+
+export class Pr {
+ public commentedAboutFailingChecks: boolean;
+ public reviewersAssignedForLabels: { [key: string]: string };
+ public nextAction: string;
+ public stopReviewerNotifications: boolean;
+ public remindAfterTestsPass: string[];
+
+ constructor(propertyDictionary) {
+ this.commentedAboutFailingChecks = false;
+ this.reviewersAssignedForLabels = {}; // map of label to reviewer
+ this.nextAction = "Author";
+ this.stopReviewerNotifications = false;
+ this.remindAfterTestsPass = []; // List of handles
+
+ if (!propertyDictionary) {
+ return;
+ }
+ if (propertyDictionary) {
+ if ("commentedAboutFailingChecks" in propertyDictionary) {
+ this.commentedAboutFailingChecks =
+ propertyDictionary["commentedAboutFailingChecks"];
+ }
+ if ("reviewersAssignedForLabels" in propertyDictionary) {
+ this.reviewersAssignedForLabels =
+ propertyDictionary["reviewersAssignedForLabels"];
+ }
+ if ("nextAction" in propertyDictionary) {
+ this.nextAction = propertyDictionary["nextAction"];
+ }
+ if ("stopReviewerNotifications" in propertyDictionary) {
+ this.stopReviewerNotifications =
+ propertyDictionary["stopReviewerNotifications"];
+ }
+ if ("remindAfterTestsPass" in propertyDictionary) {
+ this.remindAfterTestsPass = propertyDictionary["remindAfterTestsPass"];
+ }
+ }
+ }
+
+ // Returns a label that the reviewer is assigned for.
+ // If none, returns an empty string
+ getLabelForReviewer(reviewer: string): string {
+ const labels = Object.keys(this.reviewersAssignedForLabels);
+ for (let i = 0; i < labels.length; i++) {
+ let label = labels[i];
+ if (this.reviewersAssignedForLabels[label] === reviewer) {
+ return label;
+ }
+ }
+
+ return "";
+ }
+
+ // Returns whether any of the assigned reviewers are committers
+ async isAnyAssignedReviewerCommitter(): Promise {
+ const labels = Object.keys(this.reviewersAssignedForLabels);
+ for (let i = 0; i < labels.length; i++) {
+ if (
+ await github.checkIfCommitter(
+ this.reviewersAssignedForLabels[labels[i]]
+ )
+ ) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+}
diff --git a/scripts/ci/pr-bot/shared/reviewerConfig.ts b/scripts/ci/pr-bot/shared/reviewerConfig.ts
new file mode 100644
index 0000000000000..f8300adbdfaf3
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/reviewerConfig.ts
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const yaml = require("js-yaml");
+const fs = require("fs");
+import { Label } from "./githubUtils";
+
+export class ReviewerConfig {
+ private config: any;
+ constructor(pathToConfigFile) {
+ this.config = yaml.load(
+ fs.readFileSync(pathToConfigFile, { encoding: "utf-8" })
+ );
+ }
+
+ // Given a list of labels and an exclusion list of reviewers not to include (e.g. the author)
+ // returns all possible reviewers for each label
+ getReviewersForLabels(
+ labels: Label[],
+ exclusionList: string[]
+ ): { [key: string]: string[] } {
+ let reviewersFound = false;
+ let labelToReviewerMapping = {};
+ labels.forEach((label) => {
+ let reviewers = this.getReviewersForLabel(label.name, exclusionList);
+ if (reviewers.length > 0) {
+ labelToReviewerMapping[label.name] = reviewers;
+ reviewersFound = true;
+ }
+ });
+ if (!reviewersFound) {
+ const fallbackReviewers = this.getFallbackReviewers(exclusionList);
+ if (fallbackReviewers.length > 0) {
+ labelToReviewerMapping["no-matching-label"] =
+ this.getFallbackReviewers(exclusionList);
+ }
+ }
+ return labelToReviewerMapping;
+ }
+
+ // Get possible reviewers excluding the author.
+ getReviewersForLabel(label: string, exclusionList: string[]): string[] {
+ var labelObjects = this.config.labels;
+ const labelObject = labelObjects.find(
+ (labelObject) => labelObject.name.toLowerCase() === label.toLowerCase()
+ );
+ if (!labelObject) {
+ return [];
+ }
+
+ return this.excludeFromReviewers(labelObject.reviewers, exclusionList);
+ }
+
+ getExclusionListForLabel(label: string): string[] {
+ var labelObjects = this.config.labels;
+ const labelObject = labelObjects.find(
+ (labelObject) => labelObject.name.toLowerCase() === label.toLowerCase()
+ );
+ return labelObject?.exclusionList ?? [];
+ }
+
+ // Get fallback reviewers excluding the author.
+ getFallbackReviewers(exclusionList: string[]): string[] {
+ return this.excludeFromReviewers(
+ this.config.fallbackReviewers,
+ exclusionList
+ );
+ }
+
+ private excludeFromReviewers(
+ reviewers: string[],
+ exclusionList: string[]
+ ): string[] {
+ if (!exclusionList) {
+ return reviewers;
+ }
+
+ return reviewers.filter(
+ (reviewer) => exclusionList.indexOf(reviewer) == -1
+ );
+ }
+}
diff --git a/scripts/ci/pr-bot/shared/reviewersForLabel.ts b/scripts/ci/pr-bot/shared/reviewersForLabel.ts
new file mode 100644
index 0000000000000..971f3f1cd7a53
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/reviewersForLabel.ts
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const github = require("./githubUtils");
+
+export class ReviewersForLabel {
+ public label: string;
+ public dateOfLastReviewAssignment: { [key: string]: number };
+
+ constructor(
+ label: string,
+ propertyDictionary: {
+ dateOfLastReviewAssignment: { [key: string]: number };
+ }
+ ) {
+ this.label = label;
+ this.dateOfLastReviewAssignment = {}; // map of reviewer to date
+
+ if (!propertyDictionary) {
+ return;
+ }
+ if ("dateOfLastReviewAssignment" in propertyDictionary) {
+ this.dateOfLastReviewAssignment =
+ propertyDictionary["dateOfLastReviewAssignment"];
+ }
+ }
+
+ // Given a list of available reviewers,
+ // returns the next reviewer up based on who has reviewed least recently.
+ // Updates this object to reflect their assignment.
+ assignNextReviewer(availableReviewers: string[]): string {
+ if (availableReviewers.length === 0) {
+ throw new Error(`No reviewers available for label ${this.label}`);
+ }
+
+ if (!this.dateOfLastReviewAssignment[availableReviewers[0]]) {
+ this.dateOfLastReviewAssignment[availableReviewers[0]] = Date.now();
+ return availableReviewers[0];
+ }
+
+ let earliestDate = this.dateOfLastReviewAssignment[availableReviewers[0]];
+ let earliestReviewer = availableReviewers[0];
+
+ for (let i = 0; i < availableReviewers.length; i++) {
+ let availableReviewer = availableReviewers[i];
+ if (!this.dateOfLastReviewAssignment[availableReviewer]) {
+ this.dateOfLastReviewAssignment[availableReviewer] = Date.now();
+ return availableReviewer;
+ }
+ if (earliestDate > this.dateOfLastReviewAssignment[availableReviewer]) {
+ earliestDate = this.dateOfLastReviewAssignment[availableReviewer];
+ earliestReviewer = availableReviewer;
+ }
+ }
+
+ this.dateOfLastReviewAssignment[earliestReviewer] = Date.now();
+ return earliestReviewer;
+ }
+
+ // Given the up to date list of available reviewers (excluding the author),
+ // returns the next reviewer up based on who has reviewed least recently.
+ // Updates this object to reflect their assignment.
+ async assignNextCommitter(availableReviewers: string[]): Promise {
+ let earliestDate = Date.now();
+ let earliestCommitter: string = "";
+
+ for (let i = 0; i < availableReviewers.length; i++) {
+ let availableReviewer = availableReviewers[i];
+ if (await github.checkIfCommitter(availableReviewer)) {
+ if (!this.dateOfLastReviewAssignment[availableReviewer]) {
+ this.dateOfLastReviewAssignment[availableReviewer] = Date.now();
+ return availableReviewer;
+ }
+ if (earliestDate > this.dateOfLastReviewAssignment[availableReviewer]) {
+ earliestDate = this.dateOfLastReviewAssignment[availableReviewer];
+ earliestCommitter = availableReviewer;
+ }
+ }
+ }
+
+ if (!earliestCommitter) {
+ throw new Error(`No committers available for label ${this.label}`);
+ }
+ this.dateOfLastReviewAssignment[earliestCommitter] = Date.now();
+ return earliestCommitter;
+ }
+}
diff --git a/scripts/ci/pr-bot/shared/userCommand.ts b/scripts/ci/pr-bot/shared/userCommand.ts
new file mode 100644
index 0000000000000..e32746eb7fce8
--- /dev/null
+++ b/scripts/ci/pr-bot/shared/userCommand.ts
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const github = require("./githubUtils");
+const commentStrings = require("./commentStrings");
+const { BOT_NAME } = require("./constants");
+const { StateClient } = require("./persistentState");
+const { ReviewerConfig } = require("./reviewerConfig");
+
+// Reads the comment and processes the command if one is contained in it.
+// Returns true if it runs a command, false otherwise.
+export async function processCommand(
+ payload: any,
+ commentAuthor: string,
+ commentText: string,
+ stateClient: typeof StateClient,
+ reviewerConfig: typeof ReviewerConfig
+) {
+ // Don't process any commands from our bot.
+ if (commentAuthor === BOT_NAME) {
+ return false;
+ }
+ console.log(commentAuthor);
+
+ const pullNumber = payload.issue?.number || payload.pull_request?.number;
+ commentText = commentText.toLowerCase();
+ if (commentText.indexOf("r: @") > -1) {
+ await manuallyAssignedToReviewer(pullNumber, stateClient);
+ } else if (commentText.indexOf("assign to next reviewer") > -1) {
+ await assignToNextReviewer(
+ payload,
+ commentAuthor,
+ pullNumber,
+ stateClient,
+ reviewerConfig
+ );
+ } else if (commentText.indexOf("stop reviewer notifications") > -1) {
+ await stopReviewerNotifications(
+ pullNumber,
+ stateClient,
+ "requested by reviewer"
+ );
+ } else if (commentText.indexOf("remind me after tests pass") > -1) {
+ await remindAfterTestsPass(pullNumber, commentAuthor, stateClient);
+ } else if (commentText.indexOf("waiting on author") > -1) {
+ await waitOnAuthor(payload, pullNumber, stateClient);
+ } else if (commentText.indexOf("assign set of reviewers") > -1) {
+ await assignReviewerSet(payload, pullNumber, stateClient, reviewerConfig);
+ } else {
+ return false;
+ }
+
+ return true;
+}
+
+async function assignToNextReviewer(
+ payload: any,
+ commentAuthor: string,
+ pullNumber: number,
+ stateClient: typeof StateClient,
+ reviewerConfig: typeof ReviewerConfig
+) {
+ let prState = await stateClient.getPrState(pullNumber);
+ let labelOfReviewer = prState.getLabelForReviewer(payload.sender.login);
+ if (labelOfReviewer) {
+ let reviewersState = await stateClient.getReviewersForLabelState(
+ labelOfReviewer
+ );
+ const pullAuthor = github.getPullAuthorFromPayload(payload);
+ let availableReviewers = reviewerConfig.getReviewersForLabel(
+ labelOfReviewer,
+ [commentAuthor, pullAuthor]
+ );
+ let chosenReviewer = reviewersState.assignNextReviewer(availableReviewers);
+ prState.reviewersAssignedForLabels[labelOfReviewer] = chosenReviewer;
+
+ // Comment assigning reviewer
+ console.log(`Assigning ${chosenReviewer}`);
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.assignReviewer(prState.reviewersAssignedForLabels)
+ );
+
+ // Set next action to reviewer
+ const existingLabels =
+ payload.issue?.labels || payload.pull_request?.labels;
+ await github.nextActionReviewers(pullNumber, existingLabels);
+ prState.nextAction = "Reviewers";
+
+ // Persist state
+ await stateClient.writePrState(pullNumber, prState);
+ await stateClient.writeReviewersForLabelState(
+ labelOfReviewer,
+ reviewersState
+ );
+ }
+}
+
+// If they've manually assigned a reviewer, just silence notifications and ignore this pr going forward.
+// TODO(damccorm) - we could try to do something more intelligent here like figuring out which label that reviewer belongs to.
+async function manuallyAssignedToReviewer(
+ pullNumber: number,
+ stateClient: typeof StateClient
+) {
+ await stopReviewerNotifications(
+ pullNumber,
+ stateClient,
+ "review requested by someone other than the bot, ceding control"
+ );
+}
+
+async function stopReviewerNotifications(
+ pullNumber: number,
+ stateClient: typeof StateClient,
+ reason: string
+) {
+ let prState = await stateClient.getPrState(pullNumber);
+ prState.stopReviewerNotifications = true;
+ await stateClient.writePrState(pullNumber, prState);
+
+ // Comment acknowledging command
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.stopNotifications(reason)
+ );
+}
+
+async function remindAfterTestsPass(
+ pullNumber: number,
+ username: string,
+ stateClient: typeof StateClient
+) {
+ let prState = await stateClient.getPrState(pullNumber);
+ prState.remindAfterTestsPass.push(username);
+ await stateClient.writePrState(pullNumber, prState);
+
+ // Comment acknowledging command
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.remindReviewerAfterTestsPass(username)
+ );
+}
+
+async function waitOnAuthor(
+ payload: any,
+ pullNumber: number,
+ stateClient: typeof StateClient
+) {
+ const existingLabels = payload.issue?.labels || payload.pull_request?.labels;
+ await github.nextActionAuthor(pullNumber, existingLabels);
+ let prState = await stateClient.getPrState(pullNumber);
+ prState.nextAction = "Author";
+ await stateClient.writePrState(pullNumber, prState);
+}
+
+async function assignReviewerSet(
+ payload: any,
+ pullNumber: number,
+ stateClient: typeof StateClient,
+ reviewerConfig: typeof ReviewerConfig
+) {
+ let prState = await stateClient.getPrState(pullNumber);
+ if (Object.values(prState.reviewersAssignedForLabels).length > 0) {
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.reviewersAlreadyAssigned(
+ Object.values(prState.reviewersAssignedForLabels)
+ )
+ );
+ return;
+ }
+
+ const existingLabels = payload.issue?.labels || payload.pull_request?.labels;
+ const pullAuthor = github.getPullAuthorFromPayload(payload);
+ const reviewersForLabels = reviewerConfig.getReviewersForLabels(
+ existingLabels,
+ [pullAuthor]
+ );
+ let reviewerStateToUpdate = {};
+ var labels = Object.keys(reviewersForLabels);
+ if (!labels || labels.length == 0) {
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.noLegalReviewers(existingLabels)
+ );
+ return;
+ }
+ for (let i = 0; i < labels.length; i++) {
+ let label = labels[i];
+ let availableReviewers = reviewersForLabels[label];
+ let reviewersState = await stateClient.getReviewersForLabelState(label);
+ let chosenReviewer = reviewersState.assignNextReviewer(availableReviewers);
+ reviewerStateToUpdate[label] = reviewersState;
+ prState.reviewersAssignedForLabels[label] = chosenReviewer;
+ }
+ console.log(`Assigning reviewers for pr ${pullNumber}`);
+ await github.addPrComment(
+ pullNumber,
+ commentStrings.assignReviewer(prState.reviewersAssignedForLabels)
+ );
+
+ github.nextActionReviewers(pullNumber, existingLabels);
+ prState.nextAction = "Reviewers";
+
+ await stateClient.writePrState(pullNumber, prState);
+ let labelsToUpdate = Object.keys(reviewerStateToUpdate);
+ for (let i = 0; i < labelsToUpdate.length; i++) {
+ let label = labelsToUpdate[i];
+ await stateClient.writeReviewersForLabelState(
+ label,
+ reviewerStateToUpdate[label]
+ );
+ }
+}
diff --git a/scripts/ci/pr-bot/test/prTest.ts b/scripts/ci/pr-bot/test/prTest.ts
new file mode 100644
index 0000000000000..b771852584a30
--- /dev/null
+++ b/scripts/ci/pr-bot/test/prTest.ts
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var assert = require("assert");
+const { Pr } = require("../shared/pr");
+describe("Pr", function () {
+ describe("getLabelForReviewer()", function () {
+ it("should return the label a reviewer is assigned to", function () {
+ let testPr = new Pr({});
+ testPr.reviewersAssignedForLabels = {
+ Go: "testReviewer1",
+ Java: "testReviewer2",
+ Python: "testReviewer3",
+ };
+ assert.equal("Go", testPr.getLabelForReviewer("testReviewer1"));
+ assert.equal("Java", testPr.getLabelForReviewer("testReviewer2"));
+ assert.equal("Python", testPr.getLabelForReviewer("testReviewer3"));
+ });
+
+ it("should return an empty string when a reviewer is not assigned", function () {
+ let testPr = new Pr({});
+ testPr.reviewersAssignedForLabels = {
+ Go: "testReviewer1",
+ Java: "testReviewer2",
+ Python: "testReviewer3",
+ };
+ assert.equal("", testPr.getLabelForReviewer("testReviewer4"));
+ });
+ });
+});
diff --git a/scripts/ci/pr-bot/test/reviewerConfigTest.ts b/scripts/ci/pr-bot/test/reviewerConfigTest.ts
new file mode 100644
index 0000000000000..589aab635bc95
--- /dev/null
+++ b/scripts/ci/pr-bot/test/reviewerConfigTest.ts
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var assert = require("assert");
+var fs = require("fs");
+const { ReviewerConfig } = require("../shared/reviewerConfig");
+const configPath = "test-config.yml";
+const configContents = `labels:
+- name: "Go"
+ reviewers: ["testReviewer1", "testReviewer2"]
+ exclusionList: ["testReviewer3"] # These users will never be suggested as reviewers
+# I don't know the other areas well enough to assess who the normal committers/contributors who might want to be reviewers are
+- name: "Java"
+ reviewers: ["testReviewer3", "testReviewer2"]
+ exclusionList: [] # These users will never be suggested as reviewers
+- name: "Python"
+ reviewers: ["testReviewer4"]
+ exclusionList: [] # These users will never be suggested as reviewers
+fallbackReviewers: ["testReviewer5", "testReviewer1", "testReviewer3"] # List of committers to use when no label matches
+`;
+describe("ReviewerConfig", function () {
+ before(function () {
+ if (fs.existsSync(configPath)) {
+ fs.rmSync(configPath);
+ }
+ fs.writeFileSync(configPath, configContents);
+ });
+
+ after(function () {
+ fs.rmSync(configPath);
+ });
+
+ describe("getReviewersForLabels()", function () {
+ it("should return all reviewers configured for all labels", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForLabels = config.getReviewersForLabels(
+ [{ name: "Go" }, { name: "Java" }],
+ []
+ );
+ assert(
+ reviewersForLabels["Go"].find(
+ (reviewer) => reviewer === "testReviewer1"
+ ),
+ "Return value for Go label should include testReviewer1"
+ );
+ assert(
+ reviewersForLabels["Go"].find(
+ (reviewer) => reviewer === "testReviewer2"
+ ),
+ "Return value for Go label should include testReviewer2"
+ );
+ assert(
+ !reviewersForLabels["Go"].find(
+ (reviewer) => reviewer === "testReviewer3"
+ ),
+ "Return value for Go label should not include testReviewer3"
+ );
+ assert(
+ !reviewersForLabels["Go"].find(
+ (reviewer) => reviewer === "testReviewer4"
+ ),
+ "Return value for Go label should not include testReviewer4"
+ );
+
+ assert(
+ reviewersForLabels["Java"].find(
+ (reviewer) => reviewer === "testReviewer3"
+ ),
+ "Return value for Java label should include testReviewer3"
+ );
+ assert(
+ reviewersForLabels["Java"].find(
+ (reviewer) => reviewer === "testReviewer2"
+ ),
+ "Return value for Java label should include testReviewer2"
+ );
+ assert(
+ !reviewersForLabels["Java"].find(
+ (reviewer) => reviewer === "testReviewer4"
+ ),
+ "Return value for Java label should not include testReviewer4"
+ );
+ assert(
+ !reviewersForLabels["Java"].find(
+ (reviewer) => reviewer === "testReviewer1"
+ ),
+ "Return value for Java label should not include testReviewer1"
+ );
+ assert(
+ !reviewersForLabels["Java"].find(
+ (reviewer) => reviewer === "testReviewer5"
+ ),
+ "Return value for Java label should not include testReviewer5"
+ );
+
+ assert(
+ Object.keys(reviewersForLabels).indexOf("Python") == -1,
+ "No reviewers should be included for python"
+ );
+ });
+
+ it("should return no entry if a label is not configured", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForLabels = config.getReviewersForLabels(
+ [{ name: "FakeLabel" }],
+ []
+ );
+
+ assert(
+ !("FakeLabel" in reviewersForLabels),
+ "FakeLabel should not be included in the returned label map"
+ );
+ });
+
+ it("should exclude any reviewers who are passed into the exlusionList", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForLabels = config.getReviewersForLabels(
+ [{ name: "Go" }, { name: "Java" }],
+ ["testReviewer1"]
+ );
+
+ assert(
+ !reviewersForLabels["Go"].find(
+ (reviewer) => reviewer === "testReviewer1"
+ ),
+ "testReviewer1 should have been excluded from the result set"
+ );
+ });
+ });
+
+ describe("getReviewersForLabel()", function () {
+ it("should return all reviewers configured for a label", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForGo = config.getReviewersForLabel("Go", []);
+
+ assert(
+ reviewersForGo.find((reviewer) => reviewer === "testReviewer1"),
+ "Return value for Go label should include testReviewer1"
+ );
+ assert(
+ reviewersForGo.find((reviewer) => reviewer === "testReviewer2"),
+ "Return value for Go label should include testReviewer2"
+ );
+ assert(
+ !reviewersForGo.find((reviewer) => reviewer === "testReviewer3"),
+ "Return value for Go label should not include testReviewer3"
+ );
+ assert(
+ !reviewersForGo.find((reviewer) => reviewer === "testReviewer4"),
+ "Return value for Go label should not include testReviewer4"
+ );
+ });
+
+ it("should return an empty list if a label is not configured", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForFakeLabel = config.getReviewersForLabel(
+ "FakeLabel",
+ []
+ );
+ assert.equal(0, reviewersForFakeLabel.length);
+ });
+
+ it("should exclude any reviewers who are passed into the exlusionList", function () {
+ const config = new ReviewerConfig(configPath);
+ const reviewersForGo = config.getReviewersForLabel("Go", [
+ "testReviewer1",
+ ]);
+
+ assert(
+ !reviewersForGo.find((reviewer) => reviewer === "testReviewer1"),
+ "Return value for Go label should not include testReviewer1"
+ );
+ });
+ });
+
+ describe("getExclusionListForLabel()", function () {
+ it("should get the exclusion list configured for a label", function () {
+ const config = new ReviewerConfig(configPath);
+ const goExclusionList = config.getExclusionListForLabel("Go");
+
+ assert(
+ goExclusionList.find((reviewer) => reviewer === "testReviewer3"),
+ "Return value for Go label should include testReviewer3"
+ );
+ assert.equal(1, goExclusionList.length);
+ });
+ });
+
+ describe("getFallbackReviewers()", function () {
+ it("should get the configured falback list", function () {
+ const config = new ReviewerConfig(configPath);
+ const fallbackReviewers = config.getFallbackReviewers([]);
+
+ assert.equal(3, fallbackReviewers.length);
+ assert(
+ fallbackReviewers.find((reviewer) => reviewer === "testReviewer5"),
+ "Fallback reviewers should include testReviewer5"
+ );
+ assert(
+ fallbackReviewers.find((reviewer) => reviewer === "testReviewer1"),
+ "Fallback reviewers should include testReviewer1"
+ );
+ assert(
+ fallbackReviewers.find((reviewer) => reviewer === "testReviewer3"),
+ "Fallback reviewers should include testReviewer3"
+ );
+ });
+
+ it("should not include excluded reviewers", function () {
+ const config = new ReviewerConfig(configPath);
+ const fallbackReviewers = config.getFallbackReviewers([
+ "testReviewer1",
+ "testReviewer3",
+ ]);
+
+ assert.equal(1, fallbackReviewers.length);
+ assert(
+ fallbackReviewers.find((reviewer) => reviewer === "testReviewer5"),
+ "Fallback reviewers should only include testReviewer5"
+ );
+ });
+ });
+});
diff --git a/scripts/ci/pr-bot/test/reviewersForLabelTest.ts b/scripts/ci/pr-bot/test/reviewersForLabelTest.ts
new file mode 100644
index 0000000000000..f425750505927
--- /dev/null
+++ b/scripts/ci/pr-bot/test/reviewersForLabelTest.ts
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var assert = require("assert");
+const { ReviewersForLabel } = require("../shared/reviewersForLabel");
+describe("ReviewersForLabel", function () {
+ describe("assignNextReviewer()", function () {
+ it("should repeatedly assign the reviewer who reviewed least recently", function () {
+ const dateOfLastReviewAssignment = {
+ testReviewer1: 1,
+ testReviewer2: 3,
+ testReviewer3: 4,
+ testReviewer4: 2,
+ };
+ let reviewersForGo = new ReviewersForLabel("Go", {
+ dateOfLastReviewAssignment: dateOfLastReviewAssignment,
+ });
+
+ assert.equal(
+ "testReviewer1",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ ])
+ );
+ assert.equal(
+ "testReviewer4",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ ])
+ );
+ assert.equal(
+ "testReviewer2",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ ])
+ );
+ assert.equal(
+ "testReviewer3",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ ])
+ );
+ assert.equal(
+ "testReviewer1",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ ])
+ );
+ });
+
+ it("should assign a reviewer who hasnt reviewed before", function () {
+ const dateOfLastReviewAssignment = {
+ testReviewer1: 1,
+ testReviewer2: 2,
+ testReviewer3: 3,
+ testReviewer4: 4,
+ };
+ let reviewersForGo = new ReviewersForLabel("Go", {
+ dateOfLastReviewAssignment: dateOfLastReviewAssignment,
+ });
+
+ assert.equal(
+ "testReviewer5",
+ reviewersForGo.assignNextReviewer([
+ "testReviewer1",
+ "testReviewer2",
+ "testReviewer3",
+ "testReviewer4",
+ "testReviewer5",
+ ])
+ );
+ });
+
+ it("should only assign reviewers in the availableReviewers list", function () {
+ const dateOfLastReviewAssignment = {
+ testReviewer1: 1,
+ testReviewer2: 2,
+ testReviewer3: 3,
+ testReviewer4: 4,
+ };
+ let reviewersForGo = new ReviewersForLabel("Go", {
+ dateOfLastReviewAssignment: dateOfLastReviewAssignment,
+ });
+
+ assert.equal(
+ "testReviewer2",
+ reviewersForGo.assignNextReviewer(["testReviewer4", "testReviewer2"])
+ );
+ });
+
+ it("should throw if no reviewer available", function () {
+ const dateOfLastReviewAssignment = {
+ testReviewer1: 1,
+ testReviewer2: 2,
+ testReviewer3: 3,
+ testReviewer4: 4,
+ };
+ let reviewersForGo = new ReviewersForLabel("Go", {
+ dateOfLastReviewAssignment: dateOfLastReviewAssignment,
+ });
+
+ assert.throws(() => reviewersForGo.assignNextReviewer([]));
+ });
+ });
+});
diff --git a/scripts/ci/pr-bot/tsconfig.json b/scripts/ci/pr-bot/tsconfig.json
new file mode 100644
index 0000000000000..719f5dff8b9f5
--- /dev/null
+++ b/scripts/ci/pr-bot/tsconfig.json
@@ -0,0 +1,12 @@
+{
+ "compilerOptions": {
+ "target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
+ "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
+ "outDir": "./lib" /* Redirect output structure to the directory. */,
+ "rootDir": "./" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
+ "strict": true /* Enable all strict type-checking options. */,
+ "noImplicitAny": false /* Raise error on expressions and declarations with an implied 'any' type. */,
+ "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
+ },
+ "exclude": ["node_modules"]
+}
diff --git a/sdks/go.mod b/sdks/go.mod
index ce8703ff82e97..49315401b4b42 100644
--- a/sdks/go.mod
+++ b/sdks/go.mod
@@ -40,6 +40,7 @@ require (
github.com/linkedin/goavro v2.1.0+incompatible
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/nightlyone/lockfile v1.0.0
+ github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939 // indirect
github.com/spf13/cobra v1.2.1
github.com/testcontainers/testcontainers-go v0.12.0
golang.org/x/net v0.0.0-20211108170745-6635138e15ea
diff --git a/sdks/go.sum b/sdks/go.sum
index 1efb69e04fbef..d79b849b5a535 100644
--- a/sdks/go.sum
+++ b/sdks/go.sum
@@ -272,6 +272,7 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-gorp/gorp v2.0.0+incompatible/go.mod h1:7IfkAQnO7jfT/9IQ3R9wL1dFhukN6aQxzKTHnkxzA/E=
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
@@ -459,6 +460,7 @@ github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.4 h1:SO9z7FRPzA03QhHKJrH5BXA6HU1rS4V2nIVrrNC1iYk=
github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/linkedin/goavro v2.1.0+incompatible h1:DV2aUlj2xZiuxQyvag8Dy7zjY69ENjS66bWkSfdpddY=
@@ -588,6 +590,8 @@ github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+Gx
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
+github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939 h1:mtMU7aT8cTAyNL3O4RyOfe/OOUxwCN525SIbKQoUvw0=
+github.com/proullon/ramsql v0.0.0-20211120092837-c8d0a408b939/go.mod h1:jG8oAQG0ZPHPyxg5QlMERS31airDC+ZuqiAe8DUvFVo=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@@ -1146,10 +1150,10 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
-gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
+gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
+gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
diff --git a/sdks/go/pkg/beam/core/graph/mtime/time.go b/sdks/go/pkg/beam/core/graph/mtime/time.go
index 25ea9ce567f72..bdbb0c441af80 100644
--- a/sdks/go/pkg/beam/core/graph/mtime/time.go
+++ b/sdks/go/pkg/beam/core/graph/mtime/time.go
@@ -37,7 +37,7 @@ const (
// EndOfGlobalWindowTime is the timestamp at the end of the global window. It
// is a day before the max timestamp.
- // TODO Use GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS from the Runner API constants
+ // TODO(BEAM-4179) Use GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS from the Runner API constants
EndOfGlobalWindowTime = MaxTimestamp - 24*60*60*1000
// ZeroTimestamp is the default zero value time. It corresponds to the unix epoch.
@@ -65,7 +65,8 @@ func FromDuration(d time.Duration) Time {
// FromTime returns a milli-second precision timestamp from a time.Time.
func FromTime(t time.Time) Time {
- return Normalize(Time(n2m(t.UnixNano())))
+ // TODO(BEAM-13988): Replace t.UnixNano() with t.UnixMilli() for Go 1.17 or higher.
+ return Normalize(Time(t.UnixNano() / 1e6))
}
// Milliseconds returns the number of milli-seconds since the Unix epoch.
@@ -73,14 +74,18 @@ func (t Time) Milliseconds() int64 {
return int64(t)
}
-// Add returns the time plus the duration.
+// Add returns the time plus the duration. Input Durations of less than one
+// millisecond will not increment the time due to a loss of precision when
+// converting to milliseconds.
func (t Time) Add(d time.Duration) Time {
- return Normalize(Time(int64(t) + n2m(d.Nanoseconds())))
+ return Normalize(Time(int64(t) + d.Milliseconds()))
}
-// Subtract returns the time minus the duration.
+// Subtract returns the time minus the duration. Input Durations of less than one
+// millisecond will not increment the time due to a loss of precision when
+// converting to milliseconds.
func (t Time) Subtract(d time.Duration) Time {
- return Normalize(Time(int64(t) - n2m(d.Nanoseconds())))
+ return Normalize(Time(int64(t) - d.Milliseconds()))
}
func (t Time) String() string {
@@ -116,8 +121,3 @@ func Max(a, b Time) Time {
func Normalize(t Time) Time {
return Min(Max(t, MinTimestamp), MaxTimestamp)
}
-
-// n2m converts nanoseconds to milliseconds.
-func n2m(v int64) int64 {
- return v / 1e6
-}
diff --git a/sdks/go/pkg/beam/core/graph/mtime/time_test.go b/sdks/go/pkg/beam/core/graph/mtime/time_test.go
new file mode 100644
index 0000000000000..7012184c18ce7
--- /dev/null
+++ b/sdks/go/pkg/beam/core/graph/mtime/time_test.go
@@ -0,0 +1,215 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package mtime
+
+import (
+ "math"
+ "testing"
+ "time"
+)
+
+func TestAdd(t *testing.T) {
+ tests := []struct {
+ name string
+ baseTime Time
+ addition time.Duration
+ expOut Time
+ }{
+ {
+ "insignificant addition small",
+ Time(1000),
+ 1 * time.Nanosecond,
+ Time(1000),
+ },
+ {
+ "insignificant addition large",
+ Time(1000),
+ 999999 * time.Nanosecond,
+ Time(1000),
+ },
+ {
+ "significant addition small",
+ Time(1000),
+ 1 * time.Millisecond,
+ Time(1001),
+ },
+ {
+ "significant addition large",
+ Time(1000),
+ 10 * time.Second,
+ Time(11000),
+ },
+ {
+ "add past max timestamp",
+ MaxTimestamp,
+ 1 * time.Minute,
+ MaxTimestamp,
+ },
+ {
+ "add across max boundary",
+ Time(int64(MaxTimestamp) - 10000),
+ 10 * time.Minute,
+ MaxTimestamp,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ if got, want := test.baseTime.Add(test.addition), test.expOut; got != want {
+ t.Errorf("(%v).Add(%v), got time %v, want %v", test.baseTime, test.addition, got, want)
+ }
+ })
+ }
+}
+
+func TestSubtract(t *testing.T) {
+ tests := []struct {
+ name string
+ baseTime Time
+ subtraction time.Duration
+ expOut Time
+ }{
+ {
+ "insignificant subtraction small",
+ Time(1000),
+ 1 * time.Nanosecond,
+ Time(1000),
+ },
+ {
+ "insignificant subtraction large",
+ Time(1000),
+ 999999 * time.Nanosecond,
+ Time(1000),
+ },
+ {
+ "significant subtraction small",
+ Time(1000),
+ 1 * time.Millisecond,
+ Time(999),
+ },
+ {
+ "significant subtraction large",
+ Time(1000),
+ 10 * time.Second,
+ Time(-9000),
+ },
+ {
+ "subtract past min timestamp",
+ MinTimestamp,
+ 1 * time.Minute,
+ MinTimestamp,
+ },
+ {
+ "subtract across min boundary",
+ Time(int64(MinTimestamp) + 10000),
+ 10 * time.Minute,
+ MinTimestamp,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ if got, want := test.baseTime.Subtract(test.subtraction), test.expOut; got != want {
+ t.Errorf("(%v).Subtract(%v), got time %v, want %v", test.baseTime, test.subtraction, got, want)
+ }
+ })
+ }
+}
+
+func TestNormalize(t *testing.T) {
+ tests := []struct {
+ name string
+ in Time
+ expOut Time
+ }{
+ {
+ "min timestamp",
+ MinTimestamp,
+ MinTimestamp,
+ },
+ {
+ "max timestamp",
+ MaxTimestamp,
+ MaxTimestamp,
+ },
+ {
+ "end of global window",
+ EndOfGlobalWindowTime,
+ EndOfGlobalWindowTime,
+ },
+ {
+ "beyond max timestamp",
+ Time(math.MaxInt64),
+ MaxTimestamp,
+ },
+ {
+ "below min timestamp",
+ Time(math.MinInt64),
+ MinTimestamp,
+ },
+ {
+ "normal value",
+ Time(int64(20000)),
+ Time(int64(20000)),
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ if got, want := Normalize(test.in), test.expOut; got != want {
+ t.Errorf("Normalize(%v), got Time %v, want %v", test.in, got, want)
+ }
+ })
+ }
+}
+
+func TestFromTime(t *testing.T) {
+ tests := []struct {
+ name string
+ input time.Time
+ expOut Time
+ }{
+ {
+ "zero unix",
+ time.Unix(0, 0).UTC(),
+ Time(0),
+ },
+ {
+ "behind unix",
+ time.Unix(-1, 0).UTC(),
+ Time(-1000),
+ },
+ {
+ "ahead of unix",
+ time.Unix(1, 0).UTC(),
+ Time(1000),
+ },
+ {
+ "insignificant time small",
+ time.Unix(0, 1),
+ Time(0),
+ },
+ {
+ "insignificant time large",
+ time.Unix(0, 999999),
+ Time(0),
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ if got, want := FromTime(test.input), test.expOut; got != want {
+ t.Errorf("FromTime(%v), got %v, want %v", test.input, got, want)
+ }
+ })
+ }
+}
diff --git a/sdks/go/pkg/beam/core/graph/window/trigger/trigger.go b/sdks/go/pkg/beam/core/graph/window/trigger/trigger.go
index c12c03aa5e893..148a672fc695b 100644
--- a/sdks/go/pkg/beam/core/graph/window/trigger/trigger.go
+++ b/sdks/go/pkg/beam/core/graph/window/trigger/trigger.go
@@ -66,6 +66,9 @@ func (t *AfterCountTrigger) ElementCount() int32 {
// AfterCount constructs a trigger that fires after
// at least `count` number of elements are processed.
func AfterCount(count int32) *AfterCountTrigger {
+ if count < 1 {
+ panic(fmt.Errorf("trigger.AfterCount(%v) must be a positive integer", count))
+ }
return &AfterCountTrigger{elementCount: count}
}
diff --git a/sdks/go/pkg/beam/core/graph/window/trigger/trigger_test.go b/sdks/go/pkg/beam/core/graph/window/trigger/trigger_test.go
new file mode 100644
index 0000000000000..a23de354bfd61
--- /dev/null
+++ b/sdks/go/pkg/beam/core/graph/window/trigger/trigger_test.go
@@ -0,0 +1,73 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package trigger
+
+import (
+ "testing"
+ "time"
+
+ "github.com/google/go-cmp/cmp"
+)
+
+func TestAfterCountTrigger(t *testing.T) {
+ tr := AfterCount(1)
+ want := int32(1)
+ if got := tr.ElementCount(); got != want {
+ t.Errorf("element count not configured correctly. got %v, want %v", got, want)
+ }
+}
+
+func TestAfterProcessingTimeTrigger(t *testing.T) {
+ tests := []struct {
+ tr *AfterProcessingTimeTrigger
+ tt []TimestampTransform
+ }{
+ {
+ tr: AfterProcessingTime().PlusDelay(time.Millisecond),
+ tt: []TimestampTransform{DelayTransform{Delay: 1}},
+ },
+ {
+ tr: AfterProcessingTime().PlusDelay(time.Millisecond).AlignedTo(time.Millisecond, time.Time{}),
+ tt: []TimestampTransform{DelayTransform{Delay: 1}, AlignToTransform{Period: 1, Offset: 0}},
+ },
+ }
+ for _, test := range tests {
+ if diff := cmp.Diff(test.tr.TimestampTransforms(), test.tt); diff != "" {
+ t.Errorf("timestamp transforms are not equal: %v", diff)
+ }
+ }
+}
+
+func TestRepeatTrigger(t *testing.T) {
+ subTr := AfterCount(2)
+ tr := Repeat(subTr)
+
+ if got, ok := tr.SubTrigger().(*AfterCountTrigger); ok && got != subTr {
+ t.Errorf("subtrigger not configured correctly. got %v, want %v", got, subTr)
+ }
+}
+
+func TestAfterEndOfWindowTrigger(t *testing.T) {
+ earlyTr := AfterCount(50)
+ lateTr := Always()
+ tr := AfterEndOfWindow().EarlyFiring(earlyTr).LateFiring(lateTr)
+
+ if got, ok := tr.Early().(*AfterCountTrigger); ok && got != earlyTr {
+ t.Errorf("early firing trigger not configured correctly. got %v, want %v", got, earlyTr)
+ }
+ if got, ok := tr.Late().(*AlwaysTrigger); ok && got != lateTr {
+ t.Errorf("late firing trigger not configured correctly. got %v, want %v", got, lateTr)
+ }
+}
diff --git a/sdks/go/pkg/beam/core/runtime/exec/coder_test.go b/sdks/go/pkg/beam/core/runtime/exec/coder_test.go
index 25812aca4e560..02d1f81da7e05 100644
--- a/sdks/go/pkg/beam/core/runtime/exec/coder_test.go
+++ b/sdks/go/pkg/beam/core/runtime/exec/coder_test.go
@@ -25,6 +25,7 @@ import (
"github.com/apache/beam/sdks/v2/go/pkg/beam/core/util/reflectx"
"github.com/apache/beam/sdks/v2/go/pkg/beam/core/graph/coder"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/core/graph/window"
"github.com/apache/beam/sdks/v2/go/pkg/beam/core/runtime/coderx"
)
@@ -54,6 +55,9 @@ func TestCoders(t *testing.T) {
return &coder.Coder{Kind: coder.Custom, Custom: c, T: typex.New(reflectx.String)}
}(),
val: &FullValue{Elm: "myString"},
+ }, {
+ coder: &coder.Coder{Kind: coder.LP, Components: []*coder.Coder{coder.NewString()}},
+ val: &FullValue{Elm: "myString"},
}, {
coder: coder.NewKV([]*coder.Coder{coder.NewVarInt(), coder.NewBool()}),
val: &FullValue{Elm: int64(72), Elm2: false},
@@ -64,6 +68,18 @@ func TestCoders(t *testing.T) {
coder.NewDouble(),
coder.NewBool()})}),
val: &FullValue{Elm: int64(42), Elm2: &FullValue{Elm: float64(3.14), Elm2: true}},
+ }, {
+ coder: &coder.Coder{Kind: coder.Window, Window: coder.NewGlobalWindow()},
+ val: &FullValue{Windows: []typex.Window{window.GlobalWindow{}}},
+ }, {
+ coder: &coder.Coder{Kind: coder.Window, Window: coder.NewIntervalWindow()},
+ val: &FullValue{Windows: []typex.Window{window.IntervalWindow{Start: 0, End: 100}}},
+ }, {
+ coder: coder.NewW(coder.NewVarInt(), coder.NewGlobalWindow()),
+ val: &FullValue{Elm: int64(13), Windows: []typex.Window{window.GlobalWindow{}}},
+ }, {
+ coder: coder.NewPW(coder.NewString(), coder.NewGlobalWindow()),
+ val: &FullValue{Elm: "myString" /*Windowing info isn't encoded for PW so we can omit it here*/},
},
} {
t.Run(fmt.Sprintf("%v", test.coder), func(t *testing.T) {
@@ -116,4 +132,140 @@ func compareFV(t *testing.T, got *FullValue, want *FullValue) {
t.Errorf("got %v [type: %s], want %v [type %s]",
got, reflect.TypeOf(got), wantFv, reflect.TypeOf(wantFv))
}
+
+ // Check if the desired FV has windowing information
+ if want.Windows != nil {
+ if gotLen, wantLen := len(got.Windows), len(want.Windows); gotLen != wantLen {
+ t.Fatalf("got %d windows in FV, want %v", gotLen, wantLen)
+ }
+ for i := range want.Windows {
+ if gotWin, wantWin := got.Windows[i], want.Windows[i]; !wantWin.Equals(gotWin) {
+ t.Errorf("got window %v at position %d, want %v", gotWin, i, wantWin)
+ }
+ }
+ }
+}
+
+func TestIterableCoder(t *testing.T) {
+ cod := coder.NewI(coder.NewVarInt())
+ wantVals := []int64{8, 24, 72}
+ val := &FullValue{Elm: wantVals}
+
+ var buf bytes.Buffer
+ enc := MakeElementEncoder(cod)
+ if err := enc.Encode(val, &buf); err != nil {
+ t.Fatalf("Couldn't encode value: %v", err)
+ }
+
+ dec := MakeElementDecoder(cod)
+ result, err := dec.Decode(&buf)
+ if err != nil {
+ t.Fatalf("Couldn't decode value: %v", err)
+ }
+
+ gotVals, ok := result.Elm.([]int64)
+ if !ok {
+ t.Fatalf("got output element %v, want []int64", result.Elm)
+ }
+
+ if got, want := len(gotVals), len(wantVals); got != want {
+ t.Errorf("got %d elements in iterable, want %d", got, want)
+ }
+
+ for i := range gotVals {
+ if got, want := gotVals[i], wantVals[i]; got != want {
+ t.Errorf("got %d at position %d, want %d", got, i, want)
+ }
+ }
+}
+
+// TODO(BEAM-10660): Update once proper timer support is added
+func TestTimerCoder(t *testing.T) {
+ var buf bytes.Buffer
+ tCoder := coder.NewT(coder.NewVarInt(), coder.NewGlobalWindow())
+ wantVal := &FullValue{Elm: int64(13)}
+
+ enc := MakeElementEncoder(tCoder)
+ if err := enc.Encode(wantVal, &buf); err != nil {
+ t.Fatalf("Couldn't encode value: %v", err)
+ }
+
+ dec := MakeElementDecoder(tCoder)
+ result, err := dec.Decode(&buf)
+ if err != nil {
+ t.Fatalf("Couldn't decode value: %v", err)
+ }
+
+ compareFV(t, result, wantVal)
+}
+
+type namedTypeForTest struct {
+ A, B int64
+ C string
+}
+
+func TestRowCoder(t *testing.T) {
+ var buf bytes.Buffer
+ rCoder := coder.NewR(typex.New(reflect.TypeOf((*namedTypeForTest)(nil))))
+ wantStruct := &namedTypeForTest{A: int64(8), B: int64(24), C: "myString"}
+ wantVal := &FullValue{Elm: wantStruct}
+
+ enc := MakeElementEncoder(rCoder)
+ if err := enc.Encode(wantVal, &buf); err != nil {
+ t.Fatalf("Couldn't encode value: %v", err)
+ }
+
+ dec := MakeElementDecoder(rCoder)
+ result, err := dec.Decode(&buf)
+ if err != nil {
+ t.Fatalf("Couldn't decode value: %v", err)
+ }
+ gotPtr, ok := result.Elm.(*namedTypeForTest)
+ gotStruct := *gotPtr
+ if !ok {
+ t.Fatalf("got %v, want namedTypeForTest struct", result.Elm)
+ }
+ if got, want := gotStruct.A, wantStruct.A; got != want {
+ t.Errorf("got A field value %d, want %d", got, want)
+ }
+ if got, want := gotStruct.B, wantStruct.B; got != want {
+ t.Errorf("got B field value %d, want %d", got, want)
+ }
+ if got, want := gotStruct.C, wantStruct.C; got != want {
+ t.Errorf("got C field value %v, want %v", got, want)
+ }
+}
+
+func TestPaneCoder(t *testing.T) {
+ pn := coder.NewPane(0x04)
+ val := &FullValue{Pane: pn}
+ cod := &coder.Coder{Kind: coder.PaneInfo}
+
+ var buf bytes.Buffer
+ enc := MakeElementEncoder(cod)
+ if err := enc.Encode(val, &buf); err != nil {
+ t.Fatalf("Couldn't encode value: %v", err)
+ }
+
+ dec := MakeElementDecoder(cod)
+ result, err := dec.Decode(&buf)
+ if err != nil {
+ t.Fatalf("Couldn't decode value: %v", err)
+ }
+
+ if got, want := result.Pane.Timing, pn.Timing; got != want {
+ t.Errorf("got pane timing %v, want %v", got, want)
+ }
+ if got, want := result.Pane.IsFirst, pn.IsFirst; got != want {
+ t.Errorf("got IsFirst %v, want %v", got, want)
+ }
+ if got, want := result.Pane.IsLast, pn.IsLast; got != want {
+ t.Errorf("got IsLast %v, want %v", got, want)
+ }
+ if got, want := result.Pane.Index, pn.Index; got != want {
+ t.Errorf("got pane index %v, want %v", got, want)
+ }
+ if got, want := result.Pane.NonSpeculativeIndex, pn.NonSpeculativeIndex; got != want {
+ t.Errorf("got pane non-speculative index %v, want %v", got, want)
+ }
}
diff --git a/sdks/go/pkg/beam/core/runtime/exec/sideinput_test.go b/sdks/go/pkg/beam/core/runtime/exec/sideinput_test.go
index 30990642aed3d..3293fe44a51ee 100644
--- a/sdks/go/pkg/beam/core/runtime/exec/sideinput_test.go
+++ b/sdks/go/pkg/beam/core/runtime/exec/sideinput_test.go
@@ -67,26 +67,26 @@ func TestNewSideInputAdapter(t *testing.T) {
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
- adapter := NewSideInputAdapter(test.sid, test.sideInputID, test.c, nil)
- adapterStruct, ok := adapter.(*sideInputAdapter)
- if !ok {
- t.Errorf("failed to convert interface to sideInputAdapter struct in test %v", test)
- }
- if got, want := adapterStruct.sid, test.sid; got != want {
- t.Errorf("got SID %v, want %v", got, want)
- }
- if got, want := adapterStruct.sideInputID, test.sideInputID; got != want {
- t.Errorf("got sideInputID %v, want %v", got, want)
- }
- if got, want := adapterStruct.c, test.c; got != want {
- t.Errorf("got coder %v, want %v", got, want)
- }
- if got, want := reflect.TypeOf(adapterStruct.kc), reflect.TypeOf(test.kc); got != want {
- t.Errorf("got ElementEncoder type %v, want %v", got, want)
- }
- if got, want := reflect.TypeOf(adapterStruct.ec), reflect.TypeOf(test.ec); got != want {
- t.Errorf("got ElementDecoder type %v, want %v", got, want)
- }
+ adapter := NewSideInputAdapter(test.sid, test.sideInputID, test.c, nil)
+ adapterStruct, ok := adapter.(*sideInputAdapter)
+ if !ok {
+ t.Errorf("failed to convert interface to sideInputAdapter struct in test %v", test)
+ }
+ if got, want := adapterStruct.sid, test.sid; got != want {
+ t.Errorf("got SID %v, want %v", got, want)
+ }
+ if got, want := adapterStruct.sideInputID, test.sideInputID; got != want {
+ t.Errorf("got sideInputID %v, want %v", got, want)
+ }
+ if got, want := adapterStruct.c, test.c; got != want {
+ t.Errorf("got coder %v, want %v", got, want)
+ }
+ if got, want := reflect.TypeOf(adapterStruct.kc), reflect.TypeOf(test.kc); got != want {
+ t.Errorf("got ElementEncoder type %v, want %v", got, want)
+ }
+ if got, want := reflect.TypeOf(adapterStruct.ec), reflect.TypeOf(test.ec); got != want {
+ t.Errorf("got ElementDecoder type %v, want %v", got, want)
+ }
})
}
}
diff --git a/sdks/go/pkg/beam/core/util/stringx/bytes.go b/sdks/go/pkg/beam/core/util/stringx/bytes.go
index b2110fa1072ce..bf4cd111dddec 100644
--- a/sdks/go/pkg/beam/core/util/stringx/bytes.go
+++ b/sdks/go/pkg/beam/core/util/stringx/bytes.go
@@ -15,6 +15,9 @@
// Package stringx contains utilities for working with strings. It
// complements the standard "strings" package.
+//
+// Deprecated: the utilities in this package are unused within the code base
+// and will be removed in a future Beam release.
package stringx
// ToBytes converts a string to a byte slice.
diff --git a/sdks/go/pkg/beam/internal/errors/errors_test.go b/sdks/go/pkg/beam/internal/errors/errors_test.go
index 614609ff80b89..26e0057a74679 100644
--- a/sdks/go/pkg/beam/internal/errors/errors_test.go
+++ b/sdks/go/pkg/beam/internal/errors/errors_test.go
@@ -57,6 +57,9 @@ func TestWrap(t *testing.T) {
}, {
err: Wrap(Wrap(New(base), msg1), msg2),
want: errorStructure{{ERROR, msg2}, {ERROR, msg1}, {ERROR, base}},
+ }, {
+ err: Wrap(nil, msg1),
+ want: nil,
},
}
for _, test := range tests {
@@ -76,6 +79,13 @@ func TestWrapf(t *testing.T) {
}
}
+func TestWrapf_NilErr(t *testing.T) {
+ err := Wrapf(nil, "%s %d", "ten", 10)
+ if err != nil {
+ t.Errorf(`Wrapf(nil, "%%s %%d", "ten", 10). Want: nil, Got: %q`, err)
+ }
+}
+
func TestContext(t *testing.T) {
tests := []struct {
err error
@@ -90,6 +100,9 @@ func TestContext(t *testing.T) {
}, {
err: Wrap(WithContext(WithContext(Wrap(New(base), msg1), ctx1), ctx2), msg2),
want: errorStructure{{ERROR, msg2}, {CONTEXT, ctx2}, {CONTEXT, ctx1}, {ERROR, msg1}, {ERROR, base}},
+ }, {
+ err: WithContext(nil, ctx1),
+ want: nil,
},
}
for _, test := range tests {
@@ -100,6 +113,13 @@ func TestContext(t *testing.T) {
}
}
+func TestWithContextf_NilErr(t *testing.T) {
+ err := WithContextf(nil, "%s %d", "ten", 10)
+ if err != nil {
+ t.Errorf(`WithContextf(nil, "%%s %%d", "ten", 10). Want: nil, Got: %q`, err)
+ }
+}
+
func TestWithContextf(t *testing.T) {
want := fmt.Sprintf("%s %d", "ten", 10)
err := WithContextf(New(base), "%s %d", "ten", 10)
@@ -129,6 +149,9 @@ func TestTopLevelMsg(t *testing.T) {
}, {
err: Wrap(SetTopLevelMsg(WithContext(SetTopLevelMsg(New(base), top1), ctx1), top2), msg1),
want: top2,
+ }, {
+ err: SetTopLevelMsg(nil, top1),
+ want: "",
},
}
for _, test := range tests {
@@ -147,10 +170,51 @@ func TestSetTopLevelMsgf(t *testing.T) {
}
}
+func TestSetTopLevelMsgf_NilErr(t *testing.T) {
+ want := ""
+ err := SetTopLevelMsgf(nil, "%s %d", "ten", 10)
+ if getTop(err) != want {
+ t.Errorf("Incorrect formatting. Want: %q, Got: %q", want, getTop(err))
+ }
+}
+
+func TestError(t *testing.T) {
+ tests := []struct {
+ err error
+ want string
+ }{
+ {
+ err: Wrap(New(base), msg1),
+ want: "message 1\n\tcaused by:\nbase",
+ },
+ {
+ err: SetTopLevelMsg(New(base), top1),
+ want: "top level message 1\nFull error:\nbase",
+ },
+ {
+ err: SetTopLevelMsg(Wrap(Wrap(New(base), msg1), msg2), top1),
+ want: "top level message 1\nFull error:\nmessage 2\n\tcaused by:\nmessage 1\n\tcaused by:\nbase",
+ },
+ }
+
+ for _, test := range tests {
+ if be, ok := test.err.(*beamError); ok {
+ if got, want := be.Error(), test.want; got != want {
+ t.Errorf("Incorrect formatting. Want: %q, Got: %q", want, got)
+ }
+ } else {
+ t.Errorf("Error should be type *beamError, got: %q", test.err)
+ }
+ }
+}
+
// getStructure extracts the structure of an error, outputting a slice that
// represents the nested messages in that error in the order they are output
// and with the type of message (context or error) described.
func getStructure(e error) errorStructure {
+ if e == nil {
+ return nil
+ }
var structure errorStructure
for {
@@ -175,6 +239,10 @@ func getStructure(e error) errorStructure {
}
func equalStructure(left errorStructure, right errorStructure) bool {
+ if left == nil || right == nil {
+ return left == nil && right == nil
+ }
+
if len(left) != len(right) {
return false
}
diff --git a/sdks/go/pkg/beam/io/databaseio/database_test.go b/sdks/go/pkg/beam/io/databaseio/database_test.go
new file mode 100644
index 0000000000000..1876f57012159
--- /dev/null
+++ b/sdks/go/pkg/beam/io/databaseio/database_test.go
@@ -0,0 +1,86 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package databaseio
+
+import (
+ "database/sql"
+ "reflect"
+ "testing"
+
+ "github.com/apache/beam/sdks/v2/go/pkg/beam"
+ _ "github.com/apache/beam/sdks/v2/go/pkg/beam/runners/direct"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/passert"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/ptest"
+ _ "github.com/proullon/ramsql/driver"
+)
+
+type Address struct {
+ Street string
+ Street_number int
+}
+
+func TestRead(t *testing.T) {
+ db, err := sql.Open("ramsql", "user:password@/dbname")
+ if err != nil {
+ t.Fatalf("Test infra failure: Failed to open database with error %v", err)
+ }
+ defer db.Close()
+ if err = insertTestData(db); err != nil {
+ t.Fatalf("Test infra failure: Failed to create/populate table with error %v", err)
+ }
+
+ p, s := beam.NewPipelineWithRoot()
+ elements := Read(s, "ramsql", "user:password@/dbname", "address", reflect.TypeOf(Address{}))
+ passert.Count(s, elements, "NumElements", 2)
+ passert.Equals(s, elements, Address{Street: "orchard lane", Street_number: 1}, Address{Street: "morris st", Street_number: 200})
+
+ ptest.RunAndValidate(t, p)
+}
+
+func TestQuery(t *testing.T) {
+ db, err := sql.Open("ramsql", "user:password@/dbname2")
+ if err != nil {
+ t.Fatalf("Test infra failure: Failed to open database with error %v", err)
+ }
+ defer db.Close()
+ if err = insertTestData(db); err != nil {
+ t.Fatalf("Test infra failure: Failed to create/populate table with error %v", err)
+ }
+
+ p, s := beam.NewPipelineWithRoot()
+ read := Query(s, "ramsql", "user:password@/dbname2", "SELECT * FROM address WHERE street_number < 10", reflect.TypeOf(Address{}))
+
+ passert.Count(s, read, "NumElementsFromRead", 1)
+ passert.Equals(s, read, Address{Street: "orchard lane", Street_number: 1})
+
+ ptest.RunAndValidate(t, p)
+}
+
+func insertTestData(db *sql.DB) error {
+ _, err := db.Exec("CREATE TABLE address (street TEXT, street_number INT);")
+ if err != nil {
+ return err
+ }
+ _, err = db.Exec("INSERT INTO address (street, street_number) VALUES ('orchard lane', 1);")
+ if err != nil {
+ return err
+ }
+ _, err = db.Exec("INSERT INTO address (street, street_number) VALUES ('morris st', 200);")
+ if err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/sdks/go/pkg/beam/io/textio/sdf_test.go b/sdks/go/pkg/beam/io/textio/sdf_test.go
index 881b9ab519992..7b1b416637617 100644
--- a/sdks/go/pkg/beam/io/textio/sdf_test.go
+++ b/sdks/go/pkg/beam/io/textio/sdf_test.go
@@ -28,9 +28,19 @@ import (
// outputs the correct number of lines for it, even for an exceedingly long
// line.
func TestReadSdf(t *testing.T) {
- f := "../../../../data/textio_test.txt"
p, s := beam.NewPipelineWithRoot()
- lines := ReadSdf(s, f)
+ lines := ReadSdf(s, testFilePath)
+ passert.Count(s, lines, "NumLines", 1)
+
+ if _, err := beam.Run(context.Background(), "direct", p); err != nil {
+ t.Fatalf("Failed to execute job: %v", err)
+ }
+}
+
+func TestReadAllSdf(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ files := beam.Create(s, testFilePath)
+ lines := ReadAllSdf(s, files)
passert.Count(s, lines, "NumLines", 1)
if _, err := beam.Run(context.Background(), "direct", p); err != nil {
diff --git a/sdks/go/pkg/beam/io/textio/textio_test.go b/sdks/go/pkg/beam/io/textio/textio_test.go
index 4090535226c77..0cd1699e657ea 100644
--- a/sdks/go/pkg/beam/io/textio/textio_test.go
+++ b/sdks/go/pkg/beam/io/textio/textio_test.go
@@ -17,20 +17,25 @@
package textio
import (
+ "errors"
+ "os"
"testing"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam"
_ "github.com/apache/beam/sdks/v2/go/pkg/beam/io/filesystem/local"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/passert"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/ptest"
)
-func TestRead(t *testing.T) {
- f := "../../../../data/textio_test.txt"
+const testFilePath = "../../../../data/textio_test.txt"
+func TestReadFn(t *testing.T) {
receivedLines := []string{}
getLines := func(line string) {
receivedLines = append(receivedLines, line)
}
- err := readFn(nil, f, getLines)
+ err := readFn(nil, testFilePath, getLines)
if err != nil {
t.Fatalf("failed with %v", err)
}
@@ -40,3 +45,63 @@ func TestRead(t *testing.T) {
}
}
+
+func TestRead(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ lines := Read(s, testFilePath)
+ passert.Count(s, lines, "NumLines", 1)
+
+ ptest.RunAndValidate(t, p)
+}
+
+func TestReadAll(t *testing.T) {
+ p, s, files := ptest.CreateList([]string{testFilePath})
+ lines := ReadAll(s, files)
+ passert.Count(s, lines, "NumLines", 1)
+
+ ptest.RunAndValidate(t, p)
+}
+
+func TestWrite(t *testing.T) {
+ out := "text.txt"
+ p, s := beam.NewPipelineWithRoot()
+ lines := Read(s, testFilePath)
+ Write(s, out, lines)
+
+ ptest.RunAndValidate(t, p)
+
+ if _, err := os.Stat(out); errors.Is(err, os.ErrNotExist) {
+ t.Fatalf("Failed to write %v", out)
+ }
+ t.Cleanup(func() {
+ os.Remove(out)
+ })
+
+ outfileContents, _ := os.ReadFile(out)
+ infileContents, _ := os.ReadFile(testFilePath)
+ if got, want := string(outfileContents), string(infileContents); got != want {
+ t.Fatalf("Write() wrote the wrong contents. Got: %v Want: %v", got, want)
+ }
+}
+
+func TestImmediate(t *testing.T) {
+ f, err := os.CreateTemp("", "test2.txt")
+ if err != nil {
+ t.Fatalf("Failed to create temp file, err: %v", err)
+ }
+ t.Cleanup(func() {
+ os.Remove(f.Name())
+ })
+ if err := os.WriteFile(f.Name(), []byte("hello\ngo\n"), 0644); err != nil {
+ t.Fatalf("Failed to write file %v, err: %v", f, err)
+ }
+
+ p, s := beam.NewPipelineWithRoot()
+ lines, err := Immediate(s, f.Name())
+ if err != nil {
+ t.Fatalf("Failed to insert Immediate: %v", err)
+ }
+ passert.Count(s, lines, "NumLines", 2)
+
+ ptest.RunAndValidate(t, p)
+}
diff --git a/sdks/go/pkg/beam/runners/dataflow/dataflow.go b/sdks/go/pkg/beam/runners/dataflow/dataflow.go
index 4cd877f5afde0..1a7da076755a0 100644
--- a/sdks/go/pkg/beam/runners/dataflow/dataflow.go
+++ b/sdks/go/pkg/beam/runners/dataflow/dataflow.go
@@ -159,8 +159,59 @@ func Execute(ctx context.Context, p *beam.Pipeline) (beam.PipelineResult, error)
panic("Beam has not been initialized. Call beam.Init() before pipeline construction.")
}
- // (1) Gather job options
+ beam.PipelineOptions.LoadOptionsFromFlags(flagFilter)
+ opts, err := getJobOptions(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ // (1) Build and submit
+ // NOTE(herohde) 10/8/2018: the last segment of the names must be "worker" and "dataflow-worker.jar".
+ id := fmt.Sprintf("go-%v-%v", atomic.AddInt32(&unique, 1), time.Now().UnixNano())
+
+ modelURL := gcsx.Join(*stagingLocation, id, "model")
+ workerURL := gcsx.Join(*stagingLocation, id, "worker")
+ jarURL := gcsx.Join(*stagingLocation, id, "dataflow-worker.jar")
+ xlangURL := gcsx.Join(*stagingLocation, id, "xlang")
+
+ edges, _, err := p.Build()
+ if err != nil {
+ return nil, err
+ }
+ artifactURLs, err := dataflowlib.ResolveXLangArtifacts(ctx, edges, opts.Project, xlangURL)
+ if err != nil {
+ return nil, errors.WithContext(err, "resolving cross-language artifacts")
+ }
+ opts.ArtifactURLs = artifactURLs
+ environment, err := graphx.CreateEnvironment(ctx, jobopts.GetEnvironmentUrn(ctx), getContainerImage)
+ if err != nil {
+ return nil, errors.WithContext(err, "creating environment for model pipeline")
+ }
+ model, err := graphx.Marshal(edges, &graphx.Options{Environment: environment})
+ if err != nil {
+ return nil, errors.WithContext(err, "generating model pipeline")
+ }
+ err = pipelinex.ApplySdkImageOverrides(model, jobopts.GetSdkImageOverrides())
+ if err != nil {
+ return nil, errors.WithContext(err, "applying container image overrides")
+ }
+
+ if *dryRun {
+ log.Info(ctx, "Dry-run: not submitting job!")
+
+ log.Info(ctx, proto.MarshalTextString(model))
+ job, err := dataflowlib.Translate(ctx, model, opts, workerURL, jarURL, modelURL)
+ if err != nil {
+ return nil, err
+ }
+ dataflowlib.PrintJob(ctx, job)
+ return nil, nil
+ }
+ return dataflowlib.Execute(ctx, model, opts, workerURL, jarURL, modelURL, *endpoint, *executeAsync)
+}
+
+func getJobOptions(ctx context.Context) (*dataflowlib.JobOptions, error) {
project := gcpopts.GetProjectFromFlagOrEnvironment(ctx)
if project == "" {
return nil, errors.New("no Google Cloud project specified. Use --project=")
@@ -254,51 +305,9 @@ func Execute(ctx context.Context, p *beam.Pipeline) (beam.PipelineResult, error)
opts.TempLocation = gcsx.Join(*stagingLocation, "tmp")
}
- // (1) Build and submit
- // NOTE(herohde) 10/8/2018: the last segment of the names must be "worker" and "dataflow-worker.jar".
- id := fmt.Sprintf("go-%v-%v", atomic.AddInt32(&unique, 1), time.Now().UnixNano())
-
- modelURL := gcsx.Join(*stagingLocation, id, "model")
- workerURL := gcsx.Join(*stagingLocation, id, "worker")
- jarURL := gcsx.Join(*stagingLocation, id, "dataflow-worker.jar")
- xlangURL := gcsx.Join(*stagingLocation, id, "xlang")
-
- edges, _, err := p.Build()
- if err != nil {
- return nil, err
- }
- artifactURLs, err := dataflowlib.ResolveXLangArtifacts(ctx, edges, opts.Project, xlangURL)
- if err != nil {
- return nil, errors.WithContext(err, "resolving cross-language artifacts")
- }
- opts.ArtifactURLs = artifactURLs
- environment, err := graphx.CreateEnvironment(ctx, jobopts.GetEnvironmentUrn(ctx), getContainerImage)
- if err != nil {
- return nil, errors.WithContext(err, "creating environment for model pipeline")
- }
- model, err := graphx.Marshal(edges, &graphx.Options{Environment: environment})
- if err != nil {
- return nil, errors.WithContext(err, "generating model pipeline")
- }
- err = pipelinex.ApplySdkImageOverrides(model, jobopts.GetSdkImageOverrides())
- if err != nil {
- return nil, errors.WithContext(err, "applying container image overrides")
- }
-
- if *dryRun {
- log.Info(ctx, "Dry-run: not submitting job!")
-
- log.Info(ctx, proto.MarshalTextString(model))
- job, err := dataflowlib.Translate(ctx, model, opts, workerURL, jarURL, modelURL)
- if err != nil {
- return nil, err
- }
- dataflowlib.PrintJob(ctx, job)
- return nil, nil
- }
-
- return dataflowlib.Execute(ctx, model, opts, workerURL, jarURL, modelURL, *endpoint, *executeAsync)
+ return opts, nil
}
+
func gcsRecorderHook(opts []string) perf.CaptureHook {
bucket, prefix, err := gcsx.ParseObject(opts[0])
if err != nil {
diff --git a/sdks/go/pkg/beam/runners/dataflow/dataflow_test.go b/sdks/go/pkg/beam/runners/dataflow/dataflow_test.go
index 1e2844630c4b3..568860194ec18 100644
--- a/sdks/go/pkg/beam/runners/dataflow/dataflow_test.go
+++ b/sdks/go/pkg/beam/runners/dataflow/dataflow_test.go
@@ -15,7 +15,12 @@
package dataflow
-import "testing"
+import (
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/options/gcpopts"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/options/jobopts"
+ "sort"
+ "testing"
+)
func TestDontUseFlagAsPipelineOption(t *testing.T) {
f := "dummy_flag"
@@ -27,3 +32,131 @@ func TestDontUseFlagAsPipelineOption(t *testing.T) {
t.Fatalf("%q should be in the filter, but isn't set", f)
}
}
+
+func TestGetJobOptions(t *testing.T) {
+ *labels = `{"label1": "val1", "label2": "val2"}`
+ *stagingLocation = "gs://testStagingLocation"
+ *autoscalingAlgorithm = "NONE"
+ *minCPUPlatform = "testPlatform"
+
+ *gcpopts.Project = "testProject"
+ *gcpopts.Region = "testRegion"
+
+ *jobopts.Experiments = "use_runner_v2,use_portable_job_submission"
+ *jobopts.JobName = "testJob"
+
+ opts, err := getJobOptions(nil)
+ if err != nil {
+ t.Fatalf("getJobOptions() returned error %q, want %q", err, "nil")
+ }
+ if got, want := opts.Name, "testJob"; got != want {
+ t.Errorf("getJobOptions().Name = %q, want %q", got, want)
+ }
+ if got, want := len(opts.Experiments), 3; got != want {
+ t.Errorf("len(getJobOptions().Experiments) = %q, want %q", got, want)
+ } else {
+ sort.Strings(opts.Experiments)
+ expectedExperiments := []string{"min_cpu_platform=testPlatform", "use_portable_job_submission", "use_runner_v2"}
+ for i := 0; i < 3; i++ {
+ if got, want := opts.Experiments[i], expectedExperiments[i]; got != want {
+ t.Errorf("getJobOptions().Experiments = %q, want %q", got, want)
+ }
+ }
+ }
+ if got, want := opts.Project, "testProject"; got != want {
+ t.Errorf("getJobOptions().Project = %q, want %q", got, want)
+ }
+ if got, want := opts.Region, "testRegion"; got != want {
+ t.Errorf("getJobOptions().Region = %q, want %q", got, want)
+ }
+ if got, want := len(opts.Labels), 2; got != want {
+ t.Errorf("len(getJobOptions().Labels) = %q, want %q", got, want)
+ } else {
+ if got, want := opts.Labels["label1"], "val1"; got != want {
+ t.Errorf("getJobOptions().Labels[\"label1\"] = %q, want %q", got, want)
+ }
+ if got, want := opts.Labels["label2"], "val2"; got != want {
+ t.Errorf("getJobOptions().Labels[\"label2\"] = %q, want %q", got, want)
+ }
+ }
+ if got, want := opts.TempLocation, "gs://testStagingLocation/tmp"; got != want {
+ t.Errorf("getJobOptions().TempLocation = %q, want %q", got, want)
+ }
+}
+
+func TestGetJobOptions_NoExperimentsSet(t *testing.T) {
+ *labels = `{"label1": "val1", "label2": "val2"}`
+ *stagingLocation = "gs://testStagingLocation"
+ *autoscalingAlgorithm = "NONE"
+ *minCPUPlatform = ""
+
+ *gcpopts.Project = "testProject"
+ *gcpopts.Region = "testRegion"
+
+ *jobopts.Experiments = ""
+ *jobopts.JobName = "testJob"
+
+ opts, err := getJobOptions(nil)
+
+ if err != nil {
+ t.Fatalf("getJobOptions() returned error %q, want %q", err, "nil")
+ }
+ if got, want := len(opts.Experiments), 2; got != want {
+ t.Fatalf("len(getJobOptions().Experiments) = %q, want %q", got, want)
+ }
+ sort.Strings(opts.Experiments)
+ expectedExperiments := []string{"use_portable_job_submission", "use_unified_worker"}
+ for i := 0; i < 2; i++ {
+ if got, want := opts.Experiments[i], expectedExperiments[i]; got != want {
+ t.Errorf("getJobOptions().Experiments = %q, want %q", got, want)
+ }
+ }
+}
+
+func TestGetJobOptions_NoStagingLocation(t *testing.T) {
+ *stagingLocation = ""
+ *gcpopts.Project = "testProject"
+ *gcpopts.Region = "testRegion"
+
+ _, err := getJobOptions(nil)
+ if err == nil {
+ t.Fatalf("getJobOptions() returned error nil, want an error")
+ }
+}
+
+func TestGetJobOptions_InvalidAutoscaling(t *testing.T) {
+ *labels = `{"label1": "val1", "label2": "val2"}`
+ *stagingLocation = "gs://testStagingLocation"
+ *autoscalingAlgorithm = "INVALID"
+ *minCPUPlatform = "testPlatform"
+
+ *gcpopts.Project = "testProject"
+ *gcpopts.Region = "testRegion"
+
+ *jobopts.Experiments = "use_runner_v2,use_portable_job_submission"
+ *jobopts.JobName = "testJob"
+
+ _, err := getJobOptions(nil)
+ if err == nil {
+ t.Fatalf("getJobOptions() returned error nil, want an error")
+ }
+}
+
+func TestGetJobOptions_DockerNoImage(t *testing.T) {
+ *jobopts.EnvironmentType = "docker"
+ *jobopts.EnvironmentConfig = "testContainerImage"
+
+ if got, want := getContainerImage(nil), "testContainerImage"; got != want {
+ t.Fatalf("getContainerImage() = %q, want %q", got, want)
+ }
+}
+
+func TestGetJobOptions_DockerWithImage(t *testing.T) {
+ *jobopts.EnvironmentType = "docker"
+ *jobopts.EnvironmentConfig = "testContainerImage"
+ *image = "testContainerImageOverride"
+
+ if got, want := getContainerImage(nil), "testContainerImageOverride"; got != want {
+ t.Fatalf("getContainerImage() = %q, want %q", got, want)
+ }
+}
diff --git a/sdks/go/pkg/beam/runners/direct/direct_test.go b/sdks/go/pkg/beam/runners/direct/direct_test.go
new file mode 100644
index 0000000000000..b65d33f90919f
--- /dev/null
+++ b/sdks/go/pkg/beam/runners/direct/direct_test.go
@@ -0,0 +1,455 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package direct
+
+import (
+ "context"
+ "flag"
+ "fmt"
+ "os"
+ "reflect"
+ "sort"
+ "testing"
+
+ "github.com/apache/beam/sdks/v2/go/pkg/beam"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/core/metrics"
+ "github.com/google/go-cmp/cmp"
+)
+
+func executeWithT(ctx context.Context, t *testing.T, p *beam.Pipeline) (beam.PipelineResult, error) {
+ fmt.Println("startingTest - ", t.Name())
+ return Execute(ctx, p)
+}
+
+func init() {
+ beam.RegisterFunction(dofn1)
+ beam.RegisterFunction(dofn1x2)
+ beam.RegisterFunction(dofn1x5)
+ beam.RegisterFunction(dofn2x1)
+ beam.RegisterFunction(dofn3x1)
+ beam.RegisterFunction(dofn2x2KV)
+ beam.RegisterFunction(dofn2)
+ beam.RegisterFunction(dofnKV)
+ beam.RegisterFunction(dofnKV2)
+ beam.RegisterFunction(dofnGBK)
+ beam.RegisterFunction(dofnGBK2)
+ beam.RegisterType(reflect.TypeOf((*int64Check)(nil)))
+ beam.RegisterType(reflect.TypeOf((*stringCheck)(nil)))
+
+ beam.RegisterType(reflect.TypeOf((*testRow)(nil)))
+ beam.RegisterFunction(dofnKV3)
+ beam.RegisterFunction(dofnGBK3)
+
+ beam.RegisterFunction(dofn1Counter)
+ beam.RegisterFunction(dofnSink)
+}
+
+func dofn1(imp []byte, emit func(int64)) {
+ emit(1)
+ emit(2)
+ emit(3)
+}
+
+func dofn1x2(imp []byte, emitA func(int64), emitB func(int64)) {
+ emitA(1)
+ emitA(2)
+ emitA(3)
+ emitB(4)
+ emitB(5)
+ emitB(6)
+}
+
+func dofn1x5(imp []byte, emitA, emitB, emitC, emitD, emitE func(int64)) {
+ emitA(1)
+ emitB(2)
+ emitC(3)
+ emitD(4)
+ emitE(5)
+ emitA(6)
+ emitB(7)
+ emitC(8)
+ emitD(9)
+ emitE(10)
+}
+
+func dofn2x1(imp []byte, iter func(*int64) bool, emit func(int64)) {
+ var v, sum int64
+ for iter(&v) {
+ sum += v
+ }
+ emit(sum)
+}
+
+func dofn3x1(sum int64, iter1, iter2 func(*int64) bool, emit func(int64)) {
+ var v int64
+ for iter1(&v) {
+ sum += v
+ }
+ for iter2(&v) {
+ sum += v
+ }
+ emit(sum)
+}
+
+func dofn2x2KV(imp []byte, iter func(*string, *int64) bool, emitK func(string), emitV func(int64)) {
+ var k string
+ var v, sum int64
+ for iter(&k, &v) {
+ sum += v
+ emitK(k)
+ }
+ emitV(sum)
+}
+
+// int64Check validates that within a single bundle,
+// we received the expected int64 values.
+type int64Check struct {
+ Name string
+ Want []int
+ got []int
+}
+
+func (fn *int64Check) ProcessElement(v int64, _ func(int64)) {
+ fn.got = append(fn.got, int(v))
+}
+
+func (fn *int64Check) FinishBundle(_ func(int64)) error {
+ sort.Ints(fn.got)
+ sort.Ints(fn.Want)
+ if d := cmp.Diff(fn.Want, fn.got); d != "" {
+ return fmt.Errorf("int64Check[%v] (-want, +got): %v", fn.Name, d)
+ }
+ return nil
+}
+
+// stringCheck validates that within a single bundle,
+// we received the expected string values.
+type stringCheck struct {
+ Name string
+ Want []string
+ got []string
+}
+
+func (fn *stringCheck) ProcessElement(v string, _ func(string)) {
+ fn.got = append(fn.got, v)
+}
+
+func (fn *stringCheck) FinishBundle(_ func(string)) error {
+ sort.Strings(fn.got)
+ sort.Strings(fn.Want)
+ if d := cmp.Diff(fn.Want, fn.got); d != "" {
+ return fmt.Errorf("stringCheck[%v] (-want, +got): %v", fn.Name, d)
+ }
+ return nil
+}
+
+func dofn2(v int64, emit func(int64)) {
+ emit(v + 1)
+}
+
+func dofnKV(imp []byte, emit func(string, int64)) {
+ emit("a", 1)
+ emit("b", 2)
+ emit("a", 3)
+ emit("b", 4)
+ emit("a", 5)
+ emit("b", 6)
+}
+
+func dofnKV2(imp []byte, emit func(int64, string)) {
+ emit(1, "a")
+ emit(2, "b")
+ emit(1, "a")
+ emit(2, "b")
+ emit(1, "a")
+ emit(2, "b")
+}
+
+func dofnGBK(k string, vs func(*int64) bool, emit func(int64)) {
+ var v, sum int64
+ for vs(&v) {
+ sum += v
+ }
+ emit(sum)
+}
+
+func dofnGBK2(k int64, vs func(*string) bool, emit func(string)) {
+ var v, sum string
+ for vs(&v) {
+ sum += v
+ }
+ emit(sum)
+}
+
+type testRow struct {
+ A string
+ B int64
+}
+
+func dofnKV3(imp []byte, emit func(testRow, testRow)) {
+ emit(testRow{"a", 1}, testRow{"a", 1})
+}
+
+func dofnGBK3(k testRow, vs func(*testRow) bool, emit func(string)) {
+ var v testRow
+ vs(&v)
+ emit(fmt.Sprintf("%v: %v", k, v))
+}
+
+const (
+ ns = "directtest"
+)
+
+func dofnSink(ctx context.Context, _ []byte) {
+ beam.NewCounter(ns, "sunk").Inc(ctx, 73)
+}
+
+func dofn1Counter(ctx context.Context, _ []byte, emit func(int64)) {
+ beam.NewCounter(ns, "count").Inc(ctx, 1)
+}
+
+func TestRunner_Pipelines(t *testing.T) {
+ t.Run("simple", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col := beam.ParDo(s, dofn1, imp)
+ beam.ParDo(s, &int64Check{
+ Name: "simple",
+ Want: []int{1, 2, 3},
+ }, col)
+
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("sequence", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ beam.Seq(s, imp, dofn1, dofn2, dofn2, dofn2, &int64Check{Name: "sequence", Want: []int{4, 5, 6}})
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("gbk", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col := beam.ParDo(s, dofnKV, imp)
+ gbk := beam.GroupByKey(s, col)
+ beam.Seq(s, gbk, dofnGBK, &int64Check{Name: "gbk", Want: []int{9, 12}})
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("gbk2", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col := beam.ParDo(s, dofnKV2, imp)
+ gbk := beam.GroupByKey(s, col)
+ beam.Seq(s, gbk, dofnGBK2, &stringCheck{Name: "gbk2", Want: []string{"aaa", "bbb"}})
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("gbk3", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col := beam.ParDo(s, dofnKV3, imp)
+ gbk := beam.GroupByKey(s, col)
+ beam.Seq(s, gbk, dofnGBK3, &stringCheck{Name: "gbk3", Want: []string{"{a 1}: {a 1}"}})
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("sink_nooutputs", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ beam.ParDo0(s, dofnSink, imp)
+ pr, err := executeWithT(context.Background(), t, p)
+ if err != nil {
+ t.Fatal(err)
+ }
+ qr := pr.Metrics().Query(func(sr metrics.SingleResult) bool {
+ return sr.Name() == "sunk"
+ })
+ if got, want := qr.Counters()[0].Committed, int64(73); got != want {
+ t.Errorf("pr.Metrics.Query(Name = \"sunk\")).Committed = %v, want %v", got, want)
+ }
+ })
+ t.Run("fork_impulse", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1 := beam.ParDo(s, dofn1, imp)
+ col2 := beam.ParDo(s, dofn1, imp)
+ beam.ParDo(s, &int64Check{
+ Name: "fork check1",
+ Want: []int{1, 2, 3},
+ }, col1)
+ beam.ParDo(s, &int64Check{
+ Name: "fork check2",
+ Want: []int{1, 2, 3},
+ }, col2)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("fork_postDoFn", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col := beam.ParDo(s, dofn1, imp)
+ beam.ParDo(s, &int64Check{
+ Name: "fork check1",
+ Want: []int{1, 2, 3},
+ }, col)
+ beam.ParDo(s, &int64Check{
+ Name: "fork check2",
+ Want: []int{1, 2, 3},
+ }, col)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("fork_multipleOutputs1", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1, col2 := beam.ParDo2(s, dofn1x2, imp)
+ beam.ParDo(s, &int64Check{
+ Name: "col1",
+ Want: []int{1, 2, 3},
+ }, col1)
+ beam.ParDo(s, &int64Check{
+ Name: "col2",
+ Want: []int{4, 5, 6},
+ }, col2)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("fork_multipleOutputs2", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1, col2, col3, col4, col5 := beam.ParDo5(s, dofn1x5, imp)
+ beam.ParDo(s, &int64Check{
+ Name: "col1",
+ Want: []int{1, 6},
+ }, col1)
+ beam.ParDo(s, &int64Check{
+ Name: "col2",
+ Want: []int{2, 7},
+ }, col2)
+ beam.ParDo(s, &int64Check{
+ Name: "col3",
+ Want: []int{3, 8},
+ }, col3)
+ beam.ParDo(s, &int64Check{
+ Name: "col4",
+ Want: []int{4, 9},
+ }, col4)
+ beam.ParDo(s, &int64Check{
+ Name: "col5",
+ Want: []int{5, 10},
+ }, col5)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("flatten", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1 := beam.ParDo(s, dofn1, imp)
+ col2 := beam.ParDo(s, dofn1, imp)
+ flat := beam.Flatten(s, col1, col2)
+ beam.ParDo(s, &int64Check{
+ Name: "flatten check",
+ Want: []int{1, 1, 2, 2, 3, 3},
+ }, flat)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("sideinput_iterable", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1 := beam.ParDo(s, dofn1, imp)
+ sum := beam.ParDo(s, dofn2x1, imp, beam.SideInput{Input: col1})
+ beam.ParDo(s, &int64Check{
+ Name: "iter sideinput check",
+ Want: []int{6},
+ }, sum)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ t.Run("sideinput_iterableKV", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col1 := beam.ParDo(s, dofnKV, imp)
+ keys, sum := beam.ParDo2(s, dofn2x2KV, imp, beam.SideInput{Input: col1})
+ beam.ParDo(s, &stringCheck{
+ Name: "iterKV sideinput check K",
+ Want: []string{"a", "a", "a", "b", "b", "b"},
+ }, keys)
+ beam.ParDo(s, &int64Check{
+ Name: "iterKV sideinput check V",
+ Want: []int{21},
+ }, sum)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+ // Validates the waiting on side input readiness in buffer.
+ t.Run("sideinput_2iterable", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ col0 := beam.ParDo(s, dofn1, imp)
+ col1 := beam.ParDo(s, dofn1, imp)
+ col2 := beam.ParDo(s, dofn2, col1)
+ sum := beam.ParDo(s, dofn3x1, col0, beam.SideInput{Input: col1}, beam.SideInput{Input: col2})
+ beam.ParDo(s, &int64Check{
+ Name: "iter sideinput check",
+ Want: []int{16, 17, 18},
+ }, sum)
+ if _, err := executeWithT(context.Background(), t, p); err != nil {
+ t.Fatal(err)
+ }
+ })
+}
+
+func TestRunner_Metrics(t *testing.T) {
+ t.Run("counter", func(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ imp := beam.Impulse(s)
+ beam.ParDo(s, dofn1Counter, imp)
+ pr, err := executeWithT(context.Background(), t, p)
+ if err != nil {
+ t.Fatal(err)
+ }
+ qr := pr.Metrics().Query(func(sr metrics.SingleResult) bool {
+ return sr.Name() == "count"
+ })
+ if got, want := qr.Counters()[0].Committed, int64(1); got != want {
+ t.Errorf("pr.Metrics.Query(Name = \"count\")).Committed = %v, want %v", got, want)
+ }
+ })
+}
+
+func TestMain(m *testing.M) {
+ // Can't use ptest since it causes a loop.
+ if !flag.Parsed() {
+ flag.Parse()
+ }
+ beam.Init()
+ os.Exit(m.Run())
+}
diff --git a/sdks/go/pkg/beam/util/errorx/guarded.go b/sdks/go/pkg/beam/util/errorx/guarded.go
index cc0b07b4eee1f..186885b717c62 100644
--- a/sdks/go/pkg/beam/util/errorx/guarded.go
+++ b/sdks/go/pkg/beam/util/errorx/guarded.go
@@ -39,7 +39,7 @@ func (g *GuardedError) TrySetError(err error) bool {
g.mu.Lock()
defer g.mu.Unlock()
- upd := g.err == nil
+ upd := (g.err == nil)
if upd {
g.err = err
}
diff --git a/sdks/go/pkg/beam/util/errorx/guarded_test.go b/sdks/go/pkg/beam/util/errorx/guarded_test.go
new file mode 100644
index 0000000000000..1e9c9b2247060
--- /dev/null
+++ b/sdks/go/pkg/beam/util/errorx/guarded_test.go
@@ -0,0 +1,45 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errorx
+
+import (
+ "errors"
+ "testing"
+)
+
+func TestTrySetError(t *testing.T) {
+ var gu GuardedError
+ setErr := errors.New("attempted error")
+ success := gu.TrySetError(setErr)
+ if !success {
+ t.Fatal("got false when trying to set error, want true")
+ }
+ if got, want := gu.Error(), setErr; got != want {
+ t.Errorf("got error %v when checking message, want %v", got, want)
+ }
+}
+
+func TestTrySetError_bad(t *testing.T) {
+ setErr := errors.New("old error")
+ gu := &GuardedError{err: setErr}
+ success := gu.TrySetError(setErr)
+ if success {
+ t.Fatal("got true when trying to set error, want false")
+ }
+ if got, want := gu.Error(), setErr; got != want {
+ t.Errorf("got error %v when checking message, want %v", got, want)
+ }
+}
diff --git a/playground/backend/internal/utils/validators_utils.go b/sdks/go/pkg/beam/util/gcsx/example_test.go
similarity index 54%
rename from playground/backend/internal/utils/validators_utils.go
rename to sdks/go/pkg/beam/util/gcsx/example_test.go
index 5d9406e912df7..52664a054a878 100644
--- a/playground/backend/internal/utils/validators_utils.go
+++ b/sdks/go/pkg/beam/util/gcsx/example_test.go
@@ -13,28 +13,35 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package utils
+package gcsx_test
import (
- pb "beam.apache.org/playground/backend/internal/api/v1"
- "beam.apache.org/playground/backend/internal/validators"
- "fmt"
+ "context"
+ "time"
+
+ "cloud.google.com/go/storage"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/util/gcsx"
)
-// GetValidators returns slice of validators.Validator according to sdk
-func GetValidators(sdk pb.Sdk, filepath string) (*[]validators.Validator, error) {
- var val *[]validators.Validator
- switch sdk {
- case pb.Sdk_SDK_JAVA:
- val = validators.GetJavaValidators(filepath)
- case pb.Sdk_SDK_GO:
- val = validators.GetGoValidators(filepath)
- case pb.Sdk_SDK_PYTHON:
- val = validators.GetPyValidators(filepath)
- case pb.Sdk_SDK_SCIO:
- val = validators.GetScioValidators(filepath)
- default:
- return nil, fmt.Errorf("incorrect sdk: %s", sdk)
+func Example() {
+ ctx := context.Background()
+ c, err := gcsx.NewClient(ctx, storage.ScopeReadOnly)
+ if err != nil {
+ // do something
+ }
+
+ buckets, object, err := gcsx.ParseObject("gs://some-bucket/some-object")
+ if err != nil {
+ // do something
}
- return val, nil
+
+ ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
+ defer cancel()
+
+ bytes, err := gcsx.ReadObject(ctx, c, buckets, object)
+ if err != nil {
+ // do something
+ }
+
+ _ = bytes
}
diff --git a/sdks/go/pkg/beam/util/gcsx/gcs_test.go b/sdks/go/pkg/beam/util/gcsx/gcs_test.go
index 52664a054a878..90fb4b59f2fe8 100644
--- a/sdks/go/pkg/beam/util/gcsx/gcs_test.go
+++ b/sdks/go/pkg/beam/util/gcsx/gcs_test.go
@@ -13,35 +13,86 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-package gcsx_test
+package gcsx
import (
- "context"
- "time"
+ "strings"
+ "testing"
- "cloud.google.com/go/storage"
- "github.com/apache/beam/sdks/v2/go/pkg/beam/util/gcsx"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/internal/errors"
)
-func Example() {
- ctx := context.Background()
- c, err := gcsx.NewClient(ctx, storage.ScopeReadOnly)
- if err != nil {
- // do something
+func TestMakeObject(t *testing.T) {
+ if got, want := MakeObject("some-bucket", "some/path"), "gs://some-bucket/some/path"; got != want {
+ t.Fatalf("MakeObject() Got: %v Want: %v", got, want)
}
+}
- buckets, object, err := gcsx.ParseObject("gs://some-bucket/some-object")
- if err != nil {
- // do something
+func TestParseObject(t *testing.T) {
+ tests := []struct {
+ object string
+ bucket string
+ path string
+ err error
+ }{
+ {
+ object: "gs://some-bucket/some-object",
+ bucket: "some-bucket",
+ path: "some-object",
+ err: nil,
+ },
+ {
+ object: "gs://some-bucket",
+ bucket: "some-bucket",
+ path: "",
+ err: nil,
+ },
+ {
+ object: "gs://",
+ bucket: "",
+ path: "",
+ err: errors.Errorf("object gs:// must have bucket"),
+ },
+ {
+ object: "other://some-bucket/some-object",
+ bucket: "",
+ path: "",
+ err: errors.Errorf("object other://some-bucket/some-object must have 'gs' scheme"),
+ },
}
- ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
- defer cancel()
-
- bytes, err := gcsx.ReadObject(ctx, c, buckets, object)
- if err != nil {
- // do something
+ for _, test := range tests {
+ if bucket, path, err := ParseObject(test.object); bucket != test.bucket || path != test.path || (err != nil && test.err == nil) || (err == nil && test.err != nil) {
+ t.Errorf("ParseObject(%v) Got: %v, %v, %v Want: %v, %v, %v", test.object, bucket, path, err, test.bucket, test.path, test.err)
+ }
}
+}
- _ = bytes
+func TestJoin(t *testing.T) {
+ tests := []struct {
+ object string
+ elms []string
+ result string
+ }{
+ {
+ object: "gs://some-bucket/some-object",
+ elms: []string{"some/path", "more/pathing"},
+ result: "gs://some-bucket/some-object/some/path/more/pathing",
+ },
+ {
+ object: "gs://some-bucket/some-object",
+ elms: []string{"some/path"},
+ result: "gs://some-bucket/some-object/some/path",
+ },
+ {
+ object: "gs://some-bucket/some-object",
+ elms: []string{},
+ result: "gs://some-bucket/some-object",
+ },
+ }
+ for _, test := range tests {
+ if got, want := Join(test.object, test.elms...), test.result; got != want {
+ t.Errorf("Join(%v, %v) Got: %v Want: %v", test.object, strings.Join(test.elms, ", "), got, want)
+ }
+ }
}
diff --git a/sdks/go/pkg/beam/util/harnessopts/cache.go b/sdks/go/pkg/beam/util/harnessopts/cache.go
index 117f45f008255..2ad538b73af25 100644
--- a/sdks/go/pkg/beam/util/harnessopts/cache.go
+++ b/sdks/go/pkg/beam/util/harnessopts/cache.go
@@ -13,7 +13,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
-// Package harnessopts defines user-facing entrypoints into Beam hooks
+// Package harnessopts defines user-facing entrypoints into Beam hooks
// affecting the SDK harness. Call these functions at any time before
// submitting your pipeline to a runner, for that pipeline's workers to be affected.
package harnessopts
diff --git a/sdks/go/pkg/beam/util/syscallx/syscall_default.go b/sdks/go/pkg/beam/util/syscallx/syscall_default.go
index a85cd3f0e7e23..55756d0dbd44c 100644
--- a/sdks/go/pkg/beam/util/syscallx/syscall_default.go
+++ b/sdks/go/pkg/beam/util/syscallx/syscall_default.go
@@ -13,6 +13,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+//go:build !linux
// +build !linux
package syscallx
diff --git a/sdks/go/pkg/beam/util/syscallx/syscall_linux.go b/sdks/go/pkg/beam/util/syscallx/syscall_linux.go
index c639f876bebc1..379437ad0a17c 100644
--- a/sdks/go/pkg/beam/util/syscallx/syscall_linux.go
+++ b/sdks/go/pkg/beam/util/syscallx/syscall_linux.go
@@ -13,6 +13,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+//go:build linux
// +build linux
package syscallx
diff --git a/sdks/go/pkg/beam/x/debug/head_test.go b/sdks/go/pkg/beam/x/debug/head_test.go
new file mode 100644
index 0000000000000..8aa5b41545daa
--- /dev/null
+++ b/sdks/go/pkg/beam/x/debug/head_test.go
@@ -0,0 +1,44 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "testing"
+
+ "github.com/apache/beam/sdks/v2/go/pkg/beam"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/passert"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/ptest"
+)
+
+func TestHead(t *testing.T) {
+ p, s, sequence := ptest.CreateList([]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})
+ headSequence := Head(s, sequence, 5)
+ passert.Count(s, headSequence, "NumElements", 5)
+ passert.Equals(s, headSequence, 1, 2, 3, 4, 5)
+
+ ptest.RunAndValidate(t, p)
+}
+
+func TestHead_KV(t *testing.T) {
+ p, s, sequence := ptest.CreateList([]int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})
+ kvSequence := beam.AddFixedKey(s, sequence)
+ headKvSequence := Head(s, kvSequence, 5)
+ headSequence := beam.DropKey(s, headKvSequence)
+ passert.Count(s, headSequence, "NumElements", 5)
+ passert.Equals(s, headSequence, 1, 2, 3, 4, 5)
+
+ ptest.RunAndValidate(t, p)
+}
diff --git a/sdks/go/pkg/beam/x/debug/print_test.go b/sdks/go/pkg/beam/x/debug/print_test.go
new file mode 100644
index 0000000000000..0bbdee0b6fb9c
--- /dev/null
+++ b/sdks/go/pkg/beam/x/debug/print_test.go
@@ -0,0 +1,101 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "bytes"
+ "log"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/apache/beam/sdks/v2/go/pkg/beam"
+ "github.com/apache/beam/sdks/v2/go/pkg/beam/testing/ptest"
+)
+
+func TestPrint(t *testing.T) {
+ p, s, sequence := ptest.CreateList([]string{"abc", "def", "ghi"})
+ Print(s, sequence)
+
+ output := captureRunLogging(p)
+ if !strings.Contains(output, "Elm: abc") {
+ t.Errorf("Print() should contain \"Elm: abc\", got: %v", output)
+ }
+ if !strings.Contains(output, "Elm: def") {
+ t.Errorf("Print() should contain \"Elm: def\", got: %v", output)
+ }
+ if !strings.Contains(output, "Elm: ghi") {
+ t.Errorf("Print() should contain \"Elm: ghi\", got: %v", output)
+ }
+}
+
+func TestPrintf(t *testing.T) {
+ p, s := beam.NewPipelineWithRoot()
+ sequence := beam.Create(s, "abc", "def", "ghi")
+ Printf(s, "myformatting - %v", sequence)
+
+ output := captureRunLogging(p)
+ if !strings.Contains(output, "myformatting - abc") {
+ t.Errorf("Printf() should contain \"myformatting - abc\", got: %v", output)
+ }
+ if !strings.Contains(output, "myformatting - def") {
+ t.Errorf("Printf() should contain \"myformatting - def\", got: %v", output)
+ }
+ if !strings.Contains(output, "myformatting - ghi") {
+ t.Errorf("Printf() should contain \"myformatting - ghi\", got: %v", output)
+ }
+}
+
+func TestPrint_KV(t *testing.T) {
+ p, s, sequence := ptest.CreateList([]string{"abc", "def", "ghi"})
+ kvSequence := beam.AddFixedKey(s, sequence)
+ Print(s, kvSequence)
+
+ output := captureRunLogging(p)
+ if !strings.Contains(output, "Elm: (0,abc)") {
+ t.Errorf("Print() should contain \"Elm: (0,abc)\", got: %v", output)
+ }
+ if !strings.Contains(output, "Elm: (0,def)") {
+ t.Errorf("Print() should contain \"Elm: (0,def)\", got: %v", output)
+ }
+ if !strings.Contains(output, "Elm: (0,ghi)") {
+ t.Errorf("Print() should contain \"Elm: (0,ghi)\", got: %v", output)
+ }
+}
+
+func TestPrint_CoGBK(t *testing.T) {
+ p, s, sequence := ptest.CreateList([]string{"abc", "def", "ghi"})
+ kvSequence := beam.AddFixedKey(s, sequence)
+ gbkSequence := beam.CoGroupByKey(s, kvSequence)
+ Print(s, gbkSequence)
+
+ output := captureRunLogging(p)
+ if !strings.Contains(output, "Elm: (0,[abc def ghi])") {
+ t.Errorf("Print() should contain \"Elm: (0,[abc def ghi])\", got: %v", output)
+ }
+}
+
+func captureRunLogging(p *beam.Pipeline) string {
+ // Pipe output to out
+ var out bytes.Buffer
+ log.SetOutput(&out)
+
+ ptest.Run(p)
+
+ // Return to original state
+ log.SetOutput(os.Stderr)
+ return out.String()
+}
diff --git a/sdks/java/container/license_scripts/dep_urls_java.yaml b/sdks/java/container/license_scripts/dep_urls_java.yaml
index 03a882303586a..57745777c26fd 100644
--- a/sdks/java/container/license_scripts/dep_urls_java.yaml
+++ b/sdks/java/container/license_scripts/dep_urls_java.yaml
@@ -42,7 +42,7 @@ jaxen:
'1.1.6':
type: "3-Clause BSD"
libraries-bom:
- '24.2.0':
+ '24.3.0':
license: "https://raw.githubusercontent.com/GoogleCloudPlatform/cloud-opensource-java/master/LICENSE"
type: "Apache License 2.0"
paranamer:
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
index 4cb7eb4eac45f..d014f08ac2b8c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java
@@ -36,11 +36,13 @@
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.deser.DefaultDeserializationContext;
import com.fasterxml.jackson.databind.deser.impl.MethodProperty;
+import com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.AnnotatedMethod;
import com.fasterxml.jackson.databind.introspect.AnnotationCollector;
import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
import com.fasterxml.jackson.databind.introspect.TypeResolutionContext;
+import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.node.TreeTraversingParser;
import com.fasterxml.jackson.databind.ser.DefaultSerializerProvider;
import com.fasterxml.jackson.databind.type.TypeBindings;
@@ -1730,21 +1732,23 @@ private static JsonDeserializer computeDeserializerForMethod(Method meth
BeanProperty prop = createBeanProperty(method);
AnnotatedMember annotatedMethod = prop.getMember();
+ DefaultDeserializationContext context = DESERIALIZATION_CONTEXT.get();
Object maybeDeserializerClass =
- DESERIALIZATION_CONTEXT
- .get()
- .getAnnotationIntrospector()
- .findDeserializer(annotatedMethod);
+ context.getAnnotationIntrospector().findDeserializer(annotatedMethod);
JsonDeserializer jsonDeserializer =
- DESERIALIZATION_CONTEXT
- .get()
- .deserializerInstance(annotatedMethod, maybeDeserializerClass);
+ context.deserializerInstance(annotatedMethod, maybeDeserializerClass);
if (jsonDeserializer == null) {
- jsonDeserializer =
- DESERIALIZATION_CONTEXT.get().findContextualValueDeserializer(prop.getType(), prop);
+ jsonDeserializer = context.findContextualValueDeserializer(prop.getType(), prop);
}
+
+ TypeDeserializer typeDeserializer =
+ context.getFactory().findTypeDeserializer(context.getConfig(), prop.getType());
+ if (typeDeserializer != null) {
+ jsonDeserializer = new TypeWrappedDeserializer(typeDeserializer, jsonDeserializer);
+ }
+
return jsonDeserializer;
} catch (JsonMappingException e) {
throw new RuntimeException(e);
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
index 4e6921b5226b8..25983caf2b15c 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
@@ -17,6 +17,11 @@
*/
package org.apache.beam.sdk.schemas;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -28,6 +33,7 @@
import org.apache.beam.model.pipeline.v1.SchemaApi.AtomicTypeValue;
import org.apache.beam.model.pipeline.v1.SchemaApi.FieldValue;
import org.apache.beam.model.pipeline.v1.SchemaApi.IterableTypeValue;
+import org.apache.beam.model.pipeline.v1.SchemaApi.LogicalTypeValue;
import org.apache.beam.model.pipeline.v1.SchemaApi.MapTypeEntry;
import org.apache.beam.model.pipeline.v1.SchemaApi.MapTypeValue;
import org.apache.beam.sdk.annotations.Experimental;
@@ -45,6 +51,7 @@
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
import org.checkerframework.checker.nullness.qual.Nullable;
/** Utility methods for translating schemas. */
@@ -319,6 +326,7 @@ private static FieldType fieldTypeFromProtoWithoutNullable(SchemaApi.FieldType p
fieldTypeFromProto(protoFieldType.getMapType().getValueType()));
case LOGICAL_TYPE:
String urn = protoFieldType.getLogicalType().getUrn();
+ SchemaApi.LogicalType logicalType = protoFieldType.getLogicalType();
Class extends LogicalType, ?>> logicalTypeClass = STANDARD_LOGICAL_TYPES.get(urn);
if (logicalTypeClass != null) {
try {
@@ -351,22 +359,21 @@ private static FieldType fieldTypeFromProtoWithoutNullable(SchemaApi.FieldType p
return FieldType.logicalType(
(LogicalType)
SerializableUtils.deserializeFromByteArray(
- protoFieldType.getLogicalType().getPayload().toByteArray(), "logicalType"));
+ logicalType.getPayload().toByteArray(), "logicalType"));
} else {
@Nullable FieldType argumentType = null;
@Nullable Object argumentValue = null;
- if (protoFieldType.getLogicalType().hasArgumentType()) {
- argumentType = fieldTypeFromProto(protoFieldType.getLogicalType().getArgumentType());
- argumentValue =
- fieldValueFromProto(argumentType, protoFieldType.getLogicalType().getArgument());
+ if (logicalType.hasArgumentType()) {
+ argumentType = fieldTypeFromProto(logicalType.getArgumentType());
+ argumentValue = fieldValueFromProto(argumentType, logicalType.getArgument());
}
return FieldType.logicalType(
new UnknownLogicalType(
urn,
- protoFieldType.getLogicalType().getPayload().toByteArray(),
+ logicalType.getPayload().toByteArray(),
argumentType,
argumentValue,
- fieldTypeFromProto(protoFieldType.getLogicalType().getRepresentation())));
+ fieldTypeFromProto(logicalType.getRepresentation())));
}
default:
throw new IllegalArgumentException(
@@ -393,6 +400,14 @@ public static Object rowFromProto(SchemaApi.Row row, FieldType fieldType) {
static SchemaApi.FieldValue fieldValueToProto(FieldType fieldType, Object value) {
FieldValue.Builder builder = FieldValue.newBuilder();
+ if (value == null) {
+ if (fieldType.getNullable()) {
+ return builder.build();
+ } else {
+ throw new RuntimeException("Null value found for field that doesn't support nulls.");
+ }
+ }
+
switch (fieldType.getTypeName()) {
case ARRAY:
return builder
@@ -411,26 +426,74 @@ static SchemaApi.FieldValue fieldValueToProto(FieldType fieldType, Object value)
.build();
case ROW:
return builder.setRowValue(rowToProto((Row) value)).build();
+ case DATETIME:
+ return builder
+ .setLogicalTypeValue(logicalTypeToProto(FieldType.INT64, fieldType, value))
+ .build();
+ case DECIMAL:
+ return builder
+ .setLogicalTypeValue(logicalTypeToProto(FieldType.BYTES, fieldType, value))
+ .build();
case LOGICAL_TYPE:
+ return builder
+ .setLogicalTypeValue(logicalTypeToProto(fieldType.getLogicalType(), value))
+ .build();
default:
return builder.setAtomicValue(primitiveRowFieldToProto(fieldType, value)).build();
}
}
+ /** Returns if the given field is null and throws exception if it is and can't be. */
+ static boolean isNullFieldValueFromProto(FieldType fieldType, boolean hasNonNullValue) {
+ if (!hasNonNullValue && !fieldType.getNullable()) {
+ throw new RuntimeException("FieldTypeValue has no value but the field cannot be null.");
+ }
+ return !hasNonNullValue;
+ }
+
static Object fieldValueFromProto(FieldType fieldType, SchemaApi.FieldValue value) {
switch (fieldType.getTypeName()) {
case ARRAY:
+ if (isNullFieldValueFromProto(fieldType, value.hasArrayValue())) {
+ return null;
+ }
return arrayValueFromProto(fieldType.getCollectionElementType(), value.getArrayValue());
case ITERABLE:
+ if (isNullFieldValueFromProto(fieldType, value.hasIterableValue())) {
+ return null;
+ }
return iterableValueFromProto(
fieldType.getCollectionElementType(), value.getIterableValue());
case MAP:
+ if (isNullFieldValueFromProto(fieldType, value.hasMapValue())) {
+ return null;
+ }
return mapFromProto(
fieldType.getMapKeyType(), fieldType.getMapValueType(), value.getMapValue());
case ROW:
+ if (isNullFieldValueFromProto(fieldType, value.hasRowValue())) {
+ return null;
+ }
return rowFromProto(value.getRowValue(), fieldType);
case LOGICAL_TYPE:
+ if (isNullFieldValueFromProto(fieldType, value.hasLogicalTypeValue())) {
+ return null;
+ }
+ return logicalTypeFromProto(fieldType.getLogicalType(), value);
+ case DATETIME:
+ if (isNullFieldValueFromProto(fieldType, value.hasLogicalTypeValue())) {
+ return null;
+ }
+ return logicalTypeFromProto(FieldType.INT64, fieldType, value.getLogicalTypeValue());
+ case DECIMAL:
+ if (isNullFieldValueFromProto(fieldType, value.hasLogicalTypeValue())) {
+ return null;
+ }
+ return logicalTypeFromProto(FieldType.BYTES, fieldType, value.getLogicalTypeValue());
default:
+ if (isNullFieldValueFromProto(fieldType, value.hasAtomicValue())) {
+ return null;
+ }
return primitiveFromProto(fieldType, value.getAtomicValue());
}
}
@@ -485,6 +548,74 @@ private static Object mapFromProto(
entry -> fieldValueFromProto(mapValueType, entry.getValue())));
}
+ /** Converts logical type value from proto using a default type coder. */
+ private static Object logicalTypeFromProto(
+ FieldType baseType, FieldType inputType, LogicalTypeValue value) {
+ try {
+ PipedInputStream in = new PipedInputStream();
+ DataOutputStream stream = new DataOutputStream(new PipedOutputStream(in));
+ switch (baseType.getTypeName()) {
+ case INT64:
+ stream.writeLong(value.getValue().getAtomicValue().getInt64());
+ break;
+ case BYTES:
+ stream.write(value.getValue().getAtomicValue().getBytes().toByteArray());
+ break;
+ default:
+ throw new UnsupportedOperationException(
+ "Unsupported underlying type for parsing logical type via coder.");
+ }
+ stream.close();
+ return SchemaCoderHelpers.coderForFieldType(inputType).decode(in);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /** Converts logical type value to a proto using a default type coder. */
+ private static LogicalTypeValue logicalTypeToProto(
+ FieldType baseType, FieldType inputType, Object value) {
+ try {
+ PipedInputStream in = new PipedInputStream();
+ PipedOutputStream out = new PipedOutputStream(in);
+ SchemaCoderHelpers.coderForFieldType(inputType).encode(value, out);
+ out.close(); // Close required for toByteArray.
+ Object baseObject;
+ switch (baseType.getTypeName()) {
+ case INT64:
+ baseObject = new DataInputStream(in).readLong();
+ break;
+ case BYTES:
+ baseObject = ByteStreams.toByteArray(in);
+ break;
+ default:
+ throw new UnsupportedOperationException(
+ "Unsupported underlying type for producing LogicalType via coder.");
+ }
+ return LogicalTypeValue.newBuilder()
+ .setValue(fieldValueToProto(baseType, baseObject))
+ .build();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static LogicalTypeValue logicalTypeToProto(LogicalType logicalType, Object value) {
+ return LogicalTypeValue.newBuilder()
+ .setValue(
+ fieldValueToProto(
+ logicalType.getBaseType(), SchemaUtils.toLogicalBaseType(logicalType, value)))
+ .build();
+ }
+
+ private static Object logicalTypeFromProto(
+ LogicalType logicalType, SchemaApi.FieldValue logicalValue) {
+ return SchemaUtils.toLogicalInputType(
+ logicalType,
+ fieldValueFromProto(
+ logicalType.getBaseType(), logicalValue.getLogicalTypeValue().getValue()));
+ }
+
private static AtomicTypeValue primitiveRowFieldToProto(FieldType fieldType, Object value) {
switch (fieldType.getTypeName()) {
case BYTE:
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaUtils.java
index ebddfb38afa77..da08269fd4b15 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaUtils.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaUtils.java
@@ -18,6 +18,7 @@
package org.apache.beam.sdk.schemas;
import org.apache.beam.sdk.schemas.Schema.FieldType;
+import org.apache.beam.sdk.schemas.Schema.LogicalType;
/** A set of utility functions for schemas. */
@SuppressWarnings({
@@ -101,4 +102,24 @@ static FieldType widenNullableTypes(FieldType fieldType1, FieldType fieldType2)
}
return result.withNullable(fieldType1.getNullable() || fieldType2.getNullable());
}
+
+ /**
+ * Returns the base type given a logical type and the input type.
+ *
+ * This function can be used to handle logical types without knowing InputT or BaseT.
+ */
+ public static BaseT toLogicalBaseType(
+ LogicalType logicalType, InputT inputType) {
+ return logicalType.toBaseType(inputType);
+ }
+
+ /**
+ * Returns the input type given a logical type and the base type.
+ *
+ * This function can be used to handle logical types without knowing InputT or BaseT.
+ */
+ public static InputT toLogicalInputType(
+ LogicalType logicalType, BaseT baseType) {
+ return logicalType.toInputType(baseType);
+ }
}
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/Row.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/Row.java
index 9dd02d32c2f34..f7f434d02c473 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/Row.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/Row.java
@@ -43,6 +43,7 @@
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.Schema.FieldType;
import org.apache.beam.sdk.schemas.Schema.TypeName;
+import org.apache.beam.sdk.schemas.SchemaUtils;
import org.apache.beam.sdk.values.RowUtils.CapturingRowCases;
import org.apache.beam.sdk.values.RowUtils.FieldOverride;
import org.apache.beam.sdk.values.RowUtils.FieldOverrides;
@@ -460,7 +461,10 @@ public static boolean deepEquals(Object a, Object b, Schema.FieldType fieldType)
if (a == null || b == null) {
return a == b;
} else if (fieldType.getTypeName() == TypeName.LOGICAL_TYPE) {
- return deepEquals(a, b, fieldType.getLogicalType().getBaseType());
+ return deepEquals(
+ SchemaUtils.toLogicalBaseType(fieldType.getLogicalType(), a),
+ SchemaUtils.toLogicalBaseType(fieldType.getLogicalType(), b),
+ fieldType.getLogicalType().getBaseType());
} else if (fieldType.getTypeName() == Schema.TypeName.BYTES) {
return Arrays.equals((byte[]) a, (byte[]) b);
} else if (fieldType.getTypeName() == TypeName.ARRAY) {
@@ -598,6 +602,9 @@ public String toString(boolean includeFieldNames) {
}
private String toString(Schema.FieldType fieldType, Object value, boolean includeFieldNames) {
+ if (value == null) {
+ return "";
+ }
StringBuilder builder = new StringBuilder();
switch (fieldType.getTypeName()) {
case ARRAY:
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
index 94fd3f41faacd..ffdfbc8681a13 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java
@@ -39,6 +39,8 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
@@ -60,7 +62,6 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
import org.apache.beam.model.jobmanagement.v1.JobApi.PipelineOptionDescriptor;
import org.apache.beam.model.jobmanagement.v1.JobApi.PipelineOptionType;
@@ -1070,6 +1071,53 @@ public void testComplexTypes() {
assertEquals("value2", options.getObjectValue().get().value2);
}
+ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME)
+ @JsonSubTypes({
+ @JsonSubTypes.Type(value = PolymorphicTypeOne.class, name = "one"),
+ @JsonSubTypes.Type(value = PolymorphicTypeTwo.class, name = "two")
+ })
+ public abstract static class PolymorphicType {
+ String key;
+
+ @JsonProperty("key")
+ public String getKey() {
+ return key;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+ }
+
+ public static class PolymorphicTypeOne extends PolymorphicType {}
+
+ public static class PolymorphicTypeTwo extends PolymorphicType {}
+
+ public interface PolymorphicTypes extends PipelineOptions {
+ PolymorphicType getObject();
+
+ void setObject(PolymorphicType value);
+
+ ValueProvider getObjectValue();
+
+ void setObjectValue(ValueProvider value);
+ }
+
+ @Test
+ public void testPolymorphicType() {
+ String[] args =
+ new String[] {
+ "--object={\"key\":\"value\",\"@type\":\"one\"}",
+ "--objectValue={\"key\":\"value\",\"@type\":\"two\"}"
+ };
+ PolymorphicTypes options = PipelineOptionsFactory.fromArgs(args).as(PolymorphicTypes.class);
+ assertEquals("value", options.getObject().key);
+ assertEquals(PolymorphicTypeOne.class, options.getObject().getClass());
+
+ assertEquals("value", options.getObjectValue().get().key);
+ assertEquals(PolymorphicTypeTwo.class, options.getObjectValue().get().getClass());
+ }
+
@Test
public void testMissingArgument() {
String[] args = new String[] {};
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java
index 9f1f7d47efa7c..2c0cadb45eaac 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/schemas/SchemaTranslationTest.java
@@ -22,6 +22,8 @@
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThrows;
+import java.math.BigDecimal;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -36,12 +38,14 @@
import org.apache.beam.model.pipeline.v1.SchemaApi.LogicalType;
import org.apache.beam.sdk.schemas.Schema.Field;
import org.apache.beam.sdk.schemas.Schema.FieldType;
+import org.apache.beam.sdk.schemas.logicaltypes.DateTime;
import org.apache.beam.sdk.schemas.logicaltypes.FixedBytes;
import org.apache.beam.sdk.schemas.logicaltypes.MicrosInstant;
import org.apache.beam.sdk.values.Row;
import org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
+import org.joda.time.Instant;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
@@ -173,6 +177,9 @@ public static Iterable data() {
.add(
Schema.of(
Field.of("null_argument", FieldType.logicalType(new NullArgumentLogicalType()))))
+ .add(Schema.of(Field.of("logical_argument", FieldType.logicalType(new DateTime()))))
+ .add(
+ Schema.of(Field.of("single_arg_argument", FieldType.logicalType(FixedBytes.of(100)))))
.build();
}
@@ -290,6 +297,72 @@ public void fromProtoAndToProto() throws Exception {
}
}
+ /** Tests round-trip proto encodings for {@link Row}. */
+ @RunWith(Parameterized.class)
+ public static class RowToFromProtoTest {
+
+ public static Row simpleRow(FieldType type, Object value) {
+ return Row.withSchema(Schema.of(Field.of("s", type))).addValue(value).build();
+ }
+
+ public static Row simpleNullRow(FieldType type) {
+ return Row.withSchema(Schema.of(Field.nullable("s", type))).addValue(null).build();
+ }
+
+ @Parameters(name = "{index}: {0}")
+ public static Iterable data() {
+ Map map = new HashMap<>();
+ map.put("string", 42);
+ List list = new ArrayList<>();
+ list.add("string");
+ Schema schema =
+ Schema.builder()
+ .addField("field_one", FieldType.STRING)
+ .addField("field_two", FieldType.INT32)
+ .build();
+ Row row = Row.withSchema(schema).addValue("value").addValue(42).build();
+
+ return ImmutableList.builder()
+ .add(simpleRow(FieldType.STRING, "string"))
+ .add(simpleRow(FieldType.BOOLEAN, true))
+ .add(simpleRow(FieldType.BYTE, (byte) 12))
+ .add(simpleRow(FieldType.INT16, (short) 12))
+ .add(simpleRow(FieldType.INT32, 12))
+ .add(simpleRow(FieldType.INT64, 12L))
+ .add(simpleRow(FieldType.BYTES, new byte[] {0x42, 0x69, 0x00}))
+ .add(simpleRow(FieldType.FLOAT, (float) 12))
+ .add(simpleRow(FieldType.DOUBLE, 12.0))
+ .add(simpleRow(FieldType.map(FieldType.STRING, FieldType.INT32), map))
+ .add(simpleRow(FieldType.array(FieldType.STRING), list))
+ .add(simpleRow(FieldType.row(row.getSchema()), row))
+ .add(simpleRow(FieldType.DATETIME, new Instant(23L)))
+ .add(simpleRow(FieldType.DECIMAL, BigDecimal.valueOf(100000)))
+ .add(simpleRow(FieldType.logicalType(new NullArgumentLogicalType()), "str"))
+ .add(simpleRow(FieldType.logicalType(new DateTime()), LocalDateTime.of(2000, 1, 3, 3, 1)))
+ .add(simpleNullRow(FieldType.STRING))
+ .add(simpleNullRow(FieldType.INT32))
+ .add(simpleNullRow(FieldType.map(FieldType.STRING, FieldType.INT32)))
+ .add(simpleNullRow(FieldType.array(FieldType.STRING)))
+ .add(simpleNullRow(FieldType.row(row.getSchema())))
+ .add(simpleNullRow(FieldType.logicalType(new NullArgumentLogicalType())))
+ .add(simpleNullRow(FieldType.logicalType(new DateTime())))
+ .add(simpleNullRow(FieldType.DECIMAL))
+ .add(simpleNullRow(FieldType.DATETIME))
+ .build();
+ }
+
+ @Parameter(0)
+ public Row row;
+
+ @Test
+ public void toAndFromProto() throws Exception {
+ SchemaApi.Row rowProto = SchemaTranslation.rowToProto(row);
+ Row decodedRow =
+ (Row) SchemaTranslation.rowFromProto(rowProto, FieldType.row(row.getSchema()));
+ assertThat(decodedRow, equalTo(row));
+ }
+ }
+
/** Tests that we raise helpful errors when decoding bad {@link Schema} protos. */
@RunWith(JUnit4.class)
public static class DecodeErrorTest {
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
index e9a9e218911c0..4b172f169fb2a 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
@@ -18,103 +18,121 @@
package org.apache.beam.sdk.fn.channel;
import java.net.SocketAddress;
+import java.util.Collections;
import java.util.List;
import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ClientInterceptor;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.inprocess.InProcessChannelBuilder;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.netty.NettyChannelBuilder;
import org.apache.beam.vendor.grpc.v1p43p2.io.netty.channel.epoll.EpollDomainSocketChannel;
import org.apache.beam.vendor.grpc.v1p43p2.io.netty.channel.epoll.EpollEventLoopGroup;
import org.apache.beam.vendor.grpc.v1p43p2.io.netty.channel.epoll.EpollSocketChannel;
import org.apache.beam.vendor.grpc.v1p43p2.io.netty.channel.unix.DomainSocketAddress;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
-/** A Factory which creates an underlying {@link ManagedChannel} implementation. */
-public abstract class ManagedChannelFactory {
+/** A Factory which creates {@link ManagedChannel} instances. */
+public class ManagedChannelFactory {
+ /**
+ * Creates a {@link ManagedChannel} relying on the {@link ManagedChannelBuilder} to choose the
+ * channel type.
+ */
public static ManagedChannelFactory createDefault() {
- return new Default();
+ return new ManagedChannelFactory(Type.DEFAULT, Collections.emptyList(), false);
}
+ /**
+ * Creates a {@link ManagedChannelFactory} backed by an {@link EpollDomainSocketChannel} if the
+ * address is a {@link DomainSocketAddress}. Otherwise creates a {@link ManagedChannel} backed by
+ * an {@link EpollSocketChannel}.
+ */
public static ManagedChannelFactory createEpoll() {
org.apache.beam.vendor.grpc.v1p43p2.io.netty.channel.epoll.Epoll.ensureAvailability();
- return new Epoll();
+ return new ManagedChannelFactory(Type.EPOLL, Collections.emptyList(), false);
}
- public ManagedChannel forDescriptor(ApiServiceDescriptor apiServiceDescriptor) {
- return builderFor(apiServiceDescriptor).build();
+ /** Creates a {@link ManagedChannel} using an in-process channel. */
+ public static ManagedChannelFactory createInProcess() {
+ return new ManagedChannelFactory(Type.IN_PROCESS, Collections.emptyList(), false);
}
- /** Create a {@link ManagedChannelBuilder} for the provided {@link ApiServiceDescriptor}. */
- protected abstract ManagedChannelBuilder> builderFor(ApiServiceDescriptor descriptor);
+ public ManagedChannel forDescriptor(ApiServiceDescriptor apiServiceDescriptor) {
+ ManagedChannelBuilder> channelBuilder;
+ switch (type) {
+ case EPOLL:
+ SocketAddress address = SocketAddressFactory.createFrom(apiServiceDescriptor.getUrl());
+ channelBuilder =
+ NettyChannelBuilder.forAddress(address)
+ .channelType(
+ address instanceof DomainSocketAddress
+ ? EpollDomainSocketChannel.class
+ : EpollSocketChannel.class)
+ .eventLoopGroup(new EpollEventLoopGroup());
+ break;
- /**
- * Returns a {@link ManagedChannelFactory} like this one, but which will apply the provided {@link
- * ClientInterceptor ClientInterceptors} to any channel it creates.
- */
- public ManagedChannelFactory withInterceptors(List interceptors) {
- return new InterceptedManagedChannelFactory(this, interceptors);
- }
+ case DEFAULT:
+ channelBuilder = ManagedChannelBuilder.forTarget(apiServiceDescriptor.getUrl());
+ break;
- /**
- * Creates a {@link ManagedChannel} backed by an {@link EpollDomainSocketChannel} if the address
- * is a {@link DomainSocketAddress}. Otherwise creates a {@link ManagedChannel} backed by an
- * {@link EpollSocketChannel}.
- */
- private static class Epoll extends ManagedChannelFactory {
- @Override
- public ManagedChannelBuilder> builderFor(ApiServiceDescriptor apiServiceDescriptor) {
- SocketAddress address = SocketAddressFactory.createFrom(apiServiceDescriptor.getUrl());
- return NettyChannelBuilder.forAddress(address)
- .channelType(
- address instanceof DomainSocketAddress
- ? EpollDomainSocketChannel.class
- : EpollSocketChannel.class)
- .eventLoopGroup(new EpollEventLoopGroup())
- .usePlaintext()
- // Set the message size to max value here. The actual size is governed by the
- // buffer size in the layers above.
- .maxInboundMessageSize(Integer.MAX_VALUE);
+ case IN_PROCESS:
+ channelBuilder = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl());
+ break;
+
+ default:
+ throw new IllegalStateException("Unknown type " + type);
}
- }
- /**
- * Creates a {@link ManagedChannel} relying on the {@link ManagedChannelBuilder} to create
- * instances.
- */
- private static class Default extends ManagedChannelFactory {
- @Override
- public ManagedChannelBuilder> builderFor(ApiServiceDescriptor apiServiceDescriptor) {
- return ManagedChannelBuilder.forTarget(apiServiceDescriptor.getUrl())
- .usePlaintext()
- // Set the message size to max value here. The actual size is governed by the
- // buffer size in the layers above.
- .maxInboundMessageSize(Integer.MAX_VALUE);
+ channelBuilder =
+ channelBuilder
+ .usePlaintext()
+ // Set the message size to max value here. The actual size is governed by the
+ // buffer size in the layers above.
+ .maxInboundMessageSize(Integer.MAX_VALUE)
+ .intercept(interceptors);
+ if (directExecutor) {
+ channelBuilder = channelBuilder.directExecutor();
}
+ return channelBuilder.build();
}
- private static class InterceptedManagedChannelFactory extends ManagedChannelFactory {
- private final ManagedChannelFactory channelFactory;
- private final List interceptors;
+ /** The channel type. */
+ private enum Type {
+ EPOLL,
+ DEFAULT,
+ IN_PROCESS,
+ }
- private InterceptedManagedChannelFactory(
- ManagedChannelFactory managedChannelFactory, List interceptors) {
- this.channelFactory = managedChannelFactory;
- this.interceptors = interceptors;
- }
+ private final Type type;
+ private final List interceptors;
+ private final boolean directExecutor;
- @Override
- public ManagedChannel forDescriptor(ApiServiceDescriptor apiServiceDescriptor) {
- return builderFor(apiServiceDescriptor).intercept(interceptors).build();
- }
+ private ManagedChannelFactory(
+ Type type, List interceptors, boolean directExecutor) {
+ this.type = type;
+ this.interceptors = interceptors;
+ this.directExecutor = directExecutor;
+ }
- @Override
- protected ManagedChannelBuilder> builderFor(ApiServiceDescriptor descriptor) {
- return channelFactory.builderFor(descriptor);
- }
+ /**
+ * Returns a {@link ManagedChannelFactory} like this one, but which will apply the provided {@link
+ * ClientInterceptor ClientInterceptors} to any channel it creates.
+ */
+ public ManagedChannelFactory withInterceptors(List interceptors) {
+ return new ManagedChannelFactory(
+ type,
+ ImmutableList.builder()
+ .addAll(this.interceptors)
+ .addAll(interceptors)
+ .build(),
+ directExecutor);
+ }
- @Override
- public ManagedChannelFactory withInterceptors(List interceptors) {
- return new InterceptedManagedChannelFactory(channelFactory, interceptors);
- }
+ /**
+ * Returns a {@link ManagedChannelFactory} like this one, but will construct the channel to use
+ * the direct executor.
+ */
+ public ManagedChannelFactory withDirectExecutor() {
+ return new ManagedChannelFactory(type, interceptors, true);
}
}
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java
deleted file mode 100644
index f5b7c6e860d53..0000000000000
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.sdk.fn.test;
-
-import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
-import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ManagedChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.inprocess.InProcessChannelBuilder;
-
-/**
- * A {@link ManagedChannelFactory} that uses in-process channels.
- *
- * The channel builder uses {@link ApiServiceDescriptor#getUrl()} as the unique in-process name.
- */
-public class InProcessManagedChannelFactory extends ManagedChannelFactory {
- public static ManagedChannelFactory create() {
- return new InProcessManagedChannelFactory();
- }
-
- private InProcessManagedChannelFactory() {}
-
- @Override
- public ManagedChannelBuilder> builderFor(ApiServiceDescriptor apiServiceDescriptor) {
- return InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl());
- }
-}
diff --git a/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/logging/BeamFnLoggingClientBenchmark.java b/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/logging/BeamFnLoggingClientBenchmark.java
index ca9648b4b1d56..f1ef8a6dd9d61 100644
--- a/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/logging/BeamFnLoggingClientBenchmark.java
+++ b/sdks/java/harness/jmh/src/main/java/org/apache/beam/fn/harness/jmh/logging/BeamFnLoggingClientBenchmark.java
@@ -31,7 +31,6 @@
import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
import org.apache.beam.runners.core.metrics.SimpleExecutionState;
import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.Server;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.inprocess.InProcessServerBuilder;
@@ -89,7 +88,7 @@ public ManageLoggingClientAndService() {
ApiServiceDescriptor.newBuilder()
.setUrl(BeamFnLoggingClientBenchmark.class.getName() + "#" + UUID.randomUUID())
.build();
- ManagedChannelFactory managedChannelFactory = InProcessManagedChannelFactory.create();
+ ManagedChannelFactory managedChannelFactory = ManagedChannelFactory.createInProcess();
loggingService = new CallCountLoggingService();
server =
InProcessServerBuilder.forName(apiServiceDescriptor.getUrl())
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
index 7a20afbd151e8..e73968854b6bb 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
@@ -180,6 +180,7 @@ static class Factory
@@ -242,6 +243,7 @@ static class Factory onTimerContext;
private final OnWindowExpirationContext> onWindowExpirationContext;
private final FinishBundleArgumentProvider finishBundleArgumentProvider;
+ private final Duration allowedLateness;
/**
* Used to guarantee a consistent view of this {@link FnApiDoFnRunner} while setting up for {@link
@@ -344,6 +346,7 @@ private interface TriFunction {
Map windowingStrategies,
Consumer addStartFunction,
Consumer addFinishFunction,
+ Consumer addResetFunction,
Consumer addTearDownFunction,
Function>> getPCollectionConsumer,
TriFunction>, Coder>> addPCollectionConsumer,
@@ -457,6 +460,13 @@ private interface TriFunction {
}
timerFamilyInfos = timerFamilyInfosBuilder.build();
+ this.mainInputId = ParDoTranslation.getMainInputName(pTransform);
+ this.allowedLateness =
+ rehydratedComponents
+ .getPCollection(pTransform.getInputsOrThrow(mainInputId))
+ .getWindowingStrategy()
+ .getAllowedLateness();
+
} catch (IOException exn) {
throw new IllegalArgumentException("Malformed ParDoPayload", exn);
}
@@ -473,12 +483,11 @@ private interface TriFunction {
this.bundleFinalizer = bundleFinalizer;
this.onTimerContext = new OnTimerContext();
this.onWindowExpirationContext = new OnWindowExpirationContext<>();
+ this.timerBundleTracker =
+ new FnApiTimerBundleTracker(
+ keyCoder, windowCoder, this::getCurrentKey, () -> currentWindow);
+ addResetFunction.accept(timerBundleTracker::reset);
- try {
- this.mainInputId = ParDoTranslation.getMainInputName(pTransform);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
this.mainOutputConsumers =
(Collection>>)
(Collection) localNameToConsumer.get(mainOutputTag.getId());
@@ -756,9 +765,6 @@ private Object getCurrentKey() {
}
private void startBundle() {
- timerBundleTracker =
- new FnApiTimerBundleTracker(
- keyCoder, windowCoder, this::getCurrentKey, () -> currentWindow);
doFnInvoker.invokeStartBundle(startBundleArgumentProvider);
}
@@ -988,13 +994,12 @@ public void onClaimFailed(PositionT position) {}
/** Internal class to hold the primary and residual roots when converted to an input element. */
@AutoValue
@AutoValue.CopyAnnotations
- @SuppressWarnings({"rawtypes"})
abstract static class WindowedSplitResult {
public static WindowedSplitResult forRoots(
- WindowedValue primaryInFullyProcessedWindowsRoot,
- WindowedValue primarySplitRoot,
- WindowedValue residualSplitRoot,
- WindowedValue residualInUnprocessedWindowsRoot) {
+ WindowedValue> primaryInFullyProcessedWindowsRoot,
+ WindowedValue> primarySplitRoot,
+ WindowedValue> residualSplitRoot,
+ WindowedValue> residualInUnprocessedWindowsRoot) {
return new AutoValue_FnApiDoFnRunner_WindowedSplitResult(
primaryInFullyProcessedWindowsRoot,
primarySplitRoot,
@@ -1002,18 +1007,17 @@ public static WindowedSplitResult forRoots(
residualInUnprocessedWindowsRoot);
}
- public abstract @Nullable WindowedValue getPrimaryInFullyProcessedWindowsRoot();
+ public abstract @Nullable WindowedValue> getPrimaryInFullyProcessedWindowsRoot();
- public abstract @Nullable WindowedValue getPrimarySplitRoot();
+ public abstract @Nullable WindowedValue> getPrimarySplitRoot();
- public abstract @Nullable WindowedValue getResidualSplitRoot();
+ public abstract @Nullable WindowedValue> getResidualSplitRoot();
- public abstract @Nullable WindowedValue getResidualInUnprocessedWindowsRoot();
+ public abstract @Nullable WindowedValue> getResidualInUnprocessedWindowsRoot();
}
@AutoValue
@AutoValue.CopyAnnotations
- @SuppressWarnings({"rawtypes"})
abstract static class SplitResultsWithStopIndex {
public static SplitResultsWithStopIndex of(
WindowedSplitResult windowSplit,
@@ -1694,14 +1698,9 @@ private void processTimer(
// The timerIdOrTimerFamilyId contains either a timerId from timer declaration or
// timerFamilyId
// from timer family declaration.
- String timerId =
- timerIdOrTimerFamilyId.startsWith(TimerFamilyDeclaration.PREFIX)
- ? ""
- : timerIdOrTimerFamilyId;
- String timerFamilyId =
- timerIdOrTimerFamilyId.startsWith(TimerFamilyDeclaration.PREFIX)
- ? timerIdOrTimerFamilyId
- : "";
+ boolean isFamily = timerIdOrTimerFamilyId.startsWith(TimerFamilyDeclaration.PREFIX);
+ String timerId = isFamily ? "" : timerIdOrTimerFamilyId;
+ String timerFamilyId = isFamily ? timerIdOrTimerFamilyId : "";
processTimerDirect(timerFamilyId, timerId, timeDomain, timer);
}
}
@@ -1778,7 +1777,6 @@ private class FnApiTimer implements org.apache.beam.sdk.state.Timer {
private final K userKey;
private final String dynamicTimerTag;
private final TimeDomain timeDomain;
- private final Duration allowedLateness;
private final Instant fireTimestamp;
private final Instant elementTimestampOrTimerHoldTimestamp;
private final BoundedWindow boundedWindow;
@@ -1817,18 +1815,6 @@ private class FnApiTimer implements org.apache.beam.sdk.state.Timer {
throw new IllegalArgumentException(
String.format("Unknown or unsupported time domain %s", timeDomain));
}
-
- try {
- this.allowedLateness =
- rehydratedComponents
- .getPCollection(
- pTransform.getInputsOrThrow(ParDoTranslation.getMainInputName(pTransform)))
- .getWindowingStrategy()
- .getAllowedLateness();
- } catch (IOException e) {
- throw new IllegalArgumentException(
- String.format("Unable to get allowed lateness for timer %s", timerIdOrFamily));
- }
}
@Override
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
index 5e2fa13a0e01b..4e35e0199967e 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
@@ -242,8 +242,7 @@ public static void main(
new BeamFnDataGrpcClient(options, channelFactory::forDescriptor, outboundObserverFactory);
BeamFnStateGrpcClientCache beamFnStateGrpcClientCache =
- new BeamFnStateGrpcClientCache(
- idGenerator, channelFactory::forDescriptor, outboundObserverFactory);
+ new BeamFnStateGrpcClientCache(idGenerator, channelFactory, outboundObserverFactory);
FinalizeBundleHandler finalizeBundleHandler =
new FinalizeBundleHandler(options.as(GcsOptions.class).getExecutorService());
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
index abbe032b0ca6d..d028ef61d454c 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
@@ -22,13 +22,12 @@
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
-import java.util.function.Function;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
import org.apache.beam.model.fnexecution.v1.BeamFnStateGrpc;
-import org.apache.beam.model.pipeline.v1.Endpoints;
import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
import org.apache.beam.sdk.fn.IdGenerator;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.stub.StreamObserver;
@@ -47,22 +46,24 @@ public class BeamFnStateGrpcClientCache {
private static final Logger LOG = LoggerFactory.getLogger(BeamFnStateGrpcClientCache.class);
private final ConcurrentMap cache;
- private final Function channelFactory;
+ private final ManagedChannelFactory channelFactory;
private final OutboundObserverFactory outboundObserverFactory;
private final IdGenerator idGenerator;
public BeamFnStateGrpcClientCache(
IdGenerator idGenerator,
- Function channelFactory,
+ ManagedChannelFactory channelFactory,
OutboundObserverFactory outboundObserverFactory) {
this.idGenerator = idGenerator;
- this.channelFactory = channelFactory;
+ // We use the directExecutor because we just complete futures when handling responses.
+ // This showed a 1-2% improvement in the ProcessBundleBenchmark#testState* benchmarks.
+ this.channelFactory = channelFactory.withDirectExecutor();
this.outboundObserverFactory = outboundObserverFactory;
this.cache = new ConcurrentHashMap<>();
}
/**
- * ( Creates or returns an existing {@link BeamFnStateClient} depending on whether the passed in
+ * Creates or returns an existing {@link BeamFnStateClient} depending on whether the passed in
* {@link ApiServiceDescriptor} currently has a {@link BeamFnStateClient} bound to the same
* channel.
*/
@@ -86,7 +87,7 @@ private class GrpcStateClient implements BeamFnStateClient {
private GrpcStateClient(ApiServiceDescriptor apiServiceDescriptor) {
this.apiServiceDescriptor = apiServiceDescriptor;
this.outstandingRequests = new ConcurrentHashMap<>();
- this.channel = channelFactory.apply(apiServiceDescriptor);
+ this.channel = channelFactory.forDescriptor(apiServiceDescriptor);
this.outboundObserver =
outboundObserverFactory.outboundObserverFor(
BeamFnStateGrpc.newStub(channel)::state, new InboundObserver());
@@ -135,6 +136,8 @@ private synchronized void closeAndCleanUp(RuntimeException cause) {
*
* Also propagates server side failures and closes completing any outstanding requests
* exceptionally.
+ *
+ *
This implementation must never block since we use a direct executor.
*/
private class InboundObserver implements StreamObserver {
@Override
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiTimerBundleTracker.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiTimerBundleTracker.java
index f8a1dc3f9a258..39f735d98efe4 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiTimerBundleTracker.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiTimerBundleTracker.java
@@ -153,6 +153,10 @@ public FnApiTimerBundleTracker(
});
}
+ public void reset() {
+ timerModifications.clear();
+ }
+
public void timerModified(String timerFamilyOrId, TimeDomain timeDomain, Timer timer) {
ByteString keyString = encodedCurrentKeySupplier.get();
ByteString windowString = encodedCurrentWindowSupplier.get();
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
index 9ca5efc32a776..a84d1a0b58af9 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
@@ -40,8 +40,8 @@
import org.apache.beam.model.fnexecution.v1.BeamFnApi.RegisterRequest;
import org.apache.beam.model.fnexecution.v1.BeamFnControlGrpc;
import org.apache.beam.model.pipeline.v1.Endpoints;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
import org.apache.beam.sdk.fn.test.TestStreams;
import org.apache.beam.sdk.function.ThrowingFunction;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.Server;
@@ -142,7 +142,7 @@ public StreamObserver control(
BeamFnControlClient client =
new BeamFnControlClient(
apiServiceDescriptor,
- InProcessManagedChannelFactory.create(),
+ ManagedChannelFactory.createInProcess(),
OutboundObserverFactory.trivial(),
executor,
handlers);
@@ -216,7 +216,7 @@ public StreamObserver control(
BeamFnControlClient client =
new BeamFnControlClient(
apiServiceDescriptor,
- InProcessManagedChannelFactory.create(),
+ ManagedChannelFactory.createInProcess(),
OutboundObserverFactory.trivial(),
executor,
handlers);
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
index 13b8c24ed74af..9970e802ee340 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
@@ -509,6 +509,7 @@ public void testOrderOfSetupTeardownCalls() throws Exception {
PCollection.newBuilder()
.setWindowingStrategyId("window-strategy")
.setCoderId("2L-output-coder")
+ .setIsBounded(IsBounded.Enum.BOUNDED)
.build())
.putWindowingStrategies(
"window-strategy",
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
index 2fc2a0b12984e..e62df4669049d 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
@@ -34,13 +34,12 @@
import org.apache.beam.model.fnexecution.v1.BeamFnStateGrpc;
import org.apache.beam.model.pipeline.v1.Endpoints;
import org.apache.beam.sdk.fn.IdGenerators;
+import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.Server;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.Status;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.inprocess.InProcessChannelBuilder;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.inprocess.InProcessServerBuilder;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.stub.CallStreamObserver;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.stub.StreamObserver;
@@ -60,7 +59,6 @@ public class BeamFnStateGrpcClientCacheTest {
private static final String SERVER_ERROR = "SERVER ERROR";
private Endpoints.ApiServiceDescriptor apiServiceDescriptor;
- private ManagedChannel testChannel;
private Server testServer;
private BeamFnStateGrpcClientCache clientCache;
private BlockingQueue> outboundServerObservers;
@@ -75,7 +73,7 @@ public void setUp() throws Exception {
apiServiceDescriptor =
Endpoints.ApiServiceDescriptor.newBuilder()
- .setUrl(this.getClass().getName() + "-" + UUID.randomUUID().toString())
+ .setUrl(this.getClass().getName() + "-" + UUID.randomUUID())
.build();
testServer =
InProcessServerBuilder.forName(apiServiceDescriptor.getUrl())
@@ -91,29 +89,48 @@ public StreamObserver state(
.build();
testServer.start();
- testChannel = InProcessChannelBuilder.forName(apiServiceDescriptor.getUrl()).build();
-
clientCache =
new BeamFnStateGrpcClientCache(
IdGenerators.decrementingLongs(),
- (Endpoints.ApiServiceDescriptor descriptor) -> testChannel,
+ ManagedChannelFactory.createInProcess(),
OutboundObserverFactory.trivial());
}
@After
public void tearDown() throws Exception {
testServer.shutdownNow();
- testChannel.shutdownNow();
}
@Test
public void testCachingOfClient() throws Exception {
- assertSame(
- clientCache.forApiServiceDescriptor(apiServiceDescriptor),
- clientCache.forApiServiceDescriptor(apiServiceDescriptor));
- assertNotSame(
- clientCache.forApiServiceDescriptor(apiServiceDescriptor),
- clientCache.forApiServiceDescriptor(Endpoints.ApiServiceDescriptor.getDefaultInstance()));
+ Endpoints.ApiServiceDescriptor otherApiServiceDescriptor =
+ Endpoints.ApiServiceDescriptor.newBuilder()
+ .setUrl(apiServiceDescriptor.getUrl() + "-other")
+ .build();
+ Server testServer2 =
+ InProcessServerBuilder.forName(otherApiServiceDescriptor.getUrl())
+ .addService(
+ new BeamFnStateGrpc.BeamFnStateImplBase() {
+ @Override
+ public StreamObserver state(
+ StreamObserver outboundObserver) {
+ throw new IllegalStateException("Unexpected in test.");
+ }
+ })
+ .build();
+ testServer2.start();
+
+ try {
+
+ assertSame(
+ clientCache.forApiServiceDescriptor(apiServiceDescriptor),
+ clientCache.forApiServiceDescriptor(apiServiceDescriptor));
+ assertNotSame(
+ clientCache.forApiServiceDescriptor(apiServiceDescriptor),
+ clientCache.forApiServiceDescriptor(otherApiServiceDescriptor));
+ } finally {
+ testServer2.shutdownNow();
+ }
}
@Test
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/status/BeamFnStatusClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/status/BeamFnStatusClientTest.java
index 50b728a63debe..c0229f23fbe3d 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/status/BeamFnStatusClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/status/BeamFnStatusClientTest.java
@@ -44,7 +44,6 @@
import org.apache.beam.model.pipeline.v1.Endpoints;
import org.apache.beam.runners.core.metrics.ExecutionStateTracker;
import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
-import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
import org.apache.beam.sdk.fn.test.TestStreams;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.vendor.grpc.v1p43p2.io.grpc.Server;
@@ -81,7 +80,7 @@ public void testActiveBundleState() {
when(handler.getBundleProcessorCache()).thenReturn(processorCache);
when(processorCache.getActiveBundleProcessors()).thenReturn(bundleProcessorMap);
- ManagedChannelFactory channelFactory = InProcessManagedChannelFactory.create();
+ ManagedChannelFactory channelFactory = ManagedChannelFactory.createInProcess();
BeamFnStatusClient client =
new BeamFnStatusClient(
apiServiceDescriptor,
@@ -125,7 +124,7 @@ public StreamObserver workerStatus(
try {
BundleProcessorCache processorCache = mock(BundleProcessorCache.class);
when(processorCache.getActiveBundleProcessors()).thenReturn(Collections.emptyMap());
- ManagedChannelFactory channelFactory = InProcessManagedChannelFactory.create();
+ ManagedChannelFactory channelFactory = ManagedChannelFactory.createInProcess();
new BeamFnStatusClient(
apiServiceDescriptor,
channelFactory::forDescriptor,
@@ -144,7 +143,7 @@ public StreamObserver workerStatus(
@Test
public void testCacheStatsExist() {
- ManagedChannelFactory channelFactory = InProcessManagedChannelFactory.create();
+ ManagedChannelFactory channelFactory = ManagedChannelFactory.createInProcess();
BeamFnStatusClient client =
new BeamFnStatusClient(
apiServiceDescriptor,
diff --git a/sdks/java/io/amazon-web-services2/build.gradle b/sdks/java/io/amazon-web-services2/build.gradle
index 1af0532fd13c8..dceb4c41bed91 100644
--- a/sdks/java/io/amazon-web-services2/build.gradle
+++ b/sdks/java/io/amazon-web-services2/build.gradle
@@ -59,10 +59,10 @@ dependencies {
testImplementation project(path: ":sdks:java:io:common", configuration: "testRuntimeMigration")
testImplementation project(path: ":sdks:java:io:kinesis", configuration: "testRuntimeMigration")
testImplementation "io.findify:s3mock_2.12:0.2.6"
+ testImplementation 'org.elasticmq:elasticmq-rest-sqs_2.12:1.3.5'
testImplementation library.java.mockito_core
testImplementation library.java.guava_testlib
testImplementation library.java.junit
- testImplementation 'org.elasticmq:elasticmq-rest-sqs_2.12:0.15.6' // later versions conflict with s3mock
testImplementation library.java.hamcrest
testImplementation "org.assertj:assertj-core:3.11.1"
testRuntimeOnly library.java.slf4j_jdk14
diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/sqs/SqsUnboundedReaderTest.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/sqs/SqsUnboundedReaderTest.java
index 2c2329974df2b..5b1ac0cbb28b2 100644
--- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/sqs/SqsUnboundedReaderTest.java
+++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/sqs/SqsUnboundedReaderTest.java
@@ -37,7 +37,6 @@
import org.apache.beam.sdk.io.aws2.sqs.EmbeddedSqsServer.TestCaseEnv;
import org.apache.beam.sdk.util.CoderUtils;
import org.junit.ClassRule;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -110,7 +109,6 @@ public void testAckDeletedMessage() throws IOException {
}
@Test
- @Ignore("Behavior of SQSRestServer is broken: https://issues.apache.org/jira/browse/BEAM-13738")
public void testExtendDeletedMessage() throws IOException {
setupMessages(DATA);
Clock clock = mock(Clock.class);
diff --git a/sdks/java/io/amazon-web-services2/src/test/resources/application.conf b/sdks/java/io/amazon-web-services2/src/test/resources/application.conf
new file mode 100644
index 0000000000000..f1fcb1e9f3da4
--- /dev/null
+++ b/sdks/java/io/amazon-web-services2/src/test/resources/application.conf
@@ -0,0 +1,21 @@
+###############################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+###############################################################################
+
+# Enable implicit HEAD requests using GET routes for S3Mock
+# https://doc.akka.io/docs/akka-http/current/migration-guide/migration-guide-10.2.x.html#transparent-head-requests-now-disabled-by-default
+akka.http.server.transparent-head-requests = on
diff --git a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
index 273443a4fdbd8..dc29ac6074b07 100644
--- a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
+++ b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
@@ -2359,9 +2359,10 @@ private ProcessContextAdapter(DoFn.ProcessContext context) {
@Override
public void output(
- TupleTag tag, Document document, Instant timestamp, BoundedWindow ignored) {
- // Note: window is intentionally unused, but required as a param to fit the interface
- context.outputWithTimestamp(tag, document, timestamp);
+ TupleTag tag, Document document, Instant ignored1, BoundedWindow ignored2) {
+ // Note: window and timestamp are intentionally unused, but required as params to fit the
+ // interface
+ context.output(tag, document);
}
}
diff --git a/sdks/java/io/google-cloud-platform/build.gradle b/sdks/java/io/google-cloud-platform/build.gradle
index be4e551cd6b9a..364321e0b27ed 100644
--- a/sdks/java/io/google-cloud-platform/build.gradle
+++ b/sdks/java/io/google-cloud-platform/build.gradle
@@ -52,6 +52,7 @@ dependencies {
// BEAM-13781: gax-grpc's gRPC version was older than Beam declared
exclude group: 'io.grpc', module: 'grpc-netty-shaded'
}
+ implementation library.java.gax_grpc_test
implementation library.java.gax_httpjson
permitUnusedDeclared library.java.gax_httpjson // BEAM-8755
implementation library.java.google_api_client
@@ -146,6 +147,7 @@ dependencies {
testImplementation library.java.powermock
testImplementation library.java.powermock_mockito
testImplementation library.java.joda_time
+ testImplementation library.java.google_cloud_spanner_test
testRuntimeOnly library.java.slf4j_jdk14
}
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index f6d433498fde1..97a9bfa1595c7 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -74,7 +74,6 @@
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.TableSchemaToJsonSchema;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.TableSpecToTableRef;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.TimePartitioningToJson;
-import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.Method;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryResourceNaming.JobType;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.DatasetService;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.JobService;
@@ -91,6 +90,8 @@
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.NestedValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
+import org.apache.beam.sdk.schemas.FieldAccessDescriptor;
+import org.apache.beam.sdk.schemas.ProjectionProducer;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
@@ -117,6 +118,7 @@
import org.apache.beam.sdk.values.ValueInSingleWindow;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Predicates;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
@@ -730,7 +732,8 @@ public Read withTemplateCompatibility() {
/** Implementation of {@link BigQueryIO#read(SerializableFunction)}. */
@AutoValue
- public abstract static class TypedRead extends PTransform> {
+ public abstract static class TypedRead extends PTransform>
+ implements ProjectionProducer>> {
/** Determines the method used to read data from BigQuery. */
public enum Method {
/** The default behavior if no method is explicitly set. Currently {@link #EXPORT}. */
@@ -1619,6 +1622,29 @@ public TypedRead withTestServices(BigQueryServices testServices) {
public TypedRead useAvroLogicalTypes() {
return toBuilder().setUseAvroLogicalTypes(true).build();
}
+
+ @Override
+ public boolean supportsProjectionPushdown() {
+ // We can't do projection pushdown when a query is set. The query may project certain fields
+ // itself, and we can't know without parsing the query.
+ return Method.DIRECT_READ.equals(getMethod()) && getQuery() == null;
+ }
+
+ @Override
+ public PTransform> actuateProjectionPushdown(
+ Map, FieldAccessDescriptor> outputFields) {
+ Preconditions.checkArgument(supportsProjectionPushdown());
+ FieldAccessDescriptor fieldAccessDescriptor = outputFields.get(new TupleTag<>("output"));
+ org.apache.beam.sdk.util.Preconditions.checkArgumentNotNull(
+ fieldAccessDescriptor, "Expected pushdown on the main output (tagged 'output')");
+ Preconditions.checkArgument(
+ outputFields.size() == 1,
+ "Expected only to pushdown on the main output (tagged 'output'). Requested tags: %s",
+ outputFields.keySet());
+ ImmutableList fields =
+ ImmutableList.copyOf(fieldAccessDescriptor.fieldNamesAccessed());
+ return withSelectedFields(fields);
+ }
}
static String getExtractDestinationUri(String extractDestinationDir) {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/BatchSpannerRead.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/BatchSpannerRead.java
index efc983e39f5ea..810f7ce8aaaea 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/BatchSpannerRead.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/BatchSpannerRead.java
@@ -218,7 +218,6 @@ public void processElement(ProcessContext c) throws Exception {
BatchReadOnlyTransaction batchTx =
spannerAccessor.getBatchClient().batchReadOnlyTransaction(tx.transactionId());
- serviceCallMetric.call("ok");
Partition p = c.element();
try (ResultSet resultSet = batchTx.execute(p)) {
while (resultSet.next()) {
@@ -227,7 +226,9 @@ public void processElement(ProcessContext c) throws Exception {
}
} catch (SpannerException e) {
serviceCallMetric.call(e.getErrorCode().getGrpcStatusCode().toString());
+ throw (e);
}
+ serviceCallMetric.call("ok");
}
private ServiceCallMetric createServiceCallMetric(
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessor.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessor.java
index e34863ea97de0..a4223dc804eaf 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessor.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerAccessor.java
@@ -17,6 +17,7 @@
*/
package org.apache.beam.sdk.io.gcp.spanner;
+import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.FixedHeaderProvider;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
@@ -107,34 +108,59 @@ public static SpannerAccessor getOrCreate(SpannerConfig spannerConfig) {
private static SpannerAccessor createAndConnect(SpannerConfig spannerConfig) {
SpannerOptions.Builder builder = SpannerOptions.newBuilder();
- ValueProvider commitDeadline = spannerConfig.getCommitDeadline();
- if (commitDeadline != null && commitDeadline.get().getMillis() > 0) {
+ // Set retryable codes for all API methods
+ if (spannerConfig.getRetryableCodes() != null) {
+ builder
+ .getSpannerStubSettingsBuilder()
+ .applyToAllUnaryMethods(
+ input -> {
+ input.setRetryableCodes(spannerConfig.getRetryableCodes());
+ return null;
+ });
+ builder
+ .getSpannerStubSettingsBuilder()
+ .executeStreamingSqlSettings()
+ .setRetryableCodes(spannerConfig.getRetryableCodes());
+ }
+ // Set commit retry settings
+ UnaryCallSettings.Builder commitSettings =
+ builder.getSpannerStubSettingsBuilder().commitSettings();
+ ValueProvider commitDeadline = spannerConfig.getCommitDeadline();
+ if (spannerConfig.getCommitRetrySettings() != null) {
+ commitSettings.setRetrySettings(spannerConfig.getCommitRetrySettings());
+ } else if (commitDeadline != null && commitDeadline.get().getMillis() > 0) {
// Set the GRPC deadline on the Commit API call.
- UnaryCallSettings.Builder commitSettings =
- builder.getSpannerStubSettingsBuilder().commitSettings();
- RetrySettings.Builder commitRetrySettings = commitSettings.getRetrySettings().toBuilder();
+ RetrySettings.Builder commitRetrySettingsBuilder =
+ commitSettings.getRetrySettings().toBuilder();
commitSettings.setRetrySettings(
- commitRetrySettings
+ commitRetrySettingsBuilder
.setTotalTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setMaxRpcTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setInitialRpcTimeout(
org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.build());
}
- // Setting the timeout for streaming read to 2 hours. This is 1 hour by default
- // after BEAM 2.20.
+
+ // Set execute streaming sql retry settings
ServerStreamingCallSettings.Builder
executeStreamingSqlSettings =
builder.getSpannerStubSettingsBuilder().executeStreamingSqlSettings();
- RetrySettings.Builder executeSqlStreamingRetrySettings =
- executeStreamingSqlSettings.getRetrySettings().toBuilder();
- executeStreamingSqlSettings.setRetrySettings(
- executeSqlStreamingRetrySettings
- .setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
- .setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
- .setTotalTimeout(org.threeten.bp.Duration.ofMinutes(120))
- .build());
+ if (spannerConfig.getExecuteStreamingSqlRetrySettings() != null) {
+ executeStreamingSqlSettings.setRetrySettings(
+ spannerConfig.getExecuteStreamingSqlRetrySettings());
+ } else {
+ // Setting the timeout for streaming read to 2 hours. This is 1 hour by default
+ // after BEAM 2.20.
+ RetrySettings.Builder executeSqlStreamingRetrySettings =
+ executeStreamingSqlSettings.getRetrySettings().toBuilder();
+ executeStreamingSqlSettings.setRetrySettings(
+ executeSqlStreamingRetrySettings
+ .setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
+ .setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
+ .setTotalTimeout(org.threeten.bp.Duration.ofMinutes(120))
+ .build());
+ }
ValueProvider projectId = spannerConfig.getProjectId();
if (projectId != null) {
@@ -151,6 +177,10 @@ private static SpannerAccessor createAndConnect(SpannerConfig spannerConfig) {
ValueProvider emulatorHost = spannerConfig.getEmulatorHost();
if (emulatorHost != null) {
builder.setEmulatorHost(emulatorHost.get());
+ if (spannerConfig.getIsLocalChannelProvider() != null
+ && spannerConfig.getIsLocalChannelProvider().get()) {
+ builder.setChannelProvider(LocalChannelProvider.create(emulatorHost.get()));
+ }
builder.setCredentials(NoCredentials.getInstance());
}
String userAgentString = USER_AGENT_PREFIX + "/" + ReleaseInfo.getReleaseInfo().getVersion();
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerConfig.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerConfig.java
index 7fcc0b5676b92..a00b7896c35af 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerConfig.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerConfig.java
@@ -19,6 +19,8 @@
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.api.gax.rpc.StatusCode.Code;
import com.google.auto.value.AutoValue;
import com.google.cloud.ServiceFactory;
import com.google.cloud.spanner.Options.RpcPriority;
@@ -28,6 +30,7 @@
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.joda.time.Duration;
@@ -56,10 +59,18 @@ public abstract class SpannerConfig implements Serializable {
public abstract @Nullable ValueProvider getEmulatorHost();
+ public abstract @Nullable ValueProvider getIsLocalChannelProvider();
+
public abstract @Nullable ValueProvider getCommitDeadline();
public abstract @Nullable ValueProvider getMaxCumulativeBackoff();
+ public abstract @Nullable RetrySettings getExecuteStreamingSqlRetrySettings();
+
+ public abstract @Nullable RetrySettings getCommitRetrySettings();
+
+ public abstract @Nullable ImmutableSet getRetryableCodes();
+
public abstract @Nullable ValueProvider getRpcPriority();
@VisibleForTesting
@@ -117,10 +128,19 @@ public abstract static class Builder {
abstract Builder setEmulatorHost(ValueProvider emulatorHost);
+ abstract Builder setIsLocalChannelProvider(ValueProvider isLocalChannelProvider);
+
abstract Builder setCommitDeadline(ValueProvider commitDeadline);
abstract Builder setMaxCumulativeBackoff(ValueProvider maxCumulativeBackoff);
+ abstract Builder setExecuteStreamingSqlRetrySettings(
+ RetrySettings executeStreamingSqlRetrySettings);
+
+ abstract Builder setCommitRetrySettings(RetrySettings commitRetrySettings);
+
+ abstract Builder setRetryableCodes(ImmutableSet retryableCodes);
+
abstract Builder setServiceFactory(ServiceFactory serviceFactory);
abstract Builder setRpcPriority(ValueProvider rpcPriority);
@@ -160,6 +180,10 @@ public SpannerConfig withEmulatorHost(ValueProvider emulatorHost) {
return toBuilder().setEmulatorHost(emulatorHost).build();
}
+ public SpannerConfig withIsLocalChannelProvider(ValueProvider isLocalChannelProvider) {
+ return toBuilder().setIsLocalChannelProvider(isLocalChannelProvider).build();
+ }
+
public SpannerConfig withCommitDeadline(Duration commitDeadline) {
return withCommitDeadline(ValueProvider.StaticValueProvider.of(commitDeadline));
}
@@ -176,6 +200,21 @@ public SpannerConfig withMaxCumulativeBackoff(ValueProvider maxCumulat
return toBuilder().setMaxCumulativeBackoff(maxCumulativeBackoff).build();
}
+ public SpannerConfig withExecuteStreamingSqlRetrySettings(
+ RetrySettings executeStreamingSqlRetrySettings) {
+ return toBuilder()
+ .setExecuteStreamingSqlRetrySettings(executeStreamingSqlRetrySettings)
+ .build();
+ }
+
+ public SpannerConfig withCommitRetrySettings(RetrySettings commitRetrySettings) {
+ return toBuilder().setCommitRetrySettings(commitRetrySettings).build();
+ }
+
+ public SpannerConfig withRetryableCodes(ImmutableSet retryableCodes) {
+ return toBuilder().setRetryableCodes(retryableCodes).build();
+ }
+
@VisibleForTesting
SpannerConfig withServiceFactory(ServiceFactory serviceFactory) {
return toBuilder().setServiceFactory(serviceFactory).build();
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
index 9fd929d097132..863d88ab54e68 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
@@ -23,6 +23,8 @@
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.api.gax.rpc.StatusCode.Code;
import com.google.auto.value.AutoValue;
import com.google.cloud.ServiceFactory;
import com.google.cloud.Timestamp;
@@ -79,6 +81,7 @@
import org.apache.beam.sdk.metrics.Distribution;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.options.ValueProvider;
+import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
@@ -115,6 +118,7 @@
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Stopwatch;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.primitives.UnsignedBytes;
import org.checkerframework.checker.nullness.qual.Nullable;
@@ -1349,6 +1353,8 @@ public abstract static class ReadChangeStream
abstract @Nullable String getMetadataDatabase();
+ abstract @Nullable String getMetadataTable();
+
abstract Timestamp getInclusiveStartAt();
abstract @Nullable Timestamp getInclusiveEndAt();
@@ -1370,6 +1376,8 @@ abstract static class Builder {
abstract Builder setMetadataDatabase(String metadataDatabase);
+ abstract Builder setMetadataTable(String metadataTable);
+
abstract Builder setInclusiveStartAt(Timestamp inclusiveStartAt);
abstract Builder setInclusiveEndAt(Timestamp inclusiveEndAt);
@@ -1434,6 +1442,10 @@ public ReadChangeStream withMetadataDatabase(String metadataDatabase) {
return toBuilder().setMetadataDatabase(metadataDatabase).build();
}
+ public ReadChangeStream withMetadataTable(String metadataTable) {
+ return toBuilder().setMetadataTable(metadataTable).build();
+ }
+
/** Specifies the time that the change stream should be read from. */
public ReadChangeStream withInclusiveStartAt(Timestamp timestamp) {
return toBuilder().setInclusiveStartAt(timestamp).build();
@@ -1497,7 +1509,8 @@ && getInclusiveStartAt().toSqlTimestamp().after(getInclusiveEndAt().toSqlTimesta
final String partitionMetadataDatabaseId =
MoreObjects.firstNonNull(getMetadataDatabase(), changeStreamDatabaseId.getDatabase());
final String partitionMetadataTableName =
- generatePartitionMetadataTableName(partitionMetadataDatabaseId);
+ MoreObjects.firstNonNull(
+ getMetadataTable(), generatePartitionMetadataTableName(partitionMetadataDatabaseId));
if (getTraceSampleProbability() != null) {
TraceConfig globalTraceConfig = Tracing.getTraceConfig();
@@ -1511,16 +1524,36 @@ && getInclusiveStartAt().toSqlTimestamp().after(getInclusiveEndAt().toSqlTimesta
.spanBuilder("SpannerIO.ReadChangeStream.expand")
.setRecordEvents(true)
.startScopedSpan()) {
- final SpannerConfig changeStreamSpannerConfig = getSpannerConfig();
+ SpannerConfig changeStreamSpannerConfig = getSpannerConfig();
+ // Set default retryable errors for ReadChangeStream
+ if (changeStreamSpannerConfig.getRetryableCodes() == null) {
+ ImmutableSet defaultRetryableCodes =
+ ImmutableSet.of(Code.UNAVAILABLE, Code.ABORTED);
+ changeStreamSpannerConfig =
+ changeStreamSpannerConfig
+ .toBuilder()
+ .setRetryableCodes(defaultRetryableCodes)
+ .build();
+ }
+ // Set default retry timeouts for ReadChangeStream
+ if (changeStreamSpannerConfig.getExecuteStreamingSqlRetrySettings() == null) {
+ changeStreamSpannerConfig =
+ changeStreamSpannerConfig
+ .toBuilder()
+ .setExecuteStreamingSqlRetrySettings(
+ RetrySettings.newBuilder()
+ .setTotalTimeout(org.threeten.bp.Duration.ofMinutes(5))
+ .setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(1))
+ .setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(1))
+ .build())
+ .build();
+ }
final SpannerConfig partitionMetadataSpannerConfig =
- SpannerConfig.create()
- .withProjectId(changeStreamSpannerConfig.getProjectId())
- .withHost(changeStreamSpannerConfig.getHost())
- .withInstanceId(partitionMetadataInstanceId)
- .withDatabaseId(partitionMetadataDatabaseId)
- .withCommitDeadline(changeStreamSpannerConfig.getCommitDeadline())
- .withEmulatorHost(changeStreamSpannerConfig.getEmulatorHost())
- .withMaxCumulativeBackoff(changeStreamSpannerConfig.getMaxCumulativeBackoff());
+ changeStreamSpannerConfig
+ .toBuilder()
+ .setInstanceId(StaticValueProvider.of(partitionMetadataInstanceId))
+ .setDatabaseId(StaticValueProvider.of(partitionMetadataDatabaseId))
+ .build();
final String changeStreamName = getChangeStreamName();
// FIXME: The backend only supports microsecond granularity. Remove when fixed.
final Timestamp startTimestamp = TimestampConverter.truncateNanos(getInclusiveStartAt());
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java
index 86f538a4d0f3b..b23de6b3fc334 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageQueryTest.java
@@ -93,7 +93,10 @@
import org.junit.runners.JUnit4;
import org.junit.runners.model.Statement;
-/** Tests for {@link BigQueryIO#readTableRows()} using {@link Method#DIRECT_READ}. */
+/**
+ * Tests for {@link BigQueryIO#read(SerializableFunction)} using {@link Method#DIRECT_READ} and
+ * {@link BigQueryIO.TypedRead#fromQuery(String)}.
+ */
@RunWith(JUnit4.class)
public class BigQueryIOStorageQueryTest {
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
index fcf335c01b309..8f8ab867a9101 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
@@ -25,6 +25,7 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@@ -92,20 +93,29 @@
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.Method;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServices.StorageClient;
+import org.apache.beam.sdk.io.gcp.bigquery.BigQueryUtils.ConversionOptions;
import org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices;
import org.apache.beam.sdk.io.gcp.testing.FakeBigQueryServices.FakeBigQueryServerStream;
import org.apache.beam.sdk.io.gcp.testing.FakeDatasetService;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
+import org.apache.beam.sdk.schemas.FieldAccessDescriptor;
+import org.apache.beam.sdk.schemas.SchemaCoder;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.Row;
+import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
+import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -2064,6 +2074,56 @@ public void testStreamSourceSplitAtFractionFailsWhenParentIsPastSplitPointArrow(
assertFalse(parent.advance());
}
+ @Test
+ public void testActuateProjectionPushdown() {
+ org.apache.beam.sdk.schemas.Schema schema =
+ org.apache.beam.sdk.schemas.Schema.builder()
+ .addStringField("foo")
+ .addStringField("bar")
+ .build();
+ TypedRead read =
+ BigQueryIO.read(
+ record ->
+ BigQueryUtils.toBeamRow(
+ record.getRecord(), schema, ConversionOptions.builder().build()))
+ .withMethod(Method.DIRECT_READ)
+ .withCoder(SchemaCoder.of(schema));
+
+ assertTrue(read.supportsProjectionPushdown());
+ PTransform> pushdownT =
+ read.actuateProjectionPushdown(
+ ImmutableMap.of(new TupleTag<>("output"), FieldAccessDescriptor.withFieldNames("foo")));
+
+ TypedRead pushdownRead = (TypedRead) pushdownT;
+ assertEquals(Method.DIRECT_READ, pushdownRead.getMethod());
+ assertThat(pushdownRead.getSelectedFields().get(), Matchers.containsInAnyOrder("foo"));
+ }
+
+ @Test
+ public void testReadFromQueryDoesNotSupportProjectionPushdown() {
+ org.apache.beam.sdk.schemas.Schema schema =
+ org.apache.beam.sdk.schemas.Schema.builder()
+ .addStringField("foo")
+ .addStringField("bar")
+ .build();
+ TypedRead read =
+ BigQueryIO.read(
+ record ->
+ BigQueryUtils.toBeamRow(
+ record.getRecord(), schema, ConversionOptions.builder().build()))
+ .fromQuery("SELECT bar FROM `dataset.table`")
+ .withMethod(Method.DIRECT_READ)
+ .withCoder(SchemaCoder.of(schema));
+
+ assertFalse(read.supportsProjectionPushdown());
+ assertThrows(
+ IllegalArgumentException.class,
+ () ->
+ read.actuateProjectionPushdown(
+ ImmutableMap.of(
+ new TupleTag<>("output"), FieldAccessDescriptor.withFieldNames("foo"))));
+ }
+
private static org.apache.arrow.vector.types.pojo.Field field(
String name,
boolean nullable,
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
index c80490ea472f3..48f7fed7feee6 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
@@ -34,6 +34,7 @@
import com.google.cloud.spanner.Partition;
import com.google.cloud.spanner.PartitionOptions;
import com.google.cloud.spanner.ResultSets;
+import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.Struct;
@@ -41,6 +42,7 @@
import com.google.cloud.spanner.Type;
import com.google.cloud.spanner.Value;
import com.google.protobuf.ByteString;
+import io.grpc.Status.Code;
import java.io.Serializable;
import java.util.Arrays;
import java.util.HashMap;
@@ -49,6 +51,7 @@
import org.apache.beam.runners.core.metrics.MetricsContainerImpl;
import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
import org.apache.beam.runners.core.metrics.MonitoringInfoMetricName;
+import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.Read;
import org.apache.beam.sdk.metrics.MetricsEnvironment;
import org.apache.beam.sdk.testing.PAssert;
@@ -293,7 +296,7 @@ public void runReadWithPriority() throws Exception {
}
@Test
- public void testQueryMetrics() throws Exception {
+ public void testQueryMetricsFail() throws Exception {
Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
@@ -322,25 +325,74 @@ public void testQueryMetrics() throws Exception {
any(PartitionOptions.class),
eq(Statement.of("SELECT * FROM users")),
any(ReadQueryUpdateTransactionOption.class)))
- .thenReturn(Arrays.asList(fakePartition, fakePartition));
+ .thenReturn(Arrays.asList(fakePartition));
when(mockBatchTx.execute(any(Partition.class)))
.thenThrow(
SpannerExceptionFactory.newSpannerException(
- ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"))
- .thenThrow(
- SpannerExceptionFactory.newSpannerException(
- ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 2"))
+ ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"));
+ try {
+ pipeline.run();
+ } catch (PipelineExecutionException e) {
+ if (e.getCause() instanceof SpannerException
+ && ((SpannerException) e.getCause()).getErrorCode().getGrpcStatusCode()
+ == Code.DEADLINE_EXCEEDED) {
+ // expected
+ } else {
+ throw e;
+ }
+ }
+ verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 1);
+ verifyMetricWasSet("test", "aaa", "123", "ok", null, 0);
+ }
+
+ @Test
+ public void testQueryMetricsSucceed() throws Exception {
+ Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
+ TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
+
+ SpannerConfig spannerConfig = getSpannerConfig();
+
+ pipeline.apply(
+ "read q",
+ SpannerIO.read()
+ .withSpannerConfig(spannerConfig)
+ .withQuery("SELECT * FROM users")
+ .withQueryName("queryName")
+ .withTimestampBound(timestampBound));
+
+ FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
+ when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
+
+ when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
+ .thenReturn(mockBatchTx);
+ when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
+ .thenReturn(mockBatchTx);
+
+ Partition fakePartition =
+ FakePartitionFactory.createFakeQueryPartition(ByteString.copyFromUtf8("one"));
+
+ when(mockBatchTx.partitionQuery(
+ any(PartitionOptions.class),
+ eq(Statement.of("SELECT * FROM users")),
+ any(ReadQueryUpdateTransactionOption.class)))
+ .thenReturn(Arrays.asList(fakePartition, fakePartition));
+ when(mockBatchTx.execute(any(Partition.class)))
.thenReturn(
ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
- ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 6)));
+ ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
+ ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)))
+ .thenReturn(
+ ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
+ ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
+ ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
pipeline.run();
- verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 2);
+ verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 0);
verifyMetricWasSet("test", "aaa", "123", "ok", null, 2);
}
@Test
- public void testReadMetrics() throws Exception {
+ public void testReadMetricsFail() throws Exception {
Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
@@ -371,21 +423,66 @@ public void testReadMetrics() throws Exception {
eq(KeySet.all()),
eq(Arrays.asList("id", "name")),
any(ReadQueryUpdateTransactionOption.class)))
- .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
+ .thenReturn(Arrays.asList(fakePartition));
when(mockBatchTx.execute(any(Partition.class)))
.thenThrow(
SpannerExceptionFactory.newSpannerException(
- ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"))
- .thenThrow(
- SpannerExceptionFactory.newSpannerException(
- ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 2"))
+ ErrorCode.DEADLINE_EXCEEDED, "Simulated Timeout 1"));
+ try {
+ pipeline.run();
+ } catch (PipelineExecutionException e) {
+ if (e.getCause() instanceof SpannerException
+ && ((SpannerException) e.getCause()).getErrorCode().getGrpcStatusCode()
+ == Code.DEADLINE_EXCEEDED) {
+ // expected
+ } else {
+ throw e;
+ }
+ }
+ verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 1);
+ verifyMetricWasSet("test", "aaa", "123", "ok", null, 0);
+ }
+
+ @Test
+ public void testReadMetricsSucceed() throws Exception {
+ Timestamp timestamp = Timestamp.ofTimeMicroseconds(12345);
+ TimestampBound timestampBound = TimestampBound.ofReadTimestamp(timestamp);
+
+ SpannerConfig spannerConfig = getSpannerConfig();
+
+ pipeline.apply(
+ "read q",
+ SpannerIO.read()
+ .withSpannerConfig(spannerConfig)
+ .withTable("users")
+ .withColumns("id", "name")
+ .withTimestampBound(timestampBound));
+
+ FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
+ when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
+
+ when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(timestampBound))
+ .thenReturn(mockBatchTx);
+ when(serviceFactory.mockBatchClient().batchReadOnlyTransaction(any(BatchTransactionId.class)))
+ .thenReturn(mockBatchTx);
+
+ Partition fakePartition =
+ FakePartitionFactory.createFakeReadPartition(ByteString.copyFromUtf8("one"));
+
+ when(mockBatchTx.partitionRead(
+ any(PartitionOptions.class),
+ eq("users"),
+ eq(KeySet.all()),
+ eq(Arrays.asList("id", "name")),
+ any(ReadQueryUpdateTransactionOption.class)))
+ .thenReturn(Arrays.asList(fakePartition, fakePartition, fakePartition));
+ when(mockBatchTx.execute(any(Partition.class)))
.thenReturn(
ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(0, 2)),
ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(2, 4)),
ResultSets.forRows(FAKE_TYPE, FAKE_ROWS.subList(4, 6)));
pipeline.run();
- verifyMetricWasSet("test", "aaa", "123", "deadline_exceeded", null, 2);
verifyMetricWasSet("test", "aaa", "123", "ok", null, 3);
}
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java
new file mode 100644
index 0000000000000..34d7cd4a91a7d
--- /dev/null
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/changestreams/SpannerChangeStreamErrorTest.java
@@ -0,0 +1,497 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.gcp.spanner.changestreams;
+
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_CREATED_AT;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_END_TIMESTAMP;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_FINISHED_AT;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_HEARTBEAT_MILLIS;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_PARENT_TOKENS;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_PARTITION_TOKEN;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_RUNNING_AT;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_SCHEDULED_AT;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_START_TIMESTAMP;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_STATE;
+import static org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.PartitionMetadataAdminDao.COLUMN_WATERMARK;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import com.google.api.gax.grpc.testing.MockServiceHelper;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.cloud.Timestamp;
+import com.google.cloud.spanner.ErrorCode;
+import com.google.cloud.spanner.MockSpannerServiceImpl;
+import com.google.cloud.spanner.MockSpannerServiceImpl.SimulatedExecutionTime;
+import com.google.cloud.spanner.MockSpannerServiceImpl.StatementResult;
+import com.google.cloud.spanner.Statement;
+import com.google.protobuf.ListValue;
+import com.google.protobuf.NullValue;
+import com.google.protobuf.Value;
+import com.google.spanner.v1.ExecuteSqlRequest;
+import com.google.spanner.v1.ResultSet;
+import com.google.spanner.v1.ResultSetMetadata;
+import com.google.spanner.v1.StructType;
+import com.google.spanner.v1.StructType.Field;
+import com.google.spanner.v1.Type;
+import com.google.spanner.v1.TypeCode;
+import io.grpc.Status;
+import java.io.Serializable;
+import java.util.Collections;
+import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
+import org.apache.beam.sdk.io.gcp.spanner.SpannerConfig;
+import org.apache.beam.sdk.io.gcp.spanner.SpannerIO;
+import org.apache.beam.sdk.io.gcp.spanner.changestreams.dao.DaoFactory;
+import org.apache.beam.sdk.io.gcp.spanner.changestreams.model.PartitionMetadata.State;
+import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
+import org.hamcrest.Matchers;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class SpannerChangeStreamErrorTest implements Serializable {
+
+ public static final String SPANNER_HOST = "my-host";
+ private static final String TEST_PROJECT = "my-project";
+ private static final String TEST_INSTANCE = "my-instance";
+ private static final String TEST_DATABASE = "my-database";
+ private static final String TEST_TABLE = "my-metadata-table";
+ private static final String TEST_CHANGE_STREAM = "my-change-stream";
+
+ @Rule
+ public final transient TestPipeline pipeline =
+ TestPipeline.create().enableAbandonedNodeEnforcement(false);
+
+ @Rule public final transient ExpectedException thrown = ExpectedException.none();
+
+ private MockSpannerServiceImpl mockSpannerService;
+ private MockServiceHelper serviceHelper;
+
+ @Before
+ public void setUp() throws Exception {
+ mockSpannerService = new MockSpannerServiceImpl();
+ serviceHelper =
+ new MockServiceHelper(SPANNER_HOST, Collections.singletonList(mockSpannerService));
+ serviceHelper.start();
+ serviceHelper.reset();
+ }
+
+ @After
+ public void tearDown() throws NoSuchFieldException, IllegalAccessException {
+ serviceHelper.reset();
+ serviceHelper.stop();
+ mockSpannerService.reset();
+ resetDaoFactoryFields();
+ }
+
+ @Test
+ public void testResourceExhaustedDoesNotRetry() {
+ mockSpannerService.setExecuteStreamingSqlExecutionTime(
+ SimulatedExecutionTime.ofStickyException(Status.RESOURCE_EXHAUSTED.asRuntimeException()));
+
+ final Timestamp now = Timestamp.now();
+ final Timestamp after3Seconds =
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds() + 3, now.getNanos());
+ try {
+ pipeline.apply(
+ SpannerIO.readChangeStream()
+ .withSpannerConfig(getSpannerConfig())
+ .withChangeStreamName(TEST_CHANGE_STREAM)
+ .withMetadataDatabase(TEST_DATABASE)
+ .withInclusiveStartAt(now)
+ .withInclusiveEndAt(after3Seconds));
+ pipeline.run().waitUntilFinish();
+ } finally {
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectMessage(ErrorCode.RESOURCE_EXHAUSTED.name());
+ }
+ }
+
+ @Test
+ public void testUnavailableExceptionRetries() {
+ mockSpannerService.setExecuteStreamingSqlExecutionTime(
+ SimulatedExecutionTime.ofExceptions(
+ ImmutableSet.of(
+ Status.UNAVAILABLE.asRuntimeException(),
+ Status.RESOURCE_EXHAUSTED.asRuntimeException())));
+
+ final Timestamp now = Timestamp.now();
+ final Timestamp after3Seconds =
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds() + 3, now.getNanos());
+ try {
+ pipeline.apply(
+ SpannerIO.readChangeStream()
+ .withSpannerConfig(getSpannerConfig())
+ .withChangeStreamName(TEST_CHANGE_STREAM)
+ .withMetadataDatabase(TEST_DATABASE)
+ .withInclusiveStartAt(now)
+ .withInclusiveEndAt(after3Seconds));
+ pipeline.run().waitUntilFinish();
+ } finally {
+ assertThat(
+ mockSpannerService.countRequestsOfType(ExecuteSqlRequest.class), Matchers.greaterThan(1));
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectMessage(ErrorCode.RESOURCE_EXHAUSTED.name());
+ }
+ }
+
+ @Test
+ public void testAbortedExceptionRetries() {
+ mockSpannerService.setExecuteStreamingSqlExecutionTime(
+ SimulatedExecutionTime.ofStickyException(Status.ABORTED.asRuntimeException()));
+
+ final Timestamp now = Timestamp.now();
+ final Timestamp after3Seconds =
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds() + 3, now.getNanos());
+ try {
+ pipeline.apply(
+ SpannerIO.readChangeStream()
+ .withSpannerConfig(getSpannerConfig())
+ .withChangeStreamName(TEST_CHANGE_STREAM)
+ .withMetadataDatabase(TEST_DATABASE)
+ .withInclusiveStartAt(now)
+ .withInclusiveEndAt(after3Seconds));
+ pipeline.run().waitUntilFinish();
+ } finally {
+ assertThat(
+ mockSpannerService.countRequestsOfType(ExecuteSqlRequest.class), Matchers.greaterThan(1));
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectMessage(ErrorCode.ABORTED.name());
+ }
+ }
+
+ @Test
+ public void testUnknownExceptionDoesNotRetry() {
+ mockSpannerService.setExecuteStreamingSqlExecutionTime(
+ SimulatedExecutionTime.ofStickyException(Status.UNKNOWN.asRuntimeException()));
+
+ final Timestamp now = Timestamp.now();
+ final Timestamp after3Seconds =
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds() + 3, now.getNanos());
+ try {
+ pipeline.apply(
+ SpannerIO.readChangeStream()
+ .withSpannerConfig(getSpannerConfig())
+ .withChangeStreamName(TEST_CHANGE_STREAM)
+ .withMetadataDatabase(TEST_DATABASE)
+ .withInclusiveStartAt(now)
+ .withInclusiveEndAt(after3Seconds));
+ pipeline.run().waitUntilFinish();
+ } finally {
+ assertThat(
+ mockSpannerService.countRequestsOfType(ExecuteSqlRequest.class), Matchers.equalTo(1));
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectMessage(ErrorCode.UNKNOWN.name());
+ }
+ }
+
+ @Test
+ public void testInvalidRecordReceived() {
+ final Timestamp now = Timestamp.now();
+ final Timestamp after3Seconds =
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds() + 3, now.getNanos());
+
+ mockTableExists();
+ ResultSet getPartitionResultSet = mockGetParentPartition(now, after3Seconds);
+ mockGetWatermark(now);
+ mockGetPartitionsAfter(
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds(), now.getNanos() + 1000),
+ getPartitionResultSet);
+ mockGetPartitionsAfter(
+ Timestamp.ofTimeSecondsAndNanos(now.getSeconds(), now.getNanos() - 1000),
+ getPartitionResultSet);
+ mockInvalidChangeStreamRecordReceived(now, after3Seconds);
+
+ try {
+ pipeline.apply(
+ SpannerIO.readChangeStream()
+ .withSpannerConfig(getSpannerConfig())
+ .withChangeStreamName(TEST_CHANGE_STREAM)
+ .withMetadataDatabase(TEST_DATABASE)
+ .withMetadataTable(TEST_TABLE)
+ .withInclusiveStartAt(now)
+ .withInclusiveEndAt(after3Seconds));
+ pipeline.run().waitUntilFinish();
+ } finally {
+ thrown.expect(PipelineExecutionException.class);
+ thrown.expectMessage("Field not found");
+ }
+ }
+
+ private void mockInvalidChangeStreamRecordReceived(Timestamp now, Timestamp after3Seconds) {
+ Statement changeStreamQueryStatement =
+ Statement.newBuilder(
+ "SELECT * FROM READ_my-change-stream( start_timestamp => @startTimestamp, end_timestamp => @endTimestamp, partition_token => @partitionToken, read_options => null, heartbeat_milliseconds => @heartbeatMillis)")
+ .bind("startTimestamp")
+ .to(now)
+ .bind("endTimestamp")
+ .to(after3Seconds)
+ .bind("partitionToken")
+ .to((String) null)
+ .bind("heartbeatMillis")
+ .to(500)
+ .build();
+ ResultSetMetadata readChangeStreamResultSetMetadata =
+ ResultSetMetadata.newBuilder()
+ .setRowType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setName("COL1")
+ .setType(
+ Type.newBuilder()
+ .setCode(TypeCode.ARRAY)
+ .setArrayElementType(
+ Type.newBuilder()
+ .setCode(TypeCode.STRUCT)
+ .setStructType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setName("field_name")
+ .setType(
+ Type.newBuilder()
+ .setCode(TypeCode.STRUCT)
+ .setStructType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setType(
+ Type
+ .newBuilder()
+ .setCode(
+ TypeCode
+ .STRING)))))))))))
+ .build();
+ ResultSet readChangeStreamResultSet =
+ ResultSet.newBuilder()
+ .addRows(
+ ListValue.newBuilder()
+ .addValues(
+ Value.newBuilder()
+ .setListValue(
+ ListValue.newBuilder()
+ .addValues(
+ Value.newBuilder()
+ .setListValue(
+ ListValue.newBuilder()
+ .addValues(
+ Value.newBuilder()
+ .setListValue(
+ ListValue.newBuilder()
+ .addValues(
+ Value.newBuilder()
+ .setStringValue(
+ "bad_value")))))))))
+ .setMetadata(readChangeStreamResultSetMetadata)
+ .build();
+ mockSpannerService.putStatementResult(
+ StatementResult.query(changeStreamQueryStatement, readChangeStreamResultSet));
+ }
+
+ private void mockGetPartitionsAfter(Timestamp timestamp, ResultSet getPartitionResultSet) {
+ Statement getPartitionsAfterStatement =
+ Statement.newBuilder(
+ "SELECT * FROM my-metadata-table WHERE CreatedAt > @timestamp ORDER BY CreatedAt ASC, StartTimestamp ASC")
+ .bind("timestamp")
+ .to(Timestamp.ofTimeSecondsAndNanos(timestamp.getSeconds(), timestamp.getNanos()))
+ .build();
+ mockSpannerService.putStatementResult(
+ StatementResult.query(getPartitionsAfterStatement, getPartitionResultSet));
+ }
+
+ private void mockGetWatermark(Timestamp now) {
+ Statement watermarkStatement =
+ Statement.newBuilder(
+ "SELECT Watermark FROM my-metadata-table WHERE State != @state ORDER BY Watermark ASC LIMIT 1")
+ .bind("state")
+ .to(State.FINISHED.name())
+ .build();
+ ResultSetMetadata watermarkResultSetMetadata =
+ ResultSetMetadata.newBuilder()
+ .setRowType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setName("Watermark")
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP).build())
+ .build())
+ .build())
+ .build();
+ ResultSet watermarkResultSet =
+ ResultSet.newBuilder()
+ .addRows(
+ ListValue.newBuilder()
+ .addValues(Value.newBuilder().setStringValue(now.toString()).build())
+ .build())
+ .setMetadata(watermarkResultSetMetadata)
+ .build();
+ mockSpannerService.putStatementResult(
+ StatementResult.query(watermarkStatement, watermarkResultSet));
+ }
+
+ private ResultSet mockGetParentPartition(Timestamp now, Timestamp after3Seconds) {
+ Statement getPartitionStatement =
+ Statement.newBuilder("SELECT * FROM my-metadata-table WHERE PartitionToken = @partition")
+ .bind("partition")
+ .to("Parent0")
+ .build();
+ ResultSet getPartitionResultSet =
+ ResultSet.newBuilder()
+ .addRows(
+ ListValue.newBuilder()
+ .addValues(Value.newBuilder().setStringValue("Parent0"))
+ .addValues(Value.newBuilder().setListValue(ListValue.newBuilder().build()))
+ .addValues(Value.newBuilder().setStringValue(now.toString()))
+ .addValues(Value.newBuilder().setStringValue(after3Seconds.toString()))
+ .addValues(Value.newBuilder().setStringValue("500"))
+ .addValues(Value.newBuilder().setStringValue(State.CREATED.name()))
+ .addValues(Value.newBuilder().setStringValue(now.toString()))
+ .addValues(Value.newBuilder().setStringValue(now.toString()))
+ .addValues(Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())
+ .addValues(Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())
+ .addValues(Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())
+ .build())
+ .setMetadata(PARTITION_METADATA_RESULT_SET_METADATA)
+ .build();
+ mockSpannerService.putStatementResult(
+ StatementResult.query(getPartitionStatement, getPartitionResultSet));
+ return getPartitionResultSet;
+ }
+
+ private void mockTableExists() {
+ Statement tableExistsStatement =
+ Statement.of(
+ "SELECT t.table_name FROM information_schema.tables AS t WHERE t.table_catalog = '' AND t.table_schema = '' AND t.table_name = 'my-metadata-table'");
+ ResultSetMetadata tableExistsResultSetMetadata =
+ ResultSetMetadata.newBuilder()
+ .setRowType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setName("table_name")
+ .setType(Type.newBuilder().setCode(TypeCode.STRING).build())
+ .build())
+ .build())
+ .build();
+ ResultSet tableExistsResultSet =
+ ResultSet.newBuilder()
+ .addRows(
+ ListValue.newBuilder()
+ .addValues(Value.newBuilder().setStringValue(TEST_TABLE).build())
+ .build())
+ .setMetadata(tableExistsResultSetMetadata)
+ .build();
+ mockSpannerService.putStatementResult(
+ StatementResult.query(tableExistsStatement, tableExistsResultSet));
+ }
+
+ private SpannerConfig getSpannerConfig() {
+ RetrySettings quickRetrySettings =
+ RetrySettings.newBuilder()
+ .setInitialRetryDelay(org.threeten.bp.Duration.ofMillis(250))
+ .setMaxRetryDelay(org.threeten.bp.Duration.ofSeconds(1))
+ .setRetryDelayMultiplier(5)
+ .setTotalTimeout(org.threeten.bp.Duration.ofSeconds(1))
+ .build();
+ return SpannerConfig.create()
+ .withEmulatorHost(StaticValueProvider.of(SPANNER_HOST))
+ .withIsLocalChannelProvider(StaticValueProvider.of(true))
+ .withCommitRetrySettings(quickRetrySettings)
+ .withExecuteStreamingSqlRetrySettings(quickRetrySettings)
+ .withProjectId(TEST_PROJECT)
+ .withInstanceId(TEST_INSTANCE)
+ .withDatabaseId(TEST_DATABASE);
+ }
+
+ private static void resetDaoFactoryFields() throws NoSuchFieldException, IllegalAccessException {
+ java.lang.reflect.Field partitionMetadataAdminDaoField =
+ DaoFactory.class.getDeclaredField("partitionMetadataAdminDao");
+ partitionMetadataAdminDaoField.setAccessible(true);
+ partitionMetadataAdminDaoField.set(null, null);
+ java.lang.reflect.Field partitionMetadataDaoInstanceField =
+ DaoFactory.class.getDeclaredField("partitionMetadataDaoInstance");
+ partitionMetadataDaoInstanceField.setAccessible(true);
+ partitionMetadataDaoInstanceField.set(null, null);
+ java.lang.reflect.Field changeStreamDaoInstanceField =
+ DaoFactory.class.getDeclaredField("changeStreamDaoInstance");
+ changeStreamDaoInstanceField.setAccessible(true);
+ changeStreamDaoInstanceField.set(null, null);
+ }
+
+ private static final ResultSetMetadata PARTITION_METADATA_RESULT_SET_METADATA =
+ ResultSetMetadata.newBuilder()
+ .setRowType(
+ StructType.newBuilder()
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_PARTITION_TOKEN)
+ .setType(Type.newBuilder().setCode(TypeCode.STRING))
+ .build())
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_PARENT_TOKENS)
+ .setType(
+ Type.newBuilder()
+ .setCode(TypeCode.ARRAY)
+ .setArrayElementType(Type.newBuilder().setCode(TypeCode.STRING)))
+ .build())
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_START_TIMESTAMP)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_END_TIMESTAMP)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_HEARTBEAT_MILLIS)
+ .setType(Type.newBuilder().setCode(TypeCode.INT64)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_STATE)
+ .setType(Type.newBuilder().setCode(TypeCode.STRING)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_WATERMARK)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_CREATED_AT)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_SCHEDULED_AT)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_RUNNING_AT)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .addFields(
+ Field.newBuilder()
+ .setName(COLUMN_FINISHED_AT)
+ .setType(Type.newBuilder().setCode(TypeCode.TIMESTAMP)))
+ .build())
+ .build();
+}
diff --git a/sdks/java/io/neo4j/OWNERS b/sdks/java/io/neo4j/OWNERS
new file mode 100644
index 0000000000000..0ff6e82359fb6
--- /dev/null
+++ b/sdks/java/io/neo4j/OWNERS
@@ -0,0 +1,5 @@
+# See the OWNERS docs at https://s.apache.org/beam-owners
+
+reviewers:
+ - mcasters
+
diff --git a/sdks/java/io/neo4j/build.gradle b/sdks/java/io/neo4j/build.gradle
new file mode 100644
index 0000000000000..9d5adfc32b1d0
--- /dev/null
+++ b/sdks/java/io/neo4j/build.gradle
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+plugins { id 'org.apache.beam.module' }
+applyJavaNature(automaticModuleName: 'org.apache.beam.sdk.io.neo4j')
+provideIntegrationTestingDependencies()
+enableJavaPerformanceTesting()
+
+description = "Apache Beam :: SDKs :: Java :: IO :: Neo4j"
+ext.summary = "IO to read from and write to Neo4j graphs"
+
+dependencies {
+ implementation project(path: ":sdks:java:core", configuration: "shadow")
+ implementation "org.neo4j.driver:neo4j-java-driver:4.4.3"
+ implementation library.java.slf4j_api
+ implementation library.java.vendored_guava_26_0_jre
+ testImplementation library.java.junit
+ testImplementation library.java.hamcrest
+ testImplementation project(path: ":sdks:java:io:common", configuration: "testRuntimeMigration")
+ testImplementation project(path: ":sdks:java:testing:test-utils", configuration: "testRuntimeMigration")
+ testImplementation "org.testcontainers:neo4j:1.16.2"
+ testRuntimeOnly library.java.slf4j_jdk14
+ testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow")
+}
diff --git a/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java b/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java
new file mode 100644
index 0000000000000..9b011aff410d2
--- /dev/null
+++ b/sdks/java/io/neo4j/src/main/java/org/apache/beam/sdk/io/neo4j/Neo4jIO.java
@@ -0,0 +1,1221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.neo4j;
+
+import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
+
+import com.google.auto.value.AutoValue;
+import java.io.Serializable;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.beam.repackaged.core.org.apache.commons.lang3.StringUtils;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.harness.JvmInitializer;
+import org.apache.beam.sdk.options.ValueProvider;
+import org.apache.beam.sdk.schemas.NoSuchSchemaException;
+import org.apache.beam.sdk.schemas.Schema;
+import org.apache.beam.sdk.schemas.SchemaRegistry;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.transforms.display.HasDisplayData;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.apache.beam.sdk.values.TypeDescriptor;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
+import org.checkerframework.checker.initialization.qual.Initialized;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.neo4j.driver.AuthToken;
+import org.neo4j.driver.AuthTokens;
+import org.neo4j.driver.Config;
+import org.neo4j.driver.Driver;
+import org.neo4j.driver.GraphDatabase;
+import org.neo4j.driver.Record;
+import org.neo4j.driver.Result;
+import org.neo4j.driver.Session;
+import org.neo4j.driver.SessionConfig;
+import org.neo4j.driver.TransactionConfig;
+import org.neo4j.driver.TransactionWork;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This is a Beam IO to read from, and write data to, Neo4j.
+ *
+ *
+ *
+ *
+ *
+ *
Driver configuration
+ *
+ * To read from or write to Neo4j you have to provide a {@link DriverConfiguration} using
+ * {@link DriverConfiguration#create()} or {@link DriverConfiguration#create(String, String,
+ * String)} (URL, username and password). Note that subclasses of DriverConfiguration must also be
+ * {@link Serializable}).
+ * At the level of the Neo4j driver configuration you can specify a Neo4j {@link Config} object with
+ * {@link DriverConfiguration#withConfig(Config)}. This way you can configure the Neo4j driver
+ * characteristics. Likewise, you can control the characteristics of Neo4j sessions by optionally
+ * passing a {@link SessionConfig} object to {@link ReadAll} or {@link WriteUnwind}. For example,
+ * the session configuration will allow you to target a specific database or set a fetch size.
+ * Finally, in even rarer cases you might need to configure the various aspects of Neo4j
+ * transactions, for example their timeout. You can do this with a Neo4j {@link TransactionConfig}
+ * object.
+ *
+ *
+ *
+ *
+ *
+ *
Neo4j Aura
+ *
+ * If you have trouble connecting to a Neo4j Aura database please try to disable a few security
+ * algorithms in your JVM. This makes sure that the right one is picked to connect:
+ *
+ *
+ *
+ *
{@code
+ * Security.setProperty(
+ * "jdk.tls.disabledAlgorithms",
+ * "SSLv3, RC4, DES, MD5withRSA, DH keySize < 1024, EC keySize < 224, 3DES_EDE_CBC, anon, NULL");
+ * }
+ *
+ *
+ *
+ *
+ *
+ *
To execute this code on GCP Dataflow you can create a class which extends {@link
+ * JvmInitializer} and implement the {@link JvmInitializer#onStartup()} method. You need to annotate
+ * this new class with {@link com.google.auto.service.AutoService}
+ *
+ *
{@code
+ * @AutoService(value = JvmInitializer.class)
+ * }
+ *
+ *
+ *
+ *
Reading from Neo4j
+ *
+ * {@link Neo4jIO#readAll()} source returns a bounded collection of {@code OuptutT} as a {@code
+ * PCollection}. OutputT is the type returned by the provided {@link RowMapper}. It accepts
+ * parameters as input in the form of {@code ParameterT} as a {@code PCollection}
+ *
+ * The following example reads ages to return the IDs of Person nodes. It runs a Cypher query for
+ * each provided age.
+ *
+ *
The mapping {@link SerializableFunction} maps input values to each execution of the Cypher
+ * statement. In the function simply return a map containing the parameters you want to set.
+ *
+ *
The {@link RowMapper} converts output Neo4j {@link Record} values to the output of the source.
+ *
+ *
{@code
+ * pipeline
+ * .apply(Create.of(40, 50, 60))
+ * .apply(Neo4jIO.readAll()
+ * .withDriverConfiguration(Neo4jIO.DriverConfiguration.create("neo4j://localhost:7687", "neo4j", "password"))
+ * .withCypher("MATCH(n:Person) WHERE n.age = $age RETURN n.id")
+ * .withReadTransaction()
+ * .withCoder(StringUtf8Coder.of())
+ * .withParametersFunction( age -> Collections.singletonMap( "age", age ))
+ * .withRowMapper( record -> return record.get(0).asString() )
+ * );
+ * }
+ *
+ * Writing to Neo4j
+ *
+ * The Neo4j {@link WriteUnwind} transform supports writing data to a graph. It writes a {@link
+ * PCollection} to the graph by collecting a batch of elements after which all elements in the batch
+ * are written together to Neo4j.
+ *
+ *
Like the source, to configure this sink, you have to provide a {@link DriverConfiguration}.
+ *
+ *
In the following example we'll merge a collection of {@link org.apache.beam.sdk.values.Row}
+ * into Person nodes. Since this is a Sink it has no output and as such no RowMapper is needed. The
+ * rows are being used as a container for the parameters of the Cypher statement. The used Cypher in
+ * question needs to be an UNWIND statement. Like in the read case, the parameters {@link
+ * SerializableFunction} converts parameter values to a {@link Map}. The difference here is that the
+ * resulting Map is stored in a {@link List} (containing maps) which in turn is stored in another
+ * Map under the name provided by the {@link WriteUnwind#withUnwindMapName(String)} method. All of
+ * this is handled automatically. You do need to provide the unwind map name so that you can
+ * reference that in the UNWIND statement.
+ *
+ *
+ *
+ *
For example:
+ *
+ *
{@code
+ * pipeline
+ * .apply(...)
+ * .apply(Neo4jIO.writeUnwind()
+ * .withDriverConfiguration(Neo4jIO.DriverConfiguration.create("neo4j://localhost:7687", "neo4j", "password"))
+ * .withUnwindMapName("rows")
+ * .withCypher("UNWIND $rows AS row MERGE(n:Person { id : row.id } ) SET n.firstName = row.first, n.lastName = row.last")
+ * .withParametersFunction( row -> ImmutableMap.of(
+ * "id", row.getString("id),
+ * "first", row.getString("firstName")
+ * "last", row.getString("lastName")))
+ * );
+ * }
+ */
+@Experimental(Experimental.Kind.SOURCE_SINK)
+public class Neo4jIO {
+
+ private static final Logger LOG = LoggerFactory.getLogger(Neo4jIO.class);
+
+ /**
+ * Read all rows using a Neo4j Cypher query.
+ *
+ * @param Type of the data representing query parameters.
+ * @param Type of the data to be read.
+ */
+ public static ReadAll readAll() {
+ return new AutoValue_Neo4jIO_ReadAll.Builder().build();
+ }
+
+ /**
+ * Write all rows using a Neo4j Cypher UNWIND cypher statement. This sets a default batch size of
+ * 5000.
+ *
+ * @param Type of the data representing query parameters.
+ */
+ public static WriteUnwind writeUnwind() {
+ return new AutoValue_Neo4jIO_WriteUnwind.Builder()
+ .setBatchSize(ValueProvider.StaticValueProvider.of(5000L))
+ .build();
+ }
+
+ private static PCollection getOutputPCollection(
+ PCollection input,
+ DoFn writeFn,
+ @Nullable Coder coder) {
+ PCollection output = input.apply(ParDo.of(writeFn));
+ if (coder != null) {
+ output.setCoder(coder);
+ try {
+ TypeDescriptor typeDesc = coder.getEncodedTypeDescriptor();
+ SchemaRegistry registry = input.getPipeline().getSchemaRegistry();
+ Schema schema = registry.getSchema(typeDesc);
+ output.setSchema(
+ schema,
+ typeDesc,
+ registry.getToRowFunction(typeDesc),
+ registry.getFromRowFunction(typeDesc));
+ } catch (NoSuchSchemaException e) {
+ // ignore
+ }
+ }
+ return output;
+ }
+
+ /**
+ * An interface used by {@link ReadAll} for converting each row of a Neo4j {@link Result} record
+ * {@link Record} into an element of the resulting {@link PCollection}.
+ */
+ @FunctionalInterface
+ public interface RowMapper extends Serializable {
+ T mapRow(Record record) throws Exception;
+ }
+
+ /**
+ * A convenience method to clarify the way {@link ValueProvider} works to the static code checker
+ * framework for {@link Nullable} values.
+ *
+ * @param valueProvider
+ * @param
+ * @return The provided value or null if none was specified.
+ */
+ private static T getProvidedValue(@Nullable ValueProvider valueProvider) {
+ if (valueProvider == null) {
+ return (T) null;
+ }
+ return valueProvider.get();
+ }
+
+ /** This describes all the information needed to create a Neo4j {@link Session}. */
+ @AutoValue
+ public abstract static class DriverConfiguration implements Serializable {
+ public static DriverConfiguration create() {
+ return new AutoValue_Neo4jIO_DriverConfiguration.Builder()
+ .build()
+ .withDefaultConfig(true)
+ .withConfig(Config.defaultConfig());
+ }
+
+ public static DriverConfiguration create(String url, String username, String password) {
+ checkArgument(url != null, "url can not be null");
+ checkArgument(username != null, "username can not be null");
+ checkArgument(password != null, "password can not be null");
+ return new AutoValue_Neo4jIO_DriverConfiguration.Builder()
+ .build()
+ .withDefaultConfig(true)
+ .withConfig(Config.defaultConfig())
+ .withUrl(url)
+ .withUsername(username)
+ .withPassword(password);
+ }
+
+ abstract @Nullable ValueProvider getUrl();
+
+ abstract @Nullable ValueProvider> getUrls();
+
+ abstract @Nullable ValueProvider getUsername();
+
+ abstract @Nullable ValueProvider getPassword();
+
+ abstract @Nullable Config getConfig();
+
+ abstract @Nullable ValueProvider getHasDefaultConfig();
+
+ abstract Builder builder();
+
+ public DriverConfiguration withUrl(String url) {
+ return withUrl(ValueProvider.StaticValueProvider.of(url));
+ }
+
+ public DriverConfiguration withUrl(ValueProvider url) {
+ Preconditions.checkArgument(
+ url != null, "a neo4j connection URL can not be empty or null", url);
+ Preconditions.checkArgument(
+ StringUtils.isNotEmpty(url.get()),
+ "a neo4j connection URL can not be empty or null",
+ url);
+ return builder().setUrl(url).build();
+ }
+
+ public DriverConfiguration withUrls(List urls) {
+ return withUrls(ValueProvider.StaticValueProvider.of(urls));
+ }
+
+ public DriverConfiguration withUrls(ValueProvider> urls) {
+ Preconditions.checkArgument(
+ urls != null, "a list of neo4j connection URLs can not be empty or null", urls);
+ Preconditions.checkArgument(
+ urls.get() != null && !urls.get().isEmpty(),
+ "a neo4j connection URL can not be empty or null",
+ urls);
+ return builder().setUrls(urls).build();
+ }
+
+ public DriverConfiguration withConfig(Config config) {
+ return builder().setConfig(config).build();
+ }
+
+ public DriverConfiguration withUsername(String username) {
+ return withUsername(ValueProvider.StaticValueProvider.of(username));
+ }
+
+ public DriverConfiguration withUsername(ValueProvider username) {
+ Preconditions.checkArgument(username != null, "neo4j username can not be null", username);
+ Preconditions.checkArgument(
+ username.get() != null, "neo4j username can not be null", username);
+ return builder().setUsername(username).build();
+ }
+
+ public DriverConfiguration withPassword(String password) {
+ return withPassword(ValueProvider.StaticValueProvider.of(password));
+ }
+
+ public DriverConfiguration withPassword(ValueProvider password) {
+ Preconditions.checkArgument(password != null, "neo4j password can not be null", password);
+ Preconditions.checkArgument(
+ password.get() != null, "neo4j password can not be null", password);
+ return builder().setPassword(password).build();
+ }
+
+ public DriverConfiguration withDefaultConfig(boolean useDefault) {
+ return withDefaultConfig(ValueProvider.StaticValueProvider.of(useDefault));
+ }
+
+ public DriverConfiguration withDefaultConfig(ValueProvider useDefault) {
+ Preconditions.checkArgument(
+ useDefault != null, "withDefaultConfig parameter useDefault can not be null", useDefault);
+ Preconditions.checkArgument(
+ useDefault.get() != null,
+ "withDefaultConfig parameter useDefault can not be null",
+ useDefault);
+ return builder().setHasDefaultConfig(useDefault).build();
+ }
+
+ void populateDisplayData(DisplayData.Builder builder) {
+ builder.addIfNotNull(DisplayData.item("neo4j-url", getUrl()));
+ builder.addIfNotNull(DisplayData.item("neo4j-username", getUsername()));
+ builder.addIfNotNull(
+ DisplayData.item(
+ "neo4j-password", getPassword() != null ? "" : ""));
+ }
+
+ Driver buildDriver() {
+ // Create the Neo4j Driver
+ // This uses the provided Neo4j configuration along with URLs, username and password
+ //
+ Config config = getConfig();
+ if (config == null) {
+ throw new RuntimeException("please provide a neo4j config");
+ }
+ // We're trying to work around a subtle serialisation bug in the Neo4j Java driver.
+ // The fix is work in progress. For now, we harden our code to avoid
+ // wild goose chases.
+ //
+ Boolean hasDefaultConfig = getProvidedValue(getHasDefaultConfig());
+ if (hasDefaultConfig != null && hasDefaultConfig) {
+ config = Config.defaultConfig();
+ }
+
+ // Get the list of the URI to connect with
+ //
+ List uris = new ArrayList<>();
+ String url = getProvidedValue(getUrl());
+ if (url != null) {
+ try {
+ uris.add(new URI(url));
+ } catch (URISyntaxException e) {
+ throw new RuntimeException("Error creating URI from URL '" + url + "'", e);
+ }
+ }
+ List providedUrls = getProvidedValue(getUrls());
+ if (providedUrls != null) {
+ for (String providedUrl : providedUrls) {
+ try {
+ uris.add(new URI(providedUrl));
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(
+ "Error creating URI '"
+ + providedUrl
+ + "' from a list of "
+ + providedUrls.size()
+ + " URLs",
+ e);
+ }
+ }
+ }
+
+ // A specific routing driver can be used to connect to specific clustered configurations.
+ // Often we don't need it because the Java driver automatically can figure this out
+ // automatically. To keep things simple we use the routing driver in case we have more
+ // than one URL specified. This is an exceptional case.
+ //
+ Driver driver;
+ AuthToken authTokens =
+ getAuthToken(getProvidedValue(getUsername()), getProvidedValue(getPassword()));
+ if (uris.size() > 1) {
+ driver = GraphDatabase.routingDriver(uris, authTokens, config);
+ } else {
+ // Just take the first URI that was provided
+ driver = GraphDatabase.driver(uris.get(0), authTokens, config);
+ }
+
+ return driver;
+ }
+
+ /**
+ * Certain embedded scenarios and so on actually allow for having no authentication at all.
+ *
+ * @param username The username if one is needed
+ * @param password The password if one is needed
+ * @return The AuthToken
+ */
+ protected AuthToken getAuthToken(String username, String password) {
+ if (username != null && password != null) {
+ return AuthTokens.basic(username, password);
+ } else {
+ return AuthTokens.none();
+ }
+ }
+
+ /**
+ * The Builder class below is not visible. We use it to service the "with" methods below the
+ * Builder class.
+ */
+ @AutoValue.Builder
+ abstract static class Builder {
+ abstract Builder setUrl(ValueProvider url);
+
+ abstract Builder setUrls(ValueProvider> url);
+
+ abstract Builder setUsername(ValueProvider username);
+
+ abstract Builder setPassword(ValueProvider password);
+
+ abstract Builder setConfig(Config config);
+
+ abstract Builder setHasDefaultConfig(ValueProvider useDefault);
+
+ abstract DriverConfiguration build();
+ }
+ }
+
+ /** This is the class which handles the work behind the {@link #readAll} method. */
+ @AutoValue
+ public abstract static class ReadAll
+ extends PTransform, PCollection> {
+
+ abstract @Nullable SerializableFunction getDriverProviderFn();
+
+ abstract @Nullable SessionConfig getSessionConfig();
+
+ abstract @Nullable TransactionConfig getTransactionConfig();
+
+ abstract @Nullable ValueProvider getCypher();
+
+ abstract @Nullable ValueProvider getWriteTransaction();
+
+ abstract @Nullable RowMapper getRowMapper();
+
+ abstract @Nullable SerializableFunction>
+ getParametersFunction();
+
+ abstract @Nullable Coder getCoder();
+
+ abstract @Nullable ValueProvider getLogCypher();
+
+ abstract Builder toBuilder();
+
+ public ReadAll withDriverConfiguration(DriverConfiguration config) {
+ return toBuilder()
+ .setDriverProviderFn(new DriverProviderFromDriverConfiguration(config))
+ .build();
+ }
+
+ public ReadAll withCypher(String cypher) {
+ checkArgument(
+ cypher != null, "Neo4jIO.readAll().withCypher(query) called with null cypher query");
+ return withCypher(ValueProvider.StaticValueProvider.of(cypher));
+ }
+
+ public ReadAll withCypher(ValueProvider cypher) {
+ checkArgument(cypher != null, "Neo4jIO.readAll().withCypher(cypher) called with null cypher");
+ return toBuilder().setCypher(cypher).build();
+ }
+
+ public ReadAll withSessionConfig(SessionConfig sessionConfig) {
+ checkArgument(
+ sessionConfig != null,
+ "Neo4jIO.readAll().withSessionConfig(sessionConfig) called with null sessionConfig");
+ return toBuilder().setSessionConfig(sessionConfig).build();
+ }
+
+ public ReadAll withTransactionConfig(TransactionConfig transactionConfig) {
+ checkArgument(
+ transactionConfig != null,
+ "Neo4jIO.readAll().withTransactionConfig(transactionConfig) called with null transactionConfig");
+ return toBuilder().setTransactionConfig(transactionConfig).build();
+ }
+
+ public ReadAll withRowMapper(RowMapper rowMapper) {
+ checkArgument(
+ rowMapper != null,
+ "Neo4jIO.readAll().withRowMapper(rowMapper) called with null rowMapper");
+ return toBuilder().setRowMapper(rowMapper).build();
+ }
+
+ public ReadAll withParametersFunction(
+ SerializableFunction> parametersFunction) {
+ checkArgument(
+ parametersFunction != null,
+ "Neo4jIO.readAll().withParametersFunction(parametersFunction) called with null parametersFunction");
+ return toBuilder().setParametersFunction(parametersFunction).build();
+ }
+
+ public ReadAll withCoder(Coder coder) {
+ checkArgument(coder != null, "Neo4jIO.readAll().withCoder(coder) called with null coder");
+ return toBuilder().setCoder(coder).build();
+ }
+
+ public ReadAll withReadTransaction() {
+ return toBuilder()
+ .setWriteTransaction(ValueProvider.StaticValueProvider.of(Boolean.FALSE))
+ .build();
+ }
+
+ public ReadAll withWriteTransaction() {
+ return toBuilder()
+ .setWriteTransaction(ValueProvider.StaticValueProvider.of(Boolean.TRUE))
+ .build();
+ }
+
+ public ReadAll withCypherLogging() {
+ return toBuilder().setLogCypher(ValueProvider.StaticValueProvider.of(Boolean.TRUE)).build();
+ }
+
+ @Override
+ public PCollection expand(PCollection input) {
+
+ final SerializableFunction driverProviderFn = getDriverProviderFn();
+ final RowMapper rowMapper = getRowMapper();
+ SerializableFunction> parametersFunction =
+ getParametersFunction();
+
+ final String cypher = getProvidedValue(getCypher());
+ checkArgument(cypher != null, "please provide a cypher statement to execute");
+
+ SessionConfig sessionConfig = getSessionConfig();
+ if (sessionConfig == null) {
+ // Create a default session configuration as recommended by Neo4j
+ //
+ sessionConfig = SessionConfig.defaultConfig();
+ }
+
+ TransactionConfig transactionConfig = getTransactionConfig();
+ if (transactionConfig == null) {
+ transactionConfig = TransactionConfig.empty();
+ }
+
+ Boolean writeTransaction = getProvidedValue(getWriteTransaction());
+ if (writeTransaction == null) {
+ writeTransaction = Boolean.FALSE;
+ }
+
+ Boolean logCypher = getProvidedValue(getLogCypher());
+ if (logCypher == null) {
+ logCypher = Boolean.FALSE;
+ }
+
+ if (driverProviderFn == null) {
+ throw new RuntimeException("please provide a driver provider");
+ }
+ if (rowMapper == null) {
+ throw new RuntimeException("please provide a row mapper");
+ }
+ if (parametersFunction == null) {
+ parametersFunction = t -> Collections.emptyMap();
+ }
+
+ ReadFn readFn =
+ new ReadFn<>(
+ driverProviderFn,
+ sessionConfig,
+ transactionConfig,
+ cypher,
+ rowMapper,
+ parametersFunction,
+ writeTransaction,
+ logCypher);
+
+ return getOutputPCollection(input, readFn, getCoder());
+ }
+
+ @Override
+ public void populateDisplayData(DisplayData.Builder builder) {
+ super.populateDisplayData(builder);
+ String cypher = getProvidedValue(getCypher());
+ if (cypher == null) {
+ cypher = "";
+ }
+ builder.add(DisplayData.item("cypher", cypher));
+ SerializableFunction driverProviderFn = getDriverProviderFn();
+ if (driverProviderFn != null) {
+ if (driverProviderFn instanceof HasDisplayData) {
+ ((HasDisplayData) driverProviderFn).populateDisplayData(builder);
+ }
+ }
+ }
+
+ @AutoValue.Builder
+ abstract static class Builder {
+ abstract Builder setDriverProviderFn(
+ SerializableFunction driverProviderFn);
+
+ abstract Builder setCypher(ValueProvider