diff --git a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java index c1b273ca9bec2..5a0930cd35f5b 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/DebuggingWordCount.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples; +import java.util.Arrays; +import java.util.List; +import java.util.regex.Pattern; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.Default; @@ -29,17 +32,12 @@ import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Arrays; -import java.util.List; -import java.util.regex.Pattern; - /** - * An example that verifies word counts in Shakespeare and includes Dataflow best practices. + * An example that verifies word counts in Shakespeare and includes Beam best practices. * *

This class, {@link DebuggingWordCount}, is the third in a series of four successively more * detailed 'word count' examples. You may first want to take a look at {@link MinimalWordCount} diff --git a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java index 842cb54b40fd2..5f60524209c19 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/WindowedWordCount.java @@ -17,6 +17,13 @@ */ package org.apache.beam.examples; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -33,19 +40,9 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** * An example that counts words in text, and can run over either unbounded or bounded input @@ -186,8 +183,7 @@ public static interface Options extends WordCount.WordCountOptions, public static void main(String[] args) throws IOException { Options options = 
PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class); options.setBigQuerySchema(getSchema()); - // DataflowExampleUtils creates the necessary input sources to simplify execution of this - // Pipeline. + // ExampleUtils creates the necessary input sources to simplify execution of this Pipeline. ExampleUtils exampleUtils = new ExampleUtils(options); exampleUtils.setup(); diff --git a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java index 42d30bb5fa94a..d42d6214973d3 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/WordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/WordCount.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples; +import com.google.common.base.Strings; +import com.google.common.io.Resources; +import java.io.IOException; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.Default; @@ -37,11 +40,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.common.base.Strings; -import com.google.common.io.Resources; - -import java.io.IOException; - /** * An example that counts words in Shakespeare and includes Beam best practices. 
* diff --git a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleBigQueryTableOptions.java b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleBigQueryTableOptions.java index 54cc99ea32fc6..2eef525faed0b 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleBigQueryTableOptions.java +++ b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleBigQueryTableOptions.java @@ -17,14 +17,13 @@ */ package org.apache.beam.examples.common; +import com.google.api.services.bigquery.model.TableSchema; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.GcpOptions; import org.apache.beam.sdk.options.PipelineOptions; -import com.google.api.services.bigquery.model.TableSchema; - /** * Options that can be used to configure BigQuery tables in Beam examples. * The project defaults to the project being used to run the example. 
diff --git a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleOptions.java b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleOptions.java index 43afeb4c61128..8b7ed073f3593 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleOptions.java +++ b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleOptions.java @@ -17,26 +17,23 @@ */ package org.apache.beam.examples.common; +import com.google.common.base.MoreObjects; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.sdk.options.ApplicationNameOptions; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptions; - -import com.google.common.base.MoreObjects; - import org.joda.time.DateTimeUtils; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.concurrent.ThreadLocalRandom; - /** * Options that can be used to configure the Beam examples. 
*/ public interface ExampleOptions extends PipelineOptions { - @Description("Whether to keep jobs running on the Dataflow service after local process exit") + @Description("Whether to keep jobs running after local process exit") @Default.Boolean(false) boolean getKeepJobsRunning(); void setKeepJobsRunning(boolean keepJobsRunning); diff --git a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleUtils.java b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleUtils.java index 7f03fc0d1c6f6..eadb580a257c4 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/common/ExampleUtils.java +++ b/examples/java/src/main/java/org/apache/beam/examples/common/ExampleUtils.java @@ -17,13 +17,6 @@ */ package org.apache.beam.examples.common; -import org.apache.beam.sdk.PipelineResult; -import org.apache.beam.sdk.options.BigQueryOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PubsubOptions; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; -import org.apache.beam.sdk.util.Transport; - import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; import com.google.api.client.util.BackOff; @@ -43,12 +36,17 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Uninterruptibles; - import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.options.BigQueryOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PubsubOptions; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.apache.beam.sdk.util.Transport; /** * The utility class that sets up and tears down external resources, diff --git 
a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java index 0a93521821d84..e6a1495e545d5 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java +++ b/examples/java/src/main/java/org/apache/beam/examples/common/PubsubFileInjector.java @@ -17,6 +17,12 @@ */ package org.apache.beam.examples.common; +import com.google.api.services.pubsub.Pubsub; +import com.google.api.services.pubsub.model.PublishRequest; +import com.google.api.services.pubsub.model.PubsubMessage; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.Description; @@ -28,14 +34,6 @@ import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.Transport; -import com.google.api.services.pubsub.Pubsub; -import com.google.api.services.pubsub.model.PublishRequest; -import com.google.api.services.pubsub.model.PubsubMessage; -import com.google.common.collect.ImmutableMap; - -import java.io.IOException; -import java.util.Arrays; - /** * A batch Dataflow pipeline for injecting a set of GCS files into * a PubSub topic line by line. Empty lines are skipped. 
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java index 120c64fae54ac..56c7855e45e09 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java @@ -21,6 +21,21 @@ import static com.google.datastore.v1.client.DatastoreHelper.makeKey; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.common.base.MoreObjects; +import com.google.datastore.v1.Entity; +import com.google.datastore.v1.Key; +import com.google.datastore.v1.Value; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -53,26 +68,8 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; -import com.google.common.base.MoreObjects; -import com.google.datastore.v1.Entity; -import com.google.datastore.v1.Key; -import com.google.datastore.v1.Value; - import org.joda.time.Duration; -import java.io.IOException; -import java.util.ArrayList; -import 
java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - /** * An example that computes the most popular hash tags * for every prefix, which can be used for auto-completion. diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/README.md b/examples/java/src/main/java/org/apache/beam/examples/complete/README.md index 99c93ef4b82a5..b98be7a723960 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/README.md +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/README.md @@ -43,14 +43,14 @@ This directory contains end-to-end example pipelines that perform complex data p Windowing to perform time-based aggregations of data.

  &bull; TrafficMaxLaneFlow - — A streaming Cloud Dataflow example using BigQuery output in the + — A streaming Beam example using BigQuery output in the traffic sensor domain. Demonstrates the Cloud Dataflow streaming runner, sliding windows, Cloud Pub/Sub topic ingestion, the use of the AvroCoder to encode a custom class, and custom Combine transforms.
  &bull; TrafficRoutes - — A streaming Cloud Dataflow example using BigQuery output in the + — A streaming Beam example using BigQuery output in the traffic sensor domain. Demonstrates the Cloud Dataflow streaming runner, GroupByKey, keyed state, sliding windows, and Cloud Pub/Sub topic ingestion. diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java index 3f30f2150504a..348bab84b996d 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/StreamingWordExtract.java @@ -17,6 +17,11 @@ */ package org.apache.beam.examples.complete; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.io.IOException; +import java.util.ArrayList; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -31,15 +36,8 @@ import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import java.io.IOException; -import java.util.ArrayList; - /** - * A streaming Dataflow Example using BigQuery output. + * A streaming Beam example using BigQuery output. * *

    This pipeline example reads lines of the input text file, splits each line * into individual words, capitalizes those words, and writes the output to diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java index 76b6b6a060085..a5a939263ee4e 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TfIdf.java @@ -17,6 +17,12 @@ */ package org.apache.beam.examples.complete; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashSet; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; @@ -51,17 +57,9 @@ import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.TupleTag; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashSet; -import java.util.Set; - /** * An example that computes a basic TF-IDF search table for a directory or GCS prefix. 
* diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java index aff41ccb5a4ec..1b2064ad068ee 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TopWikipediaSessions.java @@ -17,6 +17,8 @@ */ package org.apache.beam.examples.complete; +import com.google.api.services.bigquery.model.TableRow; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.TableRowJsonCoder; import org.apache.beam.sdk.io.TextIO; @@ -38,14 +40,9 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableRow; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.List; - /** * An example that reads Wikipedia edit data from Cloud Storage and computes the user with * the longest string of edits separated by no more than an hour within each month. @@ -184,7 +181,7 @@ public void processElement(ProcessContext c) { /** * Options supported by this class. * - *

    Inherits standard Dataflow configuration options. + *

    Inherits standard Beam configuration options. */ private static interface Options extends PipelineOptions { @Description( diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java index 394b4327025c0..1b27e650f03e9 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficMaxLaneFlow.java @@ -17,6 +17,14 @@ */ package org.apache.beam.examples.complete; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import org.apache.avro.reflect.Nullable; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -39,24 +47,13 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import org.apache.avro.reflect.Nullable; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** - * A Dataflow Example that runs in both batch and streaming modes with traffic sensor data. 
+ * A Beam example that runs in both batch and streaming modes with traffic sensor data. * You can configure the running mode by setting {@literal --streaming} to true or false. * *

    Concepts: The batch and streaming runners, sliding windows, @@ -332,7 +329,7 @@ public static void main(String[] args) throws IOException { .withValidation() .as(TrafficMaxLaneFlowOptions.class); options.setBigQuerySchema(FormatMaxesFn.getSchema()); - // Using DataflowExampleUtils to set up required resources. + // Using ExampleUtils to set up required resources. ExampleUtils exampleUtils = new ExampleUtils(options); exampleUtils.setup(); diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java index ef716e9dd832d..f3c2d3936ee74 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java +++ b/examples/java/src/main/java/org/apache/beam/examples/complete/TrafficRoutes.java @@ -17,6 +17,19 @@ */ package org.apache.beam.examples.complete; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Hashtable; +import java.util.List; +import java.util.Map; +import org.apache.avro.reflect.Nullable; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -38,29 +51,13 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; 
-import com.google.common.collect.Lists; - -import org.apache.avro.reflect.Nullable; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Hashtable; -import java.util.List; -import java.util.Map; - /** - * A Dataflow Example that runs in both batch and streaming modes with traffic sensor data. + * A Beam example that runs in both batch and streaming modes with traffic sensor data. * You can configure the running mode by setting {@literal --streaming} to true or false. * *

    Concepts: The batch and streaming runners, GroupByKey, sliding windows. @@ -343,7 +340,7 @@ public static void main(String[] args) throws IOException { .as(TrafficRoutesOptions.class); options.setBigQuerySchema(FormatStatsFn.getSchema()); - // Using DataflowExampleUtils to set up required resources. + // Using ExampleUtils to set up required resources. ExampleUtils exampleUtils = new ExampleUtils(options); exampleUtils.setup(); diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java index 09d9c29734e90..439cf020aaa9f 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/BigQueryTornadoes.java @@ -17,6 +17,11 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.options.Default; @@ -31,13 +36,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import java.util.ArrayList; -import java.util.List; - /** * An example that reads the public samples of weather data from BigQuery, counts the number of * tornadoes that occur in each month, and writes the results to BigQuery. 
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java index 67918a3f74284..1d280a6e154b5 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/CombinePerKeyExamples.java @@ -17,6 +17,11 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.options.Default; @@ -34,13 +39,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import java.util.ArrayList; -import java.util.List; - /** * An example that reads the public 'Shakespeare' data, and for each word in * the dataset that is over a given length, generates a string containing the diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java index 215e2ffc78981..9a9e79968670e 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java @@ -22,6 +22,14 @@ import static com.google.datastore.v1.client.DatastoreHelper.makeKey; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; +import com.google.datastore.v1.Entity; +import 
com.google.datastore.v1.Key; +import com.google.datastore.v1.PropertyFilter; +import com.google.datastore.v1.Query; +import com.google.datastore.v1.Value; +import java.util.Map; +import java.util.UUID; +import javax.annotation.Nullable; import org.apache.beam.examples.WordCount; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; @@ -36,16 +44,6 @@ import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.ParDo; -import com.google.datastore.v1.Entity; -import com.google.datastore.v1.Key; -import com.google.datastore.v1.PropertyFilter; -import com.google.datastore.v1.Query; -import com.google.datastore.v1.Value; - -import java.util.Map; -import java.util.UUID; -import javax.annotation.Nullable; - /** * A WordCount example using DatastoreIO. * diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java index 9a0f7a2a54938..6c42520ff7c6e 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/FilterExamples.java @@ -17,6 +17,12 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Logger; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.options.Default; @@ -32,14 +38,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import 
java.util.ArrayList; -import java.util.List; -import java.util.logging.Logger; - /** * This is an example that demonstrates several approaches to filtering, and use of the Mean * transform. It shows how to dynamically set parameters by defining and using new pipeline options, diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java index 5ff2ce22a0d9e..1b91bf1e0615b 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/JoinExamples.java @@ -17,6 +17,7 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; @@ -33,8 +34,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TupleTag; -import com.google.api.services.bigquery.model.TableRow; - /** * This example shows how to do a join on two collections. 
* It uses a sample of the GDELT 'world event' data (http://goo.gl/OB6oin), joining the event diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java index 4f266d30ac968..3772a7bc5b86a 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/MaxPerKeyExamples.java @@ -17,6 +17,11 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.options.Default; @@ -31,13 +36,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - -import java.util.ArrayList; -import java.util.List; - /** * An example that reads the public samples of weather data from BigQuery, and finds * the maximum temperature ('mean_temp') for each month. 
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java index 04ac2c363ed64..db59435555458 100644 --- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java +++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/TriggerExample.java @@ -17,6 +17,13 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; import org.apache.beam.examples.common.ExampleBigQueryTableOptions; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; @@ -42,19 +49,9 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - /** * This example illustrates the basic concepts behind triggering. 
It shows how to use different * trigger definitions to produce partial (speculative) results before all the data is processed and diff --git a/examples/java/src/test/java/org/apache/beam/examples/DebuggingWordCountTest.java b/examples/java/src/test/java/org/apache/beam/examples/DebuggingWordCountTest.java index f463b1e651dbd..c1bd5d45e38bf 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/DebuggingWordCountTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/DebuggingWordCountTest.java @@ -18,16 +18,14 @@ package org.apache.beam.examples; import com.google.common.io.Files; - +import java.io.File; +import java.nio.charset.StandardCharsets; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.nio.charset.StandardCharsets; - /** * Tests for {@link DebuggingWordCount}. */ diff --git a/examples/java/src/test/java/org/apache/beam/examples/WordCountIT.java b/examples/java/src/test/java/org/apache/beam/examples/WordCountIT.java index f93dc2b2f7792..ca0c9d6bda09d 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/WordCountIT.java +++ b/examples/java/src/test/java/org/apache/beam/examples/WordCountIT.java @@ -18,6 +18,7 @@ package org.apache.beam.examples; +import java.util.Date; import org.apache.beam.examples.WordCount.WordCountOptions; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.PipelineOptionsFactory; @@ -25,13 +26,10 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.testing.TestPipelineOptions; import org.apache.beam.sdk.util.IOChannelUtils; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Date; - /** * End-to-end tests of WordCount. 
*/ diff --git a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java index 9d36a3e3e9703..98c5b17621be1 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/WordCountTest.java @@ -17,6 +17,8 @@ */ package org.apache.beam.examples; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.WordCount.CountWords; import org.apache.beam.examples.WordCount.ExtractWordsFn; import org.apache.beam.examples.WordCount.FormatAsTextFn; @@ -30,7 +32,6 @@ import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; @@ -38,9 +39,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests of WordCount. 
*/ diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java index 6f28dec0ecdb2..b6751c528bc87 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/complete/AutoCompleteTest.java @@ -17,6 +17,11 @@ */ package org.apache.beam.examples.complete; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; import org.apache.beam.examples.complete.AutoComplete.CompletionCandidate; import org.apache.beam.examples.complete.AutoComplete.ComputeTopCompletions; import org.apache.beam.sdk.Pipeline; @@ -33,19 +38,12 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - /** * Tests of AutoComplete. 
*/ diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/TfIdfTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/TfIdfTest.java index c7ce67e0c6958..c2d654ec18d99 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/complete/TfIdfTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/complete/TfIdfTest.java @@ -17,6 +17,8 @@ */ package org.apache.beam.examples.complete; +import java.net.URI; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringDelegateCoder; import org.apache.beam.sdk.testing.PAssert; @@ -27,15 +29,11 @@ import org.apache.beam.sdk.transforms.RemoveDuplicates; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.net.URI; -import java.util.Arrays; - /** * Tests of {@link TfIdf}. 
*/ diff --git a/examples/java/src/test/java/org/apache/beam/examples/complete/TopWikipediaSessionsTest.java b/examples/java/src/test/java/org/apache/beam/examples/complete/TopWikipediaSessionsTest.java index d19998ee3896d..42fb06a031b34 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/complete/TopWikipediaSessionsTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/complete/TopWikipediaSessionsTest.java @@ -17,22 +17,19 @@ */ package org.apache.beam.examples.complete; +import com.google.api.services.bigquery.model.TableRow; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableRow; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** Unit tests for {@link TopWikipediaSessions}. 
*/ @RunWith(JUnit4.class) public class TopWikipediaSessionsTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesIT.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesIT.java index fbd775cf50c8f..8bcab4a705221 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesIT.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesIT.java @@ -21,7 +21,6 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.testing.TestPipelineOptions; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesTest.java index b986c0bdfa3b8..87e1614ee0ca2 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/BigQueryTornadoesTest.java @@ -17,21 +17,18 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import java.util.List; import org.apache.beam.examples.cookbook.BigQueryTornadoes.ExtractTornadoesFn; import org.apache.beam.examples.cookbook.BigQueryTornadoes.FormatCountsFn; import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.values.KV; - -import com.google.api.services.bigquery.model.TableRow; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** * Test case for {@link BigQueryTornadoes}. 
*/ diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/CombinePerKeyExamplesTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/CombinePerKeyExamplesTest.java index 6d0b16793865f..34e06799e6408 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/CombinePerKeyExamplesTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/CombinePerKeyExamplesTest.java @@ -17,21 +17,18 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import java.util.List; import org.apache.beam.examples.cookbook.CombinePerKeyExamples.ExtractLargeWordsFn; import org.apache.beam.examples.cookbook.CombinePerKeyExamples.FormatShakespeareOutputFn; import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.values.KV; - -import com.google.api.services.bigquery.model.TableRow; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** Unit tests for {@link CombinePerKeyExamples}. 
*/ @RunWith(JUnit4.class) public class CombinePerKeyExamplesTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/DeDupExampleTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/DeDupExampleTest.java index 20e247062c499..c725e4f6bf02e 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/DeDupExampleTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/DeDupExampleTest.java @@ -17,6 +17,8 @@ */ package org.apache.beam.examples.cookbook; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.PAssert; @@ -25,15 +27,11 @@ import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.RemoveDuplicates; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link DeDupExample}. 
*/ @RunWith(JUnit4.class) public class DeDupExampleTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/FilterExamplesTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/FilterExamplesTest.java index 2598a971dd2ff..279478c50a243 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/FilterExamplesTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/FilterExamplesTest.java @@ -17,21 +17,18 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.cookbook.FilterExamples.FilterSingleMonthDataFn; import org.apache.beam.examples.cookbook.FilterExamples.ProjectionFn; import org.apache.beam.sdk.transforms.DoFnTester; - -import com.google.api.services.bigquery.model.TableRow; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link FilterExamples}. 
*/ @RunWith(JUnit4.class) public class FilterExamplesTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/JoinExamplesTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/JoinExamplesTest.java index 9b04667407552..60f71a2120f53 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/JoinExamplesTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/JoinExamplesTest.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.cookbook.JoinExamples.ExtractCountryInfoFn; import org.apache.beam.examples.cookbook.JoinExamples.ExtractEventDataFn; import org.apache.beam.sdk.Pipeline; @@ -27,9 +30,6 @@ import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableRow; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; @@ -37,9 +37,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link JoinExamples}. 
*/ @RunWith(JUnit4.class) public class JoinExamplesTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/MaxPerKeyExamplesTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/MaxPerKeyExamplesTest.java index 1d5bcf473c1cb..b5ea0fc4bf0e2 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/MaxPerKeyExamplesTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/MaxPerKeyExamplesTest.java @@ -17,22 +17,19 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.collect.ImmutableList; +import java.util.List; import org.apache.beam.examples.cookbook.MaxPerKeyExamples.ExtractTempFn; import org.apache.beam.examples.cookbook.MaxPerKeyExamples.FormatMaxesFn; import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.values.KV; - -import com.google.api.services.bigquery.model.TableRow; -import com.google.common.collect.ImmutableList; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** Unit tests for {@link MaxPerKeyExamples}. 
*/ @RunWith(JUnit4.class) public class MaxPerKeyExamplesTest { diff --git a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java index fee3c141358d6..3848ca1135e82 100644 --- a/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java +++ b/examples/java/src/test/java/org/apache/beam/examples/cookbook/TriggerExampleTest.java @@ -17,6 +17,13 @@ */ package org.apache.beam.examples.cookbook; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Joiner; +import com.google.common.collect.Lists; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; import org.apache.beam.examples.cookbook.TriggerExample.ExtractFlowInfo; import org.apache.beam.examples.cookbook.TriggerExample.TotalFlow; import org.apache.beam.sdk.Pipeline; @@ -32,11 +39,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.api.services.bigquery.model.TableRow; -import com.google.common.base.Joiner; -import com.google.common.collect.Lists; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Assert; @@ -45,11 +47,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; - /** * Unit Tests for {@link TriggerExample}. * The results generated by triggers are by definition non-deterministic and hence hard to test. 
diff --git a/examples/java8/src/main/java/org/apache/beam/examples/MinimalWordCountJava8.java b/examples/java8/src/main/java/org/apache/beam/examples/MinimalWordCountJava8.java index ff8ca552b57c7..a49da7bdfbb65 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/MinimalWordCountJava8.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/MinimalWordCountJava8.java @@ -17,6 +17,7 @@ */ package org.apache.beam.examples; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.PipelineOptions; @@ -28,8 +29,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptors; -import java.util.Arrays; - /** * An example that counts words in Shakespeare, using Java 8 language features. * diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java index 01ffb1de6883e..f9957ebc0e3b9 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/GameStats.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.util.HashMap; +import java.util.Map; +import java.util.TimeZone; import org.apache.beam.examples.common.ExampleUtils; import org.apache.beam.examples.complete.game.utils.WriteWindowedToBigQuery; import org.apache.beam.sdk.Pipeline; @@ -45,7 +48,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TypeDescriptors; - import org.joda.time.DateTimeZone; import org.joda.time.Duration; import org.joda.time.Instant; @@ -54,10 +56,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.HashMap; -import java.util.Map; -import java.util.TimeZone; - /** * This class is the fourth in a series 
of four pipelines that tell a story in a 'gaming' * domain, following {@link UserScore}, {@link HourlyTeamScore}, and {@link LeaderBoard}. diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/HourlyTeamScore.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/HourlyTeamScore.java index e489607dee4d6..d408e2132dabf 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/HourlyTeamScore.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/HourlyTeamScore.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.util.HashMap; +import java.util.Map; +import java.util.TimeZone; import org.apache.beam.examples.complete.game.utils.WriteWindowedToBigQuery; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; @@ -30,17 +33,12 @@ import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; - import org.joda.time.DateTimeZone; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.HashMap; -import java.util.Map; -import java.util.TimeZone; - /** * This class is the second in a series of four pipelines that tell a story in a 'gaming' * domain, following {@link UserScore}. 
In addition to the concepts introduced in {@link UserScore}, diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java index bd223059e3051..8dd4e39bee01f 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/LeaderBoard.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.util.HashMap; +import java.util.Map; +import java.util.TimeZone; import org.apache.beam.examples.common.ExampleOptions; import org.apache.beam.examples.common.ExampleUtils; import org.apache.beam.examples.complete.game.utils.WriteToBigQuery; @@ -39,17 +42,12 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.DateTimeZone; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.HashMap; -import java.util.Map; -import java.util.TimeZone; - /** * This class is the third in a series of four pipelines that tell a story in a 'gaming' domain, * following {@link UserScore} and {@link HourlyTeamScore}. 
Concepts include: processing unbounded diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java index c97eb4152991e..65036cee6b922 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/UserScore.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.util.HashMap; +import java.util.Map; +import org.apache.avro.reflect.Nullable; import org.apache.beam.examples.complete.game.utils.WriteToBigQuery; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AvroCoder; @@ -36,14 +39,9 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptors; - -import org.apache.avro.reflect.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.HashMap; -import java.util.Map; - /** * This class is the first in a series of four pipelines that tell a story in a 'gaming' domain. 
* Concepts: batch processing; reading input from Google Cloud Storage and writing output to diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java index 034a186462927..8f8bd9febc31c 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/Injector.java @@ -21,11 +21,6 @@ import com.google.api.services.pubsub.model.PublishRequest; import com.google.api.services.pubsub.model.PubsubMessage; import com.google.common.collect.ImmutableMap; - -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; - import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.io.IOException; @@ -36,6 +31,9 @@ import java.util.List; import java.util.Random; import java.util.TimeZone; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; /** diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/InjectorUtils.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/InjectorUtils.java index 53e644d67fe75..8cba6c2d05f0f 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/InjectorUtils.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/InjectorUtils.java @@ -29,7 +29,6 @@ import com.google.api.services.pubsub.Pubsub; import com.google.api.services.pubsub.PubsubScopes; import com.google.api.services.pubsub.model.Topic; - import java.io.IOException; class InjectorUtils { diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/RetryHttpInitializerWrapper.java 
b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/RetryHttpInitializerWrapper.java index 45be28791fc76..059999cae7d23 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/RetryHttpInitializerWrapper.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/injector/RetryHttpInitializerWrapper.java @@ -28,7 +28,6 @@ import com.google.api.client.http.HttpUnsuccessfulResponseHandler; import com.google.api.client.util.ExponentialBackOff; import com.google.api.client.util.Sleeper; - import java.io.IOException; import java.util.logging.Logger; diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java index 6af6e15a53217..40c4286f3afeb 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteToBigQuery.java @@ -17,6 +17,14 @@ */ package org.apache.beam.examples.complete.game.utils; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import org.apache.beam.examples.complete.game.UserScore; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; @@ -31,16 +39,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import 
com.google.api.services.bigquery.model.TableSchema; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - /** * Generate, format, and write BigQuery table row information. Use provided information about * the field names and types, as well as lambda functions that describe how to generate their diff --git a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java index c59fd61f68a92..09f3b6cf90d97 100644 --- a/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java +++ b/examples/java8/src/main/java/org/apache/beam/examples/complete/game/utils/WriteWindowedToBigQuery.java @@ -17,6 +17,8 @@ */ package org.apache.beam.examples.complete.game.utils; +import com.google.api.services.bigquery.model.TableRow; +import java.util.Map; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; @@ -27,10 +29,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; -import com.google.api.services.bigquery.model.TableRow; - -import java.util.Map; - /** * Generate, format, and write BigQuery table row information. 
Subclasses {@link WriteToBigQuery} * to require windowing; so this subclass may be used for writes that require access to the diff --git a/examples/java8/src/test/java/org/apache/beam/examples/MinimalWordCountJava8Test.java b/examples/java8/src/test/java/org/apache/beam/examples/MinimalWordCountJava8Test.java index 4dfa474c7cf51..85841a7801474 100644 --- a/examples/java8/src/test/java/org/apache/beam/examples/MinimalWordCountJava8Test.java +++ b/examples/java8/src/test/java/org/apache/beam/examples/MinimalWordCountJava8Test.java @@ -17,6 +17,15 @@ */ package org.apache.beam.examples; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.Serializable; +import java.nio.channels.FileChannel; +import java.nio.channels.SeekableByteChannel; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.GcsOptions; @@ -29,9 +38,6 @@ import org.apache.beam.sdk.util.gcsfs.GcsPath; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptors; - -import com.google.common.collect.ImmutableList; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -39,15 +45,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.io.IOException; -import java.io.Serializable; -import java.nio.channels.FileChannel; -import java.nio.channels.SeekableByteChannel; -import java.nio.file.Files; -import java.nio.file.StandardOpenOption; -import java.util.Arrays; -import java.util.List; - /** * To keep {@link MinimalWordCountJava8} simple, it is not factored or testable. This test * file should be maintained with a copy of its code for a basic smoke test. 
diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/GameStatsTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/GameStatsTest.java index df8800ddb5f6a..7cd03f365b345 100644 --- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/GameStatsTest.java +++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/GameStatsTest.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.complete.game.GameStats.CalculateSpammyUsers; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; @@ -25,16 +28,11 @@ import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** * Tests of GameStats. 
* Because the pipeline was designed for easy readability and explanations, it lacks good diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/HourlyTeamScoreTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/HourlyTeamScoreTest.java index b917b4cf535f9..f9fefb61f35c6 100644 --- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/HourlyTeamScoreTest.java +++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/HourlyTeamScoreTest.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.complete.game.UserScore.GameActionInfo; import org.apache.beam.examples.complete.game.UserScore.ParseEventFn; import org.apache.beam.sdk.Pipeline; @@ -31,17 +34,12 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptors; - import org.joda.time.Instant; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** * Tests of HourlyTeamScore. 
* Because the pipeline was designed for easy readability and explanations, it lacks good diff --git a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java index 75d371a8caa49..7c86adf3754fa 100644 --- a/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java +++ b/examples/java8/src/test/java/org/apache/beam/examples/complete/game/UserScoreTest.java @@ -17,6 +17,9 @@ */ package org.apache.beam.examples.complete.game; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.examples.complete.game.UserScore.ExtractAndSumScore; import org.apache.beam.examples.complete.game.UserScore.GameActionInfo; import org.apache.beam.examples.complete.game.UserScore.ParseEventFn; @@ -32,17 +35,12 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptors; - import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** * Tests of UserScore. */ diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/SideInputHandler.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/SideInputHandler.java new file mode 100644 index 0000000000000..a97d3f306d0ee --- /dev/null +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/SideInputHandler.java @@ -0,0 +1,239 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.core; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.SetCoder; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.ReadyCheckingSideInputReader; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.state.AccumulatorCombiningState; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.util.state.StateNamespaces; +import org.apache.beam.sdk.util.state.StateTag; +import org.apache.beam.sdk.util.state.StateTags; +import org.apache.beam.sdk.util.state.ValueState; +import org.apache.beam.sdk.values.PCollectionView; + +/** + * Generic side input handler that uses {@link StateInternals} to store all data. Both the actual + * side-input data and data about the windows for which we have side inputs available are stored + * using {@code StateInternals}. + * + *

    The given {@code StateInternals} must not be scoped to an element key. The state + * must instead be scoped to one key group for which the side input is being managed. + * + *

    This is useful for runners that transmit the side-input elements in band, as opposed + * to how Dataflow has an external service for managing side inputs. + * + *

    Note: storing the available windows in an extra state is redundant for now but in the + * future we might want to know which windows we have available so that we can garbage collect + * side input data. For now, this will never clean up side-input data because we have no way + * of knowing when we reach the GC horizon. + */ +public class SideInputHandler implements ReadyCheckingSideInputReader { + + /** The list of side inputs that we're handling. */ + protected final Collection> sideInputs; + + /** State internals that are scoped not to the key of a value but instead to one key group. */ + private final StateInternals stateInternals; + + /** + * A state tag for each side input that we handle. The state is used to track + * for which windows we have input available. + */ + private final Map< + PCollectionView, + StateTag< + Object, + AccumulatorCombiningState< + BoundedWindow, + Set, + Set>>> availableWindowsTags; + + /** + * State tag for the actual contents of each side input per window. + */ + private final Map< + PCollectionView, + StateTag>>>> sideInputContentsTags; + + /** + * Creates a new {@code SideInputHandler} for the given side inputs that uses + * the given {@code StateInternals} to store side input data and side-input meta data. 
+ */ + public SideInputHandler( + Collection> sideInputs, + StateInternals stateInternals) { + this.sideInputs = sideInputs; + this.stateInternals = stateInternals; + this.availableWindowsTags = new HashMap<>(); + this.sideInputContentsTags = new HashMap<>(); + + for (PCollectionView sideInput: sideInputs) { + + @SuppressWarnings("unchecked") + Coder windowCoder = + (Coder) sideInput + .getWindowingStrategyInternal() + .getWindowFn() + .windowCoder(); + + StateTag< + Object, + AccumulatorCombiningState< + BoundedWindow, + Set, + Set>> availableTag = StateTags.combiningValue( + "side-input-available-windows-" + sideInput.getTagInternal().getId(), + SetCoder.of(windowCoder), + new WindowSetCombineFn()); + + availableWindowsTags.put(sideInput, availableTag); + + Coder>> coder = sideInput.getCoderInternal(); + StateTag>>> stateTag = + StateTags.value("side-input-data-" + sideInput.getTagInternal().getId(), coder); + sideInputContentsTags.put(sideInput, stateTag); + } + } + + /** + * Add the given value to the internal side-input store of the given side input. This + * might change the result of {@link #isReady(PCollectionView, BoundedWindow)} for that side + * input. 
+ */ + public void addSideInputValue( + PCollectionView sideInput, + WindowedValue> value) { + + @SuppressWarnings("unchecked") + Coder windowCoder = + (Coder) sideInput + .getWindowingStrategyInternal() + .getWindowFn() + .windowCoder(); + + // reify the WindowedValue + List> inputWithReifiedWindows = new ArrayList<>(); + for (Object e: value.getValue()) { + inputWithReifiedWindows.add(value.withValue(e)); + } + + StateTag>>> stateTag = + sideInputContentsTags.get(sideInput); + + for (BoundedWindow window: value.getWindows()) { + stateInternals + .state(StateNamespaces.window(windowCoder, window), stateTag) + .write(inputWithReifiedWindows); + + stateInternals + .state(StateNamespaces.global(), availableWindowsTags.get(sideInput)) + .add(window); + } + } + + @Nullable + @Override + public T get(PCollectionView sideInput, BoundedWindow window) { + + if (!isReady(sideInput, window)) { + throw new IllegalStateException( + "Side input " + sideInput + " is not ready for window " + window); + } + + @SuppressWarnings("unchecked") + Coder windowCoder = + (Coder) sideInput + .getWindowingStrategyInternal() + .getWindowFn() + .windowCoder(); + + StateTag>>> stateTag = + sideInputContentsTags.get(sideInput); + + ValueState>> state = + stateInternals.state(StateNamespaces.window(windowCoder, window), stateTag); + + Iterable> elements = state.read(); + + return sideInput.fromIterableInternal(elements); + } + + @Override + public boolean isReady(PCollectionView sideInput, BoundedWindow window) { + Set readyWindows = + stateInternals.state(StateNamespaces.global(), availableWindowsTags.get(sideInput)).read(); + + boolean result = readyWindows != null && readyWindows.contains(window); + return result; + } + + @Override + public boolean contains(PCollectionView view) { + return sideInputs.contains(view); + } + + @Override + public boolean isEmpty() { + return sideInputs.isEmpty(); + } + + /** + * For keeping track of the windows for which we have available side input. 
+ */ + private static class WindowSetCombineFn + extends Combine.CombineFn, Set> { + + @Override + public Set createAccumulator() { + return new HashSet<>(); + } + + @Override + public Set addInput(Set accumulator, BoundedWindow input) { + accumulator.add(input); + return accumulator; + } + + @Override + public Set mergeAccumulators(Iterable> accumulators) { + Set result = new HashSet<>(); + for (Set acc: accumulators) { + result.addAll(acc); + } + return result; + } + + @Override + public Set extractOutput(Set accumulator) { + return accumulator; + } + } +} diff --git a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java index 3ce0c060c061e..73688d45592e9 100644 --- a/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java +++ b/runners/core-java/src/main/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSource.java @@ -17,11 +17,25 @@ */ package org.apache.beam.runners.core; -import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName; - import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import 
org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.ListCoder; @@ -40,29 +54,10 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -import javax.annotation.Nullable; - /** * {@link PTransform} that converts a {@link BoundedSource} as an {@link UnboundedSource}. 
* diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java index 739db45e667a6..7e26253a829ad 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/AssignWindowsDoFn.java @@ -19,18 +19,15 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.Iterables; +import java.util.Collection; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.WindowFn; - -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.Collection; - /** * {@link OldDoFn} that tags elements of a {@link PCollection} with windows, according to the * provided {@link WindowFn}. 
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/BatchTimerInternals.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/BatchTimerInternals.java index 0dd03ba085e58..f3e84a6861cb7 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/BatchTimerInternals.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/BatchTimerInternals.java @@ -19,17 +19,13 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; - import com.google.common.base.MoreObjects; - -import org.joda.time.Instant; - import java.util.HashSet; import java.util.PriorityQueue; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.joda.time.Instant; /** * TimerInternals that uses priority queues to manage the timers that are ready to fire. diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java index ed9ec10f0c688..8a0f6bf868d91 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunnerBase.java @@ -19,6 +19,17 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.IterableCoder; import org.apache.beam.sdk.options.PipelineOptions; @@ -37,19 +48,8 @@ import org.apache.beam.sdk.util.state.StateInternals; import 
org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.joda.time.Instant; import org.joda.time.format.PeriodFormat; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; /** * A base implementation of {@link DoFnRunner}. diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java index 60892281c7b78..c4df7b222036f 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/DoFnRunners.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.util; +import java.util.List; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory; import org.apache.beam.sdk.transforms.OldDoFn; @@ -26,8 +27,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TupleTag; -import java.util.List; - /** * Static utility methods that provide {@link DoFnRunner} implementations. 
*/ diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java index f872ffcf1bb09..cc418daec7e97 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFn.java @@ -17,18 +17,15 @@ */ package org.apache.beam.sdk.util; +import com.google.common.collect.Iterables; +import java.util.List; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.state.StateInternals; import org.apache.beam.sdk.util.state.StateInternalsFactory; import org.apache.beam.sdk.values.KV; - -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.List; - /** * The default batch {@link GroupAlsoByWindowsDoFn} implementation, if no specialized "fast path" * implementation is applicable. 
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java index f0f900744e73a..fdad17a8bb0c7 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/GroupByKeyViaGroupByKeyOnly.java @@ -19,6 +19,10 @@ import static com.google.common.base.Preconditions.checkArgument; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.IterableCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -32,11 +36,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - /** * An implementation of {@link GroupByKey} built on top of a lower-level {@link GroupByKeyOnly} * primitive. 
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java index 8b3ba24bb21cd..08c670e4753f5 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunner.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.values.KV; - import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; - +import org.apache.beam.sdk.transforms.Aggregator; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.values.KV; import org.joda.time.Instant; /** diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java index 0c5849e99cdc5..90c10b5c7449e 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PaneInfoTracker.java @@ -19,6 +19,8 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.beam.sdk.transforms.windowing.AfterWatermark; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.PaneInfo.PaneInfoCoder; @@ -28,11 +30,7 @@ import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; 
import org.apache.beam.sdk.util.state.ValueState; - -import com.google.common.annotations.VisibleForTesting; - import org.joda.time.Instant; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; /** * Determine the timing and other properties of a new pane for a given computation, key and window. diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunner.java index b1442dd1e24a5..d9f1fbffccb38 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunner.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.values.PCollectionView; - import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; - import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.values.PCollectionView; /** * A {@link DoFnRunner} that can refuse to process elements that are not ready, instead returning diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFn.java index c5ee1e1f940d0..8135a5beca530 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFn.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFn.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.util; +import java.io.Serializable; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.state.MergingStateAccessor; import org.apache.beam.sdk.util.state.ReadableState; 
import org.apache.beam.sdk.util.state.StateAccessor; - import org.joda.time.Instant; -import java.io.Serializable; - /** * Specification for processing to happen after elements have been grouped by key. * diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnContextFactory.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnContextFactory.java index 2d865086f74d4..5d27d51ad2f43 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnContextFactory.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnContextFactory.java @@ -20,6 +20,10 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.ImmutableMap; +import java.util.Collection; +import java.util.Map; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; @@ -36,16 +40,8 @@ import org.apache.beam.sdk.util.state.StateNamespaces; import org.apache.beam.sdk.util.state.StateNamespaces.WindowNamespace; import org.apache.beam.sdk.util.state.StateTag; - -import com.google.common.collect.ImmutableMap; - import org.joda.time.Instant; -import java.util.Collection; -import java.util.Map; - -import javax.annotation.Nullable; - /** * Factory for creating instances of the various {@link ReduceFn} contexts. 
*/ diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java index 1fa083097aada..61e5b21ebfd93 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/ReduceFnRunner.java @@ -20,6 +20,17 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Aggregator; import org.apache.beam.sdk.transforms.OldDoFn; @@ -41,24 +52,9 @@ import org.apache.beam.sdk.util.state.StateNamespaces.WindowNamespace; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.annotation.Nullable; - /** * Manages the execution of a {@link ReduceFn} after a {@link GroupByKeyOnly} has partitioned the * {@link PCollection} by key. 
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java index a0cdb40f779f3..6c1cf451d61a6 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SimpleDoFnRunner.java @@ -17,13 +17,13 @@ */ package org.apache.beam.sdk.util; +import java.util.List; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.DoFnRunners.OutputManager; import org.apache.beam.sdk.util.ExecutionContext.StepContext; import org.apache.beam.sdk.values.TupleTag; -import java.util.List; /** * Runs a {@link OldDoFn} by constructing the appropriate contexts and passing them in. diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SystemReduceFn.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SystemReduceFn.java index f7dca9433ec8d..28177a811cd3a 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/SystemReduceFn.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/SystemReduceFn.java @@ -18,6 +18,7 @@ package org.apache.beam.sdk.util; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.Combine.KeyedCombineFn; @@ -34,8 +35,6 @@ import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * {@link ReduceFn} implementing the default reduction behaviors of {@link GroupByKey}. 
* diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/TriggerRunner.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/TriggerRunner.java index e00b843e53417..a53fb8c175135 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/TriggerRunner.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/TriggerRunner.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.util.BitSet; +import java.util.Collection; +import java.util.Map; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.DefaultTrigger; import org.apache.beam.sdk.transforms.windowing.Trigger; @@ -27,18 +33,8 @@ import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; import org.apache.beam.sdk.util.state.ValueState; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; - import org.joda.time.Instant; -import java.util.BitSet; -import java.util.Collection; -import java.util.Map; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * Executes a trigger while managing persistence of information about which subtriggers are * finished. Subtriggers include all recursive trigger expressions as well as the entire trigger. 
diff --git a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java index 5c1700992c296..7404e1bc8ba3a 100644 --- a/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java +++ b/runners/core-java/src/main/java/org/apache/beam/sdk/util/WatermarkHold.java @@ -19,6 +19,10 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.io.Serializable; +import javax.annotation.Nullable; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; @@ -30,17 +34,9 @@ import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; import org.apache.beam.sdk.util.state.WatermarkHoldState; - -import com.google.common.annotations.VisibleForTesting; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import javax.annotation.Nullable; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * Implements the logic to hold the output watermark for a computation back * until it has seen all the elements it needs based on the input watermark for the diff --git a/runners/core-java/src/test/java/org/apache/beam/runners/core/SideInputHandlerTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/SideInputHandlerTest.java new file mode 100644 index 0000000000000..0bf5e90d0d6a2 --- /dev/null +++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/SideInputHandlerTest.java @@ -0,0 +1,220 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.core; + +import static org.hamcrest.Matchers.contains; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.testing.PCollectionViewTesting; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.FixedWindows; +import org.apache.beam.sdk.transforms.windowing.IntervalWindow; +import org.apache.beam.sdk.transforms.windowing.PaneInfo; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.util.state.InMemoryStateInternals; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.TupleTag; +import org.joda.time.Duration; +import org.joda.time.Instant; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** + * Unit tests for {@link SideInputHandler}. 
+ */ +@RunWith(JUnit4.class) +public class SideInputHandlerTest { + + private static final long WINDOW_MSECS_1 = 100; + private static final long WINDOW_MSECS_2 = 500; + + private WindowingStrategy windowingStrategy1 = + WindowingStrategy.of(FixedWindows.of(new Duration(WINDOW_MSECS_1))); + + private PCollectionView> view1 = PCollectionViewTesting.testingView( + new TupleTag>>() {}, + new PCollectionViewTesting.IdentityViewFn(), + StringUtf8Coder.of(), + windowingStrategy1); + + private WindowingStrategy windowingStrategy2 = + WindowingStrategy.of(FixedWindows.of(new Duration(WINDOW_MSECS_2))); + + private PCollectionView> view2 = PCollectionViewTesting.testingView( + new TupleTag>>() {}, + new PCollectionViewTesting.IdentityViewFn(), + StringUtf8Coder.of(), + windowingStrategy2); + + @Test + public void testIsEmpty() { + SideInputHandler sideInputHandler = new SideInputHandler( + ImmutableList.>of(view1), + InMemoryStateInternals.forKey(null)); + + assertFalse(sideInputHandler.isEmpty()); + + // create an empty handler + SideInputHandler emptySideInputHandler = new SideInputHandler( + ImmutableList.>of(), + InMemoryStateInternals.forKey(null)); + + assertTrue(emptySideInputHandler.isEmpty()); + } + + @Test + public void testContains() { + SideInputHandler sideInputHandler = new SideInputHandler( + ImmutableList.>of(view1), + InMemoryStateInternals.forKey(null)); + + assertTrue(sideInputHandler.contains(view1)); + assertFalse(sideInputHandler.contains(view2)); + } + + @Test + public void testIsReady() { + SideInputHandler sideInputHandler = new SideInputHandler( + ImmutableList.>of(view1, view2), + InMemoryStateInternals.forKey(null)); + + IntervalWindow firstWindow = + new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1)); + + IntervalWindow secondWindow = + new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_2)); + + + // side input should not yet be ready + assertFalse(sideInputHandler.isReady(view1, firstWindow)); + + // add a value for view1 
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Hello"), new Instant(0), firstWindow));
+
+    // now side input should be ready
+    assertTrue(sideInputHandler.isReady(view1, firstWindow));
+
+    // second window input should still not be ready
+    assertFalse(sideInputHandler.isReady(view1, secondWindow));
+  }
+
+  @Test
+  public void testNewInputReplacesPreviousInput() {
+    // new input should completely replace old input
+    // the creation of the Iterable that has the side input
+    // contents happens upstream. this is also where
+    // accumulation/discarding is decided.
+
+    SideInputHandler sideInputHandler = new SideInputHandler(
+        ImmutableList.<PCollectionView<?>>of(view1),
+        InMemoryStateInternals.forKey(null));
+
+    IntervalWindow window =
+        new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));
+
+    // add a first value for view1
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Hello"), new Instant(0), window));
+
+    Assert.assertThat(sideInputHandler.get(view1, window), contains("Hello"));
+
+    // subsequent values should replace existing values
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Ciao", "Buongiorno"), new Instant(0), window));
+
+    Assert.assertThat(sideInputHandler.get(view1, window), contains("Ciao", "Buongiorno"));
+  }
+
+  @Test
+  public void testMultipleWindows() {
+    SideInputHandler sideInputHandler = new SideInputHandler(
+        ImmutableList.<PCollectionView<?>>of(view1),
+        InMemoryStateInternals.forKey(null));
+
+    // two windows that we'll later use for adding elements/retrieving side input
+    IntervalWindow firstWindow =
+        new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));
+    IntervalWindow secondWindow =
+        new IntervalWindow(new Instant(1000), new Instant(1000 + WINDOW_MSECS_2));
+
+    // add a first value for view1 in the first window
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Hello"), new Instant(0), firstWindow));
+
+
    Assert.assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
+
+    // add something for second window of view1
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Arrivederci"), new Instant(0), secondWindow));
+
+    Assert.assertThat(sideInputHandler.get(view1, secondWindow), contains("Arrivederci"));
+
+    // contents for first window should be unaffected
+    Assert.assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
+  }
+
+  @Test
+  public void testMultipleSideInputs() {
+    SideInputHandler sideInputHandler = new SideInputHandler(
+        ImmutableList.<PCollectionView<?>>of(view1, view2),
+        InMemoryStateInternals.forKey(null));
+
+    // two windows that we'll later use for adding elements/retrieving side input
+    IntervalWindow firstWindow =
+        new IntervalWindow(new Instant(0), new Instant(WINDOW_MSECS_1));
+
+    // add value for view1 in the first window
+    sideInputHandler.addSideInputValue(
+        view1,
+        valuesInWindow(ImmutableList.of("Hello"), new Instant(0), firstWindow));
+
+    Assert.assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
+
+    // view2 should not have any data
+    assertFalse(sideInputHandler.isReady(view2, firstWindow));
+
+    // also add some data for view2
+    sideInputHandler.addSideInputValue(
+        view2,
+        valuesInWindow(ImmutableList.of("Salut"), new Instant(0), firstWindow));
+
+    assertTrue(sideInputHandler.isReady(view2, firstWindow));
+    Assert.assertThat(sideInputHandler.get(view2, firstWindow), contains("Salut"));
+
+    // view1 should not be affected by that
+    Assert.assertThat(sideInputHandler.get(view1, firstWindow), contains("Hello"));
+  }
+
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  private WindowedValue<Iterable<String>> valuesInWindow(
+      Iterable<String> values, Instant timestamp, BoundedWindow window) {
+    return (WindowedValue) WindowedValue.of(values, timestamp, window, PaneInfo.NO_FIRING);
+  }
+}
diff --git
a/runners/core-java/src/test/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSourceTest.java b/runners/core-java/src/test/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSourceTest.java index 45c1414e2a578..efc446e008fa9 100644 --- a/runners/core-java/src/test/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSourceTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/runners/core/UnboundedReadFromBoundedSourceTest.java @@ -21,6 +21,18 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Random; import org.apache.beam.runners.core.UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter; import org.apache.beam.runners.core.UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter.Checkpoint; import org.apache.beam.runners.core.UnboundedReadFromBoundedSource.BoundedToUnboundedSourceAdapter.CheckpointCoder; @@ -45,11 +57,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; @@ -59,16 +66,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.channels.ReadableByteChannel; -import java.util.List; -import 
java.util.NoSuchElementException; -import java.util.Random; - /** * Unit tests for {@link UnboundedReadFromBoundedSource}. */ diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/BatchTimerInternalsTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/BatchTimerInternalsTest.java index 3e1528f3ac503..20a9852b09580 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/BatchTimerInternalsTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/BatchTimerInternalsTest.java @@ -20,7 +20,6 @@ import org.apache.beam.sdk.util.TimerInternals.TimerData; import org.apache.beam.sdk.util.state.StateNamespace; import org.apache.beam.sdk.util.state.StateNamespaceForTest; - import org.joda.time.Instant; import org.junit.Before; import org.junit.Test; diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsProperties.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsProperties.java index 43c287e434155..215cd4c38cb31 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsProperties.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsProperties.java @@ -23,6 +23,14 @@ import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertThat; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.transforms.DoFnTester.CloningBehavior; @@ -38,20 +46,9 @@ import org.apache.beam.sdk.util.state.StateInternalsFactory; import org.apache.beam.sdk.values.KV; import 
org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - /** * Properties of {@link GroupAlsoByWindowsDoFn}. * diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFnTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFnTest.java index 1f02a8f997b99..a1586c8bf2d9d 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFnTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/GroupAlsoByWindowsViaOutputBufferDoFnTest.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.GroupAlsoByWindowsProperties.GroupAlsoByWindowsDoFnFactory; import org.apache.beam.sdk.util.state.StateInternalsFactory; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunnerTest.java index d929d39430116..c63e43ef55b03 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunnerTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/LateDataDroppingDoFnRunnerTest.java @@ -22,16 +22,15 @@ import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.util.Arrays; import 
org.apache.beam.sdk.transforms.Aggregator; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.LateDataDroppingDoFnRunner.LateDataFilter; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; @@ -41,8 +40,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.util.Arrays; - /** * Unit tests for {@link LateDataDroppingDoFnRunner}. */ diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunnerTest.java index 88851187bb65d..f8ad291d9bc95 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunnerTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/PushbackSideInputDoFnRunnerTest.java @@ -24,6 +24,9 @@ import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.Sum; @@ -34,9 +37,6 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Before; @@ -47,9 +47,6 @@ import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -import java.util.ArrayList; -import java.util.List; - /** * Tests for {@link PushbackSideInputDoFnRunner}. 
*/ diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java index 8d604cb2b9e03..647495cc344a3 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnRunnerTest.java @@ -17,11 +17,9 @@ */ package org.apache.beam.sdk.util; +import static com.google.common.base.Preconditions.checkArgument; import static org.apache.beam.sdk.WindowMatchers.isSingleWindowedValue; import static org.apache.beam.sdk.WindowMatchers.isWindowedValue; - -import static com.google.common.base.Preconditions.checkArgument; - import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyIterable; @@ -37,6 +35,9 @@ import static org.mockito.Mockito.when; import static org.mockito.Mockito.withSettings; +import com.google.common.collect.Iterables; +import java.util.Iterator; +import java.util.List; import org.apache.beam.sdk.WindowMatchers; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.beam.sdk.options.PipelineOptions; @@ -67,9 +68,6 @@ import org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.collect.Iterables; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; @@ -83,9 +81,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.Iterator; -import java.util.List; - /** * Tests for {@link ReduceFnRunner}. 
These tests instantiate a full "stack" of * {@link ReduceFnRunner} with enclosed {@link ReduceFn}, down to the installed {@link Trigger} diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java index feba191267e4f..24e33ddf310ce 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/ReduceFnTester.java @@ -19,11 +19,30 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.PriorityQueue; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.IterableCoder; @@ -57,33 +76,9 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.collect.FluentIterable; -import com.google.common.collect.ImmutableList; -import 
com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.PriorityQueue; -import java.util.Set; - -import javax.annotation.Nullable; - /** * Test utility that runs a {@link ReduceFn}, {@link WindowFn}, {@link Trigger} using in-memory stub * implementations to provide the {@link TimerInternals} and {@link WindowingInternals} needed to diff --git a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java index f0c52b99a377a..156b4a92ca17b 100644 --- a/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java +++ b/runners/core-java/src/test/java/org/apache/beam/sdk/util/SimpleDoFnRunnerTest.java @@ -20,19 +20,17 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.BaseExecutionContext.StepContext; import org.apache.beam.sdk.values.TupleTag; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for base {@link DoFnRunnerBase} functionality. 
*/ diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/AggregatorContainer.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/AggregatorContainer.java index 75e6558bb7ee4..06490dc553556 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/AggregatorContainer.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/AggregatorContainer.java @@ -17,10 +17,6 @@ */ package org.apache.beam.runners.direct; -import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory; -import org.apache.beam.sdk.transforms.Combine.CombineFn; -import org.apache.beam.sdk.util.ExecutionContext; import com.google.auto.value.AutoValue; import com.google.common.base.Preconditions; import java.util.Arrays; @@ -30,6 +26,10 @@ import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; +import org.apache.beam.sdk.transforms.Aggregator; +import org.apache.beam.sdk.transforms.Aggregator.AggregatorFactory; +import org.apache.beam.sdk.transforms.Combine.CombineFn; +import org.apache.beam.sdk.util.ExecutionContext; /** * AccumT container for the current values associated with {@link Aggregator Aggregators}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java index 0c4b7fd9bc800..2b15ad002eb51 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactory.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.direct; +import java.io.IOException; +import java.util.Queue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentMap; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.io.BoundedSource; @@ -28,14 +34,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; -import java.io.IOException; -import java.util.Queue; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ConcurrentMap; - -import javax.annotation.Nullable; - /** * A {@link TransformEvaluatorFactory} that produces {@link TransformEvaluator TransformEvaluators} * for the {@link Bounded Read.Bounded} primitive {@link PTransform}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedResult.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedResult.java index 7e0cd8e1d49a0..5fcf7b313b7f1 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedResult.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/CommittedResult.java @@ -18,15 +18,12 @@ package org.apache.beam.runners.direct; -import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.transforms.View.CreatePCollectionView; - import com.google.auto.value.AutoValue; - import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.transforms.View.CreatePCollectionView; /** * A {@link TransformResult} that has been committed. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitor.java index 3300723ad298f..4fdfea0717021 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitor.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkState; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.Pipeline.PipelineVisitor; import org.apache.beam.sdk.runners.PipelineRunner; @@ -29,13 +35,6 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.PValue; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - /** * Tracks the {@link AppliedPTransform AppliedPTransforms} that consume each {@link PValue} in the * {@link Pipeline}. 
This is used to schedule consuming {@link PTransform PTransforms} to consume diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRegistrar.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRegistrar.java index 772777e0c8357..9a34d3d9dfb44 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRegistrar.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRegistrar.java @@ -17,14 +17,13 @@ */ package org.apache.beam.runners.direct; +import com.google.auto.service.AutoService; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsRegistrar; import org.apache.beam.sdk.runners.PipelineRunner; import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; -import com.google.auto.service.AutoService; -import com.google.common.collect.ImmutableList; - /** * Contains the {@link PipelineRunnerRegistrar} and {@link PipelineOptionsRegistrar} for the * {@link org.apache.beam.runners.direct.DirectRunner}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java index 68184dec7d852..b2d61c30aa3d2 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectRunner.java @@ -17,6 +17,17 @@ */ package org.apache.beam.runners.direct; +import com.google.common.base.MoreObjects; +import com.google.common.base.Supplier; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.DirectPipelineResult; import org.apache.beam.runners.direct.TestStreamEvaluatorFactory.DirectTestStreamFactory; @@ -46,23 +57,9 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.PValue; - -import com.google.common.base.MoreObjects; -import com.google.common.base.Supplier; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - /** * An In-Memory implementation of the Dataflow Programming Model. Supports Unbounded * {@link PCollection PCollections}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java index a4705ddce40d5..4003983613851 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DirectTimerInternals.java @@ -17,15 +17,13 @@ */ package org.apache.beam.runners.direct; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate.TimerUpdateBuilder; import org.apache.beam.runners.direct.WatermarkManager.TransformWatermarks; import org.apache.beam.sdk.util.TimerInternals; - import org.joda.time.Instant; -import javax.annotation.Nullable; - /** * An implementation of {@link TimerInternals} where all relevant data exists in memory. */ diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManager.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManager.java index 3f4f2c6a880d6..0e15c18c81100 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManager.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManager.java @@ -18,21 +18,18 @@ package org.apache.beam.runners.direct; -import org.apache.beam.sdk.runners.PipelineRunner; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.util.SerializableUtils; - import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; +import org.apache.beam.sdk.runners.PipelineRunner; 
+import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.util.SerializableUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Manages {@link DoFn} setup, teardown, and serialization. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluator.java index 523273cdd1ecd..faa06151c31b7 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluator.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluator.java @@ -19,7 +19,6 @@ package org.apache.beam.runners.direct; import org.apache.beam.sdk.util.WindowedValue; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java index b9f159a2d5988..5af25bc5e8067 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/EvaluationContext.java @@ -19,6 +19,17 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.util.concurrent.MoreExecutors; +import java.util.Collection; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.CommittedResult.OutputType; import 
org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; @@ -42,23 +53,8 @@ import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import com.google.common.util.concurrent.MoreExecutors; - import org.joda.time.Instant; -import java.util.Collection; -import java.util.EnumSet; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -import javax.annotation.Nullable; - /** * The evaluation context for a specific pipeline being executed by the * {@link DirectRunner}. Contains state shared within the execution across all diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java index 35b6239de81c7..401ed7fb80c45 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ExecutorServiceParallelExecutor.java @@ -17,20 +17,6 @@ */ package org.apache.beam.runners.direct; -import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; -import org.apache.beam.runners.direct.WatermarkManager.FiredTimers; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.util.KeyedWorkItem; -import org.apache.beam.sdk.util.KeyedWorkItems; -import org.apache.beam.sdk.util.TimeDomain; -import org.apache.beam.sdk.util.TimerInternals.TimerData; -import 
org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionView; -import org.apache.beam.sdk.values.PValue; - import com.google.auto.value.AutoValue; import com.google.common.base.MoreObjects; import com.google.common.base.Optional; @@ -39,10 +25,6 @@ import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -56,8 +38,22 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; - import javax.annotation.Nullable; +import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; +import org.apache.beam.runners.direct.WatermarkManager.FiredTimers; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.util.KeyedWorkItem; +import org.apache.beam.sdk.util.KeyedWorkItems; +import org.apache.beam.sdk.util.TimeDomain; +import org.apache.beam.sdk.util.TimerInternals.TimerData; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.PValue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An {@link PipelineExecutor} that uses an underlying {@link ExecutorService} and diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java index d16ffa0bf5171..2da70bbe56289 100644 --- 
a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupAlsoByWindowEvaluatorFactory.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.direct; +import com.google.common.collect.ImmutableMap; +import java.util.Collections; import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetDoFn; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupAlsoByWindow; @@ -40,10 +42,6 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.collect.ImmutableMap; - -import java.util.Collections; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link GroupByKeyOnly} {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactory.java index dbdbdaf4ea348..f085a39fda9bf 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactory.java @@ -17,9 +17,14 @@ */ package org.apache.beam.runners.direct; -import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray; import static com.google.common.base.Preconditions.checkState; +import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -37,12 +42,6 
@@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link GroupByKeyOnly} {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java index 8be12fdb36734..d5c0f0c354e60 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactory.java @@ -19,6 +19,8 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.SetMultimap; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.Coder; @@ -29,10 +31,6 @@ import org.apache.beam.sdk.util.MutationDetectors; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.SetMultimap; - import org.joda.time.Instant; /** diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactory.java index b0eb38f13addc..1602f68ab3a90 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactory.java @@ -17,6 +17,8 @@ */ package 
org.apache.beam.runners.direct; +import java.util.IdentityHashMap; +import java.util.Map; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -27,9 +29,6 @@ import org.apache.beam.sdk.util.UserCodeException; import org.apache.beam.sdk.util.WindowedValue; -import java.util.IdentityHashMap; -import java.util.Map; - /** * {@link ModelEnforcement} that enforces elements are not modified over the course of processing * an element. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutableListBundleFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutableListBundleFactory.java index 25a0d05cb128d..e79da7baa751b 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutableListBundleFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ImmutableListBundleFactory.java @@ -19,15 +19,13 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.VoidCoder; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; /** diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitor.java index 2fea00a6e5d4b..7c4376a2c4b4c 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitor.java +++ 
b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitor.java @@ -19,15 +19,14 @@ import static com.google.common.base.Preconditions.checkState; +import java.util.HashSet; +import java.util.Set; import org.apache.beam.sdk.Pipeline.PipelineVisitor; import org.apache.beam.sdk.runners.TransformTreeNode; import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PValue; -import java.util.HashSet; -import java.util.Set; - /** * A pipeline visitor that tracks all keyed {@link PValue PValues}. A {@link PValue} is keyed if it * is the result of a {@link PTransform} that produces keyed outputs. A {@link PTransform} that diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/NanosOffsetClock.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/NanosOffsetClock.java index 77fa1967c4345..5a2b18da0f244 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/NanosOffsetClock.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/NanosOffsetClock.java @@ -17,9 +17,8 @@ */ package org.apache.beam.runners.direct; -import org.joda.time.Instant; - import java.util.concurrent.TimeUnit; +import org.joda.time.Instant; /** * A {@link Clock} that uses {@link System#nanoTime()} to track the progress of time. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java index 6ef0ffe29c4a0..85a1c6af13858 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoEvaluator.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.direct; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -34,14 +40,6 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.collect.ImmutableList; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - class ParDoEvaluator implements TransformEvaluator { public static ParDoEvaluator create( EvaluationContext evaluationContext, diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java index 2d05e68b66595..6a41adffe6a92 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactory.java @@ -17,6 +17,10 @@ */ package org.apache.beam.runners.direct; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import java.util.Map; import 
org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -26,16 +30,9 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Map; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link BoundMulti} primitive {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java index 97cbfa7f7b2ff..4bb740be012f4 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactory.java @@ -17,6 +17,11 @@ */ package org.apache.beam.runners.direct; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.collect.ImmutableMap; +import java.util.Collections; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -25,17 +30,9 @@ import org.apache.beam.sdk.transforms.ParDo.Bound; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.ImmutableMap; 
- import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Collections; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link Bound ParDo.Bound} primitive {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/PipelineExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/PipelineExecutor.java index 76df11cc2588f..01a5c54831638 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/PipelineExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/PipelineExecutor.java @@ -17,12 +17,11 @@ */ package org.apache.beam.runners.direct; +import java.util.Collection; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; -import java.util.Collection; - /** * An executor that schedules and executes {@link AppliedPTransform AppliedPTransforms} for both * source and intermediate {@link PTransform PTransforms}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SideInputContainer.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SideInputContainer.java index 6458215503b94..cd459e4c0f5d7 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SideInputContainer.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/SideInputContainer.java @@ -19,15 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.util.PCollectionViewWindow; -import org.apache.beam.sdk.util.ReadyCheckingSideInputReader; -import org.apache.beam.sdk.util.SideInputReader; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingStrategy; -import org.apache.beam.sdk.values.PCollectionView; - import com.google.common.base.MoreObjects; import com.google.common.base.Optional; import com.google.common.cache.CacheBuilder; @@ -36,7 +27,6 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -44,8 +34,15 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; - import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.PaneInfo; +import org.apache.beam.sdk.util.PCollectionViewWindow; +import org.apache.beam.sdk.util.ReadyCheckingSideInputReader; +import org.apache.beam.sdk.util.SideInputReader; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.values.PCollectionView; /** * An in-process container for {@link PCollectionView PCollectionViews}, which 
provides methods for diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepAndKey.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepAndKey.java index 18fe04ffe9438..e18b2ac9b6aa6 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepAndKey.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepAndKey.java @@ -17,11 +17,9 @@ */ package org.apache.beam.runners.direct; -import org.apache.beam.sdk.transforms.AppliedPTransform; - import com.google.common.base.MoreObjects; - import java.util.Objects; +import org.apache.beam.sdk.transforms.AppliedPTransform; /** * A (Step, Key) pair. This is useful as a map key or cache key for things that are available diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepTransformResult.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepTransformResult.java index 12b18cbb8a7ab..1829e4aea92db 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepTransformResult.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/StepTransformResult.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.direct; +import com.google.auto.value.AutoValue; +import com.google.common.collect.ImmutableList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate; @@ -24,18 +30,8 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.state.CopyOnAccessInMemoryStateInternals; - -import com.google.auto.value.AutoValue; -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; -import 
java.util.Collection; -import java.util.EnumSet; -import java.util.Set; - -import javax.annotation.Nullable; - /** * An immutable {@link TransformResult}. */ diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java index 90a83b0b840f4..e9f37bab7567d 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TestStreamEvaluatorFactory.java @@ -20,6 +20,11 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.Supplier; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.Pipeline; @@ -41,18 +46,9 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.base.Supplier; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import javax.annotation.Nullable; - /** * The {@link TransformEvaluatorFactory} for the {@link TestStream} primitive. 
*/ diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java index 3655d26dd553c..e9fa06b107f70 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorFactory.java @@ -17,6 +17,7 @@ */ package org.apache.beam.runners.direct; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.Read; @@ -24,8 +25,6 @@ import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.PTransform; -import javax.annotation.Nullable; - /** * A factory for creating instances of {@link TransformEvaluator} for the application of a * {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorRegistry.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorRegistry.java index c35e8b14c84a2..9edc50f8f4c6f 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorRegistry.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformEvaluatorRegistry.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupAlsoByWindow; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; @@ -29,19 +35,9 @@ import 
org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.windowing.Window; - -import com.google.common.collect.ImmutableMap; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; - -import javax.annotation.Nullable; - /** * A {@link TransformEvaluatorFactory} that delegates to primitive {@link TransformEvaluatorFactory} * implementations based on the type of {@link PTransform} of the application. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java index d873bf581a8a2..cc6b5b7a93cab 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutor.java @@ -19,16 +19,14 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.util.WindowedValue; - import java.util.ArrayList; import java.util.Collection; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicReference; - import javax.annotation.Nullable; +import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.util.WindowedValue; /** * A {@link Callable} responsible for constructing a {@link TransformEvaluator} from a diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java index ea15f03df1b57..876da9d5cd2ea 100644 --- 
a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformExecutorServices.java @@ -18,7 +18,6 @@ package org.apache.beam.runners.direct; import com.google.common.base.MoreObjects; - import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java index c01fa5647a92d..0b08294ae4b4a 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/TransformResult.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.direct; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate; @@ -25,13 +27,8 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.state.CopyOnAccessInMemoryStateInternals; - import org.joda.time.Instant; -import java.util.Set; - -import javax.annotation.Nullable; - /** * The result of evaluating an {@link AppliedPTransform} with a {@link TransformEvaluator}. 
*/ diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadDeduplicator.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadDeduplicator.java index 0246236e2b4f3..2371d3b9d3da5 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadDeduplicator.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadDeduplicator.java @@ -17,18 +17,15 @@ */ package org.apache.beam.runners.direct; -import org.apache.beam.sdk.coders.ByteArrayCoder; -import org.apache.beam.sdk.io.Read.Unbounded; -import org.apache.beam.sdk.transforms.PTransform; - import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - -import org.joda.time.Duration; - import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.beam.sdk.coders.ByteArrayCoder; +import org.apache.beam.sdk.io.Read.Unbounded; +import org.apache.beam.sdk.transforms.PTransform; +import org.joda.time.Duration; /** * Provides methods to determine if a record is a duplicate within the evaluation of a diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactory.java index c4d408b232f04..9f485e0f65ab3 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactory.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.direct; +import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentMap; +import javax.annotation.Nullable; 
import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.io.Read.Unbounded; @@ -29,18 +35,8 @@ import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.annotations.VisibleForTesting; - import org.joda.time.Instant; -import java.io.IOException; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.ConcurrentMap; - -import javax.annotation.Nullable; - /** * A {@link TransformEvaluatorFactory} that produces {@link TransformEvaluator TransformEvaluators} * for the {@link Unbounded Read.Unbounded} primitive {@link PTransform}. diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundleOutputManager.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundleOutputManager.java index 570dc9031e974..41f7e8d64b396 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundleOutputManager.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/UncommittedBundleOutputManager.java @@ -17,14 +17,13 @@ */ package org.apache.beam.runners.direct; +import java.util.Map; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.util.DoFnRunners.OutputManager; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.TupleTag; -import java.util.Map; - /** * An {@link OutputManager} that outputs to {@link CommittedBundle Bundles} used by the * {@link DirectRunner}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewEvaluatorFactory.java index 3b0de4b6f8df0..40ac7f09ac2ca 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/ViewEvaluatorFactory.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.direct; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectRunner.PCollectionViewWriter; import org.apache.beam.runners.direct.StepTransformResult.Builder; @@ -34,9 +36,6 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; -import java.util.ArrayList; -import java.util.List; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link CreatePCollectionView} primitive {@link PTransform}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkCallbackExecutor.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkCallbackExecutor.java index 0f73b1da93494..7961f24c6125c 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkCallbackExecutor.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkCallbackExecutor.java @@ -17,19 +17,16 @@ */ package org.apache.beam.runners.direct; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.WindowingStrategy; - import com.google.common.collect.ComparisonChain; import com.google.common.collect.Ordering; - -import org.joda.time.Instant; - import java.util.PriorityQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.joda.time.Instant; /** * Executes callbacks that occur based on the progression of the watermark per-step. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java index a44fa501c434f..ff7428dda2cc4 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WatermarkManager.java @@ -19,18 +19,6 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.TimeDomain; -import org.apache.beam.sdk.util.TimerInternals; -import org.apache.beam.sdk.util.TimerInternals.TimerData; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PValue; - import com.google.auto.value.AutoValue; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; @@ -43,9 +31,6 @@ import com.google.common.collect.Ordering; import com.google.common.collect.SortedMultiset; import com.google.common.collect.TreeMultiset; - -import org.joda.time.Instant; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -61,8 +46,19 @@ import java.util.TreeSet; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicReference; - import javax.annotation.Nullable; +import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.TimeDomain; 
+import org.apache.beam.sdk.util.TimerInternals; +import org.apache.beam.sdk.util.TimerInternals.TimerData; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PValue; +import org.joda.time.Instant; /** * Manages watermarks of {@link PCollection PCollections} and input and output watermarks of diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WindowEvaluatorFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WindowEvaluatorFactory.java index f2e62cb0445d8..19c1a98e7ef50 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WindowEvaluatorFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WindowEvaluatorFactory.java @@ -17,6 +17,9 @@ */ package org.apache.beam.runners.direct; +import com.google.common.collect.Iterables; +import java.util.Collection; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -28,15 +31,8 @@ import org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.Collection; - -import javax.annotation.Nullable; - /** * The {@link DirectRunner} {@link TransformEvaluatorFactory} for the * {@link Bound Window.Bound} primitive {@link PTransform}. 
diff --git a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java index 1ab3403f45bc2..d74cd56735b61 100644 --- a/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java +++ b/runners/direct-java/src/main/java/org/apache/beam/runners/direct/WriteWithShardingFactory.java @@ -20,6 +20,8 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.annotations.VisibleForTesting; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.sdk.io.Write; import org.apache.beam.sdk.io.Write.Bound; import org.apache.beam.sdk.transforms.Count; @@ -39,13 +41,8 @@ import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; - -import com.google.common.annotations.VisibleForTesting; - import org.joda.time.Duration; -import java.util.concurrent.ThreadLocalRandom; - /** * A {@link PTransformOverrideFactory} that overrides {@link Write} {@link PTransform PTransforms} * with an unspecified number of shards with a write with a specified number of shards. 
The number diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/AggregatorContainerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/AggregatorContainerTest.java index 035a1b03be070..c8310c90e79a8 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/AggregatorContainerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/AggregatorContainerTest.java @@ -21,6 +21,9 @@ import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; import org.apache.beam.sdk.transforms.Aggregator; import org.apache.beam.sdk.transforms.Sum.SumIntegerFn; import org.apache.beam.sdk.util.ExecutionContext.StepContext; @@ -28,15 +31,15 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; /** * Tests for {@link AggregatorContainer}. 
*/ +@RunWith(JUnit4.class) public class AggregatorContainerTest { @Rule diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactoryTest.java index 9bc4f7b7904b3..cbeb733a1daf5 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/BoundedReadEvaluatorFactoryTest.java @@ -25,6 +25,11 @@ import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.NoSuchElementException; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.BigEndianLongCoder; @@ -40,9 +45,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Before; import org.junit.Test; @@ -51,11 +53,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.NoSuchElementException; - /** * Tests for {@link BoundedReadEvaluatorFactory}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/CommittedResultTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/CommittedResultTest.java index a8c647e64682f..efc6d2f81abdb 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/CommittedResultTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/CommittedResultTest.java @@ -21,6 +21,11 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.io.Serializable; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -31,20 +36,12 @@ import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; - /** * Tests for {@link CommittedResult}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java index 529316cd30275..1c9b5a6da6d63 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ConsumerTrackingPipelineVisitorTest.java @@ -20,6 +20,8 @@ import static org.hamcrest.Matchers.emptyIterable; import static org.junit.Assert.assertThat; +import java.io.Serializable; +import java.util.List; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -35,7 +37,6 @@ import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.PValue; - import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -43,9 +44,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.List; - /** * Tests for {@link ConsumerTrackingPipelineVisitor}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRegistrarTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRegistrarTest.java index d5cdee657180c..603e43e30f6be 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRegistrarTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRegistrarTest.java @@ -20,20 +20,17 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.util.ServiceLoader; import org.apache.beam.runners.direct.DirectRegistrar.Options; import org.apache.beam.runners.direct.DirectRegistrar.Runner; import org.apache.beam.sdk.options.PipelineOptionsRegistrar; import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ServiceLoader; - /** Tests for {@link DirectRegistrar}. 
*/ @RunWith(JUnit4.class) public class DirectRegistrarTest { diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java index 9739adb0f3662..c7efac388b682 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectRunnerTest.java @@ -22,6 +22,13 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import com.fasterxml.jackson.annotation.JsonValue; +import com.google.common.collect.ImmutableMap; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.beam.runners.direct.DirectRunner.DirectPipelineResult; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CoderException; @@ -47,8 +54,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.TypeDescriptor; -import com.google.common.collect.ImmutableMap; -import com.fasterxml.jackson.annotation.JsonValue; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -56,11 +61,6 @@ import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; /** * Tests for basic {@link DirectRunner} functionality. 
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectTimerInternalsTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectTimerInternalsTest.java index c038910a47595..51cfeedbaca0b 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectTimerInternalsTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DirectTimerInternalsTest.java @@ -29,7 +29,6 @@ import org.apache.beam.sdk.util.TimeDomain; import org.apache.beam.sdk.util.TimerInternals.TimerData; import org.apache.beam.sdk.util.state.StateNamespaces; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluatorTest.java index 67f4ff47789f2..2e4fee24760e9 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluatorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerRemovingTransformEvaluatorTest.java @@ -25,18 +25,16 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.WindowedValue; - import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.List; - /** * Tests for {@link DoFnLifecycleManagerRemovingTransformEvaluator}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerTest.java index 77b32968f8b4f..1f0af9913acfc 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagerTest.java @@ -25,11 +25,6 @@ import static org.hamcrest.Matchers.theInstance; import static org.junit.Assert.assertThat; -import org.apache.beam.sdk.transforms.OldDoFn; - -import org.hamcrest.Matchers; -import org.junit.Test; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; @@ -38,10 +33,16 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.hamcrest.Matchers; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for {@link DoFnLifecycleManager}. 
*/ +@RunWith(JUnit4.class) public class DoFnLifecycleManagerTest { private TestFn fn = new TestFn(); private DoFnLifecycleManager mgr = DoFnLifecycleManager.of(fn); diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagersTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagersTest.java index 8be3d52763be2..39a4a9d8f39ce 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagersTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/DoFnLifecycleManagersTest.java @@ -20,10 +20,10 @@ import static org.hamcrest.Matchers.equalTo; -import org.apache.beam.sdk.transforms.OldDoFn; - import com.google.common.collect.ImmutableList; - +import java.util.ArrayList; +import java.util.Collection; +import org.apache.beam.sdk.transforms.OldDoFn; import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.hamcrest.Matcher; @@ -34,9 +34,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.Collection; - /** * Tests for {@link DoFnLifecycleManagers}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EncodabilityEnforcementFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EncodabilityEnforcementFactoryTest.java index b903ef1eb42f7..e0ccbe5461727 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EncodabilityEnforcementFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EncodabilityEnforcementFactoryTest.java @@ -19,6 +19,10 @@ import static org.hamcrest.Matchers.isA; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collections; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.Coder; @@ -31,7 +35,6 @@ import org.apache.beam.sdk.util.UserCodeException; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; @@ -39,11 +42,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collections; - /** * Tests for {@link EncodabilityEnforcementFactory}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EvaluationContextTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EvaluationContextTest.java index 7ac0caadc55bc..f59dbbaf8568e 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EvaluationContextTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/EvaluationContextTest.java @@ -25,6 +25,13 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.PCollectionViewWriter; @@ -61,10 +68,6 @@ import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Before; @@ -72,12 +75,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - /** * Tests for {@link EvaluationContext}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/FlattenEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/FlattenEvaluatorFactoryTest.java index 0bc3036f6399d..1c46c2411414e 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/FlattenEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/FlattenEvaluatorFactoryTest.java @@ -33,7 +33,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Test; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ForwardingPTransformTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ForwardingPTransformTest.java index 9ea71d70878d0..6abaf921943e9 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ForwardingPTransformTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ForwardingPTransformTest.java @@ -27,7 +27,6 @@ import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyEvaluatorFactoryTest.java index 78736c4f80876..8d1f8bdfaf68b 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyEvaluatorFactoryTest.java @@ -22,6 +22,9 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import 
com.google.common.collect.HashMultiset; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Multiset; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -36,11 +39,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.HashMultiset; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Multiset; - import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.joda.time.Instant; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactoryTest.java index 4afd64b52ad42..9f1e916c28cf3 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/GroupByKeyOnlyEvaluatorFactoryTest.java @@ -22,6 +22,9 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.google.common.collect.HashMultiset; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Multiset; import org.apache.beam.runners.direct.DirectGroupByKey.DirectGroupByKeyOnly; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -36,11 +39,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.HashMultiset; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Multiset; - import 
org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.joda.time.Instant; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java index db934e542f61a..d44151ac6556e 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityCheckingBundleFactoryTest.java @@ -33,7 +33,6 @@ import org.apache.beam.sdk.util.IllegalMutationException; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Instant; import org.junit.Before; import org.junit.Rule; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java index e1be120012251..713ae3510942a 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutabilityEnforcementFactoryTest.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.direct; +import java.io.Serializable; +import java.util.Collections; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; @@ -27,7 +29,6 @@ import org.apache.beam.sdk.util.IllegalMutationException; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Instant; import org.junit.Before; import org.junit.Rule; @@ -36,9 +37,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import 
java.util.Collections; - /** * Tests for {@link ImmutabilityEnforcementFactory}. */ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutableListBundleFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutableListBundleFactoryTest.java index 21e4bcbc0819d..43108f8176610 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutableListBundleFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ImmutableListBundleFactoryTest.java @@ -21,6 +21,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.ByteArrayCoder; @@ -36,9 +40,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.joda.time.Instant; @@ -49,10 +50,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; - /** * Tests for {@link ImmutableListBundleFactory}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java index 9e273ad84292c..ee6b2b4ed2e85 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/KeyedPValueTrackingVisitorTest.java @@ -21,6 +21,9 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableSet; +import java.util.Collections; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.IterableCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -35,9 +38,6 @@ import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableSet; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -45,9 +45,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Collections; -import java.util.Set; - /** * Tests for {@link KeyedPValueTrackingVisitor}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java index 3208841f61e0e..2a54ecb8aada9 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoEvaluatorTest.java @@ -23,6 +23,13 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectExecutionContext.DirectStepContext; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -45,9 +52,6 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Before; @@ -57,10 +61,6 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import javax.annotation.Nullable; /** * Tests for {@link ParDoEvaluator}. 
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java index 19094cbfe2843..555219626d72f 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoMultiEvaluatorFactoryTest.java @@ -25,6 +25,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import java.io.Serializable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate; @@ -59,7 +60,6 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; /** * Tests for {@link ParDoMultiEvaluatorFactory}. diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java index a4fd570d86249..60b6dd990b98d 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ParDoSingleEvaluatorFactoryTest.java @@ -25,6 +25,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import java.io.Serializable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.runners.direct.WatermarkManager.TimerUpdate; @@ -56,7 +57,6 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; /** * Tests for {@link ParDoSingleEvaluatorFactory}. 
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/SideInputContainerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/SideInputContainerTest.java index ec589da6eadb1..cc7d88a39e7e6 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/SideInputContainerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/SideInputContainerTest.java @@ -25,6 +25,13 @@ import static org.junit.Assert.fail; import static org.mockito.Mockito.doAnswer; +import com.google.common.collect.ImmutableList; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.TestPipeline; @@ -44,9 +51,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Before; import org.junit.Rule; @@ -60,13 +64,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - /** * Tests for {@link SideInputContainer}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StepTransformResultTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StepTransformResultTest.java index cfc69bc61d0f9..c06eff9606468 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StepTransformResultTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StepTransformResultTest.java @@ -29,7 +29,6 @@ import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StructuralKeyTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StructuralKeyTest.java index 26514f004ec5d..18aeac688d8a8 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StructuralKeyTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/StructuralKeyTest.java @@ -27,7 +27,6 @@ import org.apache.beam.sdk.coders.ByteArrayCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VarIntCoder; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorServicesTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorServicesTest.java index 04aa96ffdfd11..b085723a3ccb1 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorServicesTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorServicesTest.java @@ -22,7 +22,7 @@ import static org.mockito.Mockito.verify; import com.google.common.util.concurrent.MoreExecutors; - +import 
java.util.concurrent.ExecutorService; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -30,8 +30,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.concurrent.ExecutorService; - /** * Tests for {@link TransformExecutorServices}. */ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorTest.java index f6cb8d133d8e3..5af568f9f07ff 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/TransformExecutorTest.java @@ -27,6 +27,16 @@ import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import com.google.common.util.concurrent.MoreExecutors; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.sdk.coders.ByteArrayCoder; @@ -39,9 +49,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.util.concurrent.MoreExecutors; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Before; @@ -53,16 +60,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; 
-import java.util.concurrent.Future; -import java.util.concurrent.atomic.AtomicBoolean; - /** * Tests for {@link TransformExecutor}. diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadDeduplicatorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadDeduplicatorTest.java index b3c9012b60c25..7d2a95cd167c7 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadDeduplicatorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadDeduplicatorTest.java @@ -21,18 +21,16 @@ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; -import org.apache.beam.runners.direct.UnboundedReadDeduplicator.CachedIdDeduplicator; -import org.apache.beam.runners.direct.UnboundedReadDeduplicator.NeverDeduplicator; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import org.apache.beam.runners.direct.UnboundedReadDeduplicator.CachedIdDeduplicator; +import org.apache.beam.runners.direct.UnboundedReadDeduplicator.NeverDeduplicator; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for {@link UnboundedReadDeduplicator}. 
diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactoryTest.java index 839badfd6f9e7..3a6add6aac97d 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/UnboundedReadEvaluatorFactoryTest.java @@ -25,6 +25,17 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.google.common.collect.ContiguousSet; +import com.google.common.collect.DiscreteDomain; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Range; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.List; +import java.util.NoSuchElementException; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.AtomicCoder; @@ -43,12 +54,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ContiguousSet; -import com.google.common.collect.DiscreteDomain; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Range; - import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.joda.time.Instant; @@ -57,15 +62,6 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.List; -import java.util.NoSuchElementException; - -import javax.annotation.Nullable; /** * Tests for {@link UnboundedReadEvaluatorFactory}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewEvaluatorFactoryTest.java index 68207922855c6..d3ab81d8e7d6a 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/ViewEvaluatorFactoryTest.java @@ -23,6 +23,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.PCollectionViewWriter; import org.apache.beam.sdk.coders.KvCoder; @@ -38,9 +39,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkCallbackExecutorTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkCallbackExecutorTest.java index b6b2bf58af9a4..1be9a9884a8ca 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkCallbackExecutorTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkCallbackExecutorTest.java @@ -20,6 +20,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.Create; @@ -31,7 +34,6 @@ import 
org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; @@ -39,10 +41,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - /** * Tests for {@link WatermarkCallbackExecutor}. */ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java index 22f148a4a3f71..d9dc404cdf07a 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WatermarkManagerTest.java @@ -25,6 +25,15 @@ import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.io.Serializable; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; +import javax.annotation.Nullable; import org.apache.beam.runners.direct.CommittedResult.OutputType; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; @@ -56,10 +65,6 @@ import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.PValue; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.hamcrest.Matcher; @@ -70,14 +75,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import 
java.io.Serializable; -import java.util.Collection; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; -import javax.annotation.Nullable; - /** * Tests for {@link WatermarkManager}. */ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WindowEvaluatorFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WindowEvaluatorFactoryTest.java index 65dcfebe39c4c..63800cfb907bd 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WindowEvaluatorFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WindowEvaluatorFactoryTest.java @@ -19,11 +19,15 @@ import static org.apache.beam.sdk.WindowMatchers.isSingleWindowedValue; import static org.apache.beam.sdk.WindowMatchers.isWindowedValue; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.runners.direct.DirectRunner.CommittedBundle; import org.apache.beam.runners.direct.DirectRunner.UncommittedBundle; import org.apache.beam.sdk.coders.Coder; @@ -44,11 +48,6 @@ import org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; - import org.hamcrest.Matchers; import org.joda.time.Duration; import org.joda.time.Instant; @@ -59,9 +58,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.util.Collection; -import java.util.Collections; - /** * Tests for {@link WindowEvaluatorFactory}. 
*/ diff --git a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java index a53bc64ecd7fb..2dd477dd30e4f 100644 --- a/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java +++ b/runners/direct-java/src/test/java/org/apache/beam/runners/direct/WriteWithShardingFactoryTest.java @@ -28,6 +28,18 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import java.io.File; +import java.io.FileReader; +import java.io.Reader; +import java.nio.CharBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.runners.direct.WriteWithShardingFactory.KeyBasedOnCountFn; import org.apache.beam.sdk.coders.VarLongCoder; import org.apache.beam.sdk.io.Sink; @@ -46,28 +58,16 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PDone; - -import com.google.common.base.Function; -import com.google.common.collect.Iterables; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; - -import java.io.File; -import java.io.FileReader; -import java.io.Reader; -import java.nio.CharBuffer; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.ThreadLocalRandom; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for {@link WriteWithShardingFactory}. 
*/ +@RunWith(JUnit4.class) public class WriteWithShardingFactoryTest { public static final int INPUT_SIZE = 10000; @Rule public TemporaryFolder tmp = new TemporaryFolder(); diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java index 4deca12d14fa1..0ca94a13463bd 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/TFIDF.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.flink.examples; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashSet; +import java.util.Set; import org.apache.beam.runners.flink.FlinkPipelineOptions; import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.sdk.Pipeline; @@ -53,17 +59,9 @@ import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.TupleTag; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.util.HashSet; -import java.util.Set; - /** * An example that computes a basic TF-IDF search table for a directory or GCS prefix. 
* diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java index fdffd391ba0d3..ab9297f72d8ef 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/WordCount.java @@ -28,7 +28,6 @@ import org.apache.beam.sdk.transforms.Aggregator; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java index aff1a357c8ec0..9b5e31d08bbe0 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/AutoComplete.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.flink.examples.streaming; +import java.io.IOException; +import java.util.List; import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSocketSource; import org.apache.beam.sdk.Pipeline; @@ -47,12 +49,8 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - import org.joda.time.Duration; -import java.io.IOException; -import java.util.List; - /** * To run the example, first open a socket on a terminal by executing the command: *

  • diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java index 458a263331835..bf5dfc453e19a 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/JoinExamples.java @@ -35,7 +35,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TupleTag; - import org.joda.time.Duration; /** diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java index 68a9edcc5a8b3..27faefe6d7457 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaIOExamples.java @@ -17,6 +17,11 @@ */ package org.apache.beam.runners.flink.examples.streaming; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.Serializable; +import java.util.Properties; import org.apache.beam.runners.flink.FlinkPipelineOptions; import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSink; @@ -33,7 +38,6 @@ import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.PCollection; - import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.typeutils.TypeExtractor; import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08; @@ -42,12 +46,6 @@ import 
org.apache.flink.streaming.util.serialization.SerializationSchema; import org.apache.flink.streaming.util.serialization.SimpleStringSchema; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.Serializable; -import java.util.Properties; - /** * Recipes/Examples that demonstrate how to read/write data from/to Kafka. */ diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java index 39ce22542c0b4..365fb7b486726 100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/KafkaWindowedWordCountExample.java @@ -17,6 +17,7 @@ */ package org.apache.beam.runners.flink.examples.streaming; +import java.util.Properties; import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSource; import org.apache.beam.sdk.Pipeline; @@ -35,13 +36,10 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08; import org.apache.flink.streaming.util.serialization.SimpleStringSchema; import org.joda.time.Duration; -import java.util.Properties; - public class KafkaWindowedWordCountExample { static final String KAFKA_TOPIC = "test"; // Default kafka topic to read from diff --git a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java index fe8e627d1335a..f3361c5716973 
100644 --- a/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java +++ b/runners/flink/examples/src/main/java/org/apache/beam/runners/flink/examples/streaming/WindowedWordCount.java @@ -17,6 +17,7 @@ */ package org.apache.beam.runners.flink.examples.streaming; +import java.io.IOException; import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSocketSource; import org.apache.beam.sdk.Pipeline; @@ -35,13 +36,10 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; - /** * To run the example, first open a socket on a terminal by executing the command: *
  • diff --git a/runners/flink/runner/pom.xml b/runners/flink/runner/pom.xml index ca9e5e275fab5..08adc607abf64 100644 --- a/runners/flink/runner/pom.xml +++ b/runners/flink/runner/pom.xml @@ -52,11 +52,17 @@ test + org.apache.beam.sdk.testing.RunnableOnService + none + true + + org.apache.beam:beam-sdks-java-core + [ - "--runner=TestFlinkRunner", - "--streaming=false" + "--runner=TestFlinkRunner", + "--streaming=false" ] @@ -71,7 +77,6 @@ test - true org.apache.beam.sdk.testing.RunnableOnService none true diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/DefaultParallelismFactory.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/DefaultParallelismFactory.java index e512db0e3eaa8..2fe4569e3e406 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/DefaultParallelismFactory.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/DefaultParallelismFactory.java @@ -19,7 +19,6 @@ import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.PipelineOptions; - import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.GlobalConfiguration; diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineExecutionEnvironment.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineExecutionEnvironment.java index f4d4ea6d546d7..d1977a445adfa 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineExecutionEnvironment.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineExecutionEnvironment.java @@ -19,13 +19,13 @@ import static com.google.common.base.Preconditions.checkNotNull; +import java.util.List; import org.apache.beam.runners.flink.translation.FlinkBatchPipelineTranslator; import org.apache.beam.runners.flink.translation.FlinkPipelineTranslator; import 
org.apache.beam.runners.flink.translation.FlinkStreamingPipelineTranslator; import org.apache.beam.runners.flink.translation.PipelineTranslationOptimizer; import org.apache.beam.runners.flink.translation.TranslationMode; import org.apache.beam.sdk.Pipeline; - import org.apache.flink.api.common.JobExecutionResult; import org.apache.flink.api.java.CollectionEnvironment; import org.apache.flink.api.java.ExecutionEnvironment; @@ -34,8 +34,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.List; - /** * The class that instantiates and manages the execution of a given job. * Depending on if the job is a Streaming or Batch processing one, it creates diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java index c40473e108530..6d1a8d040f1a5 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkPipelineOptions.java @@ -18,21 +18,19 @@ package org.apache.beam.runners.flink; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.List; import org.apache.beam.sdk.options.ApplicationNameOptions; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.StreamingOptions; - -import com.fasterxml.jackson.annotation.JsonIgnore; import org.joda.time.DateTimeUtils; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.List; - /** * Options which can be used to configure a Flink PipelineRunner. 
*/ diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java index 47c4877e73fa8..8b1f42e5447cd 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunner.java @@ -17,28 +17,40 @@ */ package org.apache.beam.runners.flink; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableMap; +import java.io.File; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderRegistry; +import org.apache.beam.sdk.coders.KvCoder; +import org.apache.beam.sdk.coders.ListCoder; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsValidator; import org.apache.beam.sdk.runners.PipelineRunner; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.transforms.View; +import org.apache.beam.sdk.util.InstanceBuilder; +import org.apache.beam.sdk.util.PCollectionViews; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; - -import com.google.common.base.Joiner; - import org.apache.flink.api.common.JobExecutionResult; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; -import 
java.util.ArrayList; -import java.util.List; -import java.util.Map; - /** * A {@link PipelineRunner} that executes the operations in the * pipeline by first translating them to a Flink Plan and then executing them either locally @@ -54,6 +66,9 @@ public class FlinkRunner extends PipelineRunner { */ private final FlinkPipelineOptions options; + /** Custom transforms implementations. */ + private final Map, Class> overrides; + /** * Construct a runner from the provided options. * @@ -93,6 +108,18 @@ public static FlinkRunner fromOptions(PipelineOptions options) { private FlinkRunner(FlinkPipelineOptions options) { this.options = options; + + ImmutableMap.Builder, Class> builder = ImmutableMap., Class>builder(); + if (options.isStreaming()) { + builder.put(Combine.GloballyAsSingletonView.class, + StreamingCombineGloballyAsSingletonView.class); + builder.put(View.AsMap.class, StreamingViewAsMap.class); + builder.put(View.AsMultimap.class, StreamingViewAsMultimap.class); + builder.put(View.AsSingleton.class, StreamingViewAsSingleton.class); + builder.put(View.AsList.class, StreamingViewAsList.class); + builder.put(View.AsIterable.class, StreamingViewAsIterable.class); + } + overrides = builder.build(); } @Override @@ -135,9 +162,27 @@ public FlinkPipelineOptions getPipelineOptions() { } @Override - public Output apply( - PTransform transform, Input input) { - return super.apply(transform, input); + public OutputT apply( + PTransform transform, InputT input) { + if (overrides.containsKey(transform.getClass())) { + // It is the responsibility of whoever constructs overrides to ensure this is type safe. 
+ @SuppressWarnings("unchecked") + Class> transformClass = + (Class>) transform.getClass(); + + @SuppressWarnings("unchecked") + Class> customTransformClass = + (Class>) overrides.get(transform.getClass()); + + PTransform customTransform = + InstanceBuilder.ofType(customTransformClass) + .withArg(transformClass, transform) + .build(); + + return Pipeline.applyTransform(input, customTransform); + } else { + return super.apply(transform, input); + } } ///////////////////////////////////////////////////////////////////////////// @@ -154,9 +199,10 @@ public String toString() { * @param classLoader The URLClassLoader to use to detect resources to stage. * @return A list of absolute paths to the resources the class loader uses. * @throws IllegalArgumentException If either the class loader is not a URLClassLoader or one - * of the resources the class loader exposes is not a file resource. + * of the resources the class loader exposes is not a file resource. */ - protected static List detectClassPathResourcesToStage(ClassLoader classLoader) { + protected static List detectClassPathResourcesToStage( + ClassLoader classLoader) { if (!(classLoader instanceof URLClassLoader)) { String message = String.format("Unable to use ClassLoader to detect classpath elements. " + "Current ClassLoader is %s, only URLClassLoaders are supported.", classLoader); @@ -176,4 +222,331 @@ protected static List detectClassPathResourcesToStage(ClassLoader classL } return files; } + + /** + * Specialized implementation for + * {@link org.apache.beam.sdk.transforms.View.AsMap View.AsMap} + * for the Flink runner in streaming mode. 
+ */ + private static class StreamingViewAsMap + extends PTransform>, PCollectionView>> { + + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingViewAsMap(View.AsMap transform) { + } + + @Override + public PCollectionView> apply(PCollection> input) { + PCollectionView> view = + PCollectionViews.mapView( + input.getPipeline(), + input.getWindowingStrategy(), + input.getCoder()); + + @SuppressWarnings({"rawtypes", "unchecked"}) + KvCoder inputCoder = (KvCoder) input.getCoder(); + try { + inputCoder.getKeyCoder().verifyDeterministic(); + } catch (Coder.NonDeterministicException e) { +// runner.recordViewUsesNonDeterministicKeyCoder(this); + } + + return input + .apply(Combine.globally(new Concatenate>()).withoutDefaults()) + .apply(CreateFlinkPCollectionView., Map>of(view)); + } + + @Override + protected String getKindString() { + return "StreamingViewAsMap"; + } + } + + /** + * Specialized expansion for {@link + * org.apache.beam.sdk.transforms.View.AsMultimap View.AsMultimap} for the + * Flink runner in streaming mode. + */ + private static class StreamingViewAsMultimap + extends PTransform>, PCollectionView>>> { + + /** + * Builds an instance of this class from the overridden transform. 
+ */ + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingViewAsMultimap(View.AsMultimap transform) { + } + + @Override + public PCollectionView>> apply(PCollection> input) { + PCollectionView>> view = + PCollectionViews.multimapView( + input.getPipeline(), + input.getWindowingStrategy(), + input.getCoder()); + + @SuppressWarnings({"rawtypes", "unchecked"}) + KvCoder inputCoder = (KvCoder) input.getCoder(); + try { + inputCoder.getKeyCoder().verifyDeterministic(); + } catch (Coder.NonDeterministicException e) { +// runner.recordViewUsesNonDeterministicKeyCoder(this); + } + + return input + .apply(Combine.globally(new Concatenate>()).withoutDefaults()) + .apply(CreateFlinkPCollectionView., Map>>of(view)); + } + + @Override + protected String getKindString() { + return "StreamingViewAsMultimap"; + } + } + + /** + * Specialized implementation for + * {@link org.apache.beam.sdk.transforms.View.AsList View.AsList} for the + * Flink runner in streaming mode. + */ + private static class StreamingViewAsList + extends PTransform, PCollectionView>> { + /** + * Builds an instance of this class from the overridden transform. + */ + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingViewAsList(View.AsList transform) {} + + @Override + public PCollectionView> apply(PCollection input) { + PCollectionView> view = + PCollectionViews.listView( + input.getPipeline(), + input.getWindowingStrategy(), + input.getCoder()); + + return input.apply(Combine.globally(new Concatenate()).withoutDefaults()) + .apply(CreateFlinkPCollectionView.>of(view)); + } + + @Override + protected String getKindString() { + return "StreamingViewAsList"; + } + } + + /** + * Specialized implementation for + * {@link org.apache.beam.sdk.transforms.View.AsIterable View.AsIterable} for the + * Flink runner in streaming mode. 
+ */ + private static class StreamingViewAsIterable + extends PTransform, PCollectionView>> { + /** + * Builds an instance of this class from the overridden transform. + */ + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingViewAsIterable(View.AsIterable transform) { } + + @Override + public PCollectionView> apply(PCollection input) { + PCollectionView> view = + PCollectionViews.iterableView( + input.getPipeline(), + input.getWindowingStrategy(), + input.getCoder()); + + return input.apply(Combine.globally(new Concatenate()).withoutDefaults()) + .apply(CreateFlinkPCollectionView.>of(view)); + } + + @Override + protected String getKindString() { + return "StreamingViewAsIterable"; + } + } + + private static class WrapAsList extends OldDoFn> { + @Override + public void processElement(ProcessContext c) { + c.output(Arrays.asList(c.element())); + } + } + + /** + * Specialized expansion for + * {@link org.apache.beam.sdk.transforms.View.AsSingleton View.AsSingleton} for the + * Flink runner in streaming mode. + */ + private static class StreamingViewAsSingleton + extends PTransform, PCollectionView> { + private View.AsSingleton transform; + + /** + * Builds an instance of this class from the overridden transform. 
+ */ + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingViewAsSingleton(View.AsSingleton transform) { + this.transform = transform; + } + + @Override + public PCollectionView apply(PCollection input) { + Combine.Globally combine = Combine.globally( + new SingletonCombine<>(transform.hasDefaultValue(), transform.defaultValue())); + if (!transform.hasDefaultValue()) { + combine = combine.withoutDefaults(); + } + return input.apply(combine.asSingletonView()); + } + + @Override + protected String getKindString() { + return "StreamingViewAsSingleton"; + } + + private static class SingletonCombine extends Combine.BinaryCombineFn { + private boolean hasDefaultValue; + private T defaultValue; + + SingletonCombine(boolean hasDefaultValue, T defaultValue) { + this.hasDefaultValue = hasDefaultValue; + this.defaultValue = defaultValue; + } + + @Override + public T apply(T left, T right) { + throw new IllegalArgumentException("PCollection with more than one element " + + "accessed as a singleton view. Consider using Combine.globally().asSingleton() to " + + "combine the PCollection into a single value"); + } + + @Override + public T identity() { + if (hasDefaultValue) { + return defaultValue; + } else { + throw new IllegalArgumentException( + "Empty PCollection accessed as a singleton view. " + + "Consider setting withDefault to provide a default value"); + } + } + } + } + + private static class StreamingCombineGloballyAsSingletonView + extends PTransform, PCollectionView> { + Combine.GloballyAsSingletonView transform; + + /** + * Builds an instance of this class from the overridden transform. 
+ */ + @SuppressWarnings("unused") // used via reflection in FlinkRunner#apply() + public StreamingCombineGloballyAsSingletonView( + Combine.GloballyAsSingletonView transform) { + this.transform = transform; + } + + @Override + public PCollectionView apply(PCollection input) { + PCollection combined = + input.apply(Combine.globally(transform.getCombineFn()) + .withoutDefaults() + .withFanout(transform.getFanout())); + + PCollectionView view = PCollectionViews.singletonView( + combined.getPipeline(), + combined.getWindowingStrategy(), + transform.getInsertDefault(), + transform.getInsertDefault() + ? transform.getCombineFn().defaultValue() : null, + combined.getCoder()); + return combined + .apply(ParDo.of(new WrapAsList())) + .apply(CreateFlinkPCollectionView.of(view)); + } + + @Override + protected String getKindString() { + return "StreamingCombineGloballyAsSingletonView"; + } + } + + /** + * Combiner that combines {@code T}s into a single {@code List} containing all inputs. + * + *

    For internal use by {@link StreamingViewAsMap}, {@link StreamingViewAsMultimap}, + * {@link StreamingViewAsList}, {@link StreamingViewAsIterable}. + * They require the input {@link PCollection} fits in memory. + * For a large {@link PCollection} this is expected to crash! + * + * @param the type of elements to concatenate. + */ + private static class Concatenate extends Combine.CombineFn, List> { + @Override + public List createAccumulator() { + return new ArrayList(); + } + + @Override + public List addInput(List accumulator, T input) { + accumulator.add(input); + return accumulator; + } + + @Override + public List mergeAccumulators(Iterable> accumulators) { + List result = createAccumulator(); + for (List accumulator : accumulators) { + result.addAll(accumulator); + } + return result; + } + + @Override + public List extractOutput(List accumulator) { + return accumulator; + } + + @Override + public Coder> getAccumulatorCoder(CoderRegistry registry, Coder inputCoder) { + return ListCoder.of(inputCoder); + } + + @Override + public Coder> getDefaultOutputCoder(CoderRegistry registry, Coder inputCoder) { + return ListCoder.of(inputCoder); + } + } + + /** + * Creates a primitive {@link PCollectionView}. + * + *

    For internal use only by runner implementors. + * + * @param The type of the elements of the input PCollection + * @param The type associated with the {@link PCollectionView} used as a side input + */ + public static class CreateFlinkPCollectionView + extends PTransform>, PCollectionView> { + private PCollectionView view; + + private CreateFlinkPCollectionView(PCollectionView view) { + this.view = view; + } + + public static CreateFlinkPCollectionView of( + PCollectionView view) { + return new CreateFlinkPCollectionView<>(view); + } + + public PCollectionView getView() { + return view; + } + + @Override + public PCollectionView apply(PCollection> input) { + return view; + } + } } diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerRegistrar.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerRegistrar.java index 52b2a8ddb5d08..f328279bfba02 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerRegistrar.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerRegistrar.java @@ -18,14 +18,13 @@ package org.apache.beam.runners.flink; +import com.google.auto.service.AutoService; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsRegistrar; import org.apache.beam.sdk.runners.PipelineRunner; import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; -import com.google.auto.service.AutoService; -import com.google.common.collect.ImmutableList; - /** * AuteService registrar - will register FlinkRunner and FlinkOptions diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerResult.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerResult.java index 923d54c566313..dd0733a671c98 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerResult.java 
+++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/FlinkRunnerResult.java @@ -17,17 +17,15 @@ */ package org.apache.beam.runners.flink; -import org.apache.beam.sdk.PipelineResult; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; import org.apache.beam.sdk.AggregatorRetrievalException; import org.apache.beam.sdk.AggregatorValues; +import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.transforms.Aggregator; - import org.joda.time.Duration; -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - /** * Result of executing a {@link org.apache.beam.sdk.Pipeline} with Flink. This * has methods to query to job runtime and the final values of diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/TestFlinkRunner.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/TestFlinkRunner.java index 460933fbfccfc..dd231d6e6bdd4 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/TestFlinkRunner.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/TestFlinkRunner.java @@ -26,8 +26,6 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; -import org.apache.flink.runtime.client.JobExecutionException; - public class TestFlinkRunner extends PipelineRunner { private FlinkRunner delegate; @@ -59,14 +57,25 @@ OutputT apply(PTransform transform, InputT input) { @Override public FlinkRunnerResult run(Pipeline pipeline) { try { - return delegate.run(pipeline); - } catch (RuntimeException e) { + FlinkRunnerResult result = delegate.run(pipeline); + + return result; + } catch (Throwable e) { // Special case hack to pull out assertion errors from PAssert; instead there should // probably be a better story along the lines of UserCodeException. 
- if (e.getCause() != null - && e.getCause() instanceof JobExecutionException - && e.getCause().getCause() instanceof AssertionError) { - throw (AssertionError) e.getCause().getCause(); + Throwable cause = e; + Throwable oldCause = e; + do { + if (cause.getCause() == null) { + break; + } + + oldCause = cause; + cause = cause.getCause(); + + } while (!oldCause.equals(cause)); + if (cause instanceof AssertionError) { + throw (AssertionError) cause; } else { throw e; } diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchPipelineTranslator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchPipelineTranslator.java index 8f9a37a6c2238..66c48b07e23b1 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchPipelineTranslator.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchPipelineTranslator.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.runners.TransformTreeNode; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; - import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.io.DiscardingOutputFormat; diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java index 01a3ab25b3122..935a9ac439b94 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTransformTranslators.java @@ -17,6 +17,13 @@ */ package org.apache.beam.runners.flink.translation; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; 
+import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.runners.flink.translation.functions.FlinkAssignWindows; import org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction; import org.apache.beam.runners.flink.translation.functions.FlinkMergingNonShuffleReduceFunction; @@ -39,9 +46,9 @@ import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.transforms.Combine; import org.apache.beam.sdk.transforms.CombineFnBase; -import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.Flatten; import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.View; @@ -60,10 +67,6 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - import org.apache.flink.api.common.functions.FilterFunction; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -77,12 +80,6 @@ import org.apache.flink.api.java.operators.SingleInputUdfOperator; import org.apache.flink.util.Collector; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * Translators for transforming {@link PTransform PTransforms} to * Flink {@link DataSet DataSets}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTranslationContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTranslationContext.java index a73bf135a2b1d..835648e43ea8f 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTranslationContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkBatchTranslationContext.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.flink.translation; +import java.util.HashMap; +import java.util.Map; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -29,14 +31,10 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.PValue; - import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; -import java.util.HashMap; -import java.util.Map; - /** * Helper for {@link FlinkBatchPipelineTranslator} and translators in * {@link FlinkBatchTransformTranslators}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingPipelineTranslator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingPipelineTranslator.java index 2e655a35f6db6..b12745512c93e 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingPipelineTranslator.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingPipelineTranslator.java @@ -22,16 +22,16 @@ import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PValue; - import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This is a {@link FlinkPipelineTranslator} for streaming jobs. Its role is to translate the user-provided - * {@link org.apache.beam.sdk.values.PCollection}-based job into a + * This is a {@link FlinkPipelineTranslator} for streaming jobs. Its role is to translate + * the user-provided {@link org.apache.beam.sdk.values.PCollection}-based job into a * {@link org.apache.flink.streaming.api.datastream.DataStream} one. 
- * */ + * + */ public class FlinkStreamingPipelineTranslator extends FlinkPipelineTranslator { private static final Logger LOG = LoggerFactory.getLogger(FlinkStreamingPipelineTranslator.class); @@ -55,8 +55,10 @@ public CompositeBehavior enterCompositeTransform(TransformTreeNode node) { PTransform transform = node.getTransform(); if (transform != null) { - StreamTransformTranslator translator = FlinkStreamingTransformTranslators.getTranslator(transform); - if (translator != null) { + StreamTransformTranslator translator = + FlinkStreamingTransformTranslators.getTranslator(transform); + + if (translator != null && applyCanTranslate(transform, node, translator)) { applyStreamingTransform(transform, node, translator); LOG.info(genSpaces(this.depth) + "translated-" + formatNodeName(node)); return CompositeBehavior.DO_NOT_ENTER_TRANSFORM; @@ -79,10 +81,13 @@ public void visitPrimitiveTransform(TransformTreeNode node) { // currently visiting and translate it into its Flink alternative. PTransform transform = node.getTransform(); - StreamTransformTranslator translator = FlinkStreamingTransformTranslators.getTranslator(transform); - if (translator == null) { + StreamTransformTranslator translator = + FlinkStreamingTransformTranslators.getTranslator(transform); + + if (translator == null && applyCanTranslate(transform, node, translator)) { LOG.info(node.getTransform().getClass().toString()); - throw new UnsupportedOperationException("The transform " + transform + " is currently not supported."); + throw new UnsupportedOperationException( + "The transform " + transform + " is currently not supported."); } applyStreamingTransform(transform, node, translator); } @@ -92,7 +97,10 @@ public void visitValue(PValue value, TransformTreeNode producer) { // do nothing here } - private > void applyStreamingTransform(PTransform transform, TransformTreeNode node, StreamTransformTranslator translator) { + private > void applyStreamingTransform( + PTransform transform, + TransformTreeNode 
node, + StreamTransformTranslator translator) { @SuppressWarnings("unchecked") T typedTransform = (T) transform; @@ -106,13 +114,41 @@ public void visitValue(PValue value, TransformTreeNode producer) { typedTranslator.translateNode(typedTransform, streamingContext); } + private > boolean applyCanTranslate( + PTransform transform, + TransformTreeNode node, + StreamTransformTranslator translator) { + + @SuppressWarnings("unchecked") + T typedTransform = (T) transform; + + @SuppressWarnings("unchecked") + StreamTransformTranslator typedTranslator = (StreamTransformTranslator) translator; + + streamingContext.setCurrentTransform(AppliedPTransform.of( + node.getFullName(), node.getInput(), node.getOutput(), (PTransform) transform)); + + return typedTranslator.canTranslate(typedTransform, streamingContext); + } + /** * The interface that every Flink translator of a Beam operator should implement. * This interface is for streaming jobs. For examples of such translators see * {@link FlinkStreamingTransformTranslators}. */ - public interface StreamTransformTranslator { - void translateNode(Type transform, FlinkStreamingTranslationContext context); + abstract static class StreamTransformTranslator { + + /** + * Translate the given transform. + */ + abstract void translateNode(T transform, FlinkStreamingTranslationContext context); + + /** + * Returns true iff this translator can translate the given transform. 
+ */ + boolean canTranslate(T transform, FlinkStreamingTranslationContext context) { + return true; + } } private static String formatNodeName(TransformTreeNode node) { diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java index 5b55d42bbe9f2..07e2191d4e0c2 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTransformTranslators.java @@ -18,29 +18,40 @@ package org.apache.beam.runners.flink.translation; +import com.google.api.client.util.Maps; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.beam.runners.flink.FlinkRunner; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.runners.flink.translation.types.FlinkCoder; -import org.apache.beam.runners.flink.translation.wrappers.SourceInputFormat; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkGroupAlsoByWindowWrapper; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkGroupByKeyWrapper; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkParDoBoundMultiWrapper; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkParDoBoundWrapper; -import org.apache.beam.runners.flink.translation.wrappers.streaming.io.FlinkStreamingCreateFunction; +import org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator; +import 
org.apache.beam.runners.flink.translation.wrappers.streaming.SingletonKeyedWorkItem; +import org.apache.beam.runners.flink.translation.wrappers.streaming.SingletonKeyedWorkItemCoder; +import org.apache.beam.runners.flink.translation.wrappers.streaming.WindowDoFnOperator; +import org.apache.beam.runners.flink.translation.wrappers.streaming.WorkItemKeySelector; +import org.apache.beam.runners.flink.translation.wrappers.streaming.io.BoundedSourceWrapper; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSink; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedFlinkSource; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.io.BoundedSource; +import org.apache.beam.sdk.coders.VoidCoder; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.io.Sink; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.io.Write; import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.Flatten; import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.join.RawUnionValue; @@ -50,39 +61,36 @@ import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.transforms.windowing.WindowFn; +import org.apache.beam.sdk.util.AppliedCombineFn; +import org.apache.beam.sdk.util.Reshuffle; +import org.apache.beam.sdk.util.SystemReduceFn; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import 
org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValue; import org.apache.beam.sdk.values.TupleTag; - -import com.google.api.client.util.Maps; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - -import org.apache.flink.api.common.functions.FilterFunction; import org.apache.flink.api.common.functions.FlatMapFunction; +import org.apache.flink.api.common.functions.MapFunction; +import org.apache.flink.api.common.functions.RichFlatMapFunction; import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.core.fs.FileSystem; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.DataStreamSink; +import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; +import org.apache.flink.streaming.api.operators.OneInputStreamOperator; +import org.apache.flink.streaming.api.operators.TwoInputStreamOperator; +import org.apache.flink.streaming.api.transformations.TwoInputTransformation; import org.apache.flink.util.Collector; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * This class contains all the mappings between Beam and Flink * streaming transformations. 
The {@link FlinkStreamingPipelineTranslator} @@ -96,26 +104,32 @@ public class FlinkStreamingTransformTranslators { // -------------------------------------------------------------------------------------------- @SuppressWarnings("rawtypes") - private static final Map, FlinkStreamingPipelineTranslator.StreamTransformTranslator> TRANSLATORS = new HashMap<>(); + private static final Map< + Class, + FlinkStreamingPipelineTranslator.StreamTransformTranslator> TRANSLATORS = new HashMap<>(); // here you can find all the available translators. static { - TRANSLATORS.put(Create.Values.class, new CreateStreamingTranslator()); TRANSLATORS.put(Read.Bounded.class, new BoundedReadSourceTranslator()); TRANSLATORS.put(Read.Unbounded.class, new UnboundedReadSourceTranslator()); - TRANSLATORS.put(ParDo.Bound.class, new ParDoBoundStreamingTranslator()); + TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator()); TRANSLATORS.put(TextIO.Write.Bound.class, new TextIOWriteBoundStreamingTranslator()); - TRANSLATORS.put(Write.Bound.class, new WriteSinkStreamingTranslator()); + TRANSLATORS.put(ParDo.Bound.class, new ParDoBoundStreamingTranslator()); + TRANSLATORS.put(ParDo.BoundMulti.class, new ParDoBoundMultiStreamingTranslator()); TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslator()); + TRANSLATORS.put(Flatten.FlattenPCollectionList.class, new FlattenPCollectionTranslator()); + TRANSLATORS.put( + FlinkRunner.CreateFlinkPCollectionView.class, new CreateViewStreamingTranslator()); + + TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslatorStreaming()); TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslator()); TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslator()); - TRANSLATORS.put(Flatten.FlattenPCollectionList.class, new FlattenPCollectionTranslator()); - TRANSLATORS.put(ParDo.BoundMulti.class, new ParDoBoundMultiStreamingTranslator()); } - public static FlinkStreamingPipelineTranslator.StreamTransformTranslator getTranslator(PTransform 
transform) { + public static FlinkStreamingPipelineTranslator.StreamTransformTranslator getTranslator( + PTransform transform) { return TRANSLATORS.get(transform.getClass()); } @@ -123,52 +137,17 @@ public static FlinkStreamingPipelineTranslator.StreamTransformTranslator getT // Transformation Implementations // -------------------------------------------------------------------------------------------- - private static class CreateStreamingTranslator implements - FlinkStreamingPipelineTranslator.StreamTransformTranslator> { - - @Override - public void translateNode(Create.Values transform, FlinkStreamingTranslationContext context) { - PCollection output = context.getOutput(transform); - Iterable elements = transform.getElements(); - - // we need to serialize the elements to byte arrays, since they might contain - // elements that are not serializable by Java serialization. We deserialize them - // in the FlatMap function using the Coder. - - List serializedElements = Lists.newArrayList(); - Coder elementCoder = output.getCoder(); - for (OUT element: elements) { - ByteArrayOutputStream bao = new ByteArrayOutputStream(); - try { - elementCoder.encode(element, bao, Coder.Context.OUTER); - serializedElements.add(bao.toByteArray()); - } catch (IOException e) { - throw new RuntimeException("Could not serialize Create elements using Coder: " + e); - } - } - - - DataStream initDataSet = context.getExecutionEnvironment().fromElements(1); - - FlinkStreamingCreateFunction createFunction = - new FlinkStreamingCreateFunction<>(serializedElements, elementCoder); - - WindowedValue.ValueOnlyWindowedValueCoder windowCoder = WindowedValue.getValueOnlyCoder(elementCoder); - TypeInformation> outputType = new CoderTypeInformation<>(windowCoder); - - DataStream> outputDataStream = initDataSet.flatMap(createFunction) - .returns(outputType); - - context.setOutputDataStream(output, outputDataStream); - } - } - + private static class TextIOWriteBoundStreamingTranslator + extends 
FlinkStreamingPipelineTranslator.StreamTransformTranslator< + TextIO.Write.Bound> { - private static class TextIOWriteBoundStreamingTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { - private static final Logger LOG = LoggerFactory.getLogger(TextIOWriteBoundStreamingTranslator.class); + private static final Logger LOG = + LoggerFactory.getLogger(TextIOWriteBoundStreamingTranslator.class); @Override - public void translateNode(TextIO.Write.Bound transform, FlinkStreamingTranslationContext context) { + public void translateNode( + TextIO.Write.Bound transform, + FlinkStreamingTranslationContext context) { PValue input = context.getInput(transform); DataStream> inputDataStream = context.getInputDataStream(input); @@ -179,17 +158,25 @@ public void translateNode(TextIO.Write.Bound transform, FlinkStreamingTransla String shardNameTemplate = transform.getShardNameTemplate(); // TODO: Implement these. We need Flink support for this. - LOG.warn("Translation of TextIO.Write.needsValidation not yet supported. Is: {}.", needsValidation); - LOG.warn("Translation of TextIO.Write.filenameSuffix not yet supported. Is: {}.", filenameSuffix); - LOG.warn("Translation of TextIO.Write.shardNameTemplate not yet supported. Is: {}.", shardNameTemplate); - - DataStream dataSink = inputDataStream.flatMap(new FlatMapFunction, String>() { - @Override - public void flatMap(WindowedValue value, Collector out) throws Exception { - out.collect(value.getValue().toString()); - } - }); - DataStreamSink output = dataSink.writeAsText(filenamePrefix, FileSystem.WriteMode.OVERWRITE); + LOG.warn( + "Translation of TextIO.Write.needsValidation not yet supported. Is: {}.", + needsValidation); + LOG.warn( + "Translation of TextIO.Write.filenameSuffix not yet supported. Is: {}.", + filenameSuffix); + LOG.warn( + "Translation of TextIO.Write.shardNameTemplate not yet supported. 
Is: {}.", + shardNameTemplate); + + DataStream dataSink = inputDataStream + .flatMap(new FlatMapFunction, String>() { + @Override + public void flatMap(WindowedValue value, Collector out) throws Exception { + out.collect(value.getValue().toString()); + } + }); + DataStreamSink output = + dataSink.writeAsText(filenamePrefix, FileSystem.WriteMode.OVERWRITE); if (numShards > 0) { output.setParallelism(numShards); @@ -197,7 +184,8 @@ public void flatMap(WindowedValue value, Collector out) throws Except } } - private static class WriteSinkStreamingTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class WriteSinkStreamingTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { @Override public void translateNode(Write.Bound transform, FlinkStreamingTranslationContext context) { @@ -206,7 +194,8 @@ public void translateNode(Write.Bound transform, FlinkStreamingTranslationCon Sink sink = transform.getSink(); if (!(sink instanceof UnboundedFlinkSink)) { - throw new UnsupportedOperationException("At the time, only unbounded Flink sinks are supported."); + throw new UnsupportedOperationException( + "At the time, only unbounded Flink sinks are supported."); } DataStream> inputDataSet = context.getInputDataStream(input); @@ -220,38 +209,23 @@ public void flatMap(WindowedValue value, Collector out) throws Except } } - private static class BoundedReadSourceTranslator - implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class UnboundedReadSourceTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { @Override - public void translateNode(Read.Bounded transform, FlinkStreamingTranslationContext context) { - - BoundedSource boundedSource = transform.getSource(); - PCollection output = context.getOutput(transform); - - TypeInformation> typeInfo = context.getTypeInfo(output); - - DataStream> source = 
context.getExecutionEnvironment().createInput( - new SourceInputFormat<>( - boundedSource, - context.getPipelineOptions()), - typeInfo); - - context.setOutputDataStream(output, source); - } - } - - private static class UnboundedReadSourceTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { - - @Override - public void translateNode(Read.Unbounded transform, FlinkStreamingTranslationContext context) { + public void translateNode( + Read.Unbounded transform, + FlinkStreamingTranslationContext context) { PCollection output = context.getOutput(transform); DataStream> source; if (transform.getSource().getClass().equals(UnboundedFlinkSource.class)) { @SuppressWarnings("unchecked") - UnboundedFlinkSource flinkSourceFunction = (UnboundedFlinkSource) transform.getSource(); - final AssignerWithPeriodicWatermarks flinkAssigner = flinkSourceFunction.getFlinkTimestampAssigner(); + UnboundedFlinkSource flinkSourceFunction = + (UnboundedFlinkSource) transform.getSource(); + + final AssignerWithPeriodicWatermarks flinkAssigner = + flinkSourceFunction.getFlinkTimestampAssigner(); DataStream flinkSource = context.getExecutionEnvironment() .addSource(flinkSourceFunction.getFlinkSource()); @@ -280,9 +254,12 @@ public void flatMap(T s, Collector> collector) throws Exception context.getPipelineOptions(), transform.getSource(), context.getExecutionEnvironment().getParallelism()); - source = context.getExecutionEnvironment().addSource(sourceWrapper).name(transform.getName()); + source = context + .getExecutionEnvironment() + .addSource(sourceWrapper).name(transform.getName()); } catch (Exception e) { - throw new RuntimeException("Error while translating UnboundedSource: " + transform.getSource(), e); + throw new RuntimeException( + "Error while translating UnboundedSource: " + transform.getSource(), e); } } @@ -290,60 +267,381 @@ public void flatMap(T s, Collector> collector) throws Exception } } - private static class ParDoBoundStreamingTranslator implements 
FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class BoundedReadSourceTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { @Override - public void translateNode(ParDo.Bound transform, FlinkStreamingTranslationContext context) { - PCollection output = context.getOutput(transform); - - final WindowingStrategy windowingStrategy = - (WindowingStrategy) - context.getOutput(transform).getWindowingStrategy(); - - WindowedValue.WindowedValueCoder outputStreamCoder = WindowedValue.getFullCoder(output.getCoder(), - windowingStrategy.getWindowFn().windowCoder()); - CoderTypeInformation> outputWindowedValueCoder = - new CoderTypeInformation<>(outputStreamCoder); - - FlinkParDoBoundWrapper doFnWrapper = new FlinkParDoBoundWrapper<>( - context.getPipelineOptions(), windowingStrategy, transform.getFn()); - DataStream> inputDataStream = context.getInputDataStream(context.getInput(transform)); - SingleOutputStreamOperator> outDataStream = inputDataStream - .flatMap(doFnWrapper) - .name(transform.getName()) - .returns(outputWindowedValueCoder); + public void translateNode( + Read.Bounded transform, + FlinkStreamingTranslationContext context) { + PCollection output = context.getOutput(transform); - context.setOutputDataStream(context.getOutput(transform), outDataStream); + DataStream> source; + try { + transform.getSource(); + BoundedSourceWrapper sourceWrapper = + new BoundedSourceWrapper<>( + context.getPipelineOptions(), + transform.getSource(), + context.getExecutionEnvironment().getParallelism()); + source = context + .getExecutionEnvironment() + .addSource(sourceWrapper).name(transform.getName()); + } catch (Exception e) { + throw new RuntimeException( + "Error while translating BoundedSource: " + transform.getSource(), e); + } + + context.setOutputDataStream(output, source); } } - public static class WindowBoundTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class 
ParDoBoundStreamingTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator< + ParDo.Bound> { @Override - public void translateNode(Window.Bound transform, FlinkStreamingTranslationContext context) { - PValue input = context.getInput(transform); - DataStream> inputDataStream = context.getInputDataStream(input); + public void translateNode( + ParDo.Bound transform, + FlinkStreamingTranslationContext context) { + + WindowingStrategy windowingStrategy = + context.getOutput(transform).getWindowingStrategy(); - final WindowingStrategy windowingStrategy = - (WindowingStrategy) - context.getOutput(transform).getWindowingStrategy(); + TypeInformation> typeInfo = + context.getTypeInfo(context.getOutput(transform)); - final WindowFn windowFn = windowingStrategy.getWindowFn(); + List> sideInputs = transform.getSideInputs(); - WindowedValue.WindowedValueCoder outputStreamCoder = WindowedValue.getFullCoder( - context.getInput(transform).getCoder(), windowingStrategy.getWindowFn().windowCoder()); - CoderTypeInformation> outputWindowedValueCoder = - new CoderTypeInformation<>(outputStreamCoder); + @SuppressWarnings("unchecked") + PCollection inputPCollection = (PCollection) context.getInput(transform); - final FlinkParDoBoundWrapper windowDoFnAssigner = new FlinkParDoBoundWrapper<>( - context.getPipelineOptions(), windowingStrategy, createWindowAssigner(windowFn)); + TypeInformation> inputTypeInfo = + context.getTypeInfo(inputPCollection); - SingleOutputStreamOperator> windowedStream = - inputDataStream.flatMap(windowDoFnAssigner).returns(outputWindowedValueCoder); - context.setOutputDataStream(context.getOutput(transform), windowedStream); + if (sideInputs.isEmpty()) { + DoFnOperator> doFnOperator = + new DoFnOperator<>( + transform.getFn(), + inputTypeInfo, + new TupleTag("main output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory>(), + windowingStrategy, + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), 
/* side inputs */ + context.getPipelineOptions()); + + DataStream> inputDataStream = + context.getInputDataStream(context.getInput(transform)); + + SingleOutputStreamOperator> outDataStream = inputDataStream + .transform(transform.getName(), typeInfo, doFnOperator); + + context.setOutputDataStream(context.getOutput(transform), outDataStream); + } else { + Tuple2>, DataStream> transformedSideInputs = + transformSideInputs(sideInputs, context); + + DoFnOperator> doFnOperator = + new DoFnOperator<>( + transform.getFn(), + inputTypeInfo, + new TupleTag("main output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory>(), + windowingStrategy, + transformedSideInputs.f0, + sideInputs, + context.getPipelineOptions()); + + DataStream> inputDataStream = + context.getInputDataStream(context.getInput(transform)); + + SingleOutputStreamOperator> outDataStream = inputDataStream + .connect(transformedSideInputs.f1.broadcast()) + .transform(transform.getName(), typeInfo, doFnOperator); + + context.setOutputDataStream(context.getOutput(transform), outDataStream); + + } + } + } + + /** + * Wraps each element in a {@link RawUnionValue} with the given tag id. 
+ */ + private static class ToRawUnion implements MapFunction { + private final int intTag; + + public ToRawUnion(int intTag) { + this.intTag = intTag; } - private static OldDoFn createWindowAssigner(final WindowFn windowFn) { + @Override + public RawUnionValue map(T o) throws Exception { + return new RawUnionValue(intTag, o); + } + } + + private static Tuple2>, DataStream> + transformSideInputs( + Collection> sideInputs, + FlinkStreamingTranslationContext context) { + + // collect all side inputs + Map, Integer> tagToIntMapping = new HashMap<>(); + Map> intToViewMapping = new HashMap<>(); + int count = 0; + for (PCollectionView sideInput: sideInputs) { + TupleTag tag = sideInput.getTagInternal(); + intToViewMapping.put(count, sideInput); + tagToIntMapping.put(tag, count); + count++; + Coder>> coder = sideInput.getCoderInternal(); + } + + + List> inputCoders = new ArrayList<>(); + for (PCollectionView sideInput: sideInputs) { + DataStream sideInputStream = context.getInputDataStream(sideInput); + TypeInformation tpe = sideInputStream.getType(); + if (!(tpe instanceof CoderTypeInformation)) { + throw new IllegalStateException( + "Input Stream TypeInformation is no CoderTypeInformation."); + } + + Coder coder = ((CoderTypeInformation) tpe).getCoder(); + inputCoders.add(coder); + } + + UnionCoder unionCoder = UnionCoder.of(inputCoders); + + CoderTypeInformation unionTypeInformation = + new CoderTypeInformation<>(unionCoder); + + // transform each side input to RawUnionValue and union them + DataStream sideInputUnion = null; + + for (PCollectionView sideInput: sideInputs) { + TupleTag tag = sideInput.getTagInternal(); + final int intTag = tagToIntMapping.get(tag); + DataStream sideInputStream = context.getInputDataStream(sideInput); + DataStream unionValueStream = + sideInputStream.map(new ToRawUnion<>(intTag)).returns(unionTypeInformation); + + if (sideInputUnion == null) { + sideInputUnion = unionValueStream; + } else { + sideInputUnion = 
sideInputUnion.union(unionValueStream); + } + } + + if (sideInputUnion == null) { + throw new IllegalStateException("No unioned side inputs, this indicates a bug."); + } + + return new Tuple2<>(intToViewMapping, sideInputUnion); + } + + + private static class ParDoBoundMultiStreamingTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator< + ParDo.BoundMulti> { + + @Override + public void translateNode( + ParDo.BoundMulti transform, + FlinkStreamingTranslationContext context) { + + // we assume that the transformation does not change the windowing strategy. + WindowingStrategy windowingStrategy = + context.getInput(transform).getWindowingStrategy(); + + Map, PCollection> outputs = context.getOutput(transform).getAll(); + + Map, Integer> tagsToLabels = + transformTupleTagsToLabels(transform.getMainOutputTag(), outputs.keySet()); + + List> sideInputs = transform.getSideInputs(); + + SingleOutputStreamOperator unionOutputStream; + + @SuppressWarnings("unchecked") + PCollection inputPCollection = (PCollection) context.getInput(transform); + + TypeInformation> inputTypeInfo = + context.getTypeInfo(inputPCollection); + + if (sideInputs.isEmpty()) { + DoFnOperator doFnOperator = + new DoFnOperator<>( + transform.getFn(), + inputTypeInfo, + transform.getMainOutputTag(), + transform.getSideOutputTags().getAll(), + new DoFnOperator.MultiOutputOutputManagerFactory(tagsToLabels), + windowingStrategy, + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + context.getPipelineOptions()); + + UnionCoder outputUnionCoder = createUnionCoder(outputs.values()); + + CoderTypeInformation outputUnionTypeInformation = + new CoderTypeInformation<>(outputUnionCoder); + + DataStream> inputDataStream = + context.getInputDataStream(context.getInput(transform)); + + unionOutputStream = inputDataStream + .transform(transform.getName(), outputUnionTypeInformation, doFnOperator); + + } else { + Tuple2>, DataStream> transformedSideInputs = 
+ transformSideInputs(sideInputs, context); + + DoFnOperator doFnOperator = + new DoFnOperator<>( + transform.getFn(), + inputTypeInfo, + transform.getMainOutputTag(), + transform.getSideOutputTags().getAll(), + new DoFnOperator.MultiOutputOutputManagerFactory(tagsToLabels), + windowingStrategy, + transformedSideInputs.f0, + sideInputs, + context.getPipelineOptions()); + + UnionCoder outputUnionCoder = createUnionCoder(outputs.values()); + + CoderTypeInformation outputUnionTypeInformation = + new CoderTypeInformation<>(outputUnionCoder); + + DataStream> inputDataStream = + context.getInputDataStream(context.getInput(transform)); + + unionOutputStream = inputDataStream + .connect(transformedSideInputs.f1.broadcast()) + .transform(transform.getName(), outputUnionTypeInformation, doFnOperator); + } + + for (Map.Entry, PCollection> output : outputs.entrySet()) { + final int outputTag = tagsToLabels.get(output.getKey()); + + TypeInformation outputTypeInfo = + context.getTypeInfo(output.getValue()); + + @SuppressWarnings("unchecked") + DataStream filtered = + unionOutputStream.flatMap(new FlatMapFunction() { + @Override + public void flatMap(RawUnionValue value, Collector out) throws Exception { + System.out.println("FILTERING: " + value); + if (value.getUnionTag() == outputTag) { + System.out.println("EMITTING VALUE: " + value); + out.collect(value.getValue()); + } + } + }).returns(outputTypeInfo); + + context.setOutputDataStream(output.getValue(), filtered); + } + } + + private Map, Integer> transformTupleTagsToLabels( + TupleTag mainTag, + Set> secondaryTags) { + + Map, Integer> tagToLabelMap = Maps.newHashMap(); + int count = 0; + tagToLabelMap.put(mainTag, count++); + for (TupleTag tag : secondaryTags) { + if (!tagToLabelMap.containsKey(tag)) { + tagToLabelMap.put(tag, count++); + } + } + return tagToLabelMap; + } + + private UnionCoder createUnionCoder(Collection> taggedCollections) { + List> outputCoders = Lists.newArrayList(); + for (PCollection coll : 
taggedCollections) { + WindowedValue.FullWindowedValueCoder windowedValueCoder = + WindowedValue.getFullCoder( + coll.getCoder(), + coll.getWindowingStrategy().getWindowFn().windowCoder()); + outputCoders.add(windowedValueCoder); + } + return UnionCoder.of(outputCoders); + } + } + + private static class CreateViewStreamingTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator< + FlinkRunner.CreateFlinkPCollectionView> { + + @Override + public void translateNode( + FlinkRunner.CreateFlinkPCollectionView transform, + FlinkStreamingTranslationContext context) { + // just forward + DataStream>> inputDataSet = + context.getInputDataStream(context.getInput(transform)); + + PCollectionView input = transform.getView(); + + context.setOutputDataStream(input, inputDataSet); + } + } + + private static class WindowBoundTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + + @Override + public void translateNode( + Window.Bound transform, + FlinkStreamingTranslationContext context) { + + @SuppressWarnings("unchecked") + WindowingStrategy windowingStrategy = + (WindowingStrategy) context.getOutput(transform).getWindowingStrategy(); + + TypeInformation> typeInfo = + context.getTypeInfo(context.getOutput(transform)); + + OldDoFn windowAssignerDoFn = + createWindowAssigner(windowingStrategy.getWindowFn()); + + @SuppressWarnings("unchecked") + PCollection inputPCollection = context.getInput(transform); + + TypeInformation> inputTypeInfo = + context.getTypeInfo(inputPCollection); + + DoFnOperator> doFnOperator = new DoFnOperator<>( + windowAssignerDoFn, + inputTypeInfo, + new TupleTag("main output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory>(), + windowingStrategy, + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + context.getPipelineOptions()); + + DataStream> inputDataStream = + context.getInputDataStream(context.getInput(transform)); + + 
SingleOutputStreamOperator> outDataStream = inputDataStream + .transform(transform.getName(), typeInfo, doFnOperator); + + context.setOutputDataStream(context.getOutput(transform), outDataStream); + } + + private static OldDoFn createWindowAssigner( + final WindowFn windowFn) { + return new OldDoFn() { @Override @@ -373,138 +671,309 @@ public BoundedWindow window() { } } - public static class GroupByKeyTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class ReshuffleTranslatorStreaming + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { @Override - public void translateNode(GroupByKey transform, FlinkStreamingTranslationContext context) { - PValue input = context.getInput(transform); - - DataStream>> inputDataStream = context.getInputDataStream(input); - KvCoder inputKvCoder = (KvCoder) context.getInput(transform).getCoder(); + public void translateNode( + Reshuffle transform, + FlinkStreamingTranslationContext context) { - KeyedStream>, K> groupByKStream = FlinkGroupByKeyWrapper - .groupStreamByKey(inputDataStream, inputKvCoder); + DataStream>> inputDataSet = + context.getInputDataStream(context.getInput(transform)); - DataStream>>> groupedByKNWstream = - FlinkGroupAlsoByWindowWrapper.createForIterable(context.getPipelineOptions(), - context.getInput(transform), groupByKStream); + context.setOutputDataStream(context.getOutput(transform), inputDataSet.rebalance()); - context.setOutputDataStream(context.getOutput(transform), groupedByKNWstream); } } - public static class CombinePerKeyTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { - - @Override - public void translateNode(Combine.PerKey transform, FlinkStreamingTranslationContext context) { - PValue input = context.getInput(transform); - DataStream>> inputDataStream = context.getInputDataStream(input); - KvCoder inputKvCoder = (KvCoder) context.getInput(transform).getCoder(); - KvCoder outputKvCoder = (KvCoder) 
context.getOutput(transform).getCoder(); + private static class GroupByKeyTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator> { - KeyedStream>, K> groupByKStream = FlinkGroupByKeyWrapper - .groupStreamByKey(inputDataStream, inputKvCoder); + @Override + public void translateNode( + GroupByKey transform, + FlinkStreamingTranslationContext context) { + + PCollection> input = context.getInput(transform); + + @SuppressWarnings("unchecked") + WindowingStrategy windowingStrategy = + (WindowingStrategy) input.getWindowingStrategy(); + + KvCoder inputKvCoder = (KvCoder) input.getCoder(); + + SingletonKeyedWorkItemCoder workItemCoder = SingletonKeyedWorkItemCoder.of( + inputKvCoder.getKeyCoder(), + inputKvCoder.getValueCoder(), + input.getWindowingStrategy().getWindowFn().windowCoder()); + + DataStream>> inputDataStream = context.getInputDataStream(input); + + WindowedValue. + FullWindowedValueCoder> windowedWorkItemCoder = + WindowedValue.getFullCoder( + workItemCoder, + input.getWindowingStrategy().getWindowFn().windowCoder()); + + CoderTypeInformation>> workItemTypeInfo = + new CoderTypeInformation<>(windowedWorkItemCoder); + + DataStream>> workItemStream = + inputDataStream + .flatMap(new CombinePerKeyTranslator.ToKeyedWorkItem()) + .returns(workItemTypeInfo).name("ToKeyedWorkItem"); + + KeyedStream< + WindowedValue< + SingletonKeyedWorkItem>, ByteBuffer> keyedWorkItemStream = workItemStream + .keyBy(new WorkItemKeySelector(inputKvCoder.getKeyCoder())); + + SystemReduceFn, Iterable, BoundedWindow> reduceFn = + SystemReduceFn.buffering(inputKvCoder.getValueCoder()); + + TypeInformation>>> outputTypeInfo = + context.getTypeInfo(context.getOutput(transform)); + + DoFnOperator.DefaultOutputManagerFactory< + WindowedValue>>> outputManagerFactory = + new DoFnOperator.DefaultOutputManagerFactory<>(); + + WindowDoFnOperator> doFnOperator = + new WindowDoFnOperator<>( + reduceFn, + (TypeInformation) workItemTypeInfo, + new TupleTag>>("main output"), + 
Collections.>emptyList(), + outputManagerFactory, + windowingStrategy, + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + context.getPipelineOptions(), + inputKvCoder.getKeyCoder()); + + // our operator excepts WindowedValue while our input stream + // is WindowedValue, which is fine but Java doesn't like it ... + @SuppressWarnings("unchecked") + SingleOutputStreamOperator>>> outDataStream = + keyedWorkItemStream + .transform( + transform.getName(), + outputTypeInfo, + (OneInputStreamOperator) doFnOperator); - Combine.KeyedCombineFn combineFn = (Combine.KeyedCombineFn) transform.getFn(); - DataStream>> groupedByKNWstream = - FlinkGroupAlsoByWindowWrapper.create(context.getPipelineOptions(), - context.getInput(transform), groupByKStream, combineFn, outputKvCoder); + context.setOutputDataStream(context.getOutput(transform), outDataStream); - context.setOutputDataStream(context.getOutput(transform), groupedByKNWstream); } } - public static class FlattenPCollectionTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + private static class CombinePerKeyTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator< + Combine.PerKey> { @Override - public void translateNode(Flatten.FlattenPCollectionList transform, FlinkStreamingTranslationContext context) { - List> allInputs = context.getInput(transform).getAll(); - DataStream result = null; - for (PCollection collection : allInputs) { - DataStream current = context.getInputDataStream(collection); - result = (result == null) ? 
current : result.union(current); - } - context.setOutputDataStream(context.getOutput(transform), result); - } - } - - public static class ParDoBoundMultiStreamingTranslator implements FlinkStreamingPipelineTranslator.StreamTransformTranslator> { + boolean canTranslate( + Combine.PerKey transform, + FlinkStreamingTranslationContext context) { - private final int MAIN_TAG_INDEX = 0; + // if we have a merging window strategy and side inputs we cannot + // translate as a proper combine. We have to group and then run the combine + // over the final grouped values. + PCollection> input = context.getInput(transform); - @Override - public void translateNode(ParDo.BoundMulti transform, FlinkStreamingTranslationContext context) { - - // we assume that the transformation does not change the windowing strategy. - WindowingStrategy windowingStrategy = context.getInput(transform).getWindowingStrategy(); + @SuppressWarnings("unchecked") + WindowingStrategy windowingStrategy = + (WindowingStrategy) input.getWindowingStrategy(); - Map, PCollection> outputs = context.getOutput(transform).getAll(); - Map, Integer> tagsToLabels = transformTupleTagsToLabels( - transform.getMainOutputTag(), outputs.keySet()); + return windowingStrategy.getWindowFn().isNonMerging() || transform.getSideInputs().isEmpty(); + } - UnionCoder intermUnionCoder = getIntermUnionCoder(outputs.values()); - WindowedValue.WindowedValueCoder outputStreamCoder = WindowedValue.getFullCoder( - intermUnionCoder, windowingStrategy.getWindowFn().windowCoder()); + @Override + public void translateNode( + Combine.PerKey transform, + FlinkStreamingTranslationContext context) { + + PCollection> input = context.getInput(transform); + + @SuppressWarnings("unchecked") + WindowingStrategy windowingStrategy = + (WindowingStrategy) input.getWindowingStrategy(); + + KvCoder inputKvCoder = (KvCoder) input.getCoder(); + + SingletonKeyedWorkItemCoder workItemCoder = SingletonKeyedWorkItemCoder.of( + inputKvCoder.getKeyCoder(), + 
inputKvCoder.getValueCoder(), + input.getWindowingStrategy().getWindowFn().windowCoder()); + + DataStream>> inputDataStream = context.getInputDataStream(input); + + WindowedValue. + FullWindowedValueCoder> windowedWorkItemCoder = + WindowedValue.getFullCoder( + workItemCoder, + input.getWindowingStrategy().getWindowFn().windowCoder()); + + CoderTypeInformation>> workItemTypeInfo = + new CoderTypeInformation<>(windowedWorkItemCoder); + + DataStream>> workItemStream = + inputDataStream + .flatMap(new ToKeyedWorkItem()) + .returns(workItemTypeInfo).name("ToKeyedWorkItem"); + + KeyedStream< + WindowedValue< + SingletonKeyedWorkItem>, ByteBuffer> keyedWorkItemStream = workItemStream + .keyBy(new WorkItemKeySelector(inputKvCoder.getKeyCoder())); + + SystemReduceFn reduceFn = SystemReduceFn.combining( + inputKvCoder.getKeyCoder(), + AppliedCombineFn.withInputCoder( + transform.getFn(), input.getPipeline().getCoderRegistry(), inputKvCoder)); + + TypeInformation>> outputTypeInfo = + context.getTypeInfo(context.getOutput(transform)); + + List> sideInputs = transform.getSideInputs(); + + if (sideInputs.isEmpty()) { + + WindowDoFnOperator doFnOperator = + new WindowDoFnOperator<>( + reduceFn, + (TypeInformation) workItemTypeInfo, + new TupleTag>("main output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory>>(), + windowingStrategy, + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + context.getPipelineOptions(), + inputKvCoder.getKeyCoder()); + + // our operator excepts WindowedValue while our input stream + // is WindowedValue, which is fine but Java doesn't like it ... 
+ @SuppressWarnings("unchecked") + SingleOutputStreamOperator>> outDataStream = + keyedWorkItemStream.transform( + transform.getName(), outputTypeInfo, (OneInputStreamOperator) doFnOperator); - CoderTypeInformation> intermWindowedValueCoder = - new CoderTypeInformation<>(outputStreamCoder); + context.setOutputDataStream(context.getOutput(transform), outDataStream); + } else { + Tuple2>, DataStream> transformSideInputs = + transformSideInputs(sideInputs, context); + + WindowDoFnOperator doFnOperator = + new WindowDoFnOperator<>( + reduceFn, + (TypeInformation) workItemTypeInfo, + new TupleTag>("main output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory>>(), + windowingStrategy, + transformSideInputs.f0, + sideInputs, + context.getPipelineOptions(), + inputKvCoder.getKeyCoder()); + + // we have to manually contruct the two-input transform because we're not + // allowed to have only one input keyed, normally. + + TwoInputTransformation< + WindowedValue>, + RawUnionValue, + WindowedValue>> rawFlinkTransform = new TwoInputTransformation<>( + keyedWorkItemStream.getTransformation(), + transformSideInputs.f1.broadcast().getTransformation(), + transform.getName(), + (TwoInputStreamOperator) doFnOperator, + outputTypeInfo, + keyedWorkItemStream.getParallelism()); + + rawFlinkTransform.setStateKeyType(keyedWorkItemStream.getKeyType()); + rawFlinkTransform.setStateKeySelectors(keyedWorkItemStream.getKeySelector(), null); + + @SuppressWarnings({ "unchecked", "rawtypes" }) + SingleOutputStreamOperator>> outDataStream = + new SingleOutputStreamOperator( + keyedWorkItemStream.getExecutionEnvironment(), + rawFlinkTransform) {}; // we have to cheat around the ctor being protected + + keyedWorkItemStream.getExecutionEnvironment().addOperator(rawFlinkTransform); + + context.setOutputDataStream(context.getOutput(transform), outDataStream); + } + } - FlinkParDoBoundMultiWrapper doFnWrapper = new FlinkParDoBoundMultiWrapper<>( - 
context.getPipelineOptions(), windowingStrategy, transform.getFn(), - transform.getMainOutputTag(), tagsToLabels); + private static class ToKeyedWorkItem + extends RichFlatMapFunction< + WindowedValue>, + WindowedValue>> { + + @Override + public void flatMap( + WindowedValue> inWithMultipleWindows, + Collector>> out) throws Exception { + + // we need to wrap each one work item per window for now + // since otherwise the PushbackSideInputRunner will not correctly + // determine whether side inputs are ready + for (WindowedValue> in : inWithMultipleWindows.explodeWindows()) { + SingletonKeyedWorkItem workItem = + new SingletonKeyedWorkItem<>( + in.getValue().getKey(), + in.withValue(in.getValue().getValue())); + + in.withValue(workItem); + out.collect(in.withValue(workItem)); + } + } + } + } - DataStream> inputDataStream = context.getInputDataStream(context.getInput(transform)); - SingleOutputStreamOperator> intermDataStream = - inputDataStream.flatMap(doFnWrapper).returns(intermWindowedValueCoder); + private static class FlattenPCollectionTranslator + extends FlinkStreamingPipelineTranslator.StreamTransformTranslator< + Flatten.FlattenPCollectionList> { - for (Map.Entry, PCollection> output : outputs.entrySet()) { - final int outputTag = tagsToLabels.get(output.getKey()); + @Override + public void translateNode( + Flatten.FlattenPCollectionList transform, + FlinkStreamingTranslationContext context) { + List> allInputs = context.getInput(transform).getAll(); - WindowedValue.WindowedValueCoder coderForTag = WindowedValue.getFullCoder( - output.getValue().getCoder(), - windowingStrategy.getWindowFn().windowCoder()); + if (allInputs.isEmpty()) { - CoderTypeInformation> windowedValueCoder = - new CoderTypeInformation(coderForTag); + // create an empty dummy source to satisfy downstream operations + // we cannot create an empty source in Flink, therefore we have to + // add the flatMap that simply never forwards the single element + DataStreamSource dummySource = + 
context.getExecutionEnvironment().fromElements("dummy"); - context.setOutputDataStream(output.getValue(), - intermDataStream.filter(new FilterFunction>() { + DataStream> result = dummySource.flatMap( + new FlatMapFunction>() { @Override - public boolean filter(WindowedValue value) throws Exception { - return value.getValue().getUnionTag() == outputTag; + public void flatMap( + String s, + Collector> collector) throws Exception { + // never return anything } - }).flatMap(new FlatMapFunction, WindowedValue>() { - @Override - public void flatMap(WindowedValue value, Collector> collector) throws Exception { - collector.collect(WindowedValue.of( - value.getValue().getValue(), - value.getTimestamp(), - value.getWindows(), - value.getPane())); - } - }).returns(windowedValueCoder)); - } - } + }).returns( + new CoderTypeInformation<>( + WindowedValue.getFullCoder( + (Coder) VoidCoder.of(), + GlobalWindow.Coder.INSTANCE))); + context.setOutputDataStream(context.getOutput(transform), result); - private Map, Integer> transformTupleTagsToLabels(TupleTag mainTag, Set> secondaryTags) { - Map, Integer> tagToLabelMap = Maps.newHashMap(); - tagToLabelMap.put(mainTag, MAIN_TAG_INDEX); - int count = MAIN_TAG_INDEX + 1; - for (TupleTag tag : secondaryTags) { - if (!tagToLabelMap.containsKey(tag)) { - tagToLabelMap.put(tag, count++); + } else { + DataStream result = null; + for (PCollection collection : allInputs) { + DataStream current = context.getInputDataStream(collection); + result = (result == null) ? 
current : result.union(current); } + context.setOutputDataStream(context.getOutput(transform), result); } - return tagToLabelMap; - } - - private UnionCoder getIntermUnionCoder(Collection> taggedCollections) { - List> outputCoders = Lists.newArrayList(); - for (PCollection coll : taggedCollections) { - outputCoders.add(coll.getCoder()); - } - return UnionCoder.of(outputCoders); } } } diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTranslationContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTranslationContext.java index a75ef034c850c..bc80d42834ff9 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTranslationContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/FlinkStreamingTranslationContext.java @@ -19,6 +19,8 @@ import static com.google.common.base.Preconditions.checkNotNull; +import java.util.HashMap; +import java.util.Map; import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -29,14 +31,10 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.PValue; - import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import java.util.HashMap; -import java.util.Map; - /** * Helper for keeping track of which {@link DataStream DataStreams} map * to which {@link PTransform PTransforms}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignContext.java index 6abb8ffcbb86a..447b1e507e1a5 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignContext.java @@ -19,16 +19,12 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.collect.Iterables; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.util.WindowedValue; - -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.Collection; - /** * {@link org.apache.beam.sdk.transforms.windowing.WindowFn.AssignContext} for * Flink functions. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignWindows.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignWindows.java index e07e49a2f0609..f241ad0dc5c0f 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignWindows.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkAssignWindows.java @@ -17,15 +17,13 @@ */ package org.apache.beam.runners.flink.translation.functions; +import java.util.Collection; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.util.WindowedValue; - import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.util.Collector; -import java.util.Collection; - /** * Flink {@link FlatMapFunction} for implementing * {@link org.apache.beam.sdk.transforms.windowing.Window.Bound}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java index fdf1e59e06d39..ac5b345db6783 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkDoFnFunction.java @@ -17,19 +17,17 @@ */ package org.apache.beam.runners.flink.translation.functions; +import java.util.Map; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollectionView; - import org.apache.flink.api.common.functions.RichMapPartitionFunction; import org.apache.flink.configuration.Configuration; import org.apache.flink.util.Collector; -import java.util.Map; - /** * Encapsulates a {@link OldDoFn} * inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}. 
@@ -94,8 +92,8 @@ public void mapPartition( } } - // set the windowed value to null so that the logic - // or outputting in finishBundle kicks in + // set the windowed value to null so that the special logic for outputting + // in startBundle/finishBundle kicks in context = context.forWindowedValue(null); this.doFn.finishBundle(context); } diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java index 2d360436e52f2..dbaab1706ec5f 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingNonShuffleReduceFunction.java @@ -17,6 +17,13 @@ */ package org.apache.beam.runners.flink.translation.functions; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase; @@ -31,20 +38,10 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.apache.flink.api.common.functions.RichGroupReduceFunction; import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.Collections; -import java.util.Comparator; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - /** * Special version of {@link FlinkReduceFunction} 
that supports merging windows. This * assumes that the windows are {@link IntervalWindow IntervalWindows} and exhibits the diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingPartialReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingPartialReduceFunction.java index c12e4204a3f0a..bc09bdf05c2fd 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingPartialReduceFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingPartialReduceFunction.java @@ -17,6 +17,13 @@ */ package org.apache.beam.runners.flink.translation.functions; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; @@ -29,19 +36,9 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.Collections; -import java.util.Comparator; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - /** * Special version of {@link FlinkPartialReduceFunction} that supports merging windows. 
This * assumes that the windows are {@link IntervalWindow IntervalWindows} and exhibits the diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingReduceFunction.java index 07d1c97415336..4050f47a2ec48 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingReduceFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMergingReduceFunction.java @@ -17,6 +17,15 @@ */ package org.apache.beam.runners.flink.translation.functions; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; @@ -29,21 +38,9 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - /** * Special version of {@link FlinkReduceFunction} that supports merging windows. 
This * assumes that the windows are {@link IntervalWindow IntervalWindows} and exhibits the diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java index 5013b90d22d5a..9cc84ca659252 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputDoFnFunction.java @@ -17,6 +17,7 @@ */ package org.apache.beam.runners.flink.translation.functions; +import java.util.Map; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.OldDoFn; @@ -25,16 +26,13 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - import org.apache.flink.api.common.functions.RichMapPartitionFunction; import org.apache.flink.configuration.Configuration; import org.apache.flink.util.Collector; -import java.util.Map; - /** - * Encapsulates a {@link OldDoFn} that uses side outputs - * inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}. + * Encapsulates a {@link OldDoFn} that can emit to multiple + * outputs inside a Flink {@link org.apache.flink.api.common.functions.RichMapPartitionFunction}. * * We get a mapping from {@link org.apache.beam.sdk.values.TupleTag} to output index * and must tag all outputs with the output number. 
Afterwards a filter will filter out @@ -106,6 +104,9 @@ public void mapPartition( } } + // set the windowed value to null so that the special logic for outputting + // in startBundle/finishBundle kicks in + context = context.forWindowedValue(null); this.doFn.finishBundle(context); } diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java index fab3c856bfea2..153a2d7bdd7f6 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputProcessContext.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.flink.translation.functions; +import java.util.Collection; +import java.util.Map; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.join.RawUnionValue; @@ -26,14 +28,10 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.Collection; -import java.util.Map; - /** * {@link OldDoFn.ProcessContext} for {@link FlinkMultiOutputDoFnFunction} that supports * side outputs. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputPruningFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputPruningFunction.java index 9205a5520f827..b72750a366d7e 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputPruningFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkMultiOutputPruningFunction.java @@ -19,7 +19,6 @@ import org.apache.beam.sdk.transforms.join.RawUnionValue; import org.apache.beam.sdk.util.WindowedValue; - import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.util.Collector; diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java index 98446f9c7ba4a..c89027262cd9c 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkNoElementAssignContext.java @@ -20,7 +20,6 @@ import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.WindowFn; - import org.joda.time.Instant; /** diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java index 2db4b7b53b36f..fa2ce4d46a053 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java +++ 
b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkPartialReduceFunction.java @@ -17,6 +17,13 @@ */ package org.apache.beam.runners.flink.translation.functions; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.Map; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase; @@ -30,20 +37,10 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.apache.flink.api.common.functions.RichGroupCombineFunction; import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Iterator; -import java.util.Map; - /** * This is is the first step for executing a {@link org.apache.beam.sdk.transforms.Combine.PerKey} * on Flink. The second part is {@link FlinkReduceFunction}. 
This function performs a local diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java index 64b93c89513ef..fa5eb1ab10eec 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkProcessContext.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregatorWrapper; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -34,19 +40,10 @@ import org.apache.beam.sdk.util.state.StateInternals; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.Iterables; - import org.apache.flink.api.common.functions.RuntimeContext; import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.Map; - /** * {@link OldDoFn.ProcessContext} for our Flink Wrappers. 
*/ diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java index b1729a42a40f5..c9b24b456c030 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/FlinkReduceFunction.java @@ -17,6 +17,15 @@ */ package org.apache.beam.runners.flink.translation.functions; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase; @@ -30,22 +39,10 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.apache.flink.api.common.functions.RichGroupReduceFunction; import org.apache.flink.util.Collector; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - /** * This is the second part for executing a {@link org.apache.beam.sdk.transforms.Combine.PerKey} * on Flink, the second part is {@link FlinkReduceFunction}. 
This function performs the final diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/SideInputInitializer.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/SideInputInitializer.java index a577b684b849f..12222b499d746 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/SideInputInitializer.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/functions/SideInputInitializer.java @@ -17,16 +17,14 @@ */ package org.apache.beam.runners.flink.translation.functions; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.values.PCollectionView; - -import org.apache.flink.api.common.functions.BroadcastVariableInitializer; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.flink.api.common.functions.BroadcastVariableInitializer; /** * {@link BroadcastVariableInitializer} that initializes the broadcast input as a {@code Map} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeInformation.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeInformation.java index 71cc6b7790cc6..9b449aabc8b59 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeInformation.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeInformation.java @@ -20,7 +20,6 @@ import static com.google.common.base.Preconditions.checkNotNull; import org.apache.beam.sdk.coders.Coder; - import 
org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.AtomicType; import org.apache.flink.api.common.typeinfo.TypeInformation; @@ -40,6 +39,10 @@ public CoderTypeInformation(Coder coder) { this.coder = coder; } + public Coder getCoder() { + return coder; + } + @Override public boolean isBasicType() { return false; diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeSerializer.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeSerializer.java index 46219511213bb..4eda357f64eb7 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeSerializer.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/CoderTypeSerializer.java @@ -17,21 +17,17 @@ */ package org.apache.beam.runners.flink.translation.types; +import java.io.EOFException; +import java.io.IOException; import org.apache.beam.runners.flink.translation.wrappers.DataInputViewWrapper; import org.apache.beam.runners.flink.translation.wrappers.DataOutputViewWrapper; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.util.CoderUtils; - import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; -import java.io.ByteArrayInputStream; -import java.io.EOFException; -import java.io.IOException; -import java.io.ObjectInputStream; - /** * Flink {@link org.apache.flink.api.common.typeutils.TypeSerializer} for * Dataflow {@link org.apache.beam.sdk.coders.Coder Coders}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueComparator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueComparator.java index 69bcb415c3179..667ef4591a3f0 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueComparator.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueComparator.java @@ -17,16 +17,14 @@ */ package org.apache.beam.runners.flink.translation.types; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.sdk.coders.Coder; - import org.apache.flink.api.common.typeutils.TypeComparator; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.core.memory.MemorySegment; -import java.io.IOException; -import java.util.Arrays; - /** * Flink {@link org.apache.flink.api.common.typeutils.TypeComparator} for Beam values that have * been encoded to byte data by a {@link Coder}. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueSerializer.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueSerializer.java index 33af8d9577239..f3e667d575a5f 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueSerializer.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueSerializer.java @@ -17,14 +17,12 @@ */ package org.apache.beam.runners.flink.translation.types; +import java.io.IOException; import org.apache.beam.sdk.coders.Coder; - import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; -import java.io.IOException; - /** * {@link TypeSerializer} for values that were encoded using a {@link Coder}. */ diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java index 46c854f087b81..0315ae3704fb7 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueTypeInformation.java @@ -18,16 +18,12 @@ package org.apache.beam.runners.flink.translation.types; import org.apache.beam.sdk.coders.Coder; - import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeinfo.AtomicType; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.TypeComparator; import org.apache.flink.api.common.typeutils.TypeSerializer; -import 
org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer; - -import java.util.Objects; /** * Flink {@link TypeInformation} for Beam values that have been encoded to byte data diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java index 3b1e66e8e8ff9..8b90c73a26fbd 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/FlinkCoder.java @@ -17,6 +17,11 @@ */ package org.apache.beam.runners.flink.translation.types; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StandardCoder; import org.apache.flink.api.common.ExecutionConfig; @@ -25,12 +30,6 @@ import org.apache.flink.core.memory.DataInputViewStreamWrapper; import org.apache.flink.core.memory.DataOutputViewStreamWrapper; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collections; -import java.util.List; - /** * A Coder that uses Flink's serialization system. 
* @param The type of the value to be encoded diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/KvKeySelector.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/KvKeySelector.java index 80d20cae9d9bd..9df683620ec1f 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/KvKeySelector.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/types/KvKeySelector.java @@ -21,7 +21,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; - import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.api.java.typeutils.ResultTypeQueryable; diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java index 44af0ea406a7a..0c6cea8dfd29a 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/utils/SerializedPipelineOptions.java @@ -20,13 +20,11 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.PipelineOptions; - import com.fasterxml.jackson.databind.ObjectMapper; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.Serializable; +import org.apache.beam.sdk.options.PipelineOptions; /** * Encapsulates the PipelineOptions in serialized form to ship them to the cluster. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataInputViewWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataInputViewWrapper.java index f1b8c7387e40e..82a2c4ed2b9f1 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataInputViewWrapper.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataInputViewWrapper.java @@ -17,11 +17,10 @@ */ package org.apache.beam.runners.flink.translation.wrappers; -import org.apache.flink.core.memory.DataInputView; - import java.io.EOFException; import java.io.IOException; import java.io.InputStream; +import org.apache.flink.core.memory.DataInputView; /** * Wrapper for {@link DataInputView}. We need this because Flink reads data using a diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataOutputViewWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataOutputViewWrapper.java index 148f9607c21e6..2cb9b188ff064 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataOutputViewWrapper.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/DataOutputViewWrapper.java @@ -17,10 +17,9 @@ */ package org.apache.beam.runners.flink.translation.wrappers; -import org.apache.flink.core.memory.DataOutputView; - import java.io.IOException; import java.io.OutputStream; +import org.apache.flink.core.memory.DataOutputView; /** * Wrapper for {@link org.apache.flink.core.memory.DataOutputView}. 
We need this because diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SerializableFnAggregatorWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SerializableFnAggregatorWrapper.java index 82d3fb8ffae30..25d777a3b1252 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SerializableFnAggregatorWrapper.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SerializableFnAggregatorWrapper.java @@ -17,15 +17,12 @@ */ package org.apache.beam.runners.flink.translation.wrappers; -import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.Combine; - import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; - -import org.apache.flink.api.common.accumulators.Accumulator; - import java.io.Serializable; +import org.apache.beam.sdk.transforms.Aggregator; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.flink.api.common.accumulators.Accumulator; /** * Wrapper that wraps a {@link org.apache.beam.sdk.transforms.Combine.CombineFn} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputFormat.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputFormat.java index 1d06b1ac2fc96..443378f8d1f7a 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputFormat.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputFormat.java @@ -17,6 +17,8 @@ */ package org.apache.beam.runners.flink.translation.wrappers; +import java.io.IOException; +import java.util.List; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.Source; 
@@ -24,7 +26,6 @@ import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; - import org.apache.flink.api.common.io.DefaultInputSplitAssigner; import org.apache.flink.api.common.io.InputFormat; import org.apache.flink.api.common.io.statistics.BaseStatistics; @@ -34,9 +35,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.List; - /** * Wrapper for executing a {@link Source} as a Flink {@link InputFormat}. diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputSplit.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputSplit.java index c3672c0c3ce76..e4a738695be01 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputSplit.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/SourceInputSplit.java @@ -18,7 +18,6 @@ package org.apache.beam.runners.flink.translation.wrappers; import org.apache.beam.sdk.io.Source; - import org.apache.flink.core.io.InputSplit; /** diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java new file mode 100644 index 0000000000000..000d69f34823e --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/DoFnOperator.java @@ -0,0 +1,512 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import avro.shaded.com.google.common.base.Preconditions; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.beam.runners.core.SideInputHandler; +import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; +import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregatorWrapper; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.VoidCoder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.Aggregator; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.transforms.join.RawUnionValue; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.DoFnRunner; +import org.apache.beam.sdk.util.DoFnRunners; +import org.apache.beam.sdk.util.ExecutionContext; +import org.apache.beam.sdk.util.NullSideInputReader; +import org.apache.beam.sdk.util.PushbackSideInputDoFnRunner; +import 
org.apache.beam.sdk.util.SideInputReader; +import org.apache.beam.sdk.util.TimerInternals; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.flink.api.common.ExecutionConfig; +import org.apache.flink.api.common.functions.ReduceFunction; +import org.apache.flink.api.common.state.ListState; +import org.apache.flink.api.common.state.ListStateDescriptor; +import org.apache.flink.api.common.state.ReducingState; +import org.apache.flink.api.common.state.ReducingStateDescriptor; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.common.typeutils.base.LongSerializer; +import org.apache.flink.api.common.typeutils.base.VoidSerializer; +import org.apache.flink.api.java.typeutils.GenericTypeInfo; +import org.apache.flink.runtime.state.AbstractStateBackend; +import org.apache.flink.runtime.state.KvStateSnapshot; +import org.apache.flink.runtime.state.StateHandle; +import org.apache.flink.streaming.api.operators.AbstractStreamOperator; +import org.apache.flink.streaming.api.operators.ChainingStrategy; +import org.apache.flink.streaming.api.operators.OneInputStreamOperator; +import org.apache.flink.streaming.api.operators.Output; +import org.apache.flink.streaming.api.operators.TwoInputStreamOperator; +import org.apache.flink.streaming.api.watermark.Watermark; +import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; +import org.apache.flink.streaming.runtime.tasks.StreamTaskState; + +/** + * Flink operator for executing {@link DoFn DoFns}. 
+ * + * @param + * @param + * @param + */ +public class DoFnOperator + extends AbstractStreamOperator + implements OneInputStreamOperator, OutputT>, + TwoInputStreamOperator, RawUnionValue, OutputT> { + + protected OldDoFn doFn; + protected final SerializedPipelineOptions serializedOptions; + + protected final TupleTag mainOutputTag; + protected final List> sideOutputTags; + + protected final Collection> sideInputs; + protected final Map> sideInputTagMapping; + + protected final boolean hasSideInputs; + + protected final WindowingStrategy windowingStrategy; + + protected final OutputManagerFactory outputManagerFactory; + + protected transient PushbackSideInputDoFnRunner pushbackDoFnRunner; + + protected transient SideInputHandler sideInputHandler; + + protected transient long currentInputWatermark; + + protected transient long currentOutputWatermark; + + private transient AbstractStateBackend sideInputStateBackend; + + private final ReducingStateDescriptor pushedBackWatermarkDescriptor; + + private final ListStateDescriptor> pushedBackDescriptor; + + private transient Map> restoredSideInputState; + + public DoFnOperator( + OldDoFn doFn, + TypeInformation> inputType, + TupleTag mainOutputTag, + List> sideOutputTags, + OutputManagerFactory outputManagerFactory, + WindowingStrategy windowingStrategy, + Map> sideInputTagMapping, + Collection> sideInputs, + PipelineOptions options) { + this.doFn = doFn; + this.mainOutputTag = mainOutputTag; + this.sideOutputTags = sideOutputTags; + this.sideInputTagMapping = sideInputTagMapping; + this.sideInputs = sideInputs; + this.serializedOptions = new SerializedPipelineOptions(options); + this.windowingStrategy = windowingStrategy; + this.outputManagerFactory = outputManagerFactory; + + this.hasSideInputs = !sideInputs.isEmpty(); + + this.pushedBackWatermarkDescriptor = + new ReducingStateDescriptor<>( + "pushed-back-elements-watermark-hold", + new LongMinReducer(), + LongSerializer.INSTANCE); + + this.pushedBackDescriptor = + new 
ListStateDescriptor<>("pushed-back-values", inputType); + + setChainingStrategy(ChainingStrategy.ALWAYS); + } + + protected ExecutionContext.StepContext createStepContext() { + return new StepContext(); + } + + // allow overriding this in WindowDoFnOperator because this one dynamically creates + // the DoFn + protected OldDoFn getDoFn() { + return doFn; + } + + @Override + public void open() throws Exception { + super.open(); + + this.doFn = getDoFn(); + + currentInputWatermark = Long.MIN_VALUE; + currentOutputWatermark = currentInputWatermark; + + Aggregator.AggregatorFactory aggregatorFactory = new Aggregator.AggregatorFactory() { + @Override + public Aggregator createAggregatorForDoFn( + Class fnClass, + ExecutionContext.StepContext stepContext, + String aggregatorName, + Combine.CombineFn combine) { + SerializableFnAggregatorWrapper result = + new SerializableFnAggregatorWrapper<>(combine); + + getRuntimeContext().addAccumulator(aggregatorName, result); + return result; + } + }; + + SideInputReader sideInputReader = NullSideInputReader.of(sideInputs); + if (!sideInputs.isEmpty()) { + String operatorIdentifier = + this.getClass().getSimpleName() + "_" + + getRuntimeContext().getIndexOfThisSubtask() + "_sideInput"; + + sideInputStateBackend = this + .getContainingTask() + .createStateBackend(operatorIdentifier, + new GenericTypeInfo<>(ByteBuffer.class).createSerializer(new ExecutionConfig())); + + Preconditions.checkState( + sideInputStateBackend != null, + "Side input state backend cannot be bull"); + + if (restoredSideInputState != null) { + @SuppressWarnings("unchecked,rawtypes") + HashMap castRestored = (HashMap) restoredSideInputState; + sideInputStateBackend.injectKeyValueStateSnapshots(castRestored, 0L); + restoredSideInputState = null; + } + + sideInputStateBackend.setCurrentKey( + ByteBuffer.wrap(CoderUtils.encodeToByteArray(VoidCoder.of(), null))); + + StateInternals sideInputStateInternals = + new FlinkStateInternals<>(sideInputStateBackend, 
VoidCoder.of()); + + sideInputHandler = new SideInputHandler(sideInputs, sideInputStateInternals); + sideInputReader = sideInputHandler; + } + + DoFnRunner doFnRunner = DoFnRunners.createDefault( + serializedOptions.getPipelineOptions(), + doFn, + sideInputReader, + outputManagerFactory.create(output), + mainOutputTag, + sideOutputTags, + createStepContext(), + aggregatorFactory, + windowingStrategy); + + pushbackDoFnRunner = + PushbackSideInputDoFnRunner.create(doFnRunner, sideInputs, sideInputHandler); + + doFn.setup(); + } + + @Override + public void close() throws Exception { + super.close(); + doFn.teardown(); + } + + protected final long getPushbackWatermarkHold() { + // if we don't have side inputs we never hold the watermark + if (sideInputs.isEmpty()) { + return Long.MAX_VALUE; + } + + try { + Long result = sideInputStateBackend.getPartitionedState( + null, + VoidSerializer.INSTANCE, + pushedBackWatermarkDescriptor).get(); + return result != null ? result : Long.MAX_VALUE; + } catch (Exception e) { + throw new RuntimeException("Error retrieving pushed back watermark state.", e); + } + } + + @Override + public final void processElement( + StreamRecord> streamRecord) throws Exception { + pushbackDoFnRunner.startBundle(); + pushbackDoFnRunner.processElement(streamRecord.getValue()); + pushbackDoFnRunner.finishBundle(); + } + + @Override + public final void processElement1( + StreamRecord> streamRecord) throws Exception { + pushbackDoFnRunner.startBundle(); + Iterable> justPushedBack = + pushbackDoFnRunner.processElementInReadyWindows(streamRecord.getValue()); + + ListState> pushedBack = + sideInputStateBackend.getPartitionedState( + null, + VoidSerializer.INSTANCE, + pushedBackDescriptor); + + ReducingState pushedBackWatermark = + sideInputStateBackend.getPartitionedState( + null, + VoidSerializer.INSTANCE, + pushedBackWatermarkDescriptor); + + for (WindowedValue pushedBackValue : justPushedBack) { + 
pushedBackWatermark.add(pushedBackValue.getTimestamp().getMillis()); + pushedBack.add(pushedBackValue); + } + pushbackDoFnRunner.finishBundle(); + } + + @Override + public final void processElement2( + StreamRecord streamRecord) throws Exception { + pushbackDoFnRunner.startBundle(); + + @SuppressWarnings("unchecked") + WindowedValue> value = + (WindowedValue>) streamRecord.getValue().getValue(); + + PCollectionView sideInput = sideInputTagMapping.get(streamRecord.getValue().getUnionTag()); + sideInputHandler.addSideInputValue(sideInput, value); + + ListState> pushedBack = + sideInputStateBackend.getPartitionedState( + null, + VoidSerializer.INSTANCE, + pushedBackDescriptor); + + List> newPushedBack = new ArrayList<>(); + for (WindowedValue elem: pushedBack.get()) { + + // we need to set the correct key in case the operator is + // a (keyed) window operator + setKeyContextElement1(new StreamRecord<>(elem)); + + Iterable> justPushedBack = + pushbackDoFnRunner.processElementInReadyWindows(elem); + Iterables.addAll(newPushedBack, justPushedBack); + } + + + ReducingState pushedBackWatermark = + sideInputStateBackend.getPartitionedState( + null, + VoidSerializer.INSTANCE, + pushedBackWatermarkDescriptor); + + pushedBack.clear(); + pushedBackWatermark.clear(); + for (WindowedValue pushedBackValue : newPushedBack) { + pushedBackWatermark.add(pushedBackValue.getTimestamp().getMillis()); + pushedBack.add(pushedBackValue); + } + + pushbackDoFnRunner.finishBundle(); + + // maybe output a new watermark + processWatermark1(new Watermark(currentInputWatermark)); + } + + @Override + public void processWatermark(Watermark mark) throws Exception { + processWatermark1(mark); + } + + @Override + public void processWatermark1(Watermark mark) throws Exception { + this.currentInputWatermark = mark.getTimestamp(); + long potentialOutputWatermark = + Math.min(getPushbackWatermarkHold(), currentInputWatermark); + if (potentialOutputWatermark > currentOutputWatermark) { + 
currentOutputWatermark = potentialOutputWatermark; + output.emitWatermark(new Watermark(currentOutputWatermark)); + } + } + + @Override + public void processWatermark2(Watermark mark) throws Exception { + // ignore watermarks from the side-input input + } + + @Override + public StreamTaskState snapshotOperatorState( + long checkpointId, + long timestamp) throws Exception { + + StreamTaskState streamTaskState = super.snapshotOperatorState(checkpointId, timestamp); + + if (sideInputStateBackend != null) { + // we have to manually checkpoint the side-input state backend and store + // the handle in the "user state" of the task state + HashMap> sideInputSnapshot = + sideInputStateBackend.snapshotPartitionedState(checkpointId, timestamp); + + if (sideInputSnapshot != null) { + @SuppressWarnings("unchecked,rawtypes") + StateHandle sideInputStateHandle = + (StateHandle) sideInputStateBackend.checkpointStateSerializable( + sideInputSnapshot, checkpointId, timestamp); + + streamTaskState.setFunctionState(sideInputStateHandle); + } + } + + return streamTaskState; + } + + @Override + public void restoreState(StreamTaskState state, long recoveryTimestamp) throws Exception { + super.restoreState(state, recoveryTimestamp); + + @SuppressWarnings("unchecked,rawtypes") + StateHandle>> sideInputStateHandle = + (StateHandle) state.getFunctionState(); + + if (sideInputStateHandle != null) { + restoredSideInputState = sideInputStateHandle.getState(getUserCodeClassloader()); + } + } + + /** + * Factory for creating an {@link DoFnRunners.OutputManager} from + * a Flink {@link Output}. + */ + interface OutputManagerFactory extends Serializable { + DoFnRunners.OutputManager create(Output> output); + } + + /** + * Default implementation of {@link OutputManagerFactory} that creates an + * {@link DoFnRunners.OutputManager} that only writes to + * a single logical output. 
+ */ + public static class DefaultOutputManagerFactory + implements OutputManagerFactory { + @Override + public DoFnRunners.OutputManager create(final Output> output) { + return new DoFnRunners.OutputManager() { + @Override + public void output(TupleTag tag, WindowedValue value) { + // with side outputs we can't get around this because we don't + // know our own output type... + @SuppressWarnings("unchecked") + OutputT castValue = (OutputT) value; + output.collect(new StreamRecord<>(castValue)); + } + }; + } + } + + /** + * Implementation of {@link OutputManagerFactory} that creates an + * {@link DoFnRunners.OutputManager} that can write to multiple logical + * outputs by unioning them in a {@link RawUnionValue}. + */ + public static class MultiOutputOutputManagerFactory + implements OutputManagerFactory { + + Map, Integer> mapping; + + public MultiOutputOutputManagerFactory(Map, Integer> mapping) { + this.mapping = mapping; + } + + @Override + public DoFnRunners.OutputManager create(final Output> output) { + return new DoFnRunners.OutputManager() { + @Override + public void output(TupleTag tag, WindowedValue value) { + int intTag = mapping.get(tag); + output.collect(new StreamRecord<>(new RawUnionValue(intTag, value))); + } + }; + } + } + + /** + * For determining the pushback watermark in a {@link ReducingStateDescriptor}. + */ + private static class LongMinReducer implements ReduceFunction { + @Override + public Long reduce(Long a, Long b) throws Exception { + return Math.min(a, b); + } + } + + /** + * {@link StepContext} for running {@link DoFn DoFns} on Flink. This does not allow + * accessing state or timer internals. 
+ */ + protected class StepContext implements ExecutionContext.StepContext { + + @Override + public String getStepName() { + return null; + } + + @Override + public String getTransformName() { + return null; + } + + @Override + public void noteOutput(WindowedValue output) {} + + @Override + public void noteSideOutput(TupleTag tag, WindowedValue output) {} + + @Override + public void writePCollectionViewData( + TupleTag tag, + Iterable> data, + Coder>> dataCoder, + W window, + Coder windowCoder) throws IOException { + throw new UnsupportedOperationException("Writing side-input data is not supported."); + } + + @Override + public StateInternals stateInternals() { + throw new UnsupportedOperationException("Not supported for regular DoFns."); + } + + @Override + public TimerInternals timerInternals() { + throw new UnsupportedOperationException("Not supported for regular DoFns."); + } + } + +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java deleted file mode 100644 index a9dd865bf2b3c..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkAbstractParDoWrapper.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming; - -import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; -import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregatorWrapper; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.transforms.windowing.WindowFn; -import org.apache.beam.sdk.util.UserCodeException; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingInternals; -import org.apache.beam.sdk.util.WindowingStrategy; -import org.apache.beam.sdk.values.PCollectionView; -import org.apache.beam.sdk.values.TupleTag; - -import org.apache.flink.api.common.accumulators.Accumulator; -import org.apache.flink.api.common.accumulators.AccumulatorHelper; -import org.apache.flink.api.common.functions.RichFlatMapFunction; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.util.Collector; -import org.joda.time.Instant; -import org.joda.time.format.PeriodFormat; - -import java.util.Collection; - -/** - * An abstract class that encapsulates the common code of the the {@link org.apache.beam.sdk.transforms.ParDo.Bound} - * and {@link 
org.apache.beam.sdk.transforms.ParDo.BoundMulti} wrappers. See the {@link FlinkParDoBoundWrapper} and - * {@link FlinkParDoBoundMultiWrapper} for the actual wrappers of the aforementioned transformations. - * */ -public abstract class FlinkAbstractParDoWrapper extends RichFlatMapFunction, WindowedValue> { - - private final OldDoFn doFn; - private final WindowingStrategy windowingStrategy; - private final SerializedPipelineOptions serializedPipelineOptions; - - private DoFnProcessContext context; - - public FlinkAbstractParDoWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, OldDoFn doFn) { - checkNotNull(options); - checkNotNull(windowingStrategy); - checkNotNull(doFn); - - this.doFn = doFn; - this.serializedPipelineOptions = new SerializedPipelineOptions(options); - this.windowingStrategy = windowingStrategy; - } - - @Override - public void open(Configuration parameters) throws Exception { - doFn.setup(); - } - - @Override - public void close() throws Exception { - if (this.context != null) { - // we have initialized the context - this.doFn.finishBundle(this.context); - } - this.doFn.teardown(); - } - - @Override - public void flatMap(WindowedValue value, Collector> out) throws Exception { - if (this.context == null) { - this.context = new DoFnProcessContext(doFn, out); - this.doFn.startBundle(this.context); - } - - // for each window the element belongs to, create a new copy here. 
- Collection windows = value.getWindows(); - if (windows.size() <= 1) { - processElement(value); - } else { - for (BoundedWindow window : windows) { - processElement(WindowedValue.of( - value.getValue(), value.getTimestamp(), window, value.getPane())); - } - } - } - - private void processElement(WindowedValue value) throws Exception { - this.context.setElement(value); - doFn.processElement(this.context); - } - - private class DoFnProcessContext extends OldDoFn.ProcessContext { - - private final OldDoFn fn; - - protected final Collector> collector; - - private WindowedValue element; - - private DoFnProcessContext(OldDoFn function, - Collector> outCollector) { - function.super(); - super.setupDelegateAggregators(); - - this.fn = function; - this.collector = outCollector; - } - - public void setElement(WindowedValue value) { - this.element = value; - } - - @Override - public IN element() { - return this.element.getValue(); - } - - @Override - public Instant timestamp() { - return this.element.getTimestamp(); - } - - @Override - public BoundedWindow window() { - if (!(fn instanceof OldDoFn.RequiresWindowAccess)) { - throw new UnsupportedOperationException( - "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess."); - } - - Collection windows = this.element.getWindows(); - if (windows.size() != 1) { - throw new IllegalArgumentException("Each element is expected to belong to 1 window. 
" + - "This belongs to " + windows.size() + "."); - } - return windows.iterator().next(); - } - - @Override - public PaneInfo pane() { - return this.element.getPane(); - } - - @Override - public WindowingInternals windowingInternals() { - return windowingInternalsHelper(element, collector); - } - - @Override - public PipelineOptions getPipelineOptions() { - return serializedPipelineOptions.getPipelineOptions(); - } - - @Override - public T sideInput(PCollectionView view) { - throw new RuntimeException("sideInput() is not supported in Streaming mode."); - } - - @Override - public void output(OUTDF output) { - outputWithTimestamp(output, this.element.getTimestamp()); - } - - @Override - public void outputWithTimestamp(OUTDF output, Instant timestamp) { - outputWithTimestampHelper(element, output, timestamp, collector); - } - - @Override - public void sideOutput(TupleTag tag, T output) { - sideOutputWithTimestamp(tag, output, this.element.getTimestamp()); - } - - @Override - public void sideOutputWithTimestamp(TupleTag tag, T output, Instant timestamp) { - sideOutputWithTimestampHelper(element, output, timestamp, collector, tag); - } - - @Override - protected Aggregator createAggregatorInternal(String name, Combine.CombineFn combiner) { - Accumulator acc = getRuntimeContext().getAccumulator(name); - if (acc != null) { - AccumulatorHelper.compareAccumulatorTypes(name, - SerializableFnAggregatorWrapper.class, acc.getClass()); - return (Aggregator) acc; - } - - SerializableFnAggregatorWrapper accumulator = - new SerializableFnAggregatorWrapper<>(combiner); - getRuntimeContext().addAccumulator(name, accumulator); - return accumulator; - } - } - - protected void checkTimestamp(WindowedValue ref, Instant timestamp) { - if (timestamp.isBefore(ref.getTimestamp().minus(doFn.getAllowedTimestampSkew()))) { - throw new IllegalArgumentException(String.format( - "Cannot output with timestamp %s. 
Output timestamps must be no earlier than the " - + "timestamp of the current input (%s) minus the allowed skew (%s). See the " - + "OldDoFn#getAllowedTimestmapSkew() Javadoc for details on changing the allowed skew.", - timestamp, ref.getTimestamp(), - PeriodFormat.getDefault().print(doFn.getAllowedTimestampSkew().toPeriod()))); - } - } - - protected WindowedValue makeWindowedValue( - T output, Instant timestamp, Collection windows, PaneInfo pane) { - final Instant inputTimestamp = timestamp; - final WindowFn windowFn = windowingStrategy.getWindowFn(); - - if (timestamp == null) { - timestamp = BoundedWindow.TIMESTAMP_MIN_VALUE; - } - - if (windows == null) { - try { - windows = windowFn.assignWindows(windowFn.new AssignContext() { - @Override - public Object element() { - throw new UnsupportedOperationException( - "WindowFn attempted to access input element when none was available"); - } - - @Override - public Instant timestamp() { - if (inputTimestamp == null) { - throw new UnsupportedOperationException( - "WindowFn attempted to access input timestamp when none was available"); - } - return inputTimestamp; - } - - @Override - public BoundedWindow window() { - throw new UnsupportedOperationException( - "WindowFn attempted to access input window when none was available"); - } - }); - } catch (Exception e) { - throw UserCodeException.wrap(e); - } - } - - return WindowedValue.of(output, timestamp, windows, pane); - } - - /////////// ABSTRACT METHODS TO BE IMPLEMENTED BY SUBCLASSES ///////////////// - - public abstract void outputWithTimestampHelper( - WindowedValue inElement, - OUTDF output, - Instant timestamp, - Collector> outCollector); - - public abstract void sideOutputWithTimestampHelper( - WindowedValue inElement, - T output, - Instant timestamp, - Collector> outCollector, - TupleTag tag); - - public abstract WindowingInternals windowingInternalsHelper( - WindowedValue inElement, - Collector> outCollector); - -} diff --git 
a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java deleted file mode 100644 index 4fddb53b4b928..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupAlsoByWindowWrapper.java +++ /dev/null @@ -1,644 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.beam.runners.flink.translation.wrappers.streaming; - -import static com.google.common.base.Preconditions.checkNotNull; - -import java.io.Serializable; -import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetDoFn; -import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; -import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; -import org.apache.beam.runners.flink.translation.wrappers.SerializableFnAggregatorWrapper; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.AbstractFlinkTimerInternals; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.FlinkStateInternals; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointReader; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointUtils; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointWriter; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderRegistry; -import org.apache.beam.sdk.coders.IterableCoder; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.Aggregator; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.util.AppliedCombineFn; -import org.apache.beam.sdk.util.KeyedWorkItem; -import org.apache.beam.sdk.util.KeyedWorkItems; -import org.apache.beam.sdk.util.SystemReduceFn; -import org.apache.beam.sdk.util.TimerInternals; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingInternals; -import org.apache.beam.sdk.util.WindowingStrategy; -import 
org.apache.beam.sdk.util.state.StateInternals; -import org.apache.beam.sdk.util.state.StateInternalsFactory; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionView; -import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; - -import org.apache.flink.api.common.accumulators.Accumulator; -import org.apache.flink.api.common.accumulators.AccumulatorHelper; -import org.apache.flink.core.memory.DataInputView; -import org.apache.flink.runtime.state.AbstractStateBackend; -import org.apache.flink.runtime.state.StateHandle; -import org.apache.flink.streaming.api.datastream.DataStream; -import org.apache.flink.streaming.api.datastream.KeyedStream; -import org.apache.flink.streaming.api.operators.AbstractStreamOperator; -import org.apache.flink.streaming.api.operators.ChainingStrategy; -import org.apache.flink.streaming.api.operators.OneInputStreamOperator; -import org.apache.flink.streaming.api.operators.TimestampedCollector; -import org.apache.flink.streaming.api.watermark.Watermark; -import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.apache.flink.streaming.runtime.tasks.StreamTaskState; -import org.joda.time.Instant; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -/** - * This class is the key class implementing all the windowing/triggering logic of Apache Beam. - * To provide full compatibility and support for all the windowing/triggering combinations offered by - * Beam, we opted for a strategy that uses the SDK's code for doing these operations. See the code in - * ({@link org.apache.beam.runners.core.GroupAlsoByWindowsDoFn}. - *

    - * In a nutshell, when the execution arrives to this operator, we expect to have a stream already - * grouped by key. Each of the elements that enter here, registers a timer - * (see {@link TimerInternals#setTimer(TimerInternals.TimerData)} in the - * {@link FlinkGroupAlsoByWindowWrapper#activeTimers}. - * This is essentially a timestamp indicating when to trigger the computation over the window this - * element belongs to. - *

    - * When a watermark arrives, all the registered timers are checked to see which ones are ready to - * fire (see {@link FlinkGroupAlsoByWindowWrapper#processWatermark(Watermark)}). These are deregistered from - * the {@link FlinkGroupAlsoByWindowWrapper#activeTimers} - * list, and are fed into the {@link org.apache.beam.runners.core.GroupAlsoByWindowsDoFn} - * for furhter processing. - */ -public class FlinkGroupAlsoByWindowWrapper - extends AbstractStreamOperator>> - implements OneInputStreamOperator>, WindowedValue>> { - - private static final long serialVersionUID = 1L; - - private SerializedPipelineOptions serializedOptions; - - private transient CoderRegistry coderRegistry; - - private OldDoFn, KV> operator; - - private ProcessContext context; - - private final WindowingStrategy, BoundedWindow> windowingStrategy; - - private final Combine.KeyedCombineFn combineFn; - - private final KvCoder inputKvCoder; - - /** - * State is kept per-key. This data structure keeps this mapping between an active key, i.e. a - * key whose elements are currently waiting to be processed, and its associated state. - */ - private Map> perKeyStateInternals = new HashMap<>(); - - /** - * Timers waiting to be processed. - */ - private Map> activeTimers = new HashMap<>(); - - private FlinkTimerInternals timerInternals = new FlinkTimerInternals(); - - /** - * Creates an DataStream where elements are grouped in windows based on the specified windowing strategy. - * This method assumes that elements are already grouped by key. - *

    - * The difference with {@link #createForIterable(PipelineOptions, PCollection, KeyedStream)} - * is that this method assumes that a combiner function is provided - * (see {@link org.apache.beam.sdk.transforms.Combine.KeyedCombineFn}). - * A combiner helps at increasing the speed and, in most of the cases, reduce the per-window state. - * - * @param options the general job configuration options. - * @param input the input Dataflow {@link org.apache.beam.sdk.values.PCollection}. - * @param groupedStreamByKey the input stream, it is assumed to already be grouped by key. - * @param combiner the combiner to be used. - * @param outputKvCoder the type of the output values. - */ - public static DataStream>> create( - PipelineOptions options, - PCollection input, - KeyedStream>, K> groupedStreamByKey, - Combine.KeyedCombineFn combiner, - KvCoder outputKvCoder) { - checkNotNull(options); - - KvCoder inputKvCoder = (KvCoder) input.getCoder(); - FlinkGroupAlsoByWindowWrapper windower = new FlinkGroupAlsoByWindowWrapper<>(options, - input.getPipeline().getCoderRegistry(), input.getWindowingStrategy(), inputKvCoder, combiner); - - Coder>> windowedOutputElemCoder = WindowedValue.FullWindowedValueCoder.of( - outputKvCoder, - input.getWindowingStrategy().getWindowFn().windowCoder()); - - CoderTypeInformation>> outputTypeInfo = - new CoderTypeInformation<>(windowedOutputElemCoder); - - DataStream>> groupedByKeyAndWindow = groupedStreamByKey - .transform("GroupByWindowWithCombiner", - new CoderTypeInformation<>(outputKvCoder), - windower) - .returns(outputTypeInfo); - - return groupedByKeyAndWindow; - } - - /** - * Creates an DataStream where elements are grouped in windows based on the specified windowing strategy. - * This method assumes that elements are already grouped by key. - *

    - * The difference with {@link #create(PipelineOptions, PCollection, KeyedStream, Combine.KeyedCombineFn, KvCoder)} - * is that this method assumes no combiner function - * (see {@link org.apache.beam.sdk.transforms.Combine.KeyedCombineFn}). - * - * @param options the general job configuration options. - * @param input the input Dataflow {@link org.apache.beam.sdk.values.PCollection}. - * @param groupedStreamByKey the input stream, it is assumed to already be grouped by key. - */ - public static DataStream>>> createForIterable( - PipelineOptions options, - PCollection input, - KeyedStream>, K> groupedStreamByKey) { - checkNotNull(options); - - KvCoder inputKvCoder = (KvCoder) input.getCoder(); - Coder keyCoder = inputKvCoder.getKeyCoder(); - Coder inputValueCoder = inputKvCoder.getValueCoder(); - - FlinkGroupAlsoByWindowWrapper windower = new FlinkGroupAlsoByWindowWrapper(options, - input.getPipeline().getCoderRegistry(), input.getWindowingStrategy(), inputKvCoder, null); - - Coder> valueIterCoder = IterableCoder.of(inputValueCoder); - KvCoder> outputElemCoder = KvCoder.of(keyCoder, valueIterCoder); - - Coder>>> windowedOutputElemCoder = WindowedValue.FullWindowedValueCoder.of( - outputElemCoder, - input.getWindowingStrategy().getWindowFn().windowCoder()); - - CoderTypeInformation>>> outputTypeInfo = - new CoderTypeInformation<>(windowedOutputElemCoder); - - DataStream>>> groupedByKeyAndWindow = groupedStreamByKey - .transform("GroupByWindow", - new CoderTypeInformation<>(windowedOutputElemCoder), - windower) - .returns(outputTypeInfo); - - return groupedByKeyAndWindow; - } - - public static FlinkGroupAlsoByWindowWrapper - createForTesting(PipelineOptions options, - CoderRegistry registry, - WindowingStrategy, BoundedWindow> windowingStrategy, - KvCoder inputCoder, - Combine.KeyedCombineFn combiner) { - checkNotNull(options); - - return new FlinkGroupAlsoByWindowWrapper(options, registry, windowingStrategy, inputCoder, combiner); - } - - private 
FlinkGroupAlsoByWindowWrapper(PipelineOptions options, - CoderRegistry registry, - WindowingStrategy, BoundedWindow> windowingStrategy, - KvCoder inputCoder, - Combine.KeyedCombineFn combiner) { - checkNotNull(options); - - this.serializedOptions = new SerializedPipelineOptions(checkNotNull(options)); - this.coderRegistry = checkNotNull(registry); - this.inputKvCoder = checkNotNull(inputCoder);//(KvCoder) input.getCoder(); - this.windowingStrategy = checkNotNull(windowingStrategy);//input.getWindowingStrategy(); - this.combineFn = combiner; - this.operator = createGroupAlsoByWindowOperator(); - this.chainingStrategy = ChainingStrategy.ALWAYS; - } - - @Override - public void open() throws Exception { - super.open(); - operator.setup(); - this.context = new ProcessContext(operator, new TimestampedCollector<>(output), this.timerInternals); - operator.startBundle(context); - } - - /** - * Create the adequate {@link org.apache.beam.runners.core.GroupAlsoByWindowsDoFn}, - * if not already created. - * If a {@link org.apache.beam.sdk.transforms.Combine.KeyedCombineFn} was provided, then - * a function with that combiner is created, so that elements are combined as they arrive. This is - * done for speed and (in most of the cases) for reduction of the per-window state. 
- */ - private OldDoFn, KV> createGroupAlsoByWindowOperator() { - if (this.operator == null) { - - StateInternalsFactory stateInternalsFactory = new GroupAlsoByWindowWrapperStateInternalsFactory(); - - if (this.combineFn == null) { - // Thus VOUT == Iterable - Coder inputValueCoder = inputKvCoder.getValueCoder(); - - this.operator = (OldDoFn) GroupAlsoByWindowViaWindowSetDoFn.create( - (WindowingStrategy) this.windowingStrategy, stateInternalsFactory, SystemReduceFn.buffering(inputValueCoder)); - } else { - Coder inputKeyCoder = inputKvCoder.getKeyCoder(); - - AppliedCombineFn appliedCombineFn = AppliedCombineFn - .withInputCoder(combineFn, coderRegistry, inputKvCoder); - - this.operator = GroupAlsoByWindowViaWindowSetDoFn.create( - (WindowingStrategy) this.windowingStrategy, stateInternalsFactory, SystemReduceFn.combining(inputKeyCoder, appliedCombineFn)); - } - } - return this.operator; - } - - private void processKeyedWorkItem(KeyedWorkItem workItem) throws Exception { - context.setElement(workItem); - operator.processElement(context); - } - - @Override - public void processElement(StreamRecord>> element) throws Exception { - final WindowedValue> windowedValue = element.getValue(); - final KV kv = windowedValue.getValue(); - - final WindowedValue updatedWindowedValue = WindowedValue.of(kv.getValue(), - windowedValue.getTimestamp(), - windowedValue.getWindows(), - windowedValue.getPane()); - - processKeyedWorkItem( - KeyedWorkItems.elementsWorkItem( - kv.getKey(), - Collections.singletonList(updatedWindowedValue))); - } - - @Override - public void processWatermark(Watermark mark) throws Exception { - context.setCurrentInputWatermark(new Instant(mark.getTimestamp())); - - Multimap timers = getTimersReadyToProcess(mark.getTimestamp()); - if (!timers.isEmpty()) { - for (K key : timers.keySet()) { - processKeyedWorkItem(KeyedWorkItems.timersWorkItem(key, timers.get(key))); - } - } - - /** - * This is to take into account the different semantics of the Watermark in 
Flink and - * in Dataflow. To understand the reasoning behind the Dataflow semantics and its - * watermark holding logic, see the documentation of - * {@link WatermarkHold#addHold(ReduceFn.ProcessValueContext, boolean)} - * */ - long millis = Long.MAX_VALUE; - for (FlinkStateInternals state : perKeyStateInternals.values()) { - Instant watermarkHold = state.getWatermarkHold(); - if (watermarkHold != null && watermarkHold.getMillis() < millis) { - millis = watermarkHold.getMillis(); - } - } - - if (mark.getTimestamp() < millis) { - millis = mark.getTimestamp(); - } - - context.setCurrentOutputWatermark(new Instant(millis)); - - // Don't forget to re-emit the watermark for further operators down the line. - // This is critical for jobs with multiple aggregation steps. - // Imagine a job with a groupByKey() on key K1, followed by a map() that changes - // the key K1 to K2, and another groupByKey() on K2. In this case, if the watermark - // is not re-emitted, the second aggregation would never be triggered, and no result - // will be produced. - output.emitWatermark(new Watermark(millis)); - } - - @Override - public void close() throws Exception { - operator.finishBundle(context); - operator.teardown(); - super.close(); - } - - private void registerActiveTimer(K key, TimerInternals.TimerData timer) { - Set timersForKey = activeTimers.get(key); - if (timersForKey == null) { - timersForKey = new HashSet<>(); - } - timersForKey.add(timer); - activeTimers.put(key, timersForKey); - } - - private void unregisterActiveTimer(K key, TimerInternals.TimerData timer) { - Set timersForKey = activeTimers.get(key); - if (timersForKey != null) { - timersForKey.remove(timer); - if (timersForKey.isEmpty()) { - activeTimers.remove(key); - } else { - activeTimers.put(key, timersForKey); - } - } - } - - /** - * Returns the list of timers that are ready to fire. These are the timers - * that are registered to be triggered at a time before the current watermark. 
- * We keep these timers in a Set, so that they are deduplicated, as the same - * timer can be registered multiple times. - */ - private Multimap getTimersReadyToProcess(long currentWatermark) { - - // we keep the timers to return in a different list and launch them later - // because we cannot prevent a trigger from registering another trigger, - // which would lead to concurrent modification exception. - Multimap toFire = HashMultimap.create(); - - Iterator>> it = activeTimers.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry> keyWithTimers = it.next(); - - Iterator timerIt = keyWithTimers.getValue().iterator(); - while (timerIt.hasNext()) { - TimerInternals.TimerData timerData = timerIt.next(); - if (timerData.getTimestamp().isBefore(currentWatermark)) { - toFire.put(keyWithTimers.getKey(), timerData); - timerIt.remove(); - } - } - - if (keyWithTimers.getValue().isEmpty()) { - it.remove(); - } - } - return toFire; - } - - /** - * Gets the state associated with the specified key. - * - * @param key the key whose state we want. - * @return The {@link FlinkStateInternals} - * associated with that key. 
- */ - private FlinkStateInternals getStateInternalsForKey(K key) { - FlinkStateInternals stateInternals = perKeyStateInternals.get(key); - if (stateInternals == null) { - Coder windowCoder = this.windowingStrategy.getWindowFn().windowCoder(); - OutputTimeFn outputTimeFn = this.windowingStrategy.getOutputTimeFn(); - stateInternals = new FlinkStateInternals<>(key, inputKvCoder.getKeyCoder(), windowCoder, outputTimeFn); - perKeyStateInternals.put(key, stateInternals); - } - return stateInternals; - } - - private class FlinkTimerInternals extends AbstractFlinkTimerInternals { - @Override - public void setTimer(TimerData timerKey) { - registerActiveTimer(context.element().key(), timerKey); - } - - @Override - public void deleteTimer(TimerData timerKey) { - unregisterActiveTimer(context.element().key(), timerKey); - } - } - - private class ProcessContext extends GroupAlsoByWindowViaWindowSetDoFn>.ProcessContext { - - private final FlinkTimerInternals timerInternals; - - private final TimestampedCollector>> collector; - - private KeyedWorkItem element; - - public ProcessContext(OldDoFn, KV> function, - TimestampedCollector>> outCollector, - FlinkTimerInternals timerInternals) { - function.super(); - super.setupDelegateAggregators(); - - this.collector = checkNotNull(outCollector); - this.timerInternals = checkNotNull(timerInternals); - } - - public void setElement(KeyedWorkItem element) { - this.element = element; - } - - public void setCurrentInputWatermark(Instant watermark) { - this.timerInternals.setCurrentInputWatermark(watermark); - } - - public void setCurrentOutputWatermark(Instant watermark) { - this.timerInternals.setCurrentOutputWatermark(watermark); - } - - @Override - public KeyedWorkItem element() { - return this.element; - } - - @Override - public Instant timestamp() { - throw new UnsupportedOperationException("timestamp() is not available when processing KeyedWorkItems."); - } - - @Override - public PipelineOptions getPipelineOptions() { - return 
serializedOptions.getPipelineOptions(); - } - - @Override - public void output(KV output) { - throw new UnsupportedOperationException( - "output() is not available when processing KeyedWorkItems."); - } - - @Override - public void outputWithTimestamp(KV output, Instant timestamp) { - throw new UnsupportedOperationException( - "outputWithTimestamp() is not available when processing KeyedWorkItems."); - } - - @Override - public PaneInfo pane() { - throw new UnsupportedOperationException("pane() is not available when processing KeyedWorkItems."); - } - - @Override - public BoundedWindow window() { - throw new UnsupportedOperationException( - "window() is not available when processing KeyedWorkItems."); - } - - @Override - public WindowingInternals, KV> windowingInternals() { - return new WindowingInternals, KV>() { - - @Override - public StateInternals stateInternals() { - throw new UnsupportedOperationException("stateInternals() is not available"); - } - - @Override - public void outputWindowedValue(KV output, Instant timestamp, Collection windows, PaneInfo pane) { - // TODO: No need to represent timestamp twice. 
- collector.setAbsoluteTimestamp(timestamp.getMillis()); - collector.collect(WindowedValue.of(output, timestamp, windows, pane)); - - } - - @Override - public TimerInternals timerInternals() { - return timerInternals; - } - - @Override - public Collection windows() { - throw new UnsupportedOperationException("windows() is not available in Streaming mode."); - } - - @Override - public PaneInfo pane() { - throw new UnsupportedOperationException("pane() is not available in Streaming mode."); - } - - @Override - public void writePCollectionViewData(TupleTag tag, Iterable> data, Coder elemCoder) throws IOException { - throw new RuntimeException("writePCollectionViewData() not available in Streaming mode."); - } - - @Override - public T sideInput(PCollectionView view, BoundedWindow mainInputWindow) { - throw new RuntimeException("sideInput() is not available in Streaming mode."); - } - }; - } - - @Override - public T sideInput(PCollectionView view) { - throw new RuntimeException("sideInput() is not supported in Streaming mode."); - } - - @Override - public void sideOutput(TupleTag tag, T output) { - // ignore the side output, this can happen when a user does not register - // side outputs but then outputs using a freshly created TupleTag. 
- throw new RuntimeException("sideOutput() is not available when grouping by window."); - } - - @Override - public void sideOutputWithTimestamp(TupleTag tag, T output, Instant timestamp) { - sideOutput(tag, output); - } - - @Override - protected Aggregator createAggregatorInternal(String name, Combine.CombineFn combiner) { - Accumulator acc = getRuntimeContext().getAccumulator(name); - if (acc != null) { - AccumulatorHelper.compareAccumulatorTypes(name, - SerializableFnAggregatorWrapper.class, acc.getClass()); - return (Aggregator) acc; - } - - SerializableFnAggregatorWrapper accumulator = - new SerializableFnAggregatorWrapper<>(combiner); - getRuntimeContext().addAccumulator(name, accumulator); - return accumulator; - } - } - - ////////////// Checkpointing implementation //////////////// - - @Override - public StreamTaskState snapshotOperatorState(long checkpointId, long timestamp) throws Exception { - StreamTaskState taskState = super.snapshotOperatorState(checkpointId, timestamp); - AbstractStateBackend.CheckpointStateOutputView out = getStateBackend().createCheckpointStateOutputView(checkpointId, timestamp); - StateCheckpointWriter writer = StateCheckpointWriter.create(out); - Coder keyCoder = inputKvCoder.getKeyCoder(); - - // checkpoint the timers - StateCheckpointUtils.encodeTimers(activeTimers, writer, keyCoder); - - // checkpoint the state - StateCheckpointUtils.encodeState(perKeyStateInternals, writer, keyCoder); - - // checkpoint the timerInternals - context.timerInternals.encodeTimerInternals(context, writer, - inputKvCoder, windowingStrategy.getWindowFn().windowCoder()); - - taskState.setOperatorState(out.closeAndGetHandle()); - return taskState; - } - - @Override - public void restoreState(StreamTaskState taskState, long recoveryTimestamp) throws Exception { - super.restoreState(taskState, recoveryTimestamp); - - final ClassLoader userClassloader = getUserCodeClassloader(); - - Coder windowCoder = this.windowingStrategy.getWindowFn().windowCoder(); - 
Coder keyCoder = inputKvCoder.getKeyCoder(); - - @SuppressWarnings("unchecked") - StateHandle inputState = (StateHandle) taskState.getOperatorState(); - DataInputView in = inputState.getState(userClassloader); - StateCheckpointReader reader = new StateCheckpointReader(in); - - // restore the timers - this.activeTimers = StateCheckpointUtils.decodeTimers(reader, windowCoder, keyCoder); - - // restore the state - this.perKeyStateInternals = StateCheckpointUtils.decodeState( - reader, windowingStrategy.getOutputTimeFn(), keyCoder, windowCoder, userClassloader); - - // restore the timerInternals. - this.timerInternals.restoreTimerInternals(reader, inputKvCoder, windowCoder); - } - - private class GroupAlsoByWindowWrapperStateInternalsFactory implements - StateInternalsFactory, Serializable { - - @Override - public StateInternals stateInternalsForKey(K key) { - return getStateInternalsForKey(key); - } - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupByKeyWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupByKeyWrapper.java deleted file mode 100644 index 6b69d547cf123..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkGroupByKeyWrapper.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming; - -import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.coders.VoidCoder; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.values.KV; - -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.api.java.functions.KeySelector; -import org.apache.flink.api.java.typeutils.ResultTypeQueryable; -import org.apache.flink.streaming.api.datastream.DataStream; -import org.apache.flink.streaming.api.datastream.KeyedStream; - -/** - * This class groups the elements by key. It assumes that already the incoming stream - * is composed of [Key,Value] pairs. - * */ -public class FlinkGroupByKeyWrapper { - - /** - * Just an auxiliary interface to bypass the fact that java anonymous classes cannot implement - * multiple interfaces. - */ - private interface KeySelectorWithQueryableResultType extends KeySelector>, K>, ResultTypeQueryable { - } - - public static KeyedStream>, K> groupStreamByKey(DataStream>> inputDataStream, KvCoder inputKvCoder) { - final Coder keyCoder = inputKvCoder.getKeyCoder(); - final TypeInformation keyTypeInfo = new CoderTypeInformation<>(keyCoder); - final boolean isKeyVoid = keyCoder instanceof VoidCoder; - - return inputDataStream.keyBy( - new KeySelectorWithQueryableResultType() { - - @Override - public K getKey(WindowedValue> value) throws Exception { - return isKeyVoid ? 
(K) VoidValue.INSTANCE : - value.getValue().getKey(); - } - - @Override - public TypeInformation getProducedType() { - return keyTypeInfo; - } - }); - } - - // special type to return as key for null key - public static class VoidValue { - private VoidValue() {} - - public static VoidValue INSTANCE = new VoidValue(); - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java deleted file mode 100644 index 0ea0cabb3f30a..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundMultiWrapper.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.beam.runners.flink.translation.wrappers.streaming; - -import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.join.RawUnionValue; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingInternals; -import org.apache.beam.sdk.util.WindowingStrategy; -import org.apache.beam.sdk.values.TupleTag; - -import org.apache.flink.util.Collector; -import org.joda.time.Instant; - -import java.util.Map; - -/** - * A wrapper for the {@link org.apache.beam.sdk.transforms.ParDo.BoundMulti} Beam transformation. - * */ -public class FlinkParDoBoundMultiWrapper extends FlinkAbstractParDoWrapper { - - private final TupleTag mainTag; - private final Map, Integer> outputLabels; - - public FlinkParDoBoundMultiWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, OldDoFn doFn, TupleTag mainTag, Map, Integer> tagsToLabels) { - super(options, windowingStrategy, doFn); - this.mainTag = checkNotNull(mainTag); - this.outputLabels = checkNotNull(tagsToLabels); - } - - @Override - public void outputWithTimestampHelper(WindowedValue inElement, OUT output, Instant timestamp, Collector> collector) { - checkTimestamp(inElement, timestamp); - Integer index = outputLabels.get(mainTag); - collector.collect(makeWindowedValue( - new RawUnionValue(index, output), - timestamp, - inElement.getWindows(), - inElement.getPane())); - } - - @Override - public void sideOutputWithTimestampHelper(WindowedValue inElement, T output, Instant timestamp, Collector> collector, TupleTag tag) { - checkTimestamp(inElement, timestamp); - Integer index = outputLabels.get(tag); - if (index != null) { - collector.collect(makeWindowedValue( - new RawUnionValue(index, output), - timestamp, - inElement.getWindows(), - inElement.getPane())); - } - } - - @Override - public WindowingInternals 
windowingInternalsHelper(WindowedValue inElement, Collector> outCollector) { - throw new RuntimeException("FlinkParDoBoundMultiWrapper is just an internal operator serving as " + - "an intermediate transformation for the ParDo.BoundMulti translation. windowingInternals() " + - "is not available in this class."); - } -} \ No newline at end of file diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java deleted file mode 100644 index 6be94b20f9cb4..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkParDoBoundWrapper.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.beam.runners.flink.translation.wrappers.streaming; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.util.TimerInternals; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingInternals; -import org.apache.beam.sdk.util.WindowingStrategy; -import org.apache.beam.sdk.util.state.StateInternals; -import org.apache.beam.sdk.values.PCollectionView; -import org.apache.beam.sdk.values.TupleTag; - -import org.apache.flink.util.Collector; -import org.joda.time.Instant; - -import java.io.IOException; -import java.util.Collection; - -/** - * A wrapper for the {@link org.apache.beam.sdk.transforms.ParDo.Bound} Beam transformation. - * */ -public class FlinkParDoBoundWrapper extends FlinkAbstractParDoWrapper { - - public FlinkParDoBoundWrapper(PipelineOptions options, WindowingStrategy windowingStrategy, OldDoFn doFn) { - super(options, windowingStrategy, doFn); - } - - @Override - public void outputWithTimestampHelper(WindowedValue inElement, OUT output, Instant timestamp, Collector> collector) { - checkTimestamp(inElement, timestamp); - collector.collect(makeWindowedValue( - output, - timestamp, - inElement.getWindows(), - inElement.getPane())); - } - - @Override - public void sideOutputWithTimestampHelper(WindowedValue inElement, T output, Instant timestamp, Collector> outCollector, TupleTag tag) { - // ignore the side output, this can happen when a user does not register - // side outputs but then outputs using a freshly created TupleTag. 
- throw new RuntimeException("sideOutput() not not available in ParDo.Bound()."); - } - - @Override - public WindowingInternals windowingInternalsHelper(final WindowedValue inElement, final Collector> collector) { - return new WindowingInternals() { - @Override - public StateInternals stateInternals() { - throw new NullPointerException("StateInternals are not available for ParDo.Bound()."); - } - - @Override - public void outputWindowedValue(OUT output, Instant timestamp, Collection windows, PaneInfo pane) { - collector.collect(makeWindowedValue(output, timestamp, windows, pane)); - } - - @Override - public TimerInternals timerInternals() { - throw new NullPointerException("TimeInternals are not available for ParDo.Bound()."); - } - - @Override - public Collection windows() { - return inElement.getWindows(); - } - - @Override - public PaneInfo pane() { - return inElement.getPane(); - } - - @Override - public void writePCollectionViewData(TupleTag tag, Iterable> data, Coder elemCoder) throws IOException { - throw new RuntimeException("writePCollectionViewData() not supported in Streaming mode."); - } - - @Override - public T sideInput(PCollectionView view, BoundedWindow mainInputWindow) { - throw new RuntimeException("sideInput() not implemented."); - } - }; - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkStateInternals.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkStateInternals.java new file mode 100644 index 0000000000000..2e10400591080 --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkStateInternals.java @@ -0,0 +1,1035 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.InstantCoder; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.beam.sdk.transforms.CombineWithContext; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.CombineContextFactory; +import org.apache.beam.sdk.util.state.AccumulatorCombiningState; +import org.apache.beam.sdk.util.state.BagState; +import org.apache.beam.sdk.util.state.ReadableState; +import org.apache.beam.sdk.util.state.State; +import org.apache.beam.sdk.util.state.StateContext; +import org.apache.beam.sdk.util.state.StateContexts; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.util.state.StateNamespace; +import org.apache.beam.sdk.util.state.StateTag; +import org.apache.beam.sdk.util.state.ValueState; +import org.apache.beam.sdk.util.state.WatermarkHoldState; +import 
org.apache.flink.api.common.state.ListStateDescriptor; +import org.apache.flink.api.common.state.ValueStateDescriptor; +import org.apache.flink.api.common.typeutils.base.StringSerializer; +import org.apache.flink.runtime.state.AbstractStateBackend; +import org.joda.time.Instant; + +/** + * {@link StateInternals} that uses a Flink {@link AbstractStateBackend} to + * manage state. + * + *

    Note: In the Flink streaming runner the key is always encoded + * using an {@link Coder} and stored in a {@link ByteBuffer}. + */ +public class FlinkStateInternals implements StateInternals { + + private final Coder keyCoder; + + private final AbstractStateBackend flinkStateBackend; + + // on recovery, these will no be properly set because we don't + // know which watermark hold states there are in the Flink State Backend + private final Map watermarkHolds = new HashMap<>(); + + public FlinkStateInternals(AbstractStateBackend flinkStateBackend, Coder keyCoder) { + this.flinkStateBackend = flinkStateBackend; + this.keyCoder = keyCoder; + } + + /** + * Returns the minimum over all watermark holds. + */ + public Instant watermarkHold() { + long min = Long.MAX_VALUE; + for (Instant hold: watermarkHolds.values()) { + min = Math.min(min, hold.getMillis()); + } + return new Instant(min); + } + + @Override + public K getKey() { + ByteBuffer keyBytes = (ByteBuffer) flinkStateBackend.getCurrentKey(); + try { + return CoderUtils.decodeFromByteArray(keyCoder, keyBytes.array()); + } catch (CoderException e) { + throw new RuntimeException("Error decoding key.", e); + } + } + + @Override + public T state( + final StateNamespace namespace, + StateTag address) { + + return state(namespace, address, StateContexts.nullContext()); + } + + @Override + public T state( + final StateNamespace namespace, + StateTag address, + final StateContext context) { + + return address.bind(new StateTag.StateBinder() { + + @Override + public ValueState bindValue( + StateTag> address, + Coder coder) { + + return new FlinkValueState<>(flinkStateBackend, address, namespace, coder); + } + + @Override + public BagState bindBag( + StateTag> address, + Coder elemCoder) { + + return new FlinkBagState<>(flinkStateBackend, address, namespace, elemCoder); + } + + @Override + public + AccumulatorCombiningState + bindCombiningValue( + StateTag> address, + Coder accumCoder, + Combine.CombineFn combineFn) { + + 
return new FlinkAccumulatorCombiningState<>( + flinkStateBackend, address, combineFn, namespace, accumCoder); + } + + @Override + public + AccumulatorCombiningState bindKeyedCombiningValue( + StateTag> address, + Coder accumCoder, + final Combine.KeyedCombineFn combineFn) { + return new FlinkKeyedAccumulatorCombiningState<>( + flinkStateBackend, + address, + combineFn, + namespace, + accumCoder, + FlinkStateInternals.this); + } + + @Override + public + AccumulatorCombiningState bindKeyedCombiningValueWithContext( + StateTag> address, + Coder accumCoder, + CombineWithContext.KeyedCombineFnWithContext< + ? super K, InputT, AccumT, OutputT> combineFn) { + return new FlinkAccumulatorCombiningStateWithContext<>( + flinkStateBackend, + address, + combineFn, + namespace, + accumCoder, + FlinkStateInternals.this, + CombineContextFactory.createFromStateContext(context)); + } + + @Override + public WatermarkHoldState bindWatermark( + StateTag> address, + OutputTimeFn outputTimeFn) { + + return new FlinkWatermarkHoldState<>( + flinkStateBackend, FlinkStateInternals.this, address, namespace, outputTimeFn); + } + }); + } + + private static class FlinkValueState implements ValueState { + + private final StateNamespace namespace; + private final StateTag> address; + private final ValueStateDescriptor flinkStateDescriptor; + private final AbstractStateBackend flinkStateBackend; + + FlinkValueState( + AbstractStateBackend flinkStateBackend, + StateTag> address, + StateNamespace namespace, + Coder coder) { + + this.namespace = namespace; + this.address = address; + this.flinkStateBackend = flinkStateBackend; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(coder); + + flinkStateDescriptor = new ValueStateDescriptor<>(address.getId(), typeInfo, null); + } + + @Override + public void write(T input) { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).update(input); + } catch (Exception e) { + 
throw new RuntimeException("Error updating state.", e); + } + } + + @Override + public ValueState readLater() { + return this; + } + + @Override + public T read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public void clear() { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).clear(); + } catch (Exception e) { + throw new RuntimeException("Error clearing state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkValueState that = (FlinkValueState) o; + + return namespace.equals(that.namespace) && address.equals(that.address); + + } + + @Override + public int hashCode() { + int result = namespace.hashCode(); + result = 31 * result + address.hashCode(); + return result; + } + } + + private static class FlinkBagState implements BagState { + + private final StateNamespace namespace; + private final StateTag> address; + private final ListStateDescriptor flinkStateDescriptor; + private final AbstractStateBackend flinkStateBackend; + + FlinkBagState( + AbstractStateBackend flinkStateBackend, + StateTag> address, + StateNamespace namespace, + Coder coder) { + + this.namespace = namespace; + this.address = address; + this.flinkStateBackend = flinkStateBackend; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(coder); + + flinkStateDescriptor = new ListStateDescriptor<>(address.getId(), typeInfo); + } + + @Override + public void add(T input) { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).add(input); + } catch (Exception e) { + throw new RuntimeException("Error adding to bag 
state.", e); + } + } + + @Override + public BagState readLater() { + return this; + } + + @Override + public Iterable read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).get(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public ReadableState isEmpty() { + return new ReadableState() { + @Override + public Boolean read() { + try { + Iterable result = flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).get(); + return Iterables.isEmpty(result); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + + } + + @Override + public ReadableState readLater() { + return this; + } + }; + } + + @Override + public void clear() { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).clear(); + } catch (Exception e) { + throw new RuntimeException("Error clearing state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkBagState that = (FlinkBagState) o; + + return namespace.equals(that.namespace) && address.equals(that.address); + + } + + @Override + public int hashCode() { + int result = namespace.hashCode(); + result = 31 * result + address.hashCode(); + return result; + } + } + + private static class FlinkAccumulatorCombiningState + implements AccumulatorCombiningState { + + private final StateNamespace namespace; + private final StateTag> address; + private final Combine.CombineFn combineFn; + private final ValueStateDescriptor flinkStateDescriptor; + private final AbstractStateBackend flinkStateBackend; + + FlinkAccumulatorCombiningState( + AbstractStateBackend flinkStateBackend, + StateTag> address, + Combine.CombineFn combineFn, 
+ StateNamespace namespace, + Coder accumCoder) { + + this.namespace = namespace; + this.address = address; + this.combineFn = combineFn; + this.flinkStateBackend = flinkStateBackend; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(accumCoder); + + flinkStateDescriptor = new ValueStateDescriptor<>(address.getId(), typeInfo, null); + } + + @Override + public AccumulatorCombiningState readLater() { + return this; + } + + @Override + public void add(InputT value) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + current = combineFn.createAccumulator(); + } + current = combineFn.addInput(current, value); + state.update(current); + } catch (Exception e) { + throw new RuntimeException("Error adding to state." , e); + } + } + + @Override + public void addAccum(AccumT accum) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + state.update(accum); + } else { + current = combineFn.mergeAccumulators(Lists.newArrayList(current, accum)); + state.update(current); + } + } catch (Exception e) { + throw new RuntimeException("Error adding to state.", e); + } + } + + @Override + public AccumT getAccum() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public AccumT mergeAccumulators(Iterable accumulators) { + return combineFn.mergeAccumulators(accumulators); + } + + @Override + public OutputT read() { + try { + org.apache.flink.api.common.state.ValueState state = + 
flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT accum = state.value(); + if (accum != null) { + return combineFn.extractOutput(accum); + } else { + return combineFn.extractOutput(combineFn.createAccumulator()); + } + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public ReadableState isEmpty() { + return new ReadableState() { + @Override + public Boolean read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value() == null; + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + + } + + @Override + public ReadableState readLater() { + return this; + } + }; + } + + @Override + public void clear() { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).clear(); + } catch (Exception e) { + throw new RuntimeException("Error clearing state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkAccumulatorCombiningState that = + (FlinkAccumulatorCombiningState) o; + + return namespace.equals(that.namespace) && address.equals(that.address); + + } + + @Override + public int hashCode() { + int result = namespace.hashCode(); + result = 31 * result + address.hashCode(); + return result; + } + } + + private static class FlinkKeyedAccumulatorCombiningState + implements AccumulatorCombiningState { + + private final StateNamespace namespace; + private final StateTag> address; + private final Combine.KeyedCombineFn combineFn; + private final ValueStateDescriptor flinkStateDescriptor; + private final AbstractStateBackend flinkStateBackend; + private final FlinkStateInternals flinkStateInternals; + + 
FlinkKeyedAccumulatorCombiningState( + AbstractStateBackend flinkStateBackend, + StateTag> address, + Combine.KeyedCombineFn combineFn, + StateNamespace namespace, + Coder accumCoder, + FlinkStateInternals flinkStateInternals) { + + this.namespace = namespace; + this.address = address; + this.combineFn = combineFn; + this.flinkStateBackend = flinkStateBackend; + this.flinkStateInternals = flinkStateInternals; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(accumCoder); + + flinkStateDescriptor = new ValueStateDescriptor<>(address.getId(), typeInfo, null); + } + + @Override + public AccumulatorCombiningState readLater() { + return this; + } + + @Override + public void add(InputT value) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + current = combineFn.createAccumulator(flinkStateInternals.getKey()); + } + current = combineFn.addInput(flinkStateInternals.getKey(), current, value); + state.update(current); + } catch (Exception e) { + throw new RuntimeException("Error adding to state." 
, e); + } + } + + @Override + public void addAccum(AccumT accum) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + state.update(accum); + } else { + current = combineFn.mergeAccumulators( + flinkStateInternals.getKey(), + Lists.newArrayList(current, accum)); + state.update(current); + } + } catch (Exception e) { + throw new RuntimeException("Error adding to state.", e); + } + } + + @Override + public AccumT getAccum() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public AccumT mergeAccumulators(Iterable accumulators) { + return combineFn.mergeAccumulators(flinkStateInternals.getKey(), accumulators); + } + + @Override + public OutputT read() { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT accum = state.value(); + if (accum != null) { + return combineFn.extractOutput(flinkStateInternals.getKey(), accum); + } else { + return combineFn.extractOutput( + flinkStateInternals.getKey(), + combineFn.createAccumulator(flinkStateInternals.getKey())); + } + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public ReadableState isEmpty() { + return new ReadableState() { + @Override + public Boolean read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value() == null; + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + + } + + @Override + public ReadableState 
readLater() { + return this; + } + }; + } + + @Override + public void clear() { + try { + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).clear(); + } catch (Exception e) { + throw new RuntimeException("Error clearing state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkKeyedAccumulatorCombiningState that = + (FlinkKeyedAccumulatorCombiningState) o; + + return namespace.equals(that.namespace) && address.equals(that.address); + + } + + @Override + public int hashCode() { + int result = namespace.hashCode(); + result = 31 * result + address.hashCode(); + return result; + } + } + + private static class FlinkAccumulatorCombiningStateWithContext + implements AccumulatorCombiningState { + + private final StateNamespace namespace; + private final StateTag> address; + private final CombineWithContext.KeyedCombineFnWithContext< + ? super K, InputT, AccumT, OutputT> combineFn; + private final ValueStateDescriptor flinkStateDescriptor; + private final AbstractStateBackend flinkStateBackend; + private final FlinkStateInternals flinkStateInternals; + private final CombineWithContext.Context context; + + FlinkAccumulatorCombiningStateWithContext( + AbstractStateBackend flinkStateBackend, + StateTag> address, + CombineWithContext.KeyedCombineFnWithContext< + ? 
super K, InputT, AccumT, OutputT> combineFn, + StateNamespace namespace, + Coder accumCoder, + FlinkStateInternals flinkStateInternals, + CombineWithContext.Context context) { + + this.namespace = namespace; + this.address = address; + this.combineFn = combineFn; + this.flinkStateBackend = flinkStateBackend; + this.flinkStateInternals = flinkStateInternals; + this.context = context; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(accumCoder); + + flinkStateDescriptor = new ValueStateDescriptor<>(address.getId(), typeInfo, null); + } + + @Override + public AccumulatorCombiningState readLater() { + return this; + } + + @Override + public void add(InputT value) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + current = combineFn.createAccumulator(flinkStateInternals.getKey(), context); + } + current = combineFn.addInput(flinkStateInternals.getKey(), current, value, context); + state.update(current); + } catch (Exception e) { + throw new RuntimeException("Error adding to state." 
, e); + } + } + + @Override + public void addAccum(AccumT accum) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT current = state.value(); + if (current == null) { + state.update(accum); + } else { + current = combineFn.mergeAccumulators( + flinkStateInternals.getKey(), + Lists.newArrayList(current, accum), + context); + state.update(current); + } + } catch (Exception e) { + throw new RuntimeException("Error adding to state.", e); + } + } + + @Override + public AccumT getAccum() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public AccumT mergeAccumulators(Iterable accumulators) { + return combineFn.mergeAccumulators(flinkStateInternals.getKey(), accumulators, context); + } + + @Override + public OutputT read() { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + AccumT accum = state.value(); + return combineFn.extractOutput(flinkStateInternals.getKey(), accum, context); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public ReadableState isEmpty() { + return new ReadableState() { + @Override + public Boolean read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).value() == null; + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + + } + + @Override + public ReadableState readLater() { + return this; + } + }; + } + + @Override + public void clear() { + try { + flinkStateBackend.getPartitionedState( + 
namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor).clear(); + } catch (Exception e) { + throw new RuntimeException("Error clearing state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkAccumulatorCombiningStateWithContext that = + (FlinkAccumulatorCombiningStateWithContext) o; + + return namespace.equals(that.namespace) && address.equals(that.address); + + } + + @Override + public int hashCode() { + int result = namespace.hashCode(); + result = 31 * result + address.hashCode(); + return result; + } + } + + private static class FlinkWatermarkHoldState + implements WatermarkHoldState { + private final StateTag> address; + private final OutputTimeFn outputTimeFn; + private final StateNamespace namespace; + private final AbstractStateBackend flinkStateBackend; + private final FlinkStateInternals flinkStateInternals; + private final ValueStateDescriptor flinkStateDescriptor; + + public FlinkWatermarkHoldState( + AbstractStateBackend flinkStateBackend, + FlinkStateInternals flinkStateInternals, + StateTag> address, + StateNamespace namespace, + OutputTimeFn outputTimeFn) { + this.address = address; + this.outputTimeFn = outputTimeFn; + this.namespace = namespace; + this.flinkStateBackend = flinkStateBackend; + this.flinkStateInternals = flinkStateInternals; + + CoderTypeInformation typeInfo = new CoderTypeInformation<>(InstantCoder.of()); + flinkStateDescriptor = new ValueStateDescriptor<>(address.getId(), typeInfo, null); + } + + @Override + public OutputTimeFn getOutputTimeFn() { + return outputTimeFn; + } + + @Override + public WatermarkHoldState readLater() { + return this; + } + + @Override + public ReadableState isEmpty() { + return new ReadableState() { + @Override + public Boolean read() { + try { + return flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + 
flinkStateDescriptor).value() == null; + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public ReadableState readLater() { + return this; + } + }; + + } + + @Override + public void add(Instant value) { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + + Instant current = state.value(); + if (current == null) { + state.update(value); + flinkStateInternals.watermarkHolds.put(namespace.stringKey(), value); + } else { + Instant combined = outputTimeFn.combine(current, value); + state.update(combined); + flinkStateInternals.watermarkHolds.put(namespace.stringKey(), combined); + } + } catch (Exception e) { + throw new RuntimeException("Error updating state.", e); + } + } + + @Override + public Instant read() { + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + return state.value(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public void clear() { + flinkStateInternals.watermarkHolds.remove(namespace.stringKey()); + try { + org.apache.flink.api.common.state.ValueState state = + flinkStateBackend.getPartitionedState( + namespace.stringKey(), + StringSerializer.INSTANCE, + flinkStateDescriptor); + state.clear(); + } catch (Exception e) { + throw new RuntimeException("Error reading state.", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + FlinkWatermarkHoldState that = (FlinkWatermarkHoldState) o; + + if (!address.equals(that.address)) { + return false; + } + if (!outputTimeFn.equals(that.outputTimeFn)) { + return false; + } + return namespace.equals(that.namespace); + + 
} + + @Override + public int hashCode() { + int result = address.hashCode(); + result = 31 * result + outputTimeFn.hashCode(); + result = 31 * result + namespace.hashCode(); + return result; + } + } +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java new file mode 100644 index 0000000000000..5751aac781239 --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItem.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import java.util.Collections; +import org.apache.beam.sdk.util.KeyedWorkItem; +import org.apache.beam.sdk.util.TimerInternals; +import org.apache.beam.sdk.util.WindowedValue; + +public class SingletonKeyedWorkItem implements KeyedWorkItem { + + final K key; + final WindowedValue value; + + public SingletonKeyedWorkItem(K key, WindowedValue value) { + this.key = key; + this.value = value; + } + + @Override + public K key() { + return key; + } + + public WindowedValue value() { + return value; + } + + @Override + public Iterable timersIterable() { + return Collections.EMPTY_LIST; + } + + @Override + public Iterable> elementsIterable() { + return Collections.singletonList(value); + } +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java new file mode 100644 index 0000000000000..5e583e9013c27 --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/SingletonKeyedWorkItemCoder.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import static com.google.common.base.Preconditions.checkArgument; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.List; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.KeyedWorkItem; +import org.apache.beam.sdk.util.KeyedWorkItemCoder; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.WindowedValue; + +public class SingletonKeyedWorkItemCoder extends StandardCoder> { + /** + * Create a new {@link KeyedWorkItemCoder} with the provided key coder, element coder, and window + * coder. 
+ */ + public static SingletonKeyedWorkItemCoder of( + Coder keyCoder, Coder elemCoder, Coder windowCoder) { + return new SingletonKeyedWorkItemCoder<>(keyCoder, elemCoder, windowCoder); + } + + @JsonCreator + public static SingletonKeyedWorkItemCoder of( + @JsonProperty(PropertyNames.COMPONENT_ENCODINGS) List> components) { + checkArgument(components.size() == 3, "Expecting 3 components, got %s", components.size()); + @SuppressWarnings("unchecked") + Coder keyCoder = (Coder) components.get(0); + @SuppressWarnings("unchecked") + Coder elemCoder = (Coder) components.get(1); + @SuppressWarnings("unchecked") + Coder windowCoder = (Coder) components.get(2); + return new SingletonKeyedWorkItemCoder<>(keyCoder, elemCoder, windowCoder); + } + + private final Coder keyCoder; + private final Coder elemCoder; + private final Coder windowCoder; + private final WindowedValue.FullWindowedValueCoder valueCoder; + + private SingletonKeyedWorkItemCoder( + Coder keyCoder, Coder elemCoder, Coder windowCoder) { + this.keyCoder = keyCoder; + this.elemCoder = elemCoder; + this.windowCoder = windowCoder; + valueCoder= WindowedValue.FullWindowedValueCoder.of(elemCoder, windowCoder); + } + + public Coder getKeyCoder() { + return keyCoder; + } + + public Coder getElementCoder() { + return elemCoder; + } + + @Override + public void encode(SingletonKeyedWorkItem value, OutputStream outStream, Context context) + throws CoderException, IOException { + Context nestedContext = context.nested(); + keyCoder.encode(value.key(), outStream, nestedContext); + valueCoder.encode(value.value, outStream, nestedContext); + } + + @Override + public SingletonKeyedWorkItem decode(InputStream inStream, Context context) + throws CoderException, IOException { + Context nestedContext = context.nested(); + K key = keyCoder.decode(inStream, nestedContext); + WindowedValue value = valueCoder.decode(inStream, nestedContext); + return new SingletonKeyedWorkItem<>(key, value); + } + + @Override + public List> 
getCoderArguments() { + return ImmutableList.of(keyCoder, elemCoder, windowCoder); + } + + @Override + public void verifyDeterministic() throws NonDeterministicException { + keyCoder.verifyDeterministic(); + elemCoder.verifyDeterministic(); + windowCoder.verifyDeterministic(); + } + + /** + * {@inheritDoc}. + * + * {@link KeyedWorkItemCoder} is not consistent with equals as it can return a + * {@link KeyedWorkItem} of a type different from the originally encoded type. + */ + @Override + public boolean consistentWithEquals() { + return false; + } + +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java new file mode 100644 index 0000000000000..c6dde5197dce0 --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WindowDoFnOperator.java @@ -0,0 +1,343 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.PriorityQueue; +import java.util.Queue; +import java.util.Set; +import javax.annotation.Nullable; +import org.apache.beam.runners.core.GroupAlsoByWindowViaWindowSetDoFn; +import org.apache.beam.runners.flink.translation.wrappers.DataInputViewWrapper; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.ExecutionContext; +import org.apache.beam.sdk.util.KeyedWorkItem; +import org.apache.beam.sdk.util.KeyedWorkItems; +import org.apache.beam.sdk.util.SystemReduceFn; +import org.apache.beam.sdk.util.TimeDomain; +import org.apache.beam.sdk.util.TimerInternals; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.util.state.StateInternalsFactory; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.tuple.Tuple2; +import org.apache.flink.core.memory.DataInputView; +import org.apache.flink.runtime.state.AbstractStateBackend; +import org.apache.flink.runtime.state.StateHandle; +import org.apache.flink.streaming.api.watermark.Watermark; +import org.apache.flink.streaming.runtime.tasks.StreamTaskState; 
+import org.joda.time.Instant; + +/** + * Flink operator for executing window {@link DoFn DoFns}. + * + * @param + * @param + */ +public class WindowDoFnOperator + extends DoFnOperator, KV, WindowedValue>> { + + private final Coder keyCoder; + private final TimerInternals.TimerDataCoder timerCoder; + + private transient Set> watermarkTimers; + private transient Queue> watermarkTimersQueue; + + private FlinkStateInternals stateInternals; + + private final SystemReduceFn systemReduceFn; + + public WindowDoFnOperator( + SystemReduceFn systemReduceFn, + TypeInformation>> inputType, + TupleTag> mainOutputTag, + List> sideOutputTags, + OutputManagerFactory>> outputManagerFactory, + WindowingStrategy windowingStrategy, + Map> sideInputTagMapping, + Collection> sideInputs, + PipelineOptions options, + Coder keyCoder) { + super( + null, + inputType, + mainOutputTag, + sideOutputTags, + outputManagerFactory, + windowingStrategy, + sideInputTagMapping, + sideInputs, + options); + + this.systemReduceFn = systemReduceFn; + + this.keyCoder = keyCoder; + this.timerCoder = + TimerInternals.TimerDataCoder.of(windowingStrategy.getWindowFn().windowCoder()); + } + + @Override + protected OldDoFn, KV> getDoFn() { + StateInternalsFactory stateInternalsFactory = new StateInternalsFactory() { + @Override + public StateInternals stateInternalsForKey(K key) { + //this will implicitly be keyed by the key of the incoming + // element or by the key of a firing timer + return stateInternals; + } + }; + + // we have to do the unchecked cast because GroupAlsoByWindowViaWindowSetDoFn.create + // has the window type as generic parameter while WindowingStrategy is almost always + // untyped. 
+ @SuppressWarnings("unchecked") + OldDoFn, KV> doFn = + GroupAlsoByWindowViaWindowSetDoFn.create( + windowingStrategy, stateInternalsFactory, (SystemReduceFn) systemReduceFn); + return doFn; + } + + + @Override + public void open() throws Exception { + + // might already be initialized from restoreTimers() + if (watermarkTimers == null) { + watermarkTimers = new HashSet<>(); + + watermarkTimersQueue = new PriorityQueue<>( + 10, + new Comparator>() { + @Override + public int compare( + Tuple2 o1, + Tuple2 o2) { + return o1.f1.compareTo(o2.f1); + } + }); + } + + stateInternals = new FlinkStateInternals<>(getStateBackend(), keyCoder); + + // call super at the end because this will call getDoFn() which requires stateInternals + // to be set + super.open(); + } + + @Override + protected ExecutionContext.StepContext createStepContext() { + return new WindowDoFnOperator.StepContext(); + } + + private void registerEventTimeTimer(TimerInternals.TimerData timer) { + Tuple2 keyedTimer = + new Tuple2<>((ByteBuffer) getStateBackend().getCurrentKey(), timer); + if (watermarkTimers.add(keyedTimer)) { + watermarkTimersQueue.add(keyedTimer); + } + } + + private void deleteEventTimeTimer(TimerInternals.TimerData timer) { + Tuple2 keyedTimer = + new Tuple2<>((ByteBuffer) getStateBackend().getCurrentKey(), timer); + if (watermarkTimers.remove(keyedTimer)) { + watermarkTimersQueue.remove(keyedTimer); + } + + } + + @Override + public void processWatermark(Watermark mark) throws Exception { + processWatermark1(mark); + } + + @Override + public void processWatermark1(Watermark mark) throws Exception { + pushbackDoFnRunner.startBundle(); + + this.currentInputWatermark = mark.getTimestamp(); + + // hold back by the pushed back values waiting for side inputs + long actualInputWatermark = Math.min(getPushbackWatermarkHold(), mark.getTimestamp()); + + boolean fire; + + do { + Tuple2 timer = watermarkTimersQueue.peek(); + if (timer != null && timer.f1.getTimestamp().getMillis() < 
actualInputWatermark) { + fire = true; + + System.out.println("FIRING: " + timer); + + watermarkTimersQueue.remove(); + watermarkTimers.remove(timer); + + setKeyContext(timer.f0); + + pushbackDoFnRunner.processElement(WindowedValue.valueInGlobalWindow( + KeyedWorkItems.timersWorkItem( + stateInternals.getKey(), + Collections.singletonList(timer.f1)))); + + } else { + fire = false; + } + } while (fire); + + Instant watermarkHold = stateInternals.watermarkHold(); + + long combinedWatermarkHold = Math.min(watermarkHold.getMillis(), getPushbackWatermarkHold()); + + long potentialOutputWatermark = Math.min(currentInputWatermark, combinedWatermarkHold); + + if (potentialOutputWatermark > currentOutputWatermark) { + currentOutputWatermark = potentialOutputWatermark; + output.emitWatermark(new Watermark(currentOutputWatermark)); + } + pushbackDoFnRunner.finishBundle(); + + } + + @Override + public StreamTaskState snapshotOperatorState(long checkpointId, long timestamp) throws Exception { + StreamTaskState result = super.snapshotOperatorState(checkpointId, timestamp); + + AbstractStateBackend.CheckpointStateOutputView outputView = + getStateBackend().createCheckpointStateOutputView(checkpointId, timestamp); + + snapshotTimers(outputView); + + StateHandle handle = outputView.closeAndGetHandle(); + + // this might overwrite stuff that super checkpointed + result.setOperatorState(handle); + + return result; + } + + @Override + public void restoreState(StreamTaskState state, long recoveryTimestamp) throws Exception { + super.restoreState(state, recoveryTimestamp); + + @SuppressWarnings("unchecked") + StateHandle operatorState = + (StateHandle) state.getOperatorState(); + + DataInputView in = operatorState.getState(getUserCodeClassloader()); + + restoreTimers(new DataInputViewWrapper(in)); + } + + private void restoreTimers(InputStream in) throws IOException { + DataInputStream dataIn = new DataInputStream(in); + int numWatermarkTimers = dataIn.readInt(); + + watermarkTimers = 
new HashSet<>(numWatermarkTimers); + watermarkTimersQueue = new PriorityQueue<>(Math.max(numWatermarkTimers, 1)); + + for (int i = 0; i < numWatermarkTimers; i++) { + int length = dataIn.readInt(); + byte[] keyBytes = new byte[length]; + dataIn.readFully(keyBytes); + TimerInternals.TimerData timerData = timerCoder.decode(dataIn, Coder.Context.NESTED); + Tuple2 keyedTimer = + new Tuple2<>(ByteBuffer.wrap(keyBytes), timerData); + if (watermarkTimers.add(keyedTimer)) { + watermarkTimersQueue.add(keyedTimer); + } + } + } + + private void snapshotTimers(OutputStream out) throws IOException { + DataOutputStream dataOut = new DataOutputStream(out); + dataOut.writeInt(watermarkTimersQueue.size()); + for (Tuple2 timer : watermarkTimersQueue) { + dataOut.writeInt(timer.f0.limit()); + dataOut.write(timer.f0.array(), 0, timer.f0.limit()); + timerCoder.encode(timer.f1, dataOut, Coder.Context.NESTED); + } + } + + /** + * {@link StepContext} for running {@link DoFn DoFns} on Flink. This does not allow + * accessing state or timer internals. 
+ */ + protected class StepContext extends DoFnOperator.StepContext { + + @Override + public TimerInternals timerInternals() { + return new TimerInternals() { + @Override + public void setTimer(TimerData timerKey) { + if (timerKey.getDomain().equals(TimeDomain.EVENT_TIME)) { + registerEventTimeTimer(timerKey); + } else { + throw new UnsupportedOperationException("Processing-time timers not supported."); + } + } + + @Override + public void deleteTimer(TimerData timerKey) { + deleteEventTimeTimer(timerKey); + } + + @Override + public Instant currentProcessingTime() { + return Instant.now(); + } + + @Nullable + @Override + public Instant currentSynchronizedProcessingTime() { + return Instant.now(); + } + + @Override + public Instant currentInputWatermarkTime() { + return new Instant(Math.min(currentInputWatermark, getPushbackWatermarkHold())); + } + + @Nullable + @Override + public Instant currentOutputWatermarkTime() { + return new Instant(currentOutputWatermark); + } + }; + } + } + +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WorkItemKeySelector.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WorkItemKeySelector.java new file mode 100644 index 0000000000000..51d9e0c9399cb --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/WorkItemKeySelector.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.beam.runners.flink.translation.wrappers.streaming; + +import java.nio.ByteBuffer; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.KeyedWorkItem; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.api.java.functions.KeySelector; +import org.apache.flink.api.java.typeutils.GenericTypeInfo; +import org.apache.flink.api.java.typeutils.ResultTypeQueryable; + +/** + * {@link KeySelector} that retrieves a key from a {@link KeyedWorkItem}. This will return + * the key as encoded by the provided {@link Coder} in a {@link ByteBuffer}. This ensures + * that all key comparisons/hashing happen on the encoded form. 
+ */ +public class WorkItemKeySelector + implements KeySelector>, ByteBuffer>, ResultTypeQueryable { + + private final Coder keyCoder; + + public WorkItemKeySelector(Coder keyCoder) { + this.keyCoder = keyCoder; + } + + @Override + public ByteBuffer getKey(WindowedValue> value) throws Exception { + K key = value.getValue().key(); + byte[] keyBytes = CoderUtils.encodeToByteArray(keyCoder, key); + return ByteBuffer.wrap(keyBytes); + } + + @Override + public TypeInformation getProducedType() { + return new GenericTypeInfo<>(ByteBuffer.class); + } +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/BoundedSourceWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/BoundedSourceWrapper.java new file mode 100644 index 0000000000000..3cb93c0519b3e --- /dev/null +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/BoundedSourceWrapper.java @@ -0,0 +1,216 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.flink.translation.wrappers.streaming.io; + +import com.google.common.annotations.VisibleForTesting; +import java.util.ArrayList; +import java.util.List; +import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; +import org.apache.beam.sdk.io.BoundedSource; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.windowing.GlobalWindow; +import org.apache.beam.sdk.transforms.windowing.PaneInfo; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction; +import org.apache.flink.streaming.api.operators.StreamSource; +import org.apache.flink.streaming.api.watermark.Watermark; +import org.joda.time.Instant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for executing {@link BoundedSource BoundedSources} as a Flink Source. + */ +public class BoundedSourceWrapper + extends RichParallelSourceFunction> { + + private static final Logger LOG = LoggerFactory.getLogger(BoundedSourceWrapper.class); + + /** + * Keep the options so that we can initialize the readers. + */ + private final SerializedPipelineOptions serializedOptions; + + /** + * The split sources. We split them in the constructor to ensure that all parallel + * sources are consistent about the split sources. + */ + private List> splitSources; + + /** + * Make it a field so that we can access it in {@link #close()}. + */ + private transient List> readers; + + /** + * Initialize here and not in run() to prevent races where we cancel a job before run() is + * ever called or run() is called after cancel(). 
+ */ + private volatile boolean isRunning = true; + + @SuppressWarnings("unchecked") + public BoundedSourceWrapper( + PipelineOptions pipelineOptions, + BoundedSource source, + int parallelism) throws Exception { + this.serializedOptions = new SerializedPipelineOptions(pipelineOptions); + + long desiredBundleSize = source.getEstimatedSizeBytes(pipelineOptions) / parallelism; + + // get the splits early. we assume that the generated splits are stable, + // this is necessary so that the mapping of state to source is correct + // when restoring + splitSources = source.splitIntoBundles(desiredBundleSize, pipelineOptions); + } + + @Override + public void run(SourceContext> ctx) throws Exception { + if (!(ctx instanceof StreamSource.ManualWatermarkContext)) { + throw new RuntimeException( + "Cannot emit watermarks, this hints at a misconfiguration/bug."); + } + + // figure out which split sources we're responsible for + int subtaskIndex = getRuntimeContext().getIndexOfThisSubtask(); + int numSubtasks = getRuntimeContext().getNumberOfParallelSubtasks(); + + List> localSources = new ArrayList<>(); + + for (int i = 0; i < splitSources.size(); i++) { + if (i % numSubtasks == subtaskIndex) { + localSources.add(splitSources.get(i)); + } + } + + LOG.info("Bounded Flink Source {}/{} is reading from sources: {}", + subtaskIndex, + numSubtasks, + localSources); + + readers = new ArrayList<>(); + // initialize readers from scratch + for (BoundedSource source : localSources) { + readers.add(source.createReader(serializedOptions.getPipelineOptions())); + } + + if (readers.size() == 1) { + // the easy case, we just read from one reader + BoundedSource.BoundedReader reader = readers.get(0); + + boolean dataAvailable = reader.start(); + if (dataAvailable) { + emitElement(ctx, reader); + } + + while (isRunning) { + dataAvailable = reader.advance(); + + if (dataAvailable) { + emitElement(ctx, reader); + } else { + break; + } + } + } else { + // a bit more complicated, we are responsible 
for several readers + // loop through them and sleep if none of them had any data + + int currentReader = 0; + + // start each reader and emit data if immediately available + for (BoundedSource.BoundedReader reader : readers) { + boolean dataAvailable = reader.start(); + if (dataAvailable) { + emitElement(ctx, reader); + } + } + + // a flag telling us whether any of the readers had data + // if no reader had data, sleep for bit + boolean hadData = false; + while (isRunning && !readers.isEmpty()) { + BoundedSource.BoundedReader reader = readers.get(currentReader); + boolean dataAvailable = reader.advance(); + + if (dataAvailable) { + emitElement(ctx, reader); + hadData = true; + } else { + readers.remove(currentReader); + currentReader--; + if (readers.isEmpty()) { + break; + } + } + + currentReader = (currentReader + 1) % readers.size(); + if (currentReader == 0 && !hadData) { + Thread.sleep(50); + } else if (currentReader == 0) { + hadData = false; + } + } + + } + + // emit final Long.MAX_VALUE watermark, just to be sure + ctx.emitWatermark(new Watermark(Long.MAX_VALUE)); + } + + /** + * Emit the current element from the given Reader. The reader is guaranteed to have data. 
+ */ + private void emitElement( + SourceContext> ctx, + BoundedSource.BoundedReader reader) { + // make sure that reader state update and element emission are atomic + // with respect to snapshots + synchronized (ctx.getCheckpointLock()) { + + OutputT item = reader.getCurrent(); + Instant timestamp = reader.getCurrentTimestamp(); + + WindowedValue windowedValue = + WindowedValue.of(item, timestamp, GlobalWindow.INSTANCE, PaneInfo.NO_FIRING); + ctx.collectWithTimestamp(windowedValue, timestamp.getMillis()); + } + } + + @Override + public void close() throws Exception { + super.close(); + if (readers != null) { + for (BoundedSource.BoundedReader reader: readers) { + reader.close(); + } + } + } + + @Override + public void cancel() { + isRunning = false; + } + + /** + * Visible so that we can check this in tests. Must not be used for anything else. + */ + @VisibleForTesting + public List> getSplitSources() { + return splitSources; + } +} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/FlinkStreamingCreateFunction.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/FlinkStreamingCreateFunction.java deleted file mode 100644 index 0d72f657ffda0..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/FlinkStreamingCreateFunction.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.io; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.transforms.windowing.GlobalWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.util.WindowedValue; - -import org.apache.flink.api.common.functions.FlatMapFunction; -import org.apache.flink.util.Collector; -import org.joda.time.Instant; - -import java.io.ByteArrayInputStream; -import java.util.List; - -/** - * This flat map function bootstraps from collection elements and turns them into WindowedValues - * (as required by the Flink runner). 
- */ -public class FlinkStreamingCreateFunction implements FlatMapFunction> { - - private final List elements; - private final Coder coder; - - public FlinkStreamingCreateFunction(List elements, Coder coder) { - this.elements = elements; - this.coder = coder; - } - - @Override - public void flatMap(IN value, Collector> out) throws Exception { - - for (byte[] element : elements) { - ByteArrayInputStream bai = new ByteArrayInputStream(element); - OUT outValue = coder.decode(bai, Coder.Context.OUTER); - - out.collect(WindowedValue.of(outValue, Instant.now(), GlobalWindow.INSTANCE, PaneInfo.NO_FIRING)); - } - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java index 098473dec2301..2117e9d193533 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSink.java @@ -18,23 +18,19 @@ package org.apache.beam.runners.flink.translation.wrappers.streaming.io; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collection; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.io.Sink; import org.apache.beam.sdk.io.UnboundedSource; -import org.apache.beam.sdk.io.Write; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.util.CloudObject; import org.apache.beam.sdk.util.common.ElementByteSizeObserver; import org.apache.flink.streaming.api.functions.sink.SinkFunction; -import org.apache.flink.streaming.api.functions.source.SourceFunction; - -import javax.annotation.Nullable; -import java.io.IOException; -import 
java.io.InputStream; -import java.io.OutputStream; -import java.util.Collection; -import java.util.List; /** * A wrapper translating Flink sinks implementing the {@link SinkFunction} interface, into diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java index 716ca304e5399..c6e0825648f46 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedFlinkSource.java @@ -19,18 +19,15 @@ import static com.google.common.base.Preconditions.checkNotNull; +import java.util.List; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; - import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks; import org.apache.flink.streaming.api.functions.IngestionTimeExtractor; import org.apache.flink.streaming.api.functions.source.SourceFunction; -import java.util.List; - -import javax.annotation.Nullable; - /** * A wrapper translating Flink Sources implementing the {@link SourceFunction} interface, into * unbounded Beam sources (see {@link UnboundedSource}). 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSocketSource.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSocketSource.java index 08bdb509da70e..8d37fe70dfb8b 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSocketSource.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSocketSource.java @@ -19,15 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.io.UnboundedSource; -import org.apache.beam.sdk.options.PipelineOptions; - -import org.joda.time.Instant; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; @@ -37,8 +28,14 @@ import java.util.Collections; import java.util.List; import java.util.NoSuchElementException; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.io.UnboundedSource; +import org.apache.beam.sdk.options.PipelineOptions; +import org.joda.time.Instant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An example unbounded Beam source that reads input from a socket. This is used mainly for testing and debugging. 
diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java index 7f26a658bc118..8647322dfb865 100644 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java +++ b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java @@ -17,6 +17,12 @@ */ package org.apache.beam.runners.flink.translation.wrappers.streaming.io; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.collect.Lists; +import java.io.ByteArrayInputStream; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; @@ -29,11 +35,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import com.google.common.collect.Lists; - import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.checkpoint.Checkpointed; @@ -46,10 +47,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.ByteArrayInputStream; -import java.util.ArrayList; -import java.util.List; - /** * Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source. 
*/ diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java deleted file mode 100644 index a0b33f802d7c0..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/AbstractFlinkTimerInternals.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.TimerInternals; - -import org.joda.time.Instant; - -import java.io.IOException; -import java.io.Serializable; - -import javax.annotation.Nullable; - -/** - * An implementation of Beam's {@link TimerInternals}, that also provides serialization functionality. - * The latter is used when snapshots of the current state are taken, for fault-tolerance. 
- * */ -public abstract class AbstractFlinkTimerInternals implements TimerInternals, Serializable { - private Instant currentInputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; - private Instant currentOutputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE; - - public void setCurrentInputWatermark(Instant watermark) { - checkIfValidInputWatermark(watermark); - this.currentInputWatermark = watermark; - } - - public void setCurrentOutputWatermark(Instant watermark) { - checkIfValidOutputWatermark(watermark); - this.currentOutputWatermark = watermark; - } - - private void setCurrentInputWatermarkAfterRecovery(Instant watermark) { - if (!currentInputWatermark.isEqual(BoundedWindow.TIMESTAMP_MIN_VALUE)) { - throw new RuntimeException("Explicitly setting the input watermark is only allowed on " + - "initialization after recovery from a node failure. Apparently this is not " + - "the case here as the watermark is already set."); - } - this.currentInputWatermark = watermark; - } - - private void setCurrentOutputWatermarkAfterRecovery(Instant watermark) { - if (!currentOutputWatermark.isEqual(BoundedWindow.TIMESTAMP_MIN_VALUE)) { - throw new RuntimeException("Explicitly setting the output watermark is only allowed on " + - "initialization after recovery from a node failure. 
Apparently this is not " + - "the case here as the watermark is already set."); - } - this.currentOutputWatermark = watermark; - } - - @Override - public Instant currentProcessingTime() { - return Instant.now(); - } - - @Override - public Instant currentInputWatermarkTime() { - return currentInputWatermark; - } - - @Nullable - @Override - public Instant currentSynchronizedProcessingTime() { - // TODO - return null; - } - - @Override - public Instant currentOutputWatermarkTime() { - return currentOutputWatermark; - } - - private void checkIfValidInputWatermark(Instant newWatermark) { - if (currentInputWatermark.isAfter(newWatermark)) { - throw new IllegalArgumentException(String.format( - "Cannot set current input watermark to %s. Newer watermarks " + - "must be no earlier than the current one (%s).", - newWatermark, currentInputWatermark)); - } - } - - private void checkIfValidOutputWatermark(Instant newWatermark) { - if (currentOutputWatermark.isAfter(newWatermark)) { - throw new IllegalArgumentException(String.format( - "Cannot set current output watermark to %s. 
Newer watermarks " + - "must be no earlier than the current one (%s).", - newWatermark, currentOutputWatermark)); - } - } - - public void encodeTimerInternals(OldDoFn.ProcessContext context, - StateCheckpointWriter writer, - KvCoder kvCoder, - Coder windowCoder) throws IOException { - if (context == null) { - throw new RuntimeException("The Context has not been initialized."); - } - - writer.setTimestamp(currentInputWatermark); - writer.setTimestamp(currentOutputWatermark); - } - - public void restoreTimerInternals(StateCheckpointReader reader, - KvCoder kvCoder, - Coder windowCoder) throws IOException { - setCurrentInputWatermarkAfterRecovery(reader.getTimestamp()); - setCurrentOutputWatermarkAfterRecovery(reader.getTimestamp()); - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/FlinkStateInternals.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/FlinkStateInternals.java deleted file mode 100644 index e6a43dcb03b6a..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/FlinkStateInternals.java +++ /dev/null @@ -1,733 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.CombineWithContext; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import org.apache.beam.sdk.util.state.AccumulatorCombiningState; -import org.apache.beam.sdk.util.state.BagState; -import org.apache.beam.sdk.util.state.ReadableState; -import org.apache.beam.sdk.util.state.State; -import org.apache.beam.sdk.util.state.StateContext; -import org.apache.beam.sdk.util.state.StateInternals; -import org.apache.beam.sdk.util.state.StateNamespace; -import org.apache.beam.sdk.util.state.StateNamespaces; -import org.apache.beam.sdk.util.state.StateTable; -import org.apache.beam.sdk.util.state.StateTag; -import org.apache.beam.sdk.util.state.StateTags; -import org.apache.beam.sdk.util.state.ValueState; -import org.apache.beam.sdk.util.state.WatermarkHoldState; -import org.apache.beam.sdk.values.PCollectionView; - -import com.google.protobuf.ByteString; - -import org.apache.flink.util.InstantiationUtil; -import org.joda.time.Instant; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * An implementation of the Beam {@link StateInternals}. This implementation simply keeps elements in memory. - * This state is periodically checkpointed by Flink, for fault-tolerance. 
- * - * TODO: State should be rewritten to redirect to Flink per-key state so that coders and combiners don't need - * to be serialized along with encoded values when snapshotting. - */ -public class FlinkStateInternals implements StateInternals { - - private final K key; - - private final Coder keyCoder; - - private final Coder windowCoder; - - private final OutputTimeFn outputTimeFn; - - private Instant watermarkHoldAccessor; - - public FlinkStateInternals(K key, - Coder keyCoder, - Coder windowCoder, - OutputTimeFn outputTimeFn) { - this.key = key; - this.keyCoder = keyCoder; - this.windowCoder = windowCoder; - this.outputTimeFn = outputTimeFn; - } - - public Instant getWatermarkHold() { - return watermarkHoldAccessor; - } - - /** - * This is the interface state has to implement in order for it to be fault tolerant when - * executed by the FlinkRunner. - */ - private interface CheckpointableIF { - - boolean shouldPersist(); - - void persistState(StateCheckpointWriter checkpointBuilder) throws IOException; - } - - protected final StateTable inMemoryState = new StateTable() { - @Override - protected StateTag.StateBinder binderForNamespace(final StateNamespace namespace, final StateContext c) { - return new StateTag.StateBinder() { - - @Override - public ValueState bindValue(StateTag> address, Coder coder) { - return new FlinkInMemoryValue<>(encodeKey(namespace, address), coder); - } - - @Override - public BagState bindBag(StateTag> address, Coder elemCoder) { - return new FlinkInMemoryBag<>(encodeKey(namespace, address), elemCoder); - } - - @Override - public AccumulatorCombiningState bindCombiningValue( - StateTag> address, - Coder accumCoder, Combine.CombineFn combineFn) { - return new FlinkInMemoryKeyedCombiningValue<>(encodeKey(namespace, address), combineFn, accumCoder, c); - } - - @Override - public AccumulatorCombiningState bindKeyedCombiningValue( - StateTag> address, - Coder accumCoder, - Combine.KeyedCombineFn combineFn) { - return new 
FlinkInMemoryKeyedCombiningValue<>(encodeKey(namespace, address), combineFn, accumCoder, c); - } - - @Override - public AccumulatorCombiningState bindKeyedCombiningValueWithContext( - StateTag> address, - Coder accumCoder, - CombineWithContext.KeyedCombineFnWithContext combineFn) { - return new FlinkInMemoryKeyedCombiningValue<>(encodeKey(namespace, address), combineFn, accumCoder, c); - } - - @Override - public WatermarkHoldState bindWatermark(StateTag> address, OutputTimeFn outputTimeFn) { - return new FlinkWatermarkHoldStateImpl<>(encodeKey(namespace, address), outputTimeFn); - } - }; - } - }; - - @Override - public K getKey() { - return key; - } - - @Override - public StateT state(StateNamespace namespace, StateTag address) { - return inMemoryState.get(namespace, address, null); - } - - @Override - public T state(StateNamespace namespace, StateTag address, StateContext c) { - return inMemoryState.get(namespace, address, c); - } - - public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException { - checkpointBuilder.writeInt(getNoOfElements()); - - for (State location : inMemoryState.values()) { - if (!(location instanceof CheckpointableIF)) { - throw new IllegalStateException(String.format( - "%s wasn't created by %s -- unable to persist it", - location.getClass().getSimpleName(), - getClass().getSimpleName())); - } - ((CheckpointableIF) location).persistState(checkpointBuilder); - } - } - - public void restoreState(StateCheckpointReader checkpointReader, ClassLoader loader) - throws IOException, ClassNotFoundException { - - // the number of elements to read. - int noOfElements = checkpointReader.getInt(); - for (int i = 0; i < noOfElements; i++) { - decodeState(checkpointReader, loader); - } - } - - /** - * We remove the first character which encodes the type of the stateTag ('s' for system - * and 'u' for user). For more details check out the source of - * {@link StateTags.StateTagBase#getId()}. 
- */ - private void decodeState(StateCheckpointReader reader, ClassLoader loader) - throws IOException, ClassNotFoundException { - - StateType stateItemType = StateType.deserialize(reader); - ByteString stateKey = reader.getTag(); - - // first decode the namespace and the tagId... - String[] namespaceAndTag = stateKey.toStringUtf8().split("\\+"); - if (namespaceAndTag.length != 2) { - throw new IllegalArgumentException("Invalid stateKey " + stateKey.toString() + "."); - } - StateNamespace namespace = StateNamespaces.fromString(namespaceAndTag[0], windowCoder); - - // ... decide if it is a system or user stateTag... - char ownerTag = namespaceAndTag[1].charAt(0); - if (ownerTag != 's' && ownerTag != 'u') { - throw new RuntimeException("Invalid StateTag name."); - } - boolean isSystemTag = ownerTag == 's'; - String tagId = namespaceAndTag[1].substring(1); - - // ...then decode the coder (if there is one)... - Coder coder = null; - switch (stateItemType) { - case VALUE: - case LIST: - case ACCUMULATOR: - ByteString coderBytes = reader.getData(); - coder = InstantiationUtil.deserializeObject(coderBytes.toByteArray(), loader); - break; - case WATERMARK: - break; - } - - // ...then decode the combiner function (if there is one)... - CombineWithContext.KeyedCombineFnWithContext combineFn = null; - switch (stateItemType) { - case ACCUMULATOR: - ByteString combinerBytes = reader.getData(); - combineFn = InstantiationUtil.deserializeObject(combinerBytes.toByteArray(), loader); - break; - case VALUE: - case LIST: - case WATERMARK: - break; - } - - //... and finally, depending on the type of the state being decoded, - // 1) create the adequate stateTag, - // 2) create the state container, - // 3) restore the actual content. - switch (stateItemType) { - case VALUE: { - StateTag stateTag = StateTags.value(tagId, coder); - stateTag = isSystemTag ? 
StateTags.makeSystemTagInternal(stateTag) : stateTag; - @SuppressWarnings("unchecked") - FlinkInMemoryValue value = (FlinkInMemoryValue) inMemoryState.get(namespace, stateTag, null); - value.restoreState(reader); - break; - } - case WATERMARK: { - @SuppressWarnings("unchecked") - StateTag> stateTag = StateTags.watermarkStateInternal(tagId, outputTimeFn); - stateTag = isSystemTag ? StateTags.makeSystemTagInternal(stateTag) : stateTag; - @SuppressWarnings("unchecked") - FlinkWatermarkHoldStateImpl watermark = (FlinkWatermarkHoldStateImpl) inMemoryState.get(namespace, stateTag, null); - watermark.restoreState(reader); - break; - } - case LIST: { - StateTag stateTag = StateTags.bag(tagId, coder); - stateTag = isSystemTag ? StateTags.makeSystemTagInternal(stateTag) : stateTag; - FlinkInMemoryBag bag = (FlinkInMemoryBag) inMemoryState.get(namespace, stateTag, null); - bag.restoreState(reader); - break; - } - case ACCUMULATOR: { - @SuppressWarnings("unchecked") - StateTag> stateTag = StateTags.keyedCombiningValueWithContext(tagId, (Coder) coder, combineFn); - stateTag = isSystemTag ? 
StateTags.makeSystemTagInternal(stateTag) : stateTag; - @SuppressWarnings("unchecked") - FlinkInMemoryKeyedCombiningValue combiningValue = - (FlinkInMemoryKeyedCombiningValue) inMemoryState.get(namespace, stateTag, null); - combiningValue.restoreState(reader); - break; - } - default: - throw new RuntimeException("Unknown State Type " + stateItemType + "."); - } - } - - private ByteString encodeKey(StateNamespace namespace, StateTag address) { - StringBuilder sb = new StringBuilder(); - try { - namespace.appendTo(sb); - sb.append('+'); - address.appendTo(sb); - } catch (IOException e) { - throw new RuntimeException(e); - } - return ByteString.copyFromUtf8(sb.toString()); - } - - private int getNoOfElements() { - int noOfElements = 0; - for (State state : inMemoryState.values()) { - if (!(state instanceof CheckpointableIF)) { - throw new RuntimeException("State Implementations used by the " + - "Flink Dataflow Runner should implement the CheckpointableIF interface."); - } - - if (((CheckpointableIF) state).shouldPersist()) { - noOfElements++; - } - } - return noOfElements; - } - - private final class FlinkInMemoryValue implements ValueState, CheckpointableIF { - - private final ByteString stateKey; - private final Coder elemCoder; - - private T value = null; - - public FlinkInMemoryValue(ByteString stateKey, Coder elemCoder) { - this.stateKey = stateKey; - this.elemCoder = elemCoder; - } - - @Override - public void clear() { - value = null; - } - - @Override - public void write(T input) { - this.value = input; - } - - @Override - public T read() { - return value; - } - - @Override - public ValueState readLater() { - // Ignore - return this; - } - - @Override - public boolean shouldPersist() { - return value != null; - } - - @Override - public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException { - if (value != null) { - // serialize the coder. 
- byte[] coder = InstantiationUtil.serializeObject(elemCoder); - - // encode the value into a ByteString - ByteString.Output stream = ByteString.newOutput(); - elemCoder.encode(value, stream, Coder.Context.OUTER); - ByteString data = stream.toByteString(); - - checkpointBuilder.addValueBuilder() - .setTag(stateKey) - .setData(coder) - .setData(data); - } - } - - public void restoreState(StateCheckpointReader checkpointReader) throws IOException { - ByteString valueContent = checkpointReader.getData(); - T outValue = elemCoder.decode(new ByteArrayInputStream(valueContent.toByteArray()), Coder.Context.OUTER); - write(outValue); - } - } - - private final class FlinkWatermarkHoldStateImpl - implements WatermarkHoldState, CheckpointableIF { - - private final ByteString stateKey; - - private Instant minimumHold = null; - - private OutputTimeFn outputTimeFn; - - public FlinkWatermarkHoldStateImpl(ByteString stateKey, OutputTimeFn outputTimeFn) { - this.stateKey = stateKey; - this.outputTimeFn = outputTimeFn; - } - - @Override - public void clear() { - // Even though we're clearing we can't remove this from the in-memory state map, since - // other users may already have a handle on this WatermarkBagInternal. 
- minimumHold = null; - watermarkHoldAccessor = null; - } - - @Override - public void add(Instant watermarkHold) { - if (minimumHold == null || minimumHold.isAfter(watermarkHold)) { - watermarkHoldAccessor = watermarkHold; - minimumHold = watermarkHold; - } - } - - @Override - public ReadableState isEmpty() { - return new ReadableState() { - @Override - public Boolean read() { - return minimumHold == null; - } - - @Override - public ReadableState readLater() { - // Ignore - return this; - } - }; - } - - @Override - public OutputTimeFn getOutputTimeFn() { - return outputTimeFn; - } - - @Override - public Instant read() { - return minimumHold; - } - - @Override - public WatermarkHoldState readLater() { - // Ignore - return this; - } - - @Override - public String toString() { - return Objects.toString(minimumHold); - } - - @Override - public boolean shouldPersist() { - return minimumHold != null; - } - - @Override - public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException { - if (minimumHold != null) { - checkpointBuilder.addWatermarkHoldsBuilder() - .setTag(stateKey) - .setTimestamp(minimumHold); - } - } - - public void restoreState(StateCheckpointReader checkpointReader) throws IOException { - Instant watermark = checkpointReader.getTimestamp(); - add(watermark); - } - } - - - private static CombineWithContext.KeyedCombineFnWithContext withContext( - final Combine.KeyedCombineFn combineFn) { - return new CombineWithContext.KeyedCombineFnWithContext() { - @Override - public AccumT createAccumulator(K key, CombineWithContext.Context c) { - return combineFn.createAccumulator(key); - } - - @Override - public AccumT addInput(K key, AccumT accumulator, InputT value, CombineWithContext.Context c) { - return combineFn.addInput(key, accumulator, value); - } - - @Override - public AccumT mergeAccumulators(K key, Iterable accumulators, CombineWithContext.Context c) { - return combineFn.mergeAccumulators(key, accumulators); - } - - @Override - public 
OutputT extractOutput(K key, AccumT accumulator, CombineWithContext.Context c) { - return combineFn.extractOutput(key, accumulator); - } - }; - } - - private static CombineWithContext.KeyedCombineFnWithContext withKeyAndContext( - final Combine.CombineFn combineFn) { - return new CombineWithContext.KeyedCombineFnWithContext() { - @Override - public AccumT createAccumulator(K key, CombineWithContext.Context c) { - return combineFn.createAccumulator(); - } - - @Override - public AccumT addInput(K key, AccumT accumulator, InputT value, CombineWithContext.Context c) { - return combineFn.addInput(accumulator, value); - } - - @Override - public AccumT mergeAccumulators(K key, Iterable accumulators, CombineWithContext.Context c) { - return combineFn.mergeAccumulators(accumulators); - } - - @Override - public OutputT extractOutput(K key, AccumT accumulator, CombineWithContext.Context c) { - return combineFn.extractOutput(accumulator); - } - }; - } - - private final class FlinkInMemoryKeyedCombiningValue - implements AccumulatorCombiningState, CheckpointableIF { - - private final ByteString stateKey; - private final CombineWithContext.KeyedCombineFnWithContext combineFn; - private final Coder accumCoder; - private final CombineWithContext.Context context; - - private AccumT accum = null; - private boolean isClear = true; - - private FlinkInMemoryKeyedCombiningValue(ByteString stateKey, - Combine.CombineFn combineFn, - Coder accumCoder, - final StateContext stateContext) { - this(stateKey, withKeyAndContext(combineFn), accumCoder, stateContext); - } - - - private FlinkInMemoryKeyedCombiningValue(ByteString stateKey, - Combine.KeyedCombineFn combineFn, - Coder accumCoder, - final StateContext stateContext) { - this(stateKey, withContext(combineFn), accumCoder, stateContext); - } - - private FlinkInMemoryKeyedCombiningValue(ByteString stateKey, - CombineWithContext.KeyedCombineFnWithContext combineFn, - Coder accumCoder, - final StateContext stateContext) { - 
checkNotNull(combineFn); - checkNotNull(accumCoder); - - this.stateKey = stateKey; - this.combineFn = combineFn; - this.accumCoder = accumCoder; - this.context = new CombineWithContext.Context() { - @Override - public PipelineOptions getPipelineOptions() { - return stateContext.getPipelineOptions(); - } - - @Override - public T sideInput(PCollectionView view) { - return stateContext.sideInput(view); - } - }; - accum = combineFn.createAccumulator(key, context); - } - - @Override - public void clear() { - accum = combineFn.createAccumulator(key, context); - isClear = true; - } - - @Override - public void add(InputT input) { - isClear = false; - accum = combineFn.addInput(key, accum, input, context); - } - - @Override - public AccumT getAccum() { - return accum; - } - - @Override - public ReadableState isEmpty() { - return new ReadableState() { - @Override - public ReadableState readLater() { - // Ignore - return this; - } - - @Override - public Boolean read() { - return isClear; - } - }; - } - - @Override - public void addAccum(AccumT accum) { - isClear = false; - this.accum = combineFn.mergeAccumulators(key, Arrays.asList(this.accum, accum), context); - } - - @Override - public AccumT mergeAccumulators(Iterable accumulators) { - return combineFn.mergeAccumulators(key, accumulators, context); - } - - @Override - public OutputT read() { - return combineFn.extractOutput(key, accum, context); - } - - @Override - public AccumulatorCombiningState readLater() { - // Ignore - return this; - } - - @Override - public boolean shouldPersist() { - return !isClear; - } - - @Override - public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException { - if (!isClear) { - // serialize the coder. - byte[] coder = InstantiationUtil.serializeObject(accumCoder); - - // serialize the combiner. 
- byte[] combiner = InstantiationUtil.serializeObject(combineFn); - - // encode the accumulator into a ByteString - ByteString.Output stream = ByteString.newOutput(); - accumCoder.encode(accum, stream, Coder.Context.OUTER); - ByteString data = stream.toByteString(); - - // put the flag that the next serialized element is an accumulator - checkpointBuilder.addAccumulatorBuilder() - .setTag(stateKey) - .setData(coder) - .setData(combiner) - .setData(data); - } - } - - public void restoreState(StateCheckpointReader checkpointReader) throws IOException { - ByteString valueContent = checkpointReader.getData(); - AccumT accum = this.accumCoder.decode(new ByteArrayInputStream(valueContent.toByteArray()), Coder.Context.OUTER); - addAccum(accum); - } - } - - private static final class FlinkInMemoryBag implements BagState, CheckpointableIF { - private final List contents = new ArrayList<>(); - - private final ByteString stateKey; - private final Coder elemCoder; - - public FlinkInMemoryBag(ByteString stateKey, Coder elemCoder) { - this.stateKey = stateKey; - this.elemCoder = elemCoder; - } - - @Override - public void clear() { - contents.clear(); - } - - @Override - public Iterable read() { - return contents; - } - - @Override - public BagState readLater() { - // Ignore - return this; - } - - @Override - public void add(T input) { - contents.add(input); - } - - @Override - public ReadableState isEmpty() { - return new ReadableState() { - @Override - public ReadableState readLater() { - // Ignore - return this; - } - - @Override - public Boolean read() { - return contents.isEmpty(); - } - }; - } - - @Override - public boolean shouldPersist() { - return !contents.isEmpty(); - } - - @Override - public void persistState(StateCheckpointWriter checkpointBuilder) throws IOException { - if (!contents.isEmpty()) { - // serialize the coder. 
- byte[] coder = InstantiationUtil.serializeObject(elemCoder); - - checkpointBuilder.addListUpdatesBuilder() - .setTag(stateKey) - .setData(coder) - .writeInt(contents.size()); - - for (T item : contents) { - // encode the element - ByteString.Output stream = ByteString.newOutput(); - elemCoder.encode(item, stream, Coder.Context.OUTER); - ByteString data = stream.toByteString(); - - // add the data to the checkpoint. - checkpointBuilder.setData(data); - } - } - } - - public void restoreState(StateCheckpointReader checkpointReader) throws IOException { - int noOfValues = checkpointReader.getInt(); - for (int j = 0; j < noOfValues; j++) { - ByteString valueContent = checkpointReader.getData(); - T outValue = elemCoder.decode(new ByteArrayInputStream(valueContent.toByteArray()), Coder.Context.OUTER); - add(outValue); - } - } - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointReader.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointReader.java deleted file mode 100644 index 5a843ab28d56a..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointReader.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import org.apache.beam.runners.flink.translation.types.CoderTypeSerializer; - -import com.google.protobuf.ByteString; - -import org.apache.flink.core.memory.DataInputView; -import org.joda.time.Instant; - -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -public class StateCheckpointReader { - - private final DataInputView input; - - public StateCheckpointReader(DataInputView in) { - this.input = in; - } - - public ByteString getTag() throws IOException { - return ByteString.copyFrom(readRawData()); - } - - public String getTagToString() throws IOException { - return input.readUTF(); - } - - public ByteString getData() throws IOException { - return ByteString.copyFrom(readRawData()); - } - - public int getInt() throws IOException { - validate(); - return input.readInt(); - } - - public byte getByte() throws IOException { - validate(); - return input.readByte(); - } - - public Instant getTimestamp() throws IOException { - validate(); - Long watermarkMillis = input.readLong(); - return new Instant(TimeUnit.MICROSECONDS.toMillis(watermarkMillis)); - } - - public K deserializeKey(CoderTypeSerializer keySerializer) throws IOException { - return deserializeObject(keySerializer); - } - - public T deserializeObject(CoderTypeSerializer objectSerializer) throws IOException { - return objectSerializer.deserialize(input); - } - - ///////// Helper Methods /////// - - private byte[] readRawData() throws IOException { - validate(); - int size = input.readInt(); - 
- byte[] serData = new byte[size]; - int bytesRead = input.read(serData); - if (bytesRead != size) { - throw new RuntimeException("Error while deserializing checkpoint. Not enough bytes in the input stream."); - } - return serData; - } - - private void validate() { - if (this.input == null) { - throw new RuntimeException("StateBackend not initialized yet."); - } - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointUtils.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointUtils.java deleted file mode 100644 index 4fbd6f098735f..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointUtils.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import org.apache.beam.runners.flink.translation.types.CoderTypeSerializer; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import org.apache.beam.sdk.util.TimeDomain; -import org.apache.beam.sdk.util.TimerInternals; -import org.apache.beam.sdk.util.state.StateNamespace; -import org.apache.beam.sdk.util.state.StateNamespaces; - -import org.joda.time.Instant; - -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -public class StateCheckpointUtils { - - public static void encodeState(Map> perKeyStateInternals, - StateCheckpointWriter writer, Coder keyCoder) throws IOException { - CoderTypeSerializer keySerializer = new CoderTypeSerializer<>(keyCoder); - - int noOfKeys = perKeyStateInternals.size(); - writer.writeInt(noOfKeys); - for (Map.Entry> keyStatePair : perKeyStateInternals.entrySet()) { - K key = keyStatePair.getKey(); - FlinkStateInternals state = keyStatePair.getValue(); - - // encode the key - writer.serializeKey(key, keySerializer); - - // write the associated state - state.persistState(writer); - } - } - - public static Map> decodeState( - StateCheckpointReader reader, - OutputTimeFn outputTimeFn, - Coder keyCoder, - Coder windowCoder, - ClassLoader classLoader) throws IOException, ClassNotFoundException { - - int noOfKeys = reader.getInt(); - Map> perKeyStateInternals = new HashMap<>(noOfKeys); - perKeyStateInternals.clear(); - - CoderTypeSerializer keySerializer = new CoderTypeSerializer<>(keyCoder); - for (int i = 0; i < noOfKeys; i++) { - - // decode the key. - K key = reader.deserializeKey(keySerializer); - - //decode the state associated to the key. 
- FlinkStateInternals stateForKey = - new FlinkStateInternals<>(key, keyCoder, windowCoder, outputTimeFn); - stateForKey.restoreState(reader, classLoader); - perKeyStateInternals.put(key, stateForKey); - } - return perKeyStateInternals; - } - - ////////////// Encoding/Decoding the Timers //////////////// - - - public static void encodeTimers(Map> allTimers, - StateCheckpointWriter writer, - Coder keyCoder) throws IOException { - CoderTypeSerializer keySerializer = new CoderTypeSerializer<>(keyCoder); - - int noOfKeys = allTimers.size(); - writer.writeInt(noOfKeys); - for (Map.Entry> timersPerKey : allTimers.entrySet()) { - K key = timersPerKey.getKey(); - - // encode the key - writer.serializeKey(key, keySerializer); - - // write the associated timers - Set timers = timersPerKey.getValue(); - encodeTimerDataForKey(writer, timers); - } - } - - public static Map> decodeTimers( - StateCheckpointReader reader, - Coder windowCoder, - Coder keyCoder) throws IOException { - - int noOfKeys = reader.getInt(); - Map> activeTimers = new HashMap<>(noOfKeys); - activeTimers.clear(); - - CoderTypeSerializer keySerializer = new CoderTypeSerializer<>(keyCoder); - for (int i = 0; i < noOfKeys; i++) { - - // decode the key. - K key = reader.deserializeKey(keySerializer); - - // decode the associated timers. 
- Set timers = decodeTimerDataForKey(reader, windowCoder); - activeTimers.put(key, timers); - } - return activeTimers; - } - - private static void encodeTimerDataForKey(StateCheckpointWriter writer, Set timers) throws IOException { - // encode timers - writer.writeInt(timers.size()); - for (TimerInternals.TimerData timer : timers) { - String stringKey = timer.getNamespace().stringKey(); - - writer.setTag(stringKey); - writer.setTimestamp(timer.getTimestamp()); - writer.writeInt(timer.getDomain().ordinal()); - } - } - - private static Set decodeTimerDataForKey( - StateCheckpointReader reader, Coder windowCoder) throws IOException { - - // decode the timers: first their number and then the content itself. - int noOfTimers = reader.getInt(); - Set timers = new HashSet<>(noOfTimers); - for (int i = 0; i < noOfTimers; i++) { - String stringKey = reader.getTagToString(); - Instant instant = reader.getTimestamp(); - TimeDomain domain = TimeDomain.values()[reader.getInt()]; - - StateNamespace namespace = StateNamespaces.fromString(stringKey, windowCoder); - timers.add(TimerInternals.TimerData.of(namespace, instant, domain)); - } - return timers; - } -} diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointWriter.java b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointWriter.java deleted file mode 100644 index d09157c0806d7..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateCheckpointWriter.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import org.apache.beam.runners.flink.translation.types.CoderTypeSerializer; - -import com.google.protobuf.ByteString; - -import org.apache.flink.runtime.state.AbstractStateBackend; -import org.joda.time.Instant; - -import java.io.IOException; -import java.util.concurrent.TimeUnit; - -public class StateCheckpointWriter { - - private final AbstractStateBackend.CheckpointStateOutputView output; - - public static StateCheckpointWriter create(AbstractStateBackend.CheckpointStateOutputView output) { - return new StateCheckpointWriter(output); - } - - private StateCheckpointWriter(AbstractStateBackend.CheckpointStateOutputView output) { - this.output = output; - } - - ///////// Creating the serialized versions of the different types of state held by dataflow /////// - - public StateCheckpointWriter addValueBuilder() throws IOException { - validate(); - StateType.serialize(StateType.VALUE, this); - return this; - } - - public StateCheckpointWriter addWatermarkHoldsBuilder() throws IOException { - validate(); - StateType.serialize(StateType.WATERMARK, this); - return this; - } - - public StateCheckpointWriter addListUpdatesBuilder() throws IOException { - validate(); - StateType.serialize(StateType.LIST, this); - return this; - } - - public StateCheckpointWriter addAccumulatorBuilder() throws IOException { - 
validate(); - StateType.serialize(StateType.ACCUMULATOR, this); - return this; - } - - ///////// Setting the tag for a given state element /////// - - public StateCheckpointWriter setTag(ByteString stateKey) throws IOException { - return writeData(stateKey.toByteArray()); - } - - public StateCheckpointWriter setTag(String stateKey) throws IOException { - output.writeUTF(stateKey); - return this; - } - - - public StateCheckpointWriter serializeKey(K key, CoderTypeSerializer keySerializer) throws IOException { - return serializeObject(key, keySerializer); - } - - public StateCheckpointWriter serializeObject(T object, CoderTypeSerializer objectSerializer) throws IOException { - objectSerializer.serialize(object, output); - return this; - } - - ///////// Write the actual serialized data ////////// - - public StateCheckpointWriter setData(ByteString data) throws IOException { - return writeData(data.toByteArray()); - } - - public StateCheckpointWriter setData(byte[] data) throws IOException { - return writeData(data); - } - - public StateCheckpointWriter setTimestamp(Instant timestamp) throws IOException { - validate(); - output.writeLong(TimeUnit.MILLISECONDS.toMicros(timestamp.getMillis())); - return this; - } - - public StateCheckpointWriter writeInt(int number) throws IOException { - validate(); - output.writeInt(number); - return this; - } - - public StateCheckpointWriter writeByte(byte b) throws IOException { - validate(); - output.writeByte(b); - return this; - } - - ///////// Helper Methods /////// - - private StateCheckpointWriter writeData(byte[] data) throws IOException { - validate(); - output.writeInt(data.length); - output.write(data); - return this; - } - - private void validate() { - if (this.output == null) { - throw new RuntimeException("StateBackend not initialized yet."); - } - } -} \ No newline at end of file diff --git a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateType.java 
b/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateType.java deleted file mode 100644 index 58497730dd279..0000000000000 --- a/runners/flink/runner/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/state/StateType.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.translation.wrappers.streaming.state; - -import java.io.IOException; - -/** - * The available types of state, as provided by the Beam SDK. This class is used for serialization/deserialization - * purposes. 
- * */ -public enum StateType { - - VALUE(0), - - WATERMARK(1), - - LIST(2), - - ACCUMULATOR(3); - - private final int numVal; - - StateType(int value) { - this.numVal = value; - } - - public static void serialize(StateType type, StateCheckpointWriter output) throws IOException { - if (output == null) { - throw new IllegalArgumentException("Cannot write to a null output."); - } - - if(type.numVal < 0 || type.numVal > 3) { - throw new RuntimeException("Unknown State Type " + type + "."); - } - - output.writeByte((byte) type.numVal); - } - - public static StateType deserialize(StateCheckpointReader input) throws IOException { - if (input == null) { - throw new IllegalArgumentException("Cannot read from a null input."); - } - - int typeInt = (int) input.getByte(); - if(typeInt < 0 || typeInt > 3) { - throw new RuntimeException("Unknown State Type " + typeInt + "."); - } - - StateType resultType = null; - for(StateType st: values()) { - if(st.numVal == typeInt) { - resultType = st; - break; - } - } - return resultType; - } -} diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/EncodedValueComparatorTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/EncodedValueComparatorTest.java index 68ede89954cbd..10d6d9d357ff8 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/EncodedValueComparatorTest.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/EncodedValueComparatorTest.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.util.CoderUtils; - import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeutils.ComparatorTestBase; import org.apache.flink.api.common.typeutils.TypeComparator; diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/FlinkRunnerRegistrarTest.java 
b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/FlinkRunnerRegistrarTest.java index ff1025f3ded77..d9d174c8248e9 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/FlinkRunnerRegistrarTest.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/FlinkRunnerRegistrarTest.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; - import org.junit.Test; /** diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java index c24d91d9f4c23..32339dce5165f 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/PipelineOptionsTest.java @@ -21,8 +21,10 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import java.util.Collections; +import java.util.HashMap; import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkAbstractParDoWrapper; +import org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptions; @@ -31,20 +33,22 @@ import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingInternals; import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - import org.apache.commons.lang.SerializationUtils; -import org.apache.flink.util.Collector; +import 
org.apache.flink.api.common.ExecutionConfig; +import org.apache.flink.api.common.typeinfo.TypeHint; +import org.apache.flink.api.common.typeinfo.TypeInformation; +import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; +import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.joda.time.Instant; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; -import org.mockito.Mockito; /** - * Tests the serialization and deserialization of PipelineOptions. + * Tests for serialization and deserialization of {@link PipelineOptions} in {@link DoFnOperator}. */ public class PipelineOptionsTest { @@ -58,7 +62,7 @@ public interface MyOptions extends FlinkPipelineOptions { private static MyOptions options; private static SerializedPipelineOptions serializedOptions; - private final static String[] args = new String[]{"--testOption=nothing"}; + private static final String[] args = new String[]{"--testOption=nothing"}; @BeforeClass public static void beforeTest() { @@ -74,7 +78,9 @@ public void testDeserialization() { @Test public void testCaching() { - PipelineOptions deserializedOptions = serializedOptions.getPipelineOptions().as(PipelineOptions.class); + PipelineOptions deserializedOptions = + serializedOptions.getPipelineOptions().as(PipelineOptions.class); + assertNotNull(deserializedOptions); assertTrue(deserializedOptions == serializedOptions.getPipelineOptions()); assertTrue(deserializedOptions == serializedOptions.getPipelineOptions()); @@ -87,29 +93,57 @@ public void testNonNull() { } @Test(expected = Exception.class) - public void ParDoBaseClassPipelineOptionsNullTest() { - new TestParDoWrapper(null, WindowingStrategy.globalDefault(), new TestDoFn()); + public void parDoBaseClassPipelineOptionsNullTest() { + DoFnOperator doFnOperator = new DoFnOperator<>( + new TestDoFn(), + TypeInformation.of(new TypeHint>() {}), + new TupleTag<>("main-output"), + Collections.>emptyList(), + new 
DoFnOperator.DefaultOutputManagerFactory<>(), + WindowingStrategy.globalDefault(), + new HashMap>(), + Collections.>emptyList(), + null); + } /** - * Tests that PipelineOptions are present after serialization + * Tests that PipelineOptions are present after serialization. */ @Test - public void ParDoBaseClassPipelineOptionsSerializationTest() throws Exception { - TestParDoWrapper wrapper = - new TestParDoWrapper(options, WindowingStrategy.globalDefault(), new TestDoFn()); + public void parDoBaseClassPipelineOptionsSerializationTest() throws Exception { + + DoFnOperator doFnOperator = new DoFnOperator<>( + new TestDoFn(), + TypeInformation.of(new TypeHint>() {}), + new TupleTag<>("main-output"), + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory<>(), + WindowingStrategy.globalDefault(), + new HashMap>(), + Collections.>emptyList(), + options); + + final byte[] serialized = SerializationUtils.serialize(doFnOperator); + + @SuppressWarnings("unchecked") + DoFnOperator deserialized = + (DoFnOperator) SerializationUtils.deserialize(serialized); - final byte[] serialized = SerializationUtils.serialize(wrapper); - TestParDoWrapper deserialize = (TestParDoWrapper) SerializationUtils.deserialize(serialized); + OneInputStreamOperatorTestHarness, Object> testHarness = + new OneInputStreamOperatorTestHarness<>(deserialized, new ExecutionConfig()); + + testHarness.open(); // execute once to access options - deserialize.flatMap( + testHarness.processElement(new StreamRecord<>( WindowedValue.of( new Object(), Instant.now(), GlobalWindow.INSTANCE, - PaneInfo.NO_FIRING), - Mockito.mock(Collector.class)); + PaneInfo.NO_FIRING))); + + testHarness.close(); } @@ -124,35 +158,4 @@ public void processElement(ProcessContext c) throws Exception { c.getPipelineOptions().as(MyOptions.class).getTestOption()); } } - - private static class TestParDoWrapper extends FlinkAbstractParDoWrapper { - public TestParDoWrapper(PipelineOptions options, WindowingStrategy 
windowingStrategy, OldDoFn doFn) { - super(options, windowingStrategy, doFn); - } - - - @Override - public WindowingInternals windowingInternalsHelper( - WindowedValue inElement, - Collector outCollector) { - return null; - } - - @Override - public void sideOutputWithTimestampHelper( - WindowedValue inElement, - Object output, - Instant timestamp, - Collector outCollector, - TupleTag tag) {} - - @Override - public void outputWithTimestampHelper( - WindowedValue inElement, - Object output, - Instant timestamp, - Collector outCollector) {} - } - - } diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java index 516c7bae75e52..44c90178fefcf 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceITCase.java @@ -17,20 +17,17 @@ */ package org.apache.beam.runners.flink; +import com.google.common.base.Joiner; +import java.io.File; +import java.net.URI; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.Joiner; - import org.apache.flink.test.util.JavaProgramTestBase; -import java.io.File; -import java.net.URI; - /** * Reads from a bounded source in batch execution. 
*/ diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java index ea58d0dd86570..79b78826ede9e 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/ReadSourceStreamingITCase.java @@ -17,14 +17,12 @@ */ package org.apache.beam.runners.flink; +import com.google.common.base.Joiner; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; - -import com.google.common.base.Joiner; - import org.apache.flink.streaming.util.StreamingProgramTestBase; /** diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/WriteSinkITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/WriteSinkITCase.java index f1d9097b5924a..09881463f6c23 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/WriteSinkITCase.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/WriteSinkITCase.java @@ -20,6 +20,11 @@ import static org.junit.Assert.assertNotNull; +import com.google.common.base.Joiner; +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.net.URI; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; @@ -27,18 +32,10 @@ import org.apache.beam.sdk.io.Write; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Create; - -import com.google.common.base.Joiner; - import org.apache.flink.core.fs.FileSystem; import org.apache.flink.core.fs.Path; import org.apache.flink.test.util.JavaProgramTestBase; -import java.io.File; -import 
java.io.IOException; -import java.io.PrintWriter; -import java.net.URI; - /** * Tests the translation of custom Write.Bound sinks. */ diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java new file mode 100644 index 0000000000000..5f1b066d620b1 --- /dev/null +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/DoFnOperatorTest.java @@ -0,0 +1,325 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.flink.streaming; + +import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import static org.junit.Assert.assertThat; + +import com.google.common.collect.ImmutableList; +import java.util.Collections; +import java.util.HashMap; +import javax.annotation.Nullable; +import org.apache.beam.runners.flink.FlinkPipelineOptions; +import org.apache.beam.runners.flink.translation.types.CoderTypeInformation; +import org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.PCollectionViewTesting; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.transforms.join.RawUnionValue; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.FixedWindows; +import org.apache.beam.sdk.transforms.windowing.IntervalWindow; +import org.apache.beam.sdk.transforms.windowing.PaneInfo; +import org.apache.beam.sdk.util.WindowedValue; +import org.apache.beam.sdk.util.WindowingStrategy; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.flink.shaded.com.google.common.base.Function; +import org.apache.flink.shaded.com.google.common.base.Predicate; +import org.apache.flink.shaded.com.google.common.collect.FluentIterable; +import org.apache.flink.shaded.com.google.common.collect.ImmutableMap; +import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; +import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; +import org.apache.flink.streaming.util.TwoInputStreamOperatorTestHarness; +import org.joda.time.Duration; +import org.joda.time.Instant; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** + * Tests for {@link DoFnOperator}. 
+ */ +@RunWith(JUnit4.class) +public class DoFnOperatorTest { + + // views and windows for testing side inputs + private static final long WINDOW_MSECS_1 = 100; + private static final long WINDOW_MSECS_2 = 500; + + private WindowingStrategy windowingStrategy1 = + WindowingStrategy.of(FixedWindows.of(new Duration(WINDOW_MSECS_1))); + + private PCollectionView> view1 = PCollectionViewTesting.testingView( + new TupleTag>>() {}, + new PCollectionViewTesting.IdentityViewFn(), + StringUtf8Coder.of(), + windowingStrategy1); + + private WindowingStrategy windowingStrategy2 = + WindowingStrategy.of(FixedWindows.of(new Duration(WINDOW_MSECS_2))); + + private PCollectionView> view2 = PCollectionViewTesting.testingView( + new TupleTag>>() {}, + new PCollectionViewTesting.IdentityViewFn(), + StringUtf8Coder.of(), + windowingStrategy2); + + @Test + @SuppressWarnings("unchecked") + public void testSingleOutput() throws Exception { + + WindowedValue.ValueOnlyWindowedValueCoder windowedValueCoder = + WindowedValue.getValueOnlyCoder(StringUtf8Coder.of()); + + CoderTypeInformation> coderTypeInfo = + new CoderTypeInformation<>(windowedValueCoder); + + TupleTag outputTag = new TupleTag<>("main-output"); + + DoFnOperator doFnOperator = new DoFnOperator<>( + new IdentityDoFn(), + coderTypeInfo, + outputTag, + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory(), + WindowingStrategy.globalDefault(), + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + PipelineOptionsFactory.as(FlinkPipelineOptions.class)); + + OneInputStreamOperatorTestHarness, String> testHarness = + new OneInputStreamOperatorTestHarness<>(doFnOperator); + + testHarness.open(); + + testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("Hello"))); + + assertThat( + this.stripStreamRecordFromWindowedValue(testHarness.getOutput()), + contains(WindowedValue.valueInGlobalWindow("Hello"))); + + testHarness.close(); + } + + @Test + 
@SuppressWarnings("unchecked") + public void testMultiOutputOutput() throws Exception { + + WindowedValue.ValueOnlyWindowedValueCoder windowedValueCoder = + WindowedValue.getValueOnlyCoder(StringUtf8Coder.of()); + + CoderTypeInformation> coderTypeInfo = + new CoderTypeInformation<>(windowedValueCoder); + + TupleTag mainOutput = new TupleTag<>("main-output"); + TupleTag sideOutput1 = new TupleTag<>("side-output-1"); + TupleTag sideOutput2 = new TupleTag<>("side-output-2"); + ImmutableMap, Integer> outputMapping = ImmutableMap., Integer>builder() + .put(mainOutput, 1) + .put(sideOutput1, 2) + .put(sideOutput2, 3) + .build(); + + DoFnOperator doFnOperator = new DoFnOperator<>( + new MultiOutputDoFn(sideOutput1, sideOutput2), + coderTypeInfo, + mainOutput, + ImmutableList.>of(sideOutput1, sideOutput2), + new DoFnOperator.MultiOutputOutputManagerFactory(outputMapping), + WindowingStrategy.globalDefault(), + new HashMap>(), /* side-input mapping */ + Collections.>emptyList(), /* side inputs */ + PipelineOptionsFactory.as(FlinkPipelineOptions.class)); + + OneInputStreamOperatorTestHarness, RawUnionValue> testHarness = + new OneInputStreamOperatorTestHarness<>(doFnOperator); + + testHarness.open(); + + testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("one"))); + testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("two"))); + testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("hello"))); + + assertThat( + this.stripStreamRecordFromRawUnion(testHarness.getOutput()), + contains( + new RawUnionValue(2, WindowedValue.valueInGlobalWindow("side: one")), + new RawUnionValue(3, WindowedValue.valueInGlobalWindow("side: two")), + new RawUnionValue(1, WindowedValue.valueInGlobalWindow("got: hello")), + new RawUnionValue(2, WindowedValue.valueInGlobalWindow("got: hello")), + new RawUnionValue(3, WindowedValue.valueInGlobalWindow("got: hello")))); + + testHarness.close(); + } + + /** + * For now, 
this test doesn't work because {@link TwoInputStreamOperatorTestHarness} is not + * sufficiently well equipped to handle more complex operators that require a state backend. + * We have to revisit this once we update to a newer version of Flink and also add some more + * tests that verify pushback behaviour and watermark hold behaviour. + * + *

    The behaviour that we would test here is also exercised by the + * {@link org.apache.beam.sdk.testing.RunnableOnService} tests, so the code is not untested. + */ + @Test + @Ignore + @SuppressWarnings("unchecked") + public void testSideInputs() throws Exception { + + WindowedValue.ValueOnlyWindowedValueCoder windowedValueCoder = + WindowedValue.getValueOnlyCoder(StringUtf8Coder.of()); + + CoderTypeInformation> coderTypeInfo = + new CoderTypeInformation<>(windowedValueCoder); + + TupleTag outputTag = new TupleTag<>("main-output"); + + ImmutableMap> sideInputMapping = + ImmutableMap.>builder() + .put(1, view1) + .put(2, view2) + .build(); + + DoFnOperator doFnOperator = new DoFnOperator<>( + new IdentityDoFn(), + coderTypeInfo, + outputTag, + Collections.>emptyList(), + new DoFnOperator.DefaultOutputManagerFactory(), + WindowingStrategy.globalDefault(), + sideInputMapping, /* side-input mapping */ + ImmutableList.>of(view1, view2), /* side inputs */ + PipelineOptionsFactory.as(FlinkPipelineOptions.class)); + + TwoInputStreamOperatorTestHarness, RawUnionValue, String> testHarness = + new TwoInputStreamOperatorTestHarness<>(doFnOperator); + + testHarness.open(); + + IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(100)); + + // push in some side-input elements + testHarness.processElement2( + new StreamRecord<>( + new RawUnionValue( + 1, + valuesInWindow(ImmutableList.of("hello", "ciao"), new Instant(0), firstWindow)))); + + testHarness.processElement2( + new StreamRecord<>( + new RawUnionValue( + 2, + valuesInWindow(ImmutableList.of("foo", "bar"), new Instant(0), firstWindow)))); + + // push in a regular elements + testHarness.processElement1(new StreamRecord<>(WindowedValue.valueInGlobalWindow("Hello"))); + + assertThat( + this.stripStreamRecordFromWindowedValue(testHarness.getOutput()), + contains(WindowedValue.valueInGlobalWindow("Hello"))); + + testHarness.close(); + } + + private Iterable> stripStreamRecordFromWindowedValue( + 
Iterable input) { + + return FluentIterable.from(input).filter(new Predicate() { + @Override + public boolean apply(@Nullable Object o) { + return o instanceof StreamRecord && ((StreamRecord) o).getValue() instanceof WindowedValue; + } + }).transform(new Function>() { + @Nullable + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public WindowedValue apply(@Nullable Object o) { + if (o instanceof StreamRecord && ((StreamRecord) o).getValue() instanceof WindowedValue) { + return (WindowedValue) ((StreamRecord) o).getValue(); + } + throw new RuntimeException("unreachable"); + } + }); + } + + private Iterable stripStreamRecordFromRawUnion(Iterable input) { + return FluentIterable.from(input).filter(new Predicate() { + @Override + public boolean apply(@Nullable Object o) { + return o instanceof StreamRecord && ((StreamRecord) o).getValue() instanceof RawUnionValue; + } + }).transform(new Function() { + @Nullable + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public RawUnionValue apply(@Nullable Object o) { + if (o instanceof StreamRecord && ((StreamRecord) o).getValue() instanceof RawUnionValue) { + return (RawUnionValue) ((StreamRecord) o).getValue(); + } + throw new RuntimeException("unreachable"); + } + }); + } + + private static class MultiOutputDoFn extends OldDoFn { + private TupleTag sideOutput1; + private TupleTag sideOutput2; + + public MultiOutputDoFn(TupleTag sideOutput1, TupleTag sideOutput2) { + this.sideOutput1 = sideOutput1; + this.sideOutput2 = sideOutput2; + } + + @Override + public void processElement(ProcessContext c) throws Exception { + if (c.element().equals("one")) { + c.sideOutput(sideOutput1, "side: one"); + } else if (c.element().equals("two")) { + c.sideOutput(sideOutput2, "side: two"); + } else { + c.output("got: " + c.element()); + c.sideOutput(sideOutput1, "got: " + c.element()); + c.sideOutput(sideOutput2, "got: " + c.element()); + } + } + } + + private static class IdentityDoFn extends OldDoFn { + @Override + 
public void processElement(OldDoFn.ProcessContext c) throws Exception { + c.output(c.element()); + } + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private WindowedValue> valuesInWindow( + Iterable values, Instant timestamp, BoundedWindow window) { + return (WindowedValue) WindowedValue.of(values, timestamp, window, PaneInfo.NO_FIRING); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private WindowedValue valueInWindow( + T value, Instant timestamp, BoundedWindow window) { + return WindowedValue.of(value, timestamp, window, PaneInfo.NO_FIRING); + } + + +} diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/FlinkStateInternalsTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/FlinkStateInternalsTest.java new file mode 100644 index 0000000000000..711ae0001f795 --- /dev/null +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/FlinkStateInternalsTest.java @@ -0,0 +1,389 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.beam.runners.flink.streaming; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertThat; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkStateInternals; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.coders.VarIntCoder; +import org.apache.beam.sdk.transforms.Sum; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.IntervalWindow; +import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.state.AccumulatorCombiningState; +import org.apache.beam.sdk.util.state.BagState; +import org.apache.beam.sdk.util.state.CombiningState; +import org.apache.beam.sdk.util.state.ReadableState; +import org.apache.beam.sdk.util.state.StateMerging; +import org.apache.beam.sdk.util.state.StateNamespace; +import org.apache.beam.sdk.util.state.StateNamespaceForTest; +import org.apache.beam.sdk.util.state.StateTag; +import org.apache.beam.sdk.util.state.StateTags; +import org.apache.beam.sdk.util.state.ValueState; +import org.apache.beam.sdk.util.state.WatermarkHoldState; +import org.apache.flink.api.common.ExecutionConfig; +import org.apache.flink.api.java.typeutils.GenericTypeInfo; +import org.apache.flink.runtime.operators.testutils.DummyEnvironment; +import org.apache.flink.runtime.state.memory.MemoryStateBackend; +import org.hamcrest.Matchers; +import org.joda.time.Instant; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** + * Tests for {@link FlinkStateInternals}. This is based on the tests for {@code InMemoryStateInternals}. 
+ */ +@RunWith(JUnit4.class) +public class FlinkStateInternalsTest { + private static final BoundedWindow WINDOW_1 = new IntervalWindow(new Instant(0), new Instant(10)); + private static final StateNamespace NAMESPACE_1 = new StateNamespaceForTest("ns1"); + private static final StateNamespace NAMESPACE_2 = new StateNamespaceForTest("ns2"); + private static final StateNamespace NAMESPACE_3 = new StateNamespaceForTest("ns3"); + + private static final StateTag> STRING_VALUE_ADDR = + StateTags.value("stringValue", StringUtf8Coder.of()); + private static final StateTag> + SUM_INTEGER_ADDR = StateTags.combiningValueFromInputInternal( + "sumInteger", VarIntCoder.of(), new Sum.SumIntegerFn()); + private static final StateTag> STRING_BAG_ADDR = + StateTags.bag("stringBag", StringUtf8Coder.of()); + private static final StateTag> + WATERMARK_EARLIEST_ADDR = + StateTags.watermarkStateInternal("watermark", OutputTimeFns.outputAtEarliestInputTimestamp()); + private static final StateTag> + WATERMARK_LATEST_ADDR = + StateTags.watermarkStateInternal("watermark", OutputTimeFns.outputAtLatestInputTimestamp()); + private static final StateTag> WATERMARK_EOW_ADDR = + StateTags.watermarkStateInternal("watermark", OutputTimeFns.outputAtEndOfWindow()); + + FlinkStateInternals underTest; + + @Before + public void initStateInternals() { + MemoryStateBackend backend = new MemoryStateBackend(); + try { + backend.initializeForJob( + new DummyEnvironment("test", 1, 0), + "test_op", + new GenericTypeInfo<>(ByteBuffer.class).createSerializer(new ExecutionConfig())); + } catch (Exception e) { + throw new RuntimeException(e); + } + underTest = new FlinkStateInternals<>(backend, StringUtf8Coder.of()); + try { + backend.setCurrentKey( + ByteBuffer.wrap(CoderUtils.encodeToByteArray(StringUtf8Coder.of(), "Hello"))); + } catch (CoderException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testValue() throws Exception { + ValueState value = underTest.state(NAMESPACE_1, 
STRING_VALUE_ADDR); + + assertEquals(underTest.state(NAMESPACE_1, STRING_VALUE_ADDR), value); + assertNotEquals( + underTest.state(NAMESPACE_2, STRING_VALUE_ADDR), + value); + + assertThat(value.read(), Matchers.nullValue()); + value.write("hello"); + assertThat(value.read(), Matchers.equalTo("hello")); + value.write("world"); + assertThat(value.read(), Matchers.equalTo("world")); + + value.clear(); + assertThat(value.read(), Matchers.nullValue()); + assertEquals(underTest.state(NAMESPACE_1, STRING_VALUE_ADDR), value); + + } + + @Test + public void testBag() throws Exception { + BagState value = underTest.state(NAMESPACE_1, STRING_BAG_ADDR); + + assertEquals(value, underTest.state(NAMESPACE_1, STRING_BAG_ADDR)); + assertFalse(value.equals(underTest.state(NAMESPACE_2, STRING_BAG_ADDR))); + + assertThat(value.read(), Matchers.emptyIterable()); + value.add("hello"); + assertThat(value.read(), Matchers.containsInAnyOrder("hello")); + + value.add("world"); + assertThat(value.read(), Matchers.containsInAnyOrder("hello", "world")); + + value.clear(); + assertThat(value.read(), Matchers.emptyIterable()); + assertEquals(underTest.state(NAMESPACE_1, STRING_BAG_ADDR), value); + + } + + @Test + public void testBagIsEmpty() throws Exception { + BagState value = underTest.state(NAMESPACE_1, STRING_BAG_ADDR); + + assertThat(value.isEmpty().read(), Matchers.is(true)); + ReadableState readFuture = value.isEmpty(); + value.add("hello"); + assertThat(readFuture.read(), Matchers.is(false)); + + value.clear(); + assertThat(readFuture.read(), Matchers.is(true)); + } + + @Test + public void testMergeBagIntoSource() throws Exception { + BagState bag1 = underTest.state(NAMESPACE_1, STRING_BAG_ADDR); + BagState bag2 = underTest.state(NAMESPACE_2, STRING_BAG_ADDR); + + bag1.add("Hello"); + bag2.add("World"); + bag1.add("!"); + + StateMerging.mergeBags(Arrays.asList(bag1, bag2), bag1); + + // Reading the merged bag gets both the contents + assertThat(bag1.read(), 
Matchers.containsInAnyOrder("Hello", "World", "!")); + assertThat(bag2.read(), Matchers.emptyIterable()); + } + + @Test + public void testMergeBagIntoNewNamespace() throws Exception { + BagState bag1 = underTest.state(NAMESPACE_1, STRING_BAG_ADDR); + BagState bag2 = underTest.state(NAMESPACE_2, STRING_BAG_ADDR); + BagState bag3 = underTest.state(NAMESPACE_3, STRING_BAG_ADDR); + + bag1.add("Hello"); + bag2.add("World"); + bag1.add("!"); + + StateMerging.mergeBags(Arrays.asList(bag1, bag2, bag3), bag3); + + // Reading the merged bag gets both the contents + assertThat(bag3.read(), Matchers.containsInAnyOrder("Hello", "World", "!")); + assertThat(bag1.read(), Matchers.emptyIterable()); + assertThat(bag2.read(), Matchers.emptyIterable()); + } + + @Test + public void testCombiningValue() throws Exception { + CombiningState value = underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR); + + // State instances are cached, but depend on the namespace. + assertEquals(value, underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR)); + assertFalse(value.equals(underTest.state(NAMESPACE_2, SUM_INTEGER_ADDR))); + + assertThat(value.read(), Matchers.equalTo(0)); + value.add(2); + assertThat(value.read(), Matchers.equalTo(2)); + + value.add(3); + assertThat(value.read(), Matchers.equalTo(5)); + + value.clear(); + assertThat(value.read(), Matchers.equalTo(0)); + assertEquals(underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR), value); + } + + @Test + public void testCombiningIsEmpty() throws Exception { + CombiningState value = underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR); + + assertThat(value.isEmpty().read(), Matchers.is(true)); + ReadableState readFuture = value.isEmpty(); + value.add(5); + assertThat(readFuture.read(), Matchers.is(false)); + + value.clear(); + assertThat(readFuture.read(), Matchers.is(true)); + } + + @Test + public void testMergeCombiningValueIntoSource() throws Exception { + AccumulatorCombiningState value1 = + underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR); + 
AccumulatorCombiningState value2 = + underTest.state(NAMESPACE_2, SUM_INTEGER_ADDR); + + value1.add(5); + value2.add(10); + value1.add(6); + + assertThat(value1.read(), Matchers.equalTo(11)); + assertThat(value2.read(), Matchers.equalTo(10)); + + // Merging clears the old values and updates the result value. + StateMerging.mergeCombiningValues(Arrays.asList(value1, value2), value1); + + assertThat(value1.read(), Matchers.equalTo(21)); + assertThat(value2.read(), Matchers.equalTo(0)); + } + + @Test + public void testMergeCombiningValueIntoNewNamespace() throws Exception { + AccumulatorCombiningState value1 = + underTest.state(NAMESPACE_1, SUM_INTEGER_ADDR); + AccumulatorCombiningState value2 = + underTest.state(NAMESPACE_2, SUM_INTEGER_ADDR); + AccumulatorCombiningState value3 = + underTest.state(NAMESPACE_3, SUM_INTEGER_ADDR); + + value1.add(5); + value2.add(10); + value1.add(6); + + StateMerging.mergeCombiningValues(Arrays.asList(value1, value2), value3); + + // Merging clears the old values and updates the result value. + assertThat(value1.read(), Matchers.equalTo(0)); + assertThat(value2.read(), Matchers.equalTo(0)); + assertThat(value3.read(), Matchers.equalTo(21)); + } + + @Test + public void testWatermarkEarliestState() throws Exception { + WatermarkHoldState value = + underTest.state(NAMESPACE_1, WATERMARK_EARLIEST_ADDR); + + // State instances are cached, but depend on the namespace. 
+ assertEquals(value, underTest.state(NAMESPACE_1, WATERMARK_EARLIEST_ADDR)); + assertFalse(value.equals(underTest.state(NAMESPACE_2, WATERMARK_EARLIEST_ADDR))); + + assertThat(value.read(), Matchers.nullValue()); + value.add(new Instant(2000)); + assertThat(value.read(), Matchers.equalTo(new Instant(2000))); + + value.add(new Instant(3000)); + assertThat(value.read(), Matchers.equalTo(new Instant(2000))); + + value.add(new Instant(1000)); + assertThat(value.read(), Matchers.equalTo(new Instant(1000))); + + value.clear(); + assertThat(value.read(), Matchers.equalTo(null)); + assertEquals(underTest.state(NAMESPACE_1, WATERMARK_EARLIEST_ADDR), value); + } + + @Test + public void testWatermarkLatestState() throws Exception { + WatermarkHoldState value = + underTest.state(NAMESPACE_1, WATERMARK_LATEST_ADDR); + + // State instances are cached, but depend on the namespace. + assertEquals(value, underTest.state(NAMESPACE_1, WATERMARK_LATEST_ADDR)); + assertFalse(value.equals(underTest.state(NAMESPACE_2, WATERMARK_LATEST_ADDR))); + + assertThat(value.read(), Matchers.nullValue()); + value.add(new Instant(2000)); + assertThat(value.read(), Matchers.equalTo(new Instant(2000))); + + value.add(new Instant(3000)); + assertThat(value.read(), Matchers.equalTo(new Instant(3000))); + + value.add(new Instant(1000)); + assertThat(value.read(), Matchers.equalTo(new Instant(3000))); + + value.clear(); + assertThat(value.read(), Matchers.equalTo(null)); + assertEquals(underTest.state(NAMESPACE_1, WATERMARK_LATEST_ADDR), value); + } + + @Test + public void testWatermarkEndOfWindowState() throws Exception { + WatermarkHoldState value = underTest.state(NAMESPACE_1, WATERMARK_EOW_ADDR); + + // State instances are cached, but depend on the namespace. 
+ assertEquals(value, underTest.state(NAMESPACE_1, WATERMARK_EOW_ADDR)); + assertFalse(value.equals(underTest.state(NAMESPACE_2, WATERMARK_EOW_ADDR))); + + assertThat(value.read(), Matchers.nullValue()); + value.add(new Instant(2000)); + assertThat(value.read(), Matchers.equalTo(new Instant(2000))); + + value.clear(); + assertThat(value.read(), Matchers.equalTo(null)); + assertEquals(underTest.state(NAMESPACE_1, WATERMARK_EOW_ADDR), value); + } + + @Test + public void testWatermarkStateIsEmpty() throws Exception { + WatermarkHoldState value = + underTest.state(NAMESPACE_1, WATERMARK_EARLIEST_ADDR); + + assertThat(value.isEmpty().read(), Matchers.is(true)); + ReadableState readFuture = value.isEmpty(); + value.add(new Instant(1000)); + assertThat(readFuture.read(), Matchers.is(false)); + + value.clear(); + assertThat(readFuture.read(), Matchers.is(true)); + } + + @Test + public void testMergeEarliestWatermarkIntoSource() throws Exception { + WatermarkHoldState value1 = + underTest.state(NAMESPACE_1, WATERMARK_EARLIEST_ADDR); + WatermarkHoldState value2 = + underTest.state(NAMESPACE_2, WATERMARK_EARLIEST_ADDR); + + value1.add(new Instant(3000)); + value2.add(new Instant(5000)); + value1.add(new Instant(4000)); + value2.add(new Instant(2000)); + + // Merging clears the old values and updates the merged value. 
+ StateMerging.mergeWatermarks(Arrays.asList(value1, value2), value1, WINDOW_1); + + assertThat(value1.read(), Matchers.equalTo(new Instant(2000))); + assertThat(value2.read(), Matchers.equalTo(null)); + } + + @Test + public void testMergeLatestWatermarkIntoSource() throws Exception { + WatermarkHoldState value1 = + underTest.state(NAMESPACE_1, WATERMARK_LATEST_ADDR); + WatermarkHoldState value2 = + underTest.state(NAMESPACE_2, WATERMARK_LATEST_ADDR); + WatermarkHoldState value3 = + underTest.state(NAMESPACE_3, WATERMARK_LATEST_ADDR); + + value1.add(new Instant(3000)); + value2.add(new Instant(5000)); + value1.add(new Instant(4000)); + value2.add(new Instant(2000)); + + // Merging clears the old values and updates the result value. + StateMerging.mergeWatermarks(Arrays.asList(value1, value2), value3, WINDOW_1); + + // Merging clears the old values and updates the result value. + assertThat(value3.read(), Matchers.equalTo(new Instant(5000))); + assertThat(value1.read(), Matchers.equalTo(null)); + assertThat(value2.read(), Matchers.equalTo(null)); + } +} diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupAlsoByWindowTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupAlsoByWindowTest.java deleted file mode 100644 index 2d83fb66fb1d3..0000000000000 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupAlsoByWindowTest.java +++ /dev/null @@ -1,523 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.streaming; - -import org.apache.beam.runners.flink.FlinkTestPipeline; -import org.apache.beam.runners.flink.translation.wrappers.streaming.FlinkGroupAlsoByWindowWrapper; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.coders.VarIntCoder; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.Sum; -import org.apache.beam.sdk.transforms.windowing.AfterPane; -import org.apache.beam.sdk.transforms.windowing.AfterWatermark; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.FixedWindows; -import org.apache.beam.sdk.transforms.windowing.IntervalWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.transforms.windowing.Repeatedly; -import org.apache.beam.sdk.transforms.windowing.Sessions; -import org.apache.beam.sdk.transforms.windowing.SlidingWindows; -import org.apache.beam.sdk.transforms.windowing.WindowFn; -import org.apache.beam.sdk.util.UserCodeException; -import org.apache.beam.sdk.util.WindowedValue; -import org.apache.beam.sdk.util.WindowingStrategy; -import org.apache.beam.sdk.values.KV; - -import org.apache.flink.streaming.api.watermark.Watermark; -import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; -import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; -import 
org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase; -import org.apache.flink.streaming.util.TestHarnessUtil; -import org.joda.time.Duration; -import org.joda.time.Instant; -import org.junit.Test; - -import java.util.Collection; -import java.util.Comparator; -import java.util.concurrent.ConcurrentLinkedQueue; - -public class GroupAlsoByWindowTest extends StreamingMultipleProgramsTestBase { - - private final Combine.CombineFn combiner = new Sum.SumIntegerFn(); - - private final WindowingStrategy slidingWindowWithAfterWatermarkTriggerStrategy = - WindowingStrategy.of(SlidingWindows.of(Duration.standardSeconds(10)).every(Duration.standardSeconds(5))) - .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp()) - .withTrigger(AfterWatermark.pastEndOfWindow()).withMode(WindowingStrategy.AccumulationMode.ACCUMULATING_FIRED_PANES); - - private final WindowingStrategy sessionWindowingStrategy = - WindowingStrategy.of(Sessions.withGapDuration(Duration.standardSeconds(2))) - .withTrigger(Repeatedly.forever(AfterWatermark.pastEndOfWindow())) - .withMode(WindowingStrategy.AccumulationMode.ACCUMULATING_FIRED_PANES) - .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp()) - .withAllowedLateness(Duration.standardSeconds(100)); - - private final WindowingStrategy fixedWindowingStrategy = - WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(10))) - .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp()); - - private final WindowingStrategy fixedWindowWithCountTriggerStrategy = - fixedWindowingStrategy.withTrigger(AfterPane.elementCountAtLeast(5)); - - private final WindowingStrategy fixedWindowWithAfterWatermarkTriggerStrategy = - fixedWindowingStrategy.withTrigger(AfterWatermark.pastEndOfWindow()); - - private final WindowingStrategy fixedWindowWithCompoundTriggerStrategy = - fixedWindowingStrategy.withTrigger( - AfterWatermark.pastEndOfWindow().withEarlyFirings(AfterPane.elementCountAtLeast(5)) - 
.withLateFirings(AfterPane.elementCountAtLeast(5))); - - /** - * The default accumulation mode is - * {@link org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode#DISCARDING_FIRED_PANES}. - * This strategy changes it to - * {@link org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode#ACCUMULATING_FIRED_PANES} - */ - private final WindowingStrategy fixedWindowWithCompoundTriggerStrategyAcc = - fixedWindowWithCompoundTriggerStrategy - .withMode(WindowingStrategy.AccumulationMode.ACCUMULATING_FIRED_PANES); - - @Test - public void testWithLateness() throws Exception { - WindowingStrategy strategy = WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(2))) - .withMode(WindowingStrategy.AccumulationMode.ACCUMULATING_FIRED_PANES) - .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp()) - .withAllowedLateness(Duration.millis(1000)); - long initialTime = 0L; - Pipeline pipeline = FlinkTestPipeline.createForStreaming(); - - KvCoder inputCoder = KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()); - - FlinkGroupAlsoByWindowWrapper gbwOperaror = - FlinkGroupAlsoByWindowWrapper.createForTesting( - pipeline.getOptions(), - pipeline.getCoderRegistry(), - strategy, - inputCoder, - combiner.asKeyedFn()); - - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - new OneInputStreamOperatorTestHarness<>(gbwOperaror); - testHarness.open(); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1000), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), 
new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processWatermark(new Watermark(initialTime + 2000)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processWatermark(new Watermark(initialTime + 4000)); - - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 4), - new Instant(initialTime + 1), - new IntervalWindow(new Instant(0), new Instant(2000)), - PaneInfo.createPane(true, false, PaneInfo.Timing.ON_TIME, 0, 0)) - , initialTime + 1)); - expectedOutput.add(new Watermark(initialTime + 2000)); - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 5), - new Instant(initialTime + 1999), - new IntervalWindow(new Instant(0), new Instant(2000)), - PaneInfo.createPane(false, false, PaneInfo.Timing.LATE, 1, 1)) - , initialTime + 1999)); - - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 6), - new Instant(initialTime + 1999), - new IntervalWindow(new Instant(0), new Instant(2000)), - PaneInfo.createPane(false, false, PaneInfo.Timing.LATE, 2, 2)) - , initialTime + 1999)); - expectedOutput.add(new Watermark(initialTime + 4000)); - - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - testHarness.close(); - } - - @Test - public void testSessionWindows() throws Exception { - WindowingStrategy strategy = sessionWindowingStrategy; - - long initialTime = 0L; - Pipeline pipeline = FlinkTestPipeline.createForStreaming(); - - KvCoder inputCoder = KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()); - - 
FlinkGroupAlsoByWindowWrapper gbwOperaror = - FlinkGroupAlsoByWindowWrapper.createForTesting( - pipeline.getOptions(), - pipeline.getCoderRegistry(), - strategy, - inputCoder, - combiner.asKeyedFn()); - - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - new OneInputStreamOperatorTestHarness<>(gbwOperaror); - testHarness.open(); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1000), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 3500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 3700), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 2700), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processWatermark(new Watermark(initialTime + 6000)); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 6700), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 6800), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 8900), null, PaneInfo.NO_FIRING), initialTime + 20)); - 
testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 7600), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 5600), null, PaneInfo.NO_FIRING), initialTime + 20)); - - testHarness.processWatermark(new Watermark(initialTime + 12000)); - - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 6), - new Instant(initialTime + 1), - new IntervalWindow(new Instant(1), new Instant(5700)), - PaneInfo.createPane(true, false, PaneInfo.Timing.ON_TIME, 0, 0)) - , initialTime + 1)); - expectedOutput.add(new Watermark(initialTime + 6000)); - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 11), - new Instant(initialTime + 6700), - new IntervalWindow(new Instant(1), new Instant(10900)), - PaneInfo.createPane(true, false, PaneInfo.Timing.ON_TIME, 0, 0)) - , initialTime + 6700)); - expectedOutput.add(new Watermark(initialTime + 12000)); - - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - testHarness.close(); - } - - @Test - public void testSlidingWindows() throws Exception { - WindowingStrategy strategy = slidingWindowWithAfterWatermarkTriggerStrategy; - long initialTime = 0L; - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - createTestingOperatorAndState(strategy, initialTime); - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - testHarness.processWatermark(new Watermark(initialTime + 25000)); - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 6), - new Instant(initialTime + 5000), - new IntervalWindow(new Instant(0), new Instant(10000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 5000)); - 
expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 6), - new Instant(initialTime + 1), - new IntervalWindow(new Instant(-5000), new Instant(5000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 1)); - expectedOutput.add(new Watermark(initialTime + 10000)); - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 11), - new Instant(initialTime + 15000), - new IntervalWindow(new Instant(10000), new Instant(20000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 15000)); - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 3), - new Instant(initialTime + 10000), - new IntervalWindow(new Instant(5000), new Instant(15000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 10000)); - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key2", 1), - new Instant(initialTime + 19500), - new IntervalWindow(new Instant(10000), new Instant(20000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 19500)); - expectedOutput.add(new Watermark(initialTime + 20000)); - - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key2", 1), - new Instant(initialTime + 20000), - /** - * this is 20000 and not 19500 because of a convention in dataflow where - * timestamps of windowed values in a window cannot be smaller than the - * end of a previous window. 
Checkout the documentation of the - * {@link WindowFn#getOutputTime(Instant, BoundedWindow)} - */ - new IntervalWindow(new Instant(15000), new Instant(25000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 20000)); - expectedOutput.add(new StreamRecord<>( - WindowedValue.of(KV.of("key1", 8), - new Instant(initialTime + 20000), - new IntervalWindow(new Instant(15000), new Instant(25000)), - PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)) - , initialTime + 20000)); - expectedOutput.add(new Watermark(initialTime + 25000)); - - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - testHarness.close(); - } - - @Test - public void testAfterWatermarkProgram() throws Exception { - WindowingStrategy strategy = fixedWindowWithAfterWatermarkTriggerStrategy; - long initialTime = 0L; - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - createTestingOperatorAndState(strategy, initialTime); - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 6), - new Instant(initialTime + 1), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)), initialTime + 1)); - expectedOutput.add(new Watermark(initialTime + 10000)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 11), - new Instant(initialTime + 10000), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)), initialTime + 10000)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key2", 1), - new Instant(initialTime + 19500), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)), initialTime + 19500)); - expectedOutput.add(new Watermark(initialTime + 20000)); - - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - 
testHarness.close(); - } - - @Test - public void testAfterCountProgram() throws Exception { - WindowingStrategy strategy = fixedWindowWithCountTriggerStrategy; - - long initialTime = 0L; - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - createTestingOperatorAndState(strategy, initialTime); - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 1), null, PaneInfo.createPane(true, true, PaneInfo.Timing.EARLY)), initialTime + 1)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 10000), null, PaneInfo.createPane(true, true, PaneInfo.Timing.EARLY)), initialTime + 10000)); - expectedOutput.add(new Watermark(initialTime + 10000)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key2", 1), - new Instant(initialTime + 19500), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME, 0, 0)), initialTime + 19500)); - expectedOutput.add(new Watermark(initialTime + 20000)); - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - - testHarness.close(); - } - - @Test - public void testCompoundProgram() throws Exception { - WindowingStrategy strategy = fixedWindowWithCompoundTriggerStrategy; - - long initialTime = 0L; - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - createTestingOperatorAndState(strategy, initialTime); - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - - /** - * PaneInfo are: - * isFirst (pane in window), - * isLast, Timing (of triggering), - * index (of pane in the window), - * onTimeIndex (if it the 1st,2nd, ... 
pane that was fired on time) - * */ - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 1), null, PaneInfo.createPane(true, false, PaneInfo.Timing.EARLY)), initialTime + 1)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 10000), null, PaneInfo.createPane(true, false, PaneInfo.Timing.EARLY)), initialTime + 10000)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 19500), null, PaneInfo.createPane(false, false, PaneInfo.Timing.EARLY, 1, -1)), initialTime + 19500)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), - new Instant(initialTime + 1200), null, PaneInfo.createPane(false, true, PaneInfo.Timing.ON_TIME, 1, 0)), initialTime + 1200)); - - expectedOutput.add(new Watermark(initialTime + 10000)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), - new Instant(initialTime + 19500), null, PaneInfo.createPane(false, true, PaneInfo.Timing.ON_TIME, 2, 0)), initialTime + 19500)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key2", 1), - new Instant(initialTime + 19500), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)), initialTime + 19500)); - - expectedOutput.add(new Watermark(initialTime + 20000)); - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - - testHarness.close(); - } - - @Test - public void testCompoundAccumulatingPanesProgram() throws Exception { - WindowingStrategy strategy = fixedWindowWithCompoundTriggerStrategyAcc; - long initialTime = 0L; - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - createTestingOperatorAndState(strategy, initialTime); - ConcurrentLinkedQueue expectedOutput = new ConcurrentLinkedQueue<>(); - - 
expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 1), null, PaneInfo.createPane(true, false, PaneInfo.Timing.EARLY)), initialTime + 1)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 5), - new Instant(initialTime + 10000), null, PaneInfo.createPane(true, false, PaneInfo.Timing.EARLY)), initialTime + 10000)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 10), - new Instant(initialTime + 19500), null, PaneInfo.createPane(false, false, PaneInfo.Timing.EARLY, 1, -1)), initialTime + 19500)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 6), - new Instant(initialTime + 1200), null, PaneInfo.createPane(false, true, PaneInfo.Timing.ON_TIME, 1, 0)), initialTime + 1200)); - - expectedOutput.add(new Watermark(initialTime + 10000)); - - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 11), - new Instant(initialTime + 19500), null, PaneInfo.createPane(false, true, PaneInfo.Timing.ON_TIME, 2, 0)), initialTime + 19500)); - expectedOutput.add(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key2", 1), - new Instant(initialTime + 19500), null, PaneInfo.createPane(true, true, PaneInfo.Timing.ON_TIME)), initialTime + 19500)); - - expectedOutput.add(new Watermark(initialTime + 20000)); - TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator()); - - testHarness.close(); - } - - private OneInputStreamOperatorTestHarness createTestingOperatorAndState(WindowingStrategy strategy, long initialTime) throws Exception { - Pipeline pipeline = FlinkTestPipeline.createForStreaming(); - - KvCoder inputCoder = KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()); - - FlinkGroupAlsoByWindowWrapper gbwOperaror = - FlinkGroupAlsoByWindowWrapper.createForTesting( - pipeline.getOptions(), - pipeline.getCoderRegistry(), - 
strategy, - inputCoder, - combiner.asKeyedFn()); - - OneInputStreamOperatorTestHarness>, WindowedValue>> testHarness = - new OneInputStreamOperatorTestHarness<>(gbwOperaror); - testHarness.open(); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1000), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 1200), null, PaneInfo.NO_FIRING), initialTime + 20)); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 10000), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 12100), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 14200), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 15300), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new 
StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 16500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key1", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - - testHarness.processElement(new StreamRecord<>(makeWindowedValue(strategy, KV.of("key2", 1), new Instant(initialTime + 19500), null, PaneInfo.NO_FIRING), initialTime + 20)); - - testHarness.processWatermark(new Watermark(initialTime + 10000)); - testHarness.processWatermark(new Watermark(initialTime + 20000)); - - return testHarness; - } - - private static class ResultSortComparator implements Comparator { - @Override - public int compare(Object o1, Object o2) { - if (o1 instanceof Watermark && o2 instanceof Watermark) { - Watermark w1 = (Watermark) o1; - Watermark w2 = (Watermark) o2; - return (int) (w1.getTimestamp() - w2.getTimestamp()); - } else { - StreamRecord>> sr0 = (StreamRecord>>) o1; - StreamRecord>> sr1 = (StreamRecord>>) o2; - - int comparison = (int) 
(sr0.getValue().getTimestamp().getMillis() - sr1.getValue().getTimestamp().getMillis()); - if (comparison != 0) { - return comparison; - } - - comparison = sr0.getValue().getValue().getKey().compareTo(sr1.getValue().getValue().getKey()); - if(comparison == 0) { - comparison = Integer.compare( - sr0.getValue().getValue().getValue(), - sr1.getValue().getValue().getValue()); - } - if(comparison == 0) { - Collection windowsA = sr0.getValue().getWindows(); - Collection windowsB = sr1.getValue().getWindows(); - - if(windowsA.size() != 1 || windowsB.size() != 1) { - throw new IllegalStateException("A value cannot belong to more than one windows after grouping."); - } - - BoundedWindow windowA = (BoundedWindow) windowsA.iterator().next(); - BoundedWindow windowB = (BoundedWindow) windowsB.iterator().next(); - comparison = Long.compare(windowA.maxTimestamp().getMillis(), windowB.maxTimestamp().getMillis()); - } - return comparison; - } - } - } - - private WindowedValue makeWindowedValue(WindowingStrategy strategy, - T output, Instant timestamp, Collection windows, PaneInfo pane) { - final Instant inputTimestamp = timestamp; - final WindowFn windowFn = strategy.getWindowFn(); - - if (timestamp == null) { - timestamp = BoundedWindow.TIMESTAMP_MIN_VALUE; - } - - if (windows == null) { - try { - windows = windowFn.assignWindows(windowFn.new AssignContext() { - @Override - public Object element() { - throw new UnsupportedOperationException( - "WindowFn attempted to access input element when none was available"); - } - - @Override - public Instant timestamp() { - if (inputTimestamp == null) { - throw new UnsupportedOperationException( - "WindowFn attempted to access input timestamp when none was available"); - } - return inputTimestamp; - } - - @Override - public BoundedWindow window() { - throw new UnsupportedOperationException( - "WindowFn attempted to access input windows when none were available"); - } - }); - } catch (Exception e) { - throw UserCodeException.wrap(e); - } - } 
- - return WindowedValue.of(output, timestamp, windows, pane); - } -} diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java index ca183a855b180..ab98c27733b3c 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/GroupByNullKeyTest.java @@ -17,28 +17,25 @@ */ package org.apache.beam.runners.flink.streaming; +import com.google.common.base.Joiner; +import java.io.Serializable; +import java.util.Arrays; import org.apache.beam.runners.flink.FlinkTestPipeline; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.windowing.AfterWatermark; import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.Joiner; - import org.apache.flink.streaming.util.StreamingProgramTestBase; import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import java.util.Arrays; - public class GroupByNullKeyTest extends StreamingProgramTestBase implements Serializable { diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/StateSerializationTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/StateSerializationTest.java deleted file mode 100644 index 6635d329ed121..0000000000000 --- 
a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/StateSerializationTest.java +++ /dev/null @@ -1,338 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.beam.runners.flink.streaming; - -import static org.junit.Assert.assertEquals; - -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.FlinkStateInternals; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointReader; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointUtils; -import org.apache.beam.runners.flink.translation.wrappers.streaming.state.StateCheckpointWriter; -import org.apache.beam.sdk.coders.CannotProvideCoderException; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.DelegateCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.coders.VarIntCoder; -import org.apache.beam.sdk.transforms.CombineWithContext; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.IntervalWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; -import org.apache.beam.sdk.util.TimeDomain; 
-import org.apache.beam.sdk.util.TimerInternals; -import org.apache.beam.sdk.util.state.AccumulatorCombiningState; -import org.apache.beam.sdk.util.state.BagState; -import org.apache.beam.sdk.util.state.StateNamespace; -import org.apache.beam.sdk.util.state.StateNamespaces; -import org.apache.beam.sdk.util.state.StateTag; -import org.apache.beam.sdk.util.state.StateTags; -import org.apache.beam.sdk.util.state.ValueState; -import org.apache.beam.sdk.util.state.WatermarkHoldState; - -import org.apache.flink.core.memory.DataInputView; -import org.apache.flink.runtime.state.AbstractStateBackend; -import org.apache.flink.runtime.state.memory.MemoryStateBackend; -import org.apache.flink.runtime.util.DataInputDeserializer; -import org.joda.time.Instant; -import org.junit.Test; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; - -public class StateSerializationTest { - - private static final StateNamespace NAMESPACE_1 = StateNamespaces.global(); - private static final String KEY_PREFIX = "TEST_"; - - // TODO: This can be replaced with the standard Sum.SumIntererFn once the state no longer needs - // to create a StateTag at the point of restoring state. Currently StateTags are compared strictly - // by type and combiners always use KeyedCombineFnWithContext rather than KeyedCombineFn or CombineFn. 
- private static CombineWithContext.KeyedCombineFnWithContext SUM_COMBINER = - new CombineWithContext.KeyedCombineFnWithContext() { - @Override - public int[] createAccumulator(Object key, CombineWithContext.Context c) { - return new int[1]; - } - - @Override - public int[] addInput(Object key, int[] accumulator, Integer value, CombineWithContext.Context c) { - accumulator[0] += value; - return accumulator; - } - - @Override - public int[] mergeAccumulators(Object key, Iterable accumulators, CombineWithContext.Context c) { - int[] r = new int[1]; - for (int[] a : accumulators) { - r[0] += a[0]; - } - return r; - } - - @Override - public Integer extractOutput(Object key, int[] accumulator, CombineWithContext.Context c) { - return accumulator[0]; - } - }; - - private static Coder INT_ACCUM_CODER = DelegateCoder.of( - VarIntCoder.of(), - new DelegateCoder.CodingFunction() { - @Override - public Integer apply(int[] accumulator) { - return accumulator[0]; - } - - @Override - public boolean equals(Object o) { - return o != null && this.getClass() == o.getClass(); - } - - @Override - public int hashCode() { - return this.getClass().hashCode(); - } - }, - new DelegateCoder.CodingFunction() { - @Override - public int[] apply(Integer value) { - int[] a = new int[1]; - a[0] = value; - return a; - } - - @Override - public boolean equals(Object o) { - return o != null && this.getClass() == o.getClass(); - } - - @Override - public int hashCode() { - return this.getClass().hashCode(); - } - }); - - private static final StateTag> STRING_VALUE_ADDR = - StateTags.value("stringValue", StringUtf8Coder.of()); - private static final StateTag> INT_VALUE_ADDR = - StateTags.value("stringValue", VarIntCoder.of()); - private static final StateTag> SUM_INTEGER_ADDR = - StateTags.keyedCombiningValueWithContext("sumInteger", INT_ACCUM_CODER, SUM_COMBINER); - private static final StateTag> STRING_BAG_ADDR = - StateTags.bag("stringBag", StringUtf8Coder.of()); - private static final StateTag> 
WATERMARK_BAG_ADDR = - StateTags.watermarkStateInternal("watermark", OutputTimeFns.outputAtEarliestInputTimestamp()); - - private Map> statePerKey = new HashMap<>(); - - private Map> activeTimers = new HashMap<>(); - - private void initializeStateAndTimers() throws CannotProvideCoderException { - for (int i = 0; i < 10; i++) { - String key = KEY_PREFIX + i; - - FlinkStateInternals state = initializeStateForKey(key); - Set timers = new HashSet<>(); - for (int j = 0; j < 5; j++) { - TimerInternals.TimerData timer = TimerInternals - .TimerData.of(NAMESPACE_1, - new Instant(1000 + i + j), TimeDomain.values()[j % 3]); - timers.add(timer); - } - - statePerKey.put(key, state); - activeTimers.put(key, timers); - } - } - - private FlinkStateInternals initializeStateForKey(String key) throws CannotProvideCoderException { - FlinkStateInternals state = createState(key); - - ValueState value = state.state(NAMESPACE_1, STRING_VALUE_ADDR); - value.write("test"); - - ValueState value2 = state.state(NAMESPACE_1, INT_VALUE_ADDR); - value2.write(4); - value2.write(5); - - AccumulatorCombiningState combiningValue = state.state(NAMESPACE_1, SUM_INTEGER_ADDR); - combiningValue.add(1); - combiningValue.add(2); - - WatermarkHoldState watermark = state.state(NAMESPACE_1, WATERMARK_BAG_ADDR); - watermark.add(new Instant(1000)); - - BagState bag = state.state(NAMESPACE_1, STRING_BAG_ADDR); - bag.add("v1"); - bag.add("v2"); - bag.add("v3"); - bag.add("v4"); - return state; - } - - private boolean restoreAndTestState(DataInputView in) throws Exception { - StateCheckpointReader reader = new StateCheckpointReader(in); - final ClassLoader userClassloader = this.getClass().getClassLoader(); - Coder windowCoder = IntervalWindow.getCoder(); - Coder keyCoder = StringUtf8Coder.of(); - - boolean comparisonRes = true; - - for (String key : statePerKey.keySet()) { - comparisonRes &= checkStateForKey(key); - } - - // restore the timers - Map> restoredTimersPerKey = 
StateCheckpointUtils.decodeTimers(reader, windowCoder, keyCoder); - if (activeTimers.size() != restoredTimersPerKey.size()) { - return false; - } - - for (String key : statePerKey.keySet()) { - Set originalTimers = activeTimers.get(key); - Set restoredTimers = restoredTimersPerKey.get(key); - comparisonRes &= checkTimersForKey(originalTimers, restoredTimers); - } - - // restore the state - Map> restoredPerKeyState = - StateCheckpointUtils.decodeState(reader, OutputTimeFns.outputAtEarliestInputTimestamp(), keyCoder, windowCoder, userClassloader); - if (restoredPerKeyState.size() != statePerKey.size()) { - return false; - } - - for (String key : statePerKey.keySet()) { - FlinkStateInternals originalState = statePerKey.get(key); - FlinkStateInternals restoredState = restoredPerKeyState.get(key); - comparisonRes &= checkStateForKey(originalState, restoredState); - } - return comparisonRes; - } - - private boolean checkStateForKey(String key) throws CannotProvideCoderException { - FlinkStateInternals state = statePerKey.get(key); - - ValueState value = state.state(NAMESPACE_1, STRING_VALUE_ADDR); - boolean comp = value.read().equals("test"); - - ValueState value2 = state.state(NAMESPACE_1, INT_VALUE_ADDR); - comp &= value2.read().equals(5); - - AccumulatorCombiningState combiningValue = state.state(NAMESPACE_1, SUM_INTEGER_ADDR); - comp &= combiningValue.read().equals(3); - - WatermarkHoldState watermark = state.state(NAMESPACE_1, WATERMARK_BAG_ADDR); - comp &= watermark.read().equals(new Instant(1000)); - - BagState bag = state.state(NAMESPACE_1, STRING_BAG_ADDR); - Iterator it = bag.read().iterator(); - int i = 0; - while (it.hasNext()) { - comp &= it.next().equals("v" + (++i)); - } - return comp; - } - - private void storeState(AbstractStateBackend.CheckpointStateOutputView out) throws Exception { - StateCheckpointWriter checkpointBuilder = StateCheckpointWriter.create(out); - Coder keyCoder = StringUtf8Coder.of(); - - // checkpoint the timers - 
StateCheckpointUtils.encodeTimers(activeTimers, checkpointBuilder, keyCoder); - - // checkpoint the state - StateCheckpointUtils.encodeState(statePerKey, checkpointBuilder, keyCoder); - } - - private boolean checkTimersForKey(Set originalTimers, Set restoredTimers) { - boolean comp = true; - if (restoredTimers == null) { - return false; - } - - if (originalTimers.size() != restoredTimers.size()) { - return false; - } - - for (TimerInternals.TimerData timer : originalTimers) { - comp &= restoredTimers.contains(timer); - } - return comp; - } - - private boolean checkStateForKey(FlinkStateInternals originalState, FlinkStateInternals restoredState) throws CannotProvideCoderException { - if (restoredState == null) { - return false; - } - - ValueState orValue = originalState.state(NAMESPACE_1, STRING_VALUE_ADDR); - ValueState resValue = restoredState.state(NAMESPACE_1, STRING_VALUE_ADDR); - boolean comp = orValue.read().equals(resValue.read()); - - ValueState orIntValue = originalState.state(NAMESPACE_1, INT_VALUE_ADDR); - ValueState resIntValue = restoredState.state(NAMESPACE_1, INT_VALUE_ADDR); - comp &= orIntValue.read().equals(resIntValue.read()); - - AccumulatorCombiningState combOrValue = originalState.state(NAMESPACE_1, SUM_INTEGER_ADDR); - AccumulatorCombiningState combResValue = restoredState.state(NAMESPACE_1, SUM_INTEGER_ADDR); - comp &= combOrValue.read().equals(combResValue.read()); - - WatermarkHoldState orWatermark = originalState.state(NAMESPACE_1, WATERMARK_BAG_ADDR); - WatermarkHoldState resWatermark = restoredState.state(NAMESPACE_1, WATERMARK_BAG_ADDR); - comp &= orWatermark.read().equals(resWatermark.read()); - - BagState orBag = originalState.state(NAMESPACE_1, STRING_BAG_ADDR); - BagState resBag = restoredState.state(NAMESPACE_1, STRING_BAG_ADDR); - - Iterator orIt = orBag.read().iterator(); - Iterator resIt = resBag.read().iterator(); - - while (orIt.hasNext() && resIt.hasNext()) { - comp &= orIt.next().equals(resIt.next()); - } - - return 
!((orIt.hasNext() && !resIt.hasNext()) || (!orIt.hasNext() && resIt.hasNext())) && comp; - } - - private FlinkStateInternals createState(String key) throws CannotProvideCoderException { - return new FlinkStateInternals<>( - key, - StringUtf8Coder.of(), - IntervalWindow.getCoder(), - OutputTimeFns.outputAtEarliestInputTimestamp()); - } - - @Test - public void test() throws Exception { - StateSerializationTest test = new StateSerializationTest(); - test.initializeStateAndTimers(); - - MemoryStateBackend.MemoryCheckpointOutputStream memBackend = new MemoryStateBackend.MemoryCheckpointOutputStream(32048); - AbstractStateBackend.CheckpointStateOutputView out = new AbstractStateBackend.CheckpointStateOutputView(memBackend); - - test.storeState(out); - - byte[] contents = memBackend.closeAndGetBytes(); - DataInputView in = new DataInputDeserializer(contents, 0, contents.length); - - assertEquals(test.restoreAndTestState(in), true); - } - -} diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TestCountingSource.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TestCountingSource.java index 3ced02ee3038c..9251d42f4404b 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TestCountingSource.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TestCountingSource.java @@ -19,6 +19,11 @@ import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadLocalRandom; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DelegateCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -26,17 +31,10 @@ import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.values.KV; - import org.joda.time.Instant; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ThreadLocalRandom; -import javax.annotation.Nullable; - /** * An unbounded source for testing the unbounded sources framework code. * diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java index 7912aee2409f9..64f978fa0f234 100644 --- a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/TopWikipediaSessionsITCase.java @@ -17,6 +17,10 @@ */ package org.apache.beam.runners.flink.streaming; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Joiner; +import java.io.Serializable; +import java.util.Arrays; import org.apache.beam.runners.flink.FlinkTestPipeline; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; @@ -28,17 +32,10 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableRow; -import com.google.common.base.Joiner; - import org.apache.flink.streaming.util.StreamingProgramTestBase; import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import java.util.Arrays; - /** * Session window test diff --git a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/UnboundedSourceWrapperTest.java b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/UnboundedSourceWrapperTest.java index f5a52f57862f2..a70ad49b9f73a 100644 --- 
a/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/UnboundedSourceWrapperTest.java +++ b/runners/flink/runner/src/test/java/org/apache/beam/runners/flink/streaming/UnboundedSourceWrapperTest.java @@ -23,12 +23,14 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; import org.apache.beam.runners.flink.translation.wrappers.streaming.io.UnboundedSourceWrapper; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.KV; - import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.accumulators.Accumulator; import org.apache.flink.configuration.Configuration; @@ -43,10 +45,6 @@ import org.apache.flink.streaming.runtime.tasks.StreamTask; import org.junit.Test; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - /** * Tests for {@link UnboundedSourceWrapper}. 
*/ @@ -91,7 +89,7 @@ public void emitWatermark(Watermark watermark) { @Override public void collect( - StreamRecord>> windowedValueStreamRecord) { + StreamRecord>> windowedValueStreamRecord) { count++; if (count >= NUM_ELEMENTS) { diff --git a/runners/google-cloud-dataflow-java/pom.xml b/runners/google-cloud-dataflow-java/pom.xml index 0044823068c04..bf66f388c9440 100644 --- a/runners/google-cloud-dataflow-java/pom.xml +++ b/runners/google-cloud-dataflow-java/pom.xml @@ -60,17 +60,6 @@ true - - - runnable-on-service-tests - - - org/apache/beam/sdk/transforms/ParDoLifecycleTest.java - org/apache/beam/sdk/transforms/ParDoTest.java - - - - - - + diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorPipelineExtractor.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorPipelineExtractor.java index ac215c9ed1f06..0e79abe9683eb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorPipelineExtractor.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorPipelineExtractor.java @@ -17,6 +17,11 @@ */ package org.apache.beam.sdk; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.SetMultimap; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; import org.apache.beam.sdk.Pipeline.PipelineVisitor; import org.apache.beam.sdk.runners.TransformTreeNode; import org.apache.beam.sdk.transforms.Aggregator; @@ -25,13 +30,6 @@ import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.PValue; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.SetMultimap; - -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - /** * Retrieves {@link Aggregator Aggregators} at each {@link ParDo} and returns a {@link Map} of * {@link Aggregator} to the {@link PTransform PTransforms} in which it is present. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorValues.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorValues.java index efaad85b67b54..6297085319e2a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorValues.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/AggregatorValues.java @@ -17,13 +17,12 @@ */ package org.apache.beam.sdk; +import java.util.Collection; +import java.util.Map; import org.apache.beam.sdk.transforms.Aggregator; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.OldDoFn; -import java.util.Collection; -import java.util.Map; - /** * A collection of values associated with an {@link Aggregator}. Aggregators declared in a * {@link OldDoFn} are emitted on a per-{@code OldDoFn}-application basis. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java index 1bbc56f1fad22..53f46f6f06aba 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/Pipeline.java @@ -19,6 +19,15 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Multimap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.options.PipelineOptions; @@ -36,21 +45,9 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.PValue; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; - import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * A {@link Pipeline} manages a directed acyclic graph of {@link PTransform PTransforms}, and the * {@link PCollection PCollections} that the {@link PTransform}s consume and produce. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/PipelineResult.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/PipelineResult.java index edfc9248f527a..d9cdc16a3da47 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/PipelineResult.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/PipelineResult.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk; +import java.io.IOException; import org.apache.beam.sdk.transforms.Aggregator; - import org.joda.time.Duration; -import java.io.IOException; - /** * Result of {@link Pipeline#run()}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java index da4db93e19533..7894d14ccd1db 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/AvroCoder.java @@ -19,11 +19,22 @@ import static org.apache.beam.sdk.util.Structs.addString; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.SortedSet; +import javax.annotation.Nullable; import 
org.apache.avro.Schema; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; @@ -45,22 +56,8 @@ import org.apache.avro.specific.SpecificData; import org.apache.avro.util.ClassUtils; import org.apache.avro.util.Utf8; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedMap; -import java.util.SortedSet; - -import javax.annotation.Nullable; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.values.TypeDescriptor; /** * A {@link Coder} using Avro binary format. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigDecimalCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigDecimalCoder.java index c4b7cd1155109..e2628821c96d2 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigDecimalCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigDecimalCoder.java @@ -20,7 +20,6 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianIntegerCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianIntegerCoder.java index bd3bcf3800aae..ac8db120a0225 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianIntegerCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianIntegerCoder.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.DataInputStream; import java.io.DataOutputStream; import 
java.io.EOFException; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianLongCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianLongCoder.java index 5f5793ed7723a..e0053241f8d03 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianLongCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigEndianLongCoder.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigIntegerCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigIntegerCoder.java index a41defed15b21..daba983a83b5b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigIntegerCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/BigIntegerCoder.java @@ -20,7 +20,6 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteArrayCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteArrayCoder.java index 4115825bf3d5f..65e24da8c220e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteArrayCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteArrayCoder.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.ExposedByteArrayOutputStream; -import org.apache.beam.sdk.util.StreamUtils; -import org.apache.beam.sdk.util.VarInt; - -import com.google.common.io.ByteStreams; - import com.fasterxml.jackson.annotation.JsonCreator; - +import com.google.common.io.ByteStreams; import java.io.IOException; import java.io.InputStream; 
import java.io.OutputStream; +import org.apache.beam.sdk.util.ExposedByteArrayOutputStream; +import org.apache.beam.sdk.util.StreamUtils; +import org.apache.beam.sdk.util.VarInt; /** * A {@link Coder} for {@code byte[]}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteCoder.java index e146945c8e16e..c912b355b149e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteCoder.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.EOFException; import java.io.IOException; import java.io.InputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteStringCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteStringCoder.java index b03f98a3e174f..c70b9db978e53 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteStringCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ByteStringCoder.java @@ -17,16 +17,13 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.VarInt; - +import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.io.ByteStreams; import com.google.protobuf.ByteString; - -import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import org.apache.beam.sdk.util.VarInt; /** * A {@link Coder} for {@link ByteString} objects based on their encoded Protocol Buffer form. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java index 136c00a21796d..688d1f793ab12 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/Coder.java @@ -19,15 +19,9 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.annotations.Experimental.Kind; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - import com.google.common.base.Joiner; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -35,8 +29,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.annotations.Experimental.Kind; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * A {@link Coder Coder<T>} defines how to encode and decode values of type {@code T} into diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderFactories.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderFactories.java index 480f424182d8c..e1a202a26467a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderFactories.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderFactories.java @@ -17,8 +17,6 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.values.TypeDescriptor; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; @@ -26,6 +24,7 @@ import java.util.Arrays; import java.util.Collections; import 
java.util.List; +import org.apache.beam.sdk.values.TypeDescriptor; /** * Static utility methods for creating and working with {@link Coder}s. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderProviders.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderProviders.java index 3b852e2ab1f0e..c072008d5f288 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderProviders.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderProviders.java @@ -19,15 +19,13 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.InstanceBuilder; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; - import java.lang.reflect.InvocationTargetException; import java.util.List; +import org.apache.beam.sdk.util.InstanceBuilder; +import org.apache.beam.sdk.values.TypeDescriptor; /** * Static utility methods for working with {@link CoderProvider CoderProviders}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java index 411ebe2bbb589..9110de058af6a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CoderRegistry.java @@ -19,24 +19,11 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.CannotProvideCoderException.ReasonCode; -import org.apache.beam.sdk.coders.protobuf.ProtoCoder; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.apache.beam.sdk.util.CoderUtils; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.TimestampedValue; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.google.api.services.bigquery.model.TableRow; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.protobuf.ByteString; - -import org.joda.time.Instant; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; @@ -49,8 +36,17 @@ import java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.CannotProvideCoderException.ReasonCode; +import org.apache.beam.sdk.coders.protobuf.ProtoCoder; +import org.apache.beam.sdk.transforms.SerializableFunction; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.TimestampedValue; +import org.apache.beam.sdk.values.TypeDescriptor; +import org.joda.time.Instant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link CoderRegistry} allows registering the default {@link Coder} to use for a Java class, diff --git 
a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CollectionCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CollectionCoder.java index f40d5f029ffb9..7c61e882c35b4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CollectionCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CollectionCoder.java @@ -19,13 +19,11 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.PropertyNames; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.Collection; import java.util.List; +import org.apache.beam.sdk.util.PropertyNames; /** * A {@link CollectionCoder} encodes {@link Collection Collections} in the format diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CustomCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CustomCoder.java index 69412e5849aaf..2614cc178c029 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CustomCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/CustomCoder.java @@ -17,23 +17,19 @@ */ package org.apache.beam.sdk.coders; +import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.beam.sdk.util.Structs.addString; import static org.apache.beam.sdk.util.Structs.addStringList; -import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.SerializableUtils; -import org.apache.beam.sdk.util.StringUtils; - -import com.google.common.collect.Lists; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.collect.Lists; import java.io.Serializable; import java.util.Collection; +import org.apache.beam.sdk.util.CloudObject; +import 
org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.SerializableUtils; +import org.apache.beam.sdk.util.StringUtils; /** * An abstract base class for writing a {@link Coder} class that encodes itself via Java diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DefaultCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DefaultCoder.java index 3eb1253c54cb5..9a976f9d4cc43 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DefaultCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DefaultCoder.java @@ -17,13 +17,12 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.values.PCollection; - import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.apache.beam.sdk.values.PCollection; /** * The {@link DefaultCoder} annotation diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DelegateCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DelegateCoder.java index 385c14996367f..e7f9d4ac11f15 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DelegateCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DelegateCoder.java @@ -20,7 +20,6 @@ import com.google.common.base.MoreObjects; import com.google.common.base.Objects; import com.google.common.collect.Lists; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DoubleCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DoubleCoder.java index cb204ecb71ace..4e56914a18ad7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DoubleCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DoubleCoder.java @@ -18,7 +18,6 @@ package 
org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java index c34ce66eb0196..a2458f249cf3d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DurationCoder.java @@ -17,15 +17,13 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - import com.fasterxml.jackson.annotation.JsonCreator; -import org.joda.time.Duration; -import org.joda.time.ReadableDuration; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; +import org.joda.time.Duration; +import org.joda.time.ReadableDuration; /** * A {@link Coder} that encodes a joda {@link Duration} as a {@link Long} using the format of diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java index d41bd1f21bd08..c0409fba46ccb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/InstantCoder.java @@ -17,18 +17,14 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - -import com.google.common.base.Converter; - import com.fasterxml.jackson.annotation.JsonCreator; -import org.joda.time.Instant; - +import com.google.common.base.Converter; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import 
org.apache.beam.sdk.util.common.ElementByteSizeObserver; +import org.joda.time.Instant; /** * A {@link Coder} for joda {@link Instant} that encodes it as a big endian {@link Long} diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableCoder.java index 2e5a8cc361c9a..11fb1720deb75 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableCoder.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.coders; -import static org.apache.beam.sdk.util.Structs.addBoolean; - import static com.google.common.base.Preconditions.checkArgument; - -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; +import static org.apache.beam.sdk.util.Structs.addBoolean; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.List; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; /** * An {@link IterableCoder} encodes any {@link Iterable} in the format diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java index 9417d85612015..8680552bddc51 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/IterableLikeCoder.java @@ -19,11 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.BufferedElementCountingOutputStream; -import org.apache.beam.sdk.util.VarInt; -import org.apache.beam.sdk.util.common.ElementByteSizeObservableIterable; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - import java.io.DataInputStream; import java.io.DataOutputStream; import 
java.io.IOException; @@ -35,6 +30,10 @@ import java.util.List; import java.util.Observable; import java.util.Observer; +import org.apache.beam.sdk.util.BufferedElementCountingOutputStream; +import org.apache.beam.sdk.util.VarInt; +import org.apache.beam.sdk.util.common.ElementByteSizeObservableIterable; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * An abstract base class with functionality for assembling a diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java index 689f993a1cdeb..748b07d8f6f9f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/JAXBCoder.java @@ -17,25 +17,21 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.Structs; -import org.apache.beam.sdk.util.VarInt; - -import com.google.common.io.ByteStreams; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.io.ByteStreams; import java.io.FilterInputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; - import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.Structs; +import org.apache.beam.sdk.util.VarInt; /** * A coder for JAXB annotated objects. 
This coder uses JAXB marshalling/unmarshalling mechanisms diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/KvCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/KvCoder.java index beeb9c5267a66..ad13226b05c43 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/KvCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/KvCoder.java @@ -17,23 +17,20 @@ */ package org.apache.beam.sdk.coders; -import static org.apache.beam.sdk.util.Structs.addBoolean; - import static com.google.common.base.Preconditions.checkArgument; - -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; -import org.apache.beam.sdk.values.KV; +import static org.apache.beam.sdk.util.Structs.addBoolean; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.List; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; +import org.apache.beam.sdk.values.KV; /** * A {@code KvCoder} encodes {@link KV}s. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ListCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ListCoder.java index f8437a4c929a8..78785017d49cd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ListCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/ListCoder.java @@ -19,12 +19,10 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.PropertyNames; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.List; +import org.apache.beam.sdk.util.PropertyNames; /** * A {@link Coder} for {@link List}, using the format of {@link IterableLikeCoder}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/MapCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/MapCoder.java index 044fff1e413aa..ebe705156ffbd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/MapCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/MapCoder.java @@ -19,14 +19,9 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - -import com.google.common.collect.Maps; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.collect.Maps; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; @@ -36,6 +31,8 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * A {@link Coder} for {@link Map Maps} that encodes them according to provided diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/NullableCoder.java 
b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/NullableCoder.java index 4001b811db188..44aadbdd88ccb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/NullableCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/NullableCoder.java @@ -19,21 +19,17 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - -import com.google.common.base.Optional; -import com.google.common.collect.ImmutableList; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.base.Optional; +import com.google.common.collect.ImmutableList; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * A {@link NullableCoder} encodes nullable values of type {@code T} using a nested diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SerializableCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SerializableCoder.java index 0995bdc976e06..46777b9d835ef 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SerializableCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SerializableCoder.java @@ -17,12 +17,8 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; @@ -30,6 +26,8 @@ import java.io.ObjectStreamClass; import java.io.OutputStream; import java.io.Serializable; +import 
org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.values.TypeDescriptor; /** * A {@link Coder} for Java classes that implement {@link Serializable}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SetCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SetCoder.java index fcbb3797a8a29..0d1b0176ae059 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SetCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/SetCoder.java @@ -19,14 +19,12 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.util.PropertyNames; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.util.HashSet; import java.util.List; import java.util.Set; +import org.apache.beam.sdk.util.PropertyNames; /** * A {@link SetCoder} encodes any {@link Set} using the format of {@link IterableLikeCoder}. The diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StandardCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StandardCoder.java index 8b7b78890d4c0..0e57ed22ea370 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StandardCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StandardCoder.java @@ -17,26 +17,23 @@ */ package org.apache.beam.sdk.coders; +import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.beam.sdk.util.Structs.addList; import static org.apache.beam.sdk.util.Structs.addString; import static org.apache.beam.sdk.util.Structs.addStringList; -import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - import com.google.common.collect.Lists; import com.google.common.io.ByteStreams; import 
com.google.common.io.CountingOutputStream; - import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * An abstract base class to implement a {@link Coder} that defines equality, hashing, and printing diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringDelegateCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringDelegateCoder.java index c498a8aa27150..80bcae30b386a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringDelegateCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringDelegateCoder.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.coders.DelegateCoder.CodingFunction; -import org.apache.beam.sdk.coders.protobuf.ProtoCoder; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.reflect.InvocationTargetException; import java.util.Collection; +import org.apache.beam.sdk.coders.DelegateCoder.CodingFunction; +import org.apache.beam.sdk.coders.protobuf.ProtoCoder; /** * A {@link Coder} that wraps a {@code Coder} diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringUtf8Coder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringUtf8Coder.java index 807c00190cdb2..e01dfd8a12193 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringUtf8Coder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/StringUtf8Coder.java @@ -17,16 +17,10 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.ExposedByteArrayOutputStream; -import org.apache.beam.sdk.util.StreamUtils; -import org.apache.beam.sdk.util.VarInt; - +import 
com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.base.Utf8; import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; - -import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; @@ -35,6 +29,9 @@ import java.io.OutputStream; import java.io.UTFDataFormatException; import java.nio.charset.StandardCharsets; +import org.apache.beam.sdk.util.ExposedByteArrayOutputStream; +import org.apache.beam.sdk.util.StreamUtils; +import org.apache.beam.sdk.util.VarInt; /** * A {@link Coder} that encodes {@link String Strings} in UTF-8 encoding. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TableRowJsonCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TableRowJsonCoder.java index 42a6dc96cdd9d..a2562f28baf6f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TableRowJsonCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TableRowJsonCoder.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.coders; -import com.google.api.services.bigquery.model.TableRow; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; - +import com.google.api.services.bigquery.model.TableRow; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TextualIntegerCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TextualIntegerCoder.java index 156160eb0e7e9..6258b2101c400 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TextualIntegerCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/TextualIntegerCoder.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - 
import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarIntCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarIntCoder.java index 20ce7d4c8cadf..baf3be88af59a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarIntCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarIntCoder.java @@ -17,15 +17,13 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.VarInt; - import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UTFDataFormatException; +import org.apache.beam.sdk.util.VarInt; /** * A {@link Coder} that encodes {@link Integer Integers} using between 1 and 5 bytes. Negative diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarLongCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarLongCoder.java index f1a5ea0784119..ee3c501966fe6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarLongCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VarLongCoder.java @@ -17,15 +17,13 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.VarInt; - import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UTFDataFormatException; +import org.apache.beam.sdk.util.VarInt; /** * A {@link Coder} that encodes {@link Long Longs} using between 1 and 10 bytes. 
Negative diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VoidCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VoidCoder.java index d67d82fbb6a0b..6bd8a0536ebe3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VoidCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/VoidCoder.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.coders; import com.fasterxml.jackson.annotation.JsonCreator; - import java.io.InputStream; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtoCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtoCoder.java index 67cec884b8b69..79fb37305ea62 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtoCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtoCoder.java @@ -19,27 +19,14 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.AtomicCoder; -import org.apache.beam.sdk.coders.CannotProvideCoderException; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.CoderProvider; -import org.apache.beam.sdk.coders.CoderRegistry; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.Structs; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.TypeDescriptor; - +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.protobuf.ExtensionRegistry; import com.google.protobuf.Message; import com.google.protobuf.Parser; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - import java.io.IOException; 
import java.io.InputStream; import java.io.OutputStream; @@ -52,8 +39,17 @@ import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.AtomicCoder; +import org.apache.beam.sdk.coders.CannotProvideCoderException; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.CoderProvider; +import org.apache.beam.sdk.coders.CoderRegistry; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.Structs; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.TypeDescriptor; /** * A {@link Coder} using Google Protocol Buffers binary format. {@link ProtoCoder} supports both diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtil.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtil.java index b03b748bfa18e..77afb47db49c0 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtil.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtil.java @@ -19,8 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.Coder.NonDeterministicException; - import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.Descriptors.FileDescriptor.Syntax; @@ -28,10 +26,10 @@ import com.google.protobuf.ExtensionRegistry; import com.google.protobuf.ExtensionRegistry.ExtensionInfo; import com.google.protobuf.Message; - import java.lang.reflect.InvocationTargetException; import java.util.HashSet; import java.util.Set; +import org.apache.beam.sdk.coders.Coder.NonDeterministicException; /** * Utility functions for reflecting and analyzing Protocol Buffers classes. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java index 28c283d97dafa..e7c302bed1d70 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroIO.java @@ -20,6 +20,16 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.nio.channels.Channels; +import java.nio.channels.WritableByteChannel; +import java.util.regex.Pattern; +import javax.annotation.Nullable; +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.reflect.ReflectData; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.VoidCoder; @@ -34,20 +44,6 @@ import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; -import com.google.common.annotations.VisibleForTesting; - -import org.apache.avro.Schema; -import org.apache.avro.file.DataFileWriter; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.reflect.ReflectData; - -import java.io.IOException; -import java.nio.channels.Channels; -import java.nio.channels.WritableByteChannel; -import java.util.regex.Pattern; - -import javax.annotation.Nullable; - /** * {@link PTransform}s for reading and writing Avro files. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java index d1e0c4dc5e39e..6ef02aa2d5011 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/AvroSource.java @@ -21,29 +21,6 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.runners.PipelineRunner; -import org.apache.beam.sdk.util.AvroUtils; -import org.apache.beam.sdk.util.AvroUtils.AvroMetadata; -import org.apache.beam.sdk.values.PCollection; - -import org.apache.avro.Schema; -import org.apache.avro.file.CodecFactory; -import org.apache.avro.file.DataFileConstants; -import org.apache.avro.generic.GenericDatumReader; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.io.BinaryDecoder; -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.DecoderFactory; -import org.apache.avro.reflect.ReflectData; -import org.apache.avro.reflect.ReflectDatumReader; -import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; -import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream; -import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; -import org.apache.commons.compress.utils.CountingInputStream; - import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; @@ -57,8 +34,28 @@ import java.util.Collection; import java.util.zip.Inflater; import java.util.zip.InflaterInputStream; - import javax.annotation.concurrent.GuardedBy; +import org.apache.avro.Schema; +import org.apache.avro.file.CodecFactory; +import org.apache.avro.file.DataFileConstants; +import 
org.apache.avro.generic.GenericDatumReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryDecoder; +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.reflect.ReflectData; +import org.apache.avro.reflect.ReflectDatumReader; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.runners.PipelineRunner; +import org.apache.beam.sdk.util.AvroUtils; +import org.apache.beam.sdk.util.AvroUtils.AvroMetadata; +import org.apache.beam.sdk.values.PCollection; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream; +import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; +import org.apache.commons.compress.utils.CountingInputStream; // CHECKSTYLE.OFF: JavadocStyle /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BlockBasedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BlockBasedSource.java index 997c77a1273f6..83336ff739132 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BlockBasedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BlockBasedSource.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.io; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.options.PipelineOptions; - import java.io.IOException; import java.util.NoSuchElementException; - import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.options.PipelineOptions; /** * A {@code BlockBasedSource} is a {@link FileBasedSource} where a file consists of blocks of diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java 
b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java index cfdd5814e2735..ede65a96cc998 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSource.java @@ -19,6 +19,13 @@ import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName; +import com.google.api.client.util.BackOff; +import com.google.common.util.concurrent.Uninterruptibles; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.concurrent.TimeUnit; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -31,19 +38,9 @@ import org.apache.beam.sdk.util.ValueWithRecordId; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PInput; - -import com.google.api.client.util.BackOff; -import com.google.common.util.concurrent.Uninterruptibles; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.concurrent.TimeUnit; - /** * {@link PTransform} that reads a bounded amount of data from an {@link UnboundedSource}, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedSource.java index 394afa4bb3c8c..5fd7b8adc948d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/BoundedSource.java @@ -17,20 +17,17 @@ */ package org.apache.beam.sdk.io; +import java.io.IOException; +import java.util.List; +import java.util.NoSuchElementException; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import 
org.apache.beam.sdk.io.range.OffsetRangeTracker; import org.apache.beam.sdk.io.range.RangeTracker; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; - import org.joda.time.Instant; -import java.io.IOException; -import java.util.List; -import java.util.NoSuchElementException; - -import javax.annotation.Nullable; - /** * A {@link Source} that reads a finite amount of input and, because of that, supports * some additional operations. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java index 11ff90f66af9e..3cd097ca507bf 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CompressedSource.java @@ -20,17 +20,8 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.display.DisplayData; - import com.google.common.io.ByteStreams; import com.google.common.primitives.Ints; - -import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; -import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; - import java.io.IOException; import java.io.InputStream; import java.io.PushbackInputStream; @@ -42,8 +33,13 @@ import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; - import javax.annotation.concurrent.GuardedBy; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.display.DisplayData; +import 
org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; /** * A Source that reads from compressed files. A {@code CompressedSources} wraps a delegate diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingInput.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingInput.java index a21bc2418fa32..f479215296442 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingInput.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingInput.java @@ -20,6 +20,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.base.Optional; import org.apache.beam.sdk.io.CountingSource.NowTimestampFn; import org.apache.beam.sdk.io.Read.Unbounded; import org.apache.beam.sdk.transforms.PTransform; @@ -28,9 +29,6 @@ import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollection.IsBounded; - -import com.google.common.base.Optional; - import org.joda.time.Duration; import org.joda.time.Instant; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java index 403d22eba3195..59a8df8ac9d9f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/CountingSource.java @@ -20,6 +20,10 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.util.List; +import java.util.NoSuchElementException; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.Coder; import 
org.apache.beam.sdk.coders.DefaultCoder; @@ -29,16 +33,9 @@ import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; -import java.util.List; -import java.util.NoSuchElementException; - /** * A source that produces longs. When used as a {@link BoundedSource}, {@link CountingSource} * starts at {@code 0} and counts up to a specified maximum. When used as an diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java index 761d49c9f23b4..ea95f2f5629c1 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSink.java @@ -21,23 +21,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.SerializableCoder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.util.FileIOChannelFactory; -import org.apache.beam.sdk.util.GcsIOChannelFactory; -import org.apache.beam.sdk.util.GcsUtil; -import org.apache.beam.sdk.util.GcsUtil.GcsUtilFactory; -import org.apache.beam.sdk.util.IOChannelFactory; -import org.apache.beam.sdk.util.IOChannelUtils; -import org.apache.beam.sdk.util.MimeTypes; - import com.google.common.collect.Ordering; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.IOException; import java.io.Serializable; @@ -50,6 +34,19 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; +import org.apache.beam.sdk.coders.Coder; +import 
org.apache.beam.sdk.coders.SerializableCoder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.util.FileIOChannelFactory; +import org.apache.beam.sdk.util.GcsIOChannelFactory; +import org.apache.beam.sdk.util.GcsUtil; +import org.apache.beam.sdk.util.GcsUtil.GcsUtilFactory; +import org.apache.beam.sdk.util.IOChannelFactory; +import org.apache.beam.sdk.util.IOChannelUtils; +import org.apache.beam.sdk.util.MimeTypes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract {@link Sink} for file-based output. An implementation of FileBasedSink writes file-based diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSource.java index f000f6a71eda9..b073236605360 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/FileBasedSource.java @@ -20,22 +20,12 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.util.IOChannelFactory; -import org.apache.beam.sdk.util.IOChannelUtils; - import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; - -import org.joda.time.Instant; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.nio.channels.ReadableByteChannel; import java.nio.channels.SeekableByteChannel; @@ -48,6 +38,13 @@ import java.util.concurrent.Callable; 
import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.util.IOChannelFactory; +import org.apache.beam.sdk.util.IOChannelUtils; +import org.joda.time.Instant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A common base class for all file-based {@link Source}s. Extend this class to implement your own diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java index 8cbcd1f868da3..6c685ffc89a0a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/OffsetBasedSource.java @@ -19,19 +19,17 @@ import static com.google.common.base.Preconditions.checkArgument; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; import org.apache.beam.sdk.io.range.OffsetRangeTracker; import org.apache.beam.sdk.io.range.RangeTracker; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; - /** * A {@link BoundedSource} that uses offsets to define starting and ending positions. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java index 2b2717574e551..b137f1551010a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubIO.java @@ -19,6 +19,14 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.Strings; +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VoidCoder; @@ -41,23 +49,11 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; - -import com.google.common.base.Strings; - import org.joda.time.Duration; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import javax.annotation.Nullable; - /** * Read and Write {@link PTransform}s for Cloud Pub/Sub streams. These transforms create * and consume unbounded {@link PCollection PCollections}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java index 301475198b02f..179abf6da4f6d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSink.java @@ -20,6 +20,16 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.hash.Hashing; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.BigEndianLongCoder; import org.apache.beam.sdk.coders.ByteArrayCoder; @@ -52,21 +62,8 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.hash.Hashing; - import org.joda.time.Duration; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.ThreadLocalRandom; -import javax.annotation.Nullable; - /** * A PTransform which streams messages to Pubsub. *
      diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java index f99b47142425a..36f154f9c9874 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java @@ -22,6 +22,28 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import com.google.api.client.util.Clock; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Charsets; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.security.GeneralSecurityException; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -49,36 +71,11 @@ import org.apache.beam.sdk.util.PubsubClient.TopicPath; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.client.util.Clock; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.joda.time.Duration; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import 
java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.security.GeneralSecurityException; -import java.util.ArrayDeque; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.atomic.AtomicInteger; -import javax.annotation.Nullable; - /** * A PTransform which streams messages from Pubsub. *
        diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java index e13ff06dc39db..f99877d84d29a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Read.java @@ -19,6 +19,7 @@ import static org.apache.beam.sdk.util.StringUtils.approximateSimpleName; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.display.DisplayData; @@ -27,11 +28,8 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PInput; - import org.joda.time.Duration; -import javax.annotation.Nullable; - /** * A {@link PTransform} for reading from a {@link Source}. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java index 20b1631306d53..1abcc3d1166d5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Sink.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.io; +import java.io.Serializable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -24,8 +25,6 @@ import org.apache.beam.sdk.transforms.display.HasDisplayData; import org.apache.beam.sdk.values.PCollection; -import java.io.Serializable; - /** * A {@code Sink} represents a resource that can be written to using the {@link Write} transform. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java index de000351538b7..542d91ca791ea 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Source.java @@ -17,17 +17,15 @@ */ package org.apache.beam.sdk.io; +import java.io.IOException; +import java.io.Serializable; +import java.util.NoSuchElementException; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.display.HasDisplayData; - import org.joda.time.Instant; -import java.io.IOException; -import java.io.Serializable; -import java.util.NoSuchElementException; - /** * Base class for defining input formats and creating a {@code Source} for reading the input. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java index 64db3f76311cd..ed9a62790afe7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/TextIO.java @@ -20,23 +20,8 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.Coder.Context; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.coders.VoidCoder; -import org.apache.beam.sdk.io.Read.Bounded; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.util.IOChannelUtils; -import org.apache.beam.sdk.util.MimeTypes; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; -import 
org.apache.beam.sdk.values.PInput; - import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.ByteString; - import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; @@ -47,8 +32,20 @@ import java.nio.charset.StandardCharsets; import java.util.NoSuchElementException; import java.util.regex.Pattern; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.Coder.Context; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.coders.VoidCoder; +import org.apache.beam.sdk.io.Read.Bounded; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.util.IOChannelUtils; +import org.apache.beam.sdk.util.MimeTypes; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PDone; +import org.apache.beam.sdk.values.PInput; /** * {@link PTransform}s for reading and writing text files. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java index dded8e2493b4c..043f2fcefb19b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/UnboundedSource.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.io; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.options.PipelineOptions; - -import org.joda.time.Instant; - import java.io.IOException; import java.util.List; import java.util.NoSuchElementException; - import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.joda.time.Instant; /** * A {@link Source} that reads an unbounded amount of input and, because of that, supports diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java index fea65ef233984..9d0beb769c444 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/Write.java @@ -20,6 +20,10 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.Lists; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; @@ -44,16 +48,9 @@ import org.apache.beam.sdk.values.PCollection.IsBounded; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PDone; - -import com.google.api.client.util.Lists; - import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.ThreadLocalRandom; - /** * A {@link PTransform} that writes to a {@link Sink}. A write begins with a sequential global * initialization of a sink, followed by a parallel write, and ends with a sequential finalization diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSink.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSink.java index ef50a1c322f58..983eed2468a57 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSink.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSink.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkNotNull; +import java.io.OutputStream; +import java.nio.channels.Channels; +import java.nio.channels.WritableByteChannel; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation; import org.apache.beam.sdk.io.FileBasedSink.FileBasedWriter; @@ -27,14 +33,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.PCollection; -import java.io.OutputStream; -import java.nio.channels.Channels; -import java.nio.channels.WritableByteChannel; - -import javax.xml.bind.JAXBContext; -import javax.xml.bind.JAXBException; -import javax.xml.bind.Marshaller; - // CHECKSTYLE.OFF: JavadocStyle /** * A {@link Sink} that outputs records as XML-formatted elements. 
Writes a {@link PCollection} of diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSource.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSource.java index 83443a32107e2..e00857ea2bf82 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSource.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/XmlSource.java @@ -19,14 +19,6 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.JAXBCoder; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.runners.PipelineRunner; -import org.apache.beam.sdk.transforms.display.DisplayData; - -import org.codehaus.stax2.XMLInputFactory2; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -39,7 +31,6 @@ import java.nio.channels.ReadableByteChannel; import java.nio.charset.StandardCharsets; import java.util.NoSuchElementException; - import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; @@ -51,6 +42,12 @@ import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.JAXBCoder; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.runners.PipelineRunner; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.codehaus.stax2.XMLInputFactory2; // CHECKSTYLE.OFF: JavadocStyle /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKey.java index f82a11d532cd4..e4129ff56512b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKey.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKey.java @@ -21,7 +21,6 @@ import 
com.google.protobuf.ByteString; import com.google.protobuf.ByteString.ByteIterator; - import java.io.Serializable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java index cfd924d3841e8..288124b310ecc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRange.java @@ -24,16 +24,14 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Arrays; import java.util.List; import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class representing a range of {@link ByteKey ByteKeys}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java index 24bf6e474b4da..7c0f1c0a6d388 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/ByteKeyRangeTracker.java @@ -20,13 +20,11 @@ import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkState; +import javax.annotation.Nullable; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - /** * A {@link RangeTracker} for {@link ByteKey ByteKeys} in {@link ByteKeyRange ByteKeyRanges}. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/OffsetRangeTracker.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/OffsetRangeTracker.java index a8d00ee1d16bb..51e2b1ac2a1ce 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/OffsetRangeTracker.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/io/range/OffsetRangeTracker.java @@ -19,10 +19,8 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.io.BoundedSource.BoundedReader; - import com.google.common.annotations.VisibleForTesting; - +import org.apache.beam.sdk.io.BoundedSource.BoundedReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java index b2df96e7474f2..b4ae31406448c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcpOptions.java @@ -17,21 +17,12 @@ */ package org.apache.beam.sdk.options; -import org.apache.beam.sdk.util.CredentialFactory; -import org.apache.beam.sdk.util.GcpCredentialFactory; -import org.apache.beam.sdk.util.InstanceBuilder; -import org.apache.beam.sdk.util.PathValidator; - +import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.googleapis.auth.oauth2.GoogleOAuthConstants; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.io.Files; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -40,8 +31,13 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; - import 
javax.annotation.Nullable; +import org.apache.beam.sdk.util.CredentialFactory; +import org.apache.beam.sdk.util.GcpCredentialFactory; +import org.apache.beam.sdk.util.InstanceBuilder; +import org.apache.beam.sdk.util.PathValidator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Options used to configure Google Cloud Platform project and credentials. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java index 1b3436bf31187..29a29d9f4acda 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GcsOptions.java @@ -17,22 +17,19 @@ */ package org.apache.beam.sdk.options; -import org.apache.beam.sdk.util.AppEngineEnvironment; -import org.apache.beam.sdk.util.GcsPathValidator; -import org.apache.beam.sdk.util.GcsUtil; -import org.apache.beam.sdk.util.InstanceBuilder; -import org.apache.beam.sdk.util.PathValidator; - +import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.cloud.hadoop.util.AbstractGoogleAsyncWriteChannel; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import com.fasterxml.jackson.annotation.JsonIgnore; - import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import org.apache.beam.sdk.util.AppEngineEnvironment; +import org.apache.beam.sdk.util.GcsPathValidator; +import org.apache.beam.sdk.util.GcsUtil; +import org.apache.beam.sdk.util.InstanceBuilder; +import org.apache.beam.sdk.util.PathValidator; /** * Options used to configure Google Cloud Storage. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java index 39cd40e715f81..4932b16ef49f7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/GoogleApiDebugOptions.java @@ -20,7 +20,6 @@ import com.google.api.client.googleapis.services.AbstractGoogleClient; import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; import com.google.api.client.googleapis.services.GoogleClientRequestInitializer; - import java.io.IOException; import java.util.HashMap; import java.util.Map; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionSpec.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionSpec.java index 9a88f70762328..1220e6bc14e27 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionSpec.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionSpec.java @@ -17,10 +17,8 @@ */ package org.apache.beam.sdk.options; -import com.google.auto.value.AutoValue; - import com.fasterxml.jackson.annotation.JsonIgnore; - +import com.google.auto.value.AutoValue; import java.lang.reflect.Method; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java index 4595fc871d225..db54d0ac76b67 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptions.java @@ -17,6 +17,14 @@ */ package org.apache.beam.sdk.options; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.auto.service.AutoService; +import java.lang.reflect.Proxy; +import java.util.ServiceLoader; +import javax.annotation.concurrent.ThreadSafe; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.options.GoogleApiDebugOptions.GoogleApiTracer; import org.apache.beam.sdk.options.ProxyInvocationHandler.Deserializer; @@ -26,17 +34,6 @@ import org.apache.beam.sdk.transforms.DoFn.Context; import org.apache.beam.sdk.transforms.display.HasDisplayData; -import com.google.auto.service.AutoService; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -import java.lang.reflect.Proxy; -import java.util.ServiceLoader; -import javax.annotation.concurrent.ThreadSafe; - /** * PipelineOptions are used to configure Pipelines. You can extend {@link PipelineOptions} * to create custom configuration options specific to your {@link Pipeline}, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java index a795fcd024ffc..43927bc716217 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsFactory.java @@ -20,13 +20,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.Validation.Required; -import org.apache.beam.sdk.runners.PipelineRunner; -import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.util.StringUtils; -import org.apache.beam.sdk.util.common.ReflectHelpers; - 
+import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Joiner; @@ -49,13 +45,6 @@ import com.google.common.collect.SortedSetMultimap; import com.google.common.collect.TreeBasedTable; import com.google.common.collect.TreeMultimap; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; @@ -81,8 +70,15 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; - import javax.annotation.Nonnull; +import org.apache.beam.sdk.options.Validation.Required; +import org.apache.beam.sdk.runners.PipelineRunner; +import org.apache.beam.sdk.runners.PipelineRunnerRegistrar; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.util.StringUtils; +import org.apache.beam.sdk.util.common.ReflectHelpers; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Constructs a {@link PipelineOptions} or any derived interface that is composable to any other diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java index 607bddae6578c..26c65aec890d3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsReflector.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.options; -import org.apache.beam.sdk.util.common.ReflectHelpers; - import com.google.common.collect.HashMultimap; import 
com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; - import java.beans.Introspector; import java.lang.reflect.Method; import java.util.Map; import java.util.Set; +import org.apache.beam.sdk.util.common.ReflectHelpers; /** * Utilities to reflect over {@link PipelineOptions}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsRegistrar.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsRegistrar.java index 16cf7cd730022..2ddff42f8eb43 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsRegistrar.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsRegistrar.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.options; import com.google.auto.service.AutoService; - import java.util.ServiceLoader; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsValidator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsValidator.java index 70be65fa4acdb..bd54ec39bd74b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsValidator.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/PipelineOptionsValidator.java @@ -20,17 +20,15 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.Validation.Required; -import org.apache.beam.sdk.util.common.ReflectHelpers; - import com.google.common.collect.Collections2; import com.google.common.collect.Ordering; import com.google.common.collect.SortedSetMultimap; import com.google.common.collect.TreeMultimap; - import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Collection; +import org.apache.beam.sdk.options.Validation.Required; +import org.apache.beam.sdk.util.common.ReflectHelpers; /** * Validates that the {@link PipelineOptions} conforms 
to all the {@link Validation} criteria. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ProxyInvocationHandler.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ProxyInvocationHandler.java index fe67f1685da3c..204ad97e7184b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ProxyInvocationHandler.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/options/ProxyInvocationHandler.java @@ -20,25 +20,6 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.PipelineOptionsFactory.JsonIgnorePredicate; -import org.apache.beam.sdk.options.PipelineOptionsFactory.Registration; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.transforms.display.HasDisplayData; -import org.apache.beam.sdk.util.InstanceBuilder; -import org.apache.beam.sdk.util.common.ReflectHelpers; - -import com.google.auto.value.AutoValue; -import com.google.common.base.Defaults; -import com.google.common.base.Function; -import com.google.common.collect.ClassToInstanceMap; -import com.google.common.collect.FluentIterable; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Multimap; -import com.google.common.collect.MutableClassToInstanceMap; - import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; @@ -51,7 +32,17 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.node.ObjectNode; - +import com.google.auto.value.AutoValue; +import com.google.common.base.Defaults; +import com.google.common.base.Function; +import 
com.google.common.collect.ClassToInstanceMap; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Multimap; +import com.google.common.collect.MutableClassToInstanceMap; import java.beans.PropertyDescriptor; import java.io.IOException; import java.lang.annotation.Annotation; @@ -70,9 +61,14 @@ import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.ThreadLocalRandom; - import javax.annotation.Nullable; import javax.annotation.concurrent.ThreadSafe; +import org.apache.beam.sdk.options.PipelineOptionsFactory.JsonIgnorePredicate; +import org.apache.beam.sdk.options.PipelineOptionsFactory.Registration; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.transforms.display.HasDisplayData; +import org.apache.beam.sdk.util.InstanceBuilder; +import org.apache.beam.sdk.util.common.ReflectHelpers; /** * Represents and {@link InvocationHandler} for a {@link Proxy}. 
The invocation handler uses bean diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunnerRegistrar.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunnerRegistrar.java index 949f5daf9ec47..d72c44f9a37db 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunnerRegistrar.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PipelineRunnerRegistrar.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.runners; import com.google.auto.service.AutoService; - import java.util.ServiceLoader; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformHierarchy.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformHierarchy.java index 6b93bbd8a2ad7..0a4bb08a315de 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformHierarchy.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformHierarchy.java @@ -19,16 +19,15 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.values.PInput; -import org.apache.beam.sdk.values.POutput; -import org.apache.beam.sdk.values.PValue; - import java.util.Deque; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; import java.util.Set; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.values.PInput; +import org.apache.beam.sdk.values.POutput; +import org.apache.beam.sdk.values.PValue; /** * Captures information about a collection of transformations and their diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformTreeNode.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformTreeNode.java index b5a250b57f1f3..d16b8281dc66c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformTreeNode.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/TransformTreeNode.java @@ -20,21 
+20,19 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.Pipeline.PipelineVisitor; -import org.apache.beam.sdk.Pipeline.PipelineVisitor.CompositeBehavior; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.values.PInput; -import org.apache.beam.sdk.values.POutput; -import org.apache.beam.sdk.values.PValue; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.Pipeline.PipelineVisitor; +import org.apache.beam.sdk.Pipeline.PipelineVisitor.CompositeBehavior; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.values.PInput; +import org.apache.beam.sdk.values.POutput; +import org.apache.beam.sdk.values.PValue; /** * Provides internal tracking of transform relationships with helper methods diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/CoderProperties.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/CoderProperties.java index e56f01ff7a3e0..910b9395aae85 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/CoderProperties.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/CoderProperties.java @@ -26,24 +26,12 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.Coder.NonDeterministicException; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.util.CoderUtils; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.SerializableUtils; -import org.apache.beam.sdk.util.Serializer; -import org.apache.beam.sdk.util.Structs; -import org.apache.beam.sdk.util.UnownedInputStream; -import 
org.apache.beam.sdk.util.UnownedOutputStream; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.io.ByteStreams; import com.google.common.io.CountingInputStream; import com.google.common.io.CountingOutputStream; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -51,6 +39,17 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.Coder.NonDeterministicException; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.SerializableUtils; +import org.apache.beam.sdk.util.Serializer; +import org.apache.beam.sdk.util.Structs; +import org.apache.beam.sdk.util.UnownedInputStream; +import org.apache.beam.sdk.util.UnownedOutputStream; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * Properties for use in {@link Coder} tests. 
These are implemented with junit assertions diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/FileChecksumMatcher.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/FileChecksumMatcher.java index 303efcb392b8a..de6cea383e8c7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/FileChecksumMatcher.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/FileChecksumMatcher.java @@ -20,20 +20,10 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.PipelineResult; -import org.apache.beam.sdk.util.IOChannelFactory; -import org.apache.beam.sdk.util.IOChannelUtils; - import com.google.common.base.Strings; import com.google.common.hash.HashCode; import com.google.common.hash.Hashing; import com.google.common.io.CharStreams; - -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.Reader; import java.nio.channels.Channels; @@ -41,6 +31,13 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.util.IOChannelFactory; +import org.apache.beam.sdk.util.IOChannelUtils; +import org.hamcrest.Description; +import org.hamcrest.TypeSafeMatcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Matcher to verify file checksum in E2E test. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherDeserializer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherDeserializer.java index 84984709bc64a..6ca07badac929 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherDeserializer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherDeserializer.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.testing; -import org.apache.beam.sdk.util.SerializableUtils; - -import com.google.api.client.util.Base64; - import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.node.ObjectNode; - +import com.google.api.client.util.Base64; import java.io.IOException; +import org.apache.beam.sdk.util.SerializableUtils; /** * MatcherDeserializer is used with Jackson to enable deserialization of SerializableMatchers. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherSerializer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherSerializer.java index 845248604e2b3..2b4584c8cdaca 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherSerializer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/MatcherSerializer.java @@ -17,16 +17,13 @@ */ package org.apache.beam.sdk.testing; -import org.apache.beam.sdk.util.SerializableUtils; - -import com.google.api.client.util.Base64; - import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; - +import com.google.api.client.util.Base64; import java.io.IOException; +import org.apache.beam.sdk.util.SerializableUtils; /** * MatcherSerializer is used with Jackson to enable serialization of SerializableMatchers. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java index 3f1a741a4dff8..56a1b7c418dae 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PAssert.java @@ -22,6 +22,16 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.io.IOException; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.NoSuchElementException; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -63,24 +73,10 @@ import org.apache.beam.sdk.values.PCollectionList; 
import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PDone; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.joda.time.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.Serializable; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.NoSuchElementException; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * An assertion on the contents of a {@link PCollection} incorporated into the pipeline. Such an * assertion can be checked no matter what kind of {@link PipelineRunner} is used. @@ -357,7 +353,7 @@ private static class PCollectionContentsAssert implements IterableAssert { private final SimpleFunction>, Iterable> paneExtractor; public PCollectionContentsAssert(PCollection actual) { - this(actual, IntoGlobalWindow.of(), PaneExtractors.onlyPane()); + this(actual, IntoGlobalWindow.of(), PaneExtractors.allPanes()); } public PCollectionContentsAssert( @@ -1066,14 +1062,7 @@ private GroupedValuesCheckerDoFn(SerializableFunction checkerFn) @ProcessElement public void processElement(ProcessContext c) { - try { - doChecks(c.element(), checkerFn, success, failure); - } catch (Throwable t) { - // Suppress exception in streaming - if (!c.getPipelineOptions().as(StreamingOptions.class).isStreaming()) { - throw t; - } - } + doChecks(c.element(), checkerFn, success, failure); } } @@ -1098,15 +1087,8 @@ private SingletonCheckerDoFn(SerializableFunction checkerFn) { @ProcessElement public void processElement(ProcessContext c) { - try { - ActualT actualContents = Iterables.getOnlyElement(c.element()); - doChecks(actualContents, checkerFn, success, failure); - } catch (Throwable t) { - // Suppress exception in streaming - if (!c.getPipelineOptions().as(StreamingOptions.class).isStreaming()) { - throw t; - } - } + ActualT actualContents = 
Iterables.getOnlyElement(c.element()); + doChecks(actualContents, checkerFn, success, failure); } } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PaneExtractors.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PaneExtractors.java index 899612b3ef2a1..db72a0cd4c229 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PaneExtractors.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/PaneExtractors.java @@ -19,6 +19,8 @@ import static com.google.common.base.Preconditions.checkState; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.SimpleFunction; import org.apache.beam.sdk.transforms.windowing.PaneInfo; @@ -27,9 +29,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; -import java.util.ArrayList; -import java.util.List; - /** * {@link PTransform PTransforms} which take an {@link Iterable} of {@link WindowedValue * WindowedValues} and outputs an {@link Iterable} of all values in the specified pane, dropping the diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java index a465bbec32aa8..4e4299d13b2bc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatcher.java @@ -19,9 +19,8 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import org.hamcrest.Matcher; - import java.io.Serializable; +import org.hamcrest.Matcher; /** * A {@link Matcher} that is also {@link Serializable}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java index 62a42e4176e89..bd44c4801dcbd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SerializableMatchers.java @@ -17,28 +17,24 @@ */ package org.apache.beam.sdk.testing; +import com.google.common.base.MoreObjects; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.ListCoder; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.UserCodeException; import org.apache.beam.sdk.values.KV; - -import com.google.common.base.MoreObjects; - import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.Matchers; -import java.io.Serializable; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import javax.annotation.Nullable; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * Static class for building and using {@link SerializableMatcher} instances. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SourceTestUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SourceTestUtils.java index 9ce9c5e518da8..e38e1af456639 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SourceTestUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/SourceTestUtils.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.testing; import static com.google.common.base.Preconditions.checkNotNull; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertEquals; @@ -27,21 +26,7 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.io.BoundedSource; -import org.apache.beam.sdk.io.BoundedSource.BoundedReader; -import org.apache.beam.sdk.io.Source; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.values.KV; - import com.google.common.collect.ImmutableList; - -import org.joda.time.Instant; -import org.junit.Assert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -52,8 +37,18 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.io.BoundedSource; +import org.apache.beam.sdk.io.BoundedSource.BoundedReader; +import org.apache.beam.sdk.io.Source; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.values.KV; +import org.joda.time.Instant; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper functions 
and test harnesses for checking correctness of {@link Source} diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/StaticWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/StaticWindows.java index 08d23559bb500..949ecac43c813 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/StaticWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/StaticWindows.java @@ -19,17 +19,15 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.NonMergingWindowFn; -import org.apache.beam.sdk.transforms.windowing.WindowFn; - import com.google.common.base.Supplier; import com.google.common.collect.Iterables; - import java.util.Collection; import java.util.Collections; import java.util.Objects; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.NonMergingWindowFn; +import org.apache.beam.sdk.transforms.windowing.WindowFn; /** * A {@link WindowFn} that assigns all elements to a static collection of diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java index 98cdeba0bb506..f1bf09d9c2152 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipeline.java @@ -17,35 +17,31 @@ */ package org.apache.beam.sdk.testing; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.PipelineResult; -import org.apache.beam.sdk.options.ApplicationNameOptions; -import org.apache.beam.sdk.options.GcpOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptions.CheckEnabled; -import 
org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.runners.PipelineRunner; -import org.apache.beam.sdk.util.IOChannelUtils; -import org.apache.beam.sdk.util.TestCredential; - -import com.google.common.base.Optional; -import com.google.common.base.Strings; -import com.google.common.collect.Iterators; - import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.TreeNode; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; -import org.junit.experimental.categories.Category; - +import com.google.common.base.Optional; +import com.google.common.base.Strings; +import com.google.common.collect.Iterators; import java.io.IOException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Iterator; import java.util.Map.Entry; - import javax.annotation.Nullable; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.options.ApplicationNameOptions; +import org.apache.beam.sdk.options.GcpOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptions.CheckEnabled; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.runners.PipelineRunner; +import org.apache.beam.sdk.util.IOChannelUtils; +import org.apache.beam.sdk.util.TestCredential; +import org.junit.experimental.categories.Category; /** * A creator of test pipelines that can be used inside of tests that can be diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java index c4596c152c5a3..ff553bafa85e6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestPipelineOptions.java @@ -21,7 +21,6 
@@ import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.PipelineOptions; - import org.hamcrest.BaseMatcher; import org.hamcrest.Description; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestStream.java index e2eda32228556..e2730edfe6f4c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestStream.java @@ -21,6 +21,16 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DurationCoder; @@ -36,24 +46,10 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TimestampedValue.TimestampedValueCoder; - -import com.google.auto.value.AutoValue; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.ReadableDuration; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collections; -import java.util.List; - /** * A testing input that generates an 
unbounded {@link PCollection} of elements, advancing the * watermark and processing time as elements are emitted. After all of the specified elements are diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java index 127721affcddc..63e7903b8e648 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowFnTestUtils.java @@ -22,16 +22,6 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.GlobalWindow; -import org.apache.beam.sdk.transforms.windowing.IntervalWindow; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; -import org.apache.beam.sdk.transforms.windowing.WindowFn; - -import org.joda.time.Instant; -import org.joda.time.ReadableInstant; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -41,8 +31,15 @@ import java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.GlobalWindow; +import org.apache.beam.sdk.transforms.windowing.IntervalWindow; +import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; +import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; +import org.apache.beam.sdk.transforms.windowing.WindowFn; +import org.joda.time.Instant; +import org.joda.time.ReadableInstant; /** * A utility class for testing {@link WindowFn}s. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowSupplier.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowSupplier.java index 62bc09ffc275b..96091ef87fde6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowSupplier.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/WindowSupplier.java @@ -17,17 +17,15 @@ */ package org.apache.beam.sdk.testing; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.CoderUtils; - import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; - import java.io.Serializable; import java.util.Collection; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.CoderUtils; /** * A {@link Supplier} that returns a static set of {@link BoundedWindow BoundedWindows}. 
The diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java index fdbde74e4b13a..d80c116c175c8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/AppliedPTransform.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.transforms; -import org.apache.beam.sdk.values.PInput; -import org.apache.beam.sdk.values.POutput; - import com.google.common.base.MoreObjects; import com.google.common.base.Objects; +import org.apache.beam.sdk.values.PInput; +import org.apache.beam.sdk.values.POutput; /** * Represents the application of a {@link PTransform} to a specific input to produce diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateQuantiles.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateQuantiles.java index fa87ed89fb0c4..656bd7bbac361 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateQuantiles.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateQuantiles.java @@ -19,24 +19,9 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.BigEndianIntegerCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.CoderRegistry; -import org.apache.beam.sdk.coders.CustomCoder; -import org.apache.beam.sdk.coders.ListCoder; -import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn; -import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn.Accumulator; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.util.WeightedValue; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; -import org.apache.beam.sdk.values.KV; -import 
org.apache.beam.sdk.values.PCollection; - import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.UnmodifiableIterator; - import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; @@ -50,8 +35,20 @@ import java.util.Iterator; import java.util.List; import java.util.PriorityQueue; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.BigEndianIntegerCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.CoderRegistry; +import org.apache.beam.sdk.coders.CustomCoder; +import org.apache.beam.sdk.coders.ListCoder; +import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn; +import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn.Accumulator; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.util.WeightedValue; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollection; /** * {@code PTransform}s for getting an idea of a {@code PCollection}'s diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java index 2fa24277661cc..71c2158fa42cb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ApproximateUnique.java @@ -17,6 +17,16 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.hash.Hashing; +import com.google.common.hash.HashingOutputStream; +import com.google.common.io.ByteStreams; +import java.io.IOException; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.PriorityQueue; +import 
org.apache.avro.reflect.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.Coder.Context; import org.apache.beam.sdk.coders.CoderException; @@ -28,19 +38,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.common.hash.Hashing; -import com.google.common.hash.HashingOutputStream; -import com.google.common.io.ByteStreams; - -import org.apache.avro.reflect.Nullable; - -import java.io.IOException; -import java.io.Serializable; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.PriorityQueue; - /** * {@code PTransform}s for estimating the number of distinct elements * in a {@code PCollection}, or the number of distinct values diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java index 56c0bc4e5c6e2..26f0f660f0743 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Combine.java @@ -19,6 +19,18 @@ import static com.google.common.base.Preconditions.checkState; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -58,21 +70,6 @@ import org.apache.beam.sdk.values.TupleTagList; import org.apache.beam.sdk.values.TypeDescriptor; -import com.google.common.collect.ImmutableList; -import 
com.google.common.collect.Iterables; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.ThreadLocalRandom; - /** * {@code PTransform}s for combining {@code PCollection} elements * globally and per-key. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFnBase.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFnBase.java index c73ba542c3591..f98ec96b782e2 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFnBase.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFnBase.java @@ -17,6 +17,10 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.collect.ImmutableMap; +import java.io.Serializable; +import java.lang.reflect.Type; +import java.lang.reflect.TypeVariable; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderRegistry; @@ -28,12 +32,6 @@ import org.apache.beam.sdk.transforms.display.HasDisplayData; import org.apache.beam.sdk.values.TypeDescriptor; -import com.google.common.collect.ImmutableMap; - -import java.io.Serializable; -import java.lang.reflect.Type; -import java.lang.reflect.TypeVariable; - /** * This class contains the shared interfaces and abstract classes for different types of combine * functions. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java index 61f4888ebb2d9..9fa8ded6b2409 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/CombineFns.java @@ -19,6 +19,24 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Multimap; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -37,27 +55,6 @@ import org.apache.beam.sdk.util.PropertyNames; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Multimap; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.Collection; -import 
java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - /** * Static utility methods that create combine function instances. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java index 195c5d17ed88a..b393a303951d7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Count.java @@ -17,6 +17,12 @@ */ package org.apache.beam.sdk.transforms; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.UTFDataFormatException; +import java.util.Iterator; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; @@ -26,13 +32,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.UTFDataFormatException; -import java.util.Iterator; - /** * {@code PTransorm}s to count the elements in a {@link PCollection}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java index 08d0a7ae356ba..e261db2b30f91 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java @@ -19,6 +19,20 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.base.Optional; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Objects; +import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; @@ -37,26 +51,8 @@ import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TimestampedValue.TimestampedValueCoder; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import com.google.common.base.Optional; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Objects; - -import javax.annotation.Nullable; - /** * {@code Create} takes a collection of elements of type {@code T} * known when the pipeline is constructed and returns a diff --git 
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java index 2348783f9f616..9f898261c7bb3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java @@ -21,6 +21,14 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import java.io.Serializable; +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.HashMap; +import java.util.Map; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator; @@ -32,19 +40,9 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TypeDescriptor; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import java.lang.annotation.Documented; -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.util.HashMap; -import java.util.Map; - /** * The argument to {@link ParDo} providing the code to use to process * elements of the input diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnAdapters.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnAdapters.java index 71a148ffafe90..4803d77f57f10 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnAdapters.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnAdapters.java @@ -17,6 +17,7 @@ */ package 
org.apache.beam.sdk.transforms; +import java.io.IOException; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.reflect.DoFnInvoker; @@ -28,12 +29,9 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TypeDescriptor; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.IOException; - /** * Utility class containing adapters for running a {@link DoFn} as an {@link OldDoFn}. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java index 4cd410a195973..82c12938fa684 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFnTester.java @@ -17,6 +17,19 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -34,20 +47,7 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import 
org.joda.time.Instant; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; /** * A harness for unit-testing a {@link OldDoFn}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java index 2837c40cc3a6a..b590d459f9e1f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/FlatMapElements.java @@ -17,13 +17,12 @@ */ package org.apache.beam.sdk.transforms; +import java.lang.reflect.ParameterizedType; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.beam.sdk.values.TypeDescriptors; -import java.lang.reflect.ParameterizedType; - /** * {@code PTransform}s for mapping a simple function that returns iterables over the elements of a * {@link PCollection} and merging the results. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java index b5fe60f330d01..1eef0e1b06477 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/IntraBundleParallelization.java @@ -19,6 +19,10 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.base.Throwables; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicReference; import org.apache.beam.sdk.options.GcsOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Combine.CombineFn; @@ -30,15 +34,8 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.base.Throwables; - import org.joda.time.Instant; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Semaphore; -import java.util.concurrent.atomic.AtomicReference; - /** * Provides multi-threading of {@link OldDoFn}s, using threaded execution to * process multiple elements concurrently within a bundle. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Max.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Max.java index eed13fbb70b4e..c44d9b634671b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Max.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Max.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.transforms; -import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn; -import org.apache.beam.sdk.transforms.display.DisplayData; - import java.io.Serializable; import java.util.Comparator; +import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn; +import org.apache.beam.sdk.transforms.display.DisplayData; /** * {@code PTransform}s for computing the maximum of the elements in a {@code PCollection}, or the diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Mean.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Mean.java index 5005f6bafb573..1a0791f993094 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Mean.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Mean.java @@ -17,6 +17,11 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.base.MoreObjects; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Objects; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.BigEndianLongCoder; import org.apache.beam.sdk.coders.Coder; @@ -25,13 +30,6 @@ import org.apache.beam.sdk.coders.DoubleCoder; import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn.Accumulator; -import com.google.common.base.MoreObjects; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Objects; - /** * {@code PTransform}s for computing the arithmetic mean * (a.k.a. 
average) of the elements in a {@code PCollection}, or the diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Min.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Min.java index 9c9d14f3ad01a..f046779038337 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Min.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Min.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.transforms; -import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn; -import org.apache.beam.sdk.transforms.display.DisplayData; - import java.io.Serializable; import java.util.Comparator; +import org.apache.beam.sdk.transforms.Combine.BinaryCombineFn; +import org.apache.beam.sdk.transforms.display.DisplayData; /** * {@code PTransform}s for computing the minimum of the elements in a {@code PCollection}, or the diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java index 84cd9978c6212..474efef2c3d36 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/OldDoFn.java @@ -21,6 +21,14 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.MoreObjects; +import java.io.Serializable; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.options.PipelineOptions; @@ -33,20 +41,9 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.base.MoreObjects; - 
import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; - /** * The argument to {@link ParDo} providing the code to use to process * elements of the input diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java index 12ab54d0e3704..19abef90cea1b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/PTransform.java @@ -17,6 +17,9 @@ */ package org.apache.beam.sdk.transforms; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; @@ -27,10 +30,6 @@ import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.TypedPValue; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; - /** * A {@code PTransform} is an operation that takes an * {@code InputT} (some subtype of {@link PInput}) and produces an diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java index af500ba0c9c39..5efbe9f190240 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ParDo.java @@ -17,6 +17,10 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.collect.ImmutableList; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.annotations.Experimental; 
import org.apache.beam.sdk.coders.CannotProvideCoderException; @@ -34,12 +38,6 @@ import org.apache.beam.sdk.values.TupleTagList; import org.apache.beam.sdk.values.TypedPValue; -import com.google.common.collect.ImmutableList; - -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** * {@link ParDo} is the core element-wise transform in Google Cloud * Dataflow, invoking a user-specified function on each of the elements of the input diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java index 05c94700245a9..9247942f36dec 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Partition.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.transforms; +import java.io.Serializable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; @@ -25,8 +26,6 @@ import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; -import java.io.Serializable; - /** * {@code Partition} takes a {@code PCollection} and a * {@code PartitionFn}, uses the {@code PartitionFn} to split the diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java index 12ff2b90b4952..eca987a942279 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Sample.java @@ -19,6 +19,9 @@ import static com.google.common.base.Preconditions.checkArgument; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.Coder; import 
org.apache.beam.sdk.coders.CoderRegistry; @@ -31,10 +34,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - /** * {@code PTransform}s for taking samples of the elements in a * {@code PCollection}, or samples of the values associated with each diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java index 04cf9e33f6f98..992a341059a6e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Top.java @@ -19,6 +19,15 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.PriorityQueue; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; @@ -34,17 +43,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; -import com.google.common.collect.Lists; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.List; -import java.util.PriorityQueue; - /** * {@code PTransform}s for finding the largest (or smallest) set * of elements in a {@code PCollection}, or the largest (or smallest) diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java index 7a97c13d336cc..d750efc9fedc4 100644 --- 
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/View.java @@ -17,15 +17,14 @@ */ package org.apache.beam.sdk.transforms; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.runners.PipelineRunner; import org.apache.beam.sdk.util.PCollectionViews; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; -import java.util.List; -import java.util.Map; - /** * Transforms for creating {@link PCollectionView PCollectionViews} from * {@link PCollection PCollections} (to read them as side inputs). diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ViewFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ViewFn.java index aa3cb0d628e64..767e58e243d44 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ViewFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/ViewFn.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.transforms; +import java.io.Serializable; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; -import java.io.Serializable; - /** * A function to adapt a primitive "view" of a {@link PCollection} - some materialization * specified in the Beam model and implemented by the runner - to a user-facing view type diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java index 7b395f5e8dbf4..00ac8e4a392db 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/WithTimestamps.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.transforms.windowing.WindowFn; import 
org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.joda.time.Instant; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java index 5dcaec892379d..0b92d9fce3947 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/display/DisplayData.java @@ -20,27 +20,24 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.transforms.PTransform; - +import com.fasterxml.jackson.annotation.JsonGetter; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonValue; import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - -import com.fasterxml.jackson.annotation.JsonGetter; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonValue; -import org.apache.avro.reflect.Nullable; -import org.joda.time.Duration; -import org.joda.time.Instant; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; - import java.io.Serializable; import java.util.Collection; import java.util.Map; import java.util.Objects; import java.util.Set; +import org.apache.avro.reflect.Nullable; +import org.apache.beam.sdk.transforms.PTransform; +import org.joda.time.Duration; +import org.joda.time.Instant; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.ISODateTimeFormat; /** * Static display data associated with a pipeline component. 
Display data is useful for diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java index aa26cbbf68e24..10ba3c9489da7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResult.java @@ -17,10 +17,21 @@ */ package org.apache.beam.sdk.transforms.join; -import static org.apache.beam.sdk.util.Structs.addObject; - import static com.google.common.base.Preconditions.checkArgument; +import static org.apache.beam.sdk.util.Structs.addObject; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.Iterators; +import com.google.common.collect.PeekingIterator; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.IterableCoder; @@ -31,24 +42,9 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - -import com.google.common.collect.Iterators; -import com.google.common.collect.PeekingIterator; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Objects; - /** * A row result of a {@link CoGroupByKey}. 
This is a tuple of {@link Iterable}s produced for * a given key, and these can be accessed in different ways. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResultSchema.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResultSchema.java index 29a0c7d731d4b..276cccb3a0531 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResultSchema.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGbkResultSchema.java @@ -19,18 +19,16 @@ import static org.apache.beam.sdk.util.Structs.addList; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.values.TupleTag; -import org.apache.beam.sdk.values.TupleTagList; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.values.TupleTag; +import org.apache.beam.sdk.values.TupleTagList; /** * A schema for the results of a {@link CoGroupByKey}. 
This maintains the full diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java index cb06f95354fe5..9546b291d4fc5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/CoGroupByKey.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.transforms.join; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.transforms.DoFn; @@ -30,9 +32,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; -import java.util.ArrayList; -import java.util.List; - /** * A {@link PTransform} that performs a {@link CoGroupByKey} on a tuple * of tables. A {@link CoGroupByKey} groups results from all diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/KeyedPCollectionTuple.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/KeyedPCollectionTuple.java index 0852c9c25b29b..67b819fddc2cc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/KeyedPCollectionTuple.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/KeyedPCollectionTuple.java @@ -17,6 +17,9 @@ */ package org.apache.beam.sdk.transforms.join; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; @@ -29,10 +32,6 @@ import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - /** * An immutable tuple of keyed {@link PCollection PCollections} * with key type K. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/RawUnionValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/RawUnionValue.java index 159b1007c3ea8..07bfe69efc2b4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/RawUnionValue.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/RawUnionValue.java @@ -48,4 +48,29 @@ public Object getValue() { public String toString() { return unionTag + ":" + value; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + RawUnionValue that = (RawUnionValue) o; + + if (unionTag != that.unionTag) { + return false; + } + return value != null ? value.equals(that.value) : that.value == null; + + } + + @Override + public int hashCode() { + int result = unionTag; + result = 31 * result + (value != null ? value.hashCode() : 0); + return result; + } } diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/UnionCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/UnionCoder.java index 29240e7bb8638..72ac6e881ffff 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/UnionCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/join/UnionCoder.java @@ -17,20 +17,18 @@ */ package org.apache.beam.sdk.transforms.join; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.util.PropertyNames; -import org.apache.beam.sdk.util.VarInt; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; +import 
org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.util.PropertyNames; +import org.apache.beam.sdk.util.VarInt; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; /** * A UnionCoder encodes RawUnionValues. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokers.java index 73874d767d3c1..68e2ca99886e1 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokers.java @@ -20,14 +20,14 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.DoFn.FinishBundle; -import org.apache.beam.sdk.transforms.DoFn.ProcessElement; -import org.apache.beam.sdk.transforms.DoFn.Setup; -import org.apache.beam.sdk.transforms.DoFn.StartBundle; -import org.apache.beam.sdk.transforms.DoFn.Teardown; -import org.apache.beam.sdk.util.UserCodeException; - +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumMap; +import java.util.LinkedHashMap; +import java.util.Map; +import javax.annotation.Nullable; import net.bytebuddy.ByteBuddy; import net.bytebuddy.NamingStrategy; import net.bytebuddy.description.field.FieldDescription; @@ -53,16 +53,13 @@ import net.bytebuddy.jar.asm.MethodVisitor; import net.bytebuddy.jar.asm.Opcodes; import net.bytebuddy.matcher.ElementMatchers; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.EnumMap; 
-import java.util.LinkedHashMap; -import java.util.Map; - -import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.DoFn.FinishBundle; +import org.apache.beam.sdk.transforms.DoFn.ProcessElement; +import org.apache.beam.sdk.transforms.DoFn.Setup; +import org.apache.beam.sdk.transforms.DoFn.StartBundle; +import org.apache.beam.sdk.transforms.DoFn.Teardown; +import org.apache.beam.sdk.util.UserCodeException; /** Dynamically generates {@link DoFnInvoker} instances for invoking a {@link DoFn}. */ public class DoFnInvokers { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java index 6730140ac309d..181c088820496 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java @@ -17,15 +17,12 @@ */ package org.apache.beam.sdk.transforms.reflect; -import org.apache.beam.sdk.transforms.DoFn; - import com.google.auto.value.AutoValue; - import java.lang.reflect.Method; import java.util.Collections; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.DoFn; /** * Describes the signature of a {@link DoFn}, in particular, which features it uses, which extra diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java index 80b3b4f5b825a..7e482d5df6e32 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java @@ -20,14 +20,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static 
com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.common.ReflectHelpers; - import com.google.common.annotations.VisibleForTesting; import com.google.common.reflect.TypeParameter; import com.google.common.reflect.TypeToken; - import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.lang.reflect.Modifier; @@ -41,6 +36,9 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.common.ReflectHelpers; /** * Parses a {@link DoFn} and computes its {@link DoFnSignature}. See {@link #getOrParseSignature}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterAll.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterAll.java index 27dd1293ad32c..cc8c97fa14bf6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterAll.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterAll.java @@ -19,17 +19,14 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.base.Joiner; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.ExecutableTrigger; - -import com.google.common.base.Joiner; - import org.joda.time.Instant; -import java.util.Arrays; -import java.util.List; - /** * Create a {@link Trigger} that fires and finishes once after all of its sub-triggers have fired. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterDelayFromFirstElement.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterDelayFromFirstElement.java index a0fc04d64288a..c4bc94615dc3d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterDelayFromFirstElement.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterDelayFromFirstElement.java @@ -17,6 +17,12 @@ */ package org.apache.beam.sdk.transforms.windowing; +import com.google.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.InstantCoder; import org.apache.beam.sdk.transforms.Combine; @@ -31,21 +37,11 @@ import org.apache.beam.sdk.util.state.StateMerging; import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.format.PeriodFormat; import org.joda.time.format.PeriodFormatter; -import java.util.List; -import java.util.Locale; -import java.util.Objects; -import javax.annotation.Nullable; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * A base class for triggers that happen after a processing time delay from the arrival * of the first element in a pane. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java index dc1e74b099581..629c640eabddf 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterEach.java @@ -19,15 +19,12 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.util.ExecutableTrigger; - import com.google.common.base.Joiner; - -import org.joda.time.Instant; - import java.util.Arrays; import java.util.List; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.util.ExecutableTrigger; +import org.joda.time.Instant; /** * A composite {@link Trigger} that executes its sub-triggers in order. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterFirst.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterFirst.java index 1034cac7d8b9a..6b06cfa09eb3f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterFirst.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterFirst.java @@ -19,17 +19,14 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.common.base.Joiner; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.ExecutableTrigger; - -import com.google.common.base.Joiner; - import org.joda.time.Instant; -import java.util.Arrays; -import java.util.List; - /** * Create a composite {@link Trigger} that fires once after at least one of its sub-triggers have * fired. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterPane.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterPane.java index 73289877c00da..8c128dd32bc22 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterPane.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterPane.java @@ -17,6 +17,9 @@ */ package org.apache.beam.sdk.transforms.windowing; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.util.List; +import java.util.Objects; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.VarLongCoder; import org.apache.beam.sdk.transforms.Sum; @@ -27,14 +30,8 @@ import org.apache.beam.sdk.util.state.StateMerging; import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; - import org.joda.time.Instant; -import java.util.List; -import java.util.Objects; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * {@link Trigger}s that fire based on properties of the elements in the current pane. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java index 4c792df518293..f551118b22c05 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTime.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.util.List; +import java.util.Objects; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.util.TimeDomain; - import org.joda.time.Instant; -import java.util.List; -import java.util.Objects; - -import javax.annotation.Nullable; - /** * {@code AfterProcessingTime} triggers fire based on the current processing time. They operate in * the real-time domain. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTime.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTime.java index e8e0fb6f707e5..59ece1073c20f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTime.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTime.java @@ -17,17 +17,13 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.transforms.SerializableFunction; -import org.apache.beam.sdk.util.TimeDomain; - import com.google.common.base.Objects; - -import org.joda.time.Instant; - import java.util.Collections; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.transforms.SerializableFunction; +import org.apache.beam.sdk.util.TimeDomain; +import org.joda.time.Instant; class AfterSynchronizedProcessingTime extends AfterDelayFromFirstElement { diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterWatermark.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterWatermark.java index 0d2a878e56b21..9690be8385265 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterWatermark.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/AfterWatermark.java @@ -19,20 +19,16 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.collect.ImmutableList; +import java.util.List; +import java.util.Objects; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.ExecutableTrigger; import org.apache.beam.sdk.util.TimeDomain; - -import com.google.common.collect.ImmutableList; - import 
org.joda.time.Instant; -import java.util.List; -import java.util.Objects; - -import javax.annotation.Nullable; - /** *

        {@code AfterWatermark} triggers fire based on progress of the system watermark. This time is a * lower-bound, sometimes heuristically established, on event times that have been fully processed diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/BoundedWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/BoundedWindow.java index 50ee9b5552c43..3654074174c17 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/BoundedWindow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/BoundedWindow.java @@ -17,9 +17,8 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.joda.time.Instant; - import java.util.concurrent.TimeUnit; +import org.joda.time.Instant; /** * A {@code BoundedWindow} represents a finite grouping of elements, with an diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/CalendarWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/CalendarWindows.java index 9653ae84ea944..fada50ad5a4d5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/CalendarWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/CalendarWindows.java @@ -19,7 +19,6 @@ import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Days; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java index fcea3337b8a4f..d6b72ef603520 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/DefaultTrigger.java @@ -17,13 +17,11 @@ */ package 
org.apache.beam.sdk.transforms.windowing; +import java.util.List; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.util.TimeDomain; - import org.joda.time.Instant; -import java.util.List; - /** * A trigger that is equivalent to {@code Repeatedly.forever(AfterWatermark.pastEndOfWindow())}. * See {@link Repeatedly#forever} and {@link AfterWatermark#pastEndOfWindow} for more details. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/FixedWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/FixedWindows.java index 1e439ff389b39..8683a600e2045 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/FixedWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/FixedWindows.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.util.Objects; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.Objects; - /** * A {@link WindowFn} that windows values into fixed-size timestamp-based windows. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindow.java index a4712fef76367..58b059aab67ce 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindow.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.io.InputStream; +import java.io.OutputStream; import org.apache.beam.sdk.coders.AtomicCoder; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.InputStream; -import java.io.OutputStream; - /** * The default window into which all data is placed (via {@link GlobalWindows}). */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindows.java index 002bf2e0b419a..aba00a3122911 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/GlobalWindows.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.coders.Coder; - -import org.joda.time.Instant; - import java.util.Collection; import java.util.Collections; +import org.apache.beam.sdk.coders.Coder; +import org.joda.time.Instant; /** * Default {@link WindowFn} that assigns all data to the same window. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java index 45898e00d26b9..af987966be922 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/IntervalWindow.java @@ -17,21 +17,19 @@ */ package org.apache.beam.sdk.transforms.windowing; +import com.fasterxml.jackson.annotation.JsonCreator; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.DurationCoder; import org.apache.beam.sdk.coders.InstantCoder; - -import com.fasterxml.jackson.annotation.JsonCreator; import org.joda.time.Duration; import org.joda.time.Instant; import org.joda.time.ReadableDuration; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - /** * An implementation of {@link BoundedWindow} that represents an interval from * {@link #start} (inclusive) to {@link #end} (exclusive). diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/InvalidWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/InvalidWindows.java index e0d65e5e98e5c..cbbc937eecb6b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/InvalidWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/InvalidWindows.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.util.Collection; import org.apache.beam.sdk.coders.Coder; - import org.joda.time.Instant; -import java.util.Collection; - /** * A {@link WindowFn} that represents an invalid pipeline state. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java index 7267d002715bd..e1f5d4da0c821 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Never.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.util.List; import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; - import org.joda.time.Instant; -import java.util.List; - /** * A trigger which never fires. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTrigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTrigger.java index c48f5f47dacad..25b7b34b520b4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTrigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTrigger.java @@ -17,14 +17,11 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.util.ExecutableTrigger; - import com.google.common.annotations.VisibleForTesting; - -import org.joda.time.Instant; - import java.util.Arrays; import java.util.List; +import org.apache.beam.sdk.util.ExecutableTrigger; +import org.joda.time.Instant; /** * Executes the {@code actual} trigger until it finishes or until the {@code until} trigger fires. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFn.java index 497a6fbd0c98c..0efd278a644a3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFn.java @@ -17,14 +17,11 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.annotations.Experimental; - import com.google.common.collect.Ordering; - -import org.joda.time.Instant; - import java.io.Serializable; import java.util.Objects; +import org.apache.beam.sdk.annotations.Experimental; +import org.joda.time.Instant; /** * (Experimental) A function from timestamps of input values to the timestamp for a diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFns.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFns.java index 363181b22df21..2bcd3192f5ea7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFns.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/OutputTimeFns.java @@ -19,14 +19,11 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.annotations.Experimental; - import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; - -import org.joda.time.Instant; - import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.joda.time.Instant; /** * (Experimental) Static utility methods and provided implementations for diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java index 6ec17f991c0c7..0c87e2271ec92 100644 --- 
a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PaneInfo.java @@ -20,20 +20,18 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.coders.AtomicCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.transforms.GroupByKey; -import org.apache.beam.sdk.transforms.OldDoFn; -import org.apache.beam.sdk.util.VarInt; - import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableMap; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Objects; +import org.apache.beam.sdk.coders.AtomicCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.OldDoFn; +import org.apache.beam.sdk.util.VarInt; /** * Provides information about the pane an element belongs to. 
Every pane is implicitly associated diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PartitioningWindowFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PartitioningWindowFn.java index da2f38ce6cdcf..b84f891464bed 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PartitioningWindowFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/PartitioningWindowFn.java @@ -17,10 +17,9 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.joda.time.Instant; - import java.util.Arrays; import java.util.Collection; +import org.joda.time.Instant; /** * A {@link WindowFn} that places each value into exactly one window based on its timestamp and diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Repeatedly.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Repeatedly.java index 591bbf06a0ea9..88587983dfc20 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Repeatedly.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Repeatedly.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.util.ExecutableTrigger; - -import org.joda.time.Instant; - import java.util.Arrays; import java.util.List; +import org.apache.beam.sdk.util.ExecutableTrigger; +import org.joda.time.Instant; /** * Repeat a trigger, either until some condition is met or forever. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Sessions.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Sessions.java index 875db12918ae3..14d68f972381b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Sessions.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Sessions.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.transforms.display.DisplayData; - -import org.joda.time.Duration; - import java.util.Arrays; import java.util.Collection; import java.util.Objects; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.joda.time.Duration; /** * A {@link WindowFn} windowing values into sessions separated by {@link #gapDuration}-long diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/SlidingWindows.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/SlidingWindows.java index 99367f5557f4a..ec21723384f4b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/SlidingWindows.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/SlidingWindows.java @@ -17,19 +17,17 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Objects; - /** * A {@link WindowFn} that windows values into possibly overlapping 
fixed-size * timestamp-based windows. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Trigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Trigger.java index 86801e817799e..a960aa4b7a009 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Trigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Trigger.java @@ -17,22 +17,18 @@ */ package org.apache.beam.sdk.transforms.windowing; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.util.ExecutableTrigger; -import org.apache.beam.sdk.util.TimeDomain; -import org.apache.beam.sdk.util.state.MergingStateAccessor; -import org.apache.beam.sdk.util.state.StateAccessor; - import com.google.common.base.Joiner; - -import org.joda.time.Instant; - import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Objects; - import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.util.ExecutableTrigger; +import org.apache.beam.sdk.util.TimeDomain; +import org.apache.beam.sdk.util.state.MergingStateAccessor; +import org.apache.beam.sdk.util.state.StateAccessor; +import org.joda.time.Instant; /** * {@code Trigger}s control when the elements for a specific key and window are output. 
As elements diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java index 9dd069cf99521..52b78584fcb40 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/Window.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.transforms.windowing; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.Coder; @@ -29,11 +30,8 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; -import javax.annotation.Nullable; - /** * {@code Window} logically divides up or groups the elements of a * {@link PCollection} into finite windows according to a {@link WindowFn}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java index d84866b6fb728..abb4ee083f203 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/windowing/WindowFn.java @@ -17,17 +17,15 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.io.Serializable; +import java.util.Collection; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.display.HasDisplayData; - import org.joda.time.Instant; -import java.io.Serializable; -import java.util.Collection; - /** * The argument to the {@link Window} transform used to assign elements into * windows and to determine how windows are merged. 
See {@link Window} for more diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ActiveWindowSet.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ActiveWindowSet.java index 02c12c040b2d8..2e0af29732a10 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ActiveWindowSet.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ActiveWindowSet.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.WindowFn; - import com.google.common.annotations.VisibleForTesting; - import java.util.Collection; import java.util.Set; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.WindowFn; /** * Track which windows are active, and the state address window(s) under which their diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java index 0d6874026cb28..3914bb0e10a10 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ApiSurface.java @@ -29,10 +29,6 @@ import com.google.common.reflect.Invokable; import com.google.common.reflect.Parameter; import com.google.common.reflect.TypeToken; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; @@ -49,6 +45,8 @@ import java.util.List; import java.util.Set; import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents the API surface of a package prefix. 
Used for accessing public classes, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AppliedCombineFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AppliedCombineFn.java index 1e7d7c002e191..30b302c9953d2 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AppliedCombineFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AppliedCombineFn.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.util; +import com.google.common.annotations.VisibleForTesting; +import java.io.Serializable; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderRegistry; @@ -25,10 +27,6 @@ import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext; import org.apache.beam.sdk.values.PCollectionView; -import com.google.common.annotations.VisibleForTesting; - -import java.io.Serializable; - /** * A {@link KeyedCombineFnWithContext} with a fixed accumulator coder. This is created from a * specific application of the {@link KeyedCombineFnWithContext}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOff.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOff.java index df2897ee91e0e..d8050e0a32198 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOff.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOff.java @@ -21,10 +21,8 @@ import com.google.api.client.util.BackOff; import com.google.api.client.util.NanoClock; - -import java.util.concurrent.TimeUnit; - import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import java.util.concurrent.TimeUnit; /** * Extension of {@link AttemptBoundedExponentialBackOff} that bounds the total time that the backoff diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AvroUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AvroUtils.java index 16fc6fa5b3fbd..d855b344fe10d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AvroUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/AvroUtils.java @@ -17,15 +17,14 @@ */ package org.apache.beam.sdk.util; -import org.apache.avro.file.DataFileConstants; -import org.apache.avro.io.BinaryDecoder; -import org.apache.avro.io.DecoderFactory; - import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.util.Arrays; +import org.apache.avro.file.DataFileConstants; +import org.apache.avro.io.BinaryDecoder; +import org.apache.avro.io.DecoderFactory; /** * A set of utilities for working with Avro files. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java index dd36367f75461..dead76eb3d001 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BaseExecutionContext.java @@ -17,16 +17,15 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.state.StateInternals; -import org.apache.beam.sdk.values.TupleTag; - import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.values.TupleTag; /** * Base class for implementations of {@link ExecutionContext}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BitSetCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BitSetCoder.java index b406b1a56cc52..fde90af683697 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BitSetCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BitSetCoder.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.coders.AtomicCoder; -import org.apache.beam.sdk.coders.ByteArrayCoder; -import org.apache.beam.sdk.coders.CoderException; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.BitSet; +import org.apache.beam.sdk.coders.AtomicCoder; +import org.apache.beam.sdk.coders.ByteArrayCoder; +import org.apache.beam.sdk.coders.CoderException; /** * Coder for the BitSet used to track child-trigger finished states. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java index e14aec862a0ea..8740b22710772 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BucketingFunction.java @@ -20,10 +20,9 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.transforms.Combine; - import java.util.HashMap; import java.util.Map; +import org.apache.beam.sdk.transforms.Combine; /** * Keep track of the minimum/maximum/sum of a set of timestamped long values. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStream.java index 6335de47378db..c17d92d41f8a8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStream.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.coders.Coder.Context; - import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; - import javax.annotation.concurrent.NotThreadSafe; +import org.apache.beam.sdk.coders.Coder.Context; /** * Provides an efficient encoding for {@link Iterable}s containing small values by diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudKnownType.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudKnownType.java index ad79b989d9239..b22b3c01286d8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudKnownType.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudKnownType.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; - import 
javax.annotation.Nullable; /** A utility for manipulating well-known cloud types. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudObject.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudObject.java index 27722255e9164..9cab453bdf89e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudObject.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CloudObject.java @@ -22,9 +22,7 @@ import com.google.api.client.json.GenericJson; import com.google.api.client.util.Key; - import java.util.Map; - import javax.annotation.Nullable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CoderUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CoderUtils.java index 520ba4a9a6111..36bf78980e546 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CoderUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CoderUtils.java @@ -19,15 +19,6 @@ import static org.apache.beam.sdk.util.Structs.addList; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.IterableCoder; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.api.client.util.Base64; -import com.google.common.base.Throwables; - import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo.As; import com.fasterxml.jackson.annotation.JsonTypeInfo.Id; @@ -38,7 +29,8 @@ import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.type.TypeFactory; - +import com.google.api.client.util.Base64; +import com.google.common.base.Throwables; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -47,6 +39,11 @@ import java.lang.ref.SoftReference; import 
java.lang.reflect.ParameterizedType; import java.lang.reflect.TypeVariable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.IterableCoder; +import org.apache.beam.sdk.coders.KvCoder; +import org.apache.beam.sdk.values.TypeDescriptor; /** * Utilities for working with Coders. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineFnUtil.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineFnUtil.java index 351a158c1edfa..a9a0178edf4e6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineFnUtil.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CombineFnUtil.java @@ -17,6 +17,9 @@ */ package org.apache.beam.sdk.util; +import java.io.IOException; +import java.io.NotSerializableException; +import java.io.ObjectOutputStream; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderRegistry; @@ -30,10 +33,6 @@ import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.util.state.StateContext; -import java.io.IOException; -import java.io.NotSerializableException; -import java.io.ObjectOutputStream; - /** * Static utility methods that create combine function instances. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CredentialFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CredentialFactory.java index 3283c8e0c5288..ce29d9192aed8 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CredentialFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/CredentialFactory.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import com.google.api.client.auth.oauth2.Credential; - import java.io.IOException; import java.security.GeneralSecurityException; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Credentials.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Credentials.java index 41065cde41143..1e77f4dc324af 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Credentials.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Credentials.java @@ -19,8 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.options.GcpOptions; - import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.extensions.java6.auth.oauth2.AbstractPromptReceiver; import com.google.api.client.extensions.java6.auth.oauth2.AuthorizationCodeInstalledApp; @@ -34,10 +32,6 @@ import com.google.api.client.json.JsonFactory; import com.google.api.client.json.jackson2.JacksonFactory; import com.google.api.client.util.store.FileDataStoreFactory; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.io.FileReader; import java.io.IOException; @@ -45,6 +39,9 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; +import org.apache.beam.sdk.options.GcpOptions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides support for loading credentials. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/DirectSideInputReader.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/DirectSideInputReader.java index c8d360c09a8f4..30e371656396c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/DirectSideInputReader.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/DirectSideInputReader.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.util; +import com.google.common.base.Predicate; +import com.google.common.collect.Iterables; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindows; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; - /** * Basic side input reader wrapping a {@link PTuple} of side input iterables. Encapsulates * conversion according to the {@link PCollectionView} and projection to a particular diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutableTrigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutableTrigger.java index bb71fccabddae..088c499daa0b4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutableTrigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutableTrigger.java @@ -20,13 +20,12 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.Trigger; -import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; - import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.Trigger; +import 
org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; /** * A wrapper around a trigger used during execution. While an actual trigger may appear multiple diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java index 1c2f554559e81..82d900c530374 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExecutionContext.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.util; +import java.io.IOException; +import java.util.Collection; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.state.StateInternals; import org.apache.beam.sdk.values.TupleTag; -import java.io.IOException; -import java.util.Collection; - /** * Context for the current execution. This is guaranteed to exist during processing, * but does not necessarily persist between different batches of work. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayInputStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayInputStream.java index eae3e40ad1a7b..020c45308f97d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayInputStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayInputStream.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.ByteArrayInputStream; import java.io.IOException; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * {@link ByteArrayInputStream} that allows accessing the entire internal buffer without copying. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStream.java index 48006ea22f4d9..5a98f8419c3cd 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStream.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.ByteArrayOutputStream; import java.io.IOException; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * {@link ByteArrayOutputStream} special cased to treat writes of a single byte-array specially. * When calling {@link #toByteArray()} after writing only one {@code byte[]} using diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FileIOChannelFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FileIOChannelFactory.java index 92f351bb3af20..a11231bae2f1f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FileIOChannelFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FileIOChannelFactory.java @@ -20,10 +20,6 @@ import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; @@ -44,6 +40,8 @@ import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements {@link IOChannelFactory} for local files. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FinishedTriggersSet.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FinishedTriggersSet.java index 23aaa5467767a..a9feb7300a17c 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FinishedTriggersSet.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/FinishedTriggersSet.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import com.google.common.collect.Sets; - import java.util.Set; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcpCredentialFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcpCredentialFactory.java index 98b82e2d39aa5..0497e750bbe16 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcpCredentialFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcpCredentialFactory.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.options.GcpOptions; -import org.apache.beam.sdk.options.PipelineOptions; - import com.google.api.client.auth.oauth2.Credential; - import java.io.IOException; import java.security.GeneralSecurityException; +import org.apache.beam.sdk.options.GcpOptions; +import org.apache.beam.sdk.options.PipelineOptions; /** * Construct an oauth credential to be used by the SDK and the SDK workers. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsIOChannelFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsIOChannelFactory.java index 2122c6427bd0d..14090e31c38ed 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsIOChannelFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsIOChannelFactory.java @@ -17,15 +17,14 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.util.gcsfs.GcsPath; - import java.io.IOException; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.Collection; import java.util.LinkedList; import java.util.List; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.util.gcsfs.GcsPath; /** * Implements IOChannelFactory for GCS. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsPathValidator.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsPathValidator.java index 87f9181090597..89363cec4cb23 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsPathValidator.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsPathValidator.java @@ -19,12 +19,11 @@ import static com.google.common.base.Preconditions.checkArgument; +import java.io.IOException; import org.apache.beam.sdk.options.GcsOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.util.gcsfs.GcsPath; -import java.io.IOException; - /** * GCP implementation of {@link PathValidator}. Only GCS paths are allowed. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsUtil.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsUtil.java index 4e9ee6e62ec88..44a182eeb3d8f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsUtil.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/GcsUtil.java @@ -20,21 +20,14 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.DefaultValueFactory; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.util.gcsfs.GcsPath; - import com.google.api.client.googleapis.batch.BatchRequest; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.util.BackOff; import com.google.api.client.util.Sleeper; import com.google.api.services.storage.Storage; -import com.google.api.services.storage.StorageRequest; import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadChannel; @@ -47,10 +40,11 @@ import com.google.cloud.hadoop.util.RetryDeterminer; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import com.google.common.collect.Lists; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; import java.io.FileNotFoundException; 
import java.io.IOException; import java.nio.channels.SeekableByteChannel; @@ -59,12 +53,21 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; - import javax.annotation.Nullable; -import javax.annotation.concurrent.NotThreadSafe; +import org.apache.beam.sdk.options.DefaultValueFactory; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.util.gcsfs.GcsPath; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides operations on GCS. @@ -110,7 +113,11 @@ public GcsUtil create(PipelineOptions options) { /** * Maximum number of requests permitted in a GCS batch request. */ - private static final int MAX_REQUESTS_PER_BATCH = 1000; + private static final int MAX_REQUESTS_PER_BATCH = 100; + /** + * Maximum number of concurrent batches of requests executing on GCS. + */ + private static final int MAX_CONCURRENT_BATCHES = 256; ///////////////////////////////////////////////////////////////////////////// @@ -125,7 +132,6 @@ public GcsUtil create(PipelineOptions options) { // Exposed for testing. final ExecutorService executorService; - private final BatchHelper batchHelper; /** * Returns true if the given GCS pattern is supported otherwise fails with an * exception. @@ -145,8 +151,6 @@ private GcsUtil( this.storageClient = storageClient; this.uploadBufferSizeBytes = uploadBufferSizeBytes; this.executorService = executorService; - this.batchHelper = new BatchHelper( - storageClient.getRequestFactory().getInitializer(), storageClient, MAX_REQUESTS_PER_BATCH); } // Use this only for testing purposes. 
@@ -372,154 +376,123 @@ public boolean shouldRetry(IOException e) { } } + private static void executeBatches(List batches) throws IOException { + ListeningExecutorService executor = MoreExecutors.listeningDecorator( + MoreExecutors.getExitingExecutorService( + new ThreadPoolExecutor(MAX_CONCURRENT_BATCHES, MAX_CONCURRENT_BATCHES, + 0L, TimeUnit.MILLISECONDS, + new LinkedBlockingQueue()))); + + List> futures = new LinkedList<>(); + for (final BatchRequest batch : batches) { + futures.add(executor.submit(new Callable() { + public Void call() throws IOException { + batch.execute(); + return null; + } + })); + } + + try { + Futures.allAsList(futures).get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new IOException("Interrupted while executing batch GCS request", e); + } catch (ExecutionException e) { + throw new IOException("Error executing batch GCS request", e); + } finally { + executor.shutdown(); + } + } + public void copy(List srcFilenames, List destFilenames) throws IOException { + executeBatches(makeCopyBatches(srcFilenames, destFilenames)); + } + + List makeCopyBatches(List srcFilenames, List destFilenames) + throws IOException { checkArgument( srcFilenames.size() == destFilenames.size(), "Number of source files %s must equal number of destination files %s", srcFilenames.size(), destFilenames.size()); + + List batches = new LinkedList<>(); + BatchRequest batch = storageClient.batch(); for (int i = 0; i < srcFilenames.size(); i++) { final GcsPath sourcePath = GcsPath.fromUri(srcFilenames.get(i)); final GcsPath destPath = GcsPath.fromUri(destFilenames.get(i)); - LOG.debug("Copying {} to {}", sourcePath, destPath); - Storage.Objects.Copy copyObject = storageClient.objects().copy(sourcePath.getBucket(), - sourcePath.getObject(), destPath.getBucket(), destPath.getObject(), null); - batchHelper.queue(copyObject, new JsonBatchCallback() { - @Override - public void onSuccess(StorageObject obj, HttpHeaders responseHeaders) { - 
LOG.debug("Successfully copied {} to {}", sourcePath, destPath); - } - - @Override - public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { - // Do nothing on item not found. - if (!errorExtractor.itemNotFound(e)) { - throw new IOException(e.toString()); - } - LOG.debug("{} does not exist.", sourcePath); - } - }); + enqueueCopy(sourcePath, destPath, batch); + if (batch.size() >= MAX_REQUESTS_PER_BATCH) { + batches.add(batch); + batch = storageClient.batch(); + } } - batchHelper.flush(); - } - - public void remove(Collection filenames) throws IOException { - for (String filename : filenames) { - final GcsPath path = GcsPath.fromUri(filename); - LOG.debug("Removing: " + path); - Storage.Objects.Delete deleteObject = - storageClient.objects().delete(path.getBucket(), path.getObject()); - batchHelper.queue(deleteObject, new JsonBatchCallback() { - @Override - public void onSuccess(Void obj, HttpHeaders responseHeaders) throws IOException { - LOG.debug("Successfully removed {}", path); - } - - @Override - public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { - // Do nothing on item not found. - if (!errorExtractor.itemNotFound(e)) { - throw new IOException(e.toString()); - } - LOG.debug("{} does not exist.", path); - } - }); + if (batch.size() > 0) { + batches.add(batch); } - batchHelper.flush(); + return batches; } - /** - * BatchHelper abstracts out the logic for the maximum requests per batch for GCS. - * - *

        Copy of - * https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/src/main/java/com/google/cloud/hadoop/gcsio/BatchHelper.java - * - *

        Copied to prevent Dataflow from depending on the Hadoop-related dependencies that are not - * used in Dataflow. Hadoop-related dependencies will be removed from the Google Cloud Storage - * Connector (https://cloud.google.com/hadoop/google-cloud-storage-connector) so that this project - * and others may use the connector without introducing unnecessary dependencies. - * - *

        This class is not thread-safe; create a new BatchHelper instance per single-threaded logical - * grouping of requests. - */ - @NotThreadSafe - private static class BatchHelper { - /** - * Callback that causes a single StorageRequest to be added to the BatchRequest. - */ - protected static interface QueueRequestCallback { - void enqueue() throws IOException; + List makeRemoveBatches(Collection filenames) throws IOException { + List batches = new LinkedList<>(); + for (List filesToDelete : + Lists.partition(Lists.newArrayList(filenames), MAX_REQUESTS_PER_BATCH)) { + BatchRequest batch = storageClient.batch(); + for (String file : filesToDelete) { + enqueueDelete(GcsPath.fromUri(file), batch); + } + batches.add(batch); } + return batches; + } - private final List pendingBatchEntries; - private final BatchRequest batch; - - // Number of requests that can be queued into a single actual HTTP request - // before a sub-batch is sent. - private final long maxRequestsPerBatch; - - // Flag that indicates whether there is an in-progress flush. - private boolean flushing = false; + public void remove(Collection filenames) throws IOException { + executeBatches(makeRemoveBatches(filenames)); + } - /** - * Primary constructor, generally accessed only via the inner Factory class. 
- */ - public BatchHelper( - HttpRequestInitializer requestInitializer, Storage gcs, long maxRequestsPerBatch) { - this.pendingBatchEntries = new LinkedList<>(); - this.batch = gcs.batch(requestInitializer); - this.maxRequestsPerBatch = maxRequestsPerBatch; - } + private void enqueueCopy(final GcsPath from, final GcsPath to, BatchRequest batch) + throws IOException { + Storage.Objects.Copy copyRequest = storageClient.objects() + .copy(from.getBucket(), from.getObject(), to.getBucket(), to.getObject(), null); + copyRequest.queue(batch, new JsonBatchCallback() { + @Override + public void onSuccess(StorageObject obj, HttpHeaders responseHeaders) { + LOG.debug("Successfully copied {} to {}", from, to); + } - /** - * Adds an additional request to the batch, and possibly flushes the current contents of the - * batch if {@code maxRequestsPerBatch} has been reached. - */ - public void queue(final StorageRequest req, final JsonBatchCallback callback) - throws IOException { - QueueRequestCallback queueCallback = new QueueRequestCallback() { - @Override - public void enqueue() throws IOException { - req.queue(batch, callback); + @Override + public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { + if (errorExtractor.itemNotFound(e)) { + // Do nothing on item not found. + LOG.debug("{} does not exist, assuming this is a retry after deletion.", from); + return; } - }; - pendingBatchEntries.add(queueCallback); - - flushIfPossibleAndRequired(); - } - - // Flush our buffer if we have more pending entries than maxRequestsPerBatch - private void flushIfPossibleAndRequired() throws IOException { - if (pendingBatchEntries.size() > maxRequestsPerBatch) { - flushIfPossible(); + throw new IOException( + String.format("Error trying to copy %s to %s: %s", from, to, e)); } - } + }); + } - // Flush our buffer if we are not already in a flush operation and we have data to flush. 
- private void flushIfPossible() throws IOException { - if (!flushing && pendingBatchEntries.size() > 0) { - flushing = true; - try { - while (batch.size() < maxRequestsPerBatch && pendingBatchEntries.size() > 0) { - QueueRequestCallback head = pendingBatchEntries.remove(0); - head.enqueue(); - } + private void enqueueDelete(final GcsPath file, BatchRequest batch) throws IOException { + Storage.Objects.Delete deleteRequest = storageClient.objects() + .delete(file.getBucket(), file.getObject()); + deleteRequest.queue(batch, new JsonBatchCallback() { + @Override + public void onSuccess(Void obj, HttpHeaders responseHeaders) { + LOG.debug("Successfully deleted {}", file); + } - batch.execute(); - } finally { - flushing = false; + @Override + public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { + if (errorExtractor.itemNotFound(e)) { + // Do nothing on item not found. + LOG.debug("{} does not exist.", file); + return; } + throw new IOException(String.format("Error trying to delete %s: %s", file, e)); } - } - - - /** - * Sends any currently remaining requests in the batch; should be called at the end of any - * series of batched requests to ensure everything has been sent. 
- */ - public void flush() throws IOException { - flushIfPossible(); - } + }); } /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IOChannelUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IOChannelUtils.java index 913824d821a89..16a6e952f6c49 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IOChannelUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IOChannelUtils.java @@ -17,9 +17,6 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.options.PipelineOptions; - import java.io.FileNotFoundException; import java.io.IOException; import java.nio.channels.WritableByteChannel; @@ -32,6 +29,8 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.options.PipelineOptions; /** * Provides utilities for creating read and write channels. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IdentityWindowFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IdentityWindowFn.java index a3477e9f4f120..8ca1bfd77b5b7 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IdentityWindowFn.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/IdentityWindowFn.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.util; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.GroupByKey; @@ -26,12 +28,8 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.transforms.windowing.WindowFn; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Instant; -import java.util.Collection; -import java.util.Collections; - /** * A {@link WindowFn} that leaves all associations between elements and windows unchanged. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/InstanceBuilder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/InstanceBuilder.java index 08e07ce4e6407..e9e542c0853ce 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/InstanceBuilder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/InstanceBuilder.java @@ -20,18 +20,15 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.google.common.base.Joiner; - import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.LinkedList; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.values.TypeDescriptor; /** * Utility for creating objects dynamically. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItemCoder.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItemCoder.java index ec5d821382695..a6e3d6c10b0d6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItemCoder.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItemCoder.java @@ -19,6 +19,13 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.IterableCoder; @@ -28,16 +35,6 @@ import org.apache.beam.sdk.util.TimerInternals.TimerDataCoder; import org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder; -import 
com.google.common.collect.ImmutableList; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.List; - /** * A {@link Coder} for {@link KeyedWorkItem KeyedWorkItems}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItems.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItems.java index 11d84d083ba17..74348428331f9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItems.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/KeyedWorkItems.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.util.TimerInternals.TimerData; - import com.google.common.base.MoreObjects; import com.google.common.collect.Iterables; - import java.util.Collections; import java.util.Objects; +import org.apache.beam.sdk.util.TimerInternals.TimerData; /** * Static utility methods that provide {@link KeyedWorkItem} implementations. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MergingActiveWindowSet.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MergingActiveWindowSet.java index 4702fe20460b0..6ad63b0f1a0eb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MergingActiveWindowSet.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MergingActiveWindowSet.java @@ -20,20 +20,9 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.coders.MapCoder; -import org.apache.beam.sdk.coders.SetCoder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.WindowFn; -import org.apache.beam.sdk.util.state.StateInternals; -import org.apache.beam.sdk.util.state.StateNamespaces; -import org.apache.beam.sdk.util.state.StateTag; -import org.apache.beam.sdk.util.state.StateTags; -import org.apache.beam.sdk.util.state.ValueState; - import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; - import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -44,8 +33,16 @@ import java.util.Map; import java.util.Objects; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.MapCoder; +import org.apache.beam.sdk.coders.SetCoder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.WindowFn; +import org.apache.beam.sdk.util.state.StateInternals; +import org.apache.beam.sdk.util.state.StateNamespaces; +import org.apache.beam.sdk.util.state.StateTag; +import org.apache.beam.sdk.util.state.StateTags; +import org.apache.beam.sdk.util.state.ValueState; /** * An {@link ActiveWindowSet} for merging {@link WindowFn} implementations. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java index eb0a91af6f066..7b30d346024ef 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MovingFunction.java @@ -20,9 +20,8 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.transforms.Combine; - import java.util.Arrays; +import org.apache.beam.sdk.transforms.Combine; /** * Keep track of the moving minimum/maximum/sum of sampled long values. The minimum/maximum/sum diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MutationDetectors.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MutationDetectors.java index ee84c45957c2f..3b593bf0e9448 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MutationDetectors.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/MutationDetectors.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; - import java.util.Arrays; import java.util.Objects; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; /** * Static methods for creating and working with {@link MutationDetector}. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NonMergingActiveWindowSet.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NonMergingActiveWindowSet.java index 15a4ebe0f25e1..99d591bb00b00 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NonMergingActiveWindowSet.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NonMergingActiveWindowSet.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.WindowFn; - import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; - import java.util.Collection; import java.util.Set; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.WindowFn; /** * Implementation of {@link ActiveWindowSet} used with {@link WindowFn WindowFns} that don't diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NoopCredentialFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NoopCredentialFactory.java index 09f1f9000f0b8..5d9255286a777 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NoopCredentialFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NoopCredentialFactory.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.options.PipelineOptions; - import com.google.api.client.auth.oauth2.Credential; - import java.io.IOException; import java.security.GeneralSecurityException; +import org.apache.beam.sdk.options.PipelineOptions; /** * Construct an oauth credential to be used by the SDK and the SDK workers. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NullSideInputReader.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NullSideInputReader.java index 99319ed142342..3da2dc17b401b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NullSideInputReader.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/NullSideInputReader.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.values.PCollectionView; - import com.google.common.collect.Sets; - import java.util.Collections; import java.util.Set; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.values.PCollectionView; /** * A {@link SideInputReader} representing a well-defined set of views, but not storing diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViewWindow.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViewWindow.java index a7311e3767bc0..410c8cea68322 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViewWindow.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViewWindow.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; +import java.util.Objects; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.values.PCollectionView; -import java.util.Objects; - /** * A pair of a {@link PCollectionView} and a {@link BoundedWindow}, which can * be thought of as window "of" the view. This is a value class for use e.g. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViews.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViews.java index 581a98a1a745e..14ae5c8dfcfc9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViews.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PCollectionViews.java @@ -17,25 +17,12 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.IterableCoder; -import org.apache.beam.sdk.transforms.ViewFn; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.InvalidWindows; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PCollectionView; -import org.apache.beam.sdk.values.PValueBase; -import org.apache.beam.sdk.values.TupleTag; - import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; - import java.io.IOException; import java.util.Collections; import java.util.HashMap; @@ -43,8 +30,18 @@ import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; - import javax.annotation.Nullable; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.IterableCoder; +import org.apache.beam.sdk.transforms.ViewFn; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.InvalidWindows; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PCollectionView; +import org.apache.beam.sdk.values.PValueBase; +import 
org.apache.beam.sdk.values.TupleTag; /** * Implementations of {@link PCollectionView} shared across the SDK. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PTuple.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PTuple.java index eea7bcdc0f471..d03803fd6d490 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PTuple.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PTuple.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.values.TupleTag; - import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; +import org.apache.beam.sdk.values.TupleTag; /** * A {@code PTuple} is an immutable tuple of diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java index ae3d391bd8094..e8fa4f7e0c23f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunner.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.util; +import java.io.Serializable; +import java.util.Collection; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import java.io.Serializable; -import java.util.Collection; - /** * An interface that runs a {@link PerKeyCombineFn} with unified APIs. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java index 87870a83f3b41..c537eb36a4594 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PerKeyCombineFnRunners.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.util; +import com.google.common.collect.Iterables; +import java.util.Collection; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.Combine.KeyedCombineFn; import org.apache.beam.sdk.transforms.CombineFnBase.PerKeyCombineFn; @@ -26,10 +28,6 @@ import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import com.google.common.collect.Iterables; - -import java.util.Collection; - /** * Static utility methods that provide {@link PerKeyCombineFnRunner} implementations * for different keyed combine functions. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java index 9e6c7d2249837..fdcee16b69add 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubClient.java @@ -21,12 +21,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.options.PubsubOptions; - import com.google.api.client.util.DateTime; import com.google.common.base.Objects; import com.google.common.base.Strings; - import java.io.Closeable; import java.io.IOException; import java.io.Serializable; @@ -34,8 +31,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ThreadLocalRandom; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.PubsubOptions; /** * An (abstract) helper class for talking to Pubsub via an underlying transport. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubGrpcClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubGrpcClient.java index ac157fb803098..988b90fec63cb 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubGrpcClient.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubGrpcClient.java @@ -20,9 +20,6 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.options.GcpOptions; -import org.apache.beam.sdk.options.PubsubOptions; - import com.google.auth.oauth2.GoogleCredentials; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; @@ -50,7 +47,6 @@ import com.google.pubsub.v1.SubscriberGrpc.SubscriberBlockingStub; import com.google.pubsub.v1.Subscription; import com.google.pubsub.v1.Topic; - import io.grpc.Channel; import io.grpc.ClientInterceptors; import io.grpc.ManagedChannel; @@ -58,7 +54,6 @@ import io.grpc.netty.GrpcSslContexts; import io.grpc.netty.NegotiationType; import io.grpc.netty.NettyChannelBuilder; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -66,8 +61,9 @@ import java.util.Map; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.GcpOptions; +import org.apache.beam.sdk.options.PubsubOptions; /** * A helper class for talking to Pubsub via grpc. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubJsonClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubJsonClient.java index 69c5128df8e07..bdb5c04f8cbf9 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubJsonClient.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubJsonClient.java @@ -20,8 +20,6 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.options.PubsubOptions; - import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.http.HttpRequestInitializer; import com.google.api.services.pubsub.Pubsub; @@ -42,14 +40,13 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; - import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.PubsubOptions; /** * A Pubsub client using JSON transport. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java index 88ae6cc8cf101..6e5ba46a7f0cc 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubTestClient.java @@ -20,12 +20,9 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.options.PubsubOptions; - import com.google.api.client.util.Clock; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; @@ -34,8 +31,8 @@ import java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.PubsubOptions; /** * A (partial) implementation of {@link PubsubClient} for use by unit tests. Only suitable for diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java index ad03c79e01c82..2c0ae4099c804 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java @@ -19,13 +19,11 @@ import com.google.api.client.json.GenericJson; import com.google.api.client.util.Key; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.InputStream; import java.util.Properties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utilities for working with release information. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java index ad33a252e0a73..ebd44bf4ab03e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Reshuffle.java @@ -25,7 +25,6 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReshuffleTrigger.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReshuffleTrigger.java index f529ce160e2f4..9e2c27d1bc7f1 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReshuffleTrigger.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReshuffleTrigger.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util; +import java.util.List; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.Trigger; - import org.joda.time.Instant; -import java.util.List; - /** * The trigger used with {@link Reshuffle} which triggers on every element * and never buffers state. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RetryHttpRequestInitializer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RetryHttpRequestInitializer.java index 3857435b08f9f..fa6e9136cd280 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RetryHttpRequestInitializer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/RetryHttpRequestInitializer.java @@ -28,18 +28,15 @@ import com.google.api.client.util.ExponentialBackOff; import com.google.api.client.util.NanoClock; import com.google.api.client.util.Sleeper; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; - import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements a request initializer that adds retry handlers to all diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java index 1e70aaf021b70..354aa5d91182d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SerializableUtils.java @@ -17,17 +17,10 @@ */ package org.apache.beam.sdk.util; +import static com.google.common.base.Preconditions.checkState; import static org.apache.beam.sdk.util.CoderUtils.decodeFromByteArray; import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray; -import static com.google.common.base.Preconditions.checkState; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; - -import org.xerial.snappy.SnappyInputStream; -import org.xerial.snappy.SnappyOutputStream; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -35,6 +28,10 @@ import 
java.io.ObjectOutputStream; import java.io.Serializable; import java.util.Arrays; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.xerial.snappy.SnappyInputStream; +import org.xerial.snappy.SnappyOutputStream; /** * Utilities for working with Serializables. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Serializer.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Serializer.java index 738577d17340b..86a3b8ecc9bf6 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Serializer.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Serializer.java @@ -20,12 +20,10 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; - import javax.annotation.Nullable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SideInputReader.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SideInputReader.java index e99eb57d9a38e..e81c704cf1967 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SideInputReader.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SideInputReader.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util; +import javax.annotation.Nullable; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.values.PCollectionView; -import javax.annotation.Nullable; - /** * The interface to objects that provide side inputs. Particular implementations * may read a side input directly or use appropriate sorts of caching, etc. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java index bb59373571578..4f81eef4fc7c4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/StringUtils.java @@ -20,14 +20,12 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.transforms.PTransform; - import com.google.common.base.Joiner; - import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.beam.sdk.transforms.PTransform; /** * Utilities for working with JSON and other human-readable string formats. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Structs.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Structs.java index d6b704bf902e8..d50b74a0526f0 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Structs.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Structs.java @@ -18,13 +18,11 @@ package org.apache.beam.sdk.util; import com.google.api.client.util.Data; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; - import javax.annotation.Nullable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java index b8a5cd4eb51d3..e9904b2d7660a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/SystemDoFnInternal.java @@ -17,13 +17,12 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.transforms.OldDoFn; - import java.lang.annotation.Documented; import 
java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.apache.beam.sdk.transforms.OldDoFn; /** * Annotation to mark {@link OldDoFn DoFns} as an internal component of the Dataflow SDK. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TestCredential.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TestCredential.java index 554ac1b8cb5f2..4b81a0ef54f21 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TestCredential.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TestCredential.java @@ -21,7 +21,6 @@ import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.auth.oauth2.TokenResponse; import com.google.api.client.testing.http.MockHttpTransport; - import java.io.IOException; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java index eb49b9dd6facd..dd3b773312347 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TimerInternals.java @@ -20,30 +20,26 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.InstantCoder; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.coders.StringUtf8Coder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.state.StateNamespace; -import org.apache.beam.sdk.util.state.StateNamespaces; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ComparisonChain; - import com.fasterxml.jackson.annotation.JsonCreator; import 
com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.Instant; - +import com.google.common.base.MoreObjects; +import com.google.common.collect.ComparisonChain; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.List; import java.util.Objects; - import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.InstantCoder; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.coders.StringUtf8Coder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.state.StateNamespace; +import org.apache.beam.sdk.util.state.StateNamespaces; +import org.joda.time.Instant; /** * Encapsulate interaction with time within the execution environment. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Timers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Timers.java index e8404bda26f58..beb80ec3d51e5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Timers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Timers.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.util; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; - import org.joda.time.Instant; -import javax.annotation.Nullable; - /** * Interface for interacting with time. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Transport.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Transport.java index 2c68dfcd3b7a7..d824207a72339 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Transport.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Transport.java @@ -17,10 +17,6 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.options.BigQueryOptions; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.options.PubsubOptions; - import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.http.HttpRequestInitializer; @@ -32,11 +28,13 @@ import com.google.api.services.storage.Storage; import com.google.cloud.hadoop.util.ChainingHttpRequestInitializer; import com.google.common.collect.ImmutableList; - import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.security.GeneralSecurityException; +import org.apache.beam.sdk.options.BigQueryOptions; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.options.PubsubOptions; /** * Helpers for cloud communication. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TriggerContextFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TriggerContextFactory.java index 03f1baa8b4939..e09aac2eee751 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TriggerContextFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/TriggerContextFactory.java @@ -17,6 +17,13 @@ */ package org.apache.beam.sdk.util; +import com.google.common.base.Predicate; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import java.util.Collection; +import java.util.Map; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.Trigger; @@ -30,19 +37,8 @@ import org.apache.beam.sdk.util.state.StateNamespace; import org.apache.beam.sdk.util.state.StateNamespaces; import org.apache.beam.sdk.util.state.StateTag; - -import com.google.common.base.Predicate; -import com.google.common.collect.FluentIterable; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.Collection; -import java.util.Map; - -import javax.annotation.Nullable; - /** * Factory for creating instances of the various {@link Trigger} contexts. 
* diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedInputStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedInputStream.java index 58c6617e818ac..e7e7bbd20359d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedInputStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedInputStream.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import com.google.common.base.MoreObjects; - import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedOutputStream.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedOutputStream.java index c2f30c6957945..3c9b3bc91c6d4 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedOutputStream.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UnownedOutputStream.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import com.google.common.base.MoreObjects; - import java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UploadIdResponseInterceptor.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UploadIdResponseInterceptor.java index caa0759fddbb0..f685b6974441f 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UploadIdResponseInterceptor.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/UploadIdResponseInterceptor.java @@ -20,12 +20,10 @@ import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpResponseInterceptor; - +import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; - /** * Implements a response intercepter that logs the upload id if the upload * id header exists and it is the first request (does not have 
upload_id parameter in the request). diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java index f0e48124425e2..d6b5fe3b4b382 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ValueWithRecordId.java @@ -19,22 +19,19 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.ByteArrayCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.transforms.OldDoFn; - -import com.google.common.base.MoreObjects; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.base.MoreObjects; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.List; import java.util.Objects; +import org.apache.beam.sdk.coders.ByteArrayCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.transforms.OldDoFn; /** * Immutable struct containing a value as well as a unique id identifying the value. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Values.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Values.java index 71110e1cbe2e2..d8aa046e07901 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Values.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/Values.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import java.util.Map; - import javax.annotation.Nullable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java index 9d341a1d611e9..a0b4cf5260962 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowedValue.java @@ -17,29 +17,14 @@ */ package org.apache.beam.sdk.util; -import static org.apache.beam.sdk.util.Structs.addBoolean; - import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; - -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.CollectionCoder; -import org.apache.beam.sdk.coders.InstantCoder; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.transforms.windowing.GlobalWindow; -import org.apache.beam.sdk.transforms.windowing.PaneInfo; -import org.apache.beam.sdk.transforms.windowing.PaneInfo.PaneInfoCoder; -import org.apache.beam.sdk.util.common.ElementByteSizeObserver; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; +import static org.apache.beam.sdk.util.Structs.addBoolean; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.Instant; - +import com.google.common.base.MoreObjects; +import 
com.google.common.collect.ImmutableList; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -50,6 +35,17 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.CollectionCoder; +import org.apache.beam.sdk.coders.InstantCoder; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.transforms.windowing.GlobalWindow; +import org.apache.beam.sdk.transforms.windowing.PaneInfo; +import org.apache.beam.sdk.transforms.windowing.PaneInfo.PaneInfoCoder; +import org.apache.beam.sdk.util.common.ElementByteSizeObserver; +import org.joda.time.Instant; /** * An immutable triple of value, timestamp, and windows. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java index 3a1b6543d513d..54158d24e5208 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingInternals.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.util; +import java.io.IOException; +import java.util.Collection; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.state.StateInternals; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - import org.joda.time.Instant; -import java.io.IOException; -import java.util.Collection; - /** * Interface that may be required by some (internal) {@code OldDoFn}s to implement windowing. It * should not be necessary for general user code to interact with this at all. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java index f5ae812636204..b332ed7c8e0ce 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/WindowingStrategy.java @@ -17,6 +17,10 @@ */ package org.apache.beam.sdk.util; +import com.google.common.base.MoreObjects; +import java.io.Serializable; +import java.util.Collections; +import java.util.Objects; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.DefaultTrigger; @@ -27,16 +31,9 @@ import org.apache.beam.sdk.transforms.windowing.Trigger; import org.apache.beam.sdk.transforms.windowing.Window.ClosingBehavior; import org.apache.beam.sdk.transforms.windowing.WindowFn; - -import com.google.common.base.MoreObjects; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.io.Serializable; -import java.util.Collections; -import java.util.Objects; - /** * A {@code WindowingStrategy} describes the windowing behavior for a specific collection of values. 
* It has both a {@link WindowFn} describing how elements are assigned to windows and a diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ZipFiles.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ZipFiles.java index c93e18d7706d8..c17de3722e7ce 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ZipFiles.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ZipFiles.java @@ -26,7 +26,7 @@ import com.google.common.io.CharSource; import com.google.common.io.Closer; import com.google.common.io.Files; - +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; @@ -40,8 +40,6 @@ import java.util.zip.ZipFile; import java.util.zip.ZipOutputStream; -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; - /** * Functions for zipping a directory (including a subdirectory) into a ZIP-file * or unzipping it again. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java index 6db532e5811f2..2034ebaef6c84 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/common/ReflectHelpers.java @@ -19,7 +19,6 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; - import static java.util.Arrays.asList; import com.google.common.base.Function; @@ -27,7 +26,6 @@ import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Queues; - import java.lang.reflect.GenericArrayType; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; @@ -38,7 +36,6 @@ import java.util.Collections; import java.util.LinkedHashSet; import java.util.Queue; - import 
javax.annotation.Nonnull; import javax.annotation.Nullable; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/gcsfs/GcsPath.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/gcsfs/GcsPath.java index e629d643715aa..bfcd6da681410 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/gcsfs/GcsPath.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/gcsfs/GcsPath.java @@ -21,7 +21,6 @@ import static com.google.common.base.Strings.isNullOrEmpty; import com.google.api.services.storage.model.StorageObject; - import java.io.File; import java.io.IOException; import java.net.URI; @@ -35,7 +34,6 @@ import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; - import javax.annotation.Nonnull; import javax.annotation.Nullable; diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java index 3cc34a694077f..da9aa3ef5ebd1 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternals.java @@ -19,6 +19,12 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.common.base.Optional; +import com.google.common.collect.Iterables; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.Combine.KeyedCombineFn; @@ -28,18 +34,8 @@ import org.apache.beam.sdk.util.CombineFnUtil; import org.apache.beam.sdk.util.state.InMemoryStateInternals.InMemoryState; import org.apache.beam.sdk.util.state.StateTag.StateBinder; - -import com.google.common.base.Optional; -import 
com.google.common.collect.Iterables; - import org.joda.time.Instant; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; - -import javax.annotation.Nullable; - /** * {@link StateInternals} built on top of an underlying {@link StateTable} that contains instances * of {@link InMemoryState}. Whenever state that exists in the underlying {@link StateTable} is diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/InMemoryStateInternals.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/InMemoryStateInternals.java index 1d5d4325dad27..efb270cfa9ec5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/InMemoryStateInternals.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/InMemoryStateInternals.java @@ -17,6 +17,11 @@ */ package org.apache.beam.sdk.util.state; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.Coder; @@ -27,16 +32,8 @@ import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; import org.apache.beam.sdk.util.CombineFnUtil; import org.apache.beam.sdk.util.state.StateTag.StateBinder; - import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import javax.annotation.Nullable; - /** * In-memory implementation of {@link StateInternals}. Used in {@code BatchModeExecutionContext} * and for running tests that need state. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/MergingStateAccessor.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/MergingStateAccessor.java index 26af7a324dcc2..fb877cad78f9d 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/MergingStateAccessor.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/MergingStateAccessor.java @@ -17,12 +17,11 @@ */ package org.apache.beam.sdk.util.state; +import java.util.Map; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import java.util.Map; - /** * Interface for accessing persistent state while windows are merging. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateContexts.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateContexts.java index 77bb4eb9c4a6d..d0c566d7accf5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateContexts.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateContexts.java @@ -17,13 +17,12 @@ */ package org.apache.beam.sdk.util.state; +import javax.annotation.Nullable; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.WindowingInternals; import org.apache.beam.sdk.values.PCollectionView; -import javax.annotation.Nullable; - /** * Factory that produces {@link StateContext} based on different inputs. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java index 05c3b77c13847..019b69d5fd29a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateInternalsFactory.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.util.state; +import java.io.Serializable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; -import java.io.Serializable; - /** * A factory for providing {@link StateInternals} for a particular key. * diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateMerging.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateMerging.java index c41a23e7115b9..12baff9312c0e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateMerging.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateMerging.java @@ -19,16 +19,13 @@ import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; - -import org.joda.time.Instant; - +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.joda.time.Instant; /** * Helpers for merging state. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java index bfee9765b6610..75f555cfe73a5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateNamespaces.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.util.state; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.transforms.windowing.BoundedWindow; -import org.apache.beam.sdk.util.CoderUtils; - import com.google.common.base.Splitter; - import java.io.IOException; import java.util.List; import java.util.Objects; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.transforms.windowing.BoundedWindow; +import org.apache.beam.sdk.util.CoderUtils; /** * Factory methods for creating the {@link StateNamespace StateNamespaces}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTable.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTable.java index 2ae651679d9eb..dcd2a7eb00db5 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTable.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTable.java @@ -17,13 +17,11 @@ */ package org.apache.beam.sdk.util.state; -import org.apache.beam.sdk.util.state.StateTag.StateBinder; - import com.google.common.collect.HashBasedTable; import com.google.common.collect.Table; - import java.util.Map; import java.util.Set; +import org.apache.beam.sdk.util.state.StateTag.StateBinder; /** * Table mapping {@code StateNamespace} and {@code StateTag} to a {@code State} instance. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTag.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTag.java index 388b5e1cc7f90..94cba2f5a109b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTag.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTag.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.util.state; +import java.io.IOException; +import java.io.Serializable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.Coder; @@ -27,9 +29,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import java.io.IOException; -import java.io.Serializable; - /** * An address for persistent state. This includes a unique identifier for the location, the * information necessary to encode the value, and details about the intended access pattern. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java index e50ad8d00f9d0..b0797b6fac208 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/StateTags.java @@ -17,6 +17,10 @@ */ package org.apache.beam.sdk.util.state; +import com.google.common.base.MoreObjects; +import java.io.IOException; +import java.io.Serializable; +import java.util.Objects; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.coders.CannotProvideCoderException; @@ -28,12 +32,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; -import com.google.common.base.MoreObjects; - -import java.io.IOException; -import java.io.Serializable; -import java.util.Objects; - /** * Static utility methods for creating {@link StateTag} instances. 
*/ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/WatermarkHoldState.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/WatermarkHoldState.java index f22e9e06f6ca2..415cc6e143e5a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/WatermarkHoldState.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/state/WatermarkHoldState.java @@ -21,7 +21,6 @@ import org.apache.beam.sdk.annotations.Experimental.Kind; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; - import org.joda.time.Instant; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/KV.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/KV.java index 1806178736375..dec9a16709e0a 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/KV.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/KV.java @@ -17,17 +17,15 @@ */ package org.apache.beam.sdk.values; -import org.apache.beam.sdk.transforms.Combine; -import org.apache.beam.sdk.transforms.GroupByKey; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.SerializableComparator; - import com.google.common.base.MoreObjects; - import java.io.Serializable; import java.util.Arrays; import java.util.Comparator; import java.util.Objects; +import org.apache.beam.sdk.transforms.Combine; +import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.SerializableComparator; /** * An immutable key/value pair. 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PBegin.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PBegin.java index e6a2d1f0b9911..f1dbb374a2c39 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PBegin.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PBegin.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.values; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO.Read; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.PTransform; -import java.util.Collection; -import java.util.Collections; - /** * {@link PBegin} is the "input" to a root {@link PTransform}, such as {@link Read Read} or * {@link Create}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java index 01acca7097d21..4c9e220ea0ef3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionList.java @@ -17,19 +17,17 @@ */ package org.apache.beam.sdk.values; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.transforms.AppliedPTransform; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.Partition; - import com.google.common.collect.ImmutableList; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.transforms.AppliedPTransform; +import org.apache.beam.sdk.transforms.Flatten; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.Partition; /** * A {@link PCollectionList PCollectionList<T>} is an immutable list 
of homogeneously diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java index b44499bac1d06..f6776f07979a3 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionTuple.java @@ -17,6 +17,11 @@ */ package org.apache.beam.sdk.values; +import com.google.common.collect.ImmutableMap; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; @@ -24,13 +29,6 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollection.IsBounded; -import com.google.common.collect.ImmutableMap; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Map; - /** * A {@link PCollectionTuple} is an immutable tuple of * heterogeneously-typed {@link PCollection PCollections}, "keyed" by diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java index 20f10718b8f3e..0e5f5949f242e 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PCollectionView.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.values; +import java.io.Serializable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.View; @@ -24,8 +25,6 @@ import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.util.WindowingStrategy; -import java.io.Serializable; - /** * A {@link PCollectionView PCollectionView<T>} is an immutable 
view of a {@link PCollection} * as a value of type {@code T} that can be accessed diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PDone.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PDone.java index 7c05703442878..83d6a92328185 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PDone.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PDone.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.values; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.transforms.PTransform; - import java.util.Collection; import java.util.Collections; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.transforms.PTransform; /** * {@link PDone} is the output of a {@link PTransform} that has a trivial result, diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PInput.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PInput.java index 3faf6b9b80971..98987cda9822b 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PInput.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PInput.java @@ -17,9 +17,8 @@ */ package org.apache.beam.sdk.values; -import org.apache.beam.sdk.Pipeline; - import java.util.Collection; +import org.apache.beam.sdk.Pipeline; /** * The interface for things that might be input to a diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java index 299d55dc5e568..6be9215d6df17 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/POutput.java @@ -17,12 +17,11 @@ */ package org.apache.beam.sdk.values; +import java.util.Collection; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; -import java.util.Collection; - /** * The interface for 
things that might be output from a {@link PTransform}. */ diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValueBase.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValueBase.java index 8a92fa345f896..685e32fe15b79 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValueBase.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/PValueBase.java @@ -17,14 +17,13 @@ */ package org.apache.beam.sdk.values; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.transforms.AppliedPTransform; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.util.StringUtils; -import java.util.Collection; -import java.util.Collections; - /** * A {@link PValueBase} is an abstract base class that provides * sensible default implementations for methods of {@link PValue}. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java index 69bf77de9c15e..f2ad6168bb652 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TimestampedValue.java @@ -20,21 +20,19 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.InstantCoder; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.util.PropertyNames; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.Instant; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.List; import java.util.Objects; +import org.apache.beam.sdk.coders.Coder; +import 
org.apache.beam.sdk.coders.InstantCoder; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.util.PropertyNames; +import org.joda.time.Instant; /** * An immutable pair of a value and a timestamp. diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java index b281a43e4f2b6..a6b63ab1aa3ab 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTag.java @@ -20,18 +20,15 @@ import static org.apache.beam.sdk.util.Structs.addBoolean; import static org.apache.beam.sdk.util.Structs.addString; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; - -import com.google.common.collect.HashMultiset; -import com.google.common.collect.Multiset; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - +import com.google.common.collect.HashMultiset; +import com.google.common.collect.Multiset; import java.io.Serializable; import java.util.Random; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; /** * A {@link TupleTag} is a typed tag to use as the key of a diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java index 8a4beb3ab5053..412678b5b0f56 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TupleTagList.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.values; -import org.apache.beam.sdk.transforms.ParDo; - import com.google.common.collect.ImmutableList; - import java.io.Serializable; import java.util.ArrayList; import 
java.util.Collections; import java.util.List; +import org.apache.beam.sdk.transforms.ParDo; /** * A {@link TupleTagList} is an immutable list of heterogeneously diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptor.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptor.java index eea720aa20153..724b8b6925cae 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptor.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptor.java @@ -22,7 +22,6 @@ import com.google.common.reflect.Parameter; import com.google.common.reflect.TypeParameter; import com.google.common.reflect.TypeToken; - import java.io.Serializable; import java.lang.reflect.Field; import java.lang.reflect.Method; @@ -30,7 +29,6 @@ import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.List; - import javax.annotation.Nullable; /** diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java index 0e7b9cb4397a2..84b69d7a03505 100644 --- a/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java +++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/values/TypeDescriptors.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.values; import com.google.common.reflect.TypeParameter; - import java.math.BigDecimal; import java.math.BigInteger; import java.util.List; diff --git a/sdks/java/core/src/test/java/org/apache/beam/runners/dataflow/util/GcsPathValidatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/runners/dataflow/util/GcsPathValidatorTest.java index 8913916fde42e..398fa6333a43f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/runners/dataflow/util/GcsPathValidatorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/runners/dataflow/util/GcsPathValidatorTest.java @@ -30,7 +30,6 @@ import org.apache.beam.sdk.util.GcsUtil; 
import org.apache.beam.sdk.util.TestCredential; import org.apache.beam.sdk.util.gcsfs.GcsPath; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/AggregatorPipelineExtractorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/AggregatorPipelineExtractorTest.java index 930fbe781b05b..335d81f47bbd3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/AggregatorPipelineExtractorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/AggregatorPipelineExtractorTest.java @@ -23,6 +23,12 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.apache.beam.sdk.Pipeline.PipelineVisitor; import org.apache.beam.sdk.runners.TransformTreeNode; import org.apache.beam.sdk.transforms.Aggregator; @@ -33,10 +39,6 @@ import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.Sum; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; - import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -47,11 +49,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.UUID; - /** * Tests for {@link AggregatorPipelineExtractor}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/DataflowMatchers.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/DataflowMatchers.java index 2d2a373d8e9a0..c76dcf8cbf2a3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/DataflowMatchers.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/DataflowMatchers.java @@ -18,12 +18,10 @@ package org.apache.beam.sdk; import com.google.protobuf.ByteString; - +import java.io.Serializable; import org.hamcrest.Description; import org.hamcrest.TypeSafeMatcher; -import java.io.Serializable; - /** * Matchers that are useful when writing Dataflow tests. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java index d7b3ac54de9d8..66ae79f69a9b0 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/PipelineTest.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptions.CheckEnabled; @@ -47,9 +48,6 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.POutput; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.ImmutableList; - import org.junit.Assert; import org.junit.Rule; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/TestUtils.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/TestUtils.java index 35709ed42daca..1224f10e0e8b3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/TestUtils.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/TestUtils.java @@ -19,19 +19,17 @@ import static org.junit.Assert.assertThat; +import java.util.ArrayList; 
+import java.util.Arrays; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.values.KV; - import org.hamcrest.CoreMatchers; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - /** * Utilities for tests. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchers.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchers.java index 48c25899968d3..3531a8631cbe6 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchers.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchers.java @@ -17,22 +17,19 @@ */ package org.apache.beam.sdk; +import com.google.common.collect.Lists; +import java.util.Collection; +import java.util.Objects; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; - -import com.google.common.collect.Lists; - import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.hamcrest.TypeSafeMatcher; import org.joda.time.Instant; -import java.util.Collection; -import java.util.Objects; - /** * Matchers that are useful for working with Windowing, Timestamps, etc. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchersTest.java index 8b108cda2e1a1..89637e2af2702 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/WindowMatchersTest.java @@ -19,12 +19,10 @@ import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.util.WindowedValue; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java index 3b13e351300a8..a97acfb5bcd49 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/AvroCoderTest.java @@ -23,20 +23,22 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.coders.Coder.Context; -import org.apache.beam.sdk.coders.Coder.NonDeterministicException; -import org.apache.beam.sdk.testing.CoderProperties; -import org.apache.beam.sdk.testing.NeedsRunner; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.SerializableUtils; -import org.apache.beam.sdk.values.PCollection; - +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import 
java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.SortedMap; +import java.util.SortedSet; +import java.util.TreeMap; +import java.util.TreeSet; import org.apache.avro.AvroTypeException; import org.apache.avro.Schema; import org.apache.avro.SchemaBuilder; @@ -50,6 +52,19 @@ import org.apache.avro.reflect.Union; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.coders.Coder.Context; +import org.apache.beam.sdk.coders.Coder.NonDeterministicException; +import org.apache.beam.sdk.testing.CoderProperties; +import org.apache.beam.sdk.testing.NeedsRunner; +import org.apache.beam.sdk.testing.PAssert; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.Create; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.SerializableUtils; +import org.apache.beam.sdk.values.PCollection; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -60,23 +75,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.SortedMap; -import java.util.SortedSet; -import java.util.TreeMap; -import java.util.TreeSet; - /** Tests for {@link AvroCoder}. 
*/ @RunWith(JUnit4.class) public class AvroCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigDecimalCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigDecimalCoderTest.java index a5e3bc26b46c6..f5d56cbc11f0f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigDecimalCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigDecimalCoderTest.java @@ -20,17 +20,17 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.math.BigDecimal; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.testing.CoderProperties.TestElementByteSizeObserver; import org.apache.beam.sdk.util.CoderUtils; -import com.google.common.collect.ImmutableList; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.math.BigDecimal; -import java.util.List; /** * Test case for {@link BigDecimalCoder}. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianIntegerCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianIntegerCoderTest.java index f4c22bb5a0f22..091fda25c164c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianIntegerCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianIntegerCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link BigEndianIntegerCoder}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianLongCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianLongCoderTest.java index cf148b76f92cb..3a74e301695d3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianLongCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigEndianLongCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link BigEndianLongCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigIntegerCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigIntegerCoderTest.java index d04237807b9fa..6ead941a3d8d3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigIntegerCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/BigIntegerCoderTest.java @@ -20,17 +20,17 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.math.BigInteger; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.testing.CoderProperties.TestElementByteSizeObserver; import org.apache.beam.sdk.util.CoderUtils; -import com.google.common.collect.ImmutableList; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.math.BigInteger; -import java.util.List; /** * Test case for {@link BigIntegerCoder}. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteArrayCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteArrayCoderTest.java index aca0a8304c702..f80a409fced48 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteArrayCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteArrayCoderTest.java @@ -21,20 +21,18 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - /** * Unit tests for {@link ByteArrayCoder}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteCoderTest.java index 0494fe07d6156..b7673e2274225 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link ByteCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteStringCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteStringCoderTest.java index d65772cf20eb1..1d4c0627f1519 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteStringCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ByteStringCoderTest.java @@ -20,22 +20,19 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import com.google.protobuf.ByteString; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.coders.Coder.Context; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - -import com.google.common.collect.ImmutableList; -import com.google.protobuf.ByteString; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link ByteStringCoder}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderFactoriesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderFactoriesTest.java index a871404ed87bc..4ffc9c1a19847 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderFactoriesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderFactoriesTest.java @@ -19,13 +19,12 @@ import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.Collections; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; - /** * Tests for {@link CoderFactories}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderProvidersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderProvidersTest.java index 44d17d4f28c3a..44be56daaff0d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderProvidersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderProvidersTest.java @@ -20,16 +20,14 @@ import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; +import java.util.Map; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Map; - /** * Tests for {@link CoderFactories}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java index da1540585a840..d690a474ead42 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderRegistryTest.java @@ -21,6 +21,20 @@ import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageA; +import com.google.common.collect.ImmutableList; +import com.google.protobuf.Duration; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.lang.reflect.Type; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CoderRegistry.IncompatibleCoderException; import org.apache.beam.sdk.coders.protobuf.ProtoCoder; @@ -35,11 +49,6 @@ import org.apache.beam.sdk.values.KV; 
import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageA; -import com.google.common.collect.ImmutableList; -import com.google.protobuf.Duration; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -47,18 +56,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.lang.reflect.Type; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * Tests for CoderRegistry. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderTest.java index b6378fc9bd0d3..ccbffdd15db19 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CoderTest.java @@ -23,17 +23,15 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; +import java.util.Collections; import org.apache.beam.sdk.coders.Coder.Context; import org.apache.beam.sdk.coders.Coder.NonDeterministicException; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Collections; - /** Tests for constructs defined within {@link Coder}. 
*/ @RunWith(JUnit4.class) public class CoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CollectionCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CollectionCoderTest.java index a2bc20dff46e6..6a5d94b5ae511 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CollectionCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CollectionCoderTest.java @@ -17,22 +17,20 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.TreeSet; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.TreeSet; - /** * Test case for {@link CollectionCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CustomCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CustomCoderTest.java index 10259d96da139..31bbdb9e60fb6 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CustomCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/CustomCoderTest.java @@ -17,11 +17,15 @@ */ package org.apache.beam.sdk.coders; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.SerializableUtils; import org.apache.beam.sdk.values.KV; - import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -29,12 +33,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - /** Unit tests for {@link CustomCoder}. 
*/ @RunWith(JUnit4.class) public class CustomCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DefaultCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DefaultCoderTest.java index 1faf58f946390..a8496c946b639 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DefaultCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DefaultCoderTest.java @@ -18,13 +18,13 @@ package org.apache.beam.sdk.coders; import static com.google.common.base.Preconditions.checkArgument; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertThat; +import java.io.Serializable; +import java.util.List; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -32,9 +32,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.List; - /** * Tests of Coder defaults. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DelegateCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DelegateCoderTest.java index cf770aadbd164..9bb9d51ef47f7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DelegateCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DelegateCoderTest.java @@ -20,15 +20,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; -import org.apache.beam.sdk.testing.CoderProperties; - import com.google.common.collect.Lists; import com.google.common.collect.Sets; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; @@ -38,6 +31,10 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import org.apache.beam.sdk.testing.CoderProperties; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** Unit tests for {@link DelegateCoder}. */ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DoubleCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DoubleCoderTest.java index 73fb464cb69a3..f43af304e3880 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DoubleCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DoubleCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link DoubleCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DurationCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DurationCoderTest.java index e2daefcbf51de..4cb697eff93dd 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DurationCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/DurationCoderTest.java @@ -17,11 +17,11 @@ */ package org.apache.beam.sdk.coders; +import com.google.common.collect.Lists; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - -import com.google.common.collect.Lists; - import org.joda.time.Duration; import org.joda.time.ReadableDuration; import org.junit.Rule; @@ -30,9 +30,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link DurationCoder}. */ @RunWith(JUnit4.class) public class DurationCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/InstantCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/InstantCoderTest.java index c1ce7e6472827..16cb703ca3ab8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/InstantCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/InstantCoderTest.java @@ -17,12 +17,14 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.testing.CoderProperties; -import org.apache.beam.sdk.util.CoderUtils; - import com.google.common.collect.Lists; import com.google.common.primitives.UnsignedBytes; - +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.apache.beam.sdk.testing.CoderProperties; +import org.apache.beam.sdk.util.CoderUtils; import org.joda.time.Instant; import org.junit.Assert; import org.junit.Rule; @@ -31,11 +33,6 @@ import org.junit.runner.RunWith; import 
org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - /** Unit tests for {@link InstantCoder}. */ @RunWith(JUnit4.class) public class InstantCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/IterableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/IterableCoderTest.java index 42397b7bf99fd..15ec44bb2d76b 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/IterableCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/IterableCoderTest.java @@ -20,20 +20,18 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; - /** Unit tests for {@link IterableCoder}. 
*/ @RunWith(JUnit4.class) public class IterableCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/JAXBCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/JAXBCoderTest.java index 6b59e525d965c..129be61ae7c1e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/JAXBCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/JAXBCoderTest.java @@ -20,16 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; -import org.apache.beam.sdk.testing.CoderProperties; -import org.apache.beam.sdk.util.CoderUtils; -import org.apache.beam.sdk.util.SerializableUtils; - import com.google.common.collect.ImmutableList; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -39,8 +30,13 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - import javax.xml.bind.annotation.XmlRootElement; +import org.apache.beam.sdk.testing.CoderProperties; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.util.SerializableUtils; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** Unit tests for {@link JAXBCoder}. 
*/ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/KvCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/KvCoderTest.java index 47fcad79a9147..f0f7d22ea95a8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/KvCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/KvCoderTest.java @@ -17,23 +17,20 @@ */ package org.apache.beam.sdk.coders; +import com.google.common.collect.ImmutableMap; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.KV; - -import com.google.common.collect.ImmutableMap; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; - /** * Test case for {@link KvCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ListCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ListCoderTest.java index 05b66813c397c..ba9cc9d170ded 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ListCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/ListCoderTest.java @@ -20,20 +20,18 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; - /** Unit tests for {@link ListCoder}. */ @RunWith(JUnit4.class) public class ListCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/MapCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/MapCoderTest.java index b92b656e974e9..dc4a8b527f708 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/MapCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/MapCoderTest.java @@ -20,23 +20,20 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; -import org.apache.beam.sdk.testing.CoderProperties; -import org.apache.beam.sdk.util.CoderUtils; - import com.google.common.collect.ImmutableMap; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; +import 
org.apache.beam.sdk.testing.CoderProperties; +import org.apache.beam.sdk.util.CoderUtils; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** Unit tests for {@link MapCoder}. */ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/NullableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/NullableCoderTest.java index 5bfbe05c1a46f..61e7e41afa765 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/NullableCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/NullableCoderTest.java @@ -24,21 +24,18 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.testing.CoderProperties; - import com.google.common.collect.ImmutableList; - +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.Arrays; +import java.util.List; +import org.apache.beam.sdk.testing.CoderProperties; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link NullableCoder}. 
*/ @RunWith(JUnit4.class) public class NullableCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PrintBase64Encodings.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PrintBase64Encodings.java index 9ab66d13b479b..5cdb71974e0c5 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PrintBase64Encodings.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/PrintBase64Encodings.java @@ -17,14 +17,12 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.util.CoderUtils; - import com.google.common.base.Joiner; import com.google.common.collect.Lists; - import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.List; +import org.apache.beam.sdk.util.CoderUtils; /** * A command-line utility for printing the base-64 encodings of test values, for generating exact diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java index b5465fae0e999..8d344de2831d0 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SerializableCoderTest.java @@ -20,6 +20,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.Serializable; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.testing.NeedsRunner; @@ -32,7 +38,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.Serializer; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Test; @@ -40,13 +45,6 @@ import org.junit.runner.RunWith; 
import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.Serializable; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; - /** * Tests SerializableCoder. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SetCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SetCoderTest.java index e353504d30e1c..58b0b8ef580f4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SetCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/SetCoderTest.java @@ -17,21 +17,19 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - /** * Test case for {@link SetCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StandardCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StandardCoderTest.java index 1dc221017e7ed..e3d0537c87658 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StandardCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StandardCoderTest.java @@ -17,24 +17,20 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.testing.CoderProperties; - import com.google.common.collect.ImmutableList; - -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.Collections; import java.util.List; - import javax.annotation.Nullable; +import org.apache.beam.sdk.testing.CoderProperties; +import org.hamcrest.CoreMatchers; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Test case for {@link StandardCoder}. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringDelegateCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringDelegateCoderTest.java index 2a4f2e6a3143e..b8a45fd80d739 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringDelegateCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringDelegateCoderTest.java @@ -17,17 +17,15 @@ */ package org.apache.beam.sdk.coders; +import java.net.URI; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.coders.Coder.NonDeterministicException; import org.apache.beam.sdk.testing.CoderProperties; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.net.URI; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link StringDelegateCoder}. */ @RunWith(JUnit4.class) public class StringDelegateCoderTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringUtf8CoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringUtf8CoderTest.java index 5c810f72c4382..cce04ec425678 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringUtf8CoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/StringUtf8CoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link StringUtf8Coder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TableRowJsonCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TableRowJsonCoderTest.java index 03f2339741ffc..3da65221a036e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TableRowJsonCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TableRowJsonCoderTest.java @@ -17,17 +17,14 @@ */ package org.apache.beam.sdk.coders; -import org.apache.beam.sdk.testing.CoderProperties; - import com.google.api.services.bigquery.model.TableRow; - +import java.util.Arrays; +import java.util.List; +import org.apache.beam.sdk.testing.CoderProperties; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link TableRowJsonCoder}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TextualIntegerCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TextualIntegerCoderTest.java index 252f8a4863ab9..5012d1371d891 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TextualIntegerCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/TextualIntegerCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link TextualIntegerCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarIntCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarIntCoderTest.java index 3da242d39acec..72200917f78ed 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarIntCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarIntCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link VarIntCoder}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarLongCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarLongCoderTest.java index 4cd221628072b..615d4fcb05656 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarLongCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/VarLongCoderTest.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.coders; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Test case for {@link VarLongCoder}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtoCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtoCoderTest.java index b35adf6308e28..8b889dab26290 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtoCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtoCoderTest.java @@ -20,6 +20,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageA; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageB; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageC; +import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageWithMap; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.Coder.NonDeterministicException; @@ -28,14 +34,6 @@ import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages; -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageA; -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageB; -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageC; -import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageWithMap; -import com.google.common.collect.ImmutableList; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtilTest.java 
b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtilTest.java index 14fe4d8db7feb..97368248998b9 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtilTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/coders/protobuf/ProtobufUtilTest.java @@ -20,12 +20,9 @@ import static org.apache.beam.sdk.coders.protobuf.ProtobufUtil.checkProto2Syntax; import static org.apache.beam.sdk.coders.protobuf.ProtobufUtil.getRecursiveDescriptorsForClass; import static org.apache.beam.sdk.coders.protobuf.ProtobufUtil.verifyDeterministic; - import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; -import org.apache.beam.sdk.coders.Coder.NonDeterministicException; - import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages; import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageA; import com.google.cloud.dataflow.sdk.coders.Proto2CoderTestMessages.MessageB; @@ -39,16 +36,15 @@ import com.google.protobuf.Duration; import com.google.protobuf.ExtensionRegistry; import com.google.protobuf.Message; - +import java.util.HashSet; +import java.util.Set; +import org.apache.beam.sdk.coders.Coder.NonDeterministicException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.HashSet; -import java.util.Set; - /** * Tests for {@link ProtobufUtil}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOGeneratedClassTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOGeneratedClassTest.java index 6e26d33d92e4b..ede135f19ce81 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOGeneratedClassTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOGeneratedClassTest.java @@ -21,14 +21,11 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.testing.NeedsRunner; -import org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.values.PCollection; - +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; @@ -38,6 +35,13 @@ import org.apache.avro.io.DatumWriter; import org.apache.avro.specific.SpecificDatumReader; import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.testing.NeedsRunner; +import org.apache.beam.sdk.testing.PAssert; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.Create; +import org.apache.beam.sdk.values.PCollection; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -46,12 +50,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - /** * Tests for AvroIO Read and Write transforms, using classes generated from {@code user.avsc}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOTest.java index 026724add21e9..a8a7746f214bb 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroIOTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertEquals; @@ -26,6 +25,19 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterators; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileReader; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.reflect.Nullable; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.io.AvroIO.Write.Bound; @@ -40,15 +52,6 @@ import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator; import org.apache.beam.sdk.util.IOChannelUtils; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterators; - -import org.apache.avro.Schema; -import org.apache.avro.file.DataFileReader; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.reflect.Nullable; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; @@ -58,13 +61,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import 
java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.Set; - /** * Tests for AvroIO Read and Write transforms. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java index 09405abeece6e..c1b532f8b4d9e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/AvroSourceTest.java @@ -18,26 +18,24 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.coders.DefaultCoder; -import org.apache.beam.sdk.io.AvroSource.AvroReader; -import org.apache.beam.sdk.io.AvroSource.AvroReader.Seeker; -import org.apache.beam.sdk.io.BlockBasedSource.BlockBasedReader; -import org.apache.beam.sdk.io.BoundedSource.BoundedReader; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.SourceTestUtils; -import org.apache.beam.sdk.transforms.display.DisplayData; - import com.google.common.base.MoreObjects; - +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.PushbackInputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.Random; import org.apache.avro.Schema; import org.apache.avro.file.CodecFactory; import org.apache.avro.file.DataFileConstants; 
@@ -47,6 +45,16 @@ import org.apache.avro.reflect.AvroDefault; import org.apache.avro.reflect.Nullable; import org.apache.avro.reflect.ReflectData; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.DefaultCoder; +import org.apache.beam.sdk.io.AvroSource.AvroReader; +import org.apache.beam.sdk.io.AvroSource.AvroReader.Seeker; +import org.apache.beam.sdk.io.BlockBasedSource.BlockBasedReader; +import org.apache.beam.sdk.io.BoundedSource.BoundedReader; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.SourceTestUtils; +import org.apache.beam.sdk.transforms.display.DisplayData; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -55,18 +63,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.PushbackInputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Objects; -import java.util.Random; - /** * Tests for AvroSource. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java index fe9415bbb3a10..7bf0f35576bdd 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/BoundedReadFromUnboundedSourceTest.java @@ -18,12 +18,15 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.runners.dataflow.TestCountingSource; import org.apache.beam.sdk.testing.PAssert; @@ -33,18 +36,12 @@ import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - /** Unit tests for {@link BoundedReadFromUnboundedSource}. 
*/ @RunWith(JUnit4.class) public class BoundedReadFromUnboundedSourceTest implements Serializable{ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java index 4a9f95088804e..d7c451d00cef4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CompressedSourceTest.java @@ -19,7 +19,6 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -30,6 +29,24 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.common.io.Files; +import com.google.common.primitives.Bytes; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Random; +import java.util.zip.GZIPOutputStream; +import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.SerializableCoder; @@ -46,12 +63,6 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; -import com.google.common.io.Files; -import 
com.google.common.primitives.Bytes; - import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.hamcrest.Matchers; @@ -64,22 +75,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.channels.ReadableByteChannel; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Random; -import java.util.zip.GZIPOutputStream; - -import javax.annotation.Nullable; - /** * Tests for CompressedSource. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java index 4ec2c9ae02da7..0b92b2698ea38 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingInputTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -38,7 +37,6 @@ import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java index 0bd91c14846f5..7c5fa1377713f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java +++ 
b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/CountingSourceTest.java @@ -23,6 +23,8 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import java.io.IOException; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; import org.apache.beam.sdk.io.CountingSource.CounterMark; @@ -44,7 +46,6 @@ import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; @@ -52,9 +53,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.util.List; - /** * Tests of {@link CountingSource}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSinkTest.java index d3454da19f138..0fdb11f1e3f04 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSinkTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSinkTest.java @@ -24,18 +24,6 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation; -import org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation.TemporaryFileRetention; -import org.apache.beam.sdk.io.FileBasedSink.FileResult; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; @@ -47,6 +35,16 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import 
org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation; +import org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation.TemporaryFileRetention; +import org.apache.beam.sdk.io.FileBasedSink.FileResult; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for FileBasedSink. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSourceTest.java index c9f4079e5c1aa..5208910590808 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/FileBasedSourceTest.java @@ -21,7 +21,6 @@ import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionFails; import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionSucceedsAndConsistent; import static org.apache.beam.sdk.testing.SourceTestUtils.readFromSource; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @@ -29,6 +28,20 @@ import static org.junit.Assert.fail; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableList; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.SeekableByteChannel; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.Random; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import 
org.apache.beam.sdk.coders.StringUtf8Coder; @@ -43,9 +56,6 @@ import org.apache.beam.sdk.util.IOChannelFactory; import org.apache.beam.sdk.util.IOChannelUtils; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -54,20 +64,6 @@ import org.junit.runners.JUnit4; import org.mockito.Mockito; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.channels.ReadableByteChannel; -import java.nio.channels.SeekableByteChannel; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.Random; - /** * Tests code common to all file-based sources. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java index f689f51e97c02..923b4b4ba3c56 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/OffsetBasedSourceTest.java @@ -19,29 +19,26 @@ import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionExhaustive; import static org.apache.beam.sdk.testing.SourceTestUtils.readFromSource; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.NoSuchElementException; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; 
import org.apache.beam.sdk.io.OffsetBasedSource.OffsetBasedReader; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.NoSuchElementException; - /** * Tests code common to all offset-based sources. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubIOTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubIOTest.java index 1e9ebf2d1529e..4067055b25c75 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubIOTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubIOTest.java @@ -18,15 +18,14 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import java.util.Set; import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator; - import org.joda.time.Duration; import org.junit.Rule; import org.junit.Test; @@ -35,8 +34,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Set; - /** * Tests for PubsubIO Read and Write transforms. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java index db03a5cce7a0b..4edd9c106ea30 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSinkTest.java @@ -18,6 +18,11 @@ package org.apache.beam.sdk.io; +import com.google.common.collect.ImmutableList; +import com.google.common.hash.Hashing; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.PubsubUnboundedSink.RecordIdMethod; import org.apache.beam.sdk.testing.CoderProperties; @@ -31,10 +36,6 @@ import org.apache.beam.sdk.util.PubsubClient.TopicPath; import org.apache.beam.sdk.util.PubsubTestClient; import org.apache.beam.sdk.util.PubsubTestClient.PubsubTestClientFactory; - -import com.google.common.collect.ImmutableList; -import com.google.common.hash.Hashing; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; @@ -42,10 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** * Test PubsubUnboundedSink. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSourceTest.java index a19ccc5197e34..c46eca527dcd8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/PubsubUnboundedSourceTest.java @@ -26,6 +26,14 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.api.client.util.Clock; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.PubsubUnboundedSource.PubsubCheckpoint; import org.apache.beam.sdk.io.PubsubUnboundedSource.PubsubReader; @@ -38,23 +46,12 @@ import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; import org.apache.beam.sdk.util.PubsubTestClient; import org.apache.beam.sdk.util.PubsubTestClient.PubsubTestClientFactory; - -import com.google.api.client.util.Clock; -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicLong; - /** * Test PubsubUnboundedSource. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java index a47ddf2a34cfd..30a8a43e90a56 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java @@ -19,14 +19,16 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.MatcherAssert.assertThat; +import java.io.IOException; +import java.io.Serializable; +import java.util.List; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.Duration; import org.junit.Rule; import org.junit.Test; @@ -34,12 +36,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.Serializable; -import java.util.List; - -import javax.annotation.Nullable; - /** * Tests for {@link Read}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java index 6fd3093f2155b..358a30f3d2b81 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOTest.java @@ -23,7 +23,6 @@ import static org.apache.beam.sdk.TestUtils.NO_LINES_ARRAY; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue; - import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; @@ -34,6 +33,27 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintStream; +import java.nio.channels.FileChannel; +import java.nio.channels.SeekableByteChannel; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.zip.GZIPOutputStream; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; +import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; @@ -59,9 +79,6 @@ import org.apache.beam.sdk.util.IOChannelUtils; import org.apache.beam.sdk.util.gcsfs.GcsPath; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; import 
org.junit.BeforeClass; import org.junit.Ignore; @@ -76,28 +93,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.IOException; -import java.io.OutputStream; -import java.io.PrintStream; -import java.nio.channels.FileChannel; -import java.nio.channels.SeekableByteChannel; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.StandardOpenOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.zip.GZIPOutputStream; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - -import javax.annotation.Nullable; - /** * Tests for TextIO Read and Write transforms. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java index b9ba53ba6e130..28651884d2982 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/WriteTest.java @@ -28,6 +28,19 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.base.MoreObjects; +import com.google.common.base.Optional; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.SerializableCoder; @@ -52,10 +65,6 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import 
org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.MoreObjects; -import com.google.common.base.Optional; - import org.hamcrest.Matchers; import org.joda.time.Duration; import org.junit.Rule; @@ -65,18 +74,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.atomic.AtomicInteger; - /** * Tests for the Write PTransform. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java index ea0db73865d91..2788ea664621e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSinkTest.java @@ -18,26 +18,11 @@ package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; -import org.apache.beam.sdk.io.XmlSink.XmlWriteOperation; -import org.apache.beam.sdk.io.XmlSink.XmlWriter; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.transforms.display.DisplayData; - import com.google.common.collect.Lists; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; @@ -46,10 +31,20 @@ import java.util.ArrayList; import java.util.Arrays; 
import java.util.List; - import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; +import org.apache.beam.sdk.io.XmlSink.XmlWriteOperation; +import org.apache.beam.sdk.io.XmlSink.XmlWriter; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for XmlSink. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSourceTest.java index 37e3881888136..1f154d577ccef 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/XmlSourceTest.java @@ -21,7 +21,6 @@ import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionFails; import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionSucceedsAndConsistent; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -29,6 +28,18 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; +import javax.xml.bind.annotation.XmlAttribute; +import javax.xml.bind.annotation.XmlRootElement; import 
org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.Source.Reader; import org.apache.beam.sdk.options.PipelineOptions; @@ -38,9 +49,6 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.junit.Ignore; import org.junit.Rule; @@ -51,19 +59,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import javax.xml.bind.annotation.XmlAttribute; -import javax.xml.bind.annotation.XmlRootElement; - /** * Tests XmlSource. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeEstimateFractionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeEstimateFractionTest.java index 50ec2721d3de3..92daf89cad2fc 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeEstimateFractionTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeEstimateFractionTest.java @@ -21,7 +21,6 @@ import static org.junit.Assert.assertThat; import com.google.common.collect.ImmutableList; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeInterpolateKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeInterpolateKeyTest.java index e8010673e13c7..a6445eb537f32 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeInterpolateKeyTest.java +++ 
b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeInterpolateKeyTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertThat; import com.google.common.collect.ImmutableList; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeTest.java index f068ec9be73af..40f6d8fe9a791 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyRangeTest.java @@ -27,14 +27,12 @@ import com.google.common.collect.ImmutableList; import com.google.protobuf.ByteString; - +import java.util.Arrays; +import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for {@link ByteKeyRange}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyTest.java index b8c85ee1292e7..1117ac7ea10fe 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/io/range/ByteKeyTest.java @@ -24,12 +24,11 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.util.Arrays; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests of {@link ByteKey}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java index ec2902e2f538e..34077a2bdfcdf 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GcpOptionsTest.java @@ -18,20 +18,21 @@ package org.apache.beam.sdk.options; import static com.google.common.base.Strings.isNullOrEmpty; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; +import com.google.common.collect.ImmutableMap; +import com.google.common.io.Files; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; import org.apache.beam.sdk.options.GcpOptions.DefaultProjectFactory; import org.apache.beam.sdk.testing.RestoreSystemProperties; import org.apache.beam.sdk.util.NoopPathValidator; - -import com.google.common.collect.ImmutableMap; -import com.google.common.io.Files; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -39,11 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Map; - /** Tests for {@link GcpOptions}. 
*/ @RunWith(JUnit4.class) public class GcpOptionsTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java index 8e1439bd7de1c..dae7208487e1d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/GoogleApiDebugOptionsTest.java @@ -21,14 +21,12 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.api.services.bigquery.Bigquery.Datasets.Delete; +import com.google.api.services.storage.Storage; import org.apache.beam.sdk.options.GoogleApiDebugOptions.GoogleApiTracer; import org.apache.beam.sdk.util.TestCredential; import org.apache.beam.sdk.util.Transport; - -import com.google.api.services.bigquery.Bigquery.Datasets.Delete; -import com.google.api.services.storage.Storage; - -import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java index 0c1b5963c849a..70c89837ca5b2 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsFactoryTest.java @@ -27,6 +27,18 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.auto.service.AutoService; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.ImmutableList; +import 
com.google.common.collect.ImmutableMap; +import com.google.common.collect.ListMultimap; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.runners.PipelineRunner; @@ -34,15 +46,6 @@ import org.apache.beam.sdk.testing.CrashingRunner; import org.apache.beam.sdk.testing.ExpectedLogs; import org.apache.beam.sdk.testing.RestoreSystemProperties; - -import com.google.auto.service.AutoService; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ListMultimap; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -51,12 +54,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** Tests for {@link PipelineOptionsFactory}. 
*/ @RunWith(JUnit4.class) public class PipelineOptionsFactoryTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsReflectorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsReflectorTest.java index 8f801c79688a5..7632b50ab279a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsReflectorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsReflectorTest.java @@ -26,9 +26,9 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.not; -import com.google.common.collect.ImmutableSet; - import com.fasterxml.jackson.annotation.JsonIgnore; +import com.google.common.collect.ImmutableSet; +import java.util.Set; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -36,8 +36,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Set; - /** * Unit tests for {@link PipelineOptionsReflector}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java index b2efa61f69434..012a5b04c55d8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsTest.java @@ -23,21 +23,19 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.util.List; +import java.util.Set; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.util.List; -import java.util.Set; - /** Unit tests for {@link PipelineOptions}. 
*/ @RunWith(JUnit4.class) public class PipelineOptionsTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsValidatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsValidatorTest.java index 2b684a8a047c0..80d2a60b678d2 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsValidatorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/PipelineOptionsValidatorTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.options; import org.apache.beam.sdk.testing.CrashingRunner; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java index 148b590807665..1ba6b43821b6c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/options/ProxyInvocationHandlerTest.java @@ -32,19 +32,23 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.transforms.display.DisplayData; - +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.testing.EqualsTester; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.ObjectMapper; - +import java.io.IOException; +import 
java.io.Serializable; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import org.apache.beam.sdk.transforms.display.DisplayData; import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Rule; @@ -55,14 +59,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.Serializable; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - /** Tests for {@link ProxyInvocationHandler}. */ @RunWith(JUnit4.class) public class ProxyInvocationHandlerTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/PipelineRunnerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/PipelineRunnerTest.java index f9ce018fc4799..e98049738615c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/PipelineRunnerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/PipelineRunnerTest.java @@ -27,7 +27,6 @@ import org.apache.beam.sdk.testing.CrashingRunner; import org.apache.beam.sdk.util.GcsUtil; import org.apache.beam.sdk.util.TestCredential; - import org.junit.Assert; import org.junit.Before; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/TransformTreeTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/TransformTreeTest.java index 9009a777a6456..def3a027e9d75 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/TransformTreeTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/TransformTreeTest.java @@ -24,6 +24,9 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.io.File; +import java.util.Arrays; +import java.util.EnumSet; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.VoidCoder; @@ -41,7 +44,6 
@@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.PDone; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -49,10 +51,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.util.Arrays; -import java.util.EnumSet; - /** * Tests for {@link TransformTreeNode} and {@link TransformHierarchy}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSource.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSource.java index 10631c2aa6bca..b53d1fccefc27 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSource.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSource.java @@ -19,6 +19,11 @@ import static org.apache.beam.sdk.util.CoderUtils.encodeToByteArray; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadLocalRandom; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DelegateCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -26,18 +31,10 @@ import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.values.KV; - import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ThreadLocalRandom; - -import javax.annotation.Nullable; - /** * An unbounded source for testing the unbounded sources framework code. 
* diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSourceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSourceTest.java index 6ba060e8cfefc..72a4585104cff 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSourceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/runners/dataflow/TestCountingSourceTest.java @@ -22,15 +22,13 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.io.IOException; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; - /** * Test the TestCountingSource. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CoderPropertiesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CoderPropertiesTest.java index 5bf55b08d6191..f337f36215a92 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CoderPropertiesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CoderPropertiesTest.java @@ -20,13 +20,14 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import com.google.common.base.Strings; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import org.apache.beam.sdk.coders.Coder.Context; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CustomCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; - -import com.google.common.base.Strings; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Rule; @@ -35,10 +36,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - /** Unit tests for 
{@link CoderProperties}. */ @RunWith(JUnit4.class) public class CoderPropertiesTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CrashingRunnerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CrashingRunnerTest.java index 041a73ae2d261..c66aa50cb57f7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CrashingRunnerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CrashingRunnerTest.java @@ -26,7 +26,6 @@ import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.runners.PipelineRunner; import org.apache.beam.sdk.transforms.Create; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java index caf8fd7919d24..a8e3f94250312 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogs.java @@ -19,12 +19,6 @@ import static org.junit.Assert.fail; -import org.hamcrest.Description; -import org.hamcrest.Matcher; -import org.hamcrest.TypeSafeMatcher; -import org.junit.rules.ExternalResource; -import org.junit.rules.TestRule; - import java.util.Collection; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.logging.Formatter; @@ -33,8 +27,12 @@ import java.util.logging.LogRecord; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; - import javax.annotation.concurrent.ThreadSafe; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; +import org.junit.rules.ExternalResource; +import org.junit.rules.TestRule; /** * This {@link TestRule} enables the ability to capture JUL logging events during test execution and diff --git 
a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogsTest.java index 1d7e18a0db155..84d55844f8252 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/ExpectedLogsTest.java @@ -19,13 +19,6 @@ import static org.apache.beam.sdk.testing.SystemNanoTimeSleeper.sleepMillis; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -35,6 +28,12 @@ import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Tests for {@link FastNanoClockAndSleeper}. 
*/ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeper.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeper.java index a97726be545f8..6bfafa5a8637e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeper.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeper.java @@ -19,7 +19,6 @@ import com.google.api.client.util.NanoClock; import com.google.api.client.util.Sleeper; - import org.junit.rules.ExternalResource; import org.junit.rules.TestRule; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeperTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeperTest.java index 9d15c0fcabb6e..7d209515421b9 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeperTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FastNanoClockAndSleeperTest.java @@ -20,13 +20,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.util.concurrent.TimeUnit; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.concurrent.TimeUnit; - /** Tests for {@link FastNanoClockAndSleeper}. 
*/ @RunWith(JUnit4.class) public class FastNanoClockAndSleeperTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FileChecksumMatcherTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FileChecksumMatcherTest.java index d94ffe2c47615..b2f2ec853573a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FileChecksumMatcherTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/FileChecksumMatcherTest.java @@ -20,9 +20,12 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; +import com.google.common.io.Files; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.util.IOChannelUtils; -import com.google.common.io.Files; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -32,10 +35,6 @@ import org.mockito.Mock; import org.mockito.Mockito; -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - /** Tests for {@link FileChecksumMatcher}. 
*/ @RunWith(JUnit4.class) public class FileChecksumMatcherTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java index acc2b48f96442..54ddd3f99f1a9 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java @@ -22,6 +22,13 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.google.common.collect.Iterables; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.regex.Pattern; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.CoderException; @@ -35,10 +42,6 @@ import org.apache.beam.sdk.util.common.ElementByteSizeObserver; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.collect.Iterables; - -import com.fasterxml.jackson.annotation.JsonCreator; import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; @@ -48,12 +51,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.regex.Pattern; - /** * Test case for {@link PAssert}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PCollectionViewTesting.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PCollectionViewTesting.java index 517ed689b87a3..ea8e0af5030bc 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PCollectionViewTesting.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PCollectionViewTesting.java @@ -17,6 +17,12 @@ */ package org.apache.beam.sdk.testing; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.util.List; +import java.util.Objects; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.IterableCoder; import org.apache.beam.sdk.transforms.ViewFn; @@ -29,18 +35,9 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.PValueBase; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.List; -import java.util.Objects; - /** * Methods for creating and using {@link PCollectionView} instances. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PaneExtractorsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PaneExtractorsTest.java index 1487eeae0dda4..ef501d495c50c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PaneExtractorsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PaneExtractorsTest.java @@ -21,14 +21,12 @@ import static org.hamcrest.Matchers.emptyIterable; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing; import org.apache.beam.sdk.util.WindowedValue; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/RestoreSystemProperties.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/RestoreSystemProperties.java index 423026d28d234..d1d0507a7606e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/RestoreSystemProperties.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/RestoreSystemProperties.java @@ -17,12 +17,11 @@ */ package org.apache.beam.sdk.testing; -import org.junit.rules.ExternalResource; -import org.junit.rules.TestRule; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import org.junit.rules.ExternalResource; +import org.junit.rules.TestRule; /** * Saves and restores the current system properties for tests. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SerializableMatchersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SerializableMatchersTest.java index 7cbd4a8c8b926..db5ff2e3049bb 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SerializableMatchersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SerializableMatchersTest.java @@ -22,17 +22,17 @@ import static org.apache.beam.sdk.testing.SerializableMatchers.containsInAnyOrder; import static org.apache.beam.sdk.testing.SerializableMatchers.kvWithKey; import static org.apache.beam.sdk.testing.SerializableMatchers.not; - import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.util.SerializableUtils; import org.apache.beam.sdk.values.KV; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -40,10 +40,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; - /** * Test case for {@link SerializableMatchers}. 
* diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SourceTestUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SourceTestUtilsTest.java index f2b332bb7dd3a..efb385de630ad 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SourceTestUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SourceTestUtilsTest.java @@ -20,21 +20,18 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import com.google.common.collect.Sets; +import java.util.List; +import java.util.Set; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; import org.apache.beam.sdk.io.CountingSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; - -import com.google.common.collect.Sets; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; -import java.util.Set; - /** * Tests for {@link SourceTestUtils}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java index fd715dc5a5674..e6626190006ef 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/StaticWindowsTest.java @@ -19,13 +19,11 @@ import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.WindowFn; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Rule; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeper.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeper.java index 254a1d57e2b98..810b6f182aaa1 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeper.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeper.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.testing; import com.google.api.client.util.Sleeper; - import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.LockSupport; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeperTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeperTest.java index 623224d838967..fe97675e7e7a7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeperTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/SystemNanoTimeSleeperTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.testing; import static 
org.apache.beam.sdk.testing.SystemNanoTimeSleeper.sleepMillis; - import static org.junit.Assert.assertTrue; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java index 0bd789326d3e4..ed65f158753de 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestPipelineTest.java @@ -23,14 +23,17 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.UUID; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.options.ApplicationNameOptions; import org.apache.beam.sdk.options.GcpOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.Create; - -import com.fasterxml.jackson.databind.ObjectMapper; import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.junit.Rule; @@ -40,11 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.UUID; - /** Tests for {@link TestPipeline}. 
*/ @RunWith(JUnit4.class) public class TestPipelineTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java index df37d7f967a54..6457f910308f4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java @@ -23,6 +23,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.Assert.assertThat; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VarIntCoder; @@ -50,7 +51,6 @@ import org.apache.beam.sdk.transforms.windowing.Window.ClosingBehavior; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; @@ -60,8 +60,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Tests for {@link TestStream}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/WindowSupplierTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/WindowSupplierTest.java index 178c67ca7956b..1ab4c275eac8f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/WindowSupplierTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/WindowSupplierTest.java @@ -19,28 +19,28 @@ import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collections; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.util.SerializableUtils; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collections; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for {@link WindowSupplier}. 
*/ +@RunWith(JUnit4.class) public class WindowSupplierTest { private final IntervalWindow window = new IntervalWindow(new Instant(0L), new Instant(100L)); private final IntervalWindow otherWindow = diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java index fc10d4b29b379..ab1394697c4fa 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateQuantilesTest.java @@ -19,10 +19,14 @@ import static org.apache.beam.sdk.TestUtils.checkCombineFn; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.collection.IsIterableContainingInOrder.contains; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -34,7 +38,6 @@ import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.CoreMatchers; import org.hamcrest.Description; import org.hamcrest.Matcher; @@ -44,12 +47,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; - /** * Tests for {@link ApproximateQuantiles}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java index 7b6d671aa9351..ba1ddfeaf2cde 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ApproximateUniqueTest.java @@ -18,12 +18,18 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.TestUtils; import org.apache.beam.sdk.testing.NeedsRunner; @@ -34,21 +40,11 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - /** * Tests for the ApproximateUnique aggregator transform. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java index 95ba1aa0d8d14..35f985855dae9 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineFnsTest.java @@ -19,9 +19,15 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.Coder; @@ -43,9 +49,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.junit.Rule; import org.junit.Test; @@ -54,13 +57,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** * Unit tests for {@link CombineFns}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java index 6421b3b274ecf..77a1d6b03e69c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CombineTest.java @@ -17,19 +17,31 @@ */ package org.apache.beam.sdk.transforms; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkState; import static org.apache.beam.sdk.TestUtils.checkCombineFn; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasNamespace; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; - import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.BigEndianLongCoder; @@ -67,13 +79,6 @@ import 
org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.POutput; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import org.hamcrest.Matchers; import org.joda.time.Duration; import org.junit.Test; @@ -82,16 +87,6 @@ import org.junit.runners.JUnit4; import org.mockito.Mock; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; - /** * Tests for Combine transforms. */ @@ -387,7 +382,7 @@ public void testGlobalCombineWithDefaultsAndTriggers() { PCollection output = input .apply(Window.into(new GlobalWindows()) - .triggering(AfterPane.elementCountAtLeast(1)) + .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1))) .accumulatingFiredPanes() .withAllowedLateness(new Duration(0))) .apply(Sum.integersGlobally()) @@ -583,7 +578,13 @@ public void testHotKeyCombiningWithAccumulationMode() { .apply(Sum.integersGlobally().withoutDefaults().withFanout(2)) .apply(ParDo.of(new GetLast())); - PAssert.that(output).containsInAnyOrder(15); + PAssert.that(output).satisfies(new SerializableFunction, Void>() { + @Override + public Void apply(Iterable input) { + assertThat(input, hasItem(15)); + return null; + } + }); pipeline.run(); } diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CountTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CountTest.java index ca898b3bc2b9c..7f77ae7f64c38 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CountTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CountTest.java @@ -18,9 +18,10 @@ package 
org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.TestUtils.NO_LINES; - import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.PAssert; @@ -28,15 +29,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for Count. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java index 9db01368cc66b..2a89a1837df73 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/CreateTest.java @@ -21,13 +21,23 @@ import static org.apache.beam.sdk.TestUtils.LINES_ARRAY; import static org.apache.beam.sdk.TestUtils.NO_LINES; import static org.apache.beam.sdk.TestUtils.NO_LINES_ARRAY; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Random; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; @@ -50,10 +60,6 @@ import 
org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Rule; @@ -63,16 +69,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Random; - /** * Tests for Create. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java index 4e8d06c3e5970..25b909aabbaf3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnDelegatingAggregatorTest.java @@ -25,7 +25,6 @@ import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.OldDoFn.DelegatingAggregator; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java index 3fb3193f4d5ad..e5f5cb6547f5b 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTest.java @@ -23,13 +23,13 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.testing.NeedsRunner; import 
org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.Max.MaxIntegerFn; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -37,8 +37,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** Tests for {@link DoFn}. */ @RunWith(JUnit4.class) public class DoFnTest implements Serializable { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java index e379f11279238..2f1519cc42764 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/DoFnTesterTest.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import java.util.List; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; @@ -32,15 +33,12 @@ import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TimestampedValue; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** * Tests for {@link DoFnTester}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FilterTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FilterTest.java index 2edab05418e86..5221f75b956b0 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FilterTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FilterTest.java @@ -18,22 +18,19 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; +import java.io.Serializable; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Tests for {@link Filter}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java index cb7892cad5729..311c8de369d45 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsTest.java @@ -18,10 +18,15 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import java.io.Serializable; +import java.util.Collections; +import java.util.List; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -30,10 +35,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -41,11 +42,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Collections; -import java.util.List; -import java.util.Set; - /** * Tests for {@link FlatMapElements}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java index 3469223c38fe2..d755e28b8548e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/FlattenTest.java @@ -23,6 +23,13 @@ import static org.apache.beam.sdk.TestUtils.NO_LINES; import static org.apache.beam.sdk.TestUtils.NO_LINES_ARRAY; +import com.google.common.collect.ImmutableSet; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CollectionCoder; @@ -41,9 +48,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.PCollectionView; - -import com.google.common.collect.ImmutableSet; - import org.joda.time.Duration; import org.junit.Assert; import org.junit.Rule; @@ -53,13 +57,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Set; - /** * Tests for Flatten. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java index afe460fcdc7ce..bea0e2d901851 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/GroupByKeyTest.java @@ -19,7 +19,6 @@ import static org.apache.beam.sdk.TestUtils.KvMatcher.isKv; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.Matchers.empty; @@ -27,6 +26,19 @@ import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ThreadLocalRandom; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; @@ -51,11 +63,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.base.Function; -import com.google.common.collect.Iterables; - -import com.fasterxml.jackson.annotation.JsonCreator; import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Assert; @@ -66,17 +73,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStream; 
-import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ThreadLocalRandom; - /** * Tests for GroupByKey. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java index fa2fae96d419c..b9afd35513df8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/IntraBundleParallelizationTest.java @@ -20,7 +20,6 @@ import static org.apache.beam.sdk.testing.SystemNanoTimeSleeper.sleepMillis; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -31,20 +30,18 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.concurrent.atomic.AtomicInteger; - /** * Tests for RateLimiter. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KeysTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KeysTest.java index cf3094068bf23..fce5b2f205c34 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KeysTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KeysTest.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.transforms; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -26,14 +27,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests for Keys transform. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KvSwapTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KvSwapTest.java index 1a2d7f6ca0606..3598198754156 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KvSwapTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/KvSwapTest.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.transforms; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -26,14 +27,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests for KvSwap transform. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java index 7217bca663fae..4a34c57b58b6c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MapElementsTest.java @@ -18,11 +18,12 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertThat; +import java.io.Serializable; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -33,7 +34,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -41,9 +41,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Set; - /** * Tests for {@link MapElements}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java index 87fa5541fa6bb..5c78b3f15bb3a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java @@ -19,14 +19,11 @@ import static org.apache.beam.sdk.TestUtils.checkCombineFn; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.transforms.display.DisplayData; - import com.google.common.collect.Lists; - +import org.apache.beam.sdk.transforms.display.DisplayData; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MeanTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MeanTest.java index 65c876ef07547..1c94e35a957a6 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MeanTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MeanTest.java @@ -18,23 +18,19 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.TestUtils.checkCombineFn; - import static org.junit.Assert.assertEquals; +import com.google.common.collect.Lists; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.transforms.Mean.CountSum; import org.apache.beam.sdk.transforms.Mean.CountSumCoder; - -import com.google.common.collect.Lists; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for Mean. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java index cd03a74c18945..a0eca072e2bf4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MinTest.java @@ -20,14 +20,11 @@ import static org.apache.beam.sdk.TestUtils.checkCombineFn; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.transforms.display.DisplayData; - import com.google.common.collect.Lists; - +import org.apache.beam.sdk.transforms.display.DisplayData; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java index 8f2bd5e4a43a9..c73251000650a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/NoOpOldDoFn.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.util.WindowingInternals; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - import org.joda.time.Instant; /** diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java index 5946d9a0dff94..e7ae135c5683d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/OldDoFnTest.java @@ -24,6 +24,9 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableMap; 
+import java.io.Serializable; +import java.util.Map; import org.apache.beam.sdk.AggregatorValues; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; @@ -34,9 +37,6 @@ import org.apache.beam.sdk.transforms.Max.MaxIntegerFn; import org.apache.beam.sdk.transforms.Sum.SumIntegerFn; import org.apache.beam.sdk.transforms.display.DisplayData; - -import com.google.common.collect.ImmutableMap; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -44,9 +44,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Map; - /** * Tests for OldDoFn. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PTransformTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PTransformTest.java index f7074c35714ff..bfe8225c9dc93 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PTransformTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PTransformTest.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java index 272fea75d791c..f69c867bbe22d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoLifecycleTest.java @@ -25,25 +25,24 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicBoolean; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.RunnableOnService; 
import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.concurrent.atomic.AtomicBoolean; - /** * Tests that {@link ParDo} exercises {@link DoFn} methods in the appropriate sequence. */ @RunWith(JUnit4.class) -public class ParDoLifecycleTest { +public class ParDoLifecycleTest implements Serializable { @Test @Category(RunnableOnService.class) public void testOldFnCallSequence() { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java index c384114dc0f43..0a4b3cd271f88 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.transforms; +import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType; @@ -24,7 +25,6 @@ import static org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray; import static org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString; import static org.apache.beam.sdk.util.StringUtils.jsonStringToByteArray; -import static com.google.common.base.Preconditions.checkNotNull; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -34,6 +34,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import 
com.fasterxml.jackson.annotation.JsonCreator; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.CoderException; @@ -56,9 +65,6 @@ import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - -import com.fasterxml.jackson.annotation.JsonCreator; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; @@ -68,15 +74,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - /** * Tests for ParDo. 
*/ @@ -193,7 +190,9 @@ public void prepare() { @StartBundle public void startBundle(Context c) { - assertEquals(State.UNSTARTED, state); + assertThat(state, + anyOf(equalTo(State.UNSTARTED), equalTo(State.FINISHED))); + state = State.STARTED; outputToAll(c, "started"); } diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java index 0cc804ef62131..1cbe344adc54b 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/PartitionTest.java @@ -18,11 +18,13 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -32,7 +34,6 @@ import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -40,10 +41,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; - /** * Tests for {@link Partition}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesTest.java index 0e919d2c83b7b..312cba6b45fbe 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesTest.java @@ -20,6 +20,10 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.PAssert; @@ -27,17 +31,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * Tests for RemovedDuplicates. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java index e7f8cd00930e0..a0555fa60800b 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SampleTest.java @@ -17,16 +17,21 @@ */ package org.apache.beam.sdk.transforms; +import static com.google.common.base.Preconditions.checkArgument; import static org.apache.beam.sdk.TestUtils.LINES; import static org.apache.beam.sdk.TestUtils.NO_LINES; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - -import static com.google.common.base.Preconditions.checkArgument; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import com.google.common.base.Joiner; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; @@ -36,21 +41,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.base.Joiner; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - /** * Tests for Sample transform. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SimpleStatsFnsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SimpleStatsFnsTest.java index dc8355b2620a8..a782ecc201529 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SimpleStatsFnsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SimpleStatsFnsTest.java @@ -19,15 +19,14 @@ import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - /** * Tests of Min, Max, Mean, and Sum. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SumTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SumTest.java index 4af6db19a58d6..b4f723d0d6f80 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SumTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/SumTest.java @@ -18,19 +18,16 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.TestUtils.checkCombineFn; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import com.google.common.collect.Lists; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.BigEndianLongCoder; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.DoubleCoder; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.beam.sdk.coders.VarLongCoder; - -import com.google.common.collect.Lists; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java 
b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java index fc0e659f5b9a6..b6242524b6a31 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/TopTest.java @@ -18,10 +18,14 @@ package org.apache.beam.sdk.transforms; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -35,7 +39,6 @@ import org.apache.beam.sdk.transforms.windowing.Window.Bound; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.Matchers; import org.joda.time.Duration; import org.junit.Rule; @@ -45,12 +48,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; - /** Tests for Top. 
*/ @RunWith(JUnit4.class) public class TopTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ValuesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ValuesTest.java index 238ba7b648424..0bf2e2e9232e8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ValuesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ValuesTest.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertEquals; +import java.util.Arrays; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -28,14 +29,11 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests for Values transform. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java index 170e6ce093de8..69b618607057c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ViewTest.java @@ -17,16 +17,26 @@ */ package org.apache.beam.sdk.transforms; -import static org.apache.beam.sdk.values.KV.of; - import static com.google.common.base.Preconditions.checkArgument; - +import static org.apache.beam.sdk.values.KV.of; import static org.hamcrest.Matchers.isA; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.NoSuchElementException; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.coders.Coder; @@ -52,9 +62,6 @@ import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.collect.ImmutableList; - import org.hamcrest.Matchers; import org.joda.time.Duration; import org.joda.time.Instant; @@ -66,18 +73,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; 
-import java.util.NoSuchElementException; - /** * Tests for {@link View}. See also {@link ParDoTest}, which * provides additional coverage since views can only be diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithKeysTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithKeysTest.java index c23dd3694ad76..f958807fcf771 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithKeysTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithKeysTest.java @@ -19,6 +19,8 @@ import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.NeedsRunner; @@ -27,15 +29,11 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for ExtractKeys transform. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java index e3814708788ee..923b97c52ee38 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsTest.java @@ -19,6 +19,7 @@ import static org.hamcrest.Matchers.isA; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -26,7 +27,6 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; @@ -36,8 +36,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Tests for {@link WithTimestamps}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluator.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluator.java index dc8c1e9fe90c6..1783a73101d3c 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluator.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluator.java @@ -17,6 +17,9 @@ */ package org.apache.beam.sdk.transforms.display; +import com.google.common.collect.Sets; +import java.util.Objects; +import java.util.Set; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.options.PipelineOptions; @@ -29,11 +32,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.POutput; -import com.google.common.collect.Sets; - -import java.util.Objects; -import java.util.Set; - /** * Test utilities to evaluate the {@link DisplayData} in the context of a {@link PipelineRunner}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java index e2331146ea8df..7630779bb3e18 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataEvaluatorTest.java @@ -18,11 +18,12 @@ package org.apache.beam.sdk.transforms.display; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import java.io.Serializable; +import java.util.Set; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; @@ -30,14 +31,10 @@ import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.POutput; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Set; - /** * Unit tests for {@link DisplayDataEvaluator}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchers.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchers.java index 025a1f765e5d3..e9db5222232bb 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchers.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchers.java @@ -19,10 +19,9 @@ import static org.hamcrest.Matchers.allOf; -import org.apache.beam.sdk.transforms.display.DisplayData.Item; - import com.google.common.collect.Sets; - +import java.util.Collection; +import org.apache.beam.sdk.transforms.display.DisplayData.Item; import org.hamcrest.CustomTypeSafeMatcher; import org.hamcrest.Description; import org.hamcrest.FeatureMatcher; @@ -32,8 +31,6 @@ import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.Collection; - /** * Hamcrest matcher for making assertions on {@link DisplayData} instances. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java index fa44390ab627f..3ea6830cbc3fd 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataMatchersTest.java @@ -22,7 +22,6 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; @@ -30,7 +29,6 @@ import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.display.DisplayData.Builder; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.StringDescription; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java index e2f38b44190a7..a709bd80af477 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/display/DisplayDataTest.java @@ -24,7 +24,6 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasType; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.empty; import static 
org.hamcrest.Matchers.everyItem; @@ -41,20 +40,23 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableMultimap; +import com.google.common.collect.Multimap; +import com.google.common.testing.EqualsTester; +import java.io.IOException; +import java.io.Serializable; +import java.util.Collection; +import java.util.Map; +import java.util.regex.Pattern; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.display.DisplayData.Builder; import org.apache.beam.sdk.transforms.display.DisplayData.Item; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableMultimap; -import com.google.common.collect.Multimap; -import com.google.common.testing.EqualsTester; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; import org.hamcrest.CustomTypeSafeMatcher; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -69,12 +71,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.Serializable; -import java.util.Collection; -import java.util.Map; -import java.util.regex.Pattern; - /** * Tests for {@link DisplayData} class. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultCoderTest.java index 0a0a3f1320200..18ecd9bab2e60 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultCoderTest.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertFalse; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DoubleCoder; @@ -28,9 +29,6 @@ import org.apache.beam.sdk.transforms.join.CoGbkResult.CoGbkResultCoder; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - -import com.google.common.collect.ImmutableList; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultTest.java index aa7cb5c3e5a80..50a623772d135 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGbkResultTest.java @@ -22,18 +22,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.util.common.Reiterable; import org.apache.beam.sdk.util.common.Reiterator; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.List; - /** * Tests the CoGbkResult. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java index c6f82ecb0ca41..e8c8b15ddbfdf 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/CoGroupByKeyTest.java @@ -21,6 +21,12 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import com.google.common.collect.Iterables; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.KvCoder; @@ -40,21 +46,12 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TupleTag; - -import com.google.common.collect.Iterables; - import org.joda.time.Duration; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - /** * Tests for CoGroupByKeyTest. Implements Serializable for anonymous DoFns. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/UnionCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/UnionCoderTest.java index fb80fb65490df..41ba95289dc1d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/UnionCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/join/UnionCoderTest.java @@ -19,18 +19,16 @@ import static org.junit.Assert.assertEquals; +import java.util.Arrays; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.DoubleCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.util.CloudObject; import org.apache.beam.sdk.util.Serializer; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests the UnionCoder. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java index 7e756e245f917..9317ea21d5588 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnInvokersTest.java @@ -24,15 +24,17 @@ import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.UserCodeException; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; import org.mockito.Mock; import org.mockito.MockitoAnnotations; /** Tests for {@link DoFnInvokers}. */ +@RunWith(JUnit4.class) public class DoFnInvokersTest { /** A convenience struct holding flags that indicate whether a particular method was invoked. 
*/ public static class Invocations { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java index 1a26df22cc34e..447b993224406 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesTest.java @@ -17,19 +17,16 @@ */ package org.apache.beam.sdk.transforms.reflect; -import org.apache.beam.sdk.transforms.DoFn; - import com.google.common.reflect.TypeToken; - +import java.lang.reflect.Method; +import java.util.List; +import org.apache.beam.sdk.transforms.DoFn; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.lang.reflect.Method; -import java.util.List; - /** Tests for {@link DoFnSignatures}. */ @RunWith(JUnit4.class) public class DoFnSignaturesTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterAllTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterAllTest.java index b7980132cb5fd..b5912296bd84f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterAllTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterAllTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterEachTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterEachTest.java index df557eb301325..c413c6ed25c4a 100644 --- 
a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterEachTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterEachTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterFirstTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterFirstTest.java index a16669425dd11..415060b6c22b7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterFirstTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterFirstTest.java @@ -25,7 +25,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterPaneTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterPaneTest.java index 76ee49c0b4021..38d030ec6be1f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterPaneTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterPaneTest.java @@ -23,7 +23,6 @@ import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTimeTest.java 
b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTimeTest.java index ea9c2b088e228..13a7acf8ca1e6 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTimeTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterProcessingTimeTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTimeTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTimeTest.java index 4c089db19c64e..7e6e938f3c8b8 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTimeTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterSynchronizedProcessingTimeTest.java @@ -23,7 +23,6 @@ import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterWatermarkTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterWatermarkTest.java index 418f746ea452a..084027b3e5b10 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterWatermarkTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/AfterWatermarkTest.java @@ -26,7 +26,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import 
org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/CalendarWindowsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/CalendarWindowsTest.java index 4598a2777c113..54cdd06195ec7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/CalendarWindowsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/CalendarWindowsTest.java @@ -20,12 +20,15 @@ import static org.apache.beam.sdk.testing.WindowFnTestUtils.runWindowFn; import static org.apache.beam.sdk.testing.WindowFnTestUtils.set; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.DateTime; import org.joda.time.DateTimeConstants; import org.joda.time.DateTimeZone; @@ -34,12 +37,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * Tests for CalendarWindows WindowFn. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/DefaultTriggerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/DefaultTriggerTest.java index 6ed1c8190df0a..673e5554b0087 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/DefaultTriggerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/DefaultTriggerTest.java @@ -23,7 +23,6 @@ import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/FixedWindowsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/FixedWindowsTest.java index fc1caac40c09a..afa5d60b8a91e 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/FixedWindowsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/FixedWindowsTest.java @@ -20,7 +20,6 @@ import static org.apache.beam.sdk.testing.WindowFnTestUtils.runWindowFn; import static org.apache.beam.sdk.testing.WindowFnTestUtils.set; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -28,20 +27,18 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.testing.WindowFnTestUtils; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import 
java.util.HashMap; -import java.util.Map; -import java.util.Set; - /** * Tests for FixedWindows WindowFn. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/IntervalWindowTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/IntervalWindowTest.java index aaface2d658b5..fdfbdcb1bbcaa 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/IntervalWindowTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/IntervalWindowTest.java @@ -20,20 +20,17 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.Lists; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.InstantCoder; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.util.CoderUtils; - -import com.google.common.collect.Lists; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** * Tests for {@link Window}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java index ed64f84ee46dc..fb2b4d5c730de 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/NeverTest.java @@ -23,7 +23,6 @@ import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; import org.apache.beam.sdk.values.TimestampedValue; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Before; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTriggerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTriggerTest.java index 93971793cfe21..7289d97d7b633 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTriggerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/OrFinallyTriggerTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger; import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/PaneInfoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/PaneInfoTest.java index ea4928ea12df3..1ce2d8c8d6a23 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/PaneInfoTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/PaneInfoTest.java @@ -23,7 +23,6 @@ import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.testing.CoderProperties; import 
org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/RepeatedlyTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/RepeatedlyTest.java index 3a33182f0f99b..6e8930dbd8830 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/RepeatedlyTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/RepeatedlyTest.java @@ -27,7 +27,6 @@ import org.apache.beam.sdk.util.TriggerTester; import org.apache.beam.sdk.util.TriggerTester.SimpleTriggerTester; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SessionsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SessionsTest.java index a543359e9c76f..b13168818cf89 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SessionsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SessionsTest.java @@ -20,30 +20,26 @@ import static org.apache.beam.sdk.testing.WindowFnTestUtils.runWindowFn; import static org.apache.beam.sdk.testing.WindowFnTestUtils.set; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.testing.WindowFnTestUtils; import org.apache.beam.sdk.transforms.display.DisplayData; - -import 
com.google.common.collect.ImmutableList; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * Tests for Sessions WindowFn. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SlidingWindowsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SlidingWindowsTest.java index 047a413242b5d..54c01a828bc14 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SlidingWindowsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/SlidingWindowsTest.java @@ -20,26 +20,23 @@ import static org.apache.beam.sdk.testing.WindowFnTestUtils.runWindowFn; import static org.apache.beam.sdk.testing.WindowFnTestUtils.set; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import org.apache.beam.sdk.testing.WindowFnTestUtils; import org.apache.beam.sdk.transforms.display.DisplayData; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - /** * Tests for the SlidingWindows WindowFn. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/StubTrigger.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/StubTrigger.java index 06218cf2c25b0..b258a791fd400 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/StubTrigger.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/StubTrigger.java @@ -18,10 +18,8 @@ package org.apache.beam.sdk.transforms.windowing; import com.google.common.collect.Lists; - -import org.joda.time.Instant; - import java.util.List; +import org.joda.time.Instant; /** * No-op {@link OnceTrigger} implementation for testing. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/TriggerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/TriggerTest.java index 43c8bd8c5d761..cfc03b29becaf 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/TriggerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/TriggerTest.java @@ -21,14 +21,13 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.util.Arrays; +import java.util.List; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for {@link Trigger}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java index c583860d2834f..9744fc6c78bac 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowTest.java @@ -20,7 +20,6 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFrom; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.not; @@ -29,6 +28,7 @@ import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.Coder.NonDeterministicException; @@ -44,7 +44,6 @@ import org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.TimestampedValue; - import org.hamcrest.Matchers; import org.joda.time.Duration; import org.joda.time.Instant; @@ -56,8 +55,6 @@ import org.junit.runners.JUnit4; import org.mockito.Mockito; -import java.io.Serializable; - /** * Tests for {@link Window}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java index 159e7004f80fa..ab208dd57cc37 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/windowing/WindowingTest.java @@ -17,6 +17,10 @@ */ package org.apache.beam.sdk.transforms.windowing; +import java.io.File; +import java.io.FileOutputStream; +import java.io.PrintStream; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.TextIO; @@ -34,7 +38,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.TimestampedValue; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; @@ -44,11 +47,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.FileOutputStream; -import java.io.PrintStream; -import java.io.Serializable; - /** Unit tests for bucketing. 
*/ @RunWith(JUnit4.class) public class WindowingTest implements Serializable { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ApiSurfaceTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ApiSurfaceTest.java index b3f874360777f..4b76277a027a5 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ApiSurfaceTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ApiSurfaceTest.java @@ -28,7 +28,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeDiagnosingMatcher; @@ -36,11 +39,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - /** * Tests for ApiSurface. These both test the functionality and also that our * public API is conformant to a hard-coded policy. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOffTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOffTest.java index 08f5f56310569..59e0fb7dab284 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOffTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptAndTimeBoundedExponentialBackOffTest.java @@ -25,10 +25,8 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; - import com.google.api.client.util.BackOff; - +import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptBoundedExponentialBackOffTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptBoundedExponentialBackOffTest.java index b4a075ccb3714..3cfa961865fb3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptBoundedExponentialBackOffTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AttemptBoundedExponentialBackOffTest.java @@ -26,7 +26,6 @@ import static org.junit.Assert.assertTrue; import com.google.api.client.util.BackOff; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AvroUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AvroUtilsTest.java index b72ab9ac13348..d8c345c822c09 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AvroUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/AvroUtilsTest.java @@ -19,29 +19,27 @@ import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.coders.AvroCoder; -import 
org.apache.beam.sdk.coders.DefaultCoder; -import org.apache.beam.sdk.util.AvroUtils.AvroMetadata; - +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; import org.apache.avro.Schema; import org.apache.avro.file.CodecFactory; import org.apache.avro.file.DataFileConstants; import org.apache.avro.file.DataFileWriter; import org.apache.avro.io.DatumWriter; import org.apache.avro.reflect.Nullable; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.DefaultCoder; +import org.apache.beam.sdk.util.AvroUtils.AvroMetadata; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - /** * Tests for AvroUtils. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java index ee5a2b3ff4b6b..196b904403881 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BucketingFunctionTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue; import org.apache.beam.sdk.transforms.Combine; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStreamTest.java index 09d8992e0db71..36f7028627eec 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/BufferedElementCountingOutputStreamTest.java @@ -21,18 +21,7 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.apache.beam.sdk.coders.ByteArrayCoder; -import org.apache.beam.sdk.coders.Coder.Context; - import com.google.common.collect.ImmutableList; - -import org.hamcrest.collection.IsIterableContainingInOrder; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -42,6 +31,14 @@ import java.util.Collections; import java.util.List; import java.util.Random; +import org.apache.beam.sdk.coders.ByteArrayCoder; +import org.apache.beam.sdk.coders.Coder.Context; +import org.hamcrest.collection.IsIterableContainingInOrder; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; 
+import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for {@link BufferedElementCountingOutputStream}. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CoderUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CoderUtilsTest.java index 0733c4c707ab0..4bd2f1916692f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CoderUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CoderUtilsTest.java @@ -18,12 +18,13 @@ package org.apache.beam.sdk.util; import static org.apache.beam.sdk.util.CoderUtils.makeCloudEncoding; - import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import java.io.InputStream; +import java.io.OutputStream; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.coders.Coder; @@ -34,7 +35,6 @@ import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VoidCoder; import org.apache.beam.sdk.testing.CoderPropertiesTest.ClosingCoder; - import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Rule; @@ -43,9 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.InputStream; -import java.io.OutputStream; - /** * Tests for CoderUtils. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CombineFnUtilTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CombineFnUtilTest.java index fdb69e4deff71..fe81275ef6636 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CombineFnUtilTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/CombineFnUtilTest.java @@ -22,14 +22,16 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.withSettings; +import com.google.common.collect.ImmutableList; +import java.io.ByteArrayOutputStream; +import java.io.NotSerializableException; +import java.io.ObjectOutputStream; +import java.util.List; import org.apache.beam.sdk.transforms.CombineWithContext.CombineFnWithContext; import org.apache.beam.sdk.transforms.CombineWithContext.Context; import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext; import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.util.state.StateContexts; - -import com.google.common.collect.ImmutableList; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -37,11 +39,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; -import java.io.NotSerializableException; -import java.io.ObjectOutputStream; -import java.util.List; - /** * Unit tests for {@link CombineFnUtil}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExecutableTriggerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExecutableTriggerTest.java index fdcd1bd8cea8e..1e3a1ff8b9ee0 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExecutableTriggerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExecutableTriggerTest.java @@ -20,17 +20,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.Trigger; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; - /** * Tests for {@link ExecutableTrigger}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayInputStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayInputStreamTest.java index d717cafd001f6..31cf1a81b4abf 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayInputStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayInputStreamTest.java @@ -22,13 +22,12 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertSame; +import java.io.ByteArrayInputStream; +import java.io.IOException; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.IOException; - /** Unit tests for {@link ExposedByteArrayInputStream}. 
*/ @RunWith(JUnit4.class) public class ExposedByteArrayInputStreamTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStreamTest.java index 9819a9b1a769c..a3a7a1d2d6cfe 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ExposedByteArrayOutputStreamTest.java @@ -22,13 +22,12 @@ import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; +import java.io.ByteArrayOutputStream; +import java.io.IOException; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - /** Unit tests for {@link ExposedByteArrayOutputStream}. */ @RunWith(JUnit4.class) public class ExposedByteArrayOutputStreamTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FileIOChannelFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FileIOChannelFactoryTest.java index 79e6e5cd6b0c2..011b4f5a977cd 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FileIOChannelFactoryTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FileIOChannelFactoryTest.java @@ -25,15 +25,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.Files; import com.google.common.io.LineReader; - -import org.hamcrest.Matchers; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.File; import java.io.FileNotFoundException; import java.io.Reader; @@ -42,6 +33,13 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.List; +import org.hamcrest.Matchers; +import 
org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** Tests for {@link FileIOChannelFactory}. */ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FinishedTriggersSetTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FinishedTriggersSetTest.java index b3b18561d3e41..072d264f231c4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FinishedTriggersSetTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/FinishedTriggersSetTest.java @@ -21,12 +21,11 @@ import static org.hamcrest.Matchers.theInstance; import static org.junit.Assert.assertThat; +import java.util.HashSet; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.HashSet; - /** * Tests for {@link FinishedTriggersSet}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GatherAllPanesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GatherAllPanesTest.java index a6522efd36c9a..d195623d46343 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GatherAllPanesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GatherAllPanesTest.java @@ -19,6 +19,8 @@ import static org.junit.Assert.fail; +import com.google.common.collect.Iterables; +import java.io.Serializable; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -36,9 +38,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.collect.Iterables; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; @@ -46,8 +45,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; 
-import java.io.Serializable; - /** * Tests for {@link GatherAllPanes}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsIOChannelFactoryTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsIOChannelFactoryTest.java index aae313715c756..6bdb782530d67 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsIOChannelFactoryTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsIOChannelFactoryTest.java @@ -21,7 +21,6 @@ import org.apache.beam.sdk.options.GcsOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; - import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsUtilTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsUtilTest.java index 49c7bc48ec4d6..681b0aaaa0597 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsUtilTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/GcsUtilTest.java @@ -18,6 +18,8 @@ package org.apache.beam.sdk.util; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; @@ -27,11 +29,7 @@ import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; -import org.apache.beam.sdk.util.gcsfs.GcsPath; - +import com.google.api.client.googleapis.batch.BatchRequest; import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.HttpRequest; @@ -55,14 +53,6 @@ import 
com.google.cloud.hadoop.gcsio.GoogleCloudStorageReadChannel; import com.google.cloud.hadoop.util.ClientRequestHelper; import com.google.common.collect.ImmutableList; - -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; -import org.mockito.Mockito; - import java.io.FileNotFoundException; import java.io.IOException; import java.math.BigInteger; @@ -76,6 +66,16 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; +import org.apache.beam.sdk.util.gcsfs.GcsPath; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; +import org.mockito.Mockito; /** Test case for {@link GcsUtil}. 
*/ @RunWith(JUnit4.class) @@ -490,4 +490,70 @@ public LowLevelHttpRequest buildRequest(String method, String url) throws IOExce HttpResponse response = request.execute(); return GoogleJsonResponseException.from(jsonFactory, response); } + + private static List<String> makeStrings(String s, int n) { + ImmutableList.Builder<String> ret = ImmutableList.builder(); + for (int i = 0; i < n; ++i) { + ret.add(String.format("gs://bucket/%s%d", s, i)); + } + return ret.build(); + } + + private static int sumBatchSizes(List<BatchRequest> batches) { + int ret = 0; + for (BatchRequest b : batches) { + ret += b.size(); + assertThat(b.size(), greaterThan(0)); + } + return ret; + } + + @Test + public void testMakeCopyBatches() throws IOException { + GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); + + // Small number of files fits in 1 batch + List<BatchRequest> batches = gcsUtil.makeCopyBatches(makeStrings("s", 3), makeStrings("d", 3)); + assertThat(batches.size(), equalTo(1)); + assertThat(sumBatchSizes(batches), equalTo(3)); + + // 1 batch of files fits in 1 batch + batches = gcsUtil.makeCopyBatches(makeStrings("s", 100), makeStrings("d", 100)); + assertThat(batches.size(), equalTo(1)); + assertThat(sumBatchSizes(batches), equalTo(100)); + + // A little more than 5 batches of files fits in 6 batches + batches = gcsUtil.makeCopyBatches(makeStrings("s", 501), makeStrings("d", 501)); + assertThat(batches.size(), equalTo(6)); + assertThat(sumBatchSizes(batches), equalTo(501)); + } + + @Test + public void testInvalidCopyBatches() throws IOException { + GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); + thrown.expect(IllegalArgumentException.class); + thrown.expectMessage("Number of source files 3"); + + gcsUtil.makeCopyBatches(makeStrings("s", 3), makeStrings("d", 1)); + } + + @Test + public void testMakeRemoveBatches() throws IOException { + GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil(); + + // Small number of files fits in 1 batch + List<BatchRequest> batches = 
gcsUtil.makeRemoveBatches(makeStrings("s", 3)); + assertThat(batches.size(), equalTo(1)); + assertThat(sumBatchSizes(batches), equalTo(3)); + + // 1 batch of files fits in 1 batch + batches = gcsUtil.makeRemoveBatches(makeStrings("s", 100)); + assertThat(batches.size(), equalTo(1)); + assertThat(sumBatchSizes(batches), equalTo(100)); + + // A little more than 5 batches of files fits in 6 batches + batches = gcsUtil.makeRemoveBatches(makeStrings("s", 501)); + assertThat(batches.size(), equalTo(6)); + assertThat(sumBatchSizes(batches), equalTo(501)); + } } diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IOChannelUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IOChannelUtilsTest.java index 8a7eb0216954a..d92d3cd2e10af 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IOChannelUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IOChannelUtilsTest.java @@ -23,7 +23,8 @@ import static org.junit.Assert.fail; import com.google.common.io.Files; - +import java.io.File; +import java.nio.charset.StandardCharsets; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -31,9 +32,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.nio.charset.StandardCharsets; - /** * Tests for IOChannelUtils. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IdentitySideInputWindowFn.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IdentitySideInputWindowFn.java index 705003eb2e6b7..2b48b9fc4dd75 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IdentitySideInputWindowFn.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/IdentitySideInputWindowFn.java @@ -17,15 +17,14 @@ */ package org.apache.beam.sdk.util; +import java.util.Collection; +import java.util.Collections; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.NonMergingWindowFn; import org.apache.beam.sdk.transforms.windowing.WindowFn; -import java.util.Collection; -import java.util.Collections; - /** * A {@link WindowFn} for use during tests that returns the input window for calls to * {@link #getSideInputWindow(BoundedWindow)}. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/InstanceBuilderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/InstanceBuilderTest.java index 8d197baa1b7b9..505b2d325a4da 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/InstanceBuilderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/InstanceBuilderTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.util; import org.apache.beam.sdk.values.TupleTag; - import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Rule; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/KeyedWorkItemCoderTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/KeyedWorkItemCoderTest.java index 2151f88050578..1974d9e705bc4 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/KeyedWorkItemCoderTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/KeyedWorkItemCoderTest.java @@ -17,15 +17,13 @@ */ package org.apache.beam.sdk.util; +import com.google.common.collect.ImmutableList; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.beam.sdk.testing.CoderProperties; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.util.TimerInternals.TimerData; import org.apache.beam.sdk.util.state.StateNamespaces; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MergingActiveWindowSetTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MergingActiveWindowSetTest.java index 4750af1c8cad3..676a25ab42a0f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MergingActiveWindowSetTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MergingActiveWindowSetTest.java @@ -22,16 +22,17 @@ import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.Sessions; import org.apache.beam.sdk.util.state.InMemoryStateInternals; import org.apache.beam.sdk.util.state.StateInternals; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.After; @@ -40,10 +41,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * Test NonMergingActiveWindowSet. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java index b95f235c430fe..8d57bf4d24b6a 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MovingFunctionTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue; import org.apache.beam.sdk.transforms.Combine; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MutationDetectorsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MutationDetectorsTest.java index 0763912b56678..ebd8297bbf803 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MutationDetectorsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/MutationDetectorsTest.java @@ -17,26 +17,23 @@ */ package 
org.apache.beam.sdk.util; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; import org.apache.beam.sdk.coders.ByteArrayCoder; import org.apache.beam.sdk.coders.IterableCoder; import org.apache.beam.sdk.coders.ListCoder; import org.apache.beam.sdk.coders.VarIntCoder; - -import com.google.common.collect.FluentIterable; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - /** * Tests for {@link MutationDetectors}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PTupleTest.java index fe2014a7d3c44..abead0409c2ad 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PTupleTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PTupleTest.java @@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue; import org.apache.beam.sdk.values.TupleTag; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubClientTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubClientTest.java index 22508572e5a0c..1a99d38c714ff 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubClientTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubClientTest.java @@ -20,22 +20,22 @@ import static org.junit.Assert.assertEquals; +import com.google.common.collect.ImmutableMap; +import java.util.Map; import 
org.apache.beam.sdk.util.PubsubClient.ProjectPath; import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; import org.apache.beam.sdk.util.PubsubClient.TopicPath; - -import com.google.common.collect.ImmutableMap; - import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; - -import java.util.Map; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for helper classes and methods in PubsubClient. */ +@RunWith(JUnit4.class) public class PubsubClientTest { @Rule public ExpectedException thrown = ExpectedException.none(); diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubGrpcClientTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubGrpcClientTest.java index 71ee27c86aae4..b36b93474f3be 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubGrpcClientTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubGrpcClientTest.java @@ -20,11 +20,6 @@ import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; -import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; -import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; -import org.apache.beam.sdk.util.PubsubClient.TopicPath; - import com.google.auth.oauth2.GoogleCredentials; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,19 +33,24 @@ import com.google.pubsub.v1.PullResponse; import com.google.pubsub.v1.ReceivedMessage; import com.google.pubsub.v1.SubscriberGrpc; - import io.grpc.ManagedChannel; +import java.io.IOException; +import java.util.List; +import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; +import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; +import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; +import org.apache.beam.sdk.util.PubsubClient.TopicPath; import org.junit.After; import org.junit.Before; 
import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; import org.mockito.Mockito; -import java.io.IOException; -import java.util.List; - /** * Tests for PubsubGrpcClient. */ +@RunWith(JUnit4.class) public class PubsubGrpcClientTest { private ManagedChannel mockChannel; private GoogleCredentials mockCredentials; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubJsonClientTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubJsonClientTest.java index dfdc46ecea680..b6d7ccb2c0e38 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubJsonClientTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubJsonClientTest.java @@ -20,11 +20,6 @@ import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; -import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; -import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; -import org.apache.beam.sdk.util.PubsubClient.TopicPath; - import com.google.api.services.pubsub.Pubsub; import com.google.api.services.pubsub.model.PublishRequest; import com.google.api.services.pubsub.model.PublishResponse; @@ -34,18 +29,23 @@ import com.google.api.services.pubsub.model.ReceivedMessage; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; - +import java.io.IOException; +import java.util.List; +import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; +import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; +import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; +import org.apache.beam.sdk.util.PubsubClient.TopicPath; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; import org.mockito.Mockito; -import java.io.IOException; -import java.util.List; - /** * Tests for PubsubJsonClient. 
*/ +@RunWith(JUnit4.class) public class PubsubJsonClientTest { private Pubsub mockPubsub; private PubsubClient client; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubTestClientTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubTestClientTest.java index d788f1070cecc..b9b1d3f8bc8ef 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubTestClientTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/PubsubTestClientTest.java @@ -20,26 +20,26 @@ import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; -import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; -import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; -import org.apache.beam.sdk.util.PubsubClient.TopicPath; -import org.apache.beam.sdk.util.PubsubTestClient.PubsubTestClientFactory; - import com.google.api.client.util.Clock; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - -import org.junit.Test; - import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicLong; +import org.apache.beam.sdk.util.PubsubClient.IncomingMessage; +import org.apache.beam.sdk.util.PubsubClient.OutgoingMessage; +import org.apache.beam.sdk.util.PubsubClient.SubscriptionPath; +import org.apache.beam.sdk.util.PubsubClient.TopicPath; +import org.apache.beam.sdk.util.PubsubTestClient.PubsubTestClientFactory; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for PubsubTestClient. 
*/ +@RunWith(JUnit4.class) public class PubsubTestClientTest { private static final TopicPath TOPIC = PubsubClient.topicPathFromName("testProject", "testTopic"); private static final SubscriptionPath SUBSCRIPTION = diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTest.java index f4ae996143c5e..d990ee066e277 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTest.java @@ -19,6 +19,8 @@ import static org.junit.Assert.assertEquals; +import com.google.common.collect.ImmutableList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; @@ -33,17 +35,12 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.ImmutableList; - import org.joda.time.Duration; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; - /** * Tests for {@link Reshuffle}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTriggerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTriggerTest.java index b17ce81cf4823..83077f4cc37fa 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTriggerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ReshuffleTriggerTest.java @@ -25,7 +25,6 @@ import org.apache.beam.sdk.transforms.windowing.FixedWindows; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.Trigger; - import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/RetryHttpRequestInitializerTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/RetryHttpRequestInitializerTest.java index 91d74db619c40..71554b573ac37 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/RetryHttpRequestInitializerTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/RetryHttpRequestInitializerTest.java @@ -46,7 +46,11 @@ import com.google.api.client.util.Sleeper; import com.google.api.services.storage.Storage; import com.google.api.services.storage.Storage.Objects.Get; - +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.security.PrivateKey; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicLong; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Assert; @@ -59,12 +63,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.io.IOException; -import java.net.SocketTimeoutException; -import java.security.PrivateKey; -import java.util.Arrays; -import java.util.concurrent.atomic.AtomicLong; - /** * Tests for RetryHttpRequestInitializer. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java index 30406fcb10f71..5435a4588e841 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/SerializableUtilsTest.java @@ -19,26 +19,23 @@ import static org.junit.Assert.assertEquals; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.DeterministicStandardCoder; - -import com.google.common.collect.ImmutableList; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.util.List; - /** Tests for {@link SerializableUtils}. 
*/ @RunWith(JUnit4.class) public class SerializableUtilsTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StreamUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StreamUtilsTest.java index 60eeb0daa88dc..7a31184dc5853 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StreamUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StreamUtilsTest.java @@ -21,16 +21,15 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** Unit tests for {@link ExposedByteArrayInputStream}. */ @RunWith(JUnit4.class) diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java index e87bbee58f68c..042e9e3d76ecd 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StringUtilsTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PDone; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StructsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StructsTest.java index 59dbb56ea0e9a..91090d1a2d6df 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StructsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/StructsTest.java @@ -34,18 +34,17 @@ 
import static org.apache.beam.sdk.util.Structs.getString; import static org.apache.beam.sdk.util.Structs.getStrings; -import org.hamcrest.Matchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for Structs. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TimerInternalsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TimerInternalsTest.java index bc2930c65c376..e8ffdb34411e3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TimerInternalsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TimerInternalsTest.java @@ -30,7 +30,6 @@ import org.apache.beam.sdk.util.TimerInternals.TimerDataCoder; import org.apache.beam.sdk.util.state.StateNamespace; import org.apache.beam.sdk.util.state.StateNamespaces; - import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java index 4892bbd23c727..a1f1d21fc3dcb 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/TriggerTester.java @@ -20,9 +20,21 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; - import static org.junit.Assert.assertTrue; +import com.google.common.base.MoreObjects; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; 
+import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.PriorityQueue; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; @@ -41,26 +53,9 @@ import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.WatermarkHoldState; import org.apache.beam.sdk.values.TimestampedValue; - -import com.google.common.base.MoreObjects; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; - import org.joda.time.Duration; import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.PriorityQueue; -import java.util.Set; - -import javax.annotation.Nullable; - /** * Test utility that runs a {@link Trigger}, using in-memory stub implementation to provide * the {@link StateInternals}. 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedInputStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedInputStreamTest.java index dac147c9036cc..e3a8d714c1768 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedInputStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedInputStreamTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import java.io.ByteArrayInputStream; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -27,8 +28,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; - /** Unit tests for {@link UnownedInputStream}. */ @RunWith(JUnit4.class) public class UnownedInputStreamTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedOutputStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedOutputStreamTest.java index 30761e7ef7037..e36b9fbbd9be7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedOutputStreamTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UnownedOutputStreamTest.java @@ -19,6 +19,7 @@ import static org.junit.Assert.assertEquals; +import java.io.ByteArrayOutputStream; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -26,8 +27,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayOutputStream; - /** Unit tests for {@link UnownedOutputStream}. 
*/ @RunWith(JUnit4.class) public class UnownedOutputStreamTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UploadIdResponseInterceptorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UploadIdResponseInterceptorTest.java index 572897732524c..8b9f77e66fbf3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UploadIdResponseInterceptorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UploadIdResponseInterceptorTest.java @@ -17,22 +17,19 @@ */ package org.apache.beam.sdk.util; -import org.apache.beam.sdk.testing.ExpectedLogs; - import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpResponse; import com.google.api.client.testing.http.HttpTesting; import com.google.api.client.testing.http.MockHttpTransport; import com.google.api.client.testing.http.MockLowLevelHttpResponse; - +import java.io.IOException; +import org.apache.beam.sdk.testing.ExpectedLogs; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; - /** * A test for {@link UploadIdResponseInterceptor}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UserCodeExceptionTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UserCodeExceptionTest.java index 3be114605928a..a6d1cb57ef636 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UserCodeExceptionTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/UserCodeExceptionTest.java @@ -24,6 +24,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import java.io.IOException; import org.hamcrest.Description; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -34,8 +35,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; - /** * Tests for {@link UserCodeException} functionality. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/VarIntTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/VarIntTest.java index 82a3689d78a76..02136affbe51f 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/VarIntTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/VarIntTest.java @@ -21,17 +21,16 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.EOFException; +import java.io.IOException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.EOFException; -import java.io.IOException; - /** Unit tests for {@link VarInt}. 
*/ @RunWith(JUnit4.class) public class VarIntTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/WindowedValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/WindowedValueTest.java index 90969b7841062..0c69a594df5ab 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/WindowedValueTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/WindowedValueTest.java @@ -22,6 +22,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import java.util.Arrays; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.StringUtf8Coder; @@ -29,18 +32,12 @@ import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.PaneInfo; import org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Iterables; - import org.joda.time.Instant; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** Test case for {@link WindowedValue}. 
*/ @RunWith(JUnit4.class) public class WindowedValueTest { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ZipFilesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ZipFilesTest.java index 31e6b143d3ea7..1c038487bd998 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ZipFilesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/ZipFilesTest.java @@ -29,14 +29,6 @@ import com.google.common.io.ByteSource; import com.google.common.io.CharSource; import com.google.common.io.Files; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -46,6 +38,12 @@ import java.util.Enumeration; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests for the {@link ZipFiles} class. These tests make sure that the handling diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/ReflectHelpersTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/ReflectHelpersTest.java index 9a5dc92023d07..e1073da484575 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/ReflectHelpersTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/common/ReflectHelpersTest.java @@ -19,15 +19,13 @@ import static org.junit.Assert.assertEquals; +import java.util.List; +import java.util.Map; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; -import java.util.Map; - /** * Tests for {@link ReflectHelpers}. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/gcsfs/GcsPathTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/gcsfs/GcsPathTest.java index fdd1dfd6e7aa5..5c861845bd3ff 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/gcsfs/GcsPathTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/gcsfs/GcsPathTest.java @@ -23,18 +23,17 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import org.hamcrest.Matchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.net.URI; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.Iterator; import java.util.List; +import org.hamcrest.Matchers; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Tests of GcsPath. diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternalsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternalsTest.java index b7388ee741d79..ad70bcafe7535 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternalsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/CopyOnAccessInMemoryStateInternalsTest.java @@ -39,7 +39,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFn; import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; - import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/InMemoryStateInternalsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/InMemoryStateInternalsTest.java index 48d1a30c4cb87..08a6bc1d29d71 100644 --- 
a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/InMemoryStateInternalsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/InMemoryStateInternalsTest.java @@ -21,21 +21,19 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; +import java.util.Arrays; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.coders.VarIntCoder; import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; - /** * Tests for {@link InMemoryStateInternals}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateNamespacesTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateNamespacesTest.java index 385ab6c66e8f5..f546e561b53a5 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateNamespacesTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateNamespacesTest.java @@ -24,7 +24,6 @@ import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.transforms.windowing.IntervalWindow; - import org.hamcrest.Matchers; import org.joda.time.Instant; import org.junit.Test; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateTagTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateTagTest.java index ec7698d0b3811..2c8c9ccd00d86 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateTagTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/util/state/StateTagTest.java @@ -30,7 +30,6 @@ import 
org.apache.beam.sdk.transforms.Min.MinIntegerFn; import org.apache.beam.sdk.transforms.windowing.OutputTimeFns; import org.apache.beam.sdk.util.CombineFnUtil; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/KVTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/KVTest.java index f87e2ae94e358..202d6f34344d7 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/KVTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/KVTest.java @@ -23,13 +23,11 @@ import static org.junit.Assert.assertThat; import com.google.common.collect.ImmutableList; - +import java.util.Comparator; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Comparator; - /** * Tests for KV. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionListTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionListTest.java index af2c14c9bd677..f76bf7e23bf52 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionListTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionListTest.java @@ -21,12 +21,11 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; +import java.util.Collections; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Collections; - /** * Tests for PCollectionLists. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java index 13218b2979c71..1467ae8a52fbf 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PCollectionTupleTest.java @@ -21,6 +21,9 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import java.io.Serializable; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.RunnableOnService; @@ -30,16 +33,11 @@ import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.util.WindowingStrategy; import org.apache.beam.sdk.values.PCollection.IsBounded; - import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.Arrays; -import java.util.List; - /** Unit tests for {@link PCollectionTuple}. 
*/ @RunWith(JUnit4.class) public final class PCollectionTupleTest implements Serializable { diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PDoneTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PDoneTest.java index 5554b313b505f..4000e5db0276d 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PDoneTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/PDoneTest.java @@ -19,6 +19,7 @@ import static org.apache.beam.sdk.TestUtils.LINES; +import java.io.File; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.testing.NeedsRunner; @@ -26,7 +27,6 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.PTransform; - import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; @@ -35,8 +35,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; - /** * Tests for PDone. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorTest.java index 162c991f8f4eb..39472f94ec858 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorTest.java @@ -20,18 +20,16 @@ import static org.junit.Assert.assertEquals; import com.google.common.reflect.TypeToken; - +import java.lang.reflect.Method; +import java.lang.reflect.TypeVariable; +import java.util.List; +import java.util.Set; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.lang.reflect.Method; -import java.lang.reflect.TypeVariable; -import java.util.List; -import java.util.Set; - /** * Tests for TypeDescriptor. 
*/ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorsTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorsTest.java index 59acdf007ddc3..1bf0fc9cf20da 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorsTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypeDescriptorsTest.java @@ -23,17 +23,15 @@ import static org.apache.beam.sdk.values.TypeDescriptors.lists; import static org.apache.beam.sdk.values.TypeDescriptors.sets; import static org.apache.beam.sdk.values.TypeDescriptors.strings; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; +import java.util.List; +import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.List; -import java.util.Set; - /** * Tests for {@link TypeDescriptors}. */ diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java index 287223f44b15c..f33b3a2b692d3 100644 --- a/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java +++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/values/TypedPValueTest.java @@ -28,7 +28,6 @@ import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.ParDo; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; diff --git a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/InnerJoinTest.java b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/InnerJoinTest.java index 6622fdc67e0ef..423ab9c5cdcbb 100644 --- a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/InnerJoinTest.java +++ 
b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/InnerJoinTest.java @@ -17,19 +17,17 @@ */ package org.apache.beam.sdk.extensions.joinlibrary; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Before; import org.junit.Test; -import java.util.ArrayList; -import java.util.List; - /** * This test Inner Join functionality. */ diff --git a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterLeftJoinTest.java b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterLeftJoinTest.java index 91b0740f30b3c..c32163fd55807 100644 --- a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterLeftJoinTest.java +++ b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterLeftJoinTest.java @@ -17,19 +17,17 @@ */ package org.apache.beam.sdk.extensions.joinlibrary; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Before; import org.junit.Test; -import java.util.ArrayList; -import java.util.List; - /** * This test Outer Left Join functionality. 
diff --git a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterRightJoinTest.java b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterRightJoinTest.java index 7977df77d8613..5a45f73e59e7a 100644 --- a/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterRightJoinTest.java +++ b/sdks/java/extensions/join-library/src/test/java/org/apache/beam/sdk/extensions/joinlibrary/OuterRightJoinTest.java @@ -17,19 +17,17 @@ */ package org.apache.beam.sdk.extensions.joinlibrary; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.junit.Before; import org.junit.Test; -import java.util.ArrayList; -import java.util.List; - /** * This test Outer Right Join functionality. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java index 48e2258f83fd5..7826559e23f8c 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtils.java @@ -26,7 +26,8 @@ import com.google.api.services.bigquery.model.TableSchema; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; - +import java.util.List; +import javax.annotation.Nullable; import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.Schema.Type; @@ -34,10 +35,6 @@ import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; -import java.util.List; - -import javax.annotation.Nullable; - /** * A set of utilities for working with Avro files. 
* diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java index e61dcca993a00..01a8a1c5cd4e8 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java @@ -21,6 +21,60 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.api.client.json.JsonFactory; +import com.google.api.client.util.BackOff; +import com.google.api.client.util.BackOffUtils; +import com.google.api.client.util.Sleeper; +import com.google.api.services.bigquery.Bigquery; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfigurationExtract; +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.api.services.bigquery.model.JobConfigurationTableCopy; +import com.google.api.services.bigquery.model.JobReference; +import com.google.api.services.bigquery.model.JobStatistics; +import com.google.api.services.bigquery.model.JobStatus; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.cloud.hadoop.util.ApiErrorExtractor; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; 
+import com.google.common.io.CountingOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.nio.channels.Channels; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.annotation.Nullable; +import org.apache.avro.generic.GenericRecord; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.AtomicCoder; import org.apache.beam.sdk.coders.Coder; @@ -75,67 +129,10 @@ import org.apache.beam.sdk.values.PInput; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.TupleTagList; - -import com.google.api.client.json.JsonFactory; -import com.google.api.client.util.BackOff; -import com.google.api.client.util.BackOffUtils; -import com.google.api.client.util.Sleeper; -import com.google.api.services.bigquery.Bigquery; -import com.google.api.services.bigquery.model.Job; -import com.google.api.services.bigquery.model.JobConfigurationExtract; -import com.google.api.services.bigquery.model.JobConfigurationLoad; -import com.google.api.services.bigquery.model.JobConfigurationQuery; -import com.google.api.services.bigquery.model.JobConfigurationTableCopy; -import com.google.api.services.bigquery.model.JobReference; -import com.google.api.services.bigquery.model.JobStatistics; -import 
com.google.api.services.bigquery.model.JobStatus; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; -import com.google.cloud.hadoop.util.ApiErrorExtractor; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import com.google.common.base.MoreObjects; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.io.CountingOutputStream; - -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.avro.generic.GenericRecord; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.nio.channels.Channels; -import java.nio.channels.WritableByteChannel; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import javax.annotation.Nullable; - /** * {@link PTransform}s for reading and writing * BigQuery tables. @@ -1423,8 +1420,8 @@ public static class Bound extends PTransform, PDone> { // Maximum number of files in a single partition. 
static final int MAX_NUM_FILES = 10000; - // Maximum number of bytes in a single partition. - static final long MAX_SIZE_BYTES = 3 * (1L << 40); + // Maximum number of bytes in a single partition -- 11 TiB just under BQ's 12 TiB limit. + static final long MAX_SIZE_BYTES = 11 * (1L << 40); // The maximum number of retry jobs. static final int MAX_RETRY_JOBS = 3; diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java index 0af6df8a33176..c0951fcdeae5e 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServices.java @@ -17,8 +17,6 @@ */ package org.apache.beam.sdk.io.gcp.bigquery; -import org.apache.beam.sdk.options.BigQueryOptions; - import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.Job; import com.google.api.services.bigquery.model.JobConfigurationExtract; @@ -30,13 +28,12 @@ import com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableReference; import com.google.api.services.bigquery.model.TableRow; - import java.io.IOException; import java.io.Serializable; import java.util.List; import java.util.NoSuchElementException; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.BigQueryOptions; /** An interface for real, mock, or fake implementations of Cloud BigQuery services. 
*/ interface BigQueryServices extends Serializable { diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java index bd1097f54c0c0..6aff3b097710a 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java @@ -19,13 +19,6 @@ import static com.google.common.base.Preconditions.checkNotNull; -import org.apache.beam.sdk.options.BigQueryOptions; -import org.apache.beam.sdk.options.GcsOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; -import org.apache.beam.sdk.util.IntervalBoundedExponentialBackOff; -import org.apache.beam.sdk.util.Transport; - import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; import com.google.api.client.util.BackOff; @@ -51,10 +44,6 @@ import com.google.api.services.bigquery.model.TableRow; import com.google.cloud.hadoop.util.ApiErrorExtractor; import com.google.common.annotations.VisibleForTesting; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; @@ -65,8 +54,15 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; - import javax.annotation.Nullable; +import org.apache.beam.sdk.options.BigQueryOptions; +import org.apache.beam.sdk.options.GcsOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.apache.beam.sdk.util.IntervalBoundedExponentialBackOff; +import 
org.apache.beam.sdk.util.Transport; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link BigQueryServices} that actually communicates with the cloud BigQuery diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserter.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserter.java index bf038f5e8c509..a64dc9fe9b9af 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserter.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserter.java @@ -17,10 +17,6 @@ */ package org.apache.beam.sdk.io.gcp.bigquery; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; -import org.apache.beam.sdk.options.PipelineOptions; - import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; import com.google.api.client.util.ExponentialBackOff; @@ -32,14 +28,14 @@ import com.google.api.services.bigquery.model.TableSchema; import com.google.cloud.hadoop.util.ApiErrorExtractor; import com.google.common.annotations.VisibleForTesting; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.concurrent.TimeUnit; - import javax.annotation.Nullable; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.WriteDisposition; +import org.apache.beam.sdk.options.PipelineOptions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Inserts rows into BigQuery. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIterator.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIterator.java index 3afdffaca5952..729da97f7f7ec 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIterator.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIterator.java @@ -21,8 +21,6 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; - import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; @@ -49,11 +47,6 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.Uninterruptibles; - -import org.joda.time.Duration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -64,8 +57,11 @@ import java.util.Objects; import java.util.Random; import java.util.concurrent.TimeUnit; - import javax.annotation.Nullable; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.joda.time.Duration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Iterates over all rows in a table. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java index bfdf4aaf0a637..3a9ffce487f6f 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java @@ -21,6 +21,26 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import com.google.bigtable.v2.MutateRowResponse; +import com.google.bigtable.v2.Mutation; +import com.google.bigtable.v2.Row; +import com.google.bigtable.v2.RowFilter; +import com.google.bigtable.v2.SampleRowKeysResponse; +import com.google.cloud.bigtable.config.BigtableOptions; +import com.google.cloud.bigtable.config.RetryOptions; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.protobuf.ByteString; +import io.grpc.Status; +import java.io.IOException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.concurrent.ConcurrentLinkedQueue; +import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.protobuf.ProtoCoder; @@ -40,32 +60,9 @@ import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; - -import com.google.bigtable.v2.MutateRowResponse; -import com.google.bigtable.v2.Mutation; -import com.google.bigtable.v2.Row; -import com.google.bigtable.v2.RowFilter; -import com.google.bigtable.v2.SampleRowKeysResponse; -import 
com.google.cloud.bigtable.config.BigtableOptions; -import com.google.cloud.bigtable.config.RetryOptions; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableList; -import com.google.common.util.concurrent.FutureCallback; -import com.google.common.util.concurrent.Futures; -import com.google.protobuf.ByteString; - -import io.grpc.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; -import java.util.concurrent.ConcurrentLinkedQueue; -import javax.annotation.Nullable; - /** * A bounded source and sink for Google Cloud Bigtable. * diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableService.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableService.java index 2a7e3a016a96b..ecd38a7468e4b 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableService.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableService.java @@ -17,20 +17,18 @@ */ package org.apache.beam.sdk.io.gcp.bigtable; -import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; -import org.apache.beam.sdk.values.KV; - import com.google.bigtable.v2.MutateRowResponse; import com.google.bigtable.v2.Mutation; import com.google.bigtable.v2.Row; import com.google.bigtable.v2.SampleRowKeysResponse; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; - import java.io.IOException; import java.io.Serializable; import java.util.List; import java.util.NoSuchElementException; +import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; +import org.apache.beam.sdk.values.KV; /** * An interface for real or fake implementations of Cloud Bigtable. 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java index b9288df11eeb1..07a183efc9d96 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableServiceImpl.java @@ -17,9 +17,6 @@ */ package org.apache.beam.sdk.io.gcp.bigtable; -import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; -import org.apache.beam.sdk.values.KV; - import com.google.bigtable.admin.v2.GetTableRequest; import com.google.bigtable.v2.MutateRowRequest; import com.google.bigtable.v2.MutateRowResponse; @@ -40,16 +37,15 @@ import com.google.common.io.Closer; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; - import io.grpc.Status.Code; import io.grpc.StatusRuntimeException; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.List; import java.util.NoSuchElementException; +import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; +import org.apache.beam.sdk.values.KV; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link BigtableService} that actually communicates with the Cloud Bigtable diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java index 852595aa79ab1..c7433d37d1607 100644 --- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java +++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java @@ -30,27 +30,6 @@ import static 
com.google.datastore.v1.client.DatastoreHelper.makeUpsert; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; -import org.apache.beam.sdk.annotations.Experimental; -import org.apache.beam.sdk.options.GcpOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.Flatten; -import org.apache.beam.sdk.transforms.GroupByKey; -import org.apache.beam.sdk.transforms.MapElements; -import org.apache.beam.sdk.transforms.PTransform; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.transforms.SimpleFunction; -import org.apache.beam.sdk.transforms.Values; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.transforms.display.DisplayData.Builder; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; -import org.apache.beam.sdk.util.RetryHttpRequestInitializer; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PBegin; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.PDone; - import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; @@ -76,16 +55,34 @@ import com.google.datastore.v1.client.DatastoreOptions; import com.google.datastore.v1.client.QuerySplitter; import com.google.protobuf.Int32Value; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; import javax.annotation.Nullable; +import org.apache.beam.sdk.annotations.Experimental; +import org.apache.beam.sdk.options.GcpOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.Create; +import org.apache.beam.sdk.transforms.DoFn; +import 
org.apache.beam.sdk.transforms.Flatten; +import org.apache.beam.sdk.transforms.GroupByKey; +import org.apache.beam.sdk.transforms.MapElements; +import org.apache.beam.sdk.transforms.PTransform; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.transforms.SimpleFunction; +import org.apache.beam.sdk.transforms.Values; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.transforms.display.DisplayData.Builder; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.apache.beam.sdk.util.RetryHttpRequestInitializer; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PBegin; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.PDone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

        {@link DatastoreV1} provides an API to Read, Write and Delete {@link PCollection PCollections} diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtilsTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtilsTest.java index 316392fe8f77d..59cf1f7c0d2b5 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtilsTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryAvroUtilsTest.java @@ -19,25 +19,22 @@ import static org.junit.Assert.assertEquals; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.coders.DefaultCoder; - import com.google.api.services.bigquery.model.TableFieldSchema; import com.google.api.services.bigquery.model.TableRow; import com.google.api.services.bigquery.model.TableSchema; import com.google.common.collect.Lists; - +import java.util.ArrayList; +import java.util.List; import org.apache.avro.Schema; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericRecord; import org.apache.avro.reflect.Nullable; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.DefaultCoder; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.ArrayList; -import java.util.List; - /** * Tests for {@link BigQueryAvroUtils}. 
*/ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java index ca60696960e8d..a6d7e2f5e957c 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOTest.java @@ -17,12 +17,10 @@ */ package org.apache.beam.sdk.io.gcp.bigquery; +import static com.google.common.base.Preconditions.checkArgument; import static org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.fromJsonString; import static org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.toJsonString; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; - -import static com.google.common.base.Preconditions.checkArgument; - import static org.hamcrest.Matchers.hasItem; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; @@ -34,6 +32,39 @@ import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.when; +import com.google.api.client.util.Data; +import com.google.api.client.util.Strings; +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfigurationExtract; +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.api.services.bigquery.model.JobConfigurationTableCopy; +import com.google.api.services.bigquery.model.JobReference; +import com.google.api.services.bigquery.model.JobStatistics; +import com.google.api.services.bigquery.model.JobStatistics2; +import com.google.api.services.bigquery.model.JobStatistics4; +import com.google.api.services.bigquery.model.JobStatus; +import 
com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.io.File; +import java.io.FileFilter; +import java.io.IOException; +import java.io.Serializable; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.KvCoder; @@ -85,29 +116,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; - -import com.google.api.client.util.Data; -import com.google.api.client.util.Strings; -import com.google.api.services.bigquery.model.ErrorProto; -import com.google.api.services.bigquery.model.Job; -import com.google.api.services.bigquery.model.JobConfigurationExtract; -import com.google.api.services.bigquery.model.JobConfigurationLoad; -import com.google.api.services.bigquery.model.JobConfigurationQuery; -import com.google.api.services.bigquery.model.JobConfigurationTableCopy; -import com.google.api.services.bigquery.model.JobReference; -import com.google.api.services.bigquery.model.JobStatistics; -import com.google.api.services.bigquery.model.JobStatistics2; -import com.google.api.services.bigquery.model.JobStatistics4; -import com.google.api.services.bigquery.model.JobStatus; -import com.google.api.services.bigquery.model.Table; -import com.google.api.services.bigquery.model.TableFieldSchema; -import 
com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; - import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.junit.Assert; @@ -124,19 +132,6 @@ import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.io.Serializable; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Set; -import javax.annotation.Nullable; - /** * Tests for BigQueryIO. */ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java index 686685bd81451..eb5fbe6e05f5d 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java @@ -23,15 +23,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.JobServiceImpl; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.ExpectedLogs; -import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; -import org.apache.beam.sdk.util.RetryHttpRequestInitializer; -import 
org.apache.beam.sdk.util.Transport; - import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo; import com.google.api.client.googleapis.json.GoogleJsonErrorContainer; @@ -55,7 +46,19 @@ import com.google.api.services.bigquery.model.TableRow; import com.google.cloud.hadoop.util.ApiErrorExtractor; import com.google.common.collect.ImmutableList; - +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.JobServiceImpl; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.ExpectedLogs; +import org.apache.beam.sdk.testing.FastNanoClockAndSleeper; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.apache.beam.sdk.util.RetryHttpRequestInitializer; +import org.apache.beam.sdk.util.Transport; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -65,12 +68,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; - /** * Tests for {@link BigQueryServicesImpl}. 
*/ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserterTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserterTest.java index dac3911d82a17..fb79c74215d20 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserterTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableInserterTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.io.gcp.bigquery; import static com.google.common.base.Verify.verifyNotNull; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.fail; @@ -28,12 +27,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; -import org.apache.beam.sdk.testing.ExpectedLogs; -import org.apache.beam.sdk.util.RetryHttpRequestInitializer; -import org.apache.beam.sdk.util.Transport; - import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo; import com.google.api.client.googleapis.json.GoogleJsonErrorContainer; @@ -51,7 +44,14 @@ import com.google.api.services.bigquery.model.TableReference; import com.google.cloud.hadoop.util.RetryBoundedBackOff; import com.google.common.collect.ImmutableList; - +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.ExpectedLogs; +import org.apache.beam.sdk.util.RetryHttpRequestInitializer; +import org.apache.beam.sdk.util.Transport; import org.junit.After; import org.junit.Before; import 
org.junit.Rule; @@ -62,10 +62,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - /** * Tests of {@link BigQueryTableInserter}. */ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIteratorTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIteratorTest.java index 457b071cac1ee..ab848f5b37796 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIteratorTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableRowIteratorTest.java @@ -45,7 +45,10 @@ import com.google.api.services.bigquery.model.TableReference; import com.google.api.services.bigquery.model.TableRow; import com.google.api.services.bigquery.model.TableSchema; - +import java.io.IOException; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; import org.junit.After; import org.junit.Before; import org.junit.Rule; @@ -56,11 +59,6 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; - /** * Tests for {@link BigQueryTableRowIterator}. 
*/ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtilTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtilTest.java index 89284df48ee33..e539b33b0ee04 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtilTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryUtilTest.java @@ -30,10 +30,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; -import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.options.PipelineOptionsFactory; - import com.google.api.services.bigquery.Bigquery; import com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableCell; @@ -45,7 +41,14 @@ import com.google.api.services.bigquery.model.TableRow; import com.google.api.services.bigquery.model.TableSchema; import com.google.common.collect.ImmutableList; - +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl.DatasetServiceImpl; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.hamcrest.Matchers; import org.junit.After; import org.junit.Assert; @@ -60,12 +63,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; - /** * Tests for util classes related to BigQuery. 
*/ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java index adf45a44f38c6..61b404ae441db 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java @@ -17,6 +17,8 @@ */ package org.apache.beam.sdk.io.gcp.bigtable; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Verify.verifyNotNull; import static org.apache.beam.sdk.testing.SourceTestUtils.assertSourcesEqualReferenceSource; import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionExhaustive; import static org.apache.beam.sdk.testing.SourceTestUtils.assertSplitAtFractionFails; @@ -25,31 +27,12 @@ import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasKey; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasLabel; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasValue; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Verify.verifyNotNull; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; -import org.apache.beam.sdk.Pipeline.PipelineExecutionException; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.io.BoundedSource.BoundedReader; -import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; -import org.apache.beam.sdk.io.range.ByteKey; -import org.apache.beam.sdk.io.range.ByteKeyRange; -import org.apache.beam.sdk.testing.ExpectedLogs; -import 
org.apache.beam.sdk.testing.PAssert; -import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.Create; -import org.apache.beam.sdk.transforms.display.DisplayData; -import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator; -import org.apache.beam.sdk.values.KV; -import org.apache.beam.sdk.values.PCollection; -import org.apache.beam.sdk.values.TypeDescriptor; - import com.google.bigtable.v2.Cell; import com.google.bigtable.v2.Column; import com.google.bigtable.v2.Family; @@ -69,15 +52,6 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; - -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; @@ -90,8 +64,29 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; - import javax.annotation.Nullable; +import org.apache.beam.sdk.Pipeline.PipelineExecutionException; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.io.BoundedSource.BoundedReader; +import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO.BigtableSource; +import org.apache.beam.sdk.io.range.ByteKey; +import org.apache.beam.sdk.io.range.ByteKeyRange; +import org.apache.beam.sdk.testing.ExpectedLogs; +import org.apache.beam.sdk.testing.PAssert; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.Create; +import org.apache.beam.sdk.transforms.display.DisplayData; +import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator; +import org.apache.beam.sdk.values.KV; +import org.apache.beam.sdk.values.PCollection; +import org.apache.beam.sdk.values.TypeDescriptor; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Rule; 
+import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * Unit tests for {@link BigtableIO}. diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableReadIT.java index 02d403f9b9ac8..a064bd64235ba 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableReadIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableReadIT.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.io.gcp.bigtable; +import com.google.bigtable.v2.Row; +import com.google.cloud.bigtable.config.BigtableOptions; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.values.PCollection; - -import com.google.bigtable.v2.Row; -import com.google.cloud.bigtable.config.BigtableOptions; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java index ee3a6f97642bb..9afcd60f264a4 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableWriteIT.java @@ -19,14 +19,6 @@ import static org.junit.Assert.assertThat; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.io.CountingInput; -import org.apache.beam.sdk.options.PipelineOptionsFactory; 
-import org.apache.beam.sdk.testing.TestPipeline; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.transforms.ParDo; -import org.apache.beam.sdk.values.KV; - import com.google.bigtable.admin.v2.ColumnFamily; import com.google.bigtable.admin.v2.CreateTableRequest; import com.google.bigtable.admin.v2.DeleteTableRequest; @@ -44,20 +36,25 @@ import com.google.cloud.bigtable.grpc.scanner.ResultScanner; import com.google.common.collect.ImmutableList; import com.google.protobuf.ByteString; - -import org.hamcrest.Matchers; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; - import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.io.CountingInput; +import org.apache.beam.sdk.options.PipelineOptionsFactory; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.values.KV; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; /** * End-to-end tests of BigtableWrite. 
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java index 31b5da43aa1ea..ab1df2f3e9f5d 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java @@ -17,13 +17,6 @@ */ package org.apache.beam.sdk.io.gcp.datastore; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DATASTORE_BATCH_UPDATE_LIMIT; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.DEFAULT_BUNDLE_SIZE_BYTES; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.QUERY_BATCH_LIMIT; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.getEstimatedSizeBytes; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.makeRequest; -import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.isValidKey; -import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static com.google.datastore.v1.PropertyFilter.Operator.EQUAL; import static com.google.datastore.v1.PropertyOrder.Direction.DESCENDING; import static com.google.datastore.v1.client.DatastoreHelper.makeDelete; @@ -32,6 +25,13 @@ import static com.google.datastore.v1.client.DatastoreHelper.makeOrder; import static com.google.datastore.v1.client.DatastoreHelper.makeUpsert; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; +import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DATASTORE_BATCH_UPDATE_LIMIT; +import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.DEFAULT_BUNDLE_SIZE_BYTES; +import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.QUERY_BATCH_LIMIT; +import static 
org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.getEstimatedSizeBytes; +import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.makeRequest; +import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.isValidKey; +import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -47,6 +47,24 @@ import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; +import com.google.datastore.v1.CommitRequest; +import com.google.datastore.v1.Entity; +import com.google.datastore.v1.EntityResult; +import com.google.datastore.v1.Key; +import com.google.datastore.v1.Mutation; +import com.google.datastore.v1.PartitionId; +import com.google.datastore.v1.Query; +import com.google.datastore.v1.QueryResultBatch; +import com.google.datastore.v1.RunQueryRequest; +import com.google.datastore.v1.RunQueryResponse; +import com.google.datastore.v1.client.Datastore; +import com.google.datastore.v1.client.QuerySplitter; +import com.google.protobuf.Int32Value; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DatastoreWriterFn; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntity; import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntityFn; @@ -69,21 +87,6 @@ import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.POutput; - -import com.google.datastore.v1.CommitRequest; -import com.google.datastore.v1.Entity; -import com.google.datastore.v1.EntityResult; -import com.google.datastore.v1.Key; -import com.google.datastore.v1.Mutation; -import com.google.datastore.v1.PartitionId; -import com.google.datastore.v1.Query; -import 
com.google.datastore.v1.QueryResultBatch; -import com.google.datastore.v1.RunQueryRequest; -import com.google.datastore.v1.RunQueryResponse; -import com.google.datastore.v1.client.Datastore; -import com.google.datastore.v1.client.QuerySplitter; -import com.google.protobuf.Int32Value; - import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -96,12 +99,6 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - /** * Tests for {@link DatastoreV1}. */ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1ReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1ReadIT.java index 8fedc774f661c..99988331f775b 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1ReadIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1ReadIT.java @@ -23,6 +23,11 @@ import static org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.makeAncestorKey; import static org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.makeEntity; +import com.google.datastore.v1.Entity; +import com.google.datastore.v1.Key; +import com.google.datastore.v1.Query; +import com.google.datastore.v1.client.Datastore; +import java.util.UUID; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.UpsertMutationBuilder; import org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.V1TestWriter; @@ -31,20 +36,12 @@ import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.values.PCollection; - -import com.google.datastore.v1.Entity; -import com.google.datastore.v1.Key; -import com.google.datastore.v1.Query; -import com.google.datastore.v1.client.Datastore; - import 
org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.UUID; - /** * End-to-end tests for Datastore DatastoreV1.Read. */ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestOptions.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestOptions.java index 360855fee0f2c..9ebba3a4c5092 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestOptions.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestOptions.java @@ -18,12 +18,11 @@ package org.apache.beam.sdk.io.gcp.datastore; +import javax.annotation.Nullable; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.testing.TestPipelineOptions; -import javax.annotation.Nullable; - /** * DatastoreV1 Datastore related pipeline options. 
*/ diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestUtil.java index 1e323ec06a28c..a596bb3b76868 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestUtil.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1TestUtil.java @@ -25,12 +25,6 @@ import static com.google.datastore.v1.client.DatastoreHelper.makeUpsert; import static com.google.datastore.v1.client.DatastoreHelper.makeValue; -import org.apache.beam.sdk.options.GcpOptions; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.transforms.DoFn; -import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; -import org.apache.beam.sdk.util.RetryHttpRequestInitializer; - import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.util.BackOff; import com.google.api.client.util.BackOffUtils; @@ -51,16 +45,19 @@ import com.google.datastore.v1.client.DatastoreFactory; import com.google.datastore.v1.client.DatastoreOptions; import com.google.protobuf.Int32Value; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.UUID; import javax.annotation.Nullable; +import org.apache.beam.sdk.options.GcpOptions; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.util.AttemptBoundedExponentialBackOff; +import org.apache.beam.sdk.util.RetryHttpRequestInitializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; class V1TestUtil { private static final Logger LOG = LoggerFactory.getLogger(V1TestUtil.class); diff --git 
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1WriteIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1WriteIT.java index b97c05c8c6c4c..fa7c1402a5b14 100644 --- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1WriteIT.java +++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/V1WriteIT.java @@ -22,21 +22,19 @@ import static org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.deleteAllEntities; import static org.junit.Assert.assertEquals; +import java.util.UUID; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.CountingInput; import org.apache.beam.sdk.io.gcp.datastore.V1TestUtil.CreateEntityFn; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.ParDo; - import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.UUID; - /** * End-to-end tests for Datastore DatastoreV1.Write. 
*/ diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroHDFSFileSource.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroHDFSFileSource.java index 9dc926bf66dac..26299952daf55 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroHDFSFileSource.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroHDFSFileSource.java @@ -17,30 +17,27 @@ */ package org.apache.beam.sdk.io.hdfs; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.KvCoder; -import org.apache.beam.sdk.io.BoundedSource; -import org.apache.beam.sdk.options.PipelineOptions; -import org.apache.beam.sdk.util.CoderUtils; -import org.apache.beam.sdk.values.KV; - import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; - +import java.io.IOException; +import java.util.List; +import javax.annotation.Nullable; import org.apache.avro.Schema; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapreduce.AvroJob; import org.apache.avro.mapreduce.AvroKeyInputFormat; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.KvCoder; +import org.apache.beam.sdk.io.BoundedSource; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.beam.sdk.util.CoderUtils; +import org.apache.beam.sdk.values.KV; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import java.io.IOException; -import java.util.List; -import javax.annotation.Nullable; - /** * A {@code BoundedSource} for reading Avro files resident in a Hadoop filesystem. 
* diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoder.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoder.java index a831afec8110c..c1340c05169de 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoder.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoder.java @@ -19,22 +19,20 @@ import static com.google.common.base.Preconditions.checkArgument; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.util.CloudObject; -import org.apache.beam.sdk.util.PropertyNames; - import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.avro.mapred.AvroWrapper; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.List; +import org.apache.avro.mapred.AvroWrapper; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.beam.sdk.util.PropertyNames; /** * A {@code AvroWrapperCoder} is a {@link Coder} for a Java class that implements {@link diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java index 688447aea6567..0b538b355bc18 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java @@ -19,16 +19,18 @@ import static com.google.common.base.Preconditions.checkState; +import com.google.api.client.util.Maps; +import 
com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import java.io.IOException; +import java.util.Map; +import java.util.Random; +import java.util.Set; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.Sink; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.values.KV; - -import com.google.api.client.util.Maps; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -47,11 +49,6 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import java.io.IOException; -import java.util.Map; -import java.util.Random; -import java.util.Set; - /** * A {@code Sink} for writing records to a Hadoop filesystem using a Hadoop file-based output * format. diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java index de68565fabfd9..3a4d01fe77466 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java @@ -20,6 +20,19 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.List; +import java.util.ListIterator; +import java.util.NoSuchElementException; +import 
javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.VoidCoder; @@ -27,11 +40,6 @@ import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.values.KV; - -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; @@ -46,18 +54,6 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import java.io.Externalizable; -import java.io.IOException; -import java.io.ObjectInput; -import java.io.ObjectOutput; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.List; -import java.util.ListIterator; -import java.util.NoSuchElementException; - -import javax.annotation.Nullable; - /** * A {@code BoundedSource} for reading files resident in a Hadoop filesystem (HDFS) using a * Hadoop file-based input format. 
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/WritableCoder.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/WritableCoder.java index 4e913ede2d645..f3569eafaf6e3 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/WritableCoder.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/WritableCoder.java @@ -17,20 +17,20 @@ */ package org.apache.beam.sdk.io.hdfs; -import org.apache.beam.sdk.coders.Coder; -import org.apache.beam.sdk.coders.CoderException; -import org.apache.beam.sdk.coders.StandardCoder; -import org.apache.beam.sdk.util.CloudObject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.io.Writable; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.List; +import org.apache.beam.sdk.coders.Coder; +import org.apache.beam.sdk.coders.CoderException; +import org.apache.beam.sdk.coders.StandardCoder; +import org.apache.beam.sdk.util.CloudObject; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Writable; /** * A {@code WritableCoder} is a {@link Coder} for a Java class that implements {@link Writable}. 
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthAvroHDFSFileSource.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthAvroHDFSFileSource.java index 5dd9673025056..d37ced9b84afe 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthAvroHDFSFileSource.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthAvroHDFSFileSource.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.io.hdfs.simpleauth; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.io.hdfs.AvroHDFSFileSource; -import org.apache.beam.sdk.io.hdfs.HDFSFileSource; -import org.apache.beam.sdk.options.PipelineOptions; - import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import org.apache.hadoop.mapreduce.InputSplit; - import java.util.List; import javax.annotation.Nullable; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.io.hdfs.AvroHDFSFileSource; +import org.apache.beam.sdk.io.hdfs.HDFSFileSource; +import org.apache.beam.sdk.options.PipelineOptions; +import org.apache.hadoop.mapreduce.InputSplit; /** * Source for Avros on Hadoop/HDFS with Simple Authentication. 
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSink.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSink.java index d0fd8b629085b..e2c2c907f1aa8 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSink.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSink.java @@ -17,6 +17,7 @@ */ package org.apache.beam.sdk.io.hdfs.simpleauth; +import java.security.PrivilegedExceptionAction; import org.apache.beam.sdk.io.Sink; import org.apache.beam.sdk.io.hdfs.HDFSFileSink; import org.apache.beam.sdk.options.PipelineOptions; @@ -25,8 +26,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.security.UserGroupInformation; -import java.security.PrivilegedExceptionAction; - /** * A {@code Sink} for writing records to a Hadoop filesystem using a Hadoop file-based output * format with Simple Authentication. 
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSource.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSource.java index 5b768fc2bc76b..6fb340ebd34ff 100644 --- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSource.java +++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/simpleauth/SimpleAuthHDFSFileSource.java @@ -17,21 +17,19 @@ */ package org.apache.beam.sdk.io.hdfs.simpleauth; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.util.List; +import javax.annotation.Nullable; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.io.hdfs.HDFSFileSource; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.values.KV; - -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import java.util.List; -import javax.annotation.Nullable; - /** * Source for Hadoop/HDFS with Simple Authentication. 
* diff --git a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoderTest.java b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoderTest.java index 85cbd46178f73..6ebea3a2baf3c 100644 --- a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoderTest.java +++ b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/AvroWrapperCoderTest.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.io.hdfs; -import org.apache.beam.sdk.coders.AvroCoder; -import org.apache.beam.sdk.testing.CoderProperties; - import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapred.AvroValue; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.testing.CoderProperties; import org.junit.Test; /** diff --git a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/HDFSFileSourceTest.java b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/HDFSFileSourceTest.java index 67df7bcb4bb41..6145952af4ad1 100644 --- a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/HDFSFileSourceTest.java +++ b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/HDFSFileSourceTest.java @@ -24,13 +24,17 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.Source; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.testing.SourceTestUtils; import org.apache.beam.sdk.values.KV; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; @@ -42,12 +46,6 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import 
java.util.List; -import java.util.Random; - /** * Tests for HDFSFileSource. */ diff --git a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/WritableCoderTest.java b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/WritableCoderTest.java index ac32c336f998d..e78f850c66357 100644 --- a/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/WritableCoderTest.java +++ b/sdks/java/io/hdfs/src/test/java/org/apache/beam/sdk/io/hdfs/WritableCoderTest.java @@ -18,7 +18,6 @@ package org.apache.beam.sdk.io.hdfs; import org.apache.beam.sdk.testing.CoderProperties; - import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.junit.Test; diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsCheckpointMark.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsCheckpointMark.java index 81c2b826ce8d4..ba8fba9d00aee 100644 --- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsCheckpointMark.java +++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsCheckpointMark.java @@ -17,18 +17,15 @@ */ package org.apache.beam.sdk.io.jms; +import java.util.ArrayList; +import java.util.List; +import javax.jms.Message; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.io.UnboundedSource; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; - import org.joda.time.Instant; -import java.util.ArrayList; -import java.util.List; - -import javax.jms.Message; - /** * Checkpoint for an unbounded JmsIO.Read. Consists of * JMS destination name, and the latest message ID consumed so far. 
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java index 557fe13a3a372..f92dbd456b79f 100644 --- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java +++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java @@ -20,6 +20,22 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.annotations.VisibleForTesting; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import javax.annotation.Nullable; +import javax.jms.Connection; +import javax.jms.ConnectionFactory; +import javax.jms.Destination; +import javax.jms.MessageConsumer; +import javax.jms.MessageProducer; +import javax.jms.Session; +import javax.jms.TextMessage; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.SerializableCoder; @@ -36,31 +52,11 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; - -import com.google.common.annotations.VisibleForTesting; - import org.joda.time.Duration; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; - -import javax.annotation.Nullable; -import javax.jms.Connection; -import javax.jms.ConnectionFactory; -import javax.jms.Destination; -import javax.jms.MessageConsumer; -import javax.jms.MessageProducer; -import javax.jms.Session; -import javax.jms.TextMessage; - /** * An unbounded source for JMS 
destinations (queues or topics). * diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsRecord.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsRecord.java index aa0c472e631a6..65a9189d2cea8 100644 --- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsRecord.java +++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsRecord.java @@ -20,7 +20,6 @@ import java.io.Serializable; import java.util.Map; import java.util.Objects; - import javax.jms.Destination; /** diff --git a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java index 020794c7cfbd6..4c3be6d0bae2c 100644 --- a/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java +++ b/sdks/java/io/jms/src/test/java/org/apache/beam/sdk/io/jms/JmsIOTest.java @@ -17,6 +17,17 @@ */ package org.apache.beam.sdk.io.jms; +import java.util.ArrayList; +import javax.jms.Connection; +import javax.jms.ConnectionFactory; +import javax.jms.Message; +import javax.jms.MessageConsumer; +import javax.jms.MessageProducer; +import javax.jms.Session; +import javax.jms.TextMessage; +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.activemq.broker.BrokerService; +import org.apache.activemq.store.memory.MemoryPersistenceAdapter; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.NeedsRunner; import org.apache.beam.sdk.testing.PAssert; @@ -24,10 +35,6 @@ import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.values.PCollection; - -import org.apache.activemq.ActiveMQConnectionFactory; -import org.apache.activemq.broker.BrokerService; -import org.apache.activemq.store.memory.MemoryPersistenceAdapter; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -36,16 +43,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import 
java.util.ArrayList; - -import javax.jms.Connection; -import javax.jms.ConnectionFactory; -import javax.jms.Message; -import javax.jms.MessageConsumer; -import javax.jms.MessageProducer; -import javax.jms.Session; -import javax.jms.TextMessage; - /** * Tests of {@link JmsIO}. */ diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaCheckpointMark.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaCheckpointMark.java index 4b6b976fa54dc..664bb6f766ee5 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaCheckpointMark.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaCheckpointMark.java @@ -17,16 +17,14 @@ */ package org.apache.beam.sdk.io.kafka; +import java.io.IOException; +import java.io.Serializable; +import java.util.List; import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.coders.SerializableCoder; import org.apache.beam.sdk.io.UnboundedSource; - import org.apache.kafka.common.TopicPartition; -import java.io.IOException; -import java.io.Serializable; -import java.util.List; - /** * Checkpoint for an unbounded KafkaIO.Read. Consists of Kafka topic name, partition id, * and the latest offset consumed so far. 
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java index 8a0c7880e97db..885d5d10b4b7b 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java @@ -21,6 +21,34 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.base.Joiner; +import com.google.common.base.Optional; +import com.google.common.collect.ComparisonChain; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterators; +import com.google.common.collect.Lists; +import com.google.common.io.Closeables; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Random; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.Nullable; import org.apache.beam.sdk.coders.ByteArrayCoder; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; @@ -46,18 +74,6 @@ import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import org.apache.beam.sdk.values.PInput; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import 
com.google.common.base.Joiner; -import com.google.common.base.Optional; -import com.google.common.collect.ComparisonChain; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import com.google.common.io.Closeables; - import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -79,25 +95,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Random; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.SynchronousQueue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import javax.annotation.Nullable; - /** * An unbounded source and a sink for Kafka topics. * Kafka version 0.9 and above are supported. 
@@ -1076,7 +1073,6 @@ private void updateLatestOffsets() { @Override public Instant getWatermark() { if (curRecord == null) { - LOG.warn("{}: getWatermark() : no records have been read yet.", name); return initialWatermark; } diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecord.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecord.java index 76e688b178521..fa202e103e921 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecord.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecord.java @@ -17,10 +17,9 @@ */ package org.apache.beam.sdk.io.kafka; -import org.apache.beam.sdk.values.KV; - import java.io.Serializable; import java.util.Arrays; +import org.apache.beam.sdk.values.KV; /** * KafkaRecord contains key and value of the record as well as metadata for the record (topic name, diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecordCoder.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecordCoder.java index 8a3e7f51441d9..736a752f7741f 100644 --- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecordCoder.java +++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaRecordCoder.java @@ -17,6 +17,12 @@ */ package org.apache.beam.sdk.io.kafka; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.List; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.KvCoder; @@ -27,14 +33,6 @@ import org.apache.beam.sdk.util.PropertyNames; import org.apache.beam.sdk.values.KV; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.IOException; -import 
java.io.InputStream; -import java.io.OutputStream; -import java.util.List; - /** * {@link Coder} for {@link KafkaRecord}. */ diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java index 9a89c3621bcd3..772efe1ec2941 100644 --- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java +++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOTest.java @@ -21,6 +21,21 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.Pipeline.PipelineExecutionException; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; @@ -45,10 +60,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PCollectionList; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; - import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.MockConsumer; @@ -66,20 +77,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import 
java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import javax.annotation.Nullable; - /** * Tests of {@link KafkaSource}. */ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/CombineJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/CombineJava8Test.java index 132247b2474bf..98d99cefd93dc 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/CombineJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/CombineJava8Test.java @@ -21,6 +21,8 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.not; +import com.google.common.collect.Iterables; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; @@ -28,9 +30,6 @@ import org.apache.beam.sdk.util.SerializableUtils; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.common.collect.Iterables; - import org.junit.Assume; import org.junit.Rule; import org.junit.Test; @@ -38,8 +37,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Java 8 Tests for {@link Combine}. 
*/ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FilterJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FilterJava8Test.java index 3c83be28ca668..afd1c8b48ca5a 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FilterJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FilterJava8Test.java @@ -17,13 +17,13 @@ */ package org.apache.beam.sdk.transforms; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollection; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -31,8 +31,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Java 8 Tests for {@link Filter}. 
*/ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsJava8Test.java index 5ee10d1083373..70cc04da9aff3 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/FlatMapElementsJava8Test.java @@ -17,23 +17,20 @@ */ package org.apache.beam.sdk.transforms; +import com.google.common.collect.ImmutableList; +import java.io.Serializable; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.collect.ImmutableList; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; -import java.util.List; - /** * Java 8 Tests for {@link FlatMapElements}. 
*/ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java index 339e431be27aa..9b556b9556863 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/MapElementsJava8Test.java @@ -17,18 +17,16 @@ */ package org.apache.beam.sdk.transforms; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Java 8 tests for {@link MapElements}. */ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/PartitionJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/PartitionJava8Test.java index c8283dbf04114..0aeb41ffdc926 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/PartitionJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/PartitionJava8Test.java @@ -19,20 +19,18 @@ import static org.junit.Assert.assertEquals; +import java.io.Serializable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollectionList; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Java 8 Tests for {@link Filter}. 
*/ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesJava8Test.java index aa4c01548c823..99266d487cdde 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/RemoveDuplicatesJava8Test.java @@ -23,23 +23,20 @@ import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; +import java.util.HashSet; +import java.util.Set; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; - import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.HashSet; -import java.util.Set; - /** * Java 8 tests for {@link RemoveDuplicates}. 
*/ diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithKeysJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithKeysJava8Test.java index a89e281e68307..a5b9cb1239a0f 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithKeysJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithKeysJava8Test.java @@ -25,7 +25,6 @@ import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; - import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; diff --git a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java index 03aa64719fc92..5f1e74bd2ddd9 100644 --- a/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java +++ b/sdks/java/java8tests/src/test/java/org/apache/beam/sdk/transforms/WithTimestampsJava8Test.java @@ -17,20 +17,18 @@ */ package org.apache.beam.sdk.transforms; +import java.io.Serializable; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.RunnableOnService; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.joda.time.Instant; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.Serializable; - /** * Java 8 tests for {@link WithTimestamps}. 
*/ diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java index bc55c062fabe1..e9f433396e6e5 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/DebuggingWordCount.java @@ -18,6 +18,9 @@ package ${package}; import ${package}.WordCount; +import java.util.Arrays; +import java.util.List; +import java.util.regex.Pattern; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; import org.apache.beam.sdk.options.Default; @@ -30,15 +33,9 @@ import org.apache.beam.sdk.transforms.Sum; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Arrays; -import java.util.List; -import java.util.regex.Pattern; - - /** * An example that verifies word counts in Shakespeare and includes Dataflow best practices. 
* diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java index ffe8b881ca5a4..17bf7ca6154be 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/WindowedWordCount.java @@ -17,8 +17,14 @@ */ package ${package}; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; import ${package}.common.DataflowExampleUtils; - +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.io.PubsubIO; @@ -33,22 +39,11 @@ import org.apache.beam.sdk.transforms.windowing.Window; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; - -import com.google.api.services.bigquery.model.TableFieldSchema; -import com.google.api.services.bigquery.model.TableReference; -import com.google.api.services.bigquery.model.TableRow; -import com.google.api.services.bigquery.model.TableSchema; - import org.joda.time.Duration; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - - /** * An example that counts words in text, and can run over either unbounded or bounded input * collections. 
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/DataflowExampleUtils.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/DataflowExampleUtils.java index fa29fddcdf999..9e6be788d0e4f 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/DataflowExampleUtils.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/DataflowExampleUtils.java @@ -17,12 +17,6 @@ */ package ${package}.common; -import org.apache.beam.runners.dataflow.BlockingDataflowRunner; -import org.apache.beam.runners.dataflow.DataflowPipelineJob; -import org.apache.beam.runners.dataflow.DataflowRunner; -import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; -import org.apache.beam.runners.dataflow.util.MonitoringUtil; - import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.googleapis.services.AbstractGoogleClientRequest; import com.google.api.services.bigquery.Bigquery; @@ -36,21 +30,24 @@ import com.google.api.services.dataflow.Dataflow; import com.google.api.services.pubsub.Pubsub; import com.google.api.services.pubsub.model.Topic; -import org.apache.beam.sdk.Pipeline; -import org.apache.beam.sdk.PipelineResult; -import org.apache.beam.sdk.io.TextIO; -import org.apache.beam.sdk.options.BigQueryOptions; -import org.apache.beam.sdk.transforms.IntraBundleParallelization; -import org.apache.beam.sdk.util.Transport; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Set; - import javax.servlet.http.HttpServletResponse; +import org.apache.beam.runners.dataflow.BlockingDataflowRunner; +import org.apache.beam.runners.dataflow.DataflowPipelineJob; +import org.apache.beam.runners.dataflow.DataflowRunner; 
+import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; +import org.apache.beam.runners.dataflow.util.MonitoringUtil; +import org.apache.beam.sdk.Pipeline; +import org.apache.beam.sdk.PipelineResult; +import org.apache.beam.sdk.io.TextIO; +import org.apache.beam.sdk.options.BigQueryOptions; +import org.apache.beam.sdk.transforms.IntraBundleParallelization; +import org.apache.beam.sdk.util.Transport; /** * The utility class that sets up and tears down external resources, starts the Google Cloud Pub/Sub diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/ExampleBigQueryTableOptions.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/ExampleBigQueryTableOptions.java index 279f2e098dde1..79fa865eb5299 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/ExampleBigQueryTableOptions.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/ExampleBigQueryTableOptions.java @@ -17,14 +17,13 @@ */ package ${package}.common; +import com.google.api.services.bigquery.model.TableSchema; import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; import org.apache.beam.sdk.options.Default; import org.apache.beam.sdk.options.DefaultValueFactory; import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptions; -import com.google.api.services.bigquery.model.TableSchema; - /** * Options that can be used to configure BigQuery tables in Dataflow examples. * The project defaults to the project being used to run the example. 
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java index 9b347da6089ba..58e082148a4a3 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/main/java/common/PubsubFileInjector.java @@ -17,6 +17,12 @@ */ package ${package}.common; +import com.google.api.services.pubsub.Pubsub; +import com.google.api.services.pubsub.model.PublishRequest; +import com.google.api.services.pubsub.model.PubsubMessage; +import com.google.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.io.TextIO; @@ -28,14 +34,6 @@ import org.apache.beam.sdk.transforms.IntraBundleParallelization; import org.apache.beam.sdk.util.Transport; -import com.google.api.services.pubsub.Pubsub; -import com.google.api.services.pubsub.model.PublishRequest; -import com.google.api.services.pubsub.model.PubsubMessage; -import com.google.common.collect.ImmutableMap; - -import java.io.IOException; -import java.util.Arrays; - /** * A batch Dataflow pipeline for injecting a set of GCS files into * a PubSub topic line by line. Empty lines are skipped. 
diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/DebuggingWordCountTest.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/DebuggingWordCountTest.java index 4d2e324c1be53..dfa1a75d4135e 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/DebuggingWordCountTest.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/DebuggingWordCountTest.java @@ -18,16 +18,14 @@ package ${package}; import com.google.common.io.Files; - +import java.io.File; +import java.nio.charset.StandardCharsets; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.io.File; -import java.nio.charset.StandardCharsets; - /** * Tests for {@link DebuggingWordCount}. */ diff --git a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/WordCountTest.java b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/WordCountTest.java index debfc78623a2e..875d3d757a79c 100644 --- a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/WordCountTest.java +++ b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/src/test/java/WordCountTest.java @@ -20,6 +20,8 @@ import ${package}.WordCount.CountWords; import ${package}.WordCount.ExtractWordsFn; import ${package}.WordCount.FormatAsTextFn; +import java.util.Arrays; +import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.testing.PAssert; @@ -29,7 +31,6 @@ import org.apache.beam.sdk.transforms.DoFnTester; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.PCollection; - import org.hamcrest.CoreMatchers; import org.junit.Assert; 
import org.junit.Test; @@ -37,8 +38,6 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -import java.util.Arrays; -import java.util.List; /** * Tests of WordCount. diff --git a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java index 6a1c41b0dc87a..0b21aa650a8df 100644 --- a/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java +++ b/sdks/java/maven-archetypes/starter/src/main/resources/archetype-resources/src/main/java/StarterPipeline.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.ParDo; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java index 7c13350aee522..b332442cadcd7 100644 --- a/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java +++ b/sdks/java/maven-archetypes/starter/src/test/resources/projects/basic/reference/src/main/java/it/pkg/StarterPipeline.java @@ -22,7 +22,6 @@ import org.apache.beam.sdk.transforms.Create; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.transforms.ParDo; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/AvroCoderBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/AvroCoderBenchmark.java index 39b31ef309087..35d5add1471ae 100644 --- 
a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/AvroCoderBenchmark.java +++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/AvroCoderBenchmark.java @@ -17,9 +17,10 @@ */ package org.apache.beam.sdk.microbenchmarks.coders; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.DefaultCoder; - import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Param; @@ -28,9 +29,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; -import java.util.Arrays; - /** * Benchmarks for {@link AvroCoder}. */ diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/ByteArrayCoderBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/ByteArrayCoderBenchmark.java index df20a15d4ec21..78909752a5d46 100644 --- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/ByteArrayCoderBenchmark.java +++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/ByteArrayCoderBenchmark.java @@ -17,8 +17,9 @@ */ package org.apache.beam.sdk.microbenchmarks.coders; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.sdk.coders.ByteArrayCoder; - import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Param; @@ -27,9 +28,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; -import java.util.Arrays; - /** * Benchmarks for {@link ByteArrayCoder}. 
*/ diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/CoderBenchmarking.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/CoderBenchmarking.java index 8523cb2a6a1f4..c92215d0afc54 100644 --- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/CoderBenchmarking.java +++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/CoderBenchmarking.java @@ -17,11 +17,10 @@ */ package org.apache.beam.sdk.microbenchmarks.coders; +import java.io.IOException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.util.CoderUtils; -import java.io.IOException; - /** * Utilities for writing coder benchmarks. */ diff --git a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/StringUtf8CoderBenchmark.java b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/StringUtf8CoderBenchmark.java index c0bcb453944b9..540c95899c21d 100644 --- a/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/StringUtf8CoderBenchmark.java +++ b/sdks/java/microbenchmarks/src/main/java/org/apache/beam/sdk/microbenchmarks/coders/StringUtf8CoderBenchmark.java @@ -17,8 +17,9 @@ */ package org.apache.beam.sdk.microbenchmarks.coders; +import java.io.IOException; +import java.util.Arrays; import org.apache.beam.sdk.coders.StringUtf8Coder; - import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Param; @@ -27,9 +28,6 @@ import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; -import java.io.IOException; -import java.util.Arrays; - /** * Benchmarks for {@link StringUtf8Coder}. */