diff --git a/README.md b/README.md index 67fbc4c8e337..1113ce2fb25f 100644 --- a/README.md +++ b/README.md @@ -128,18 +128,20 @@ must [supply credentials](#authentication) and a project ID if running this snip import com.google.gcloud.bigquery.BigQuery; import com.google.gcloud.bigquery.BigQueryOptions; import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.Job; import com.google.gcloud.bigquery.JobStatus; import com.google.gcloud.bigquery.JobInfo; import com.google.gcloud.bigquery.LoadJobConfiguration; import com.google.gcloud.bigquery.Schema; import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; import com.google.gcloud.bigquery.TableId; import com.google.gcloud.bigquery.TableInfo; BigQuery bigquery = BigQueryOptions.defaultInstance().service(); TableId tableId = TableId.of("dataset", "table"); -TableInfo info = bigquery.getTable(tableId); -if (info == null) { +Table table = bigquery.getTable(tableId); +if (table == null) { System.out.println("Creating table " + tableId); Field integerField = Field.of("fieldName", Field.Type.integer()); Schema schema = Schema.of(integerField); @@ -147,11 +149,9 @@ if (info == null) { } else { System.out.println("Loading data into table " + tableId); LoadJobConfiguration configuration = LoadJobConfiguration.of(tableId, "gs://bucket/path"); - JobInfo loadJob = JobInfo.of(configuration); - loadJob = bigquery.create(loadJob); - while (loadJob.status().state() != JobStatus.State.DONE) { + Job loadJob = bigquery.create(JobInfo.of(configuration)); + while (!loadJob.isDone()) { Thread.sleep(1000L); - loadJob = bigquery.getJob(loadJob.jobId()); } if (loadJob.status().error() != null) { System.out.println("Job completed with errors"); diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md index 3f3678f41a04..1a4e48dfd4fd 100644 --- a/gcloud-java-bigquery/README.md +++ b/gcloud-java-bigquery/README.md @@ -114,6 +114,7 @@ with only one string field. 
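For reference, the updated README snippet reads end to end as follows. This is a minimal, illustrative sketch only: the dataset name, table name, and `gs://bucket/path` URI are placeholders, and the extra `reload()` call before inspecting the final status is an addition that the snippet above does not show.

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Field;
import com.google.gcloud.bigquery.Job;
import com.google.gcloud.bigquery.JobInfo;
import com.google.gcloud.bigquery.LoadJobConfiguration;
import com.google.gcloud.bigquery.Schema;
import com.google.gcloud.bigquery.StandardTableDefinition;
import com.google.gcloud.bigquery.Table;
import com.google.gcloud.bigquery.TableId;
import com.google.gcloud.bigquery.TableInfo;

public class CreateTableOrLoadData {
  public static void main(String... args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // Placeholder dataset and table names.
    TableId tableId = TableId.of("dataset", "table");
    Table table = bigquery.getTable(tableId);
    if (table == null) {
      System.out.println("Creating table " + tableId);
      Field integerField = Field.of("fieldName", Field.Type.integer());
      Schema schema = Schema.of(integerField);
      bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
    } else {
      System.out.println("Loading data into table " + tableId);
      LoadJobConfiguration configuration = LoadJobConfiguration.of(tableId, "gs://bucket/path");
      Job loadJob = bigquery.create(JobInfo.of(configuration));
      // isDone() issues a fresh status request on every call, so no manual getJob loop is needed.
      while (!loadJob.isDone()) {
        Thread.sleep(1000L);
      }
      // Reload to pick up the final status before inspecting errors.
      loadJob = loadJob.reload();
      if (loadJob.status().error() != null) {
        System.out.println("Job completed with errors");
      } else {
        System.out.println("Job succeeded");
      }
    }
  }
}
```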
Add the following imports at the top of your file: import com.google.gcloud.bigquery.Field; import com.google.gcloud.bigquery.Schema; import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; import com.google.gcloud.bigquery.TableId; import com.google.gcloud.bigquery.TableInfo; ``` @@ -127,7 +128,7 @@ Field stringField = Field.of("StringField", Field.Type.string()); Schema schema = Schema.of(stringField); // Create a table StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema); -TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, tableDefinition)); +Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); ``` #### Loading data into a table @@ -216,6 +217,7 @@ import com.google.gcloud.bigquery.QueryRequest; import com.google.gcloud.bigquery.QueryResponse; import com.google.gcloud.bigquery.Schema; import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; import com.google.gcloud.bigquery.TableId; import com.google.gcloud.bigquery.TableInfo; @@ -242,7 +244,7 @@ public class GcloudBigQueryExample { Schema schema = Schema.of(stringField); // Create a table StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema); - TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, tableDefinition)); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); // Define rows to insert Map firstRow = new HashMap<>(); diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java index a1b23aba4d5d..551ba813cacb 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -206,7 +206,7 @@ private DatasetOption(BigQueryRpc.Option option, Object value) { /** * Returns an option to specify the dataset's fields to be returned by the RPC call. If this * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can - * be used to specify only the fields of interest. {@link DatasetInfo#datasetId()} is always + * be used to specify only the fields of interest. {@link Dataset#datasetId()} is always * returned, even if not specified. */ public static DatasetOption fields(DatasetField... fields) { @@ -275,8 +275,8 @@ private TableOption(BigQueryRpc.Option option, Object value) { /** * Returns an option to specify the table's fields to be returned by the RPC call. If this * option is not provided all table's fields are returned. {@code TableOption.fields} can be - * used to specify only the fields of interest. {@link TableInfo#tableId()} and type (which is - * part of {@link TableInfo#definition()}) are always returned, even if not specified. + * used to specify only the fields of interest. {@link Table#tableId()} and type (which is part + * of {@link Table#definition()}) are always returned, even if not specified. */ public static TableOption fields(TableField... fields) { return new TableOption(BigQueryRpc.Option.FIELDS, TableField.selector(fields)); @@ -369,7 +369,7 @@ public static JobListOption startPageToken(String pageToken) { /** * Returns an option to specify the job's fields to be returned by the RPC call. If this option * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to - * specify only the fields of interest. 
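Because `create` now returns a functional `Table` rather than a plain `TableInfo`, the returned handle can issue follow-up requests directly. A minimal sketch, using placeholder dataset and table ids:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Field;
import com.google.gcloud.bigquery.Schema;
import com.google.gcloud.bigquery.StandardTableDefinition;
import com.google.gcloud.bigquery.Table;
import com.google.gcloud.bigquery.TableId;
import com.google.gcloud.bigquery.TableInfo;

BigQuery bigquery = BigQueryOptions.defaultInstance().service();
TableId tableId = TableId.of("my_dataset_id", "my_table_id");
Schema schema = Schema.of(Field.of("StringField", Field.Type.string()));
Table createdTable = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));

// The returned handle can issue requests itself; no separate info object is needed.
System.out.println("Table exists: " + createdTable.exists());
// reload() fetches the most recent metadata; fields() with no arguments requests a minimal
// representation, which is the same cheap pattern exists() uses internally.
Table reloaded = createdTable.reload(BigQuery.TableOption.fields());
```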
{@link JobInfo#jobId()}, {@link JobStatus#state()}, + * specify only the fields of interest. {@link Job#jobId()}, {@link JobStatus#state()}, * {@link JobStatus#error()} as well as type-specific configuration (e.g. * {@link QueryJobConfiguration#query()} for Query Jobs) are always returned, even if not * specified. {@link JobField#SELF_LINK} and {@link JobField#ETAG} can not be selected when @@ -397,7 +397,7 @@ private JobOption(BigQueryRpc.Option option, Object value) { /** * Returns an option to specify the job's fields to be returned by the RPC call. If this option * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to - * specify only the fields of interest. {@link JobInfo#jobId()} as well as type-specific + * specify only the fields of interest. {@link Job#jobId()} as well as type-specific * configuration (e.g. {@link QueryJobConfiguration#query()} for Query Jobs) are always * returned, even if not specified. */ @@ -457,46 +457,45 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - DatasetInfo create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; /** * Creates a new table. * * @throws BigQueryException upon failure */ - TableInfo create(TableInfo table, TableOption... options) throws BigQueryException; + Table create(TableInfo table, TableOption... options) throws BigQueryException; /** * Creates a new job. * * @throws BigQueryException upon failure */ - JobInfo create(JobInfo job, JobOption... options) throws BigQueryException; + Job create(JobInfo job, JobOption... options) throws BigQueryException; /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - DatasetInfo getDataset(String datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException; /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - DatasetInfo getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException; /** * Lists the project's datasets. This method returns partial information on each dataset - * ({@link DatasetInfo#datasetId()}, {@link DatasetInfo#friendlyName()} and - * {@link DatasetInfo#id()}). To get complete information use either - * {@link #getDataset(String, DatasetOption...)} or + * ({@link Dataset#datasetId()}, {@link Dataset#friendlyName()} and {@link Dataset#id()}). To get + * complete information use either {@link #getDataset(String, DatasetOption...)} or * {@link #getDataset(DatasetId, DatasetOption...)}. * * @throws BigQueryException upon failure */ - Page listDatasets(DatasetListOption... options) throws BigQueryException; + Page listDatasets(DatasetListOption... options) throws BigQueryException; /** * Deletes the requested dataset. @@ -535,54 +534,50 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - DatasetInfo update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + Dataset update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; /** * Updates table information. 
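The same pattern applies across the service interface: `create`, `get` and `update` methods now hand back `Dataset`, `Table` and `Job` objects, and list methods return pages of those objects. An illustrative sketch with a placeholder dataset id:

```java
import com.google.gcloud.Page;
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Dataset;
import com.google.gcloud.bigquery.DatasetInfo;

BigQuery bigquery = BigQueryOptions.defaultInstance().service();
// create() now returns a functional Dataset object instead of a DatasetInfo.
Dataset dataset = bigquery.create(DatasetInfo.builder("my_dataset_id").build());

// listDatasets() returns Page<Dataset>; each element carries only partial information
// (datasetId, friendlyName and id), so call reload() when full metadata is needed.
Page<Dataset> page = bigquery.listDatasets();
for (Dataset current : page.values()) {
  System.out.println(current.datasetId());
}
```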
* * @throws BigQueryException upon failure */ - TableInfo update(TableInfo table, TableOption... options) throws BigQueryException; + Table update(TableInfo table, TableOption... options) throws BigQueryException; /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - TableInfo getTable(String datasetId, String tableId, TableOption... options) - throws BigQueryException; + Table getTable(String datasetId, String tableId, TableOption... options) throws BigQueryException; /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - TableInfo getTable(TableId tableId, TableOption... options) - throws BigQueryException; + Table getTable(TableId tableId, TableOption... options) throws BigQueryException; /** * Lists the tables in the dataset. This method returns partial information on each table - * ({@link TableInfo#tableId()}, {@link TableInfo#friendlyName()}, {@link TableInfo#id()} and - * type, which is part of {@link TableInfo#definition()}). To get complete information use either + * ({@link TableInfo#tableId()}, {@link Table#friendlyName()}, {@link Table#id()} and type, which + * is part of {@link Table#definition()}). To get complete information use either * {@link #getTable(TableId, TableOption...)} or * {@link #getTable(String, String, TableOption...)}. * * @throws BigQueryException upon failure */ - Page listTables(String datasetId, TableListOption... options) - throws BigQueryException; + Page listTables(String datasetId, TableListOption... options) throws BigQueryException; /** * Lists the tables in the dataset. This method returns partial information on each table - * ({@link TableInfo#tableId()}, {@link TableInfo#friendlyName()}, {@link TableInfo#id()} and - * type, which is part of {@link TableInfo#definition()}). To get complete information use either + * ({@link TableInfo#tableId()}, {@link Table#friendlyName()}, {@link Table#id()} and type, which + * is part of {@link Table#definition()}). To get complete information use either * {@link #getTable(TableId, TableOption...)} or * {@link #getTable(String, String, TableOption...)}. * * @throws BigQueryException upon failure */ - Page listTables(DatasetId datasetId, TableListOption... options) - throws BigQueryException; + Page
listTables(DatasetId datasetId, TableListOption... options) throws BigQueryException; /** * Sends an insert all request. @@ -612,21 +607,21 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * * @throws BigQueryException upon failure */ - JobInfo getJob(String jobId, JobOption... options) throws BigQueryException; + Job getJob(String jobId, JobOption... options) throws BigQueryException; /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - JobInfo getJob(JobId jobId, JobOption... options) throws BigQueryException; + Job getJob(JobId jobId, JobOption... options) throws BigQueryException; /** * Lists the jobs. * * @throws BigQueryException upon failure */ - Page listJobs(JobListOption... options) throws BigQueryException; + Page listJobs(JobListOption... options) throws BigQueryException; /** * Sends a job cancel request. This call will return immediately. The job status can then be diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index de74bdcac89c..68f22c0e5bfd 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -19,10 +19,7 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.gcloud.RetryHelper.runWithRetries; -import com.google.api.services.bigquery.model.Dataset; import com.google.api.services.bigquery.model.GetQueryResultsResponse; -import com.google.api.services.bigquery.model.Job; -import com.google.api.services.bigquery.model.Table; import com.google.api.services.bigquery.model.TableDataInsertAllRequest; import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows; import com.google.api.services.bigquery.model.TableRow; @@ -46,7 +43,7 @@ final class BigQueryImpl extends BaseService implements BigQuery { - private static class DatasetPageFetcher implements NextPageFetcher { + private static class DatasetPageFetcher implements NextPageFetcher { private static final long serialVersionUID = -3057564042439021278L; private final Map requestOptions; @@ -60,12 +57,12 @@ private static class DatasetPageFetcher implements NextPageFetcher } @Override - public Page nextPage() { + public Page nextPage() { return listDatasets(serviceOptions, requestOptions); } } - private static class TablePageFetcher implements NextPageFetcher { + private static class TablePageFetcher implements NextPageFetcher
{ private static final long serialVersionUID = 8611248840504201187L; private final Map requestOptions; @@ -81,12 +78,12 @@ private static class TablePageFetcher implements NextPageFetcher { } @Override - public Page nextPage() { + public Page
nextPage() { return listTables(dataset, serviceOptions, requestOptions); } } - private static class JobPageFetcher implements NextPageFetcher { + private static class JobPageFetcher implements NextPageFetcher { private static final long serialVersionUID = 8536533282558245472L; private final Map requestOptions; @@ -100,7 +97,7 @@ private static class JobPageFetcher implements NextPageFetcher { } @Override - public Page nextPage() { + public Page nextPage() { return listJobs(serviceOptions, requestOptions); } } @@ -156,96 +153,108 @@ public QueryResult nextPage() { } @Override - public DatasetInfo create(DatasetInfo dataset, DatasetOption... options) - throws BigQueryException { - final Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); + public Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + final com.google.api.services.bigquery.model.Dataset datasetPb = + dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); try { - return DatasetInfo.fromPb(runWithRetries(new Callable() { - @Override - public Dataset call() { - return bigQueryRpc.create(datasetPb, optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER)); + return Dataset.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.create(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public TableInfo create(TableInfo table, TableOption... options) - throws BigQueryException { - final Table tablePb = table.setProjectId(options().projectId()).toPb(); + public Table create(TableInfo table, TableOption... options) throws BigQueryException { + final com.google.api.services.bigquery.model.Table tablePb = + table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); try { - return TableInfo.fromPb(runWithRetries(new Callable
() { - @Override - public Table call() { - return bigQueryRpc.create(tablePb, optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER)); + return Table.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.create(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public JobInfo create(JobInfo job, JobOption... options) throws BigQueryException { - final Job jobPb = job.setProjectId(options().projectId()).toPb(); + public Job create(JobInfo job, JobOption... options) throws BigQueryException { + final com.google.api.services.bigquery.model.Job jobPb = + job.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); try { - return JobInfo.fromPb(runWithRetries(new Callable() { - @Override - public Job call() { - return bigQueryRpc.create(jobPb, optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER)); + return Job.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() { + return bigQueryRpc.create(jobPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public DatasetInfo getDataset(String datasetId, DatasetOption... options) - throws BigQueryException { + public Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException { return getDataset(DatasetId.of(datasetId), options); } @Override - public DatasetInfo getDataset(final DatasetId datasetId, DatasetOption... options) + public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) throws BigQueryException { final Map optionsMap = optionMap(options); try { - Dataset answer = runWithRetries(new Callable() { - @Override - public Dataset call() { - return bigQueryRpc.getDataset(datasetId.dataset(), optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : DatasetInfo.fromPb(answer); + com.google.api.services.bigquery.model.Dataset answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.getDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Dataset.fromPb(this, answer); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public Page listDatasets(DatasetListOption... options) throws BigQueryException { + public Page listDatasets(DatasetListOption... 
options) throws BigQueryException { return listDatasets(options(), optionMap(options)); } - private static Page listDatasets(final BigQueryOptions serviceOptions, + private static Page listDatasets(final BigQueryOptions serviceOptions, final Map optionsMap) { try { - BigQueryRpc.Tuple> result = - runWithRetries(new Callable>>() { - @Override - public BigQueryRpc.Tuple> call() { - return serviceOptions.rpc().listDatasets(optionsMap); - } - }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listDatasets(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); return new PageImpl<>(new DatasetPageFetcher(serviceOptions, cursor, optionsMap), cursor, - Iterables.transform(result.y(), DatasetInfo.FROM_PB_FUNCTION)); + Iterables.transform(result.y(), + new Function() { + @Override + public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { + return Dataset.fromPb(serviceOptions.service(), dataset); + } + })); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } @@ -292,87 +301,96 @@ public Boolean call() { } @Override - public DatasetInfo update(DatasetInfo dataset, DatasetOption... options) - throws BigQueryException { - final Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); + public Dataset update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + final com.google.api.services.bigquery.model.Dataset datasetPb = + dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); try { - return DatasetInfo.fromPb(runWithRetries(new Callable() { - @Override - public Dataset call() { - return bigQueryRpc.patch(datasetPb, optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER)); + return Dataset.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.patch(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public TableInfo update(TableInfo table, TableOption... options) - throws BigQueryException { - final Table tablePb = table.setProjectId(options().projectId()).toPb(); + public Table update(TableInfo table, TableOption... options) throws BigQueryException { + final com.google.api.services.bigquery.model.Table tablePb = + table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); try { - return TableInfo.fromPb(runWithRetries(new Callable
() { - @Override - public Table call() { - return bigQueryRpc.patch(tablePb, optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER)); + return Table.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.patch(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public TableInfo getTable(final String datasetId, final String tableId, - TableOption... options) throws BigQueryException { + public Table getTable(final String datasetId, final String tableId, TableOption... options) + throws BigQueryException { return getTable(TableId.of(datasetId, tableId), options); } @Override - public TableInfo getTable(final TableId tableId, TableOption... options) - throws BigQueryException { + public Table getTable(final TableId tableId, TableOption... options) throws BigQueryException { final Map optionsMap = optionMap(options); try { - Table answer = runWithRetries(new Callable
() { - @Override - public Table call() { - return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : TableInfo.fromPb(answer); + com.google.api.services.bigquery.model.Table answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Table.fromPb(this, answer); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public Page listTables(String datasetId, TableListOption... options) + public Page
listTables(String datasetId, TableListOption... options) throws BigQueryException { return listTables(datasetId, options(), optionMap(options)); } @Override - public Page listTables(DatasetId datasetId, TableListOption... options) + public Page
listTables(DatasetId datasetId, TableListOption... options) throws BigQueryException { return listTables(datasetId.dataset(), options(), optionMap(options)); } - private static Page listTables(final String datasetId, final BigQueryOptions + private static Page
listTables(final String datasetId, final BigQueryOptions serviceOptions, final Map optionsMap) { try { - BigQueryRpc.Tuple> result = - runWithRetries(new Callable>>() { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { @Override - public BigQueryRpc.Tuple> call() { - return serviceOptions.rpc().listTables(datasetId, optionsMap); - } + public BigQueryRpc.Tuple> + call() { + return serviceOptions.rpc().listTables(datasetId, optionsMap); + } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable tables = Iterables.transform(result.y(), - TableInfo.FROM_PB_FUNCTION); + Iterable
tables = Iterables.transform(result.y(), + new Function() { + @Override + public Table apply(com.google.api.services.bigquery.model.Table table) { + return Table.fromPb(serviceOptions.service(), table); + } + }); return new PageImpl<>(new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, tables); } catch (RetryHelper.RetryHelperException e) { @@ -441,43 +459,51 @@ public List apply(TableRow rowPb) { } @Override - public JobInfo getJob(String jobId, JobOption... options) throws BigQueryException { + public Job getJob(String jobId, JobOption... options) throws BigQueryException { return getJob(JobId.of(jobId), options); } @Override - public JobInfo getJob(final JobId jobId, JobOption... options) - throws BigQueryException { + public Job getJob(final JobId jobId, JobOption... options) throws BigQueryException { final Map optionsMap = optionMap(options); try { - Job answer = runWithRetries(new Callable() { - @Override - public Job call() { - return bigQueryRpc.getJob(jobId.job(), optionsMap); - } - }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : JobInfo.fromPb(answer); + com.google.api.services.bigquery.model.Job answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() { + return bigQueryRpc.getJob(jobId.job(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Job.fromPb(this, answer); } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @Override - public Page listJobs(JobListOption... options) throws BigQueryException { + public Page listJobs(JobListOption... options) throws BigQueryException { return listJobs(options(), optionMap(options)); } - private static Page listJobs(final BigQueryOptions serviceOptions, + private static Page listJobs(final BigQueryOptions serviceOptions, final Map optionsMap) { - BigQueryRpc.Tuple> result = - runWithRetries(new Callable>>() { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { @Override - public BigQueryRpc.Tuple> call() { + public BigQueryRpc.Tuple> + call() { return serviceOptions.rpc().listJobs(optionsMap); } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable jobs = Iterables.transform(result.y(), JobInfo.FROM_PB_FUNCTION); + Iterable jobs = Iterables.transform(result.y(), + new Function() { + @Override + public Job apply(com.google.api.services.bigquery.model.Job job) { + return Job.fromPb(serviceOptions.service(), job); + } + }); return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java index a0914bb17409..4c46c72745a3 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java @@ -16,18 +16,13 @@ package com.google.gcloud.bigquery; -import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import com.google.common.base.Function; -import com.google.common.collect.Iterators; import com.google.gcloud.Page; -import com.google.gcloud.PageImpl; import java.io.IOException; import java.io.ObjectInputStream; -import java.io.Serializable; -import java.util.Iterator; +import java.util.List; import java.util.Objects; /** 
@@ -35,89 +30,108 @@
  *
  * <p>Objects of this class are immutable. Operations that modify the dataset like {@link #update}
  * return a new object. To get a {@code Dataset} object with the most recent information use
- * {@link #reload}.
+ * {@link #reload}. {@code Dataset} adds a layer of service-related functionality over
+ * {@link DatasetInfo}.
  * </p>

*/ -public final class Dataset { +public final class Dataset extends DatasetInfo { - private final BigQuery bigquery; - private final DatasetInfo info; + private static final long serialVersionUID = -4272921483363065593L; - private static class TablePageFetcher implements PageImpl.NextPageFetcher
{ + private final BigQueryOptions options; + private transient BigQuery bigquery; - private static final long serialVersionUID = 6906197848579250598L; + static final class Builder extends DatasetInfo.Builder { - private final BigQueryOptions options; - private final Page infoPage; + private final BigQuery bigquery; + private final DatasetInfo.BuilderImpl infoBuilder; - TablePageFetcher(BigQueryOptions options, Page infoPage) { - this.options = options; - this.infoPage = infoPage; + private Builder(BigQuery bigquery) { + this.bigquery = bigquery; + this.infoBuilder = new DatasetInfo.BuilderImpl(); + } + + private Builder(Dataset dataset) { + this.bigquery = dataset.bigquery; + this.infoBuilder = new DatasetInfo.BuilderImpl(dataset); } @Override - public Page
nextPage() { - Page nextInfoPage = infoPage.nextPage(); - return new PageImpl<>(new TablePageFetcher(options, nextInfoPage), - nextInfoPage.nextPageCursor(), new LazyTableIterable(options, nextInfoPage.values())); + public Builder datasetId(DatasetId datasetId) { + infoBuilder.datasetId(datasetId); + return this; + } + + @Override + public Builder acl(List acl) { + infoBuilder.acl(acl); + return this; + } + + @Override + Builder creationTime(Long creationTime) { + infoBuilder.creationTime(creationTime); + return this; + } + + @Override + public Builder defaultTableLifetime(Long defaultTableLifetime) { + infoBuilder.defaultTableLifetime(defaultTableLifetime); + return this; } - } - private static class LazyTableIterable implements Iterable
, Serializable { + @Override + public Builder description(String description) { + infoBuilder.description(description); + return this; + } - private static final long serialVersionUID = 3312744215731674032L; + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } - private final BigQueryOptions options; - private final Iterable infoIterable; - private transient BigQuery bigquery; + @Override + public Builder friendlyName(String friendlyName) { + infoBuilder.friendlyName(friendlyName); + return this; + } - public LazyTableIterable(BigQueryOptions options, Iterable infoIterable) { - this.options = options; - this.infoIterable = infoIterable; - this.bigquery = options.service(); + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; } - private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { - in.defaultReadObject(); - this.bigquery = options.service(); + @Override + Builder lastModified(Long lastModified) { + infoBuilder.lastModified(lastModified); + return this; } @Override - public Iterator
iterator() { - return Iterators.transform(infoIterable.iterator(), new Function() { - @Override - public Table apply(TableInfo tableInfo) { - return new Table(bigquery, tableInfo); - } - }); + public Builder location(String location) { + infoBuilder.location(location); + return this; } @Override - public int hashCode() { - return Objects.hash(options, infoIterable); + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; } @Override - public boolean equals(Object obj) { - if (!(obj instanceof LazyTableIterable)) { - return false; - } - LazyTableIterable other = (LazyTableIterable) obj; - return Objects.equals(options, other.options) - && Objects.equals(infoIterable, other.infoIterable); + public Dataset build() { + return new Dataset(bigquery, infoBuilder); } } - /** - * Constructs a {@code Dataset} object for the provided {@code DatasetInfo}. The BigQuery service - * is used to issue requests. - * - * @param bigquery the BigQuery service used for issuing requests - * @param info dataset's info - */ - public Dataset(BigQuery bigquery, DatasetInfo info) { + Dataset(BigQuery bigquery, DatasetInfo.BuilderImpl infoBuilder) { + super(infoBuilder); this.bigquery = checkNotNull(bigquery); - this.info = checkNotNull(info); + this.options = bigquery.options(); } /** @@ -131,15 +145,7 @@ public Dataset(BigQuery bigquery, DatasetInfo info) { * @throws BigQueryException upon failure */ public static Dataset get(BigQuery bigquery, String dataset, BigQuery.DatasetOption... options) { - DatasetInfo info = bigquery.getDataset(dataset, options); - return info != null ? new Dataset(bigquery, info) : null; - } - - /** - * Returns the dataset's information. - */ - public DatasetInfo info() { - return info; + return bigquery.getDataset(dataset, options); } /** @@ -149,7 +155,7 @@ public DatasetInfo info() { * @throws BigQueryException upon failure */ public boolean exists() { - return bigquery.getDataset(info.datasetId(), BigQuery.DatasetOption.fields()) != null; + return bigquery.getDataset(datasetId(), BigQuery.DatasetOption.fields()) != null; } /** @@ -161,23 +167,19 @@ public boolean exists() { * @throws BigQueryException upon failure */ public Dataset reload(BigQuery.DatasetOption... options) { - return Dataset.get(bigquery, info.datasetId().dataset(), options); + return Dataset.get(bigquery, datasetId().dataset(), options); } /** - * Updates the dataset's information. Dataset's user-defined id cannot be changed. A new - * {@code Dataset} object is returned. + * Updates the dataset's information with this dataset's information. Dataset's user-defined id + * cannot be changed. A new {@code Dataset} object is returned. * - * @param datasetInfo new dataset's information. User-defined id must match the one of the current - * dataset * @param options dataset options * @return a {@code Dataset} object with updated information * @throws BigQueryException upon failure */ - public Dataset update(DatasetInfo datasetInfo, BigQuery.DatasetOption... options) { - checkArgument(Objects.equals(datasetInfo.datasetId().dataset(), - info.datasetId().dataset()), "Dataset's user-defined ids must match"); - return new Dataset(bigquery, bigquery.update(datasetInfo, options)); + public Dataset update(BigQuery.DatasetOption... options) { + return bigquery.update(this, options); } /** @@ -187,7 +189,7 @@ public Dataset update(DatasetInfo datasetInfo, BigQuery.DatasetOption... 
options * @throws BigQueryException upon failure */ public boolean delete() { - return bigquery.delete(info.datasetId()); + return bigquery.delete(datasetId()); } /** @@ -197,10 +199,7 @@ public boolean delete() { * @throws BigQueryException upon failure */ public Page
list(BigQuery.TableListOption... options) { - Page infoPage = bigquery.listTables(info.datasetId(), options); - BigQueryOptions bigqueryOptions = bigquery.options(); - return new PageImpl<>(new TablePageFetcher(bigqueryOptions, infoPage), - infoPage.nextPageCursor(), new LazyTableIterable(bigqueryOptions, infoPage.values())); + return bigquery.listTables(datasetId(), options); } /** @@ -211,8 +210,7 @@ public Page
list(BigQuery.TableListOption... options) { * @throws BigQueryException upon failure */ public Table get(String table, BigQuery.TableOption... options) { - TableInfo tableInfo = bigquery.getTable(TableId.of(info.datasetId().dataset(), table), options); - return tableInfo != null ? new Table(bigquery, tableInfo) : null; + return bigquery.getTable(TableId.of(datasetId().dataset(), table), options); } /** @@ -225,8 +223,8 @@ public Table get(String table, BigQuery.TableOption... options) { * @throws BigQueryException upon failure */ public Table create(String table, TableDefinition definition, BigQuery.TableOption... options) { - TableInfo tableInfo = TableInfo.of(TableId.of(info.datasetId().dataset(), table), definition); - return new Table(bigquery, bigquery.create(tableInfo, options)); + TableInfo tableInfo = TableInfo.of(TableId.of(datasetId().dataset(), table), definition); + return bigquery.create(tableInfo, options); } /** @@ -235,4 +233,42 @@ public Table create(String table, TableDefinition definition, BigQuery.TableOpti public BigQuery bigquery() { return bigquery; } + + public static Builder builder(BigQuery bigquery, DatasetId datasetId) { + return new Builder(bigquery).datasetId(datasetId); + } + + /** + * Returns a builder for a {@code DatasetInfo} object given it's user-defined id. + */ + public static Builder builder(BigQuery bigquery, String datasetId) { + return builder(bigquery, DatasetId.of(datasetId)); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Dataset + && Objects.equals(toPb(), ((Dataset) obj).toPb()) + && Objects.equals(options, ((Dataset) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.bigquery = options.service(); + } + + static Dataset fromPb(BigQuery bigquery, + com.google.api.services.bigquery.model.Dataset datasetPb) { + return new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetPb)); + } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java index c6330308c8ce..4480031a7331 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java @@ -39,7 +39,7 @@ * @see * Managing Jobs, Datasets, and Projects */ -public final class DatasetInfo implements Serializable { +public class DatasetInfo implements Serializable { static final Function FROM_PB_FUNCTION = new Function() { @@ -70,7 +70,72 @@ public Dataset apply(DatasetInfo datasetInfo) { private final String location; private final String selfLink; - public static final class Builder { + public abstract static class Builder { + + /** + * Sets the dataset identity. + */ + public abstract Builder datasetId(DatasetId datasetId); + + /** + * Sets the dataset's access control configuration. + * + * @see Access Control + */ + public abstract Builder acl(List acl); + + abstract Builder creationTime(Long creationTime); + + /** + * Sets the default lifetime of all tables in the dataset, in milliseconds. The minimum value is + * 3600000 milliseconds (one hour). 
Once this property is set, all newly-created tables in the + * dataset will have an expirationTime property set to the creation time plus the value in this + * property, and changing the value will only affect new tables, not existing ones. When the + * expirationTime for a given table is reached, that table will be deleted automatically. If a + * table's expirationTime is modified or removed before the table expires, or if you provide an + * explicit expirationTime when creating a table, that value takes precedence over the default + * expiration time indicated by this property. This property is experimental and might be + * subject to change or removed. + */ + public abstract Builder defaultTableLifetime(Long defaultTableLifetime); + + /** + * Sets a user-friendly description for the dataset. + */ + public abstract Builder description(String description); + + abstract Builder etag(String etag); + + /** + * Sets a user-friendly name for the dataset. + */ + public abstract Builder friendlyName(String friendlyName); + + abstract Builder id(String id); + + abstract Builder lastModified(Long lastModified); + + /** + * Sets the geographic location where the dataset should reside. This property is experimental + * and might be subject to change or removed. + * + * @see Dataset + * Location + */ + public abstract Builder location(String location); + + abstract Builder selfLink(String selfLink); + + /** + * Creates a {@code DatasetInfo} object. + */ + public abstract DatasetInfo build(); + } + + /** + * Base class for a {@code DatasetInfo} builder. + */ + static final class BuilderImpl extends Builder { private DatasetId datasetId; private List acl; @@ -84,9 +149,9 @@ public static final class Builder { private String location; private String selfLink; - private Builder() {} + BuilderImpl() {} - private Builder(DatasetInfo datasetInfo) { + BuilderImpl(DatasetInfo datasetInfo) { this.datasetId = datasetInfo.datasetId; this.acl = datasetInfo.acl; this.creationTime = datasetInfo.creationTime; @@ -100,103 +165,103 @@ private Builder(DatasetInfo datasetInfo) { this.selfLink = datasetInfo.selfLink; } - /** - * Sets the dataset identity. - */ + BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) { + if (datasetPb.getDatasetReference() != null) { + this.datasetId = DatasetId.fromPb(datasetPb.getDatasetReference()); + } + if (datasetPb.getAccess() != null) { + this.acl = Lists.transform(datasetPb.getAccess(), new Function() { + @Override + public Acl apply(Dataset.Access accessPb) { + return Acl.fromPb(accessPb); + } + }); + } + this.creationTime = datasetPb.getCreationTime(); + this.defaultTableLifetime = datasetPb.getDefaultTableExpirationMs(); + this.description = datasetPb.getDescription(); + this.etag = datasetPb.getEtag(); + this.friendlyName = datasetPb.getFriendlyName(); + this.id = datasetPb.getId(); + this.lastModified = datasetPb.getLastModifiedTime(); + this.location = datasetPb.getLocation(); + this.selfLink = datasetPb.getSelfLink(); + } + + @Override public Builder datasetId(DatasetId datasetId) { this.datasetId = checkNotNull(datasetId); return this; } - /** - * Sets the dataset's access control configuration. - * - * @see Access Control - */ + @Override public Builder acl(List acl) { this.acl = acl != null ? ImmutableList.copyOf(acl) : null; return this; } + @Override Builder creationTime(Long creationTime) { this.creationTime = creationTime; return this; } - /** - * Sets the default lifetime of all tables in the dataset, in milliseconds. 
The minimum value is - * 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the - * dataset will have an expirationTime property set to the creation time plus the value in this - * property, and changing the value will only affect new tables, not existing ones. When the - * expirationTime for a given table is reached, that table will be deleted automatically. If a - * table's expirationTime is modified or removed before the table expires, or if you provide an - * explicit expirationTime when creating a table, that value takes precedence over the default - * expiration time indicated by this property. This property is experimental and might be - * subject to change or removed. - */ + @Override public Builder defaultTableLifetime(Long defaultTableLifetime) { this.defaultTableLifetime = firstNonNull(defaultTableLifetime, Data.nullOf(Long.class)); return this; } - /** - * Sets a user-friendly description for the dataset. - */ + @Override public Builder description(String description) { this.description = firstNonNull(description, Data.nullOf(String.class)); return this; } + @Override Builder etag(String etag) { this.etag = etag; return this; } - /** - * Sets a user-friendly name for the dataset. - */ + @Override public Builder friendlyName(String friendlyName) { this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); return this; } + @Override Builder id(String id) { this.id = id; return this; } + @Override Builder lastModified(Long lastModified) { this.lastModified = lastModified; return this; } - /** - * Sets the geographic location where the dataset should reside. This property is experimental - * and might be subject to change or removed. - * - * @see Dataset - * Location - */ + @Override public Builder location(String location) { this.location = firstNonNull(location, Data.nullOf(String.class)); return this; } + @Override Builder selfLink(String selfLink) { this.selfLink = selfLink; return this; } - /** - * Creates a {@code DatasetInfo} object. - */ + @Override public DatasetInfo build() { return new DatasetInfo(this); } } - private DatasetInfo(Builder builder) { + DatasetInfo(BuilderImpl builder) { datasetId = checkNotNull(builder.datasetId); acl = builder.acl; creationTime = builder.creationTime; @@ -301,10 +366,10 @@ public String selfLink() { } /** - * Returns a builder for the {@code DatasetInfo} object. + * Returns a builder for the dataset object. */ public Builder toBuilder() { - return new Builder(this); + return new BuilderImpl(this); } @Override @@ -331,7 +396,8 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof DatasetInfo && Objects.equals(toPb(), ((DatasetInfo) obj).toPb()); + return obj.getClass().equals(DatasetInfo.class) + && Objects.equals(toPb(), ((DatasetInfo) obj).toPb()); } DatasetInfo setProjectId(String projectId) { @@ -380,65 +446,27 @@ public Dataset.Access apply(Acl acl) { } /** - * Returns a builder for the DatasetInfo object given it's user-defined id. + * Returns a builder for a {@code DatasetInfo} object given it's identity. */ - public static Builder builder(String datasetId) { - return new Builder().datasetId(DatasetId.of(datasetId)); + public static Builder builder(DatasetId datasetId) { + return new BuilderImpl().datasetId(datasetId); } /** - * Returns a builder for the DatasetInfo object given it's project and user-defined id. + * Returns a builder for a {@code DatasetInfo} object given it's user-defined id. 
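With the builder hierarchy split into an abstract `Builder` and a package-private `BuilderImpl`, client code keeps using the static factory methods unchanged. A sketch with placeholder ids and values:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Dataset;
import com.google.gcloud.bigquery.DatasetInfo;

BigQuery bigquery = BigQueryOptions.defaultInstance().service();
// Build the plain metadata object; the service call returns the functional Dataset.
DatasetInfo info = DatasetInfo.builder("my_dataset_id")
    .description("Example dataset")
    .defaultTableLifetime(24L * 60L * 60L * 1000L) // one day, in milliseconds
    .build();
Dataset dataset = bigquery.create(info);

// Metadata updates still go through a DatasetInfo; the call again returns a Dataset handle.
Dataset updated = bigquery.update(
    DatasetInfo.builder("my_dataset_id").friendlyName("Renamed dataset").build());
```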
*/ - public static Builder builder(String projectId, String datasetId) { - return new Builder().datasetId(DatasetId.of(projectId, datasetId)); + public static Builder builder(String datasetId) { + return builder(DatasetId.of(datasetId)); } /** - * Returns a builder for the DatasetInfo object given it's identity. + * Returns a builder for the DatasetInfo object given it's project and user-defined id. */ - public static Builder builder(DatasetId datasetId) { - return new Builder().datasetId(datasetId); + public static Builder builder(String projectId, String datasetId) { + return builder(DatasetId.of(projectId, datasetId)); } static DatasetInfo fromPb(Dataset datasetPb) { - Builder builder = builder(datasetPb.getDatasetReference().getProjectId(), - datasetPb.getDatasetReference().getDatasetId()); - if (datasetPb.getAccess() != null) { - builder.acl(Lists.transform(datasetPb.getAccess(), - new Function() { - @Override - public Acl apply(Dataset.Access accessPb) { - return Acl.fromPb(accessPb); - } - })); - } - if (datasetPb.getCreationTime() != null) { - builder.creationTime(datasetPb.getCreationTime()); - } - if (datasetPb.getDefaultTableExpirationMs() != null) { - builder.defaultTableLifetime(datasetPb.getDefaultTableExpirationMs()); - } - if (datasetPb.getDescription() != null) { - builder.description(datasetPb.getDescription()); - } - if (datasetPb.getEtag() != null) { - builder.etag(datasetPb.getEtag()); - } - if (datasetPb.getFriendlyName() != null) { - builder.friendlyName(datasetPb.getFriendlyName()); - } - if (datasetPb.getId() != null) { - builder.id(datasetPb.getId()); - } - if (datasetPb.getLastModifiedTime() != null) { - builder.lastModified(datasetPb.getLastModifiedTime()); - } - if (datasetPb.getLocation() != null) { - builder.location(datasetPb.getLocation()); - } - if (datasetPb.getSelfLink() != null) { - builder.selfLink(datasetPb.getSelfLink()); - } - return builder.build(); + return new BuilderImpl(datasetPb).build(); } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java index c0d7ddc29c37..8f2a822d376f 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java @@ -18,28 +18,98 @@ import static com.google.common.base.Preconditions.checkNotNull; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.Objects; + /** * A Google BigQuery Job. * *

<p>Objects of this class are immutable. To get a {@code Job} object with the most recent
- * information use {@link #reload}.
+ * information use {@link #reload}. {@code Job} adds a layer of service-related functionality over
+ * {@link JobInfo}.
  * </p>

*/ -public final class Job { +public final class Job extends JobInfo { - private final BigQuery bigquery; - private final JobInfo info; + private static final long serialVersionUID = -4324100991693024704L; - /** - * Constructs a {@code Job} object for the provided {@code JobInfo}. The BigQuery service - * is used to issue requests. - * - * @param bigquery the BigQuery service used for issuing requests - * @param info jobs's info - */ - public Job(BigQuery bigquery, JobInfo info) { + private final BigQueryOptions options; + private transient BigQuery bigquery; + + static final class Builder extends JobInfo.Builder { + + private final BigQuery bigquery; + private final JobInfo.BuilderImpl infoBuilder; + + private Builder(BigQuery bigquery) { + this.bigquery = bigquery; + this.infoBuilder = new JobInfo.BuilderImpl(); + } + + private Builder(Job job) { + this.bigquery = job.bigquery; + this.infoBuilder = new JobInfo.BuilderImpl(job); + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + public Builder jobId(JobId jobId) { + infoBuilder.jobId(jobId); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + Builder status(JobStatus status) { + infoBuilder.status(status); + return this; + } + + @Override + Builder statistics(JobStatistics statistics) { + infoBuilder.statistics(statistics); + return this; + } + + @Override + Builder userEmail(String userEmail) { + infoBuilder.userEmail(userEmail); + return this; + } + + @Override + public Builder configuration(JobConfiguration configuration) { + infoBuilder.configuration(configuration); + return this; + } + + @Override + public Job build() { + return new Job(bigquery, infoBuilder); + } + } + + Job(BigQuery bigquery, JobInfo.BuilderImpl infoBuilder) { + super(infoBuilder); this.bigquery = checkNotNull(bigquery); - this.info = checkNotNull(info); + this.options = bigquery.options(); } /** @@ -53,15 +123,7 @@ public Job(BigQuery bigquery, JobInfo info) { * @throws BigQueryException upon failure */ public static Job get(BigQuery bigquery, String job, BigQuery.JobOption... options) { - JobInfo info = bigquery.getJob(job, options); - return info != null ? new Job(bigquery, info) : null; - } - - /** - * Returns the job's information. - */ - public JobInfo info() { - return info; + return bigquery.getJob(job, options); } /** @@ -71,7 +133,7 @@ public JobInfo info() { * @throws BigQueryException upon failure */ public boolean exists() { - return bigquery.getJob(info.jobId(), BigQuery.JobOption.fields()) != null; + return bigquery.getJob(jobId(), BigQuery.JobOption.fields()) != null; } /** @@ -90,8 +152,7 @@ public boolean exists() { * @throws BigQueryException upon failure */ public boolean isDone() { - JobInfo job = bigquery.getJob(info.jobId(), - BigQuery.JobOption.fields(BigQuery.JobField.STATUS)); + Job job = bigquery.getJob(jobId(), BigQuery.JobOption.fields(BigQuery.JobField.STATUS)); return job != null && job.status().state() == JobStatus.State.DONE; } @@ -103,7 +164,7 @@ public boolean isDone() { * @throws BigQueryException upon failure */ public Job reload(BigQuery.JobOption... options) { - return Job.get(bigquery, info.jobId().job(), options); + return Job.get(bigquery, jobId().job(), options); } /** @@ -114,7 +175,7 @@ public Job reload(BigQuery.JobOption... 
options) { * @throws BigQueryException upon failure */ public boolean cancel() { - return bigquery.cancel(info.jobId()); + return bigquery.cancel(jobId()); } /** @@ -123,4 +184,34 @@ public boolean cancel() { public BigQuery bigquery() { return bigquery; } + + static Builder builder(BigQuery bigquery, JobConfiguration configuration) { + return new Builder(bigquery).configuration(configuration); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Job + && Objects.equals(toPb(), ((Job) obj).toPb()) + && Objects.equals(options, ((Job) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.bigquery = options.service(); + } + + static Job fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Job jobPb) { + return new Job(bigquery, new JobInfo.BuilderImpl(jobPb)); + } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java index 47135b6d97d0..322fa8ae6884 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java @@ -32,7 +32,7 @@ * * @see Jobs */ -public final class JobInfo implements Serializable { +public class JobInfo implements Serializable { static final Function FROM_PB_FUNCTION = new Function() { @@ -41,8 +41,18 @@ public JobInfo apply(Job pb) { return JobInfo.fromPb(pb); } }; + private static final long serialVersionUID = -3272941007234620265L; + private final String etag; + private final String id; + private final JobId jobId; + private final String selfLink; + private final JobStatus status; + private final JobStatistics statistics; + private final String userEmail; + private final JobConfiguration configuration; + /** * Specifies whether the job is allowed to create new tables. */ @@ -78,16 +88,44 @@ public enum WriteDisposition { WRITE_EMPTY } - private final String etag; - private final String id; - private final JobId jobId; - private final String selfLink; - private final JobStatus status; - private final JobStatistics statistics; - private final String userEmail; - private final JobConfiguration configuration; + /** + * Base class for a {@code JobInfo} builder. + */ + public abstract static class Builder { + + abstract Builder etag(String etag); + + abstract Builder id(String id); + + /** + * Sets the job identity. + */ + public abstract Builder jobId(JobId jobId); + + abstract Builder selfLink(String selfLink); + + abstract Builder status(JobStatus status); + + abstract Builder statistics(JobStatistics statistics); - public static final class Builder { + abstract Builder userEmail(String userEmail); + + /** + * Sets a configuration for the {@code JobInfo} object. Use {@link CopyJobConfiguration} for a + * job that copies an existing table. Use {@link ExtractJobConfiguration} for a job that exports + * a table to Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads data + * from Google Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that runs + * a query. + */ + public abstract Builder configuration(JobConfiguration configuration); + + /** + * Creates a {@code JobInfo} object. 
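Since `Job` now exposes the service-facing helpers itself, a job can be looked up and managed without going through `JobInfo`. A sketch, assuming a previously created job with the placeholder id `my_job_id`:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Job;

BigQuery bigquery = BigQueryOptions.defaultInstance().service();
// Look up the job by its user-defined id; jobId and status are always returned,
// so requesting only the STATUS field keeps the call cheap.
Job job = Job.get(bigquery, "my_job_id", BigQuery.JobOption.fields(BigQuery.JobField.STATUS));
if (job != null && !job.isDone()) {
  // cancel() only sends the request and returns immediately; completion must still be
  // confirmed by checking the job status afterwards.
  job.cancel();
}
```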
+ */ + public abstract JobInfo build(); + } + + static final class BuilderImpl extends Builder { private String etag; private String id; @@ -98,9 +136,9 @@ public static final class Builder { private String userEmail; private JobConfiguration configuration; - private Builder() {} + BuilderImpl() {} - private Builder(JobInfo jobInfo) { + BuilderImpl(JobInfo jobInfo) { this.etag = jobInfo.etag; this.id = jobInfo.id; this.jobId = jobInfo.jobId; @@ -111,7 +149,7 @@ private Builder(JobInfo jobInfo) { this.configuration = jobInfo.configuration; } - protected Builder(Job jobPb) { + BuilderImpl(Job jobPb) { this.etag = jobPb.getEtag(); this.id = jobPb.getId(); if (jobPb.getJobReference() != null) { @@ -128,55 +166,61 @@ protected Builder(Job jobPb) { this.configuration = JobConfiguration.fromPb(jobPb.getConfiguration()); } + @Override Builder etag(String etag) { this.etag = etag; return this; } + @Override Builder id(String id) { this.id = id; return this; } - /** - * Sets the job identity. - */ + @Override public Builder jobId(JobId jobId) { this.jobId = jobId; return this; } + @Override Builder selfLink(String selfLink) { this.selfLink = selfLink; return this; } + @Override Builder status(JobStatus status) { this.status = status; return this; } + @Override Builder statistics(JobStatistics statistics) { this.statistics = statistics; return this; } + @Override Builder userEmail(String userEmail) { this.userEmail = userEmail; return this; } + @Override public Builder configuration(JobConfiguration configuration) { this.configuration = configuration; return this; } + @Override public JobInfo build() { return new JobInfo(this); } } - private JobInfo(Builder builder) { + JobInfo(BuilderImpl builder) { this.jobId = builder.jobId; this.etag = builder.etag; this.id = builder.id; @@ -248,10 +292,10 @@ public C configuration() { } /** - * Returns a builder for the job. + * Returns a builder for the job object. */ public Builder toBuilder() { - return new Builder(this); + return new BuilderImpl(this); } @Override @@ -275,7 +319,7 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof JobInfo && Objects.equals(toPb(), ((JobInfo) obj).toPb()); + return obj.getClass().equals(JobInfo.class) && Objects.equals(toPb(), ((JobInfo) obj).toPb()); } JobInfo setProjectId(String projectId) { @@ -301,19 +345,40 @@ Job toPb() { return jobPb; } + /** + * Returns a builder for a {@code JobInfo} object given the job configuration. Use + * {@link CopyJobConfiguration} for a job that copies an existing table. Use + * {@link ExtractJobConfiguration} for a job that exports a table to Google Cloud Storage. Use + * {@link LoadJobConfiguration} for a job that loads data from Google Cloud Storage into a table. + * Use {@link QueryJobConfiguration} for a job that runs a query. + */ public static Builder builder(JobConfiguration configuration) { - return new Builder().configuration(configuration); + return new BuilderImpl().configuration(configuration); } + /** + * Returns a {@code JobInfo} object given the job configuration. Use {@link CopyJobConfiguration} + * for a job that copies an existing table. Use {@link ExtractJobConfiguration} for a job that + * exports a table to Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads + * data from Google Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that + * runs a query. 
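The factory methods accept any of the configuration types listed above. The sketch below uses `LoadJobConfiguration` with placeholder table, bucket, and job ids, and shows both the service-assigned and explicit `JobId` variants:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Job;
import com.google.gcloud.bigquery.JobId;
import com.google.gcloud.bigquery.JobInfo;
import com.google.gcloud.bigquery.LoadJobConfiguration;
import com.google.gcloud.bigquery.TableId;

BigQuery bigquery = BigQueryOptions.defaultInstance().service();
TableId destination = TableId.of("my_dataset_id", "my_table_id");
LoadJobConfiguration configuration =
    LoadJobConfiguration.of(destination, "gs://my_bucket/data.csv");

// Either let the service assign a job id...
Job job = bigquery.create(JobInfo.of(configuration));
// ...or supply an explicit one so the job can be looked up later by name.
Job namedJob = bigquery.create(JobInfo.of(JobId.of("my_load_job_id"), configuration));
```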
+ */ public static JobInfo of(JobConfiguration configuration) { return builder(configuration).build(); } + /** + * Returns a builder for a {@code JobInfo} object given the job identity and configuration. Use + * {@link CopyJobConfiguration} for a job that copies an existing table. Use + * {@link ExtractJobConfiguration} for a job that exports a table to Google Cloud Storage. Use + * {@link LoadJobConfiguration} for a job that loads data from Google Cloud Storage into a table. + * Use {@link QueryJobConfiguration} for a job that runs a query. + */ public static JobInfo of(JobId jobId, JobConfiguration configuration) { return builder(configuration).jobId(jobId).build(); } static JobInfo fromPb(Job jobPb) { - return new Builder(jobPb).build(); + return new BuilderImpl(jobPb).build(); } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java index cb45c52afd7e..aa1dcfecaca3 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java @@ -16,12 +16,13 @@ package com.google.gcloud.bigquery; -import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.collect.ImmutableList; import com.google.gcloud.Page; +import java.io.IOException; +import java.io.ObjectInputStream; import java.util.List; import java.util.Objects; @@ -30,24 +31,102 @@ * *

Objects of this class are immutable. Operations that modify the table like {@link #update}
 * return a new object. To get a {@code Table} object with the most recent information use
- * {@link #reload}.
+ * {@link #reload}. {@code Table} adds a layer of service-related functionality over
+ * {@link TableInfo}.
 *

*/ -public final class Table { +public final class Table extends TableInfo { - private final BigQuery bigquery; - private final TableInfo info; + private static final long serialVersionUID = 5744556727066570096L; - /** - * Constructs a {@code Table} object for the provided {@code TableInfo}. The BigQuery service - * is used to issue requests. - * - * @param bigquery the BigQuery service used for issuing requests - * @param info table's info - */ - public Table(BigQuery bigquery, TableInfo info) { + private final BigQueryOptions options; + private transient BigQuery bigquery; + + static class Builder extends TableInfo.Builder { + + private final BigQuery bigquery; + private final TableInfo.BuilderImpl infoBuilder; + + Builder(BigQuery bigquery) { + this.bigquery = bigquery; + this.infoBuilder = new TableInfo.BuilderImpl(); + } + + Builder(Table table) { + this.bigquery = table.bigquery; + this.infoBuilder = new TableInfo.BuilderImpl(table); + } + + @Override + Builder creationTime(Long creationTime) { + infoBuilder.creationTime(creationTime); + return this; + } + + @Override + public Builder description(String description) { + infoBuilder.description(description); + return this; + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + public Builder expirationTime(Long expirationTime) { + infoBuilder.expirationTime(expirationTime); + return this; + } + + @Override + public Builder friendlyName(String friendlyName) { + infoBuilder.friendlyName(friendlyName); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + Builder lastModifiedTime(Long lastModifiedTime) { + infoBuilder.lastModifiedTime(lastModifiedTime); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + public Builder tableId(TableId tableId) { + infoBuilder.tableId(tableId); + return this; + } + + @Override + public Builder definition(TableDefinition definition) { + infoBuilder.definition(definition); + return this; + } + + @Override + public Table build() { + return new Table(bigquery, infoBuilder); + } + } + + Table(BigQuery bigquery, TableInfo.BuilderImpl infoBuilder) { + super(infoBuilder); this.bigquery = checkNotNull(bigquery); - this.info = checkNotNull(info); + this.options = bigquery.options(); } /** @@ -77,15 +156,7 @@ public static Table get(BigQuery bigquery, String dataset, String table, * @throws BigQueryException upon failure */ public static Table get(BigQuery bigquery, TableId table, BigQuery.TableOption... options) { - TableInfo info = bigquery.getTable(table, options); - return info != null ? new Table(bigquery, info) : null; - } - - /** - * Returns the table's information. - */ - public TableInfo info() { - return info; + return bigquery.getTable(table, options); } /** @@ -95,7 +166,7 @@ public TableInfo info() { * @throws BigQueryException upon failure */ public boolean exists() { - return bigquery.getTable(info.tableId(), BigQuery.TableOption.fields()) != null; + return bigquery.getTable(tableId(), BigQuery.TableOption.fields()) != null; } /** @@ -106,25 +177,19 @@ public boolean exists() { * @throws BigQueryException upon failure */ public Table reload(BigQuery.TableOption... options) { - return Table.get(bigquery, info.tableId(), options); + return Table.get(bigquery, tableId(), options); } /** - * Updates the table's information. Dataset's and table's user-defined ids cannot be changed. 
A - * new {@code Table} object is returned. + * Updates the table's information with this table's information. Dataset's and table's + * user-defined ids cannot be changed. A new {@code Table} object is returned. * - * @param tableInfo new table's information. Dataset's and table's user-defined ids must match the - * ones of the current table * @param options dataset options * @return a {@code Table} object with updated information * @throws BigQueryException upon failure */ - public Table update(TableInfo tableInfo, BigQuery.TableOption... options) { - checkArgument(Objects.equals(tableInfo.tableId().dataset(), - info.tableId().dataset()), "Dataset's user-defined ids must match"); - checkArgument(Objects.equals(tableInfo.tableId().table(), - info.tableId().table()), "Table's user-defined ids must match"); - return new Table(bigquery, bigquery.update(tableInfo, options)); + public Table update(BigQuery.TableOption... options) { + return bigquery.update(this, options); } /** @@ -134,7 +199,7 @@ public Table update(TableInfo tableInfo, BigQuery.TableOption... options) { * @throws BigQueryException upon failure */ public boolean delete() { - return bigquery.delete(info.tableId()); + return bigquery.delete(tableId()); } /** @@ -144,7 +209,7 @@ public boolean delete() { * @throws BigQueryException upon failure */ InsertAllResponse insert(Iterable rows) throws BigQueryException { - return bigquery.insertAll(InsertAllRequest.of(info.tableId(), rows)); + return bigquery.insertAll(InsertAllRequest.of(tableId(), rows)); } /** @@ -160,7 +225,7 @@ InsertAllResponse insert(Iterable rows) throws Big */ InsertAllResponse insert(Iterable rows, boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException { - InsertAllRequest request = InsertAllRequest.builder(info.tableId(), rows) + InsertAllRequest request = InsertAllRequest.builder(tableId(), rows) .skipInvalidRows(skipInvalidRows) .ignoreUnknownValues(ignoreUnknownValues) .build(); @@ -174,7 +239,7 @@ InsertAllResponse insert(Iterable rows, boolean sk * @throws BigQueryException upon failure */ Page> list(BigQuery.TableDataListOption... options) throws BigQueryException { - return bigquery.listTableData(info.tableId(), options); + return bigquery.listTableData(tableId(), options); } /** @@ -193,15 +258,15 @@ Job copy(String destinationDataset, String destinationTable, BigQuery.JobOption. /** * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the - * started {@link Job} object. + * started {@link Job} object. ddd * * @param destinationTable the destination table of the copy job * @param options job options * @throws BigQueryException upon failure */ Job copy(TableId destinationTable, BigQuery.JobOption... options) throws BigQueryException { - CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, info.tableId()); - return new Job(bigquery, bigquery.create(JobInfo.of(configuration), options)); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, tableId()); + return bigquery.create(JobInfo.of(configuration), options); } /** @@ -232,8 +297,8 @@ Job extract(String format, String destinationUri, BigQuery.JobOption... options) Job extract(String format, List destinationUris, BigQuery.JobOption... 
options) throws BigQueryException { ExtractJobConfiguration extractConfiguration = - ExtractJobConfiguration.of(info.tableId(), destinationUris, format); - return new Job(bigquery, bigquery.create(JobInfo.of(extractConfiguration), options)); + ExtractJobConfiguration.of(tableId(), destinationUris, format); + return bigquery.create(JobInfo.of(extractConfiguration), options); } /** @@ -263,8 +328,8 @@ Job load(FormatOptions format, String sourceUri, BigQuery.JobOption... options) */ Job load(FormatOptions format, List sourceUris, BigQuery.JobOption... options) throws BigQueryException { - LoadJobConfiguration loadConfig = LoadJobConfiguration.of(info.tableId(), sourceUris, format); - return new Job(bigquery, bigquery.create(JobInfo.of(loadConfig), options)); + LoadJobConfiguration loadConfig = LoadJobConfiguration.of(tableId(), sourceUris, format); + return bigquery.create(JobInfo.of(loadConfig), options); } /** @@ -273,4 +338,34 @@ Job load(FormatOptions format, List sourceUris, BigQuery.JobOption... op public BigQuery bigquery() { return bigquery; } + + static Builder builder(BigQuery bigquery, TableId tableId, TableDefinition definition) { + return new Builder(bigquery).tableId(tableId).definition(definition); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Table + && Objects.equals(toPb(), ((Table) obj).toPb()) + && Objects.equals(options, ((Table) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.bigquery = options.service(); + } + + static Table fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Table tablePb) { + return new Table(bigquery, new TableInfo.BuilderImpl(tablePb)); + } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java index 814b8cac1e97..2c035a5c3926 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java @@ -23,7 +23,6 @@ import com.google.api.services.bigquery.model.Table; import com.google.common.base.Function; import com.google.common.base.MoreObjects; -import com.google.common.base.MoreObjects.ToStringHelper; import java.io.Serializable; import java.math.BigInteger; @@ -36,7 +35,7 @@ * * @see Managing Tables */ -public final class TableInfo implements Serializable { +public class TableInfo implements Serializable { static final Function FROM_PB_FUNCTION = new Function() { @@ -67,9 +66,55 @@ public Table apply(TableInfo tableInfo) { private final TableDefinition definition; /** - * Builder for tables. + * Base class for a {@code JobInfo} builder. */ - public static class Builder { + public abstract static class Builder { + + abstract Builder creationTime(Long creationTime); + + /** + * Sets a user-friendly description for the table. + */ + public abstract Builder description(String description); + + abstract Builder etag(String etag); + + /** + * Sets the time when this table expires, in milliseconds since the epoch. If not present, the + * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. 
+ */ + public abstract Builder expirationTime(Long expirationTime); + + /** + * Sets a user-friendly name for the table. + */ + public abstract Builder friendlyName(String friendlyName); + + abstract Builder id(String id); + + abstract Builder lastModifiedTime(Long lastModifiedTime); + + abstract Builder selfLink(String selfLink); + + /** + * Sets the table identity. + */ + public abstract Builder tableId(TableId tableId); + + /** + * Sets the table definition. Use {@link StandardTableDefinition} to create simple BigQuery + * table. Use {@link ViewDefinition} to create a BigQuery view. Use + * {@link ExternalTableDefinition} to create a BigQuery a table backed by external data. + */ + public abstract Builder definition(TableDefinition definition); + + /** + * Creates a {@code TableInfo} object. + */ + public abstract TableInfo build(); + } + + static class BuilderImpl extends Builder { private String etag; private String id; @@ -82,9 +127,9 @@ public static class Builder { private Long lastModifiedTime; private TableDefinition definition; - private Builder() {} + BuilderImpl() {} - private Builder(TableInfo tableInfo) { + BuilderImpl(TableInfo tableInfo) { this.etag = tableInfo.etag; this.id = tableInfo.id; this.selfLink = tableInfo.selfLink; @@ -97,7 +142,7 @@ private Builder(TableInfo tableInfo) { this.definition = tableInfo.definition; } - private Builder(Table tablePb) { + BuilderImpl(Table tablePb) { this.tableId = TableId.fromPb(tablePb.getTableReference()); if (tablePb.getLastModifiedTime() != null) { this.lastModifiedTime(tablePb.getLastModifiedTime().longValue()); @@ -112,83 +157,73 @@ private Builder(Table tablePb) { this.definition = TableDefinition.fromPb(tablePb); } + @Override Builder creationTime(Long creationTime) { this.creationTime = creationTime; return this; } - /** - * Sets a user-friendly description for the table. - */ + @Override public Builder description(String description) { this.description = firstNonNull(description, Data.nullOf(String.class)); return this; } + @Override Builder etag(String etag) { this.etag = etag; return this; } - /** - * Sets the time when this table expires, in milliseconds since the epoch. If not present, the - * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. - */ + @Override public Builder expirationTime(Long expirationTime) { this.expirationTime = firstNonNull(expirationTime, Data.nullOf(Long.class)); return this; } - /** - * Sets a user-friendly name for the table. - */ + @Override public Builder friendlyName(String friendlyName) { this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); return this; } + @Override Builder id(String id) { this.id = id; return this; } + @Override Builder lastModifiedTime(Long lastModifiedTime) { this.lastModifiedTime = lastModifiedTime; return this; } + @Override Builder selfLink(String selfLink) { this.selfLink = selfLink; return this; } - /** - * Sets the table identity. - */ + @Override public Builder tableId(TableId tableId) { this.tableId = checkNotNull(tableId); return this; } - /** - * Sets the table definition. Use {@link StandardTableDefinition} to create simple BigQuery - * table. Use {@link ViewDefinition} to create a BigQuery view. Use - * {@link ExternalTableDefinition} to create a BigQuery a table backed by external data. - */ + @Override public Builder definition(TableDefinition definition) { this.definition = checkNotNull(definition); return this; } - /** - * Creates a {@code TableInfo} object. 
- */ + @Override public TableInfo build() { return new TableInfo(this); } } - private TableInfo(Builder builder) { + TableInfo(BuilderImpl builder) { this.tableId = checkNotNull(builder.tableId); this.etag = builder.etag; this.id = builder.id; @@ -275,13 +310,14 @@ public T definition() { } /** - * Returns a builder for the object. + * Returns a builder for the table object. */ public Builder toBuilder() { - return new Builder(this); + return new BuilderImpl(this); } - ToStringHelper toStringHelper() { + @Override + public String toString() { return MoreObjects.toStringHelper(this) .add("tableId", tableId) .add("etag", etag) @@ -292,12 +328,8 @@ ToStringHelper toStringHelper() { .add("expirationTime", expirationTime) .add("creationTime", creationTime) .add("lastModifiedTime", lastModifiedTime) - .add("definition", definition); - } - - @Override - public String toString() { - return toStringHelper().toString(); + .add("definition", definition) + .toString(); } @Override @@ -307,18 +339,25 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof TableInfo && Objects.equals(toPb(), ((TableInfo) obj).toPb()); + return obj.getClass().equals(TableInfo.class) + && Objects.equals(toPb(), ((TableInfo) obj).toPb()); } /** - * Returns a builder for a {@code TableInfo} object given table identity and definition. + * Returns a builder for a {@code TableInfo} object given table identity and definition. Use + * {@link StandardTableDefinition} to create simple BigQuery table. Use {@link ViewDefinition} to + * create a BigQuery view. Use {@link ExternalTableDefinition} to create a BigQuery a table backed + * by external data. */ public static Builder builder(TableId tableId, TableDefinition definition) { - return new Builder().tableId(tableId).definition(definition); + return new BuilderImpl().tableId(tableId).definition(definition); } /** - * Returns a {@code TableInfo} object given table identity and definition. + * Returns a {@code TableInfo} object given table identity and definition. Use + * {@link StandardTableDefinition} to create simple BigQuery table. Use {@link ViewDefinition} to + * create a BigQuery view. Use {@link ExternalTableDefinition} to create a BigQuery a table backed + * by external data. */ public static TableInfo of(TableId tableId, TableDefinition definition) { return builder(tableId, definition).build(); @@ -345,6 +384,6 @@ Table toPb() { } static TableInfo fromPb(Table tablePb) { - return new Builder(tablePb).build(); + return new BuilderImpl(tablePb).build(); } } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java index a249768f9d8d..6a54a183eaec 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java @@ -21,8 +21,8 @@ *
 {@code
  * BigQuery bigquery = BigQueryOptions.defaultInstance().service();
  * TableId tableId = TableId.of("dataset", "table");
- * TableInfo info = bigquery.getTable(tableId);
- * if (info == null) {
+ * Table table = bigquery.getTable(tableId);
+ * if (table == null) {
  *   System.out.println("Creating table " + tableId);
  *   Field integerField = Field.of("fieldName", Field.Type.integer());
  *   Schema schema = Schema.of(integerField);
@@ -30,11 +30,9 @@
  * } else {
  *   System.out.println("Loading data into table " + tableId);
  *   LoadJobConfiguration configuration = LoadJobConfiguration.of(tableId, "gs://bucket/path");
- *   JobInfo loadJob = JobInfo.of(configuration);
- *   loadJob = bigquery.create(loadJob);
- *   while (loadJob.status().state() != JobStatus.State.DONE) {
+ *   Job loadJob = bigquery.create(JobInfo.of(configuration));
+ *   while (!loadJob.isDone()) {
  *     Thread.sleep(1000L);
- *     loadJob = bigquery.getJob(loadJob.jobId());
  *   }
  *   if (loadJob.status().error() != null) {
  *     System.out.println("Job completed with errors");
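Note on the snippet above: because the service now returns a functional `Job`, polling no longer requires re-fetching the job by id. Below is a minimal companion sketch, assuming default credentials and a hypothetical job id ("my-load-job"); it only uses calls that appear elsewhere in this patch (`getJob`, `isDone`, `cancel`).

```java
// Sketch only: "my-load-job" is a placeholder id, not part of this patch.
BigQuery bigquery = BigQueryOptions.defaultInstance().service();
Job job = bigquery.getJob("my-load-job");
if (job != null && !job.isDone()) {
  // cancel() asks the service to stop the job; it returns true if the request was accepted
  boolean cancelRequested = job.cancel();
  System.out.println("Cancel requested: " + cancelRequested);
}
```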
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
index afad9041e802..385ee6dcc8bd 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
@@ -26,10 +26,7 @@
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
-import com.google.api.services.bigquery.model.Dataset;
 import com.google.api.services.bigquery.model.ErrorProto;
-import com.google.api.services.bigquery.model.Job;
-import com.google.api.services.bigquery.model.Table;
 import com.google.api.services.bigquery.model.TableCell;
 import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
 import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
@@ -287,8 +284,9 @@ public void testCreateDataset() {
         .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.create(DATASET_INFO);
-    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+    Dataset dataset = bigquery.create(DATASET_INFO);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        dataset);
   }
 
   @Test
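For context on the change exercised by this test: `create` now returns a service-backed `Dataset` rather than a plain `DatasetInfo`. A hedged sketch of the intended usage, with a hypothetical dataset name:

```java
// Sketch only: "my_dataset" is a placeholder name.
BigQuery bigquery = BigQueryOptions.defaultInstance().service();
DatasetInfo datasetInfo = DatasetInfo.builder(DatasetId.of("my_dataset"))
    .description("Example dataset")
    .build();
Dataset dataset = bigquery.create(datasetInfo);
// The returned object can issue further RPCs directly, e.g. to verify the dataset exists.
System.out.println(dataset.datasetId() + " exists: " + dataset.exists());
```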
@@ -299,13 +297,14 @@ public void testCreateDatasetWithSelectedFields() {
         .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS);
+    Dataset dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("datasetReference"));
     assertTrue(selector.contains("access"));
     assertTrue(selector.contains("etag"));
     assertEquals(28, selector.length());
-    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        dataset);
   }
 
   @Test
@@ -314,8 +313,9 @@ public void testGetDataset() {
         .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.getDataset(DATASET);
-    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+    Dataset dataset = bigquery.getDataset(DATASET);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        dataset);
   }
 
   @Test
@@ -324,8 +324,9 @@ public void testGetDatasetFromDatasetId() {
         .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.getDataset(DatasetId.of(PROJECT, DATASET));
-    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+    Dataset dataset = bigquery.getDataset(DatasetId.of(PROJECT, DATASET));
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        dataset);
   }
 
   @Test
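Similarly, `getDataset` now hands back a `Dataset` that can operate on its own tables. A sketch under the same assumptions (all names hypothetical):

```java
// Sketch only: "my_dataset" and "my_table" are placeholders.
BigQuery bigquery = BigQueryOptions.defaultInstance().service();
Dataset dataset = bigquery.getDataset("my_dataset");
if (dataset != null) {
  for (Table table : dataset.list().values()) {
    System.out.println(table.tableId());  // list the dataset's tables
  }
  Table single = dataset.get("my_table");  // null if the table does not exist
}
```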
@@ -335,54 +336,58 @@ public void testGetDatasetWithSelectedFields() {
         .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS);
+    Dataset dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("datasetReference"));
     assertTrue(selector.contains("access"));
     assertTrue(selector.contains("etag"));
     assertEquals(28, selector.length());
-    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        dataset);
   }
 
   @Test
   public void testListDatasets() {
     String cursor = "cursor";
-    ImmutableList<DatasetInfo> datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT,
-        OTHER_DATASET_INFO);
-    Tuple<String, Iterable<Dataset>> result =
+    bigquery = options.service();
+    ImmutableList<Dataset> datasetList = ImmutableList.of(
+        new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO)));
+    Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result =
         Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION));
     EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result);
     EasyMock.replay(bigqueryRpcMock);
-    bigquery = options.service();
-    Page<DatasetInfo> page = bigquery.listDatasets();
+    Page<Dataset> page = bigquery.listDatasets();
     assertEquals(cursor, page.nextPageCursor());
     assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class));
   }
 
   @Test
   public void testListEmptyDatasets() {
-    ImmutableList<Dataset> datasets = ImmutableList.of();
-    Tuple<String, Iterable<Dataset>> result = Tuple.<String, Iterable<Dataset>>of(null, datasets);
+    ImmutableList<com.google.api.services.bigquery.model.Dataset> datasets = ImmutableList.of();
+    Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result =
+        Tuple.<String, Iterable<com.google.api.services.bigquery.model.Dataset>>of(null, datasets);
     EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result);
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    Page<DatasetInfo> page = bigquery.listDatasets();
+    Page<Dataset> page = bigquery.listDatasets();
     assertNull(page.nextPageCursor());
     assertArrayEquals(ImmutableList.of().toArray(),
-        Iterables.toArray(page.values(), DatasetInfo.class));
+        Iterables.toArray(page.values(), Dataset.class));
   }
 
   @Test
   public void testListDatasetsWithOptions() {
     String cursor = "cursor";
-    ImmutableList<DatasetInfo> datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT,
-        OTHER_DATASET_INFO);
-    Tuple<String, Iterable<Dataset>> result =
+    bigquery = options.service();
+    ImmutableList<Dataset> datasetList = ImmutableList.of(
+        new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)),
+        new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO)));
+    Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result =
         Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION));
     EasyMock.expect(bigqueryRpcMock.listDatasets(DATASET_LIST_OPTIONS)).andReturn(result);
     EasyMock.replay(bigqueryRpcMock);
-    bigquery = options.service();
-    Page<DatasetInfo> page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN,
+    Page<Dataset> page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN,
         DATASET_LIST_MAX_RESULTS);
     assertEquals(cursor, page.nextPageCursor());
     assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class));
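The listing tests above reflect that `listDatasets()` now pages over functional `Dataset` objects. A minimal sketch of iterating one page; the page-token option mentioned in the comment is the one exercised by `DATASET_LIST_PAGE_TOKEN` in these tests:

```java
// Sketch only: prints the datasets of the first result page.
BigQuery bigquery = BigQueryOptions.defaultInstance().service();
Page<Dataset> page = bigquery.listDatasets();
for (Dataset dataset : page.values()) {
  System.out.println(dataset.datasetId());
}
// page.nextPageCursor(), if non-null, can be passed back through a page-token option
// to request the next page of results.
```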
@@ -422,8 +427,9 @@ public void testUpdateDataset() {
         .andReturn(updatedDatasetInfoWithProject.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.update(updatedDatasetInfo);
-    assertEquals(updatedDatasetInfoWithProject, dataset);
+    Dataset dataset = bigquery.update(updatedDatasetInfo);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)),
+        dataset);
   }
 
   @Test
@@ -438,13 +444,14 @@ public void testUpdateDatasetWithSelectedFields() {
         .andReturn(updatedDatasetInfoWithProject.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    DatasetInfo dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS);
+    Dataset dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("datasetReference"));
     assertTrue(selector.contains("access"));
     assertTrue(selector.contains("etag"));
     assertEquals(28, selector.length());
-    assertEquals(updatedDatasetInfoWithProject, dataset);
+    assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)),
+        dataset);
   }
 
   @Test
@@ -453,8 +460,8 @@ public void testCreateTable() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    TableInfo table = bigquery.create(TABLE_INFO);
-    assertEquals(TABLE_INFO_WITH_PROJECT, table);
+    Table table = bigquery.create(TABLE_INFO);
+    assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table);
   }
 
   @Test
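The table tests above exercise the new functional `Table` returned by `create`. A hedged end-to-end sketch of how that object is meant to be used; the dataset and table names are hypothetical and the schema mirrors the one used in this patch:

```java
// Sketch only: "my_dataset" / "my_table" are placeholders.
BigQuery bigquery = BigQueryOptions.defaultInstance().service();
Schema schema = Schema.of(Field.of("fieldName", Field.Type.integer()));
TableInfo tableInfo =
    TableInfo.of(TableId.of("my_dataset", "my_table"), StandardTableDefinition.of(schema));
Table table = bigquery.create(tableInfo);
// update() pushes this object's metadata to the service and returns the updated Table
table = table.toBuilder().description("Example table").build().update();
if (table.exists()) {
  table.delete();
}
```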
@@ -465,13 +472,13 @@ public void testCreateTableWithSelectedFields() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    TableInfo table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS);
+    Table table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("tableReference"));
     assertTrue(selector.contains("schema"));
     assertTrue(selector.contains("etag"));
     assertEquals(31, selector.length());
-    assertEquals(TABLE_INFO_WITH_PROJECT, table);
+    assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table);
   }
 
   @Test
@@ -480,8 +487,8 @@ public void testGetTable() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    TableInfo table = bigquery.getTable(DATASET, TABLE);
-    assertEquals(TABLE_INFO_WITH_PROJECT, table);
+    Table table = bigquery.getTable(DATASET, TABLE);
+    assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table);
   }
 
   @Test
@@ -490,8 +497,8 @@ public void testGetTableFromTableId() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    TableInfo table = bigquery.getTable(TABLE_ID);
-    assertEquals(TABLE_INFO_WITH_PROJECT, table);
+    Table table = bigquery.getTable(TABLE_ID);
+    assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table);
   }
 
   @Test
@@ -501,59 +508,61 @@ public void testGetTableWithSelectedFields() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    TableInfo table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS);
+    Table table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("tableReference"));
     assertTrue(selector.contains("schema"));
     assertTrue(selector.contains("etag"));
     assertEquals(31, selector.length());
-    assertEquals(TABLE_INFO_WITH_PROJECT, table);
+    assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table);
   }
 
   @Test
   public void testListTables() {
     String cursor = "cursor";
-    ImmutableList<TableInfo> tableList =
-        ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO);
-    Tuple<String, Iterable<Table>> result =
+    bigquery = options.service();
+    ImmutableList<Table>
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - bigquery = options.service(); - Page page = bigquery.listTables(DATASET); + Page
page = bigquery.listTables(DATASET); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), TableInfo.class)); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); } @Test public void testListTablesFromDatasetId() { String cursor = "cursor"; - ImmutableList tableList = - ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); - Tuple> result = + bigquery = options.service(); + ImmutableList
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - bigquery = options.service(); - Page page = bigquery.listTables(DatasetId.of(PROJECT, DATASET)); + Page
page = bigquery.listTables(DatasetId.of(PROJECT, DATASET)); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), TableInfo.class)); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); } @Test public void testListTablesWithOptions() { String cursor = "cursor"; - ImmutableList tableList = - ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); - Tuple> result = + bigquery = options.service(); + ImmutableList
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - bigquery = options.service(); - Page page = bigquery.listTables(DATASET, TABLE_LIST_MAX_RESULTS, - TABLE_LIST_PAGE_TOKEN); + Page
page = bigquery.listTables(DATASET, TABLE_LIST_MAX_RESULTS, TABLE_LIST_PAGE_TOKEN); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), TableInfo.class)); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); } @Test @@ -582,8 +591,9 @@ public void testUpdateTable() { .andReturn(updatedTableInfoWithProject.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - TableInfo table = bigquery.update(updatedTableInfo); - assertEquals(updatedTableInfoWithProject, table); + Table table = bigquery.update(updatedTableInfo); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), + table); } @Test @@ -597,13 +607,14 @@ public void testUpdateTableWithSelectedFields() { capture(capturedOptions))).andReturn(updatedTableInfoWithProject.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - TableInfo table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); + Table table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); assertTrue(selector.contains("tableReference")); assertTrue(selector.contains("schema")); assertTrue(selector.contains("etag")); assertEquals(31, selector.length()); - assertEquals(updatedTableInfoWithProject, table); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), + table); } @Test @@ -627,8 +638,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { return new TableDataInsertAllRequest.Rows().setInsertId(rowToInsert.id()) .setJson(rowToInsert.content()); } - }) - ).setSkipInvalidRows(false).setIgnoreUnknownValues(true).setTemplateSuffix("suffix"); + })).setSkipInvalidRows(false).setIgnoreUnknownValues(true).setTemplateSuffix("suffix"); TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse().setInsertErrors( ImmutableList.of(new TableDataInsertAllResponse.InsertErrors().setIndex(0L).setErrors( ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); @@ -731,57 +741,57 @@ public void testListTableDataWithOptions() { @Test public void testCreateQueryJob() { EasyMock.expect(bigqueryRpcMock.create( - JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(COMPLETE_QUERY_JOB.toPb()); + JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_QUERY_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.create(QUERY_JOB); - assertEquals(COMPLETE_QUERY_JOB, job); + Job job = bigquery.create(QUERY_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), job); } @Test public void testCreateLoadJob() { EasyMock.expect(bigqueryRpcMock.create( - JobInfo.of(LOAD_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(COMPLETE_LOAD_JOB.toPb()); + JobInfo.of(LOAD_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_LOAD_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.create(LOAD_JOB); - assertEquals(COMPLETE_LOAD_JOB, job); + Job job = bigquery.create(LOAD_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB)), job); } @Test public void testCreateCopyJob() { EasyMock.expect(bigqueryRpcMock.create( - JobInfo.of(COPY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), 
EMPTY_RPC_OPTIONS)) - .andReturn(COMPLETE_COPY_JOB.toPb()); + JobInfo.of(COPY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.create(COPY_JOB); - assertEquals(COMPLETE_COPY_JOB, job); + Job job = bigquery.create(COPY_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); } @Test public void testCreateExtractJob() { EasyMock.expect(bigqueryRpcMock.create( - JobInfo.of(EXTRACT_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) - .andReturn(COMPLETE_EXTRACT_JOB.toPb()); + JobInfo.of(EXTRACT_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_EXTRACT_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.create(EXTRACT_JOB); - assertEquals(COMPLETE_EXTRACT_JOB, job); + Job job = bigquery.create(EXTRACT_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_EXTRACT_JOB)), job); } @Test public void testCreateJobWithSelectedFields() { Capture> capturedOptions = Capture.newInstance(); EasyMock.expect(bigqueryRpcMock.create( - eq(JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb()), capture(capturedOptions))) - .andReturn(COMPLETE_QUERY_JOB.toPb()); + eq(JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb()), capture(capturedOptions))) + .andReturn(COMPLETE_QUERY_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS); - assertEquals(COMPLETE_QUERY_JOB, job); + Job job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), job); String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); assertTrue(selector.contains("jobReference")); assertTrue(selector.contains("configuration")); @@ -795,8 +805,8 @@ public void testGetJob() { .andReturn(COMPLETE_COPY_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.getJob(JOB); - assertEquals(COMPLETE_COPY_JOB, job); + Job job = bigquery.getJob(JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); } @Test @@ -805,67 +815,76 @@ public void testGetJobFromJobId() { .andReturn(COMPLETE_COPY_JOB.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.service(); - JobInfo job = bigquery.getJob(JobId.of(PROJECT, JOB)); - assertEquals(COMPLETE_COPY_JOB, job); + Job job = bigquery.getJob(JobId.of(PROJECT, JOB)); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); } @Test public void testListJobs() { String cursor = "cursor"; - ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); - Tuple> result = - Tuple.of(cursor, Iterables.transform(jobList, new Function() { - @Override - public Job apply(JobInfo jobInfo) { - return jobInfo.toPb(); - } - })); + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); EasyMock.expect(bigqueryRpcMock.listJobs(EMPTY_RPC_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - 
bigquery = options.service(); - Page page = bigquery.listJobs(); + Page page = bigquery.listJobs(); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); } @Test public void testListJobsWithOptions() { String cursor = "cursor"; - ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); - Tuple> result = - Tuple.of(cursor, Iterables.transform(jobList, new Function() { - @Override - public Job apply(JobInfo jobInfo) { - return jobInfo.toPb(); - } - })); + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); EasyMock.expect(bigqueryRpcMock.listJobs(JOB_LIST_OPTIONS)).andReturn(result); EasyMock.replay(bigqueryRpcMock); - bigquery = options.service(); - Page page = bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, + Page page = bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, JOB_LIST_PAGE_TOKEN, JOB_LIST_MAX_RESULTS); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); } @Test public void testListJobsWithSelectedFields() { String cursor = "cursor"; Capture> capturedOptions = Capture.newInstance(); - ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); - Tuple> result = - Tuple.of(cursor, Iterables.transform(jobList, new Function() { - @Override - public Job apply(JobInfo jobInfo) { - return jobInfo.toPb(); - } - })); + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); EasyMock.expect(bigqueryRpcMock.listJobs(capture(capturedOptions))).andReturn(result); EasyMock.replay(bigqueryRpcMock); - bigquery = options.service(); - Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); + Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); assertEquals(cursor, page.nextPageCursor()); - assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); assertTrue(selector.contains("etag,jobs(")); assertTrue(selector.contains("configuration")); @@ -1030,8 +1049,9 @@ public void testRetryableException() { .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); EasyMock.replay(bigqueryRpcMock); bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); - DatasetInfo dataset = bigquery.getDataset(DATASET); - assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + Dataset dataset = bigquery.getDataset(DATASET); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + 
dataset); } @Test diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java index 455212e16d3a..a8a9404b3056 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java @@ -16,32 +16,44 @@ package com.google.gcloud.bigquery; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.eq; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import com.google.gcloud.Page; import com.google.gcloud.PageImpl; import org.junit.After; -import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; -import java.util.Iterator; +import java.util.List; public class DatasetTest { private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final List ACCESS_RULES = ImmutableList.of( + Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), + Acl.of(new Acl.View(TableId.of("dataset", "table")))); + private static final Long CREATION_TIME = System.currentTimeMillis(); + private static final Long DEFAULT_TABLE_EXPIRATION = CREATION_TIME + 100; + private static final String DESCRIPTION = "description"; + private static final String ETAG = "0xFF00"; + private static final String FRIENDLY_NAME = "friendlyDataset"; + private static final String ID = "P/D:1"; + private static final Long LAST_MODIFIED = CREATION_TIME + 50; + private static final String LOCATION = ""; + private static final String SELF_LINK = "http://bigquery/p/d"; private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID).build(); private static final Field FIELD = Field.of("FieldName", Field.Type.integer()); private static final StandardTableDefinition TABLE_DEFINITION = @@ -49,219 +61,291 @@ public class DatasetTest { private static final ViewDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY"); private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION = ExternalTableDefinition.of(ImmutableList.of("URI"), Schema.of(), FormatOptions.csv()); - private static final Iterable TABLE_INFO_RESULTS = ImmutableList.of( - TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(), - TableInfo.builder(TableId.of("dataset", "table2"), VIEW_DEFINITION).build(), - TableInfo.builder(TableId.of("dataset", "table2"), EXTERNAL_TABLE_DEFINITION).build()); + private static final TableInfo TABLE_INFO1 = + TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(); + private static final TableInfo TABLE_INFO2 = + TableInfo.builder(TableId.of("dataset", "table2"), VIEW_DEFINITION).build(); + private static final TableInfo TABLE_INFO3 = + TableInfo.builder(TableId.of("dataset", "table3"), EXTERNAL_TABLE_DEFINITION).build(); - @Rule - public ExpectedException thrown = ExpectedException.none(); + private BigQuery 
serviceMockReturnsOptions = createStrictMock(BigQuery.class); + private BigQueryOptions mockOptions = createMock(BigQueryOptions.class); private BigQuery bigquery; + private Dataset expectedDataset; private Dataset dataset; - @Before - public void setUp() throws Exception { + private void initializeExpectedDataset(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); bigquery = createStrictMock(BigQuery.class); - dataset = new Dataset(bigquery, DATASET_INFO); + expectedDataset = new Dataset(serviceMockReturnsOptions, new Dataset.BuilderImpl(DATASET_INFO)); + } + + private void initializeDataset() { + dataset = new Dataset(bigquery, new Dataset.BuilderImpl(DATASET_INFO)); } @After public void tearDown() throws Exception { - verify(bigquery); + verify(bigquery, serviceMockReturnsOptions); } @Test - public void testInfo() throws Exception { - assertEquals(DATASET_INFO, dataset.info()); + public void testBuilder() { + initializeExpectedDataset(2); replay(bigquery); + Dataset builtDataset = Dataset.builder(serviceMockReturnsOptions, DATASET_ID) + .acl(ACCESS_RULES) + .creationTime(CREATION_TIME) + .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION) + .description(DESCRIPTION) + .etag(ETAG) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModified(LAST_MODIFIED) + .location(LOCATION) + .selfLink(SELF_LINK) + .build(); + assertEquals(DATASET_ID, builtDataset.datasetId()); + assertEquals(ACCESS_RULES, builtDataset.acl()); + assertEquals(CREATION_TIME, builtDataset.creationTime()); + assertEquals(DEFAULT_TABLE_EXPIRATION, builtDataset.defaultTableLifetime()); + assertEquals(DESCRIPTION, builtDataset.description()); + assertEquals(ETAG, builtDataset.etag()); + assertEquals(FRIENDLY_NAME, builtDataset.friendlyName()); + assertEquals(ID, builtDataset.id()); + assertEquals(LAST_MODIFIED, builtDataset.lastModified()); + assertEquals(LOCATION, builtDataset.location()); + assertEquals(SELF_LINK, builtDataset.selfLink()); } @Test - public void testBigQuery() throws Exception { - assertSame(bigquery, dataset.bigquery()); + public void testToBuilder() { + initializeExpectedDataset(4); replay(bigquery); + compareDataset(expectedDataset, expectedDataset.toBuilder().build()); } @Test public void testExists_True() throws Exception { + initializeExpectedDataset(1); BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()}; - expect(bigquery.getDataset(DATASET_ID, expectedOptions)).andReturn(DATASET_INFO); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getDataset(DATASET_INFO.datasetId(), expectedOptions)) + .andReturn(expectedDataset); replay(bigquery); + initializeDataset(); assertTrue(dataset.exists()); } @Test public void testExists_False() throws Exception { + initializeExpectedDataset(1); BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()}; - expect(bigquery.getDataset(DATASET_ID, expectedOptions)).andReturn(null); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getDataset(DATASET_INFO.datasetId(), expectedOptions)).andReturn(null); replay(bigquery); + initializeDataset(); assertFalse(dataset.exists()); } @Test public void testReload() throws Exception { + initializeExpectedDataset(4); DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build(); - expect(bigquery.getDataset(DATASET_ID.dataset())).andReturn(updatedInfo); + Dataset expectedDataset = + new Dataset(serviceMockReturnsOptions, new 
DatasetInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(expectedDataset); replay(bigquery); + initializeDataset(); Dataset updatedDataset = dataset.reload(); - assertSame(bigquery, updatedDataset.bigquery()); - assertEquals(updatedInfo, updatedDataset.info()); + compareDataset(expectedDataset, updatedDataset); } @Test public void testReloadNull() throws Exception { - expect(bigquery.getDataset(DATASET_ID.dataset())).andReturn(null); + initializeExpectedDataset(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(null); replay(bigquery); + initializeDataset(); assertNull(dataset.reload()); } @Test public void testReloadWithOptions() throws Exception { + initializeExpectedDataset(4); DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build(); - expect(bigquery.getDataset(DATASET_ID.dataset(), BigQuery.DatasetOption.fields())) - .andReturn(updatedInfo); + Dataset expectedDataset = + new Dataset(serviceMockReturnsOptions, new DatasetInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset(), BigQuery.DatasetOption.fields())) + .andReturn(expectedDataset); replay(bigquery); + initializeDataset(); Dataset updatedDataset = dataset.reload(BigQuery.DatasetOption.fields()); - assertSame(bigquery, updatedDataset.bigquery()); - assertEquals(updatedInfo, updatedDataset.info()); + compareDataset(expectedDataset, updatedDataset); } @Test - public void testUpdate() throws Exception { - DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build(); - expect(bigquery.update(updatedInfo)).andReturn(updatedInfo); + public void testUpdate() { + initializeExpectedDataset(4); + Dataset expectedUpdatedDataset = expectedDataset.toBuilder().description("Description").build(); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.update(eq(expectedDataset))).andReturn(expectedUpdatedDataset); replay(bigquery); - Dataset updatedDataset = dataset.update(updatedInfo); - assertSame(bigquery, updatedDataset.bigquery()); - assertEquals(updatedInfo, updatedDataset.info()); + initializeDataset(); + Dataset actualUpdatedDataset = dataset.update(); + compareDataset(expectedUpdatedDataset, actualUpdatedDataset); } @Test - public void testUpdateWithDifferentId() throws Exception { - DatasetInfo updatedInfo = DATASET_INFO.toBuilder() - .datasetId(DatasetId.of("dataset2")) - .description("Description") - .build(); + public void testUpdateWithOptions() { + initializeExpectedDataset(4); + Dataset expectedUpdatedDataset = expectedDataset.toBuilder().description("Description").build(); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.update(eq(expectedDataset), eq(BigQuery.DatasetOption.fields()))) + .andReturn(expectedUpdatedDataset); replay(bigquery); - thrown.expect(IllegalArgumentException.class); - dataset.update(updatedInfo); + initializeDataset(); + Dataset actualUpdatedDataset = dataset.update(BigQuery.DatasetOption.fields()); + compareDataset(expectedUpdatedDataset, actualUpdatedDataset); } @Test - public void testUpdateWithOptions() throws Exception { - DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build(); - expect(bigquery.update(updatedInfo, BigQuery.DatasetOption.fields())).andReturn(updatedInfo); + public void 
testDeleteTrue() { + initializeExpectedDataset(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(true); replay(bigquery); - Dataset updatedDataset = dataset.update(updatedInfo, BigQuery.DatasetOption.fields()); - assertSame(bigquery, updatedDataset.bigquery()); - assertEquals(updatedInfo, updatedDataset.info()); + initializeDataset(); + assertTrue(dataset.delete()); } @Test - public void testDelete() throws Exception { - expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(true); + public void testDeleteFalse() { + initializeExpectedDataset(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(false); replay(bigquery); - assertTrue(dataset.delete()); + initializeDataset(); + assertFalse(dataset.delete()); } @Test public void testList() throws Exception { - BigQueryOptions bigqueryOptions = createStrictMock(BigQueryOptions.class); - PageImpl tableInfoPage = new PageImpl<>(null, "c", TABLE_INFO_RESULTS); - expect(bigquery.listTables(DATASET_INFO.datasetId())).andReturn(tableInfoPage); - expect(bigquery.options()).andReturn(bigqueryOptions); - expect(bigqueryOptions.service()).andReturn(bigquery); - replay(bigquery, bigqueryOptions); + initializeExpectedDataset(4); + List
tableResults = ImmutableList.of( + new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO1)), + new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO2)), + new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO3))); + PageImpl
expectedPage = new PageImpl<>(null, "c", tableResults); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.listTables(DATASET_INFO.datasetId())).andReturn(expectedPage); + replay(bigquery); + initializeDataset(); Page
tablePage = dataset.list(); - Iterator tableInfoIterator = tableInfoPage.values().iterator(); - Iterator
tableIterator = tablePage.values().iterator(); - while (tableInfoIterator.hasNext() && tableIterator.hasNext()) { - assertEquals(tableInfoIterator.next(), tableIterator.next().info()); - } - assertFalse(tableInfoIterator.hasNext()); - assertFalse(tableIterator.hasNext()); - assertEquals(tableInfoPage.nextPageCursor(), tablePage.nextPageCursor()); - verify(bigqueryOptions); + assertArrayEquals(tableResults.toArray(), Iterables.toArray(tablePage.values(), Table.class)); + assertEquals(expectedPage.nextPageCursor(), tablePage.nextPageCursor()); } @Test public void testListWithOptions() throws Exception { - BigQueryOptions bigqueryOptions = createStrictMock(BigQueryOptions.class); - PageImpl tableInfoPage = new PageImpl<>(null, "c", TABLE_INFO_RESULTS); + initializeExpectedDataset(4); + List
<Table> tableResults = ImmutableList.of(
+ new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1)),
+ new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO2)),
+ new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO3)));
+ PageImpl<Table> expectedPage = new PageImpl<>(null, "c", tableResults);
+ expect(bigquery.options()).andReturn(mockOptions);
expect(bigquery.listTables(DATASET_INFO.datasetId(), BigQuery.TableListOption.maxResults(10L)))
- .andReturn(tableInfoPage);
- expect(bigquery.options()).andReturn(bigqueryOptions);
- expect(bigqueryOptions.service()).andReturn(bigquery);
- replay(bigquery, bigqueryOptions);
+ .andReturn(expectedPage);
+ replay(bigquery);
+ initializeDataset();
Page<Table> tablePage = dataset.list(BigQuery.TableListOption.maxResults(10L));
- Iterator<TableInfo> tableInfoIterator = tableInfoPage.values().iterator();
- Iterator<Table>
tableIterator = tablePage.values().iterator(); - while (tableInfoIterator.hasNext() && tableIterator.hasNext()) { - assertEquals(tableInfoIterator.next(), tableIterator.next().info()); - } - assertFalse(tableInfoIterator.hasNext()); - assertFalse(tableIterator.hasNext()); - assertEquals(tableInfoPage.nextPageCursor(), tablePage.nextPageCursor()); - verify(bigqueryOptions); + assertArrayEquals(tableResults.toArray(), Iterables.toArray(tablePage.values(), Table.class)); + assertEquals(expectedPage.nextPageCursor(), tablePage.nextPageCursor()); } @Test public void testGet() throws Exception { - TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(); - expect(bigquery.getTable(TableId.of("dataset", "table1"))).andReturn(info); + initializeExpectedDataset(2); + Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getTable(TABLE_INFO1.tableId())).andReturn(expectedTable); replay(bigquery); - Table table = dataset.get("table1"); + initializeDataset(); + Table table = dataset.get(TABLE_INFO1.tableId().table()); assertNotNull(table); - assertEquals(info, table.info()); + assertEquals(expectedTable, table); } @Test public void testGetNull() throws Exception { - expect(bigquery.getTable(TableId.of("dataset", "table1"))).andReturn(null); + initializeExpectedDataset(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getTable(TABLE_INFO1.tableId())).andReturn(null); replay(bigquery); - assertNull(dataset.get("table1")); + initializeDataset(); + assertNull(dataset.get(TABLE_INFO1.tableId().table())); } @Test public void testGetWithOptions() throws Exception { - TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(); - expect(bigquery.getTable(TableId.of("dataset", "table1"), BigQuery.TableOption.fields())) - .andReturn(info); + initializeExpectedDataset(2); + Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getTable(TABLE_INFO1.tableId(), BigQuery.TableOption.fields())) + .andReturn(expectedTable); replay(bigquery); - Table table = dataset.get("table1", BigQuery.TableOption.fields()); + initializeDataset(); + Table table = dataset.get(TABLE_INFO1.tableId().table(), BigQuery.TableOption.fields()); assertNotNull(table); - assertEquals(info, table.info()); + assertEquals(expectedTable, table); } @Test public void testCreateTable() throws Exception { - TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(); - expect(bigquery.create(info)).andReturn(info); + initializeExpectedDataset(2); + Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.create(TABLE_INFO1)).andReturn(expectedTable); replay(bigquery); - Table table = dataset.create("table1", TABLE_DEFINITION); - assertEquals(info, table.info()); + initializeDataset(); + Table table = dataset.create(TABLE_INFO1.tableId().table(), TABLE_DEFINITION); + assertEquals(expectedTable, table); } @Test public void testCreateTableWithOptions() throws Exception { - TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build(); - expect(bigquery.create(info, BigQuery.TableOption.fields())).andReturn(info); + initializeExpectedDataset(2); + 
Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.create(TABLE_INFO1, BigQuery.TableOption.fields())).andReturn(expectedTable); replay(bigquery); - Table table = dataset.create("table1", TABLE_DEFINITION, BigQuery.TableOption.fields()); - assertEquals(info, table.info()); + initializeDataset(); + Table table = dataset.create(TABLE_INFO1.tableId().table(), TABLE_DEFINITION, + BigQuery.TableOption.fields()); + assertEquals(expectedTable, table); } @Test public void testStaticGet() throws Exception { - expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(DATASET_INFO); + initializeExpectedDataset(3); + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(expectedDataset); replay(bigquery); Dataset loadedDataset = Dataset.get(bigquery, DATASET_INFO.datasetId().dataset()); - assertNotNull(loadedDataset); - assertEquals(DATASET_INFO, loadedDataset.info()); + compareDataset(expectedDataset, loadedDataset); } @Test public void testStaticGetNull() throws Exception { + initializeExpectedDataset(1); expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(null); replay(bigquery); assertNull(Dataset.get(bigquery, DATASET_INFO.datasetId().dataset())); @@ -269,12 +353,33 @@ public void testStaticGetNull() throws Exception { @Test public void testStaticGetWithOptions() throws Exception { + initializeExpectedDataset(3); expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset(), BigQuery.DatasetOption.fields())) - .andReturn(DATASET_INFO); + .andReturn(expectedDataset); replay(bigquery); - Dataset loadedDataset = Dataset.get(bigquery, DATASET_INFO.datasetId().dataset(), - BigQuery.DatasetOption.fields()); - assertNotNull(loadedDataset); - assertEquals(DATASET_INFO, loadedDataset.info()); + Dataset loadedDataset = + Dataset.get(bigquery, DATASET_INFO.datasetId().dataset(), BigQuery.DatasetOption.fields()); + compareDataset(expectedDataset, loadedDataset); + } + + private void compareDataset(Dataset expected, Dataset value) { + assertEquals(expected, value); + compareDatasetInfo(expected, value); + assertEquals(expected.bigquery().options(), value.bigquery().options()); + } + + private void compareDatasetInfo(DatasetInfo expected, DatasetInfo value) { + assertEquals(expected, value); + assertEquals(expected.datasetId(), value.datasetId()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.friendlyName(), value.friendlyName()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.acl(), value.acl()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.defaultTableLifetime(), value.defaultTableLifetime()); + assertEquals(expected.lastModified(), value.lastModified()); } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java index 0928c04ea6d2..dbd8cda02b9f 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java @@ -58,7 +58,7 @@ public class ITBigQueryTest { - private static final Logger log = Logger.getLogger(ITBigQueryTest.class.getName()); 
+ private static final Logger LOG = Logger.getLogger(ITBigQueryTest.class.getName()); private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String DESCRIPTION = "Test dataset"; private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName(); @@ -157,10 +157,9 @@ public static void beforeClass() throws IOException, InterruptedException { .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .schema(TABLE_SCHEMA) .build(); - JobInfo job = bigquery.create(JobInfo.of(configuration)); - while (job.status().state() != JobStatus.State.DONE) { + Job job = bigquery.create(JobInfo.of(configuration)); + while (!job.isDone()) { Thread.sleep(1000); - job = bigquery.getJob(job.jobId()); } assertNull(job.status().error()); } @@ -171,15 +170,15 @@ public static void afterClass() throws ExecutionException, InterruptedException RemoteBigQueryHelper.forceDelete(bigquery, DATASET); } if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS)) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); + if (LOG.isLoggable(Level.WARNING)) { + LOG.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); } } } @Test public void testGetDataset() { - DatasetInfo dataset = bigquery.getDataset(DATASET); + Dataset dataset = bigquery.getDataset(DATASET); assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); assertEquals(DATASET, dataset.datasetId().dataset()); assertEquals(DESCRIPTION, dataset.description()); @@ -192,7 +191,7 @@ public void testGetDataset() { @Test public void testGetDatasetWithSelectedFields() { - DatasetInfo dataset = bigquery.getDataset(DATASET, + Dataset dataset = bigquery.getDataset(DATASET, DatasetOption.fields(DatasetField.CREATION_TIME)); assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); assertEquals(DATASET, dataset.datasetId().dataset()); @@ -210,29 +209,29 @@ public void testGetDatasetWithSelectedFields() { @Test public void testUpdateDataset() { - DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + Dataset dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) .description("Some Description") .build()); assertNotNull(dataset); assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); assertEquals("Some Description", dataset.description()); - DatasetInfo updatedDataset = + Dataset updatedDataset = bigquery.update(dataset.toBuilder().description("Updated Description").build()); assertEquals("Updated Description", updatedDataset.description()); - assertTrue(bigquery.delete(OTHER_DATASET)); + assertTrue(dataset.delete()); } @Test public void testUpdateDatasetWithSelectedFields() { - DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + Dataset dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) .description("Some Description") .build()); assertNotNull(dataset); assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); assertEquals("Some Description", dataset.description()); - DatasetInfo updatedDataset = + Dataset updatedDataset = bigquery.update(dataset.toBuilder().description("Updated Description").build(), DatasetOption.fields(DatasetField.DESCRIPTION)); assertEquals("Updated Description", updatedDataset.description()); 
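The integration-test changes above switch from plain `DatasetInfo` results to the functional `Dataset` object, so updates and deletes can be driven through the returned handle rather than through extra `bigquery.delete(name)` calls. A minimal sketch of that usage, assuming a placeholder dataset name and the `BigQueryOptions.defaultInstance().service()` entry point (an illustrative aside, not code from this PR):

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Dataset;
import com.google.gcloud.bigquery.DatasetInfo;

public class DatasetUsageSketch {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // create() now returns a functional Dataset ("example_dataset" is a placeholder name)
    Dataset dataset = bigquery.create(DatasetInfo.builder("example_dataset")
        .description("Some Description")
        .build());
    // update() also hands back a Dataset with the refreshed metadata
    Dataset updated =
        bigquery.update(dataset.toBuilder().description("Updated Description").build());
    System.out.println(updated.description());
    // Delete directly through the object instead of bigquery.delete(datasetName)
    if (updated.delete()) {
      System.out.println("Dataset deleted");
    }
  }
}
```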
@@ -245,7 +244,7 @@ public void testUpdateDatasetWithSelectedFields() { assertNull(updatedDataset.lastModified()); assertNull(updatedDataset.location()); assertNull(updatedDataset.selfLink()); - assertTrue(bigquery.delete(OTHER_DATASET)); + assertTrue(dataset.delete()); } @Test @@ -258,21 +257,21 @@ public void testCreateAndGetTable() { String tableName = "test_create_and_get_table"; TableId tableId = TableId.of(DATASET, tableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); - TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, tableDefinition)); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(tableName, createdTableInfo.tableId().table()); - TableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); - assertNotNull(remoteTableInfo); - assertTrue(remoteTableInfo.definition() instanceof StandardTableDefinition); - assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); - assertEquals(TableDefinition.Type.TABLE, remoteTableInfo.definition().type()); - assertEquals(TABLE_SCHEMA, remoteTableInfo.definition().schema()); - assertNotNull(remoteTableInfo.creationTime()); - assertNotNull(remoteTableInfo.lastModifiedTime()); - assertNotNull(remoteTableInfo.definition().numBytes()); - assertNotNull(remoteTableInfo.definition().numRows()); - assertTrue(bigquery.delete(DATASET, tableName)); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof StandardTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.definition().type()); + assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); + assertNotNull(remoteTable.creationTime()); + assertNotNull(remoteTable.lastModifiedTime()); + assertNotNull(remoteTable.definition().numBytes()); + assertNotNull(remoteTable.definition().numRows()); + assertTrue(remoteTable.delete()); } @Test @@ -280,22 +279,22 @@ public void testCreateAndGetTableWithSelectedField() { String tableName = "test_create_and_get_selected_fields_table"; TableId tableId = TableId.of(DATASET, tableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); - TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, tableDefinition)); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(tableName, createdTableInfo.tableId().table()); - TableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName, + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName, TableOption.fields(TableField.CREATION_TIME)); - assertNotNull(remoteTableInfo); - assertTrue(remoteTableInfo.definition() instanceof StandardTableDefinition); - assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); - assertEquals(TableDefinition.Type.TABLE, remoteTableInfo.definition().type()); - assertNotNull(remoteTableInfo.creationTime()); - 
assertNull(remoteTableInfo.definition().schema()); - assertNull(remoteTableInfo.lastModifiedTime()); - assertNull(remoteTableInfo.definition().numBytes()); - assertNull(remoteTableInfo.definition().numRows()); - assertTrue(bigquery.delete(DATASET, tableName)); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof StandardTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.definition().type()); + assertNotNull(remoteTable.creationTime()); + assertNull(remoteTable.definition().schema()); + assertNull(remoteTable.lastModifiedTime()); + assertNull(remoteTable.definition().numBytes()); + assertNull(remoteTable.definition().numRows()); + assertTrue(remoteTable.delete()); } @Test @@ -305,15 +304,15 @@ public void testCreateExternalTable() throws InterruptedException { ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of( "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(tableName, createdTableInfo.tableId().table()); - TableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); - assertNotNull(remoteTableInfo); - assertTrue(remoteTableInfo.definition() instanceof ExternalTableDefinition); - assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); - assertEquals(TABLE_SCHEMA, remoteTableInfo.definition().schema()); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof ExternalTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); QueryRequest request = QueryRequest.builder( "SELECT TimestampField, StringField, IntegerField, BooleanField FROM " + DATASET + "." + tableName) @@ -345,7 +344,7 @@ public void testCreateExternalTable() throws InterruptedException { rowCount++; } assertEquals(4, rowCount); - assertTrue(bigquery.delete(DATASET, tableName)); + assertTrue(remoteTable.delete()); } @Test @@ -356,14 +355,14 @@ public void testCreateViewTable() throws InterruptedException { ViewDefinition.of("SELECT TimestampField, StringField, BooleanField FROM " + DATASET + "." 
+ TABLE_ID.table()); TableInfo tableInfo = TableInfo.of(tableId, viewDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(tableName, createdTableInfo.tableId().table()); - TableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); - assertNotNull(remoteTableInfo); - assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); - assertTrue(remoteTableInfo.definition() instanceof ViewDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertTrue(remoteTable.definition() instanceof ViewDefinition); Schema expectedSchema = Schema.builder() .addField( Field.builder("TimestampField", Field.Type.timestamp()) @@ -378,7 +377,7 @@ public void testCreateViewTable() throws InterruptedException { .mode(Field.Mode.NULLABLE) .build()) .build(); - assertEquals(expectedSchema, remoteTableInfo.definition().schema()); + assertEquals(expectedSchema, remoteTable.definition().schema()); QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName) .defaultDataset(DatasetId.of(DATASET)) .maxWaitTime(60000L) @@ -403,7 +402,7 @@ public void testCreateViewTable() throws InterruptedException { rowCount++; } assertEquals(2, rowCount); - assertTrue(bigquery.delete(DATASET, tableName)); + assertTrue(remoteTable.delete()); } @Test @@ -411,18 +410,18 @@ public void testListTables() { String tableName = "test_list_tables"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - Page tables = bigquery.listTables(DATASET); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Page
<Table> tables = bigquery.listTables(DATASET);
boolean found = false;
- Iterator<TableInfo> tableIterator = tables.values().iterator();
+ Iterator<Table>
tableIterator = tables.values().iterator(); while (tableIterator.hasNext() && !found) { - if (tableIterator.next().tableId().equals(createdTableInfo.tableId())) { + if (tableIterator.next().tableId().equals(createdTable.tableId())) { found = true; } } assertTrue(found); - assertTrue(bigquery.delete(DATASET, tableName)); + assertTrue(createdTable.delete()); } @Test @@ -430,15 +429,15 @@ public void testUpdateTable() { String tableName = "test_update_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - TableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder() - .description("newDescription").build()); - assertEquals(DATASET, updatedTableInfo.tableId().dataset()); - assertEquals(tableName, updatedTableInfo.tableId().table()); - assertEquals(TABLE_SCHEMA, updatedTableInfo.definition().schema()); - assertEquals("newDescription", updatedTableInfo.description()); - assertTrue(bigquery.delete(DATASET, tableName)); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Table updatedTable = + bigquery.update(tableInfo.toBuilder().description("newDescription").build()); + assertEquals(DATASET, updatedTable.tableId().dataset()); + assertEquals(tableName, updatedTable.tableId().table()); + assertEquals(TABLE_SCHEMA, updatedTable.definition().schema()); + assertEquals("newDescription", updatedTable.description()); + assertTrue(updatedTable.delete()); } @Test @@ -446,19 +445,19 @@ public void testUpdateTableWithSelectedFields() { String tableName = "test_update_with_selected_fields_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - TableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder().description("newDescr") - .build(), TableOption.fields(TableField.DESCRIPTION)); - assertTrue(updatedTableInfo.definition() instanceof StandardTableDefinition); - assertEquals(DATASET, updatedTableInfo.tableId().dataset()); - assertEquals(tableName, updatedTableInfo.tableId().table()); - assertEquals("newDescr", updatedTableInfo.description()); - assertNull(updatedTableInfo.definition().schema()); - assertNull(updatedTableInfo.lastModifiedTime()); - assertNull(updatedTableInfo.definition().numBytes()); - assertNull(updatedTableInfo.definition().numRows()); - assertTrue(bigquery.delete(DATASET, tableName)); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Table updatedTable = bigquery.update(tableInfo.toBuilder().description("newDescr").build(), + TableOption.fields(TableField.DESCRIPTION)); + assertTrue(updatedTable.definition() instanceof StandardTableDefinition); + assertEquals(DATASET, updatedTable.tableId().dataset()); + assertEquals(tableName, updatedTable.tableId().table()); + assertEquals("newDescr", updatedTable.description()); + assertNull(updatedTable.definition().schema()); + assertNull(updatedTable.lastModifiedTime()); + assertNull(updatedTable.definition().numBytes()); + assertNull(updatedTable.definition().numRows()); + assertTrue(createdTable.delete()); } @Test @@ -544,14 +543,14 @@ public void testInsertAllWithSuffix() throws InterruptedException { assertFalse(response.hasErrors()); 
assertEquals(0, response.insertErrors().size()); String newTableName = tableName + "_suffix"; - TableInfo suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); + Table suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); // wait until the new table is created. If the table is never created the test will time-out while (suffixTable == null) { Thread.sleep(1000L); suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); } assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); - assertTrue(bigquery.delete(TableId.of(DATASET, newTableName))); + assertTrue(suffixTable.delete()); } @Test @@ -655,15 +654,15 @@ public void testQuery() throws InterruptedException { rowCount++; } assertEquals(2, rowCount); - JobInfo queryJob = bigquery.getJob(response.jobId()); + Job queryJob = bigquery.getJob(response.jobId()); JobStatistics.QueryStatistics statistics = queryJob.statistics(); assertNotNull(statistics.queryPlan()); } @Test public void testListJobs() { - Page jobs = bigquery.listJobs(); - for (JobInfo job : jobs.values()) { + Page jobs = bigquery.listJobs(); + for (Job job : jobs.values()) { assertNotNull(job.jobId()); assertNotNull(job.statistics()); assertNotNull(job.status()); @@ -674,8 +673,8 @@ public void testListJobs() { @Test public void testListJobsWithSelectedFields() { - Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); - for (JobInfo job : jobs.values()) { + Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); + for (Job job : jobs.values()) { assertNotNull(job.jobId()); assertNotNull(job.status()); assertNotNull(job.userEmail()); @@ -691,16 +690,15 @@ public void testCreateAndGetJob() throws InterruptedException { TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(sourceTableName, createdTableInfo.tableId().table()); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobConfiguration copyJobConfiguration = CopyJobConfiguration.of(destinationTable, sourceTable); - JobInfo job = JobInfo.of(copyJobConfiguration); - JobInfo createdJob = bigquery.create(job); - JobInfo remoteJob = bigquery.getJob(createdJob.jobId()); + Job createdJob = bigquery.create(JobInfo.of(copyJobConfiguration)); + Job remoteJob = bigquery.getJob(createdJob.jobId()); assertEquals(createdJob.jobId(), remoteJob.jobId()); CopyJobConfiguration createdConfiguration = createdJob.configuration(); CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); @@ -713,7 +711,7 @@ public void testCreateAndGetJob() throws InterruptedException { assertNotNull(remoteJob.status()); assertEquals(createdJob.selfLink(), remoteJob.selfLink()); assertEquals(createdJob.userEmail(), remoteJob.userEmail()); - assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(createdTable.delete()); assertTrue(bigquery.delete(DATASET, destinationTableName)); } @@ -724,14 +722,13 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException 
TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(sourceTableName, createdTableInfo.tableId().table()); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); - JobInfo createdJob = - bigquery.create(JobInfo.of(configuration), JobOption.fields(JobField.ETAG)); + Job createdJob = bigquery.create(JobInfo.of(configuration), JobOption.fields(JobField.ETAG)); CopyJobConfiguration createdConfiguration = createdJob.configuration(); assertNotNull(createdJob.jobId()); assertNotNull(createdConfiguration.sourceTables()); @@ -741,7 +738,7 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException assertNull(createdJob.status()); assertNull(createdJob.selfLink()); assertNull(createdJob.userEmail()); - JobInfo remoteJob = bigquery.getJob(createdJob.jobId(), JobOption.fields(JobField.ETAG)); + Job remoteJob = bigquery.getJob(createdJob.jobId(), JobOption.fields(JobField.ETAG)); CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); assertEquals(createdJob.jobId(), remoteJob.jobId()); assertEquals(createdConfiguration.sourceTables(), remoteConfiguration.sourceTables()); @@ -753,7 +750,7 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException assertNull(remoteJob.status()); assertNull(remoteJob.selfLink()); assertNull(remoteJob.userEmail()); - assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(createdTable.delete()); assertTrue(bigquery.delete(DATASET, destinationTableName)); } @@ -764,25 +761,24 @@ public void testCopyJob() throws InterruptedException { TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); - TableInfo createdTableInfo = bigquery.create(tableInfo); - assertNotNull(createdTableInfo); - assertEquals(DATASET, createdTableInfo.tableId().dataset()); - assertEquals(sourceTableName, createdTableInfo.tableId().table()); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); - JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); - while (remoteJob.status().state() != JobStatus.State.DONE) { + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteJob.isDone()) { Thread.sleep(1000); - remoteJob = bigquery.getJob(remoteJob.jobId()); } assertNull(remoteJob.status().error()); - TableInfo remoteTableInfo = bigquery.getTable(DATASET, destinationTableName); - assertNotNull(remoteTableInfo); - assertEquals(destinationTable.dataset(), remoteTableInfo.tableId().dataset()); - 
assertEquals(destinationTableName, remoteTableInfo.tableId().table()); - assertEquals(TABLE_SCHEMA, remoteTableInfo.definition().schema()); - assertTrue(bigquery.delete(DATASET, sourceTableName)); - assertTrue(bigquery.delete(DATASET, destinationTableName)); + Table remoteTable = bigquery.getTable(DATASET, destinationTableName); + assertNotNull(remoteTable); + assertEquals(destinationTable.dataset(), remoteTable.tableId().dataset()); + assertEquals(destinationTableName, remoteTable.tableId().table()); + assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); + assertTrue(createdTable.delete()); + assertTrue(remoteTable.delete()); } @Test @@ -797,10 +793,9 @@ public void testQueryJob() throws InterruptedException { .defaultDataset(DatasetId.of(DATASET)) .destinationTable(destinationTable) .build(); - JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); - while (remoteJob.status().state() != JobStatus.State.DONE) { + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteJob.isDone()) { Thread.sleep(1000); - remoteJob = bigquery.getJob(remoteJob.jobId()); } assertNull(remoteJob.status().error()); @@ -826,7 +821,7 @@ public void testQueryJob() throws InterruptedException { } assertEquals(2, rowCount); assertTrue(bigquery.delete(DATASET, tableName)); - JobInfo queryJob = bigquery.getJob(remoteJob.jobId()); + Job queryJob = bigquery.getJob(remoteJob.jobId()); JobStatistics.QueryStatistics statistics = queryJob.statistics(); assertNotNull(statistics.queryPlan()); } @@ -839,11 +834,9 @@ public void testExtractJob() throws InterruptedException { LoadJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE) .schema(SIMPLE_SCHEMA) .build(); - JobInfo remoteLoadJob = - bigquery.create(JobInfo.of(configuration)); - while (remoteLoadJob.status().state() != JobStatus.State.DONE) { + Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteLoadJob.isDone()) { Thread.sleep(1000); - remoteLoadJob = bigquery.getJob(remoteLoadJob.jobId()); } assertNull(remoteLoadJob.status().error()); @@ -851,11 +844,9 @@ public void testExtractJob() throws InterruptedException { ExtractJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE) .printHeader(false) .build(); - JobInfo extractJob = JobInfo.of(extractConfiguration); - JobInfo remoteExtractJob = bigquery.create(extractJob); - while (remoteExtractJob.status().state() != JobStatus.State.DONE) { + Job remoteExtractJob = bigquery.create(JobInfo.of(extractConfiguration)); + while (!remoteExtractJob.isDone()) { Thread.sleep(1000); - remoteExtractJob = bigquery.getJob(remoteExtractJob.jobId()); } assertNull(remoteExtractJob.status().error()); assertEquals(CSV_CONTENT, @@ -872,11 +863,10 @@ public void testCancelJob() throws InterruptedException { .defaultDataset(DatasetId.of(DATASET)) .destinationTable(destinationTable) .build(); - JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); - assertTrue(bigquery.cancel(remoteJob.jobId())); - while (remoteJob.status().state() != JobStatus.State.DONE) { + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + assertTrue(remoteJob.cancel()); + while (!remoteJob.isDone()) { Thread.sleep(1000); - remoteJob = bigquery.getJob(remoteJob.jobId()); } assertNull(remoteJob.status().error()); } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java index 90b602d978e0..1aeff06272da 100644 --- 
a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java @@ -16,161 +16,264 @@ package com.google.gcloud.bigquery; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createStrictMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import org.junit.After; -import org.junit.Before; import org.junit.Test; public class JobTest { - private static final JobId JOB_ID = JobId.of("dataset", "job"); + private static final JobId JOB_ID = JobId.of("project", "job"); private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); - private static final JobInfo JOB_INFO = - JobInfo.of(JOB_ID, CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2)); + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE); + private static final JobStatistics COPY_JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final CopyJobConfiguration COPY_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2); + private static final JobInfo JOB_INFO = JobInfo.builder(COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private BigQuery serviceMockReturnsOptions = createStrictMock(BigQuery.class); + private BigQueryOptions mockOptions = createMock(BigQueryOptions.class); private BigQuery bigquery; + private Job expectedJob; private Job job; - @Before - public void setUp() throws Exception { + private void initializeExpectedJob(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); bigquery = createStrictMock(BigQuery.class); - job = new Job(bigquery, JOB_INFO); + expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(JOB_INFO)); + } + + private void initializeJob() { + job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); } @After public void tearDown() throws Exception { - verify(bigquery); + verify(bigquery, serviceMockReturnsOptions); } @Test - public void testInfo() throws Exception { - assertEquals(JOB_INFO, job.info()); + public void testBuilder() { + initializeExpectedJob(2); replay(bigquery); + Job builtJob = Job.builder(serviceMockReturnsOptions, COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + assertEquals(ETAG, builtJob.etag()); + assertEquals(ID, builtJob.id()); + assertEquals(SELF_LINK, builtJob.selfLink()); + assertEquals(EMAIL, builtJob.userEmail()); + assertEquals(JOB_ID, builtJob.jobId()); + assertEquals(JOB_STATUS, builtJob.status()); + assertEquals(COPY_CONFIGURATION, 
builtJob.configuration()); + assertEquals(COPY_JOB_STATISTICS, builtJob.statistics()); + assertSame(serviceMockReturnsOptions, builtJob.bigquery()); } @Test - public void testBigQuery() throws Exception { - assertSame(bigquery, job.bigquery()); + public void testToBuilder() { + initializeExpectedJob(4); replay(bigquery); + compareJob(expectedJob, expectedJob.toBuilder().build()); } @Test public void testExists_True() throws Exception { + initializeExpectedJob(1); BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; - expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(JOB_INFO); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(expectedJob); replay(bigquery); + initializeJob(); assertTrue(job.exists()); } @Test public void testExists_False() throws Exception { + initializeExpectedJob(1); BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); replay(bigquery); + initializeJob(); assertFalse(job.exists()); } @Test public void testIsDone_True() throws Exception { + initializeExpectedJob(2); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = createStrictMock(JobStatus.class); expect(status.state()).andReturn(JobStatus.State.DONE); - BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) - .andReturn(JOB_INFO.toBuilder().status(status).build()); + .andReturn(expectedJob.toBuilder().status(status).build()); replay(status, bigquery); + initializeJob(); assertTrue(job.isDone()); verify(status); } @Test public void testIsDone_False() throws Exception { + initializeExpectedJob(2); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = createStrictMock(JobStatus.class); expect(status.state()).andReturn(JobStatus.State.RUNNING); - BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) - .andReturn(JOB_INFO.toBuilder().status(status).build()); + .andReturn(expectedJob.toBuilder().status(status).build()); replay(status, bigquery); + initializeJob(); assertFalse(job.isDone()); verify(status); } @Test public void testIsDone_NotExists() throws Exception { + initializeExpectedJob(1); BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); replay(bigquery); + initializeJob(); assertFalse(job.isDone()); } @Test public void testReload() throws Exception { + initializeExpectedJob(4); JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); - expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(updatedInfo); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(expectedJob); replay(bigquery); + initializeJob(); Job updatedJob = job.reload(); - assertSame(bigquery, updatedJob.bigquery()); - assertEquals(updatedInfo, 
updatedJob.info()); + compareJob(expectedJob, updatedJob); } @Test public void testReloadNull() throws Exception { + initializeExpectedJob(1); + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(null); replay(bigquery); + initializeJob(); assertNull(job.reload()); } @Test public void testReloadWithOptions() throws Exception { + initializeExpectedJob(4); JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getJob(JOB_INFO.jobId().job(), BigQuery.JobOption.fields())) - .andReturn(updatedInfo); + .andReturn(expectedJob); replay(bigquery); + initializeJob(); Job updatedJob = job.reload(BigQuery.JobOption.fields()); - assertSame(bigquery, updatedJob.bigquery()); - assertEquals(updatedInfo, updatedJob.info()); + compareJob(expectedJob, updatedJob); } @Test public void testCancel() throws Exception { + initializeExpectedJob(1); + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.cancel(JOB_INFO.jobId())).andReturn(true); replay(bigquery); + initializeJob(); assertTrue(job.cancel()); } @Test public void testGet() throws Exception { - expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(JOB_INFO); + initializeExpectedJob(3); + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(expectedJob); replay(bigquery); Job loadedJob = Job.get(bigquery, JOB_INFO.jobId().job()); - assertNotNull(loadedJob); - assertEquals(JOB_INFO, loadedJob.info()); + compareJob(expectedJob, loadedJob); } @Test public void testGetNull() throws Exception { + initializeExpectedJob(1); expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(null); replay(bigquery); - assertNull(Job.get(bigquery, JOB_INFO.jobId().job())); + Job loadedJob = Job.get(bigquery, JOB_INFO.jobId().job()); + assertNull(loadedJob); } @Test public void testGetWithOptions() throws Exception { + initializeExpectedJob(3); expect(bigquery.getJob(JOB_INFO.jobId().job(), BigQuery.JobOption.fields())) - .andReturn(JOB_INFO); + .andReturn(expectedJob); replay(bigquery); Job loadedJob = Job.get(bigquery, JOB_INFO.jobId().job(), BigQuery.JobOption.fields()); - assertNotNull(loadedJob); - assertEquals(JOB_INFO, loadedJob.info()); + compareJob(expectedJob, loadedJob); + } + + @Test + public void testBigquery() { + initializeExpectedJob(1); + replay(bigquery); + assertSame(serviceMockReturnsOptions, expectedJob.bigquery()); + } + + private void compareJob(Job expected, Job value) { + assertEquals(expected, value); + compareJobInfo(expected, value); + assertEquals(expected.bigquery().options(), value.bigquery().options()); + } + + private void compareJobInfo(JobInfo expected, JobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.status(), value.status()); + assertEquals(expected.statistics(), value.statistics()); + assertEquals(expected.userEmail(), value.userEmail()); + assertEquals(expected.configuration(), value.configuration()); } } diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java index 62a88c1860cd..267ae161b7aa 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java @@ -65,6 +65,7 @@ public class RemoteBigQueryHelperTest { @Rule public ExpectedException thrown = ExpectedException.none(); + @Test public void testForceDelete() throws InterruptedException, ExecutionException { BigQuery bigqueryMock = EasyMock.createMock(BigQuery.class); diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java index 3b16593a7f79..270c35c10efd 100644 --- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java @@ -16,13 +16,14 @@ package com.google.gcloud.bigquery; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.eq; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @@ -35,16 +36,21 @@ import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; import org.junit.After; -import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import java.util.Iterator; import java.util.List; public class TableTest { + private static final String ETAG = "etag"; + private static final String ID = "project:dataset:table1"; + private static final String SELF_LINK = "selfLink"; + private static final String FRIENDLY_NAME = "friendlyName"; + private static final String DESCRIPTION = "description"; + private static final Long CREATION_TIME = 10L; + private static final Long EXPIRATION_TIME = 100L; + private static final Long LAST_MODIFIED_TIME = 20L; private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = @@ -77,148 +83,198 @@ public class TableTest { private static final Iterable> ROWS = ImmutableList.of( (List) ImmutableList.of(FIELD_VALUE1), ImmutableList.of(FIELD_VALUE2)); - @Rule - public ExpectedException thrown = ExpectedException.none(); + private BigQuery serviceMockReturnsOptions = createStrictMock(BigQuery.class); + private BigQueryOptions mockOptions = createMock(BigQueryOptions.class); private BigQuery bigquery; + private Table expectedTable; private Table table; - @Before - public void setUp() throws Exception { + private void initializeExpectedTable(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); bigquery = createStrictMock(BigQuery.class); - table = new Table(bigquery, TABLE_INFO); + expectedTable = new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO)); + } + + private void initializeTable() { + table = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO)); } @After public 
void tearDown() throws Exception { - verify(bigquery); + verify(bigquery, serviceMockReturnsOptions); } @Test - public void testInfo() throws Exception { - assertEquals(TABLE_INFO, table.info()); + public void testBuilder() { + initializeExpectedTable(2); replay(bigquery); + Table builtTable = Table.builder(serviceMockReturnsOptions, TABLE_ID1, TABLE_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .selfLink(SELF_LINK) + .build(); + assertEquals(TABLE_ID1, builtTable.tableId()); + assertEquals(CREATION_TIME, builtTable.creationTime()); + assertEquals(DESCRIPTION, builtTable.description()); + assertEquals(ETAG, builtTable.etag()); + assertEquals(EXPIRATION_TIME, builtTable.expirationTime()); + assertEquals(FRIENDLY_NAME, builtTable.friendlyName()); + assertEquals(ID, builtTable.id()); + assertEquals(LAST_MODIFIED_TIME, builtTable.lastModifiedTime()); + assertEquals(TABLE_DEFINITION, builtTable.definition()); + assertEquals(SELF_LINK, builtTable.selfLink()); + assertSame(serviceMockReturnsOptions, builtTable.bigquery()); } @Test - public void testBigQuery() throws Exception { - assertSame(bigquery, table.bigquery()); + public void testToBuilder() { + initializeExpectedTable(4); replay(bigquery); + compareTable(expectedTable, expectedTable.toBuilder().build()); } @Test public void testExists_True() throws Exception { + initializeExpectedTable(1); BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()}; - expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(TABLE_INFO); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(expectedTable); replay(bigquery); + initializeTable(); assertTrue(table.exists()); } @Test public void testExists_False() throws Exception { + initializeExpectedTable(1); BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()}; + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(null); replay(bigquery); + initializeTable(); assertFalse(table.exists()); } @Test public void testReload() throws Exception { + initializeExpectedTable(4); TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); - expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(updatedInfo); + Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(expectedTable); replay(bigquery); + initializeTable(); Table updatedTable = table.reload(); - assertSame(bigquery, updatedTable.bigquery()); - assertEquals(updatedInfo, updatedTable.info()); + compareTable(expectedTable, updatedTable); } @Test public void testReloadNull() throws Exception { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null); replay(bigquery); + initializeTable(); assertNull(table.reload()); } @Test public void testReloadWithOptions() throws Exception { + initializeExpectedTable(4); TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); + Table expectedTable = + new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(updatedInfo)); + 
expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields())) - .andReturn(updatedInfo); + .andReturn(expectedTable); replay(bigquery); + initializeTable(); Table updatedTable = table.reload(BigQuery.TableOption.fields()); - assertSame(bigquery, updatedTable.bigquery()); - assertEquals(updatedInfo, updatedTable.info()); - } - - @Test - public void testUpdate() throws Exception { - TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); - expect(bigquery.update(updatedInfo)).andReturn(updatedInfo); - replay(bigquery); - Table updatedTable = table.update(updatedInfo); - assertSame(bigquery, updatedTable.bigquery()); - assertEquals(updatedInfo, updatedTable.info()); + compareTable(expectedTable, updatedTable); } @Test - public void testUpdateWithDifferentId() throws Exception { - TableInfo updatedInfo = TABLE_INFO.toBuilder() - .tableId(TableId.of("dataset", "table3")) - .description("Description") - .build(); + public void testUpdate() { + initializeExpectedTable(4); + Table expectedUpdatedTable = expectedTable.toBuilder().description("Description").build(); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.update(eq(expectedTable))).andReturn(expectedUpdatedTable); replay(bigquery); - thrown.expect(IllegalArgumentException.class); - table.update(updatedInfo); + initializeTable(); + Table actualUpdatedTable = table.update(); + compareTable(expectedUpdatedTable, actualUpdatedTable); } @Test - public void testUpdateWithDifferentDatasetId() throws Exception { - TableInfo updatedInfo = TABLE_INFO.toBuilder() - .tableId(TableId.of("dataset1", "table1")) - .description("Description") - .build(); + public void testUpdateWithOptions() { + initializeExpectedTable(4); + Table expectedUpdatedTable = expectedTable.toBuilder().description("Description").build(); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.update(eq(expectedTable), eq(BigQuery.TableOption.fields()))) + .andReturn(expectedUpdatedTable); replay(bigquery); - thrown.expect(IllegalArgumentException.class); - table.update(updatedInfo); + initializeTable(); + Table actualUpdatedTable = table.update(BigQuery.TableOption.fields()); + compareTable(expectedUpdatedTable, actualUpdatedTable); } @Test - public void testUpdateWithOptions() throws Exception { - TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); - expect(bigquery.update(updatedInfo, BigQuery.TableOption.fields())).andReturn(updatedInfo); + public void testDeleteTrue() { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(true); replay(bigquery); - Table updatedTable = table.update(updatedInfo, BigQuery.TableOption.fields()); - assertSame(bigquery, updatedTable.bigquery()); - assertEquals(updatedInfo, updatedTable.info()); + initializeTable(); + assertTrue(table.delete()); } @Test - public void testDelete() throws Exception { - expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(true); + public void testDeleteFalse() { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(false); replay(bigquery); - assertTrue(table.delete()); + initializeTable(); + assertFalse(table.delete()); } @Test public void testInsert() throws Exception { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); 
expect(bigquery.insertAll(INSERT_ALL_REQUEST)).andReturn(EMPTY_INSERT_ALL_RESPONSE); replay(bigquery); + initializeTable(); InsertAllResponse response = table.insert(ROWS_TO_INSERT); assertSame(EMPTY_INSERT_ALL_RESPONSE, response); } @Test public void testInsertComplete() throws Exception { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); expect(bigquery.insertAll(INSERT_ALL_REQUEST_COMPLETE)).andReturn(EMPTY_INSERT_ALL_RESPONSE); replay(bigquery); + initializeTable(); InsertAllResponse response = table.insert(ROWS_TO_INSERT, true, true); assertSame(EMPTY_INSERT_ALL_RESPONSE, response); } @Test public void testList() throws Exception { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); PageImpl> tableDataPage = new PageImpl<>(null, "c", ROWS); expect(bigquery.listTableData(TABLE_ID1)).andReturn(tableDataPage); replay(bigquery); + initializeTable(); Page> dataPage = table.list(); Iterator> tableDataIterator = tableDataPage.values().iterator(); Iterator> dataIterator = dataPage.values().iterator(); @@ -227,10 +283,13 @@ public void testList() throws Exception { @Test public void testListWithOptions() throws Exception { + initializeExpectedTable(1); + expect(bigquery.options()).andReturn(mockOptions); PageImpl> tableDataPage = new PageImpl<>(null, "c", ROWS); expect(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.maxResults(10L))) .andReturn(tableDataPage); replay(bigquery); + initializeTable(); Page> dataPage = table.list(BigQuery.TableDataListOption.maxResults(10L)); Iterator> tableDataIterator = tableDataPage.values().iterator(); Iterator> dataIterator = dataPage.values().iterator(); @@ -239,85 +298,106 @@ public void testListWithOptions() throws Exception { @Test public void testCopyFromString() throws Exception { - expect(bigquery.create(COPY_JOB_INFO)).andReturn(COPY_JOB_INFO); + initializeExpectedTable(2); + expect(bigquery.options()).andReturn(mockOptions); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(COPY_JOB_INFO)); + expect(bigquery.create(COPY_JOB_INFO)) + .andReturn(expectedJob); replay(bigquery); + initializeTable(); Job job = table.copy(TABLE_ID2.dataset(), TABLE_ID2.table()); - assertSame(bigquery, job.bigquery()); - assertEquals(COPY_JOB_INFO, job.info()); + assertSame(expectedJob, job); } @Test public void testCopyFromId() throws Exception { - expect(bigquery.create(COPY_JOB_INFO)).andReturn(COPY_JOB_INFO); + initializeExpectedTable(2); + expect(bigquery.options()).andReturn(mockOptions); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(COPY_JOB_INFO)); + expect(bigquery.create(COPY_JOB_INFO)).andReturn(expectedJob); replay(bigquery); - Job job = table.copy(TABLE_ID2); - assertSame(bigquery, job.bigquery()); - assertEquals(COPY_JOB_INFO, job.info()); + initializeTable(); + Job job = table.copy(TABLE_ID2.dataset(), TABLE_ID2.table()); + assertSame(expectedJob, job); } @Test public void testLoadDataUri() throws Exception { - expect(bigquery.create(LOAD_JOB_INFO)).andReturn(LOAD_JOB_INFO); + initializeExpectedTable(2); + expect(bigquery.options()).andReturn(mockOptions); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(LOAD_JOB_INFO)); + expect(bigquery.create(LOAD_JOB_INFO)).andReturn(expectedJob); replay(bigquery); + initializeTable(); Job job = table.load(FormatOptions.json(), "URI"); - assertSame(bigquery, job.bigquery()); - assertEquals(LOAD_JOB_INFO, job.info()); + assertSame(expectedJob, job); } 
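The rewritten copy/load tests above rely on `Table`'s helpers returning functional `Job` objects, which can then be polled with `isDone()` instead of re-fetching `JobInfo` by id. A rough sketch of how calling code might combine them; the dataset, table, and bucket names are made-up placeholders, and the `Table` could equally come from a `Dataset`'s `get(tableName)`:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.FormatOptions;
import com.google.gcloud.bigquery.Job;
import com.google.gcloud.bigquery.Table;

public class TableJobSketch {
  public static void main(String... args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // Look up an existing table (placeholder names)
    Table table = Table.get(bigquery, "example_dataset", "example_table");
    // Load JSON data from GCS; the returned Job is a functional object
    Job loadJob = table.load(FormatOptions.json(), "gs://example-bucket/data.json");
    while (!loadJob.isDone()) {
      Thread.sleep(1000L);
    }
    if (loadJob.status().error() != null) {
      System.out.println("Load completed with errors");
    }
    // Copy into a sibling table; copy(dataset, table) also returns a Job
    Job copyJob = table.copy("example_dataset", "example_table_copy");
    while (!copyJob.isDone()) {
      Thread.sleep(1000L);
    }
  }
}
```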
   @Test
   public void testLoadDataUris() throws Exception {
-    expect(bigquery.create(LOAD_JOB_INFO)).andReturn(LOAD_JOB_INFO);
+    initializeExpectedTable(2);
+    expect(bigquery.options()).andReturn(mockOptions);
+    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(LOAD_JOB_INFO));
+    expect(bigquery.create(LOAD_JOB_INFO)).andReturn(expectedJob);
     replay(bigquery);
+    initializeTable();
     Job job = table.load(FormatOptions.json(), ImmutableList.of("URI"));
-    assertSame(bigquery, job.bigquery());
-    assertEquals(LOAD_JOB_INFO, job.info());
+    assertSame(expectedJob, job);
   }
 
   @Test
   public void testExtractDataUri() throws Exception {
-    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(EXTRACT_JOB_INFO);
+    initializeExpectedTable(2);
+    expect(bigquery.options()).andReturn(mockOptions);
+    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(EXTRACT_JOB_INFO));
+    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(expectedJob);
     replay(bigquery);
+    initializeTable();
     Job job = table.extract("CSV", "URI");
-    assertSame(bigquery, job.bigquery());
-    assertEquals(EXTRACT_JOB_INFO, job.info());
+    assertSame(expectedJob, job);
   }
 
   @Test
   public void testExtractDataUris() throws Exception {
-    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(EXTRACT_JOB_INFO);
+    initializeExpectedTable(2);
+    expect(bigquery.options()).andReturn(mockOptions);
+    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(EXTRACT_JOB_INFO));
+    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(expectedJob);
     replay(bigquery);
+    initializeTable();
     Job job = table.extract("CSV", ImmutableList.of("URI"));
-    assertSame(bigquery, job.bigquery());
-    assertEquals(EXTRACT_JOB_INFO, job.info());
+    assertSame(expectedJob, job);
   }
 
   @Test
   public void testGetFromId() throws Exception {
-    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(TABLE_INFO);
+    initializeExpectedTable(3);
+    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(expectedTable);
     replay(bigquery);
     Table loadedTable = Table.get(bigquery, TABLE_INFO.tableId());
-    assertNotNull(loadedTable);
-    assertEquals(TABLE_INFO, loadedTable.info());
+    compareTable(expectedTable, loadedTable);
   }
 
   @Test
   public void testGetFromStrings() throws Exception {
-    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(TABLE_INFO);
+    initializeExpectedTable(3);
+    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(expectedTable);
     replay(bigquery);
     Table loadedTable = Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table());
-    assertNotNull(loadedTable);
-    assertEquals(TABLE_INFO, loadedTable.info());
+    compareTable(expectedTable, loadedTable);
  }
 
   @Test
   public void testGetFromIdNull() throws Exception {
+    initializeExpectedTable(1);
     expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null);
     replay(bigquery);
-    assertNull(Table.get(bigquery, TABLE_INFO.tableId()));
+    assertNull(Table.get(bigquery, TABLE_ID1));
   }
 
   @Test
   public void testGetFromStringsNull() throws Exception {
+    initializeExpectedTable(1);
     expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null);
     replay(bigquery);
     assertNull(Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table()));
@@ -325,22 +405,51 @@ public void testGetFromStringsNull() throws Exception {
 
   @Test
   public void testGetFromIdWithOptions() throws Exception {
+    initializeExpectedTable(3);
     expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields()))
-        .andReturn(TABLE_INFO);
+        .andReturn(expectedTable);
     replay(bigquery);
     Table loadedTable = Table.get(bigquery, TABLE_INFO.tableId(), BigQuery.TableOption.fields());
-    assertNotNull(loadedTable);
-    assertEquals(TABLE_INFO, loadedTable.info());
+    compareTable(expectedTable, loadedTable);
   }
 
   @Test
   public void testGetFromStringsWithOptions() throws Exception {
+    initializeExpectedTable(3);
     expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields()))
-        .andReturn(TABLE_INFO);
+        .andReturn(expectedTable);
     replay(bigquery);
     Table loadedTable = Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table(),
         BigQuery.TableOption.fields());
-    assertNotNull(loadedTable);
-    assertEquals(TABLE_INFO, loadedTable.info());
+    compareTable(expectedTable, loadedTable);
+  }
+
+  @Test
+  public void testBigquery() {
+    initializeExpectedTable(1);
+    replay(bigquery);
+    assertSame(serviceMockReturnsOptions, expectedTable.bigquery());
+  }
+
+  private void compareTable(Table expected, Table value) {
+    assertEquals(expected, value);
+    compareTableInfo(expected, value);
+    assertEquals(expected.bigquery().options(), value.bigquery().options());
+  }
+
+  private void compareTableInfo(TableInfo expected, TableInfo value) {
+    assertEquals(expected, value);
+    assertEquals(expected.tableId(), value.tableId());
+    assertEquals(expected.definition(), value.definition());
+    assertEquals(expected.creationTime(), value.creationTime());
+    assertEquals(expected.description(), value.description());
+    assertEquals(expected.etag(), value.etag());
+    assertEquals(expected.expirationTime(), value.expirationTime());
+    assertEquals(expected.friendlyName(), value.friendlyName());
+    assertEquals(expected.id(), value.id());
+    assertEquals(expected.lastModifiedTime(), value.lastModifiedTime());
+    assertEquals(expected.selfLink(), value.selfLink());
+    assertEquals(expected.definition(), value.definition());
+    assertEquals(expected.hashCode(), value.hashCode());
   }
 }
diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
index 1b1478eb5be5..ee42a0732a88 100644
--- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
+++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
@@ -22,6 +22,7 @@
 import com.google.gcloud.bigquery.BigQueryError;
 import com.google.gcloud.bigquery.BigQueryOptions;
 import com.google.gcloud.bigquery.CopyJobConfiguration;
+import com.google.gcloud.bigquery.Dataset;
 import com.google.gcloud.bigquery.DatasetId;
 import com.google.gcloud.bigquery.DatasetInfo;
 import com.google.gcloud.bigquery.ExternalTableDefinition;
@@ -29,14 +30,15 @@
 import com.google.gcloud.bigquery.Field;
 import com.google.gcloud.bigquery.FieldValue;
 import com.google.gcloud.bigquery.FormatOptions;
+import com.google.gcloud.bigquery.Job;
 import com.google.gcloud.bigquery.JobId;
 import com.google.gcloud.bigquery.JobInfo;
-import com.google.gcloud.bigquery.JobStatus;
 import com.google.gcloud.bigquery.LoadJobConfiguration;
 import com.google.gcloud.bigquery.QueryRequest;
 import com.google.gcloud.bigquery.QueryResponse;
 import com.google.gcloud.bigquery.Schema;
 import com.google.gcloud.bigquery.StandardTableDefinition;
+import com.google.gcloud.bigquery.Table;
 import com.google.gcloud.bigquery.TableId;
 import com.google.gcloud.bigquery.TableInfo;
 import com.google.gcloud.bigquery.ViewDefinition;
@@ -176,7 +178,7 @@ Void parse(String... args) throws Exception {
   private static class ListDatasetsAction extends NoArgsAction {
     @Override
     public void run(BigQuery bigquery, Void arg) {
-      Iterator<DatasetInfo> datasetInfoIterator = bigquery.listDatasets().iterateAll();
+      Iterator<Dataset> datasetInfoIterator = bigquery.listDatasets().iterateAll();
       while (datasetInfoIterator.hasNext()) {
         System.out.println(datasetInfoIterator.next());
       }
@@ -211,7 +213,7 @@ public String params() {
   private static class ListTablesAction extends DatasetAction {
     @Override
     public void run(BigQuery bigquery, DatasetId datasetId) {
-      Iterator<TableInfo> tableInfoIterator = bigquery.listTables(datasetId).iterateAll();
+      Iterator<Table> tableInfoIterator = bigquery.listTables(datasetId).iterateAll();
       while (tableInfoIterator.hasNext()) {
         System.out.println(tableInfoIterator.next());
       }
@@ -355,7 +357,7 @@ public String params() {
   private static class ListJobsAction extends NoArgsAction {
     @Override
     public void run(BigQuery bigquery, Void arg) {
-      Iterator<JobInfo> datasetInfoIterator = bigquery.listJobs().iterateAll();
+      Iterator<Job> datasetInfoIterator = bigquery.listJobs().iterateAll();
       while (datasetInfoIterator.hasNext()) {
         System.out.println(datasetInfoIterator.next());
       }
@@ -521,11 +523,10 @@ private abstract static class JobRunAction extends BigQueryAction {
     @Override
     void run(BigQuery bigquery, JobInfo job) throws Exception {
       System.out.println("Creating job");
-      JobInfo startedJob = bigquery.create(job);
-      while (startedJob.status().state() != JobStatus.State.DONE) {
+      Job startedJob = bigquery.create(job);
+      while (!startedJob.isDone()) {
         System.out.println("Waiting for job " + startedJob.jobId().job() + " to complete");
         Thread.sleep(1000L);
-        startedJob = bigquery.getJob(startedJob.jobId());
       }
       if (startedJob.status().error() == null) {
         System.out.println("Job " + startedJob.jobId().job() + " succeeded");