This repository has been archived by the owner on Oct 3, 2023. It is now read-only.

fix(samples): removed env variables and buckets from creating bq. #526

Merged Sep 16, 2022 · 8 commits
Changes from 4 commits
@@ -23,6 +23,8 @@
 import com.google.cloud.bigquery.Field;
 import com.google.cloud.bigquery.Schema;
+import product.setup.ProductsCreateBigqueryTable;
+
 import java.io.BufferedReader;
 import java.io.FileReader;
 import java.io.IOException;
@@ -35,11 +37,9 @@ public static void main(String... args) throws IOException {
     String validEventsTable = "events";
     String invalidEventsTable = "events_some_invalid";
     String eventsSchemaFilePath = "src/main/resources/events_schema.json";
-    String validEventsSourceFile =
-        String.format("gs://%s/user_events.json", EventsCreateGcsBucket.getBucketName());
+    String validEventsSourceFile = ProductsCreateBigqueryTable.class.getResource("/user_events.json").getPath();
     String invalidEventsSourceFile =
-        String.format(
-            "gs://%s/user_events_some_invalid.json", EventsCreateGcsBucket.getBucketName());
+        ProductsCreateBigqueryTable.class.getResource("/user_events_some_invalid.json").getPath();
 
     BufferedReader bufferedReader = new BufferedReader(new FileReader(eventsSchemaFilePath));
     String jsonToString = bufferedReader.lines().collect(Collectors.joining());
@@ -49,8 +49,8 @@ public static void main(String... args) throws IOException {
 
     createBqDataset(dataset);
     createBqTable(dataset, validEventsTable, eventsSchema);
-    uploadDataToBqTable(dataset, validEventsTable, validEventsSourceFile, eventsSchema);
+    uploadDataToBqTable(dataset, validEventsTable, validEventsSourceFile);
     createBqTable(dataset, invalidEventsTable, eventsSchema);
-    uploadDataToBqTable(dataset, invalidEventsTable, invalidEventsSourceFile, eventsSchema);
+    uploadDataToBqTable(dataset, invalidEventsTable, invalidEventsSourceFile);
   }
 }
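
Note (not part of the PR): the hunks above swap gs:// URIs for file paths resolved from the classpath. A minimal sketch of that lookup and its main pitfall, assuming user_events.json is bundled under src/main/resources; the class name ResourcePathSketch is hypothetical:

import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Paths;

public class ResourcePathSketch {
  public static void main(String[] args) throws URISyntaxException {
    // Same lookup the diff uses; returns null if the file is missing from the classpath.
    URL resource = ResourcePathSketch.class.getResource("/user_events.json");
    if (resource == null) {
      throw new IllegalStateException("user_events.json not found on the classpath");
    }
    // getPath(), as used in the diff, keeps URL escaping (e.g. %20 for spaces);
    // converting through a URI yields a clean filesystem path instead.
    String sourceFile = Paths.get(resource.toURI()).toString();
    System.out.println(sourceFile);
  }
}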
samples/interactive-tutorials/src/main/java/product/setup/ProductsCreateBigqueryTable.java
@@ -35,11 +35,9 @@ public static void main(String... args) throws IOException {
     String validProductsTable = "products";
     String invalidProductsTable = "products_some_invalid";
     String productSchemaFilePath = "src/main/resources/product_schema.json";
-    String validProductsSourceFile =
-        String.format("gs://%s/products.json", ProductsCreateGcsBucket.getBucketName());
+    String validProductsSourceFile = ProductsCreateBigqueryTable.class.getResource("/products.json").getPath();
     String invalidProductsSourceFile =
-        String.format(
-            "gs://%s/products_some_invalid.json", ProductsCreateGcsBucket.getBucketName());
+        ProductsCreateBigqueryTable.class.getResource("products_some_invalid.json").getPath();
 
     BufferedReader bufferedReader = new BufferedReader(new FileReader(productSchemaFilePath));
     String jsonToString = bufferedReader.lines().collect(Collectors.joining());
@@ -49,8 +47,8 @@ public static void main(String... args) throws IOException {
 
     createBqDataset(dataset);
     createBqTable(dataset, validProductsTable, productSchema);
-    uploadDataToBqTable(dataset, validProductsTable, validProductsSourceFile, productSchema);
+    uploadDataToBqTable(dataset, validProductsTable, validProductsSourceFile);
     createBqTable(dataset, invalidProductsTable, productSchema);
-    uploadDataToBqTable(dataset, invalidProductsTable, invalidProductsSourceFile, productSchema);
+    uploadDataToBqTable(dataset, invalidProductsTable, invalidProductsSourceFile);
   }
 }
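
One detail worth flagging in the hunk above: getResource("/products.json") resolves from the classpath root, while getResource("products_some_invalid.json") on the last added line has no leading slash and resolves relative to the class's package, i.e. it looks for product/setup/products_some_invalid.json. A small illustrative check, assuming the class lives in package product.setup:

package product.setup;

public class ResourceLookupCheck {
  public static void main(String[] args) {
    // Leading slash: resolved from the classpath root.
    System.out.println(
        ProductsCreateBigqueryTable.class.getResource("/products_some_invalid.json"));
    // No leading slash: resolved relative to this class's package, i.e.
    // product/setup/products_some_invalid.json, which is likely null here.
    System.out.println(
        ProductsCreateBigqueryTable.class.getResource("products_some_invalid.json"));
  }
}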
61 changes: 37 additions & 24 deletions samples/interactive-tutorials/src/main/java/setup/SetupCleanup.java
@@ -23,24 +23,25 @@
 import com.google.api.gax.rpc.NotFoundException;
 import com.google.cloud.ServiceOptions;
 import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQueryException;
-import com.google.cloud.bigquery.DatasetInfo;
 import com.google.cloud.bigquery.BigQueryOptions;
 import com.google.cloud.bigquery.Dataset;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.LegacySQLTypeName;
-import com.google.cloud.bigquery.LoadJobConfiguration;
-import com.google.cloud.bigquery.BigQueryException;
-import com.google.cloud.bigquery.TableId;
 import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.DatasetId;
 import com.google.cloud.bigquery.TableDefinition;
+import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.StandardTableDefinition;
 import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.WriteChannelConfiguration;
-import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobId;
-import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.TableDataWriteChannel;
-import com.google.cloud.bigquery.LegacySQLTypeName;
-import com.google.cloud.bigquery.Field;
-import com.google.cloud.bigquery.FieldList;
-import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
 import com.google.cloud.retail.v2.CreateProductRequest;
 import com.google.cloud.retail.v2.DeleteProductRequest;
 import com.google.cloud.retail.v2.FulfillmentInfo;
@@ -71,7 +72,10 @@
 import com.google.protobuf.Int32Value;
 import com.google.protobuf.Timestamp;
 import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.channels.Channels;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.Instant;
 import java.util.Arrays;
@@ -350,26 +354,35 @@ public static void createBqTable(String datasetName, String tableName, Schema sc
   }
 
   public static void uploadDataToBqTable(
-      String datasetName, String tableName, String sourceUri, Schema schema) {
+      String datasetName, String tableName, String sourceUri) {
     try {
       BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
       TableId tableId = TableId.of(datasetName, tableName);
-      LoadJobConfiguration loadConfig =
-          LoadJobConfiguration.newBuilder(tableId, sourceUri)
-              .setFormatOptions(FormatOptions.json())
-              .setSchema(schema)
-              .build();
-      Job job = bigquery.create(JobInfo.of(loadConfig));
-      job = job.waitFor();
 
+      WriteChannelConfiguration writeChannelConfiguration =
+          WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.json()).build();
+
+      String jobName = "jobId_" + UUID.randomUUID();
+      JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build();
+
+      try (TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+          OutputStream stream = Channels.newOutputStream(writer)) {
+        Files.copy(Paths.get(sourceUri), stream);
+      }
+
+      Job job = bigquery.getJob(jobId);
+      Job completedJob = job.waitFor();
       if (job.isDone()) {
-        System.out.printf("Json from GCS successfully loaded in a table '%s'.%n", tableName);
+        System.out.printf("Json successfully loaded in a table '%s'.%n", tableName);
       } else {
         System.out.println(
-            "BigQuery was unable to load into the table due to an error:"
-                + job.getStatus().getError());
+            "BigQuery was unable to load into the table due to an error:"
+                + job.getStatus().getError());
       }
     } catch (BigQueryException | InterruptedException e) {
       System.out.printf("Column not added during load append: %s%n", e.getMessage());
+    } catch (IOException e) {
+      System.out.printf("Error copying file: %s%n", e.getMessage());
     }
   }
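
For readers adapting the new upload path outside these samples: a self-contained sketch of loading a local newline-delimited JSON file through a TableDataWriteChannel, assuming application-default credentials; the dataset, table, and file names are placeholders. It checks the Job returned by waitFor() rather than the original handle, which is slightly safer than the pattern in the diff above, where completedJob is assigned but job is re-checked:

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.TableDataWriteChannel;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.UUID;

public class LocalJsonLoadSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    TableId tableId = TableId.of("my_dataset", "my_table"); // placeholder names
    WriteChannelConfiguration config =
        WriteChannelConfiguration.newBuilder(tableId)
            .setFormatOptions(FormatOptions.json())
            .build();
    JobId jobId =
        JobId.newBuilder().setLocation("us").setJob("jobId_" + UUID.randomUUID()).build();

    // Stream the local newline-delimited JSON file into the load channel.
    try (TableDataWriteChannel writer = bigquery.writer(jobId, config);
        OutputStream stream = Channels.newOutputStream(writer)) {
      Files.copy(Paths.get("/path/to/data.json"), stream);
    }

    // waitFor() returns the completed job, or null if it no longer exists;
    // inspect that return value instead of the stale handle.
    Job completedJob = bigquery.getJob(jobId).waitFor();
    if (completedJob != null && completedJob.getStatus().getError() == null) {
      System.out.println("Load succeeded.");
    } else {
      System.out.println(
          "Load failed: "
              + (completedJob == null ? "job no longer exists" : completedJob.getStatus().getError()));
    }
  }
}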

Expand Down