🐛 Destination BigQuery Denormalized: Fixed compilation error #5917

Merged

Changes from 3 commits
1 change: 1 addition & 0 deletions .github/workflows/publish-command.yml

@@ -81,6 +81,7 @@ jobs:
   SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }}
   AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }}
   BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }}
+  BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }}
   SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }}
   BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }}
   BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }}
1 change: 1 addition & 0 deletions .github/workflows/test-command.yml

@@ -81,6 +81,7 @@ jobs:
   AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }}
   AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }}
   BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }}
+  BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS }}
   SOURCE_BING_ADS_CREDS: ${{ secrets.SOURCE_BING_ADS_CREDS }}
   BIGQUERY_TEST_CREDS: ${{ secrets.BIGQUERY_TEST_CREDS }}
   BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }}
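Both the publish and test workflows now expose a dedicated BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS secret next to the existing BigQuery ones, so the denormalized connector no longer piggybacks on the plain BigQuery credentials. As a minimal sketch (everything except the variable name is illustrative), a CI step could assert the secret actually reached the job environment like this:

public class SecretEnvSketch {
  public static void main(String[] args) {
    // the variable name matches the workflow entry above; this check itself is hypothetical
    final String creds = System.getenv("BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS");
    if (creds == null || creds.isBlank()) {
      throw new IllegalStateException("BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS is not set");
    }
    System.out.println("secret is available to this CI step");
  }
}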
@@ -71,7 +71,9 @@ protected String getTargetTableName(String streamName) {
   protected AirbyteMessageConsumer getRecordConsumer(BigQuery bigquery,
                                                      Map<AirbyteStreamNameNamespacePair, BigQueryWriteConfig> writeConfigs,
                                                      ConfiguredAirbyteCatalog catalog,
-                                                     Consumer<AirbyteMessage> outputRecordCollector) {
+                                                     Consumer<AirbyteMessage> outputRecordCollector,
+                                                     boolean isGcsUploadingMode,
+                                                     boolean isKeepFilesInGcs) {
     return new BigQueryDenormalizedRecordConsumer(bigquery, writeConfigs, catalog, outputRecordCollector, getNamingResolver());
   }
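This override was the source of the compilation error: the corresponding getRecordConsumer in the parent BigQueryDestination had grown two GCS-related parameters, so the denormalized subclass had to adopt the same signature even though it does not use the new flags. A self-contained toy sketch of the underlying Java rule, with placeholder types standing in for the Airbyte ones:

abstract class Parent {
  // the superclass method gained two boolean flags, as in BigQueryDestination
  abstract String getRecordConsumer(String catalog, boolean isGcsUploadingMode, boolean isKeepFilesInGcs);
}

class Child extends Parent {
  // if this override kept the old single-argument signature, javac would fail:
  // Child would no longer implement the abstract method it inherits
  @Override
  String getRecordConsumer(String catalog, boolean isGcsUploadingMode, boolean isKeepFilesInGcs) {
    return "consumer for " + catalog;
  }
}

public class OverrideSketch {
  public static void main(String[] args) {
    System.out.println(new Child().getRecordConsumer("my_catalog", false, false));
  }
}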
@@ -64,7 +64,7 @@ public BigQueryDenormalizedRecordConsumer(BigQuery bigquery,
                                             ConfiguredAirbyteCatalog catalog,
                                             Consumer<AirbyteMessage> outputRecordCollector,
                                             StandardNameTransformer namingResolver) {
-    super(bigquery, writeConfigs, catalog, outputRecordCollector);
+    super(bigquery, writeConfigs, catalog, outputRecordCollector, false, false);
     this.namingResolver = namingResolver;
     invalidKeys = new HashSet<>();
   }
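The matching fix on the consumer side: BigQueryDenormalizedRecordConsumer passes false for both new flags, which (judging by the flag names; the actual behaviour lives in the parent consumer class) keeps the denormalized path on direct inserts with no GCS staging. A toy sketch of that constructor chaining:

class BaseConsumer {
  protected final boolean isGcsUploadingMode;
  protected final boolean isKeepFilesInGcs;

  BaseConsumer(boolean isGcsUploadingMode, boolean isKeepFilesInGcs) {
    this.isGcsUploadingMode = isGcsUploadingMode;
    this.isKeepFilesInGcs = isKeepFilesInGcs;
  }
}

public class DenormalizedConsumerSketch extends BaseConsumer {
  DenormalizedConsumerSketch() {
    super(false, false); // opt out of GCS uploading and of keeping files in GCS
  }

  public static void main(String[] args) {
    DenormalizedConsumerSketch c = new DenormalizedConsumerSketch();
    System.out.println("gcsUploading=" + c.isGcsUploadingMode + ", keepFiles=" + c.isKeepFilesInGcs);
  }
}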
@@ -107,7 +107,7 @@ void setup(TestInfo info) throws IOException {
     final String credentialsJsonString = new String(Files.readAllBytes(CREDENTIALS_PATH));
     final JsonNode credentialsJson = Jsons.deserialize(credentialsJsonString);

-    final String projectId = credentialsJson.get(BigQueryDestination.CONFIG_PROJECT_ID).asText();
+    final String projectId = credentialsJson.get(BigQueryConsts.CONFIG_PROJECT_ID).asText();
     final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(new ByteArrayInputStream(credentialsJsonString.getBytes()));
     bigquery = BigQueryOptions.newBuilder()
         .setProjectId(projectId)
@@ -124,10 +124,10 @@ void setup(TestInfo info) throws IOException {
     dataset = bigquery.create(datasetInfo);

     config = Jsons.jsonNode(ImmutableMap.builder()
-        .put(BigQueryDestination.CONFIG_PROJECT_ID, projectId)
-        .put(BigQueryDestination.CONFIG_CREDS, credentialsJsonString)
-        .put(BigQueryDestination.CONFIG_DATASET_ID, datasetId)
-        .put(BigQueryDestination.CONFIG_DATASET_LOCATION, datasetLocation)
+        .put(BigQueryConsts.CONFIG_PROJECT_ID, projectId)
+        .put(BigQueryConsts.CONFIG_CREDS, credentialsJsonString)
+        .put(BigQueryConsts.CONFIG_DATASET_ID, datasetId)
+        .put(BigQueryConsts.CONFIG_DATASET_LOCATION, datasetLocation)
         .put(BIG_QUERY_CLIENT_CHUNK_SIZE, 10)
         .build());
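The test now reads its config keys from BigQueryConsts rather than BigQueryDestination, which suggests the shared constants were moved into a dedicated constants class. An assumed shape for it (the constant names come from the diff; the string values are a guess at the usual BigQuery config keys):

public final class BigQueryConsts {
  public static final String CONFIG_PROJECT_ID = "project_id";
  public static final String CONFIG_CREDS = "credentials_json";
  public static final String CONFIG_DATASET_ID = "dataset_id";
  public static final String CONFIG_DATASET_LOCATION = "dataset_location";

  private BigQueryConsts() {} // constants holder, never instantiated
}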
4 changes: 2 additions & 2 deletions tools/bin/ci_credentials.sh

@@ -23,7 +23,7 @@ function write_standard_creds() {

 # Please maintain this organisation and alphabetise.
 write_standard_creds destination-bigquery "$BIGQUERY_INTEGRATION_TEST_CREDS" "credentials.json"
-write_standard_creds destination-bigquery-denormalized "$BIGQUERY_INTEGRATION_TEST_CREDS" "credentials.json"
+write_standard_creds destination-bigquery-denormalized "$BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS" "credentials.json"
 write_standard_creds destination-gcs "$DESTINATION_GCS_CREDS"
 write_standard_creds destination-kvdb "$DESTINATION_KVDB_TEST_CREDS"
 write_standard_creds destination-keen "$DESTINATION_KEEN_TEST_CREDS"
@@ -41,7 +41,7 @@ write_standard_creds destination-snowflake "$SNOWFLAKE_GCS_COPY_INTEGRATION_TEST
 write_standard_creds destination-snowflake "$SNOWFLAKE_S3_COPY_INTEGRATION_TEST_CREDS" "copy_s3_config.json"
 write_standard_creds destination-snowflake "$SNOWFLAKE_INTEGRATION_TEST_CREDS" "insert_config.json"

-write_standard_creds base-normalization "$BIGQUERY_INTEGRATION_TEST_CREDS" "bigquery.json"
+write_standard_creds base-normalization "$BIGQUERY_DENORMALIZED_INTEGRATION_TEST_CREDS" "bigquery.json"
 write_standard_creds base-normalization "$SNOWFLAKE_INTEGRATION_TEST_CREDS" "snowflake.json"
 write_standard_creds base-normalization "$AWS_REDSHIFT_INTEGRATION_TEST_CREDS" "redshift.json"
 write_standard_creds base-normalization "$AWS_ORACLE_INTEGRATION_TEST_CREDS" "oracle.json"
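With ci_credentials.sh now materialising the dedicated secret as the connector's credentials.json, an integration test can read it back before building its config. A minimal sketch, assuming the conventional secrets/ directory (only the file name comes from the script above):

import java.nio.file.Files;
import java.nio.file.Path;

public class LoadDenormalizedCreds {
  public static void main(String[] args) throws Exception {
    // "secrets/credentials.json" is a conventional location; the exact path is
    // an assumption, only the "credentials.json" file name comes from the script
    final Path credsPath = Path.of("secrets/credentials.json");
    final String credentialsJsonString = Files.readString(credsPath);
    System.out.println("read " + credentialsJsonString.length() + " characters of credentials JSON");
  }
}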