Replace ImmutableMap.Builder.build() with buildOrThrow()
Starting with Guava 31, the `build()` method is discouraged and will be
deprecated in upcoming releases. See
google/guava@4bbe12c
for context.

The change itself was made mechanically with IntelliJ's structural search
and replace. Line breaks were then fixed with a regular search for
`^(\s+)(.put(All)?\(.*\))(\.buildOrThrow\(\))`, occurrences of
`).buildOrThrow()` were fixed manually, and end-of-line breaks that the
structural search/replace had removed were found in the diff and restored
manually.
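
For illustration, a minimal sketch of the pattern this commit migrates (the class and property names below are hypothetical, not taken from the diff): `buildOrThrow()` behaves exactly like the old `build()` — both throw `IllegalArgumentException` on duplicate keys — but states that behavior in its name, and Guava 31 also adds `buildKeepingLast()` for call sites where the last value for a duplicate key should win.

```java
import com.google.common.collect.ImmutableMap;

import java.util.List;
import java.util.Map;

public class BuildOrThrowSketch
{
    // The shape of most call sites in this commit: fill a builder, then build the map.
    public static Map<String, String> toProperties(List<Map.Entry<String, String>> entries)
    {
        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
        for (Map.Entry<String, String> entry : entries) {
            builder.put(entry.getKey(), entry.getValue());
        }
        // Previously: return builder.build();
        // buildOrThrow() is identical in behavior (duplicate keys throw
        // IllegalArgumentException); the rename only makes that explicit.
        return builder.buildOrThrow();
    }
}
```

With this sketch, passing two entries that share a key makes `buildOrThrow()` fail fast instead of silently overwriting, which is the behavior the explicit name documents.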
findepi committed Jan 18, 2022
1 parent 4d0d1cf commit 96133c7
Showing 494 changed files with 920 additions and 923 deletions.
@@ -244,7 +244,7 @@ public static Map<String, String> toProperties(List<ClientSessionProperty> sessi
}
builder.put(name, sessionProperty.getValue());
}
-return builder.build();
+return builder.buildOrThrow();
}

public static Map<String, String> toResourceEstimates(List<ClientResourceEstimate> estimates)
@@ -253,7 +253,7 @@ public static Map<String, String> toResourceEstimates(List<ClientResourceEstimat
for (ClientResourceEstimate estimate : estimates) {
builder.put(estimate.getResource(), estimate.getEstimate());
}
-return builder.build();
+return builder.buildOrThrow();
}

public static Map<String, String> toExtraCredentials(List<ClientExtraCredential> extraCredentials)
@@ -262,7 +262,7 @@ public static Map<String, String> toExtraCredentials(List<ClientExtraCredential>
for (ClientExtraCredential credential : extraCredentials) {
builder.put(credential.getName(), credential.getValue());
}
-return builder.build();
+return builder.buildOrThrow();
}

public static final class ClientResourceEstimate
@@ -110,7 +110,7 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String name)
principal.ifPresent(value -> options.put("principal", value));

return new AppConfigurationEntry[] {
-new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options.build())
+new AppConfigurationEntry(Krb5LoginModule.class.getName(), REQUIRED, options.buildOrThrow())
};
}
});
@@ -141,7 +141,7 @@ abstract class AbstractTrinoResultSet
.put("interval day to second", TrinoIntervalDayTime.class)
.put("map", Map.class)
.put("row", Row.class)
-.build();
+.buildOrThrow();

@VisibleForTesting
static final TypeConversions TYPE_CONVERSIONS =
@@ -131,7 +131,7 @@ enum SslVerificationMode
for (ConnectionProperty<?> property : ALL_PROPERTIES) {
property.getDefault().ifPresent(value -> defaults.put(property.getKey(), value));
}
-DEFAULTS = defaults.build();
+DEFAULTS = defaults.buildOrThrow();
}

private ConnectionProperties() {}
@@ -227,7 +227,7 @@ private Map<String, String> getStatementSessionProperties()
if (queryTimeoutSeconds.get() > 0) {
sessionProperties.put("query_max_run_time", queryTimeoutSeconds.get() + "s");
}
-return sessionProperties.build();
+return sessionProperties.buildOrThrow();
}

@Override
@@ -91,7 +91,7 @@ public void setupServer()
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", server.getBaseDataDir().resolve("hive").toAbsolutePath().toString())
.put("hive.security", "sql-standard")
-.build());
+.buildOrThrow());
server.installPlugin(new BlackHolePlugin());
server.createCatalog("blackhole", "blackhole", ImmutableMap.of());

@@ -350,7 +350,7 @@ public void testExtraCredentials()
.put("test.token.foo", "bar")
.put("test.token.abc", "xyz")
.put("colon", "-::-")
-.build();
+.buildOrThrow();
TrinoConnection trinoConnection = connection.unwrap(TrinoConnection.class);
assertEquals(trinoConnection.getExtraCredentials(), expectedCredentials);
assertEquals(listExtraCredentials(connection), expectedCredentials);
@@ -591,7 +591,7 @@ private static Map<String, String> listExtraCredentials(Connection connection)
builder.put(rs.getString("name"), rs.getString("value"));
}
}
-return builder.build();
+return builder.buildOrThrow();
}

private static Set<String> listCurrentRoles(Connection connection)
@@ -106,7 +106,7 @@ public void setup()
.put("http-server.https.keystore.path", new File(getResource("localhost.keystore").toURI()).getPath())
.put("http-server.https.keystore.key", "changeit")
.put("web-ui.enabled", "false")
-.build())
+.buildOrThrow())
.build();
server.installPlugin(new TpchPlugin());
server.createCatalog(TEST_CATALOG, "tpch");
@@ -75,7 +75,7 @@ public void setupServer()
.setProperties(ImmutableMap.<String, String>builder()
.put("testing-warning-collector.add-warnings", "true")
.put("testing-warning-collector.preloaded-warnings", String.valueOf(PRELOADED_WARNINGS))
-.build())
+.buildOrThrow())
.build();
server.installPlugin(new BlackHolePlugin());
server.createCatalog("blackhole", "blackhole");
@@ -119,7 +119,7 @@ public void setupServer()
.put("hive.metastore", "file")
.put("hive.metastore.catalog.dir", server.getBaseDataDir().resolve("hive").toAbsolutePath().toString())
.put("hive.security", "sql-standard")
-.build());
+.buildOrThrow());

countingMockConnector = new CountingMockConnector();
server.installPlugin(countingMockConnector.getPlugin());
@@ -81,7 +81,7 @@ public void setup()
.put("http-server.https.enabled", "true")
.put("http-server.https.keystore.path", new File(getResource("localhost.keystore").toURI()).getPath())
.put("http-server.https.keystore.key", "changeit")
-.build())
+.buildOrThrow())
.build();
server.installPlugin(new TpchPlugin());
server.createCatalog(TEST_CATALOG, "tpch");
@@ -60,7 +60,7 @@ public void setup()
.put("http-server.https.enabled", "true")
.put("http-server.https.keystore.path", new File(getResource("localhost.keystore").toURI()).getPath())
.put("http-server.https.keystore.key", "changeit")
-.build())
+.buildOrThrow())
.build();

server.getInstance(Key.get(PasswordAuthenticatorManager.class)).setAuthenticators(TestTrinoDriverImpersonateUser::authenticate);
8 changes: 4 additions & 4 deletions core/trino-main/src/main/java/io/trino/Session.java
@@ -138,13 +138,13 @@ public Session(
connectorProperties.entrySet().stream()
.map(entry -> Maps.immutableEntry(entry.getKey(), ImmutableMap.copyOf(entry.getValue())))
.forEach(catalogPropertiesBuilder::put);
-this.connectorProperties = catalogPropertiesBuilder.build();
+this.connectorProperties = catalogPropertiesBuilder.buildOrThrow();

ImmutableMap.Builder<String, Map<String, String>> unprocessedCatalogPropertiesBuilder = ImmutableMap.builder();
unprocessedCatalogProperties.entrySet().stream()
.map(entry -> Maps.immutableEntry(entry.getKey(), ImmutableMap.copyOf(entry.getValue())))
.forEach(unprocessedCatalogPropertiesBuilder::put);
-this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.build();
+this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.buildOrThrow();

checkArgument(transactionId.isEmpty() || unprocessedCatalogProperties.isEmpty(), "Catalog session properties cannot be set if there is an open transaction");

@@ -352,7 +352,7 @@ public Session beginTransactionId(TransactionId transactionId, TransactionManage
Optional.of(transactionId),
clientTransactionSupport,
Identity.from(identity)
-.withConnectorRoles(connectorRoles.build())
+.withConnectorRoles(connectorRoles.buildOrThrow())
.build(),
source,
catalog,
@@ -369,7 +369,7 @@ public Session beginTransactionId(TransactionId transactionId, TransactionManage
resourceEstimates,
start,
systemProperties,
-connectorProperties.build(),
+connectorProperties.buildOrThrow(),
ImmutableMap.of(),
sessionPropertyManager,
preparedStatements,
@@ -129,13 +129,13 @@ public SessionRepresentation(
for (Entry<CatalogName, Map<String, String>> entry : catalogProperties.entrySet()) {
catalogPropertiesBuilder.put(entry.getKey(), ImmutableMap.copyOf(entry.getValue()));
}
-this.catalogProperties = catalogPropertiesBuilder.build();
+this.catalogProperties = catalogPropertiesBuilder.buildOrThrow();

ImmutableMap.Builder<String, Map<String, String>> unprocessedCatalogPropertiesBuilder = ImmutableMap.builder();
for (Entry<String, Map<String, String>> entry : unprocessedCatalogProperties.entrySet()) {
unprocessedCatalogPropertiesBuilder.put(entry.getKey(), ImmutableMap.copyOf(entry.getValue()));
}
-this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.build();
+this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.buildOrThrow();
}

@JsonProperty
@@ -136,7 +136,7 @@ public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSess
builder.put(tableMetadata.getTable(), tableMetadata.getColumns());
}
}
-return builder.build();
+return builder.buildOrThrow();
}

@Override
@@ -197,7 +197,7 @@ TABLE_SCHEMA_COLUMN, toNullableValue(schemaName))))
.map(CatalogSchemaName::getSchemaName)
.collect(toVarcharDomain())
.simplify(MAX_DOMAIN_SIZE))
-.build());
+.buildOrThrow());
}

List<CatalogSchemaTableName> tables = schemas.stream()
@@ -227,7 +227,7 @@ TABLE_NAME_COLUMN, toNullableValue(schemaTableName.getTableName()))))
.map(catalogSchemaTableName -> catalogSchemaTableName.getSchemaTableName().getTableName())
.collect(toVarcharDomain())
.simplify(MAX_DOMAIN_SIZE))
-.build());
+.buildOrThrow());
}

@Override
@@ -74,7 +74,7 @@ public StatsAndCosts getForSubplan(PlanNode root)
filteredCosts.put(node.getId(), costs.get(node.getId()));
}
}
-return new StatsAndCosts(filteredStats.build(), filteredCosts.build());
+return new StatsAndCosts(filteredStats.buildOrThrow(), filteredCosts.buildOrThrow());
}

public static StatsAndCosts create(PlanNode root, StatsProvider statsProvider, CostProvider costProvider)
@@ -87,15 +87,15 @@ public static StatsAndCosts create(PlanNode root, StatsProvider statsProvider, C
stats.put(node.getId(), statsProvider.getStats(node));
costs.put(node.getId(), costProvider.getCost(node));
}
-return new StatsAndCosts(stats.build(), costs.build());
+return new StatsAndCosts(stats.buildOrThrow(), costs.buildOrThrow());
}

public static StatsAndCosts create(StageInfo stageInfo)
{
ImmutableMap.Builder<PlanNodeId, PlanNodeStatsEstimate> planNodeStats = ImmutableMap.builder();
ImmutableMap.Builder<PlanNodeId, PlanCostEstimate> planNodeCosts = ImmutableMap.builder();
reconstructStatsAndCosts(stageInfo, planNodeStats, planNodeCosts);
-return new StatsAndCosts(planNodeStats.build(), planNodeCosts.build());
+return new StatsAndCosts(planNodeStats.buildOrThrow(), planNodeCosts.buildOrThrow());
}

private static void reconstructStatsAndCosts(
@@ -211,7 +211,7 @@ private SqlTaskExecution(
}
}
}
-this.driverRunnerFactoriesWithSplitLifeCycle = driverRunnerFactoriesWithSplitLifeCycle.build();
+this.driverRunnerFactoriesWithSplitLifeCycle = driverRunnerFactoriesWithSplitLifeCycle.buildOrThrow();
this.driverRunnerFactoriesWithDriverGroupLifeCycle = driverRunnerFactoriesWithDriverGroupLifeCycle.build();
this.driverRunnerFactoriesWithTaskLifeCycle = driverRunnerFactoriesWithTaskLifeCycle.build();

@@ -1176,8 +1176,8 @@ public Status(TaskContext taskContext, Map<Integer, PipelineExecutionStrategy> p
}
this.pipelineWithTaskLifeCycleCount = pipelineWithTaskLifeCycleCount;
this.pipelineWithDriverGroupLifeCycleCount = pipelineWithDriverGroupLifeCycleCount;
-this.perPipelineAndLifespan = perPipelineAndLifespan.build();
-this.perPipeline = perPipeline.build();
+this.perPipelineAndLifespan = perPipelineAndLifespan.buildOrThrow();
+this.perPipeline = perPipeline.buildOrThrow();
}

public synchronized void setNoMoreLifespans()
@@ -186,7 +186,7 @@ public OutputBuffers withBuffer(OutputBufferId bufferId, int partition)
ImmutableMap.<OutputBufferId, Integer>builder()
.putAll(buffers)
.put(bufferId, partition)
-.build());
+.buildOrThrow());
}

public OutputBuffers withBuffers(Map<OutputBufferId, Integer> buffers)
@@ -131,7 +131,7 @@ private static Map<String, NetworkLocation> loadTopologyFile(File topologyFile)

topology.put(parts.get(0), new NetworkLocation(segments));
}
-return topology.build();
+return topology.buildOrThrow();
}

private static RuntimeException invalidFile(int lineNumber, String message)
@@ -40,7 +40,7 @@ public PartitionedOutputBufferManager(PartitioningHandle partitioningHandle, int
}

outputBuffers = createInitialEmptyOutputBuffers(requireNonNull(partitioningHandle, "partitioningHandle is null"))
-.withBuffers(partitions.build())
+.withBuffers(partitions.buildOrThrow())
.withNoMoreBufferIds();
}

@@ -162,7 +162,7 @@ public static PipelinedStageExecution createPipelinedStageExecution(
failureDetector,
executor,
bucketToPartition,
-exchangeSources.build(),
+exchangeSources.buildOrThrow(),
attempt);
execution.initialize();
return execution;
@@ -510,12 +510,12 @@ private static StageManager create(
}
StageManager stageManager = new StageManager(
queryStateMachine,
-stages.build(),
+stages.buildOrThrow(),
coordinatorStagesInTopologicalOrder.build(),
distributedStagesInTopologicalOrder.build(),
rootStageId,
-children.build(),
-parents.build());
+children.buildOrThrow(),
+parents.buildOrThrow());
stageManager.initialize();
return stageManager;
}
@@ -804,7 +804,7 @@ private static Map<PlanFragmentId, OutputBufferManager> createOutputBuffersForSt
}
}

-return result.build();
+return result.buildOrThrow();
}

private static OutputBufferManager createSingleStreamOutputBuffer(SqlStage stage)
@@ -827,7 +827,7 @@ private static Map<PlanFragmentId, Optional<int[]>> createBucketToPartitionForSt
}
}

-return result.build();
+return result.buildOrThrow();
}

private CoordinatorStagesScheduler(
@@ -1171,7 +1171,7 @@ public static PipelinedDistributedStagesScheduler create(
schedulerStats,
stageManager,
executionPolicy.createExecutionSchedule(stageExecutions.values()),
-stageSchedulers.build(),
+stageSchedulers.buildOrThrow(),
ImmutableMap.copyOf(stageExecutions),
dynamicFilterService);
distributedStagesScheduler.initialize();
@@ -1192,7 +1192,7 @@ private static Map<PlanFragmentId, Optional<int[]>> createBucketToPartitionMap(
result.put(childStage.getFragment().getId(), bucketToPartition);
}
}
-return result.build();
+return result.buildOrThrow();
}

private static Optional<int[]> getBucketToPartition(
@@ -1251,7 +1251,7 @@ else if (partitioningHandle.equals(SCALED_WRITER_DISTRIBUTION)) {
result.put(fragmentId, outputBufferManager);
}
}
-return result.build();
+return result.buildOrThrow();
}

private static StageScheduler createStageScheduler(
@@ -107,7 +107,7 @@ public TopologyAwareNodeSelectorFactory(
}

this.placementCounters = placementCounters.build();
-this.placementCountersByName = placementCountersByName.build();
+this.placementCountersByName = placementCountersByName.buildOrThrow();
}

public Map<String, CounterStat> getPlacementCountersByName()
@@ -214,7 +214,7 @@ public Map<ServiceDescriptor, Stats> getStats()
for (MonitoringTask task : tasks.values()) {
builder.put(task.getService(), task.getStats());
}
-return builder.build();
+return builder.buildOrThrow();
}

@VisibleForTesting
@@ -515,7 +515,7 @@ public synchronized Map<String, Double> getRecentFailuresByType()
for (Map.Entry<Class<? extends Throwable>, DecayCounter> entry : failureCountByType.entrySet()) {
builder.put(entry.getKey().getName(), entry.getValue().getCount());
}
-return builder.build();
+return builder.buildOrThrow();
}
}
}