Get rid of "unused variable" warnings #31876

Merged: 31 commits, Sep 26, 2018

Commits
3fd9cf6
WIP remove unused variable warnings
Jul 6, 2018
6ff4802
Remove TODO
Jul 9, 2018
efc0a46
iter
Jul 9, 2018
ae0b69d
Remove version variables, leftovers from 6861d357
Jul 10, 2018
ebd5f95
Merge branch 'master' into remove-unused-variables
Jul 10, 2018
076c36c
ScoreMode isn't used anymore, it's CombineFunctions now
Jul 10, 2018
a2ec6cd
iter
Jul 10, 2018
d7b5e41
iter
Jul 10, 2018
77820c7
Remove loop that is never going to be executed
Jul 10, 2018
ca69bdc
Remove unused params from SSource and Walker
Jul 10, 2018
793fbe0
Merge branch 'master' into remove-unused-variables
Jul 10, 2018
4aa37f2
Merge branch 'master' into remove-unused-variables
Jul 11, 2018
b276197
Merge branch 'master' into remove-unused-variables
Jul 11, 2018
ef4af7d
Merge branch 'master' into remove-unused-variables
Jul 16, 2018
4d58df5
Merge branch 'master' into remove-unused-variables
Jul 17, 2018
9a4c0cb
Merge branch 'master' into remove-unused-variables
Jul 18, 2018
2720ae3
Re-add accidentally removed import
Jul 18, 2018
4bb6045
Merge branch 'master' into remove-unused-variables
Jul 20, 2018
fb96b0c
Merge branch 'master' into remove-unused-variables
Jul 24, 2018
c61e6e9
Merge branch 'master' into remove-unused-variables
Aug 6, 2018
643f2b5
Merge branch 'master' into remove-unused-variables
Aug 8, 2018
95f1adf
Merge branch 'master' into remove-unused-variables
Aug 27, 2018
fd344db
Merge branch 'master' into remove-unused-variables
Sep 3, 2018
95d8624
Merge branch 'master' into remove-unused-variables
Sep 17, 2018
ff0d7f4
Merge branch 'master' into remove-unused-variables
Sep 20, 2018
52979f1
Merge branch 'master' into remove-unused-variables
Sep 25, 2018
654f350
iteration: addressing comments
Sep 25, 2018
f407932
Muting MovAvgIT#testBadModelParams
Sep 25, 2018
21118b1
iter
Sep 25, 2018
460a7fa
Revert removing code in TemplateUpdateServiceTests
Sep 25, 2018
a5cc0c8
Merge branch 'master' into remove-unused-variables
Sep 26, 2018
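
The diffs below follow a handful of recurring cleanup patterns: delete locals and parameters that are never read, and drop bindings that exist only to silence the compiler. A minimal sketch of the unused-local case, assuming the binding has no side effects (class and method names are illustrative, not taken from the PR):

public class UnusedLocalSketch {

    // Before: the local is never read, so javac/IDE inspections flag it as unused;
    // annotating it with @SuppressWarnings("unused") only hides the problem.
    static int sumBefore(java.util.List<Integer> values) {
        @SuppressWarnings("unused")
        int count = values.size(); // never used below
        return values.stream().mapToInt(Integer::intValue).sum();
    }

    // After: the unused local is removed outright; behaviour is unchanged.
    static int sumAfter(java.util.List<Integer> values) {
        return values.stream().mapToInt(Integer::intValue).sum();
    }

    public static void main(String[] args) {
        java.util.List<Integer> values = java.util.Arrays.asList(1, 2, 3);
        System.out.println(sumBefore(values) == sumAfter(values)); // prints true
    }
}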
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ public void setupCluster() throws Exception {
String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length];
Control control = categoryToControl.get(category);
if (control == null) {
categoryToControl.put(category, control = new Control(category));
categoryToControl.put(category, control = new Control());
}
control.articleIds.add(id);
}
Expand Down Expand Up @@ -457,13 +457,8 @@ public void testPostCollectAllLeafReaders() throws Exception {
}

private static final class Control {

final Set<String> articleIds = new HashSet<>();
final Set<String> commentIds = new HashSet<>();
final Map<String, Set<String>> commenterToCommentId = new HashMap<>();

private Control(String category) {
// category not used any further
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1036,7 +1036,7 @@ private static class ScoreTerm {
this.score = score;
}

public void update(String word, String topField, float score) {
void update(String word, String topField, float score) {
this.word = word;
this.topField = topField;
this.score = score;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -309,10 +309,8 @@ public void parse(ParseContext context) throws IOException {
token = context.parser().nextToken();
Member
This is highlighted as unused in my IDE.

Member Author
That's weird, it doesn't show up in my IDE and token is definitely used here. Can you check again?

double lat = context.parser().doubleValue();
token = context.parser().nextToken();
@SuppressWarnings("unused")
Double alt = Double.NaN;
if (token == XContentParser.Token.VALUE_NUMBER) {
alt = GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue());
GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue());
} else if (token != XContentParser.Token.END_ARRAY) {
throw new ElasticsearchParseException("[{}] field type does not accept > 3 dimensions", CONTENT_TYPE);
}
Expand Down
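
The geo-point hunk above shows a second flavour of cleanup: GeoPoint.assertZValue is invoked purely for the validation it performs, so the @SuppressWarnings("unused") binding to alt can go and the return value is simply discarded. A rough standalone sketch of that pattern; assertThirdDimension is an invented stand-in for GeoPoint.assertZValue, not the real API:

public class DiscardedReturnSketch {

    // Stand-in validator: throws if a third coordinate is not allowed, otherwise returns it.
    static double assertThirdDimension(boolean ignoreZValue, double value) {
        if (ignoreZValue == false) {
            throw new IllegalArgumentException("found Z value [" + value + "] but ignore_z_value is false");
        }
        return value;
    }

    public static void main(String[] args) {
        // Before: @SuppressWarnings("unused") Double alt = assertThirdDimension(true, 42.0);
        // After: call it only for its validation side effect and drop the result.
        assertThirdDimension(true, 42.0);
        System.out.println("z value accepted");
    }
}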
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,13 @@ public CompositeAggregationBuilder size(int size) {
return this;
}

/**
* @return the number of composite buckets. Defaults to {@code 10}.
*/
public int size() {
return size;
}

@Override
protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subfactoriesBuilder) throws IOException {
Expand Down
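
The new size() accessor simply exposes the value set by the fluent size(int) setter; later in this PR it lets the testLimit test assert on the configured page size (assertEquals(size, composite.size())) instead of leaving that check commented out. A generic sketch of the setter/getter pair on a builder (PageSizeBuilderSketch is illustrative and only mirrors the shape of the diff):

public class PageSizeBuilderSketch {

    private int size = 10; // default, matching the documented default of 10 buckets

    // Fluent setter in the style of CompositeAggregationBuilder#size(int).
    public PageSizeBuilderSketch size(int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("size must be positive, got " + size);
        }
        this.size = size;
        return this;
    }

    // Accessor added so callers such as tests can read the configured value back.
    public int size() {
        return size;
    }

    public static void main(String[] args) {
        System.out.println(new PageSizeBuilderSketch().size(25).size()); // prints 25
    }
}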
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,12 @@
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
Expand All @@ -55,7 +53,6 @@
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
Expand Down Expand Up @@ -297,51 +294,6 @@ public void testClusterStateUpdate() throws InterruptedException {
return null;
}).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class));

new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
Arrays.asList(
templates -> {
assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template")
.patterns(Collections.singletonList("*")).build()));
return templates;
},
templates -> {
assertNotNull(templates.remove("removed_test_template"));
return templates;
},
templates -> {
assertNotNull(templates.put("changed_test_template", IndexTemplateMetaData.builder("changed_test_template")
.patterns(Collections.singletonList("*")).order(10).build()));
return templates;
}
)) {

@Override
void tryFinishUpgrade(AtomicBoolean anyUpgradeFailed) {
super.tryFinishUpgrade(anyUpgradeFailed);
finishInvocation.release();
}

@Override
void upgradeTemplates(Map<String, BytesReference> changes, Set<String> deletions) {
super.upgradeTemplates(changes, deletions);
updateInvocation.release();
}

@Override
Optional<Tuple<Map<String, BytesReference>, Set<String>>>
calculateTemplateChanges(ImmutableOpenMap<String, IndexTemplateMetaData> templates) {
final Optional<Tuple<Map<String, BytesReference>, Set<String>>> ans = super.calculateTemplateChanges(templates);
calculateInvocation.release();
return ans;
}

@Override
public void clusterChanged(ClusterChangedEvent event) {
super.clusterChanged(event);
changedInvocation.release();
}
};

ClusterState prevState = ClusterState.EMPTY_STATE;
ClusterState state = ClusterState.builder(prevState).nodes(DiscoveryNodes.builder()
.add(new DiscoveryNode("node1", "node1", buildNewFakeTransportAddress(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -298,10 +298,10 @@ public void testParsingAndToQuery12() throws IOException {
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
}

private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit) throws IOException {
// just parse the query
private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit)
throws IOException {
parseQuery(query).toQuery(createShardContext());
// TODO: what can we check?
// TODO: what can we check? See https://github.com/elastic/elasticsearch/issues/34043
Member
Thanks for creating the issue!

}

public void testFromJson() throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.test.transport.StubbableTransport;
Expand Down Expand Up @@ -550,7 +549,6 @@ public void testDisconnectsWhileRecovering() throws Exception {
final Settings nodeSettings = Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms")
.put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s")
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again
.build();
// start a master node
internalCluster().startNode(nodeSettings);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -979,9 +979,7 @@ public void testTwoMovAvgsWithPredictions() {
}

public void testBadModelParams() {
try {
@SuppressWarnings("unused")
SearchResponse response = client()
expectThrows(SearchPhaseExecutionException.class, () -> client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
Expand All @@ -991,11 +989,7 @@ public void testBadModelParams() {
.window(10)
.modelBuilder(randomModelBuilder(100))
.gapPolicy(gapPolicy))
).execute().actionGet();
} catch (SearchPhaseExecutionException e) {
// All good
}

).execute().actionGet());
}

public void testHoltWintersMinimization() {
Expand Down
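
The MovAvgIT hunk above replaces a try/catch that silently swallowed the expected failure with expectThrows, which fails the test when nothing is thrown and returns the exception for further assertions. A self-contained sketch of the pattern; BadParamsSketchTests and checkWindow are invented for illustration, while expectThrows itself is inherited from LuceneTestCase via ESTestCase:

import org.elasticsearch.test.ESTestCase;

public class BadParamsSketchTests extends ESTestCase {

    public void testRejectsNonPositiveWindow() {
        // Unlike the old try/catch-and-ignore, this fails if no exception is thrown.
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> checkWindow(0));
        assertTrue(e.getMessage().contains("window"));
    }

    // Stand-in for the model-parameter validation exercised by testBadModelParams.
    private static void checkWindow(int window) {
        if (window <= 0) {
            throw new IllegalArgumentException("window must be a positive integer, got [" + window + "]");
        }
    }
}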
Original file line number Diff line number Diff line change
Expand Up @@ -974,7 +974,6 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi
suggest.size(1);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
// assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
}

public void testSuggestWithFieldAlias() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ public void testThreadNames() throws Exception {
}
}
logger.info("pre node threads are {}", preNodeStartThreadNames);
internalCluster().startNode();
logger.info("do some indexing, flushing, optimize, and searches");
int numDocs = randomIntBetween(2, 100);
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,6 @@ public class MockFSDirectoryService extends FsDirectoryService {
Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
public static final Setting<Double> RANDOM_IO_EXCEPTION_RATE_SETTING =
Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> RANDOM_PREVENT_DOUBLE_WRITE_SETTING =
Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> RANDOM_NO_DELETE_OPEN_FILE_SETTING =
Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> CRASH_INDEX_SETTING =
Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,6 @@ public List<Setting<?>> getSettings() {
return Arrays.asList(INDEX_CHECK_INDEX_ON_CLOSE_SETTING,
MockFSDirectoryService.CRASH_INDEX_SETTING,
MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING,
MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING,
MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING,
MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING);
}

Expand All @@ -86,6 +84,7 @@ public void onIndexModule(IndexModule indexModule) {
super(indexSettings);
}

@Override
public DirectoryService newDirectoryService(ShardPath path) {
return new MockFSDirectoryService(indexSettings, this, path);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,13 +61,12 @@ public void testQueryFilter() {

public void testLimit() {
QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByColumnKey("1", "field")));
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
int size = randomIntBetween(1, 10);
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, size);
Builder aggBuilder = sourceBuilder.aggregations();
assertEquals(1, aggBuilder.count());
@SuppressWarnings("unused")
CompositeAggregationBuilder composite = (CompositeAggregationBuilder) aggBuilder.getAggregatorFactories().get(0);
// TODO: cannot access size
//assertEquals(10, composite.size());
assertEquals(size, composite.size());
}

public void testSortNoneSpecified() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.watcher.notification.pagerduty;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
Expand All @@ -30,7 +29,7 @@ public class PagerDutyAccount {
private final HttpClient httpClient;
private final IncidentEventDefaults eventDefaults;

PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, Logger logger) {
PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient) {
this.name = name;
this.serviceKey = getServiceKey(name, accountSettings, serviceSettings);
this.httpClient = httpClient;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSetting

@Override
protected PagerDutyAccount createAccount(String name, Settings accountSettings) {
return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient, logger);
return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient);
}

public static List<Setting<?>> getSettings() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.watcher.test;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
Expand Down Expand Up @@ -181,7 +180,7 @@ protected boolean timeWarped() {
public void _setup() throws Exception {
if (timeWarped()) {
timeWarp = new TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class),
(ClockMock)getInstanceFromMaster(Clock.class), logger);
(ClockMock)getInstanceFromMaster(Clock.class));
}

if (internalCluster().size() > 0) {
Expand Down Expand Up @@ -542,7 +541,7 @@ protected static class TimeWarp {
private final List<ScheduleTriggerEngineMock> schedulers;
private final ClockMock clock;

TimeWarp(Iterable<ScheduleTriggerEngineMock> schedulers, ClockMock clock, Logger logger) {
TimeWarp(Iterable<ScheduleTriggerEngineMock> schedulers, ClockMock clock) {
this.schedulers = StreamSupport.stream(schedulers.spliterator(), false).collect(Collectors.toList());
this.clock = clock;
}
Expand Down